code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import autoaugment
import PIL
import glob
import os
import time
from tqdm import tqdm
# AutoAugment: Learning Augmentation Policies from Data, <NAME> and <NAME> and <NAME> Vijay
# Vasudevan and <NAME>, 2019
# https://github.com/DeepVoltaire/AutoAugment
def invoke(num=100):
images = []
for img in glob.glob("dataset/input/*.jpg"):
n = PIL.Image.open(img)
images.append(n)
if not os.path.exists("./dataset/autoaugment_output/"):
os.mkdir("./dataset/autoaugment_output/")
start_time = time.time_ns()
for x in tqdm(range(num)):
policy = autoaugment.CIFAR10Policy() # Also see CIFAR10Policy, SVHNPolicy
transformed = policy(images[x % len(images)])
transformed.save("dataset/autoaugment_output/{}.jpg".format(x))
end_time = time.time_ns()
print("autoaugment took {} milliseconds to run".format((end_time - start_time) / 1_000_000))
|
[
"os.mkdir",
"os.path.exists",
"PIL.Image.open",
"time.time_ns",
"glob.glob",
"autoaugment.CIFAR10Policy"
] |
[((307, 339), 'glob.glob', 'glob.glob', (['"""dataset/input/*.jpg"""'], {}), "('dataset/input/*.jpg')\n", (316, 339), False, 'import glob\n'), ((527, 541), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (539, 541), False, 'import time\n'), ((799, 813), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (811, 813), False, 'import time\n'), ((353, 372), 'PIL.Image.open', 'PIL.Image.open', (['img'], {}), '(img)\n', (367, 372), False, 'import PIL\n'), ((410, 457), 'os.path.exists', 'os.path.exists', (['"""./dataset/autoaugment_output/"""'], {}), "('./dataset/autoaugment_output/')\n", (424, 457), False, 'import os\n'), ((467, 508), 'os.mkdir', 'os.mkdir', (['"""./dataset/autoaugment_output/"""'], {}), "('./dataset/autoaugment_output/')\n", (475, 508), False, 'import os\n'), ((591, 618), 'autoaugment.CIFAR10Policy', 'autoaugment.CIFAR10Policy', ([], {}), '()\n', (616, 618), False, 'import autoaugment\n')]
|
import os
# This shouldn't change... But if it does, we will probably figure it out after a few hours of debugging.
IKON_LOGIN_URL = 'https://account.ikonpass.com/en/login'
# Set your username and password as an environment variables in Powershell:
# $env:IKON_LOGIN_USERNAME = 'YOUR_USERNAME_HERE'
# $env:IKON_LOGIN_PASSWORD = '<PASSWORD>'
IKON_LOGIN_USERNAME = os.getenv('IKON_LOGIN_USERNAME')
IKON_LOGIN_PASSWORD = os.getenv('IKON_LOGIN_PASSWORD')
# This is the driver that is used for Selenium and headless web browsing. The driver version must match your installed
# version of Chrome.
CHROME_DRIVER_LOCATION = './chrome_driver/chromedriver.exe'
# How often in seconds to retry making a reservation
RESERVATION_ATTEMPT_RETRY_INTERVAL_SECONDS = 10
# Set the Twilio credentials as an environment variables in Powershell:
# $env:TWILIO_ACCOUNT_SID = 'SID_HERE'
# $env:TWILIO_AUTH_TOKEN = 'TOKEN_HERE'
TWILIO_ACCOUNT_SID = os.environ['TWILIO_ACCOUNT_SID']
TWILIO_AUTH_TOKEN = os.environ['TWILIO_AUTH_TOKEN']
TWILIO_FROM_NUMBER = '+12058986998'
# Set the to number as an environment variable in Powershell:
# $env:TWILIO_TO_NUMBER = 'TO_NUMBER_HERE'
# Use the following format '=15556781111'
TWILIO_TO_NUMBER = os.environ['TWILIO_TO_NUMBER']
# If you have Crystal Mountain Resort selected as a favourite on the Ikon Pass site, leave this as true, otherwise
# set it to 'False'
CRYSTAL_STARRED = True
|
[
"os.getenv"
] |
[((365, 397), 'os.getenv', 'os.getenv', (['"""IKON_LOGIN_USERNAME"""'], {}), "('IKON_LOGIN_USERNAME')\n", (374, 397), False, 'import os\n'), ((420, 452), 'os.getenv', 'os.getenv', (['"""IKON_LOGIN_PASSWORD"""'], {}), "('IKON_LOGIN_PASSWORD')\n", (429, 452), False, 'import os\n')]
|
from django.urls import reverse
import datetime
import debug # pyflakes:ignore
from ietf.utils.test_utils import TestCase, unicontent
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.meeting.models import Session, ResourceAssociation
from ietf.meeting.factories import MeetingFactory, SessionFactory
from ietf.person.models import Person
from ietf.utils.mail import outbox, empty_outbox
from pyquery import PyQuery
SECR_USER='secretary'
class SreqUrlTests(TestCase):
def test_urls(self):
MeetingFactory(type_id='ietf',date=datetime.date.today())
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.get("/secr/")
self.assertEqual(r.status_code, 200)
r = self.client.get("/secr/sreq/")
self.assertEqual(r.status_code, 200)
testgroup=GroupFactory()
r = self.client.get("/secr/sreq/%s/new/" % testgroup.acronym)
self.assertEqual(r.status_code, 200)
class SessionRequestTestCase(TestCase):
def test_main(self):
meeting = MeetingFactory(type_id='ietf', date=datetime.date.today())
SessionFactory.create_batch(2, meeting=meeting, status_id='sched')
SessionFactory.create_batch(2, meeting=meeting, status_id='unsched')
# An additional unscheduled group comes from make_immutable_base_data
url = reverse('ietf.secr.sreq.views.main')
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
sched = r.context['scheduled_groups']
unsched = r.context['unscheduled_groups']
self.assertEqual(len(unsched),3)
self.assertEqual(len(sched),2)
def test_approve(self):
meeting = MeetingFactory(type_id='ietf', date=datetime.date.today())
ad = Person.objects.get(user__username='ad')
area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group
mars = GroupFactory(parent=area, acronym='mars')
# create session waiting for approval
session = SessionFactory(meeting=meeting, group=mars, status_id='apprw')
url = reverse('ietf.secr.sreq.views.approve', kwargs={'acronym':'mars'})
self.client.login(username="ad", password="<PASSWORD>")
r = self.client.get(url)
self.assertRedirects(r,reverse('ietf.secr.sreq.views.view', kwargs={'acronym':'mars'}))
session = Session.objects.get(pk=session.pk)
self.assertEqual(session.status_id,'appr')
def test_cancel(self):
meeting = MeetingFactory(type_id='ietf', date=datetime.date.today())
ad = Person.objects.get(user__username='ad')
area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group
mars = SessionFactory(meeting=meeting, group__parent=area, group__acronym='mars', status_id='sched').group
url = reverse('ietf.secr.sreq.views.cancel', kwargs={'acronym':'mars'})
self.client.login(username="ad", password="<PASSWORD>")
r = self.client.get(url)
self.assertRedirects(r,reverse('ietf.secr.sreq.views.main'))
sessions = Session.objects.filter(meeting=meeting, group=mars)
self.assertEqual(sessions[0].status_id,'deleted')
def test_edit(self):
meeting = MeetingFactory(type_id='ietf', date=datetime.date.today())
mars = RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars').group
SessionFactory(meeting=meeting,group=mars,status_id='sched',scheduled=datetime.datetime.now())
url = reverse('ietf.secr.sreq.views.edit', kwargs={'acronym':'mars'})
self.client.login(username="marschairman", password="<PASSWORD>")
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
post_data = {'num_session':'2',
'length_session1':'3600',
'length_session2':'3600',
'attendees':'10',
'conflict1':'',
'comments':'need lights',
'submit': 'Continue'}
r = self.client.post(url, post_data, HTTP_HOST='example.com')
self.assertRedirects(r,reverse('ietf.secr.sreq.views.view', kwargs={'acronym':'mars'}))
def test_tool_status(self):
MeetingFactory(type_id='ietf', date=datetime.date.today())
url = reverse('ietf.secr.sreq.views.tool_status')
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
r = self.client.post(url, {'message':'locked', 'submit':'Lock'})
self.assertRedirects(r,reverse('ietf.secr.sreq.views.main'))
class SubmitRequestCase(TestCase):
def test_submit_request(self):
meeting = MeetingFactory(type_id='ietf', date=datetime.date.today())
ad = Person.objects.get(user__username='ad')
area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group
group = GroupFactory(parent=area)
session_count_before = Session.objects.filter(meeting=meeting, group=group).count()
url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym})
confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym})
main_url = reverse('ietf.secr.sreq.views.main')
post_data = {'num_session':'1',
'length_session1':'3600',
'attendees':'10',
'conflict1':'',
'comments':'need projector',
'submit': 'Continue'}
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.post(url,post_data)
self.assertEqual(r.status_code, 200)
post_data['submit'] = 'Submit'
r = self.client.post(confirm_url,post_data)
self.assertRedirects(r, main_url)
session_count_after = Session.objects.filter(meeting=meeting, group=group).count()
self.assertTrue(session_count_after == session_count_before + 1)
# test that second confirm does not add sessions
r = self.client.post(confirm_url,post_data)
self.assertRedirects(r, main_url)
session_count_after = Session.objects.filter(meeting=meeting, group=group).count()
self.assertTrue(session_count_after == session_count_before + 1)
def test_submit_request_invalid(self):
MeetingFactory(type_id='ietf', date=datetime.date.today())
ad = Person.objects.get(user__username='ad')
area = RoleFactory(name_id='ad', person=ad, group__type_id='area').group
group = GroupFactory(parent=area)
url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym})
post_data = {'num_session':'2',
'length_session1':'3600',
'attendees':'10',
'conflict1':'',
'comments':'need projector'}
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.post(url,post_data)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q('#session-request-form')),1)
self.assertTrue('You must enter a length for all sessions' in unicontent(r))
def test_request_notification(self):
meeting = MeetingFactory(type_id='ietf', date=datetime.date.today())
ad = Person.objects.get(user__username='ad')
area = GroupFactory(type_id='area')
RoleFactory(name_id='ad', person=ad, group=area)
group = GroupFactory(acronym='ames', parent=area)
RoleFactory(name_id='chair', group=group, person__user__username='ameschairman')
resource = ResourceAssociation.objects.create(name_id='project')
# Bit of a test data hack - the fixture now has no used resources to pick from
resource.name.used=True
resource.name.save()
url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':group.acronym})
confirm_url = reverse('ietf.secr.sreq.views.confirm',kwargs={'acronym':group.acronym})
len_before = len(outbox)
post_data = {'num_session':'1',
'length_session1':'3600',
'attendees':'10',
'bethere':str(ad.pk),
'conflict1':'',
'comments':'',
'resources': resource.pk,
'submit': 'Continue'}
self.client.login(username="ameschairman", password="<PASSWORD>")
# submit
r = self.client.post(url,post_data)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertTrue('Confirm' in unicode(q("title")))
# confirm
post_data['submit'] = 'Submit'
r = self.client.post(confirm_url,post_data)
self.assertRedirects(r, reverse('ietf.secr.sreq.views.main'))
self.assertEqual(len(outbox),len_before+1)
notification = outbox[-1]
notification_payload = unicode(notification.get_payload(decode=True),"utf-8","replace")
session = Session.objects.get(meeting=meeting,group=group)
self.assertEqual(session.resources.count(),1)
self.assertEqual(session.people_constraints.count(),1)
resource = session.resources.first()
self.assertTrue(resource.desc in notification_payload)
self.assertTrue(ad.ascii_name() in notification_payload)
class LockAppTestCase(TestCase):
def setUp(self):
self.meeting = MeetingFactory(type_id='ietf', date=datetime.date.today(),session_request_lock_message='locked')
self.group = GroupFactory(acronym='mars')
RoleFactory(name_id='chair', group=self.group, person__user__username='marschairman')
SessionFactory(group=self.group,meeting=self.meeting)
def test_edit_request(self):
url = reverse('ietf.secr.sreq.views.edit',kwargs={'acronym':self.group.acronym})
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q(':disabled[name="submit"]')), 1)
def test_view_request(self):
url = reverse('ietf.secr.sreq.views.view',kwargs={'acronym':self.group.acronym})
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.get(url,follow=True)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q(':disabled[name="edit"]')), 1)
def test_new_request(self):
url = reverse('ietf.secr.sreq.views.new',kwargs={'acronym':self.group.acronym})
# try as WG Chair
self.client.login(username="marschairman", password="<PASSWORD>")
r = self.client.get(url, follow=True)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q('#session-request-form')),0)
# try as Secretariat
self.client.login(username="secretary", password="<PASSWORD>")
r = self.client.get(url,follow=True)
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q('#session-request-form')),1)
class NotMeetingCase(TestCase):
def test_not_meeting(self):
MeetingFactory(type_id='ietf',date=datetime.date.today())
group = GroupFactory(acronym='mars')
url = reverse('ietf.secr.sreq.views.no_session',kwargs={'acronym':group.acronym})
self.client.login(username="secretary", password="<PASSWORD>")
empty_outbox()
r = self.client.get(url,follow=True)
# If the view invoked by that get throws an exception (such as an integrity error),
# the traceback from this test will talk about a TransactionManagementError and
# yell about executing queries before the end of an 'atomic' block
# This is a sign of a problem - a get shouldn't have a side-effect like this one does
self.assertEqual(r.status_code, 200)
self.assertTrue('A message was sent to notify not having a session' in unicontent(r))
r = self.client.get(url,follow=True)
self.assertEqual(r.status_code, 200)
self.assertTrue('is already marked as not meeting' in unicontent(r))
self.assertEqual(len(outbox),1)
self.assertTrue('Not having a session' in outbox[0]['Subject'])
self.assertTrue('session-request@' in outbox[0]['To'])
class RetrievePreviousCase(TestCase):
pass
# test error if already scheduled
# test get previous exists/doesn't exist
# test that groups scheduled and unscheduled add up to total groups
# test access by unauthorized
|
[
"ietf.meeting.models.Session.objects.filter",
"ietf.meeting.models.ResourceAssociation.objects.create",
"pyquery.PyQuery",
"ietf.utils.mail.empty_outbox",
"ietf.utils.test_utils.unicontent",
"ietf.meeting.factories.SessionFactory.create_batch",
"ietf.person.models.Person.objects.get",
"datetime.date.today",
"django.urls.reverse",
"ietf.group.factories.RoleFactory",
"ietf.meeting.factories.SessionFactory",
"ietf.group.factories.GroupFactory",
"datetime.datetime.now",
"ietf.meeting.models.Session.objects.get"
] |
[((874, 888), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {}), '()\n', (886, 888), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((1155, 1221), 'ietf.meeting.factories.SessionFactory.create_batch', 'SessionFactory.create_batch', (['(2)'], {'meeting': 'meeting', 'status_id': '"""sched"""'}), "(2, meeting=meeting, status_id='sched')\n", (1182, 1221), False, 'from ietf.meeting.factories import MeetingFactory, SessionFactory\n'), ((1230, 1298), 'ietf.meeting.factories.SessionFactory.create_batch', 'SessionFactory.create_batch', (['(2)'], {'meeting': 'meeting', 'status_id': '"""unsched"""'}), "(2, meeting=meeting, status_id='unsched')\n", (1257, 1298), False, 'from ietf.meeting.factories import MeetingFactory, SessionFactory\n'), ((1391, 1427), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.main"""'], {}), "('ietf.secr.sreq.views.main')\n", (1398, 1427), False, 'from django.urls import reverse\n'), ((1872, 1911), 'ietf.person.models.Person.objects.get', 'Person.objects.get', ([], {'user__username': '"""ad"""'}), "(user__username='ad')\n", (1890, 1911), False, 'from ietf.person.models import Person\n'), ((2008, 2049), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'parent': 'area', 'acronym': '"""mars"""'}), "(parent=area, acronym='mars')\n", (2020, 2049), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((2114, 2176), 'ietf.meeting.factories.SessionFactory', 'SessionFactory', ([], {'meeting': 'meeting', 'group': 'mars', 'status_id': '"""apprw"""'}), "(meeting=meeting, group=mars, status_id='apprw')\n", (2128, 2176), False, 'from ietf.meeting.factories import MeetingFactory, SessionFactory\n'), ((2191, 2258), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.approve"""'], {'kwargs': "{'acronym': 'mars'}"}), "('ietf.secr.sreq.views.approve', kwargs={'acronym': 'mars'})\n", (2198, 2258), False, 'from django.urls import reverse\n'), ((2469, 2503), 'ietf.meeting.models.Session.objects.get', 'Session.objects.get', ([], {'pk': 'session.pk'}), '(pk=session.pk)\n', (2488, 2503), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((2681, 2720), 'ietf.person.models.Person.objects.get', 'Person.objects.get', ([], {'user__username': '"""ad"""'}), "(user__username='ad')\n", (2699, 2720), False, 'from ietf.person.models import Person\n'), ((2931, 2997), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.cancel"""'], {'kwargs': "{'acronym': 'mars'}"}), "('ietf.secr.sreq.views.cancel', kwargs={'acronym': 'mars'})\n", (2938, 2997), False, 'from django.urls import reverse\n'), ((3182, 3233), 'ietf.meeting.models.Session.objects.filter', 'Session.objects.filter', ([], {'meeting': 'meeting', 'group': 'mars'}), '(meeting=meeting, group=mars)\n', (3204, 3233), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((3629, 3693), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.edit"""'], {'kwargs': "{'acronym': 'mars'}"}), "('ietf.secr.sreq.views.edit', kwargs={'acronym': 'mars'})\n", (3636, 3693), False, 'from django.urls import reverse\n'), ((4444, 4487), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.tool_status"""'], {}), "('ietf.secr.sreq.views.tool_status')\n", (4451, 4487), False, 'from django.urls import reverse\n'), ((4948, 4987), 'ietf.person.models.Person.objects.get', 'Person.objects.get', ([], {'user__username': '"""ad"""'}), "(user__username='ad')\n", (4966, 4987), False, 'from ietf.person.models import Person\n'), ((5085, 5110), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'parent': 'area'}), '(parent=area)\n', (5097, 5110), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((5217, 5287), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.new"""'], {'kwargs': "{'acronym': group.acronym}"}), "('ietf.secr.sreq.views.new', kwargs={'acronym': group.acronym})\n", (5224, 5287), False, 'from django.urls import reverse\n'), ((5308, 5382), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.confirm"""'], {'kwargs': "{'acronym': group.acronym}"}), "('ietf.secr.sreq.views.confirm', kwargs={'acronym': group.acronym})\n", (5315, 5382), False, 'from django.urls import reverse\n'), ((5400, 5436), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.main"""'], {}), "('ietf.secr.sreq.views.main')\n", (5407, 5436), False, 'from django.urls import reverse\n'), ((6590, 6629), 'ietf.person.models.Person.objects.get', 'Person.objects.get', ([], {'user__username': '"""ad"""'}), "(user__username='ad')\n", (6608, 6629), False, 'from ietf.person.models import Person\n'), ((6727, 6752), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'parent': 'area'}), '(parent=area)\n', (6739, 6752), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((6767, 6837), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.new"""'], {'kwargs': "{'acronym': group.acronym}"}), "('ietf.secr.sreq.views.new', kwargs={'acronym': group.acronym})\n", (6774, 6837), False, 'from django.urls import reverse\n'), ((7221, 7239), 'pyquery.PyQuery', 'PyQuery', (['r.content'], {}), '(r.content)\n', (7228, 7239), False, 'from pyquery import PyQuery\n'), ((7517, 7556), 'ietf.person.models.Person.objects.get', 'Person.objects.get', ([], {'user__username': '"""ad"""'}), "(user__username='ad')\n", (7535, 7556), False, 'from ietf.person.models import Person\n'), ((7572, 7600), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'type_id': '"""area"""'}), "(type_id='area')\n", (7584, 7600), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((7609, 7657), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""ad"""', 'person': 'ad', 'group': 'area'}), "(name_id='ad', person=ad, group=area)\n", (7620, 7657), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((7674, 7715), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'acronym': '"""ames"""', 'parent': 'area'}), "(acronym='ames', parent=area)\n", (7686, 7715), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((7724, 7809), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""chair"""', 'group': 'group', 'person__user__username': '"""ameschairman"""'}), "(name_id='chair', group=group, person__user__username='ameschairman'\n )\n", (7735, 7809), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((7824, 7877), 'ietf.meeting.models.ResourceAssociation.objects.create', 'ResourceAssociation.objects.create', ([], {'name_id': '"""project"""'}), "(name_id='project')\n", (7858, 7877), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((8041, 8111), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.new"""'], {'kwargs': "{'acronym': group.acronym}"}), "('ietf.secr.sreq.views.new', kwargs={'acronym': group.acronym})\n", (8048, 8111), False, 'from django.urls import reverse\n'), ((8132, 8206), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.confirm"""'], {'kwargs': "{'acronym': group.acronym}"}), "('ietf.secr.sreq.views.confirm', kwargs={'acronym': group.acronym})\n", (8139, 8206), False, 'from django.urls import reverse\n'), ((8762, 8780), 'pyquery.PyQuery', 'PyQuery', (['r.content'], {}), '(r.content)\n', (8769, 8780), False, 'from pyquery import PyQuery\n'), ((9217, 9266), 'ietf.meeting.models.Session.objects.get', 'Session.objects.get', ([], {'meeting': 'meeting', 'group': 'group'}), '(meeting=meeting, group=group)\n', (9236, 9266), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((9752, 9780), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'acronym': '"""mars"""'}), "(acronym='mars')\n", (9764, 9780), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((9789, 9879), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""chair"""', 'group': 'self.group', 'person__user__username': '"""marschairman"""'}), "(name_id='chair', group=self.group, person__user__username=\n 'marschairman')\n", (9800, 9879), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((9883, 9937), 'ietf.meeting.factories.SessionFactory', 'SessionFactory', ([], {'group': 'self.group', 'meeting': 'self.meeting'}), '(group=self.group, meeting=self.meeting)\n', (9897, 9937), False, 'from ietf.meeting.factories import MeetingFactory, SessionFactory\n'), ((9985, 10061), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.edit"""'], {'kwargs': "{'acronym': self.group.acronym}"}), "('ietf.secr.sreq.views.edit', kwargs={'acronym': self.group.acronym})\n", (9992, 10061), False, 'from django.urls import reverse\n'), ((10221, 10239), 'pyquery.PyQuery', 'PyQuery', (['r.content'], {}), '(r.content)\n', (10228, 10239), False, 'from pyquery import PyQuery\n'), ((10356, 10432), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.view"""'], {'kwargs': "{'acronym': self.group.acronym}"}), "('ietf.secr.sreq.views.view', kwargs={'acronym': self.group.acronym})\n", (10363, 10432), False, 'from django.urls import reverse\n'), ((10604, 10622), 'pyquery.PyQuery', 'PyQuery', (['r.content'], {}), '(r.content)\n', (10611, 10622), False, 'from pyquery import PyQuery\n'), ((10740, 10815), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.new"""'], {'kwargs': "{'acronym': self.group.acronym}"}), "('ietf.secr.sreq.views.new', kwargs={'acronym': self.group.acronym})\n", (10747, 10815), False, 'from django.urls import reverse\n'), ((11026, 11044), 'pyquery.PyQuery', 'PyQuery', (['r.content'], {}), '(r.content)\n', (11033, 11044), False, 'from pyquery import PyQuery\n'), ((11316, 11334), 'pyquery.PyQuery', 'PyQuery', (['r.content'], {}), '(r.content)\n', (11323, 11334), False, 'from pyquery import PyQuery\n'), ((11546, 11574), 'ietf.group.factories.GroupFactory', 'GroupFactory', ([], {'acronym': '"""mars"""'}), "(acronym='mars')\n", (11558, 11574), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((11589, 11666), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.no_session"""'], {'kwargs': "{'acronym': group.acronym}"}), "('ietf.secr.sreq.views.no_session', kwargs={'acronym': group.acronym})\n", (11596, 11666), False, 'from django.urls import reverse\n'), ((11746, 11760), 'ietf.utils.mail.empty_outbox', 'empty_outbox', ([], {}), '()\n', (11758, 11760), False, 'from ietf.utils.mail import outbox, empty_outbox\n'), ((1927, 1986), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""ad"""', 'person': 'ad', 'group__type_id': '"""area"""'}), "(name_id='ad', person=ad, group__type_id='area')\n", (1938, 1986), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((2386, 2450), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.view"""'], {'kwargs': "{'acronym': 'mars'}"}), "('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'})\n", (2393, 2450), False, 'from django.urls import reverse\n'), ((2736, 2795), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""ad"""', 'person': 'ad', 'group__type_id': '"""area"""'}), "(name_id='ad', person=ad, group__type_id='area')\n", (2747, 2795), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((2817, 2914), 'ietf.meeting.factories.SessionFactory', 'SessionFactory', ([], {'meeting': 'meeting', 'group__parent': 'area', 'group__acronym': '"""mars"""', 'status_id': '"""sched"""'}), "(meeting=meeting, group__parent=area, group__acronym='mars',\n status_id='sched')\n", (2831, 2914), False, 'from ietf.meeting.factories import MeetingFactory, SessionFactory\n'), ((3125, 3161), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.main"""'], {}), "('ietf.secr.sreq.views.main')\n", (3132, 3161), False, 'from django.urls import reverse\n'), ((3414, 3508), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""chair"""', 'person__user__username': '"""marschairman"""', 'group__acronym': '"""mars"""'}), "(name_id='chair', person__user__username='marschairman',\n group__acronym='mars')\n", (3425, 3508), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((4246, 4310), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.view"""'], {'kwargs': "{'acronym': 'mars'}"}), "('ietf.secr.sreq.views.view', kwargs={'acronym': 'mars'})\n", (4253, 4310), False, 'from django.urls import reverse\n'), ((4741, 4777), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.main"""'], {}), "('ietf.secr.sreq.views.main')\n", (4748, 4777), False, 'from django.urls import reverse\n'), ((5003, 5062), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""ad"""', 'person': 'ad', 'group__type_id': '"""area"""'}), "(name_id='ad', person=ad, group__type_id='area')\n", (5014, 5062), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((6645, 6704), 'ietf.group.factories.RoleFactory', 'RoleFactory', ([], {'name_id': '"""ad"""', 'person': 'ad', 'group__type_id': '"""area"""'}), "(name_id='ad', person=ad, group__type_id='area')\n", (6656, 6704), False, 'from ietf.group.factories import GroupFactory, RoleFactory\n'), ((8980, 9016), 'django.urls.reverse', 'reverse', (['"""ietf.secr.sreq.views.main"""'], {}), "('ietf.secr.sreq.views.main')\n", (8987, 9016), False, 'from django.urls import reverse\n'), ((587, 608), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (606, 608), False, 'import datetime\n'), ((1124, 1145), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1143, 1145), False, 'import datetime\n'), ((1836, 1857), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1855, 1857), False, 'import datetime\n'), ((2645, 2666), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2664, 2666), False, 'import datetime\n'), ((3376, 3397), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3395, 3397), False, 'import datetime\n'), ((3589, 3612), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3610, 3612), False, 'import datetime\n'), ((4407, 4428), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4426, 4428), False, 'import datetime\n'), ((4912, 4933), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4931, 4933), False, 'import datetime\n'), ((5142, 5194), 'ietf.meeting.models.Session.objects.filter', 'Session.objects.filter', ([], {'meeting': 'meeting', 'group': 'group'}), '(meeting=meeting, group=group)\n', (5164, 5194), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((6016, 6068), 'ietf.meeting.models.Session.objects.filter', 'Session.objects.filter', ([], {'meeting': 'meeting', 'group': 'group'}), '(meeting=meeting, group=group)\n', (6038, 6068), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((6332, 6384), 'ietf.meeting.models.Session.objects.filter', 'Session.objects.filter', ([], {'meeting': 'meeting', 'group': 'group'}), '(meeting=meeting, group=group)\n', (6354, 6384), False, 'from ietf.meeting.models import Session, ResourceAssociation\n'), ((6554, 6575), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (6573, 6575), False, 'import datetime\n'), ((7370, 7383), 'ietf.utils.test_utils.unicontent', 'unicontent', (['r'], {}), '(r)\n', (7380, 7383), False, 'from ietf.utils.test_utils import TestCase, unicontent\n'), ((7481, 7502), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (7500, 7502), False, 'import datetime\n'), ((9670, 9691), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (9689, 9691), False, 'import datetime\n'), ((11507, 11528), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (11526, 11528), False, 'import datetime\n'), ((12281, 12294), 'ietf.utils.test_utils.unicontent', 'unicontent', (['r'], {}), '(r)\n', (12291, 12294), False, 'from ietf.utils.test_utils import TestCase, unicontent\n'), ((12449, 12462), 'ietf.utils.test_utils.unicontent', 'unicontent', (['r'], {}), '(r)\n', (12459, 12462), False, 'from ietf.utils.test_utils import TestCase, unicontent\n')]
|
from glob import glob
files = glob("/mnt/work/endrebak/ldetect/1kg/partition_covariances/CEU/chr22/*.pq")
import pandas as pd
import numpy as np
d = {}
dfs = []
for i, f in enumerate(files):
print(i/len(files))
df = pd.read_parquet(f)
dfs.append(df)
# for k1, k2, v in df.itertuples(index=False):
# d[k1, k2] = v
# df = pd.concat(dfs)
# print(len(d))
|
[
"pandas.read_parquet",
"glob.glob"
] |
[((30, 105), 'glob.glob', 'glob', (['"""/mnt/work/endrebak/ldetect/1kg/partition_covariances/CEU/chr22/*.pq"""'], {}), "('/mnt/work/endrebak/ldetect/1kg/partition_covariances/CEU/chr22/*.pq')\n", (34, 105), False, 'from glob import glob\n'), ((227, 245), 'pandas.read_parquet', 'pd.read_parquet', (['f'], {}), '(f)\n', (242, 245), True, 'import pandas as pd\n')]
|
import argparse
import boto3
import json
import os
import sys
from six.moves import urllib
import uuid
import traceback
from botocore.exceptions import ClientError
from dictionary_sorter import divide
from dictionary_sorter import merge
from dictionary_sorter import build
from harness import config
from harness import decider
from harness import worker
from harness import cloudwatch
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--domain", required=True)
parser.add_argument("-t", "--task-list", required=True)
parser.add_argument("--div-task", required=True)
parser.add_argument("--div-task-version", default="1.0")
parser.add_argument("--merge-task", required=True)
parser.add_argument("--merge-task-version", default="1.0")
parser.add_argument("--build-task", default=None)
parser.add_argument("--build-task-version", default="1.0")
parser.add_argument("-rd", "--run-decider", action="store_true")
parser.add_argument("--region", default=None)
parser.add_argument("--config-bucket", default=None)
parser.add_argument("--log-group", default=None)
parser.add_argument("--log-db", default=None)
parser.add_argument("--kvs-db", default=None)
parser.add_argument("--profile", default=None)
parser.add_argument("--role-arn", default=None)
parser.add_argument("--stdout", default=None)
args = parser.parse_args()
try:
# Fetch instance identity: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html
with urllib.request.urlopen('http://169.254.169.254/latest/dynamic/instance-identity/document') as response:
info = json.load(response)
ec2_region = info['region']
identity = info['instanceId']
print("Running on EC2 instance {} in region {}".format(identity, ec2_region))
except:
ec2_region = "us-east-1"
identity = os.environ.get("COMPUTERNAME", "<unavailable>")
print("Couldn't load EC2 instance data from environment, using computer hostname {}".format(identity))
if not args.region:
args.region = ec2_region
# You can supply a profile to use if you are testing locally.
session = boto3.Session(region_name=args.region, profile_name=args.profile)
# You can supply a role arn to use if you are testing locally.
if args.role_arn:
sts_result = session.client('sts').assume_role(
DurationSeconds=3600,
RoleSessionName="Harness-" + str(uuid.uuid4()),
RoleArn=args.role_arn
)['Credentials']
session = boto3.Session(
region_name=args.region,
aws_access_key_id=sts_result['AccessKeyId'],
aws_secret_access_key=sts_result['SecretAccessKey'],
aws_session_token=sts_result['SessionToken']
)
if args.stdout:
if args.stdout == 'cloudwatch':
writeHandler = cloudwatch.OutputHandler('HARNESS-DEBUG', session, args.region, identity, 'decider' if args.run_decider else 'worker')
else:
fp = open(args.stdout, "w")
sys.stdout = fp
sys.stderr = fp
divide_task = config.TaskConfig(args.div_task, args.div_task_version, divide.handler)
merge_task = config.TaskConfig(args.merge_task, args.merge_task_version, merge.handler)
build_task = config.TaskConfig(args.build_task, args.build_task_version, build.handler) if args.build_task else merge_task
harness_config = config.Config(session, args.region, args.domain, args.task_list, divide_task, build_task,
merge_task, args.log_group, args.log_db, args.kvs_db, args.config_bucket, identity)
try:
if args.run_decider:
decider.run_decider(harness_config)
else:
worker.run_worker(harness_config)
except Exception as e:
message = "Error - " + str(e) + "\n" + traceback.format_exc()
print(message)
|
[
"json.load",
"uuid.uuid4",
"argparse.ArgumentParser",
"boto3.Session",
"harness.config.Config",
"harness.decider.run_decider",
"os.environ.get",
"six.moves.urllib.request.urlopen",
"harness.worker.run_worker",
"traceback.format_exc",
"harness.config.TaskConfig",
"harness.cloudwatch.OutputHandler"
] |
[((429, 454), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (452, 454), False, 'import argparse\n'), ((2255, 2320), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'args.region', 'profile_name': 'args.profile'}), '(region_name=args.region, profile_name=args.profile)\n', (2268, 2320), False, 'import boto3\n'), ((3215, 3286), 'harness.config.TaskConfig', 'config.TaskConfig', (['args.div_task', 'args.div_task_version', 'divide.handler'], {}), '(args.div_task, args.div_task_version, divide.handler)\n', (3232, 3286), False, 'from harness import config\n'), ((3304, 3378), 'harness.config.TaskConfig', 'config.TaskConfig', (['args.merge_task', 'args.merge_task_version', 'merge.handler'], {}), '(args.merge_task, args.merge_task_version, merge.handler)\n', (3321, 3378), False, 'from harness import config\n'), ((3527, 3709), 'harness.config.Config', 'config.Config', (['session', 'args.region', 'args.domain', 'args.task_list', 'divide_task', 'build_task', 'merge_task', 'args.log_group', 'args.log_db', 'args.kvs_db', 'args.config_bucket', 'identity'], {}), '(session, args.region, args.domain, args.task_list,\n divide_task, build_task, merge_task, args.log_group, args.log_db, args.\n kvs_db, args.config_bucket, identity)\n', (3540, 3709), False, 'from harness import config\n'), ((2638, 2829), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'args.region', 'aws_access_key_id': "sts_result['AccessKeyId']", 'aws_secret_access_key': "sts_result['SecretAccessKey']", 'aws_session_token': "sts_result['SessionToken']"}), "(region_name=args.region, aws_access_key_id=sts_result[\n 'AccessKeyId'], aws_secret_access_key=sts_result['SecretAccessKey'],\n aws_session_token=sts_result['SessionToken'])\n", (2651, 2829), False, 'import boto3\n'), ((3396, 3470), 'harness.config.TaskConfig', 'config.TaskConfig', (['args.build_task', 'args.build_task_version', 'build.handler'], {}), '(args.build_task, args.build_task_version, build.handler)\n', (3413, 3470), False, 'from harness import config\n'), ((1578, 1673), 'six.moves.urllib.request.urlopen', 'urllib.request.urlopen', (['"""http://169.254.169.254/latest/dynamic/instance-identity/document"""'], {}), "(\n 'http://169.254.169.254/latest/dynamic/instance-identity/document')\n", (1600, 1673), False, 'from six.moves import urllib\n'), ((1701, 1720), 'json.load', 'json.load', (['response'], {}), '(response)\n', (1710, 1720), False, 'import json\n'), ((1957, 2004), 'os.environ.get', 'os.environ.get', (['"""COMPUTERNAME"""', '"""<unavailable>"""'], {}), "('COMPUTERNAME', '<unavailable>')\n", (1971, 2004), False, 'import os\n'), ((2967, 3090), 'harness.cloudwatch.OutputHandler', 'cloudwatch.OutputHandler', (['"""HARNESS-DEBUG"""', 'session', 'args.region', 'identity', "('decider' if args.run_decider else 'worker')"], {}), "('HARNESS-DEBUG', session, args.region, identity, \n 'decider' if args.run_decider else 'worker')\n", (2991, 3090), False, 'from harness import cloudwatch\n'), ((3787, 3822), 'harness.decider.run_decider', 'decider.run_decider', (['harness_config'], {}), '(harness_config)\n', (3806, 3822), False, 'from harness import decider\n'), ((3849, 3882), 'harness.worker.run_worker', 'worker.run_worker', (['harness_config'], {}), '(harness_config)\n', (3866, 3882), False, 'from harness import worker\n'), ((3961, 3983), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3981, 3983), False, 'import traceback\n'), ((2546, 2558), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2556, 2558), False, 'import uuid\n')]
|
"""
Entities of a dataset classes.
"""
from qas.wikidata import Wikidata, WikidataItemsNotFound
STRICT_NAME = False # filter not same Wikidata
PRIMARY_COUNT = 1
RETRY_PARALLEL_MATCHING = False
def matching_parallel(indexed_noun_phrases, disable_wordnet=False):
# optimization step
# parallel enitites linking
# building paired permutation initially
permuted_noun_phrases = []
matching_queries = []
for idx, noun_phrase in indexed_noun_phrases:
permutations = noun_phrase.get_permutations(
disable_wordnet=disable_wordnet)
permuted_noun_phrases.append((idx, noun_phrase, permutations, ))
for permutation in permutations:
matching_queries.append(permutation.text)
# parallel linking step
matching = Wikidata.search_by_label_parallel(matching_queries)
return permuted_noun_phrases, matching
class Item():
pass
class NoEnglishLabelAvailable(Exception):
pass
class WikidataItem(Item):
def __init__(self, item_id, label=None, description=None, claims={}):
self.item_id = item_id
self.label = label
self.description = description
self.claims = claims
@classmethod
def from_search_result(cls, data):
item_id = data['id']
label = None
if 'label' in data:
label = data['label']
description = None
if 'description' in data:
description = data['description']
return cls(item_id, label=label, description=description)
@classmethod
def from_get_result(cls, data):
item_id = data['id']
label = None
try:
label = data['labels']['en']['value']
except KeyError:
raise NoEnglishLabelAvailable()
description = None
try:
description = data['description']['en']['value']
except KeyError:
pass
if 'claims' in data:
claims = Wikidata.extract_claims(data)
# print(item_id, label, claims)
return cls(item_id,
label=label,
description=description,
claims=claims)
def __str__(self):
return str(self.item_id)
def __hash__(self):
return hash(self.item_id)
class UniversalItem():
def __init__(self, wikidata_item, dbpedia_item):
self.wikidata_item = wikidata_item
self.dbpedia_item = dbpedia_item
self.primary = False
@classmethod
def from_wikidata_item(cls, wikidata_item):
dbpedia_item = None
return cls(wikidata_item, dbpedia_item)
@property
def label(self):
# resit chto delat s raznymi labely? s searchu?
return self.wikidata_item.label
@property
def wd_item_id(self):
return self.wikidata_item.item_id
def __str__(self):
result = "( {}, {}, {})"
return result.format(
str(self.wikidata_item),
str(self.wikidata_item.label) if self.wikidata_item.label
is not None else "-",
"https://www.wikidata.org/wiki/{} ".format(str(self.wikidata_item)))
def __eq__(self, other):
if self.wikidata_item is not None and other.wikidata_item is not None:
if self.wikidata_item.item_id == other.wikidata_item.item_id:
return True
return False
def __hash__(self):
# return hash((hash(self.wikidata_item), hash(self.dbpedia_item)))
return self.wikidata_item.__hash__()
class EmptyItemsBatch(Exception):
pass
class ItemsBatch(object):
def __init__(self, noun_phrase, matching={}):
self.noun_phrase = noun_phrase
wikidata_results = []
try:
results = None
if noun_phrase.text in matching:
results = matching[noun_phrase.text]
if results is None:
if RETRY_PARALLEL_MATCHING:
results = Wikidata.search_by_label(noun_phrase.text)
else:
raise WikidataItemsNotFound()
else:
results = Wikidata.search_by_label(noun_phrase.text)
for result in results:
wikidata_item = WikidataItem.from_search_result(result)
wikidata_results.append(wikidata_item)
except WikidataItemsNotFound:
# print("WikidataItemsNotFound ", noun_phrase)
pass
# DBpedia items
self.batch = []
for item in wikidata_results:
self.batch.append(UniversalItem.from_wikidata_item(item))
if len(self.batch) == 0:
raise EmptyItemsBatch()
for i in range(PRIMARY_COUNT):
try:
self.batch[i].primary = True
except IndexError:
break
# self.strict = []
# self.strict_filter()
# self.super_strict()
def strict_filter(self):
strict = []
for item in self.batch:
if item.wikidata_item.label is None:
continue
if item.wikidata_item.label.lower() == \
self.noun_phrase.text.lower():
strict.append(item)
return strict
@staticmethod
def super_strict(batch):
return [item for item in batch if item.primary]
def strictify(self):
# print("called on", self.noun_phrase.text, [str(x) for x in self.batch])
if STRICT_NAME:
self.batch = self.strict_filter()
self.batch = self.super_strict(self.batch)
def __str__(self):
result = "<BATCH> {} ({})\n\t\t" + "{} " * len(self.batch)
return result.format(
self.noun_phrase.text,
len(self.batch),
*self.batch)
class EmptyEntity(Exception):
pass
class Entity():
def __init__(self, noun_phrase, permutations=None,
matching=None, log=None):
self.log = log
self.noun_phrase = noun_phrase
# parse permutation in case of non-optimzied and non-parallel
if permutations is None:
permutations = noun_phrase.get_permutations()
# # widget with permutations
# print(self.noun_phrase.text)
# for permutation in permutations:
# print("\t"+str(permutation))
self.candidates = []
for permutation in permutations:
try:
self.candidates.append(ItemsBatch(permutation,
matching=matching))
except EmptyItemsBatch:
# print("empty items batch for", permutation)
pass
if len(self.candidates) == 0:
raise EmptyEntity()
@property
def items(self):
result = []
for candidate in self.candidates:
for item in candidate.batch:
result.append(item)
return result
def strictify(self):
for candidate in self.candidates:
candidate.strictify()
def __str__(self):
result = "<ENTITY> {} ({})\n" + "\t{}\n" * len(self.candidates)
return result.format(
self.noun_phrase.text,
len(self.candidates),
*self.candidates)
class EntitySet():
def __init__(self, entities, log=None):
self.log = log
self.set = entities
@property
def items(self):
result = []
for entity in self.set:
for item in entity.items:
result.append(item)
return result
@classmethod
def merge(cls, entities_sets):
# for idx, entity_set in enumerate(entities_sets):
# print(idx, [entity.noun_phrase.text for entity in entity_set.set])
for idx1, entity_set1 in enumerate(entities_sets):
for idx2, entity_set2 in enumerate(entities_sets):
if idx1 != idx2:
intersection = set(entity_set1.items).intersection(entity_set2.items)
for shared_item in intersection:
entity_set1.log.debug("Merging two sets beacause of shared %s",
str(shared_item))
if len(intersection):
merged_entity_sets = [entity_set
for idx, entity_set in enumerate(entities_sets)
if idx not in [idx1, idx2]]
merged_set = cls(entity_set1.set + entity_set2.set,
log=entity_set1.log)
# print(merged_set)
merged_entity_sets.append(merged_set)
# print(merged_entity_sets)
return cls.merge(merged_entity_sets)
return entities_sets
def __str__(self):
result = "<ENTITY_SET>\n" + "{}\n" * len(self.set)
return result.format(*self.set)
|
[
"qas.wikidata.Wikidata.extract_claims",
"qas.wikidata.Wikidata.search_by_label",
"qas.wikidata.WikidataItemsNotFound",
"qas.wikidata.Wikidata.search_by_label_parallel"
] |
[((782, 833), 'qas.wikidata.Wikidata.search_by_label_parallel', 'Wikidata.search_by_label_parallel', (['matching_queries'], {}), '(matching_queries)\n', (815, 833), False, 'from qas.wikidata import Wikidata, WikidataItemsNotFound\n'), ((1954, 1983), 'qas.wikidata.Wikidata.extract_claims', 'Wikidata.extract_claims', (['data'], {}), '(data)\n', (1977, 1983), False, 'from qas.wikidata import Wikidata, WikidataItemsNotFound\n'), ((4127, 4169), 'qas.wikidata.Wikidata.search_by_label', 'Wikidata.search_by_label', (['noun_phrase.text'], {}), '(noun_phrase.text)\n', (4151, 4169), False, 'from qas.wikidata import Wikidata, WikidataItemsNotFound\n'), ((3960, 4002), 'qas.wikidata.Wikidata.search_by_label', 'Wikidata.search_by_label', (['noun_phrase.text'], {}), '(noun_phrase.text)\n', (3984, 4002), False, 'from qas.wikidata import Wikidata, WikidataItemsNotFound\n'), ((4059, 4082), 'qas.wikidata.WikidataItemsNotFound', 'WikidataItemsNotFound', ([], {}), '()\n', (4080, 4082), False, 'from qas.wikidata import Wikidata, WikidataItemsNotFound\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
P1 tests for Add Remove Network to VM
Test Plan: https://cwiki.apache.org/confluence/display/CLOUDSTACK/Add+Remove+networks+to+VM+Test+cases
Issue Link: https://issues.apache.org/jira/browse/CLOUDSTACK-645
Feature Specifications: https://cwiki.apache.org/confluence/display/CLOUDSTACK/Add+Remove+Networks+to+VMs
"""
import random
import time
import unittest
from ddt import ddt, data
from marvin.cloudstackAPI import (addNicToVirtualMachine,
removeNicFromVirtualMachine,
updateDefaultNicForVirtualMachine)
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.codes import PASS
from marvin.lib.base import (
Account,
Domain,
ServiceOffering,
VirtualMachine,
NetworkOffering,
Network,
VpcOffering,
VPC,
PublicIPAddress,
FireWallRule,
NATRule
)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_virtual_machines,
list_events,
list_zones,
get_free_vlan,
update_resource_limit,
list_nat_rules
)
from marvin.lib.utils import (validateList,
random_gen,
get_hypervisor_type)
# Import Local Modules
from nose.plugins.attrib import attr
class Services:
"""Test Add Remove Network Services
"""
def __init__(self):
self.services = {
"sleep": 60,
"ostype": "CentOS 5.3 (64-bit)",
# Cent OS 5.3 (64 bit)
"isolated_network_offering": {
"name": 'Test Isolated Network offering',
"displaytext": 'Test Isolated Network offering',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"Dns": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
"PortForwarding": 'VirtualRouter',
},
},
"shared_network_offering": {
"name": 'Test Shared Network Offering',
"displaytext": 'Test Shared Network Offering',
"guestiptype": 'Shared',
"supportedservices": 'Dhcp,Dns,UserData',
"specifyVlan": "True",
"specifyIpRanges": "True",
"traffictype": 'GUEST',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"Dns": 'VirtualRouter',
"UserData": 'VirtualRouter'
},
},
"shared_network": {
"name": "Test Shared Network",
"displaytext": "Test Shared Network",
"gateway": "172.16.17.1",
"netmask": "255.255.255.0",
"startip": "172.16.17.2",
"endip": "172.16.17.20",
},
"shared_network_2": {
"name": "Test Shared Network",
"displaytext": "Test Shared Network",
"gateway": "172.16.18.1",
"netmask": "255.255.255.0",
"startip": "172.16.18.2",
"endip": "172.16.18.20",
},
"isolated_network": {
"name": "Test Isolated Network",
"displaytext": "Test Isolated Network",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100,
# in MHz
"memory": 256,
# In MBs
},
"account": {
"email": "<EMAIL>",
"firstname": "Test_add_remove_network_vm",
"lastname": "User",
"username": "test_add_remove_network_vm",
"password": "password",
},
"domain": {
"name": "Domain_add_nw_to_vm",
},
"virtual_machine": {
"displayname": "testserver",
"username": "root", # VM creds for SSH
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"vpc_offering": {
"name": 'VPC off add remove network',
"displaytext": 'VPC off add remove network',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat,NetworkACL',
},
"vpc": {
"name": "TestVPC add remove network",
"displaytext": "TestVPC add remove network",
"cidr": '10.0.0.1/24'
},
"natrule": {
"privateport": 22,
"publicport": 22,
"protocol": "TCP"
},
}
@ddt
class TestAddNetworkToVirtualMachine(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestAddNetworkToVirtualMachine, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls.services["shared_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"], accountid=cls.account.name,
domainid=cls.account.domainid, serviceofferingid=cls.service_offering.id,
mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
cls.defaultNetworkId = cls.virtual_machine.nic[0].networkid
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
cls._cleanup.append(cls.isolated_network_offering)
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.shared_network_offering = NetworkOffering.create(cls.api_client, cls.services["shared_network_offering"])
cls._cleanup.append(cls.shared_network_offering)
cls.shared_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
cls.services["shared_network"]["vlan"] = get_free_vlan(cls.api_client, cls.zone.id)[1]
shared_network_subnet_number = random.randrange(1, 254)
cls.services["shared_network"]["gateway"] = "172.16." + str(shared_network_subnet_number) + ".1"
cls.services["shared_network"]["startip"] = "172.16." + str(shared_network_subnet_number) + ".2"
cls.services["shared_network"]["endip"] = "172.16." + str(shared_network_subnet_number) + ".20"
cls.shared_nw_endip = cls.services["shared_network"]["endip"]
cls.shared_network = Network.create(cls.api_client, cls.services["shared_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.shared_network_offering.id)
cls._cleanup.append(cls.shared_network)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.addednics = []
shared_network_subnet_number = random.randrange(1, 254)
self.services["shared_network"]["gateway"] = "172.16." + str(shared_network_subnet_number) + ".1"
self.services["shared_network"]["startip"] = "172.16." + str(shared_network_subnet_number) + ".2"
self.services["shared_network"]["endip"] = "172.16." + str(shared_network_subnet_number) + ".20"
self.services["shared_network_2"]["gateway"] = "172.16." + str(shared_network_subnet_number + 1) + ".1"
self.services["shared_network_2"]["startip"] = "172.16." + str(shared_network_subnet_number + 1) + ".2"
self.services["shared_network_2"]["endip"] = "172.16." + str(shared_network_subnet_number + 1) + ".20"
self.cleanup = []
def tearDown(self):
try:
for nic in self.addednics:
self.virtual_machine.remove_nic(self.apiclient, nic.id)
except Exception as e:
self.debug("Exception during removal of nics : %s" % e)
super(TestAddNetworkToVirtualMachine, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
# Disable Network Offerings
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
cls.shared_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disable of networks : %s" % e)
super(TestAddNetworkToVirtualMachine, cls).tearDownClass()
def addNetworkToVm(self, network, vm, ipaddress=None):
"""Add network to VM and check if new nic added in the VM"""
self.debug("Adding %s Network: %s to virtual machine %s" %
(network.type, network.id, vm.id))
vm.add_nic(self.apiclient, network.id, ipaddress=ipaddress)
vm_list = list_virtual_machines(self.apiclient, id=vm.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
self.debug("virtual machine nics: %s" % vm_list[0].nic)
nics = [x for x in vm_list[0].nic if x.networkid == network.id]
self.debug("Filtered nics list: %s:" % nics)
# Only the nics added to self.virtual_machine should be added to this list
# Nics added to their list are removed before execution of next test case because we are using
# same virtual machine in all test cases, so it is important that the common
# virtual machine should contain only the default nic whenever new test case
# execution starts
if vm.id == self.virtual_machine.id:
self.addednics.append(nics[-1])
self.assertTrue(len(nics) == 1, "nics list should contain the nic of added isolated network,\
the number of nics for the network should be 1, instead they are %s" %
len(nics))
if ipaddress is not None:
self.assertEqual(nics[0].ipaddress, ipaddress, "The ip address of nic does not match with \
the ip address passed while adding network to vm. ip address of nic is %s \
while passed ip address is %s" % (nics[0].ipaddress, ipaddress))
return
@attr(tags=["advanced", "dvs"])
@data("isolated", "shared")
def test_01_add_nw_running_vm(self, value):
"""Add network to running VM"""
# 1. Deploy VM in an account
# 2. Add isolated/shared network to the VM which is in running state
# Validate the following:
# 1. New nic is generated for the added network
# 2. Event NIC.CREATE is generated
network = None # The network which we are adding to the vm
if value == "isolated":
network = self.isolated_network
elif value == "shared":
network = self.shared_network
if network is None:
self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
self.addNetworkToVm(network, self.virtual_machine)
self.debug("Retrieving the list of events matching 'NIC.CREATE' in account: %s" % self.account.name)
events = list_events(self.apiclient, account=self.account.name, domainid=self.account.domainid,
type='NIC.CREATE')
event_list_validation_result = validateList(events)
self.assertEqual(event_list_validation_result[0], PASS, "event list validation failed due to %s" %
event_list_validation_result[2])
self.debug("Events list contains event NIC.CREATE")
return
@attr(tags=["advanced", "dvs"])
@data("isolated", "shared")
def test_02_add_nw_stopped_vm(self, value):
"""Add network to stopped VM"""
# 1. Deploy VM in an account
# 2. Stop the VM
# 3. Add isolated/shared network to the stopped VM
# Validate the following:
# 1. New nic is generated for the added network
try:
self.virtual_machine.stop(self.apiclient)
except Exception as e:
self.fail("Failed to stop VM: %s" % e)
network = None # The network which we are adding to the vm
if value == "isolated":
network = self.isolated_network
elif value == "shared":
network = self.shared_network
if network is None:
self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
self.addNetworkToVm(network, self.virtual_machine)
self.debug("Starting Virtual Machine: %s" % self.virtual_machine.id)
self.virtual_machine.start(self.apiclient)
return
@attr(tags=["advanced", "dvs"])
@data("isolated", "shared")
def test_03_add_nw_multiple_times(self, value):
"""Add same network multiple times to running VM"""
# 1. Deploy VM in an account
# 2. Add isolated/shared network to the VM
# 3. Try Adding same network again to the VM
# Validate the following:
# 1. Adding same network to vm multiple times fails
network = None # The network which we are adding to the vm
if value == "isolated":
network = self.isolated_network
elif value == "shared":
network = self.shared_network
if network is None:
self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
try:
virtual_machine = VirtualMachine.create(
self.api_client, self.services["virtual_machine"],
accountid=self.account.name, domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
mode=self.zone.networktype,
networkids=[self.defaultNetworkId])
self.cleanup.append(virtual_machine)
except Exception as e:
self.fail("Failed to deply virtual machine: %s" % e)
# Adding network to vm for the first time
self.addNetworkToVm(network, virtual_machine)
# Trying to add same network to vm for the second time
with self.assertRaises(Exception) as e:
self.addNetworkToVm(network, virtual_machine)
self.debug("Adding same network again failed with exception: %s" % e.exception)
return
@attr(tags=["advanced", "dvs"])
@data("isolated")
def test_04_vpc_nw_running_vm(self, value):
"""Add VPC network to running VM belonging to isolated network"""
# 1. Deploy VM in an account
# 2. Add isolated network to the VM
# 3. Create VPC
# 4. Try adding VPC to the VM
# Validate the following:
# 1. Adding VPC to vm should fail
try:
virtual_machine = VirtualMachine.create(
self.api_client, self.services["virtual_machine"],
accountid=self.account.name, domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
mode=self.zone.networktype,
networkids=[self.defaultNetworkId])
self.cleanup.append(virtual_machine)
except Exception as e:
self.fail("Failed to deply virtual machine: %s" % e)
network = self.isolated_network
self.addNetworkToVm(network, virtual_machine)
self.debug("Creating VPC offering")
vpc_off = VpcOffering.create(self.api_client, self.services["vpc_offering"])
self.cleanup.append(vpc_off)
self.debug("Created VPC offering: %s" % vpc_off.id)
self.debug("Enabling the VPC offering")
vpc_off.update(self.apiclient, state='Enabled')
self.debug("Creating VPC")
vpc = VPC.create(self.apiclient, self.services["vpc"], vpcofferingid=vpc_off.id, zoneid=self.zone.id,
account=self.account.name, domainid=self.account.domainid)
self.cleanup.append(vpc)
self.debug("Trying to add VPC to vm belonging to isolated network, this should fail")
with self.assertRaises(Exception):
virtual_machine.add_nic(self.apiclient, vpc.id)
self.debug("Disabling vpc offering: %s" % vpc_off.id)
vpc_off.update(self.apiclient, state='Disabled')
return
@attr(tags=["advanced", "dvs"])
@data("isolated")
def test_05_add_vpc_nw_stopped_vm(self, value):
"""Add VPC network to stopped VM belonging to isolated network"""
# 1. Deploy VM in an account
# 2. Stop the VM
# 3. Add isolated network to the VM
# 4. Create VPC
# 5. Try adding VPC to the stopped VM
# Validate the following:
# 1. Adding VPC to vm should fail
try:
self.virtual_machine.stop(self.apiclient)
except Exception as e:
self.fail("Failed to stop virtual machine: %s" % e)
self.addNetworkToVm(self.isolated_network, self.virtual_machine)
self.debug("Creating VPC offering")
vpc_off = VpcOffering.create(self.api_client, self.services["vpc_offering"])
self.cleanup.append(vpc_off)
self.debug("Created VPC offering: %s" % vpc_off.id)
self.debug("Enabling the VPC offering")
vpc_off.update(self.apiclient, state='Enabled')
self.debug("Creating VPC")
vpc = VPC.create(self.apiclient, self.services["vpc"], vpcofferingid=vpc_off.id, zoneid=self.zone.id,
account=self.account.name, domainid=self.account.domainid)
self.cleanup.append(vpc)
self.debug("Trying to add VPC to vm belonging to isolated network, this should fail")
with self.assertRaises(Exception):
self.virtual_machine.add_nic(self.apiclient, vpc.id)
self.debug("Starting virtual machine")
self.virtual_machine.start(self.apiclient)
self.debug("Disabling vpc offering: %s" % vpc_off.id)
vpc_off.update(self.apiclient, state='Disabled')
return
@attr(tags=["advanced", "dvs"])
def test_06_add_nw_ipaddress_running_vm(self):
"""Add network and ip address to running VM"""
# 1. Deploy VM in an account
# 2. Add shared network and ip address to this VM
# Validate the following:
# 1. New nic gets added for the shared network
# 2. The newly added nic has the ip address same as
# that passed while adding the network
try:
virtual_machine = VirtualMachine.create(
self.api_client, self.services["virtual_machine"],
accountid=self.account.name, domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
mode=self.zone.networktype,
networkids=[self.defaultNetworkId])
self.cleanup.append(virtual_machine)
except Exception as e:
self.fail("Failed to deply virtual machine: %s" % e)
ipaddress = self.shared_nw_endip
self.debug("Adding network to vm with ip address %s: " % ipaddress)
self.addNetworkToVm(self.shared_network, virtual_machine, ipaddress=ipaddress)
return
@attr(tags=["advanced", "dvs"])
def test_10_add_nw_invalid_ipaddress_running_vm(self):
"""Add network with invalid ip address to running VM"""
# 1. Deploy VM in an account
# 2. Add shared network with invalid ip address to this VM
# Validate the following:
# 1. Adding network to VM should fail because of invalid ip address
ipaddress = "257.257.257.257" # Invalid ip address
self.debug("Adding network to vm with ip address %s: " % ipaddress)
with self.assertRaises(Exception) as e:
self.addNetworkToVm(self.shared_network, self.virtual_machine,
ipaddress=ipaddress)
self.debug("API failed with exception: %s" % e.exception)
return
# was tags=["advanced", "dvs"],
# the apiclient that is being used to test this has to much rights?
@attr(tags=["TODO"])
@data("isolated", "shared")
def test_14_add_nw_different_account(self, value):
"""Add network to running VM"""
# 1. Deploy VM in an account
# 2. Create new account under same domain and create network in that account
# 3. Add isolated/shared network belonging to other account to the VM in first account
# Validate the following:
# 1. Adding network should fail
network = None # The network which we are adding to the vm
account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
self.cleanup.append(account)
if value == "isolated":
network = Network.create(self.api_client, self.services["isolated_network"], account.name,
account.domainid, networkofferingid=self.isolated_network_offering.id)
self.cleanup.append(network)
elif value == "shared":
self.services["shared_network_2"]["zoneid"] = self.zone.id
self.services["shared_network_2"]["vlan"] = get_free_vlan(self.apiclient, self.zone.id)[1]
network = Network.create(self.api_client, self.services["shared_network_2"], account.name,
account.domainid, networkofferingid=self.shared_network_offering.id)
self.cleanup.append(network)
if network is None:
self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
self.debug("Trying to %s network in account %s to a vm in account %s, This should fail" %
(network.type, account.name, self.account.name))
try:
vm_with_nic = self.virtual_machine.add_nic(self.apiclient, network.id)
nics = [x for x in vm_with_nic.nic if x.networkid == network.id]
self.addednics.append(nics[-1])
except Exception:
pass
else:
self.fail("User was able to add NIC, test failed! This issue has been hit: CLOUDSTACK-10071")
return
@attr(tags=["advanced", "dvs"])
def test_24_add_nw_different_domain(self):
"""Add network to running VM"""
# 1. Create two domains
# 2. Create network in one domain and create virtual machine in other domain
# 3. Ad isolated/shared network belonging to one domain to the vm belonging to other domain
# Validate the following:
# 1. Adding network should fail
network = None # The network which we are adding to the vm
try:
self.child_domain_1 = Domain.create(self.apiclient,
services=self.services["domain"],
parentdomainid=self.domain.id)
self.cleanup.append(self.child_domain_1)
self.child_do_admin_1 = Account.create(
self.apiclient,
self.services["account"],
admin=True,
domainid=self.child_domain_1.id
)
self.cleanup.append(self.child_do_admin_1)
self.child_domain_2 = Domain.create(self.apiclient,
services=self.services["domain"],
parentdomainid=self.domain.id)
self.cleanup.append(self.child_domain_2)
self.child_do_admin_2 = Account.create(
self.apiclient,
self.services["account"],
admin=True,
domainid=self.child_domain_2.id)
self.cleanup.append(self.child_do_admin_2)
except Exception as e:
self.fail(e)
network = Network.create(self.api_client, self.services["isolated_network"], self.child_do_admin_1.name,
self.child_do_admin_1.domainid, networkofferingid=self.isolated_network_offering.id)
self.cleanup.append(network)
virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"], accountid=self.child_do_admin_2.name,
domainid=self.child_do_admin_2.domainid, serviceofferingid=self.service_offering.id,
mode=self.zone.networktype)
self.cleanup.append(virtual_machine)
time.sleep(self.services["sleep"])
self.debug("Trying to %s network in domain %s to a vm in domain %s, This should fail" %
(network.type, self.child_domain_1.name, self.child_domain_2.name))
with self.assertRaises(Exception) as e:
virtual_machine.add_nic(self.apiclient, network.id)
self.debug("Operation failed with exception %s" % e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_25_add_nw_above_account_limit(self):
"""Add network to VM with maximum network limit reached"""
# 1. Create an account and create maximum allowed networks in the account
# 2. Deploy VM in this account
# 3. Create a network in other account and add to this VM
# Validate the following:
# 1. Adding network should fail
self.debug("Creating account 1")
account_1 = Account.create(
self.apiclient,
self.services["account"],
domainid=self.domain.id
)
self.cleanup.append(account_1)
self.debug("setting network limit of account: %s as 1" % account_1.name)
update_resource_limit(
self.apiclient,
6, # Network
max=1,
account=account_1.name,
domainid=account_1.domainid
)
self.debug("Creating isolated network in account: %s" % account_1.name)
network_1 = Network.create(self.api_client, self.services["isolated_network"], account_1.name,
account_1.domainid, networkofferingid=self.isolated_network_offering.id)
self.cleanup.append(network_1)
self.debug("created network %s" % network_1.name)
self.debug("Deploying virtual machine in account: %s" % account_1.name)
virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"], accountid=account_1.name,
domainid=account_1.domainid, serviceofferingid=self.service_offering.id,
mode=self.zone.networktype)
self.cleanup.append(virtual_machine)
self.debug("Deployed virtual machine : %s" % virtual_machine.id)
self.debug("Creating another account")
account_2 = Account.create(
self.apiclient,
self.services["account"],
domainid=self.domain.id
)
self.cleanup.append(account_2)
self.debug("Created account %s" % account_2.name)
self.debug("Creating network in account %s" % account_2.name)
network_2 = Network.create(self.api_client, self.services["isolated_network"], account_2.name,
account_2.domainid, networkofferingid=self.isolated_network_offering.id)
self.cleanup.append(network_2)
self.debug("Created network %s" % network_2.name)
self.debug("Trying to add netwrok %s to VM %s, this should fail" %
(network_2.name, virtual_machine.id))
with self.assertRaises(Exception) as e:
virtual_machine.add_nic(self.apiclient, network_2.id)
self.debug("Operation failed with exception %s" % e.exception)
return
class TestRemoveNetworkFromVirtualMachine(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestRemoveNetworkFromVirtualMachine, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls.services["shared_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"], accountid=cls.account.name,
domainid=cls.account.domainid, serviceofferingid=cls.service_offering.id,
mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
# Create Shared Network Offering
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
cls._cleanup.append(cls.isolated_network_offering)
# Enable Isolated Network offering
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
self.addednics = []
def tearDown(self):
try:
for nic in self.addednics:
self.virtual_machine.remove_nic(self.apiclient, nic.id)
except Exception as e:
self.debug("Exception during removal of nics : %s" % e)
super(TestRemoveNetworkFromVirtualMachine, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disabling network offering : %s" % e)
super(TestRemoveNetworkFromVirtualMachine, cls).tearDownClass()
def addNetworkToVm(self, network, vm):
"""Add network to VM and check if new nic added in the VM"""
self.debug("Adding %s Network: %s to virtual machine %s" %
(network.type, network.id, vm.id))
vm.add_nic(self.apiclient, network.id)
vm_list = list_virtual_machines(self.apiclient, id=vm.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
self.debug("virtual machine nics: %s" % vm_list[0].nic)
# Add nic of network to list so that it can be deleted later accessing its id from this list
self.nics = [x for x in vm_list[0].nic if x.networkid == network.id]
self.debug("Filtered nics list: %s:" % self.nics)
self.assertTrue(len(self.nics) == 1, "nics list should contain the nic of added isolated network,\
the number of nics for the network should be 1, instead they are %s" %
len(self.nics))
return self.nics
@attr(tags=["advanced", "dvs"])
def test_07_remove_nic_running_vm(self):
"""Remove nic from running VM"""
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Remove the nic added by the newly added network
# Validate the following:
# 1. Newly added nic is removed
# 2. Event NIC.DELETE is generated
self.addNetworkToVm(self.isolated_network, self.virtual_machine)
# Access the nic of the added network from self.nics object which is fillled
# in addNetworkToVm function
self.debug("Removing added nic %s from vm %s" %
(self.nics[0].id, self.virtual_machine.id))
self.virtual_machine.remove_nic(self.apiclient, self.nics[0].id)
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
self.debug("virtual machine nics: %s" % vm_list[0].nic)
# Verify the nic is removed from the virtual machine
self.debug("Verifying the nic is removed from the virtual machine")
self.assertFalse(any(x.networkid == self.isolated_network.id for x in vm_list[0].nic),
"nic still present in the virtual machine nic list")
self.debug("nic removed successfully")
self.debug("Retrieving events list matching events 'NIC.DELETE'")
events = list_events(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
type='NIC.DELETE'
)
event_list_validation_result = validateList(events)
self.assertEqual(event_list_validation_result[0], PASS, "vm list validation failed due to %s" %
event_list_validation_result[2])
self.debug("Events list contains event NIC.DELETE")
self.debug("events: %s" % events)
return
@attr(tags=["advanced", "dvs"])
def test_08_remove_default_nic(self):
"""Test Remove default nic of running VM"""
# 1. Deploy Vm in account
# 2. Try to remove the default nic of the VM
# Validate the following:
# 1. Default nic of vm is not removed
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
self.debug("virtual machine nics: %s" % vm_list[0].nic)
self.assertEqual(len(vm_list[0].nic), 1, "There should only be default nic present in the vm")
self.debug("Trying to remove the default nic of vm : %s, this should fail" %
self.virtual_machine.id)
with self.assertRaises(Exception):
self.virtual_machine.remove_nic(self.apiclient, vm_list[0].nic[0].id)
self.debug("Removing default nic of vm failed")
return
@attr(tags=["advanced", "dvs"])
def test_09_remove_foreign_nic(self):
"""Remove nic which does not belong to VM"""
# 1. Add VM in an account
# 1. Add new account and deploy vm in it
# 2. Try to remove nic of the new vm from first vm
# Validate the following:
# 1. Nic remove operation should fail
self.debug("Creating new account")
account = Account.create(
self.api_client,
self.services["account"],
domainid=self.domain.id
)
self.cleanup.append(account)
self.debug("created new account : %s" % account.name)
self.debug("Deploying virtual machine in this account")
virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"], accountid=account.name,
domainid=account.domainid, serviceofferingid=self.service_offering.id,
mode=self.zone.networktype)
self.debug("Deployed virtual machine: %s" % virtual_machine.id)
self.debug("Trying to remove nic of new virtual machine from existing virtual machine, This \
operation should fail")
with self.assertRaises(Exception) as e:
self.virtual_machine.remove_nic(self.apiclient, virtual_machine.nic[0].id)
self.debug("Operation failed with exception: %s" % e.exception)
return
@attr(tags=["advanced"], required_hardware="true")
def test_29_remove_nic_CS22503(self):
"""Test to verify remove nic from vm if the nic ip is same as another vm ip in another network"""
# 1. Deploy vm v1 with networks n1 and n2
# 2. Check the ip address of nic in n2 say ip1
# 3. Deployed vm v2 in another network say n3 with same IP address as ip1 using
# 'deployVirtualMachine' api with 'ipaddress' as one of the parameters.
# 4. Acquire public IP in n3 network.
# 5. Configure PF on the acquired IP and assign it to vm v2
# 6. Try to remove nic n2 from v1. Should be successfull
# There was a bug due to both vms has same ip address, so not allowing to remove nic
vm1 = self.virtual_machine
nic2 = self.addNetworkToVm(self.isolated_network, vm1)
self.addednics.append(nic2)
# get the ip address of the nic added in 2nd network
vm1_ip = nic2[0].ipaddress
self.assertIsNotNone(vm1_ip, "New nic did not get the ip address")
# Create network n3
self.network3 = Network.create(
self.api_client,
self.services["isolated_network"],
self.account.name,
self.account.domainid,
networkofferingid=self.isolated_network_offering.id
)
self.cleanup.append(self.network3)
self.vm2 = VirtualMachine.create(
self.api_client,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[self.network3.id],
ipaddress=vm1_ip,
mode=self.zone.networktype
)
self.cleanup.append(self.vm2)
vm2 = VirtualMachine.list(
self.api_client,
id=self.vm2.id
)
self.assertEqual(validateList(vm2)[0], PASS, "list vms returned invalid response")
self.assertIsNotNone(vm2[0].nic[0].ipaddress, "vm2 didn't get the ip address")
self.assertEqual(
vm1_ip,
vm2[0].nic[0].ipaddress,
"vm2 did not get the ip address passed while deploying vm"
)
ip_address = PublicIPAddress.create(
self.apiclient,
self.account.name,
self.zone.id,
self.account.domainid,
self.services["virtual_machine"],
self.network3.id
)
self.cleanup.append(ip_address)
# Open up firewall port for SSH
FireWallRule.create(
self.apiclient,
ipaddressid=ip_address.ipaddress.id,
protocol=self.services["natrule"]["protocol"],
cidrlist=['0.0.0.0/0'],
startport=self.services["natrule"]["publicport"],
endport=self.services["natrule"]["publicport"]
)
# Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
self.vm2,
self.services["natrule"],
ip_address.ipaddress.id
)
list_nat_rule_response = list_nat_rules(
self.apiclient,
id=nat_rule.id
)
self.assertEqual(
validateList(list_nat_rule_response)[0],
PASS,
"Check list response returns a valid list"
)
self.assertEqual(
list_nat_rule_response[0].id,
nat_rule.id,
"Check Correct Port forwarding Rule is returned"
)
# Try to remove nic 2 from vm1
try:
vm1.remove_nic(self.apiclient, self.nics[0].id)
vm1_res = VirtualMachine.list(self.apiclient, id=vm1.id)
self.assertEqual(validateList(vm1_res)[0], PASS, "invalid listvm response")
self.assertEqual(
len(vm1_res[0].nic),
1,
"VM has more than one nic even after removing the 2nd nic"
)
except Exception as e:
self.fail("Failed to delete the nic from vm")
return
@attr(tags=["advanced"], required_hardware="true")
def test_30_remove_nic_reattach(self):
"""
Test to verify vm start after NIC removal and reattach
# 1.Create vm which has 3 nics(e.g. #0,#1,#2)
# 2.Stop the vm
# 3.Remove second nic(#1)
# 4.Add/Reattach same network(#1)
# 5.Start the instance
"""
self.ntwk2 = Network.create(
self.apiclient,
self.services["isolated_network"],
self.account.name,
self.account.domainid,
networkofferingid=self.isolated_network_offering.id
)
self.cleanup.append(self.ntwk2)
self.ntwk3 = Network.create(
self.apiclient,
self.services["isolated_network"],
self.account.name,
self.account.domainid,
networkofferingid=self.isolated_network_offering.id
)
self.cleanup.append(self.ntwk3)
self.test_vm = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
mode=self.zone.networktype,
networkids=[self.isolated_network.id, self.ntwk2.id, self.ntwk3.id]
)
self.cleanup.append(self.test_vm)
self.assertIsNotNone(self.test_vm, "Failed to create vm with 3 nics")
vm_res = VirtualMachine.list(
self.apiclient,
id=self.test_vm.id
)
self.assertEqual(validateList(vm_res)[0], PASS, "Invalid list vm response")
self.nics = vm_res[0].nic
self.assertEqual(
validateList(self.nics)[0],
PASS,
"vm response does not contain nics info"
)
self.assertEqual(len(self.nics), 3, "Not all nics found in vm response")
self.test_vm.stop(self.apiclient)
vm_res2 = VirtualMachine.list(
self.apiclient,
id=self.test_vm.id
)
self.assertEqual(validateList(vm_res2)[0], PASS, "Invalid response")
self.assertEqual(
vm_res2[0].state,
"Stopped",
"VM did not stop properly"
)
"""
get the network id of the nic which we are remove from the nic, so that we can
use that network id for reattach
"""
nic_to_attach = [x for x in [self.isolated_network, self.ntwk2, self.ntwk3] \
if x.id == self.nics[1].networkid]
self.assertEqual(validateList(nic_to_attach)[0], PASS, "No matching nics")
self.assertEqual(len(nic_to_attach), 1, "More than one nic in same network")
try:
self.test_vm.remove_nic(self.apiclient, nicId=self.nics[1].id)
self.test_vm.add_nic(
self.apiclient,
nic_to_attach[0].id
)
self.test_vm.start(self.apiclient)
except Exception as e:
self.fail("Failed to start vm after nic removal and attachment")
vm_res3 = VirtualMachine.list(self.apiclient, id=self.test_vm.id)
self.assertEqual(
validateList(vm_res3)[0],
PASS,
"Invalid listvm response after nic detach and attach"
)
self.assertEqual(
vm_res3[0].state,
"Running",
"VM didn't come to running state after nic detach and attach"
)
vm_nics = vm_res3[0].nic
self.assertEqual(validateList(vm_nics)[0], PASS, "Invalid nics after vm stop/start")
self.assertEqual(
len(vm_nics),
3,
"Nic is not attached/detected"
)
self.addednics.extend(vm_nics)
return
class TestUpdateVirtualMachineNIC(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestUpdateVirtualMachineNIC, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls.services["shared_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
accountid=cls.account.name, domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
cls._cleanup.append(cls.isolated_network_offering)
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
self.addednics = []
def tearDown(self):
try:
for nic in self.addednics:
self.virtual_machine.remove_nic(self.apiclient, nic.id)
except Exception as e:
self.debug("Exception during removal of nics : %s" % e)
super(TestUpdateVirtualMachineNIC, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disable of network offering : %s" % e)
super(TestUpdateVirtualMachineNIC, cls).tearDownClass()
def addNetworkToVm(self, network, vm):
"""Add network to VM and check if new nic added in the VM"""
self.debug("Adding %s Network: %s to virtual machine %s" %
(network.type, network.id, vm.id))
vm.add_nic(self.apiclient, network.id)
vm_list = list_virtual_machines(self.apiclient, id=vm.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
self.debug("virtual machine nics: %s" % vm_list[0].nic)
# Add nic of network to list so that it can be deleted later accessing its id from this list
self.nics = [x for x in vm_list[0].nic if x.networkid == network.id]
self.debug("Filtered nics list: %s:" % self.nics)
self.assertTrue(len(self.nics) == 1, "nics list should contain the nic of added isolated network,\
the number of nics for the network should be 1, instead they are %s" %
len(self.nics))
self.addednics.append(self.nics[0])
return
@attr(tags=["advanced", "dvs"])
def test_11_update_nic_running_vm(self):
"""update default nic of running VM"""
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Update default nic of VM (Make the newly added NIC as default)
# Validate the following:
# 1. Default nic is updated
# 2. Previous default nic is now non-default
# 3. Event NIC.UPDATE is generated
self.addNetworkToVm(self.isolated_network, self.virtual_machine)
self.debug("Listing virtual machine so that to retrive the list of non-default and default nic")
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
if len(vm_list[0].nic) != 2:
self.fail("VM should have exactly two NICs")
defaultNicIdBeforeUpdate = None
nonDefaultNicIdBeforeUpdate = None
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicIdBeforeUpdate = nic.id
else:
nonDefaultNicIdBeforeUpdate = nic.id
self.debug("Default nic of VM is %s and non default nic of VM is %s"
% (defaultNicIdBeforeUpdate, nonDefaultNicIdBeforeUpdate))
self.debug("Making non default nic as default nic")
self.virtual_machine.update_default_nic(self.apiclient, nicId=nonDefaultNicIdBeforeUpdate)
self.debug("Again listing the NIC list of VM to verify the update operation was successful")
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
if len(vm_list[0].nic) != 2:
self.fail("VM should have exactly two NICs")
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicIdAfterUpdate = nic.id
self.assertEqual(nonDefaultNicIdBeforeUpdate, defaultNicIdAfterUpdate, "old non default NIC not made\
default one, update_default_nic API failed")
self.debug("Retrieving events list matching events 'NIC.UPDATE'")
events = list_events(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
type='NIC.UPDATE'
)
event_list_validation_result = validateList(events)
self.assertEqual(event_list_validation_result[0], PASS, "event list validation failed due to %s" %
event_list_validation_result[2])
self.debug("Events list contains event NIC.UPDATE")
self.debug("events: %s" % events)
return
@attr(tags=["advanced", "dvs"])
def test_12_make_default_nic_as_default(self):
"""Try to set default nic of vm again as default"""
# 1. Deploy Vm in account
# 2. Set default nic of vm again as default
# Validate the following:
# 1. updateDefaultNic API fails
self.debug("Listing virtual machine to get default nic")
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
defaultNicId = None
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicId = nic.id
self.debug("Trying to set default nic again as default nic, This should fail")
with self.assertRaises(Exception) as e:
self.virtual_machine.update_default_nic(self.apiclient, nicId=defaultNicId)
self.debug("updateDefaultNic operation failed as expected with exception: %s" %
e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_13_set_foreign_nic_as_default(self):
"""set nic which does not belong to VM as its default one"""
# 1. Add VM in an account
# 1. Add new account and deploy vm in it
# 2. Try to set nic of the new vm as default nic of first vm
# Validate the following:
# 1. updateDefaultNic operation should fail
self.debug("Creating new account")
account = Account.create(self.api_client, self.services["account"], domainid=self.domain.id)
self.cleanup.append(account)
self.debug("created new account : %s" % account.name)
self.debug("Deploying virtual machine in this account")
virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"],
accountid=account.name, domainid=account.domainid,
serviceofferingid=self.service_offering.id, mode=self.zone.networktype)
self.cleanup.append(virtual_machine)
time.sleep(self.services["sleep"])
self.debug("Deployed virtual machine: %s" % virtual_machine.id)
foreignNicId = virtual_machine.nic[0].id
self.debug("Trying to set nic of new virtual machine as default nic of existing virtual machine, This \
operation should fail")
with self.assertRaises(Exception) as e:
self.virtual_machine.update_default_nic(self.apiclient, nicId=foreignNicId)
self.debug("updateDefaultNic operation failed as expected with exception: %s" %
e.exception)
return
class TestFailureScenariosAddNetworkToVM(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestFailureScenariosAddNetworkToVM, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
accountid=cls.account.name, domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"], )
cls._cleanup.append(cls.isolated_network_offering)
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
def tearDown(self):
super(TestFailureScenariosAddNetworkToVM, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disabling network offering : %s" % e)
super(TestFailureScenariosAddNetworkToVM, cls).tearDownClass()
@attr(tags=["advanced", "dvs"])
def test_15_add_nic_wrong_vm_id(self):
"""Add network to vm with wrong vm id"""
# 1. Call add network to VM API with correct network id but wrong vm id
# Validate the following:
# 1. API should throw exception saying unable to find virtual machine
cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
cmd.virtualmachineid = random_gen(id="virtual_machine", size=30)
cmd.networkid = self.isolated_network.id
with self.assertRaises(Exception) as e:
self.apiclient.addNicToVirtualMachine(cmd)
self.debug("addNicToVirtualMachine API failed with exception: %s" % e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_16_add_nic_wrong_network_id(self):
"""Add network to vm with wrong network id"""
# 1. Call add network to VM API with correct network id but wrong network id
# Validate the following:
# 1. API should throw exception saying unable to find a network
cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.networkid = random_gen(id="network_id", size=30)
with self.assertRaises(Exception) as e:
self.apiclient.addNicToVirtualMachine(cmd)
self.debug("addNicToVirtualMachine API failed with exception: %s" % e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_17_add_nic_different_zone(self):
"""Add network to vm where both belong to different zones"""
# 1. Deploy a VM in zone 1
# 2. Create a network in zone 2
# 3. Try to add this network to the VM (both belong to different zones)
# Validate the following:
# 1. API should throw exception vminstance is in zone<id>, but network is in zone <id>
foreignZoneId = None
zones = list_zones(self.apiclient, available=True)
list_zones_validation_result = validateList(zones)
self.assertEqual(list_zones_validation_result[0], PASS, "list zones validation failed due to: %s" %
list_zones_validation_result[2])
if len(zones) >= 2:
for zone in zones:
if zone.id != self.zone.id:
foreignZoneId = zone.id
break
else:
self.skipTest("This test requires at least two zones to be present in the setup")
self.services["isolated_network"]["zoneid"] = foreignZoneId
self.debug("Creating isolated network in zone %s which is foreign to VM" %
foreignZoneId)
isolated_network = Network.create(self.apiclient, self.services["isolated_network"],
self.account.name, self.account.domainid,
networkofferingid=self.isolated_network_offering.id)
self.cleanup.append(isolated_network)
self.debug("Created isolated network %s in zone %s" %
(isolated_network.id, foreignZoneId))
self.debug("Trying to add network to VM, both belonging to different zones")
cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.networkid = isolated_network.id
with self.assertRaises(Exception) as e:
time.sleep(5)
self.apiclient.addNicToVirtualMachine(cmd)
self.debug("addNicToVirtualMachine API failed with exception: %s" % e.exception)
return
@attr(tags=["invalid"])
def test_18_add_nic_basic_zone(self):
"""Add network to vm in basic zone"""
# 1. Deploy a vm and create network in basic zone
# 2. Try adding network to vm
# Validate following
# 1. API should throw exception saying Can't add a new nic to vm in basic network
basicZone = None
zones = list_zones(self.apiclient, available=True)
list_zones_validation_result = validateList(zones)
self.assertEqual(list_zones_validation_result[0], PASS, "list zones validation failed due to: %s" %
list_zones_validation_result[2])
for zone in zones:
if zone.networktype.lower() == 'BASIC':
basicZone = zone.id
break
if basicZone is None:
self.skipTest("This test requires at least one basic zone to be present in the setup")
self.services["isolated_network"]["zoneid"] = basicZone.id
self.debug("Creating isolated network in basic zone: %s" % basicZone.id)
isolated_network = Network.create(self.apiclient, self.services["isolated_network"],
networkofferingid=self.isolated_network_offering.id)
self.cleanup.append(isolated_network)
self.debug("Created isolated network %s:" % isolated_network.id)
self.services["virtual_machine"]["zoneid"] = basicZone.id
self.debug("Deploying virtual machine in basic zone: %s" % basicZone.id)
virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"],
serviceofferingid=self.service_offering.id,
mode=basicZone.networktype)
self.cleanup.append(virtual_machine)
time.sleep(self.services["sleep"])
self.debug("Deployed virtual machine %s: " % virtual_machine.id)
cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
cmd.virtualmachineid = virtual_machine.id
cmd.networkid = isolated_network.id
self.dedbug("Trying to add isolated network to VM (both in basic zone,\
this operation should fail")
with self.assertRaises(Exception) as e:
time.sleep(5)
self.apiclient.addNicToVirtualMachine(cmd)
return
@attr(tags=["advanced", "dvs"])
def test_26_add_nic_insufficient_permission(self):
"""Try to add network to vm with insufficient permission"""
# 1. Call add network to VM API with api client of other account
# Validate the following:
# 1. API should throw exception saying insufficient permission
cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.networkid = self.isolated_network.id
self.debug("Creating new account")
account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
self.cleanup.append(account)
self.debug("Created account %s" % account.name)
self.debug("creating user api client for account: %s" % account.name)
api_client = self.testClient.getUserApiClient(UserName=account.name, DomainName=self.account.domain)
self.debug("Trying to add network to vm with this api client, this should fail due to \
insufficient permission")
with self.assertRaises(Exception) as e:
time.sleep(5)
api_client.addNicToVirtualMachine(cmd)
return
class TestFailureScenariosRemoveNicFromVM(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestFailureScenariosRemoveNicFromVM, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls.services["shared_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
accountid=cls.account.name, domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"], )
cls._cleanup.append(cls.isolated_network_offering)
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
cls.virtual_machine.add_nic(cls.api_client, cls.isolated_network.id)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
def tearDown(self):
super(TestFailureScenariosRemoveNicFromVM, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disabling of network offering : %s" % e)
super(TestFailureScenariosRemoveNicFromVM, cls).tearDownClass()
@attr(tags=["advanced", "dvs"])
def test_19_remove_nic_wrong_vm_id(self):
"""Try to remove nic from a vm providing wrong vm id to API"""
# (Frist two steps are perfromed in setupClass)
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Remove the nic added by the newly added network providing wrong vm id to the API
# Validate the following:
# 1. API throws exception unable to find a virtual machine with id
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
vm = vm_list_validation_result[1]
nics = [x for x in vm.nic if x.networkid == self.isolated_network.id]
self.assertEqual(len(nics), 1, "There should be exactly one nic corresponding to the isolate\
network %s" % self.isolated_network.id)
cmd = removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id + random_gen()
cmd.nicid = nics[0].id
with self.assertRaises(Exception) as e:
self.apiclient.removeNicFromVirtualMachine(cmd)
self.debug("removeNicFromVirtualMachine API failed with exception: %s" % e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_20_remove_nic_wrong_nic_id(self):
"""Try to remove nic from a vm providing wrong nic id to API"""
# (Frist two steps are perfromed in setupClass)
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Remove the nic added by the newly added network providing wrong nic id to the API
# Validate the following:
# 1. API throws exception unable to find nic with id
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
vm = vm_list_validation_result[1]
nics = [x for x in vm.nic if x.networkid == self.isolated_network.id]
self.assertEqual(len(nics), 1, "There should be exactly one nic corresponding to the isolate\
network %s" % self.isolated_network.id)
cmd = removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.nicid = nics[0].id + random_gen()
with self.assertRaises(Exception) as e:
self.apiclient.removeNicFromVirtualMachine(cmd)
self.debug("removeNicFromVirtualMachine API failed with exception: %s" % e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_27_remove_nic_insufficient_permission(self):
"""Try to remove nic from vm with insufficient permission"""
# 1. Call remove network from VM API with api client of other account
# Validate the following:
# 1. API should throw exception saying insufficient permission
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
vm = vm_list_validation_result[1]
nics = [x for x in vm.nic if x.networkid == self.isolated_network.id]
self.assertEqual(len(nics), 1, "There should be exactly one nic corresponding to the isolate\
network %s" % self.isolated_network.id)
cmd = removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.nicid = nics[0].id
self.debug("Creating new account")
account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
self.cleanup.append(account)
self.debug("Created account %s" % account.name)
self.debug("creating user api client for account: %s" % account.name)
api_client = self.testClient.getUserApiClient(UserName=account.name, DomainName=self.account.domain)
self.debug("Trying to add network to vm with this api client, this should fail due to \
insufficient permission")
with self.assertRaises(Exception) as e:
api_client.removeNicFromVirtualMachine(cmd)
self.debug("removeNicFromVirtualMachine API failed with exception: %s" % e.exception)
self.apiclient.removeNicFromVirtualMachine(cmd)
return
class TestFailureScenariosUpdateVirtualMachineNIC(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestFailureScenariosUpdateVirtualMachineNIC, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls.services["shared_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.addednics = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
accountid=cls.account.name, domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
cls.defaultNetworkId = cls.virtual_machine.nic[0].networkid
# Create Shared Network Offering
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"], )
cls._cleanup.append(cls.isolated_network_offering)
# Enable Isolated Network offering
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"],
cls.account.name, cls.account.domainid,
networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
vm_with_nic = cls.virtual_machine.add_nic(cls.api_client, cls.isolated_network.id)
nics = [x for x in vm_with_nic.nic if x.networkid == cls.isolated_network.id]
cls.addednics.append(nics[-1])
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
def tearDown(self):
super(TestFailureScenariosUpdateVirtualMachineNIC, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
for nic in cls.addednics:
cls.virtual_machine.remove_nic(cls.apiclient, nic.id)
except Exception as e:
cls.debug("Exception during removal of nics : %s" % e)
try:
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disabling of network offering : %s" % e)
super(TestFailureScenariosUpdateVirtualMachineNIC, cls).tearDownClass()
@attr(tags=["advanced", "dvs"])
def test_21_update_nic_wrong_vm_id(self):
"""update default nic of vm providing wrong vm id to the API"""
# (First two steps are performed in setupClass)
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Update default nic of VM (Make the newly added NIC as default) by providing wrong
# vm id to the API
# Validate the following:
# 1. API throws exception saying can't find the virtual machine
self.debug("Listing virtual machine so that to retrive the list of non-default and default nic")
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
if len(vm_list[0].nic) != 2:
self.fail("VM should have exactly two NICs")
defaultNicIdBeforeUpdate = None
nonDefaultNicIdBeforeUpdate = None
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicIdBeforeUpdate = nic.id
else:
nonDefaultNicIdBeforeUpdate = nic.id
self.debug("Default nic of VM is %s and non default nic of VM is %s"
% (defaultNicIdBeforeUpdate, nonDefaultNicIdBeforeUpdate))
self.debug("Making non default nic as default nic")
cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id + random_gen()
cmd.nicid = nonDefaultNicIdBeforeUpdate
with self.assertRaises(Exception) as e:
self.apiclient.updateDefaultNicForVirtualMachine(cmd)
self.debug("updateDefaultNicForVirtualMachine API failed with exception: %s" %
e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_22_update_nic_wrong_nic_id(self):
"""update default nic of vm providing wrong nic id to the API"""
# (First two steps are performed in setupClass)
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Update default nic of VM (Make the newly added NIC as default) by providing wrong
# nic id to the API
# Validate the following:
# 1. API throws exception saying can't find the nic with id
self.debug("Listing virtual machine so that to retrive the list of non-default and default nic")
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
if len(vm_list[0].nic) != 2:
self.fail("VM should have exactly two NICs")
defaultNicIdBeforeUpdate = None
nonDefaultNicIdBeforeUpdate = None
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicIdBeforeUpdate = nic.id
else:
nonDefaultNicIdBeforeUpdate = nic.id
self.debug("Default nic of VM is %s and non default nic of VM is %s"
% (defaultNicIdBeforeUpdate, nonDefaultNicIdBeforeUpdate))
self.debug("Making non default nic as default nic")
cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.nicid = nonDefaultNicIdBeforeUpdate + random_gen()
with self.assertRaises(Exception) as e:
self.apiclient.updateDefaultNicForVirtualMachine(cmd)
self.debug("updateDefaultNicForVirtualMachine API failed with exception: %s" %
e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_23_update_nic_incorrect_vm_state(self):
"""update default nic of vm when vm is state is not Running or Stopped"""
# (First two steps are performed in setupClass)
# 1. Deploy Vm in account
# 2. Add network to VM
# 3. Destroy virtual machine so that the VM state becomes Destroyed or Expunging
# 4. Update default nic of VM (Make the newly added NIC as default)
# Validate the following:
# 1. API throws exception instance is not Running or Stopped
self.debug("Creating new account")
account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
self.cleanup.append(account)
self.debug("Creating virtual machine in the account %s" % account.name)
virtual_machine = VirtualMachine.create(self.api_client, self.services["virtual_machine"],
accountid=account.name, domainid=account.domainid,
serviceofferingid=self.service_offering.id,
mode=self.zone.networktype)
time.sleep(self.services["sleep"])
self.debug("Created virtual machine %s" % virtual_machine.id)
self.debug("Creating isolated network in account %s" % account.name)
isolated_network = Network.create(self.apiclient, self.services["isolated_network"], account.name,
account.domainid, networkofferingid=self.isolated_network_offering.id)
self.debug("Created isolated network %s" % isolated_network.id)
self.debug("Adding isolated network %s to vm %s" % (isolated_network.id, virtual_machine.id))
virtual_machine.add_nic(self.apiclient, isolated_network.id)
self.debug("Listing virtual machine so that to retrive the list of non-default and default nic")
vm_list = list_virtual_machines(self.apiclient, id=virtual_machine.id, listall=True)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
if len(vm_list[0].nic) != 2:
self.fail("VM should have exactly two NICs")
defaultNicIdBeforeUpdate = None
nonDefaultNicIdBeforeUpdate = None
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicIdBeforeUpdate = nic.id
else:
nonDefaultNicIdBeforeUpdate = nic.id
self.debug("Default nic of VM is %s and non default nic of VM is %s"
% (defaultNicIdBeforeUpdate, nonDefaultNicIdBeforeUpdate))
self.debug("Destroying VM %s" % virtual_machine.id)
virtual_machine.delete(self.apiclient, expunge=False)
self.debug("Making non default nic as default nic")
cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
cmd.virtualmachineid = virtual_machine.id
cmd.nicid = nonDefaultNicIdBeforeUpdate
with self.assertRaises(Exception) as e:
self.apiclient.updateDefaultNicForVirtualMachine(cmd)
self.debug("updateDefaultNicForVirtualMachine API failed with exception: %s" %
e.exception)
return
@attr(tags=["advanced", "dvs"])
def test_28_update_nic_insufficient_permission(self):
"""Try to update default nic of vm with insufficient permission"""
# 1. Call update nic of VM API with api client of other account
# Validate the following:
# 1. API should throw exception saying insufficient permission
account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
self.cleanup.append(account)
self.debug("Created account %s" % account.name)
self.debug("creating user api client for account: %s" % account.name)
api_client = self.testClient.getUserApiClient(UserName=account.name, DomainName=self.account.domain)
self.debug("Listing virtual machine so that to retrive the list of non-default and default nic")
vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
if len(vm_list[0].nic) != 2:
self.fail("VM should have exactly two NICs")
defaultNicIdBeforeUpdate = None
nonDefaultNicIdBeforeUpdate = None
for nic in vm_list[0].nic:
if nic.isdefault:
defaultNicIdBeforeUpdate = nic.id
else:
nonDefaultNicIdBeforeUpdate = nic.id
self.debug("Default nic of VM is %s and non default nic of VM is %s"
% (defaultNicIdBeforeUpdate, nonDefaultNicIdBeforeUpdate))
self.debug("Making non default nic as default nic")
cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
cmd.virtualmachineid = self.virtual_machine.id
cmd.nicid = nonDefaultNicIdBeforeUpdate
with self.assertRaises(Exception) as e:
api_client.updateDefaultNicForVirtualMachine(cmd)
return
|
[
"marvin.lib.base.FireWallRule.create",
"marvin.lib.common.list_nat_rules",
"marvin.lib.base.Account.create",
"marvin.cloudstackAPI.removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd",
"marvin.lib.utils.get_hypervisor_type",
"marvin.lib.common.list_events",
"marvin.lib.base.VPC.create",
"marvin.lib.common.get_template",
"marvin.lib.utils.validateList",
"marvin.lib.base.ServiceOffering.create",
"marvin.lib.common.update_resource_limit",
"marvin.lib.common.get_domain",
"marvin.lib.base.Domain.create",
"marvin.lib.base.NATRule.create",
"marvin.cloudstackAPI.addNicToVirtualMachine.addNicToVirtualMachineCmd",
"ddt.data",
"marvin.lib.common.get_free_vlan",
"marvin.lib.base.NetworkOffering.create",
"marvin.lib.common.list_zones",
"marvin.cloudstackAPI.updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd",
"time.sleep",
"marvin.lib.base.PublicIPAddress.create",
"unittest.SkipTest",
"marvin.lib.base.VirtualMachine.list",
"marvin.lib.base.VirtualMachine.create",
"marvin.lib.base.VpcOffering.create",
"marvin.lib.utils.random_gen",
"marvin.lib.base.Network.create",
"marvin.lib.common.list_virtual_machines",
"random.randrange",
"nose.plugins.attrib.attr"
] |
[((13249, 13279), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (13253, 13279), False, 'from nose.plugins.attrib import attr\n'), ((13285, 13311), 'ddt.data', 'data', (['"""isolated"""', '"""shared"""'], {}), "('isolated', 'shared')\n", (13289, 13311), False, 'from ddt import ddt, data\n'), ((14630, 14660), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (14634, 14660), False, 'from nose.plugins.attrib import attr\n'), ((14666, 14692), 'ddt.data', 'data', (['"""isolated"""', '"""shared"""'], {}), "('isolated', 'shared')\n", (14670, 14692), False, 'from ddt import ddt, data\n'), ((15706, 15736), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (15710, 15736), False, 'from nose.plugins.attrib import attr\n'), ((15742, 15768), 'ddt.data', 'data', (['"""isolated"""', '"""shared"""'], {}), "('isolated', 'shared')\n", (15746, 15768), False, 'from ddt import ddt, data\n'), ((17371, 17401), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (17375, 17401), False, 'from nose.plugins.attrib import attr\n'), ((17407, 17423), 'ddt.data', 'data', (['"""isolated"""'], {}), "('isolated')\n", (17411, 17423), False, 'from ddt import ddt, data\n'), ((19306, 19336), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (19310, 19336), False, 'from nose.plugins.attrib import attr\n'), ((19342, 19358), 'ddt.data', 'data', (['"""isolated"""'], {}), "('isolated')\n", (19346, 19358), False, 'from ddt import ddt, data\n'), ((21010, 21040), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (21014, 21040), False, 'from nose.plugins.attrib import attr\n'), ((22181, 22211), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (22185, 22211), False, 'from nose.plugins.attrib import attr\n'), ((23064, 23083), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['TODO']"}), "(tags=['TODO'])\n", (23068, 23083), False, 'from nose.plugins.attrib import attr\n'), ((23089, 23115), 'ddt.data', 'data', (['"""isolated"""', '"""shared"""'], {}), "('isolated', 'shared')\n", (23093, 23115), False, 'from ddt import ddt, data\n'), ((25154, 25184), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (25158, 25184), False, 'from nose.plugins.attrib import attr\n'), ((27888, 27918), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (27892, 27918), False, 'from nose.plugins.attrib import attr\n'), ((35139, 35169), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (35143, 35169), False, 'from nose.plugins.attrib import attr\n'), ((37208, 37238), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (37212, 37238), False, 'from nose.plugins.attrib import attr\n'), ((38303, 38333), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (38307, 38333), False, 'from nose.plugins.attrib import attr\n'), ((39774, 39823), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced']", 'required_hardware': '"""true"""'}), "(tags=['advanced'], required_hardware='true')\n", (39778, 39823), False, 'from nose.plugins.attrib import attr\n'), ((43864, 43913), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced']", 'required_hardware': '"""true"""'}), "(tags=['advanced'], required_hardware='true')\n", (43868, 43913), False, 'from nose.plugins.attrib import attr\n'), ((52044, 52074), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (52048, 52074), False, 'from nose.plugins.attrib import attr\n'), ((55026, 55056), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (55030, 55056), False, 'from nose.plugins.attrib import attr\n'), ((56201, 56231), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (56205, 56231), False, 'from nose.plugins.attrib import attr\n'), ((60745, 60775), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (60749, 60775), False, 'from nose.plugins.attrib import attr\n'), ((61469, 61499), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (61473, 61499), False, 'from nose.plugins.attrib import attr\n'), ((62196, 62226), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (62200, 62226), False, 'from nose.plugins.attrib import attr\n'), ((64356, 64378), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['invalid']"}), "(tags=['invalid'])\n", (64360, 64378), False, 'from nose.plugins.attrib import attr\n'), ((66733, 66763), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (66737, 66763), False, 'from nose.plugins.attrib import attr\n'), ((71032, 71062), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (71036, 71062), False, 'from nose.plugins.attrib import attr\n'), ((72499, 72529), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (72503, 72529), False, 'from nose.plugins.attrib import attr\n'), ((73955, 73985), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (73959, 73985), False, 'from nose.plugins.attrib import attr\n'), ((79541, 79571), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (79545, 79571), False, 'from nose.plugins.attrib import attr\n'), ((81496, 81526), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (81500, 81526), False, 'from nose.plugins.attrib import attr\n'), ((83451, 83481), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (83455, 83481), False, 'from nose.plugins.attrib import attr\n'), ((86859, 86889), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'dvs']"}), "(tags=['advanced', 'dvs'])\n", (86863, 86889), False, 'from nose.plugins.attrib import attr\n'), ((6513, 6548), 'marvin.lib.utils.get_hypervisor_type', 'get_hypervisor_type', (['cls.api_client'], {}), '(cls.api_client)\n', (6532, 6548), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((6805, 6831), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (6815, 6831), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((6930, 6995), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (6942, 6995), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((7380, 7459), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (7394, 7459), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((7533, 7605), 'marvin.lib.base.ServiceOffering.create', 'ServiceOffering.create', (['cls.api_client', "cls.services['service_offering']"], {}), "(cls.api_client, cls.services['service_offering'])\n", (7555, 7605), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((7687, 7894), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['cls.api_client', "cls.services['virtual_machine']"], {'accountid': 'cls.account.name', 'domainid': 'cls.account.domainid', 'serviceofferingid': 'cls.service_offering.id', 'mode': 'cls.zone.networktype'}), "(cls.api_client, cls.services['virtual_machine'],\n accountid=cls.account.name, domainid=cls.account.domainid,\n serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)\n", (7708, 7894), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((8150, 8236), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['isolated_network_offering']"], {}), "(cls.api_client, cls.services[\n 'isolated_network_offering'])\n", (8172, 8236), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((8408, 8487), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['shared_network_offering']"], {}), "(cls.api_client, cls.services['shared_network_offering'])\n", (8430, 8487), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((8653, 8819), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['isolated_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.isolated_network_offering.id'}), "(cls.api_client, cls.services['isolated_network'], cls.\n account.name, cls.account.domainid, networkofferingid=cls.\n isolated_network_offering.id)\n", (8667, 8819), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((9042, 9066), 'random.randrange', 'random.randrange', (['(1)', '(254)'], {}), '(1, 254)\n', (9058, 9066), False, 'import random\n'), ((9483, 9645), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['shared_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.shared_network_offering.id'}), "(cls.api_client, cls.services['shared_network'], cls.account.\n name, cls.account.domainid, networkofferingid=cls.\n shared_network_offering.id)\n", (9497, 9645), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((9947, 9971), 'random.randrange', 'random.randrange', (['(1)', '(254)'], {}), '(1, 254)\n', (9963, 9971), False, 'import random\n'), ((11733, 11780), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'vm.id'}), '(self.apiclient, id=vm.id)\n', (11754, 11780), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((11817, 11838), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (11829, 11838), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((14188, 14298), 'marvin.lib.common.list_events', 'list_events', (['self.apiclient'], {'account': 'self.account.name', 'domainid': 'self.account.domainid', 'type': '"""NIC.CREATE"""'}), "(self.apiclient, account=self.account.name, domainid=self.\n account.domainid, type='NIC.CREATE')\n", (14199, 14298), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((14362, 14382), 'marvin.lib.utils.validateList', 'validateList', (['events'], {}), '(events)\n', (14374, 14382), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((18437, 18503), 'marvin.lib.base.VpcOffering.create', 'VpcOffering.create', (['self.api_client', "self.services['vpc_offering']"], {}), "(self.api_client, self.services['vpc_offering'])\n", (18455, 18503), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((18754, 18917), 'marvin.lib.base.VPC.create', 'VPC.create', (['self.apiclient', "self.services['vpc']"], {'vpcofferingid': 'vpc_off.id', 'zoneid': 'self.zone.id', 'account': 'self.account.name', 'domainid': 'self.account.domainid'}), "(self.apiclient, self.services['vpc'], vpcofferingid=vpc_off.id,\n zoneid=self.zone.id, account=self.account.name, domainid=self.account.\n domainid)\n", (18764, 18917), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((20039, 20105), 'marvin.lib.base.VpcOffering.create', 'VpcOffering.create', (['self.api_client', "self.services['vpc_offering']"], {}), "(self.api_client, self.services['vpc_offering'])\n", (20057, 20105), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((20356, 20519), 'marvin.lib.base.VPC.create', 'VPC.create', (['self.apiclient', "self.services['vpc']"], {'vpcofferingid': 'vpc_off.id', 'zoneid': 'self.zone.id', 'account': 'self.account.name', 'domainid': 'self.account.domainid'}), "(self.apiclient, self.services['vpc'], vpcofferingid=vpc_off.id,\n zoneid=self.zone.id, account=self.account.name, domainid=self.account.\n domainid)\n", (20366, 20519), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((23591, 23677), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (23605, 23677), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((26811, 26999), 'marvin.lib.base.Network.create', 'Network.create', (['self.api_client', "self.services['isolated_network']", 'self.child_do_admin_1.name', 'self.child_do_admin_1.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.api_client, self.services['isolated_network'], self.\n child_do_admin_1.name, self.child_do_admin_1.domainid,\n networkofferingid=self.isolated_network_offering.id)\n", (26825, 26999), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((27088, 27324), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.apiclient', "self.services['virtual_machine']"], {'accountid': 'self.child_do_admin_2.name', 'domainid': 'self.child_do_admin_2.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype'}), "(self.apiclient, self.services['virtual_machine'],\n accountid=self.child_do_admin_2.name, domainid=self.child_do_admin_2.\n domainid, serviceofferingid=self.service_offering.id, mode=self.zone.\n networktype)\n", (27109, 27324), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((27461, 27495), 'time.sleep', 'time.sleep', (["self.services['sleep']"], {}), "(self.services['sleep'])\n", (27471, 27495), False, 'import time\n'), ((28361, 28447), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (28375, 28447), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((28618, 28722), 'marvin.lib.common.update_resource_limit', 'update_resource_limit', (['self.apiclient', '(6)'], {'max': '(1)', 'account': 'account_1.name', 'domainid': 'account_1.domainid'}), '(self.apiclient, 6, max=1, account=account_1.name,\n domainid=account_1.domainid)\n', (28639, 28722), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((28902, 29066), 'marvin.lib.base.Network.create', 'Network.create', (['self.api_client', "self.services['isolated_network']", 'account_1.name', 'account_1.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.api_client, self.services['isolated_network'],\n account_1.name, account_1.domainid, networkofferingid=self.\n isolated_network_offering.id)\n", (28916, 29066), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((29299, 29505), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.apiclient', "self.services['virtual_machine']"], {'accountid': 'account_1.name', 'domainid': 'account_1.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype'}), "(self.apiclient, self.services['virtual_machine'],\n accountid=account_1.name, domainid=account_1.domainid,\n serviceofferingid=self.service_offering.id, mode=self.zone.networktype)\n", (29320, 29505), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((29782, 29868), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (29796, 29868), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((30099, 30263), 'marvin.lib.base.Network.create', 'Network.create', (['self.api_client', "self.services['isolated_network']", 'account_2.name', 'account_2.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.api_client, self.services['isolated_network'],\n account_2.name, account_2.domainid, networkofferingid=self.\n isolated_network_offering.id)\n", (30113, 30263), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((31048, 31083), 'marvin.lib.utils.get_hypervisor_type', 'get_hypervisor_type', (['cls.api_client'], {}), '(cls.api_client)\n', (31067, 31083), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((31297, 31323), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (31307, 31323), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((31422, 31487), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (31434, 31487), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((31872, 31951), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (31886, 31951), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((32025, 32097), 'marvin.lib.base.ServiceOffering.create', 'ServiceOffering.create', (['cls.api_client', "cls.services['service_offering']"], {}), "(cls.api_client, cls.services['service_offering'])\n", (32047, 32097), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((32179, 32386), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['cls.api_client', "cls.services['virtual_machine']"], {'accountid': 'cls.account.name', 'domainid': 'cls.account.domainid', 'serviceofferingid': 'cls.service_offering.id', 'mode': 'cls.zone.networktype'}), "(cls.api_client, cls.services['virtual_machine'],\n accountid=cls.account.name, domainid=cls.account.domainid,\n serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)\n", (32200, 32386), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((32613, 32699), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['isolated_network_offering']"], {}), "(cls.api_client, cls.services[\n 'isolated_network_offering'])\n", (32635, 32699), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((32907, 33073), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['isolated_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.isolated_network_offering.id'}), "(cls.api_client, cls.services['isolated_network'], cls.\n account.name, cls.account.domainid, networkofferingid=cls.\n isolated_network_offering.id)\n", (32921, 33073), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((34304, 34351), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'vm.id'}), '(self.apiclient, id=vm.id)\n', (34325, 34351), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((34388, 34409), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (34400, 34409), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((35907, 35972), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (35928, 35972), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((36009, 36030), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (36021, 36030), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((36699, 36809), 'marvin.lib.common.list_events', 'list_events', (['self.apiclient'], {'account': 'self.account.name', 'domainid': 'self.account.domainid', 'type': '"""NIC.DELETE"""'}), "(self.apiclient, account=self.account.name, domainid=self.\n account.domainid, type='NIC.DELETE')\n", (36710, 36809), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((36902, 36922), 'marvin.lib.utils.validateList', 'validateList', (['events'], {}), '(events)\n', (36914, 36922), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((37521, 37586), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (37542, 37586), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((37623, 37644), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (37635, 37644), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((38715, 38802), 'marvin.lib.base.Account.create', 'Account.create', (['self.api_client', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.api_client, self.services['account'], domainid=self.\n domain.id)\n", (38729, 38802), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((39033, 39236), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.apiclient', "self.services['virtual_machine']"], {'accountid': 'account.name', 'domainid': 'account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype'}), "(self.apiclient, self.services['virtual_machine'],\n accountid=account.name, domainid=account.domainid, serviceofferingid=\n self.service_offering.id, mode=self.zone.networktype)\n", (39054, 39236), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((40877, 41048), 'marvin.lib.base.Network.create', 'Network.create', (['self.api_client', "self.services['isolated_network']", 'self.account.name', 'self.account.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.api_client, self.services['isolated_network'], self.\n account.name, self.account.domainid, networkofferingid=self.\n isolated_network_offering.id)\n", (40891, 41048), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((41171, 41438), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.api_client', "self.services['virtual_machine']"], {'accountid': 'self.account.name', 'domainid': 'self.account.domainid', 'serviceofferingid': 'self.service_offering.id', 'networkids': '[self.network3.id]', 'ipaddress': 'vm1_ip', 'mode': 'self.zone.networktype'}), "(self.api_client, self.services['virtual_machine'],\n accountid=self.account.name, domainid=self.account.domainid,\n serviceofferingid=self.service_offering.id, networkids=[self.network3.\n id], ipaddress=vm1_ip, mode=self.zone.networktype)\n", (41192, 41438), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((41584, 41636), 'marvin.lib.base.VirtualMachine.list', 'VirtualMachine.list', (['self.api_client'], {'id': 'self.vm2.id'}), '(self.api_client, id=self.vm2.id)\n', (41603, 41636), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((42034, 42184), 'marvin.lib.base.PublicIPAddress.create', 'PublicIPAddress.create', (['self.apiclient', 'self.account.name', 'self.zone.id', 'self.account.domainid', "self.services['virtual_machine']", 'self.network3.id'], {}), "(self.apiclient, self.account.name, self.zone.id,\n self.account.domainid, self.services['virtual_machine'], self.network3.id)\n", (42056, 42184), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((42351, 42605), 'marvin.lib.base.FireWallRule.create', 'FireWallRule.create', (['self.apiclient'], {'ipaddressid': 'ip_address.ipaddress.id', 'protocol': "self.services['natrule']['protocol']", 'cidrlist': "['0.0.0.0/0']", 'startport': "self.services['natrule']['publicport']", 'endport': "self.services['natrule']['publicport']"}), "(self.apiclient, ipaddressid=ip_address.ipaddress.id,\n protocol=self.services['natrule']['protocol'], cidrlist=['0.0.0.0/0'],\n startport=self.services['natrule']['publicport'], endport=self.services\n ['natrule']['publicport'])\n", (42370, 42605), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((42720, 42815), 'marvin.lib.base.NATRule.create', 'NATRule.create', (['self.apiclient', 'self.vm2', "self.services['natrule']", 'ip_address.ipaddress.id'], {}), "(self.apiclient, self.vm2, self.services['natrule'],\n ip_address.ipaddress.id)\n", (42734, 42815), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((42903, 42949), 'marvin.lib.common.list_nat_rules', 'list_nat_rules', (['self.apiclient'], {'id': 'nat_rule.id'}), '(self.apiclient, id=nat_rule.id)\n', (42917, 42949), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((44257, 44427), 'marvin.lib.base.Network.create', 'Network.create', (['self.apiclient', "self.services['isolated_network']", 'self.account.name', 'self.account.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.apiclient, self.services['isolated_network'], self.\n account.name, self.account.domainid, networkofferingid=self.\n isolated_network_offering.id)\n", (44271, 44427), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((44549, 44719), 'marvin.lib.base.Network.create', 'Network.create', (['self.apiclient', "self.services['isolated_network']", 'self.account.name', 'self.account.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.apiclient, self.services['isolated_network'], self.\n account.name, self.account.domainid, networkofferingid=self.\n isolated_network_offering.id)\n", (44563, 44719), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((44843, 45128), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.apiclient', "self.services['virtual_machine']"], {'accountid': 'self.account.name', 'domainid': 'self.account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype', 'networkids': '[self.isolated_network.id, self.ntwk2.id, self.ntwk3.id]'}), "(self.apiclient, self.services['virtual_machine'],\n accountid=self.account.name, domainid=self.account.domainid,\n serviceofferingid=self.service_offering.id, mode=self.zone.networktype,\n networkids=[self.isolated_network.id, self.ntwk2.id, self.ntwk3.id])\n", (44864, 45128), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((45348, 45403), 'marvin.lib.base.VirtualMachine.list', 'VirtualMachine.list', (['self.apiclient'], {'id': 'self.test_vm.id'}), '(self.apiclient, id=self.test_vm.id)\n', (45367, 45403), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((45844, 45899), 'marvin.lib.base.VirtualMachine.list', 'VirtualMachine.list', (['self.apiclient'], {'id': 'self.test_vm.id'}), '(self.apiclient, id=self.test_vm.id)\n', (45863, 45899), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((46983, 47038), 'marvin.lib.base.VirtualMachine.list', 'VirtualMachine.list', (['self.apiclient'], {'id': 'self.test_vm.id'}), '(self.apiclient, id=self.test_vm.id)\n', (47002, 47038), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((47965, 48000), 'marvin.lib.utils.get_hypervisor_type', 'get_hypervisor_type', (['cls.api_client'], {}), '(cls.api_client)\n', (47984, 48000), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((48214, 48240), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (48224, 48240), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((48339, 48404), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (48351, 48404), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((48789, 48868), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (48803, 48868), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((48942, 49014), 'marvin.lib.base.ServiceOffering.create', 'ServiceOffering.create', (['cls.api_client', "cls.services['service_offering']"], {}), "(cls.api_client, cls.services['service_offering'])\n", (48964, 49014), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((49096, 49303), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['cls.api_client', "cls.services['virtual_machine']"], {'accountid': 'cls.account.name', 'domainid': 'cls.account.domainid', 'serviceofferingid': 'cls.service_offering.id', 'mode': 'cls.zone.networktype'}), "(cls.api_client, cls.services['virtual_machine'],\n accountid=cls.account.name, domainid=cls.account.domainid,\n serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)\n", (49117, 49303), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((49542, 49628), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['isolated_network_offering']"], {}), "(cls.api_client, cls.services[\n 'isolated_network_offering'])\n", (49564, 49628), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((49793, 49959), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['isolated_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.isolated_network_offering.id'}), "(cls.api_client, cls.services['isolated_network'], cls.\n account.name, cls.account.domainid, networkofferingid=cls.\n isolated_network_offering.id)\n", (49807, 49959), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((51175, 51222), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'vm.id'}), '(self.apiclient, id=vm.id)\n', (51196, 51222), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((51259, 51280), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (51271, 51280), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((52673, 52738), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (52694, 52738), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((52775, 52796), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (52787, 52796), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((53753, 53818), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (53774, 53818), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((53855, 53876), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (53867, 53876), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((54514, 54624), 'marvin.lib.common.list_events', 'list_events', (['self.apiclient'], {'account': 'self.account.name', 'domainid': 'self.account.domainid', 'type': '"""NIC.UPDATE"""'}), "(self.apiclient, account=self.account.name, domainid=self.\n account.domainid, type='NIC.UPDATE')\n", (54525, 54624), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((54717, 54737), 'marvin.lib.utils.validateList', 'validateList', (['events'], {}), '(events)\n', (54729, 54737), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((55414, 55479), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (55435, 55479), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((55516, 55537), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (55528, 55537), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((56654, 56741), 'marvin.lib.base.Account.create', 'Account.create', (['self.api_client', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.api_client, self.services['account'], domainid=self.\n domain.id)\n", (56668, 56741), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((56926, 57129), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.apiclient', "self.services['virtual_machine']"], {'accountid': 'account.name', 'domainid': 'account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype'}), "(self.apiclient, self.services['virtual_machine'],\n accountid=account.name, domainid=account.domainid, serviceofferingid=\n self.service_offering.id, mode=self.zone.networktype)\n", (56947, 57129), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((57270, 57304), 'time.sleep', 'time.sleep', (["self.services['sleep']"], {}), "(self.services['sleep'])\n", (57280, 57304), False, 'import time\n'), ((58182, 58217), 'marvin.lib.utils.get_hypervisor_type', 'get_hypervisor_type', (['cls.api_client'], {}), '(cls.api_client)\n', (58201, 58217), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((58431, 58457), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (58441, 58457), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((58555, 58620), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (58567, 58620), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((58940, 59019), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (58954, 59019), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((59093, 59165), 'marvin.lib.base.ServiceOffering.create', 'ServiceOffering.create', (['cls.api_client', "cls.services['service_offering']"], {}), "(cls.api_client, cls.services['service_offering'])\n", (59115, 59165), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((59247, 59454), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['cls.api_client', "cls.services['virtual_machine']"], {'accountid': 'cls.account.name', 'domainid': 'cls.account.domainid', 'serviceofferingid': 'cls.service_offering.id', 'mode': 'cls.zone.networktype'}), "(cls.api_client, cls.services['virtual_machine'],\n accountid=cls.account.name, domainid=cls.account.domainid,\n serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)\n", (59268, 59454), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((59641, 59727), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['isolated_network_offering']"], {}), "(cls.api_client, cls.services[\n 'isolated_network_offering'])\n", (59663, 59727), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((59895, 60061), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['isolated_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.isolated_network_offering.id'}), "(cls.api_client, cls.services['isolated_network'], cls.\n account.name, cls.account.domainid, networkofferingid=cls.\n isolated_network_offering.id)\n", (59909, 60061), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((61077, 61127), 'marvin.cloudstackAPI.addNicToVirtualMachine.addNicToVirtualMachineCmd', 'addNicToVirtualMachine.addNicToVirtualMachineCmd', ([], {}), '()\n', (61125, 61127), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((61159, 61200), 'marvin.lib.utils.random_gen', 'random_gen', ([], {'id': '"""virtual_machine"""', 'size': '(30)'}), "(id='virtual_machine', size=30)\n", (61169, 61200), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((61810, 61860), 'marvin.cloudstackAPI.addNicToVirtualMachine.addNicToVirtualMachineCmd', 'addNicToVirtualMachine.addNicToVirtualMachineCmd', ([], {}), '()\n', (61858, 61860), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((61940, 61976), 'marvin.lib.utils.random_gen', 'random_gen', ([], {'id': '"""network_id"""', 'size': '(30)'}), "(id='network_id', size=30)\n", (61950, 61976), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((62676, 62718), 'marvin.lib.common.list_zones', 'list_zones', (['self.apiclient'], {'available': '(True)'}), '(self.apiclient, available=True)\n', (62686, 62718), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((62758, 62777), 'marvin.lib.utils.validateList', 'validateList', (['zones'], {}), '(zones)\n', (62770, 62777), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((63451, 63621), 'marvin.lib.base.Network.create', 'Network.create', (['self.apiclient', "self.services['isolated_network']", 'self.account.name', 'self.account.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.apiclient, self.services['isolated_network'], self.\n account.name, self.account.domainid, networkofferingid=self.\n isolated_network_offering.id)\n", (63465, 63621), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((63961, 64011), 'marvin.cloudstackAPI.addNicToVirtualMachine.addNicToVirtualMachineCmd', 'addNicToVirtualMachine.addNicToVirtualMachineCmd', ([], {}), '()\n', (64009, 64011), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((64727, 64769), 'marvin.lib.common.list_zones', 'list_zones', (['self.apiclient'], {'available': '(True)'}), '(self.apiclient, available=True)\n', (64737, 64769), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((64809, 64828), 'marvin.lib.utils.validateList', 'validateList', (['zones'], {}), '(zones)\n', (64821, 64828), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((65436, 65558), 'marvin.lib.base.Network.create', 'Network.create', (['self.apiclient', "self.services['isolated_network']"], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.apiclient, self.services['isolated_network'],\n networkofferingid=self.isolated_network_offering.id)\n", (65450, 65558), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((65891, 66038), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.apiclient', "self.services['virtual_machine']"], {'serviceofferingid': 'self.service_offering.id', 'mode': 'basicZone.networktype'}), "(self.apiclient, self.services['virtual_machine'],\n serviceofferingid=self.service_offering.id, mode=basicZone.networktype)\n", (65912, 66038), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((66184, 66218), 'time.sleep', 'time.sleep', (["self.services['sleep']"], {}), "(self.services['sleep'])\n", (66194, 66218), False, 'import time\n'), ((66307, 66357), 'marvin.cloudstackAPI.addNicToVirtualMachine.addNicToVirtualMachineCmd', 'addNicToVirtualMachine.addNicToVirtualMachineCmd', ([], {}), '()\n', (66355, 66357), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((67082, 67132), 'marvin.cloudstackAPI.addNicToVirtualMachine.addNicToVirtualMachineCmd', 'addNicToVirtualMachine.addNicToVirtualMachineCmd', ([], {}), '()\n', (67130, 67132), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((67300, 67386), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (67314, 67386), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((68270, 68305), 'marvin.lib.utils.get_hypervisor_type', 'get_hypervisor_type', (['cls.api_client'], {}), '(cls.api_client)\n', (68289, 68305), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((68519, 68545), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (68529, 68545), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((68644, 68709), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (68656, 68709), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((69094, 69173), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (69108, 69173), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((69247, 69319), 'marvin.lib.base.ServiceOffering.create', 'ServiceOffering.create', (['cls.api_client', "cls.services['service_offering']"], {}), "(cls.api_client, cls.services['service_offering'])\n", (69269, 69319), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((69401, 69608), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['cls.api_client', "cls.services['virtual_machine']"], {'accountid': 'cls.account.name', 'domainid': 'cls.account.domainid', 'serviceofferingid': 'cls.service_offering.id', 'mode': 'cls.zone.networktype'}), "(cls.api_client, cls.services['virtual_machine'],\n accountid=cls.account.name, domainid=cls.account.domainid,\n serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)\n", (69422, 69608), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((69847, 69933), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['isolated_network_offering']"], {}), "(cls.api_client, cls.services[\n 'isolated_network_offering'])\n", (69869, 69933), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((70099, 70265), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['isolated_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.isolated_network_offering.id'}), "(cls.api_client, cls.services['isolated_network'], cls.\n account.name, cls.account.domainid, networkofferingid=cls.\n isolated_network_offering.id)\n", (70113, 70265), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((71525, 71590), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (71546, 71590), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((71627, 71648), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (71639, 71648), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((72108, 72168), 'marvin.cloudstackAPI.removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd', 'removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd', ([], {}), '()\n', (72166, 72168), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((72981, 73046), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (73002, 73046), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((73083, 73104), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (73095, 73104), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((73564, 73624), 'marvin.cloudstackAPI.removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd', 'removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd', ([], {}), '()\n', (73622, 73624), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((74317, 74382), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (74338, 74382), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((74419, 74440), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (74431, 74440), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((74900, 74960), 'marvin.cloudstackAPI.removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd', 'removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd', ([], {}), '()\n', (74958, 74960), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((75110, 75196), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (75124, 75196), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((76230, 76265), 'marvin.lib.utils.get_hypervisor_type', 'get_hypervisor_type', (['cls.api_client'], {}), '(cls.api_client)\n', (76249, 76265), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((76479, 76505), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (76489, 76505), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((76604, 76669), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (76616, 76669), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((77081, 77160), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (77095, 77160), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((77234, 77306), 'marvin.lib.base.ServiceOffering.create', 'ServiceOffering.create', (['cls.api_client', "cls.services['service_offering']"], {}), "(cls.api_client, cls.services['service_offering'])\n", (77256, 77306), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((77388, 77595), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['cls.api_client', "cls.services['virtual_machine']"], {'accountid': 'cls.account.name', 'domainid': 'cls.account.domainid', 'serviceofferingid': 'cls.service_offering.id', 'mode': 'cls.zone.networktype'}), "(cls.api_client, cls.services['virtual_machine'],\n accountid=cls.account.name, domainid=cls.account.domainid,\n serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)\n", (77409, 77595), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((77892, 77978), 'marvin.lib.base.NetworkOffering.create', 'NetworkOffering.create', (['cls.api_client', "cls.services['isolated_network_offering']"], {}), "(cls.api_client, cls.services[\n 'isolated_network_offering'])\n", (77914, 77978), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((78188, 78354), 'marvin.lib.base.Network.create', 'Network.create', (['cls.api_client', "cls.services['isolated_network']", 'cls.account.name', 'cls.account.domainid'], {'networkofferingid': 'cls.isolated_network_offering.id'}), "(cls.api_client, cls.services['isolated_network'], cls.\n account.name, cls.account.domainid, networkofferingid=cls.\n isolated_network_offering.id)\n", (78202, 78354), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((80165, 80230), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (80186, 80230), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((80267, 80288), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (80279, 80288), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((81041, 81113), 'marvin.cloudstackAPI.updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', 'updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', ([], {}), '()\n', (81111, 81113), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((82119, 82184), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (82140, 82184), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((82221, 82242), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (82233, 82242), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((82996, 83068), 'marvin.cloudstackAPI.updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', 'updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', ([], {}), '()\n', (83066, 83068), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((84070, 84156), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (84084, 84156), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((84296, 84500), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.api_client', "self.services['virtual_machine']"], {'accountid': 'account.name', 'domainid': 'account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype'}), "(self.api_client, self.services['virtual_machine'],\n accountid=account.name, domainid=account.domainid, serviceofferingid=\n self.service_offering.id, mode=self.zone.networktype)\n", (84317, 84500), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((84644, 84678), 'time.sleep', 'time.sleep', (["self.services['sleep']"], {}), "(self.services['sleep'])\n", (84654, 84678), False, 'import time\n'), ((84854, 85014), 'marvin.lib.base.Network.create', 'Network.create', (['self.apiclient', "self.services['isolated_network']", 'account.name', 'account.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.apiclient, self.services['isolated_network'], account.\n name, account.domainid, networkofferingid=self.isolated_network_offering.id\n )\n", (84868, 85014), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((85416, 85490), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'virtual_machine.id', 'listall': '(True)'}), '(self.apiclient, id=virtual_machine.id, listall=True)\n', (85437, 85490), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((85527, 85548), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (85539, 85548), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((86425, 86497), 'marvin.cloudstackAPI.updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', 'updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', ([], {}), '()\n', (86495, 86497), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((87221, 87307), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'domainid': 'self.domain.id'}), "(self.apiclient, self.services['account'], domainid=self.\n domain.id)\n", (87235, 87307), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((87709, 87774), 'marvin.lib.common.list_virtual_machines', 'list_virtual_machines', (['self.apiclient'], {'id': 'self.virtual_machine.id'}), '(self.apiclient, id=self.virtual_machine.id)\n', (87730, 87774), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((87811, 87832), 'marvin.lib.utils.validateList', 'validateList', (['vm_list'], {}), '(vm_list)\n', (87823, 87832), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((88586, 88658), 'marvin.cloudstackAPI.updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', 'updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd', ([], {}), '()\n', (88656, 88658), False, 'from marvin.cloudstackAPI import addNicToVirtualMachine, removeNicFromVirtualMachine, updateDefaultNicForVirtualMachine\n'), ((6626, 6698), 'unittest.SkipTest', 'unittest.SkipTest', (['"""This feature is supported only on XenServer and KVM"""'], {}), "('This feature is supported only on XenServer and KVM')\n", (6643, 6698), False, 'import unittest\n'), ((8956, 8998), 'marvin.lib.common.get_free_vlan', 'get_free_vlan', (['cls.api_client', 'cls.zone.id'], {}), '(cls.api_client, cls.zone.id)\n', (8969, 8998), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((16514, 16767), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.api_client', "self.services['virtual_machine']"], {'accountid': 'self.account.name', 'domainid': 'self.account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype', 'networkids': '[self.defaultNetworkId]'}), "(self.api_client, self.services['virtual_machine'],\n accountid=self.account.name, domainid=self.account.domainid,\n serviceofferingid=self.service_offering.id, mode=self.zone.networktype,\n networkids=[self.defaultNetworkId])\n", (16535, 16767), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((17811, 18064), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.api_client', "self.services['virtual_machine']"], {'accountid': 'self.account.name', 'domainid': 'self.account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype', 'networkids': '[self.defaultNetworkId]'}), "(self.api_client, self.services['virtual_machine'],\n accountid=self.account.name, domainid=self.account.domainid,\n serviceofferingid=self.service_offering.id, mode=self.zone.networktype,\n networkids=[self.defaultNetworkId])\n", (17832, 18064), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((21487, 21740), 'marvin.lib.base.VirtualMachine.create', 'VirtualMachine.create', (['self.api_client', "self.services['virtual_machine']"], {'accountid': 'self.account.name', 'domainid': 'self.account.domainid', 'serviceofferingid': 'self.service_offering.id', 'mode': 'self.zone.networktype', 'networkids': '[self.defaultNetworkId]'}), "(self.api_client, self.services['virtual_machine'],\n accountid=self.account.name, domainid=self.account.domainid,\n serviceofferingid=self.service_offering.id, mode=self.zone.networktype,\n networkids=[self.defaultNetworkId])\n", (21508, 21740), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((23765, 23926), 'marvin.lib.base.Network.create', 'Network.create', (['self.api_client', "self.services['isolated_network']", 'account.name', 'account.domainid'], {'networkofferingid': 'self.isolated_network_offering.id'}), "(self.api_client, self.services['isolated_network'], account.\n name, account.domainid, networkofferingid=self.isolated_network_offering.id\n )\n", (23779, 23926), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((25682, 25780), 'marvin.lib.base.Domain.create', 'Domain.create', (['self.apiclient'], {'services': "self.services['domain']", 'parentdomainid': 'self.domain.id'}), "(self.apiclient, services=self.services['domain'],\n parentdomainid=self.domain.id)\n", (25695, 25780), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((25963, 26068), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'admin': '(True)', 'domainid': 'self.child_domain_1.id'}), "(self.apiclient, self.services['account'], admin=True,\n domainid=self.child_domain_1.id)\n", (25977, 26068), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((26233, 26331), 'marvin.lib.base.Domain.create', 'Domain.create', (['self.apiclient'], {'services': "self.services['domain']", 'parentdomainid': 'self.domain.id'}), "(self.apiclient, services=self.services['domain'],\n parentdomainid=self.domain.id)\n", (26246, 26331), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((26514, 26619), 'marvin.lib.base.Account.create', 'Account.create', (['self.apiclient', "self.services['account']"], {'admin': '(True)', 'domainid': 'self.child_domain_2.id'}), "(self.apiclient, self.services['account'], admin=True,\n domainid=self.child_domain_2.id)\n", (26528, 26619), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((31161, 31233), 'unittest.SkipTest', 'unittest.SkipTest', (['"""This feature is supported only on XenServer and KVM"""'], {}), "('This feature is supported only on XenServer and KVM')\n", (31178, 31233), False, 'import unittest\n'), ((43444, 43490), 'marvin.lib.base.VirtualMachine.list', 'VirtualMachine.list', (['self.apiclient'], {'id': 'vm1.id'}), '(self.apiclient, id=vm1.id)\n', (43463, 43490), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((48078, 48150), 'unittest.SkipTest', 'unittest.SkipTest', (['"""This feature is supported only on XenServer and KVM"""'], {}), "('This feature is supported only on XenServer and KVM')\n", (48095, 48150), False, 'import unittest\n'), ((58295, 58367), 'unittest.SkipTest', 'unittest.SkipTest', (['"""This feature is supported only on XenServer and KVM"""'], {}), "('This feature is supported only on XenServer and KVM')\n", (58312, 58367), False, 'import unittest\n'), ((64172, 64185), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (64182, 64185), False, 'import time\n'), ((66642, 66655), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (66652, 66655), False, 'import time\n'), ((67868, 67881), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (67878, 67881), False, 'import time\n'), ((68383, 68455), 'unittest.SkipTest', 'unittest.SkipTest', (['"""This feature is supported only on XenServer and KVM"""'], {}), "('This feature is supported only on XenServer and KVM')\n", (68400, 68455), False, 'import unittest\n'), ((72226, 72238), 'marvin.lib.utils.random_gen', 'random_gen', ([], {}), '()\n', (72236, 72238), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((73713, 73725), 'marvin.lib.utils.random_gen', 'random_gen', ([], {}), '()\n', (73723, 73725), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((76343, 76415), 'unittest.SkipTest', 'unittest.SkipTest', (['"""This feature is supported only on XenServer and KVM"""'], {}), "('This feature is supported only on XenServer and KVM')\n", (76360, 76415), False, 'import unittest\n'), ((81171, 81183), 'marvin.lib.utils.random_gen', 'random_gen', ([], {}), '()\n', (81181, 81183), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((83174, 83186), 'marvin.lib.utils.random_gen', 'random_gen', ([], {}), '()\n', (83184, 83186), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((24223, 24377), 'marvin.lib.base.Network.create', 'Network.create', (['self.api_client', "self.services['shared_network_2']", 'account.name', 'account.domainid'], {'networkofferingid': 'self.shared_network_offering.id'}), "(self.api_client, self.services['shared_network_2'], account.\n name, account.domainid, networkofferingid=self.shared_network_offering.id)\n", (24237, 24377), False, 'from marvin.lib.base import Account, Domain, ServiceOffering, VirtualMachine, NetworkOffering, Network, VpcOffering, VPC, PublicIPAddress, FireWallRule, NATRule\n'), ((41696, 41713), 'marvin.lib.utils.validateList', 'validateList', (['vm2'], {}), '(vm2)\n', (41708, 41713), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((43022, 43058), 'marvin.lib.utils.validateList', 'validateList', (['list_nat_rule_response'], {}), '(list_nat_rule_response)\n', (43034, 43058), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((45463, 45483), 'marvin.lib.utils.validateList', 'validateList', (['vm_res'], {}), '(vm_res)\n', (45475, 45483), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((45594, 45617), 'marvin.lib.utils.validateList', 'validateList', (['self.nics'], {}), '(self.nics)\n', (45606, 45617), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((45959, 45980), 'marvin.lib.utils.validateList', 'validateList', (['vm_res2'], {}), '(vm_res2)\n', (45971, 45980), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((46463, 46490), 'marvin.lib.utils.validateList', 'validateList', (['nic_to_attach'], {}), '(nic_to_attach)\n', (46475, 46490), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((47077, 47098), 'marvin.lib.utils.validateList', 'validateList', (['vm_res3'], {}), '(vm_res3)\n', (47089, 47098), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((47418, 47439), 'marvin.lib.utils.validateList', 'validateList', (['vm_nics'], {}), '(vm_nics)\n', (47430, 47439), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n'), ((24154, 24197), 'marvin.lib.common.get_free_vlan', 'get_free_vlan', (['self.apiclient', 'self.zone.id'], {}), '(self.apiclient, self.zone.id)\n', (24167, 24197), False, 'from marvin.lib.common import get_domain, get_zone, get_template, list_virtual_machines, list_events, list_zones, get_free_vlan, update_resource_limit, list_nat_rules\n'), ((43520, 43541), 'marvin.lib.utils.validateList', 'validateList', (['vm1_res'], {}), '(vm1_res)\n', (43532, 43541), False, 'from marvin.lib.utils import validateList, random_gen, get_hypervisor_type\n')]
|
"""well_utils.py: functions used by the classes in resqpy.well"""
version = '10th November 2021'
# Nexus is a registered trademark of the Halliburton Company
# RMS and ROXAR are registered trademarks of Roxar Software Solutions AS, an Emerson company
import logging
log = logging.getLogger(__name__)
import numpy as np
import resqpy.olio.grid_functions as gf
import resqpy.olio.intersection as intersect
import resqpy.olio.keyword_files as kf
import resqpy.olio.xml_et as rqet
def load_hdf5_array(object, node, array_attribute, tag = 'Values', dtype = 'float', model = None):
"""Loads the property array data as an attribute of object, from the hdf5 referenced in xml node.
:meta private:
"""
assert (rqet.node_type(node) in ['DoubleHdf5Array', 'IntegerHdf5Array', 'Point3dHdf5Array'])
if model is None:
model = object.model
h5_key_pair = model.h5_uuid_and_path_for_node(node, tag = tag)
if h5_key_pair is None:
return None
return model.h5_array_element(h5_key_pair,
index = None,
cache_array = True,
dtype = dtype,
object = object,
array_attribute = array_attribute)
def extract_xyz(xyz_node):
"""Extracts an x,y,z coordinate from a solitary point xml node.
argument:
xyz_node: the xml node representing the solitary point (in 3D space)
returns:
triple float: (x, y, z) coordinates as a tuple
"""
if xyz_node is None:
return None
xyz = np.zeros(3)
for axis in range(3):
xyz[axis] = rqet.find_tag_float(xyz_node, 'Coordinate' + str(axis + 1), must_exist = True)
return tuple(xyz)
def well_names_in_cellio_file(cellio_file):
"""Returns a list of well names as found in the RMS blocked well export cell I/O file."""
well_list = []
with open(cellio_file, 'r') as fp:
while True:
kf.skip_blank_lines_and_comments(fp)
line = fp.readline() # file format version number?
if line == '':
break # end of file
fp.readline() # 'Undefined'
words = fp.readline().split()
assert len(words), 'missing header info (well name) in cell I/O file'
well_list.append(words[0])
while not kf.blank_line(fp):
fp.readline() # skip to block of data for next well
return well_list
# 'private' functions
def find_entry_and_exit(cp, entry_vector, exit_vector, well_name):
"""Returns (entry_axis, entry_polarity, entry_xyz, exit_axis, exit_polarity, exit_xyz).
:meta private:
"""
cell_centre = np.mean(cp, axis = (0, 1, 2))
face_triangles = gf.triangles_for_cell_faces(cp).reshape(-1, 3, 3) # flattened first index 4 values per face
entry_points = intersect.line_triangles_intersects(cell_centre, entry_vector, face_triangles, line_segment = True)
entry_axis = entry_polarity = entry_xyz = exit_xyz = None
for t in range(24):
if not np.any(np.isnan(entry_points[t])):
entry_xyz = entry_points[t]
entry_axis = t // 8
entry_polarity = (t - 8 * entry_axis) // 4
break
assert entry_axis is not None, 'failed to find entry face for a perforation in well ' + str(well_name)
exit_points = intersect.line_triangles_intersects(cell_centre, exit_vector, face_triangles, line_segment = True)
exit_axis = exit_polarity = None
for t in range(24):
if not np.any(np.isnan(exit_points[t])):
exit_xyz = entry_points[t]
exit_axis = t // 8
exit_polarity = (t - 8 * exit_axis) // 4
break
assert exit_axis is not None, 'failed to find exit face for a perforation in well ' + str(well_name)
return (entry_axis, entry_polarity, entry_xyz, exit_axis, exit_polarity, exit_xyz)
def _as_optional_array(arr):
"""If not None, cast as numpy array.
Casting directly to an array can be problematic: np.array(None) creates an unsized array, which is potentially
confusing.
"""
if arr is None:
return None
else:
return np.array(arr)
def _pl(i, e = False):
return '' if i == 1 else 'es' if e else 's'
def _derive_from_wellspec_verify_col_list(add_properties):
""" Verify additional properties to be added to the WELLSPEC file.
argument:
add_properties (boolean): if True, the additional properties specified will be added to the WELLSPEC file
returns:
list of columns to be added to the WELLSPEC file
"""
if add_properties:
if isinstance(add_properties, list):
col_list = ['IW', 'JW', 'L'] + [col.upper() for col in add_properties if col not in ['IW', 'JW', 'L']]
else:
col_list = []
else:
col_list = ['IW', 'JW', 'L', 'ANGLA', 'ANGLV']
return col_list
def _derive_from_wellspec_check_grid_name(check_grid_name, grid, col_list):
""" Verify the grid object to which the cell indices in the WELLSPEC table belong.
arguments:
check_grid_name (boolean): if True, the citation title of the grid will be extracted and returned
grid (grid object): the grid object whose citation titles will be returned
col_list (list): list of strings of column names to be added to the WELLSPEC file. If a citation title is
extracted from the grid object, 'GRID' will be added to the col_list
returns:
string of grid citation title extracted from the grid object
list of columns to be added to the WELLSPEC file
"""
if check_grid_name:
grid_name = rqet.citation_title_for_node(grid.root).upper()
if not grid_name:
name_for_check = None
else:
col_list.append('GRID')
name_for_check = grid_name
else:
name_for_check = None
return name_for_check, col_list
|
[
"resqpy.olio.xml_et.citation_title_for_node",
"numpy.zeros",
"resqpy.olio.keyword_files.skip_blank_lines_and_comments",
"numpy.isnan",
"resqpy.olio.grid_functions.triangles_for_cell_faces",
"resqpy.olio.keyword_files.blank_line",
"numpy.mean",
"numpy.array",
"resqpy.olio.xml_et.node_type",
"resqpy.olio.intersection.line_triangles_intersects",
"logging.getLogger"
] |
[((276, 303), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (293, 303), False, 'import logging\n'), ((1616, 1627), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (1624, 1627), True, 'import numpy as np\n'), ((2738, 2765), 'numpy.mean', 'np.mean', (['cp'], {'axis': '(0, 1, 2)'}), '(cp, axis=(0, 1, 2))\n', (2745, 2765), True, 'import numpy as np\n'), ((2901, 3002), 'resqpy.olio.intersection.line_triangles_intersects', 'intersect.line_triangles_intersects', (['cell_centre', 'entry_vector', 'face_triangles'], {'line_segment': '(True)'}), '(cell_centre, entry_vector,\n face_triangles, line_segment=True)\n', (2936, 3002), True, 'import resqpy.olio.intersection as intersect\n'), ((3407, 3507), 'resqpy.olio.intersection.line_triangles_intersects', 'intersect.line_triangles_intersects', (['cell_centre', 'exit_vector', 'face_triangles'], {'line_segment': '(True)'}), '(cell_centre, exit_vector,\n face_triangles, line_segment=True)\n', (3442, 3507), True, 'import resqpy.olio.intersection as intersect\n'), ((727, 747), 'resqpy.olio.xml_et.node_type', 'rqet.node_type', (['node'], {}), '(node)\n', (741, 747), True, 'import resqpy.olio.xml_et as rqet\n'), ((4226, 4239), 'numpy.array', 'np.array', (['arr'], {}), '(arr)\n', (4234, 4239), True, 'import numpy as np\n'), ((2006, 2042), 'resqpy.olio.keyword_files.skip_blank_lines_and_comments', 'kf.skip_blank_lines_and_comments', (['fp'], {}), '(fp)\n', (2038, 2042), True, 'import resqpy.olio.keyword_files as kf\n'), ((2789, 2820), 'resqpy.olio.grid_functions.triangles_for_cell_faces', 'gf.triangles_for_cell_faces', (['cp'], {}), '(cp)\n', (2816, 2820), True, 'import resqpy.olio.grid_functions as gf\n'), ((2397, 2414), 'resqpy.olio.keyword_files.blank_line', 'kf.blank_line', (['fp'], {}), '(fp)\n', (2410, 2414), True, 'import resqpy.olio.keyword_files as kf\n'), ((3109, 3134), 'numpy.isnan', 'np.isnan', (['entry_points[t]'], {}), '(entry_points[t])\n', (3117, 3134), True, 'import numpy as np\n'), ((3589, 3613), 'numpy.isnan', 'np.isnan', (['exit_points[t]'], {}), '(exit_points[t])\n', (3597, 3613), True, 'import numpy as np\n'), ((5725, 5764), 'resqpy.olio.xml_et.citation_title_for_node', 'rqet.citation_title_for_node', (['grid.root'], {}), '(grid.root)\n', (5753, 5764), True, 'import resqpy.olio.xml_et as rqet\n')]
|
## ----------------------------------------------------------------------------
from tensorflow.keras import models, layers
from tensorflow.keras import metrics
from tensorflow.keras import optimizers
from tensorflow.keras import losses
from tensorflow.keras import regularizers
class MulticlassAUC(metrics.AUC):
"""AUC for a single class in a muliticlass problem.
Parameters
----------
pos_label : int
Label of the positive class (the one whose AUC is being computed).
from_logits : bool, optional (default: False)
If True, assume predictions are not standardized to be between 0 and 1.
In this case, predictions will be squeezed into probabilities using the
softmax function.
sparse : bool, optional (default: True)
If True, ground truth labels should be encoded as integer indices in the
range [0, n_classes-1]. Otherwise, ground truth labels should be one-hot
encoded indicator vectors (with a 1 in the true label position and 0
elsewhere).
**kwargs : keyword arguments
Keyword arguments for tf.keras.metrics.AUC.__init__(). For example, the
curve type (curve='ROC' or curve='PR').
"""
def __init__(self, pos_label, from_logits=False, sparse=True, **kwargs):
super().__init__(**kwargs)
self.pos_label = pos_label
self.from_logits = from_logits
self.sparse = sparse
def update_state(self, y_true, y_pred, **kwargs):
"""Accumulates confusion matrix statistics.
Parameters
----------
y_true : tf.Tensor
The ground truth values. Either an integer tensor of shape
(n_examples,) (if sparse=True) or a one-hot tensor of shape
(n_examples, n_classes) (if sparse=False).
y_pred : tf.Tensor
The predicted values, a tensor of shape (n_examples, n_classes).
**kwargs : keyword arguments
Extra keyword arguments for tf.keras.metrics.AUC.update_state
(e.g., sample_weight).
"""
if self.sparse:
y_true = tf.math.equal(y_true, self.pos_label)
y_true = tf.squeeze(y_true)
else:
y_true = y_true[..., self.pos_label]
if self.from_logits:
y_pred = tf.nn.softmax(y_pred, axis=-1)
y_pred = y_pred[..., self.pos_label]
super().update_state(y_true, y_pred, **kwargs)
METRICS = [
metrics.CategoricalAccuracy(name='accuracy'),
metrics.Precision(class_id = 1, name='precision'),
metrics.Recall(class_id = 1, name='recall'),
MulticlassAUC(pos_label = 1, sparse = False, name = 'auc')
]
def build_model(lr = .0004, conv_filters = 16, dense_neurons = 16, dense_layers = 1,
activity_reg = 0.001, dropout_rate = 0.2, input_channels = 2):
model = models.Sequential()
model.add(layers.Input(shape=(15, 35, input_channels))) ## define input shape
model.add(layers.Conv2D(conv_filters,
(3,3), activity_regularizer=regularizers.l2(0.01)))
model.add(layers.Activation('relu')) ## add convolutional layer
model.add(layers.MaxPooling2D((2,2)))
model.add(layers.Dropout(dropout_rate))
model.add(layers.Conv2D(conv_filters, (3,3), activity_regularizer=regularizers.l2(activity_reg)))
model.add(layers.Activation('relu')) ## add convolutional layer
model.add(layers.MaxPooling2D((2,2))) ## pooling layer
model.add(layers.Dropout(dropout_rate))
model.add(layers.Flatten()) ## converts from 2D array to 1D array
for i in range(dense_layers):
model.add(layers.Dense(dense_neurons, activity_regularizer=regularizers.l2(activity_reg))) ## dense layer
model.add(layers.Activation('relu'))
model.add(layers.Dense(2, activation='softmax')) ## classifier layer (binary class where 1=extreme)
model.compile(loss=losses.CategoricalCrossentropy(),
optimizer=optimizers.Adam(learning_rate = lr),
metrics=METRICS)
return(model)
|
[
"tensorflow.keras.layers.MaxPooling2D",
"tensorflow.keras.regularizers.l2",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.metrics.CategoricalAccuracy",
"tensorflow.keras.metrics.Recall",
"tensorflow.keras.losses.CategoricalCrossentropy",
"tensorflow.keras.layers.Activation",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.layers.Input",
"tensorflow.keras.models.Sequential",
"tensorflow.keras.metrics.Precision",
"tensorflow.keras.layers.Flatten"
] |
[((2446, 2490), 'tensorflow.keras.metrics.CategoricalAccuracy', 'metrics.CategoricalAccuracy', ([], {'name': '"""accuracy"""'}), "(name='accuracy')\n", (2473, 2490), False, 'from tensorflow.keras import metrics\n'), ((2496, 2543), 'tensorflow.keras.metrics.Precision', 'metrics.Precision', ([], {'class_id': '(1)', 'name': '"""precision"""'}), "(class_id=1, name='precision')\n", (2513, 2543), False, 'from tensorflow.keras import metrics\n'), ((2551, 2592), 'tensorflow.keras.metrics.Recall', 'metrics.Recall', ([], {'class_id': '(1)', 'name': '"""recall"""'}), "(class_id=1, name='recall')\n", (2565, 2592), False, 'from tensorflow.keras import metrics\n'), ((2840, 2859), 'tensorflow.keras.models.Sequential', 'models.Sequential', ([], {}), '()\n', (2857, 2859), False, 'from tensorflow.keras import models, layers\n'), ((2875, 2919), 'tensorflow.keras.layers.Input', 'layers.Input', ([], {'shape': '(15, 35, input_channels)'}), '(shape=(15, 35, input_channels))\n', (2887, 2919), False, 'from tensorflow.keras import models, layers\n'), ((3086, 3111), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (3103, 3111), False, 'from tensorflow.keras import models, layers\n'), ((3154, 3181), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (3173, 3181), False, 'from tensorflow.keras import models, layers\n'), ((3196, 3224), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (3210, 3224), False, 'from tensorflow.keras import models, layers\n'), ((3372, 3397), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (3389, 3397), False, 'from tensorflow.keras import models, layers\n'), ((3440, 3467), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (3459, 3467), False, 'from tensorflow.keras import models, layers\n'), ((3499, 3527), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout_rate'], {}), '(dropout_rate)\n', (3513, 3527), False, 'from tensorflow.keras import models, layers\n'), ((3572, 3588), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (3586, 3588), False, 'from tensorflow.keras import models, layers\n'), ((3846, 3883), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2)'], {'activation': '"""softmax"""'}), "(2, activation='softmax')\n", (3858, 3883), False, 'from tensorflow.keras import models, layers\n'), ((3799, 3824), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (3816, 3824), False, 'from tensorflow.keras import models, layers\n'), ((3964, 3996), 'tensorflow.keras.losses.CategoricalCrossentropy', 'losses.CategoricalCrossentropy', ([], {}), '()\n', (3994, 3996), False, 'from tensorflow.keras import losses\n'), ((4027, 4060), 'tensorflow.keras.optimizers.Adam', 'optimizers.Adam', ([], {'learning_rate': 'lr'}), '(learning_rate=lr)\n', (4042, 4060), False, 'from tensorflow.keras import optimizers\n'), ((3047, 3068), 'tensorflow.keras.regularizers.l2', 'regularizers.l2', (['(0.01)'], {}), '(0.01)\n', (3062, 3068), False, 'from tensorflow.keras import regularizers\n'), ((3325, 3354), 'tensorflow.keras.regularizers.l2', 'regularizers.l2', (['activity_reg'], {}), '(activity_reg)\n', (3340, 3354), False, 'from tensorflow.keras import regularizers\n'), ((3734, 3763), 'tensorflow.keras.regularizers.l2', 'regularizers.l2', (['activity_reg'], {}), '(activity_reg)\n', (3749, 3763), False, 'from tensorflow.keras import regularizers\n')]
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import sys
import time
from math import ceil
from typing import List, Tuple
from unittest.mock import patch
import numpy as np
from ax.core.arm import Arm
from ax.core.base_trial import TrialStatus
from ax.core.generator_run import GeneratorRun
from ax.core.metric import Metric
from ax.core.outcome_constraint import OutcomeConstraint
from ax.core.parameter import (
ChoiceParameter,
FixedParameter,
ParameterType,
RangeParameter,
)
from ax.core.types import ComparisonOp
from ax.exceptions.core import DataRequiredError, UnsupportedPlotError
from ax.metrics.branin import branin
from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
from ax.modelbridge.registry import MODEL_KEY_TO_MODEL_SETUP, Models
from ax.service.ax_client import AxClient
from ax.storage.sqa_store.db import init_test_engine_and_session_factory
from ax.storage.sqa_store.decoder import Decoder
from ax.storage.sqa_store.encoder import Encoder
from ax.storage.sqa_store.sqa_config import SQAConfig
from ax.storage.sqa_store.structs import DBSettings
from ax.utils.common.testutils import TestCase
from ax.utils.common.timeutils import current_timestamp_in_millis
from ax.utils.common.typeutils import checked_cast, not_none
from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans
def run_trials_using_recommended_parallelism(
ax_client: AxClient,
recommended_parallelism: List[Tuple[int, int]],
total_trials: int,
) -> int:
remaining_trials = total_trials
for num_trials, parallelism_setting in recommended_parallelism:
if num_trials == -1:
num_trials = remaining_trials
for _ in range(ceil(num_trials / parallelism_setting)):
in_flight_trials = []
if parallelism_setting > remaining_trials:
parallelism_setting = remaining_trials
for _ in range(parallelism_setting):
params, idx = ax_client.get_next_trial()
in_flight_trials.append((params, idx))
remaining_trials -= 1
for _ in range(parallelism_setting):
params, idx = in_flight_trials.pop()
ax_client.complete_trial(idx, branin(params["x"], params["y"]))
# If all went well and no errors were raised, remaining_trials should be 0.
return remaining_trials
class TestAxClient(TestCase):
"""Tests service-like API functionality."""
def setUp(self):
# To avoid tests timing out due to GP fit / gen times.
patch.dict(
f"{Models.__module__}.MODEL_KEY_TO_MODEL_SETUP",
{"GPEI": MODEL_KEY_TO_MODEL_SETUP["Sobol"]},
).start()
def test_interruption(self) -> None:
ax_client = AxClient()
ax_client.create_experiment(
name="test",
parameters=[ # pyre-fixme[6]: expected union that should include
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
objective_name="branin",
minimize=True,
)
for i in range(6):
parameterization, trial_index = ax_client.get_next_trial()
self.assertFalse( # There should be non-complete trials.
all(t.status.is_terminal for t in ax_client.experiment.trials.values())
)
x, y = parameterization.get("x"), parameterization.get("y")
ax_client.complete_trial(
trial_index,
raw_data=checked_cast(
float, branin(checked_cast(float, x), checked_cast(float, y))
),
)
old_client = ax_client
serialized = ax_client.to_json_snapshot()
ax_client = AxClient.from_json_snapshot(serialized)
self.assertEqual(len(ax_client.experiment.trials.keys()), i + 1)
self.assertIsNot(ax_client, old_client)
self.assertTrue( # There should be no non-complete trials.
all(t.status.is_terminal for t in ax_client.experiment.trials.values())
)
@patch(
"ax.modelbridge.base.observations_from_data",
autospec=True,
return_value=([get_observation1()]),
)
@patch(
"ax.modelbridge.random.RandomModelBridge.get_training_data",
autospec=True,
return_value=([get_observation1()]),
)
@patch(
"ax.modelbridge.random.RandomModelBridge._predict",
autospec=True,
return_value=[get_observation1trans().data],
)
@patch(
"ax.modelbridge.random.RandomModelBridge.feature_importances",
autospec=True,
return_value={"x": 0.9, "y": 1.1},
)
def test_default_generation_strategy_continuous(self, _a, _b, _c, _d) -> None:
"""Test that Sobol+GPEI is used if no GenerationStrategy is provided."""
ax_client = AxClient()
ax_client.create_experiment(
parameters=[ # pyre-fixme[6]: expected union that should include
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
objective_name="a",
minimize=True,
)
self.assertEqual(
[s.model for s in not_none(ax_client.generation_strategy)._steps],
[Models.SOBOL, Models.GPEI],
)
with self.assertRaisesRegex(ValueError, ".* no trials"):
ax_client.get_optimization_trace(objective_optimum=branin.fmin)
for i in range(6):
parameterization, trial_index = ax_client.get_next_trial()
x, y = parameterization.get("x"), parameterization.get("y")
ax_client.complete_trial(
trial_index,
raw_data={
"a": (
checked_cast(
float,
branin(checked_cast(float, x), checked_cast(float, y)),
),
0.0,
)
},
sample_size=i,
)
self.assertEqual(ax_client.generation_strategy.model._model_key, "GPEI")
ax_client.get_optimization_trace(objective_optimum=branin.fmin)
ax_client.get_contour_plot()
ax_client.get_feature_importances()
trials_df = ax_client.get_trials_data_frame()
self.assertIn("x", trials_df)
self.assertIn("y", trials_df)
self.assertIn("a", trials_df)
self.assertEqual(len(trials_df), 6)
def test_default_generation_strategy_discrete(self) -> None:
"""Test that Sobol is used if no GenerationStrategy is provided and
the search space is discrete.
"""
# Test that Sobol is chosen when all parameters are choice.
ax_client = AxClient()
ax_client.create_experiment(
parameters=[ # pyre-fixme[6]: expected union that should include
{"name": "x", "type": "choice", "values": [1, 2, 3]},
{"name": "y", "type": "choice", "values": [1, 2, 3]},
]
)
self.assertEqual(
[s.model for s in not_none(ax_client.generation_strategy)._steps],
[Models.SOBOL],
)
self.assertEqual(ax_client.get_max_parallelism(), [(-1, -1)])
self.assertTrue(ax_client.get_trials_data_frame().empty)
def test_create_experiment(self) -> None:
"""Test basic experiment creation."""
ax_client = AxClient(
GenerationStrategy(
steps=[GenerationStep(model=Models.SOBOL, num_trials=30)]
)
)
with self.assertRaisesRegex(ValueError, "Experiment not set on Ax client"):
ax_client.experiment
ax_client.create_experiment(
name="test_experiment",
parameters=[
{
"name": "x",
"type": "range",
"bounds": [0.001, 0.1],
"value_type": "float",
"log_scale": True,
},
{
"name": "y",
"type": "choice",
"values": [1, 2, 3],
"value_type": "int",
"is_ordered": True,
},
{"name": "x3", "type": "fixed", "value": 2, "value_type": "int"},
{
"name": "x4",
"type": "range",
"bounds": [1.0, 3.0],
"value_type": "int",
},
{
"name": "x5",
"type": "choice",
"values": ["one", "two", "three"],
"value_type": "str",
},
{
"name": "x6",
"type": "range",
"bounds": [1.0, 3.0],
"value_type": "int",
},
],
objective_name="test_objective",
minimize=True,
outcome_constraints=["some_metric >= 3", "some_metric <= 4.0"],
parameter_constraints=["x4 <= x6"],
)
assert ax_client._experiment is not None
self.assertEqual(ax_client._experiment, ax_client.experiment)
self.assertEqual(
ax_client._experiment.search_space.parameters["x"],
RangeParameter(
name="x",
parameter_type=ParameterType.FLOAT,
lower=0.001,
upper=0.1,
log_scale=True,
),
)
self.assertEqual(
ax_client._experiment.search_space.parameters["y"],
ChoiceParameter(
name="y",
parameter_type=ParameterType.INT,
values=[1, 2, 3],
is_ordered=True,
),
)
self.assertEqual(
ax_client._experiment.search_space.parameters["x3"],
FixedParameter(name="x3", parameter_type=ParameterType.INT, value=2),
)
self.assertEqual(
ax_client._experiment.search_space.parameters["x4"],
RangeParameter(
name="x4", parameter_type=ParameterType.INT, lower=1.0, upper=3.0
),
)
self.assertEqual(
ax_client._experiment.search_space.parameters["x5"],
ChoiceParameter(
name="x5",
parameter_type=ParameterType.STRING,
values=["one", "two", "three"],
),
)
self.assertEqual(
ax_client._experiment.optimization_config.outcome_constraints[0],
OutcomeConstraint(
metric=Metric(name="some_metric"),
op=ComparisonOp.GEQ,
bound=3.0,
relative=False,
),
)
self.assertEqual(
ax_client._experiment.optimization_config.outcome_constraints[1],
OutcomeConstraint(
metric=Metric(name="some_metric"),
op=ComparisonOp.LEQ,
bound=4.0,
relative=False,
),
)
self.assertTrue(ax_client._experiment.optimization_config.objective.minimize)
def test_constraint_same_as_objective(self):
"""Check that we do not allow constraints on the objective metric."""
ax_client = AxClient(
GenerationStrategy(
steps=[GenerationStep(model=Models.SOBOL, num_trials=30)]
)
)
with self.assertRaises(ValueError):
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x3", "type": "fixed", "value": 2, "value_type": "int"}
],
objective_name="test_objective",
outcome_constraints=["test_objective >= 3"],
)
def test_raw_data_format(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
for _ in range(6):
parameterization, trial_index = ax_client.get_next_trial()
x, y = parameterization.get("x"), parameterization.get("y")
ax_client.complete_trial(trial_index, raw_data=(branin(x, y), 0.0))
with self.assertRaisesRegex(ValueError, "Raw data has an invalid type"):
ax_client.update_trial_data(trial_index, raw_data="invalid_data")
def test_raw_data_format_with_fidelities(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 1.0]},
],
minimize=True,
)
for _ in range(6):
parameterization, trial_index = ax_client.get_next_trial()
x, y = parameterization.get("x"), parameterization.get("y")
ax_client.complete_trial(
trial_index,
raw_data=[
({"y": y / 2.0}, {"objective": (branin(x, y / 2.0), 0.0)}),
({"y": y}, {"objective": (branin(x, y), 0.0)}),
],
)
def test_keep_generating_without_data(self):
# Check that normally numebr of arms to generate is enforced.
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
for _ in range(5):
parameterization, trial_index = ax_client.get_next_trial()
with self.assertRaisesRegex(DataRequiredError, "All trials for current model"):
ax_client.get_next_trial()
# Check thatwith enforce_sequential_optimization off, we can keep
# generating.
ax_client = AxClient(enforce_sequential_optimization=False)
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
self.assertFalse(
ax_client.generation_strategy._steps[0].enforce_num_trials, False
)
self.assertFalse(ax_client.generation_strategy._steps[1].max_parallelism, None)
for _ in range(10):
parameterization, trial_index = ax_client.get_next_trial()
def test_trial_completion(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
params, idx = ax_client.get_next_trial()
# Can't update before completing.
with self.assertRaisesRegex(ValueError, ".* not yet"):
ax_client.update_trial_data(
trial_index=idx, raw_data={"objective": (0, 0.0)}
)
ax_client.complete_trial(trial_index=idx, raw_data={"objective": (0, 0.0)})
# Cannot complete a trial twice, should use `update_trial_data`.
with self.assertRaisesRegex(ValueError, ".* already been completed"):
ax_client.complete_trial(trial_index=idx, raw_data={"objective": (0, 0.0)})
# Cannot update trial data with observation for a metric it already has.
with self.assertRaisesRegex(ValueError, ".* contained an observation"):
ax_client.update_trial_data(
trial_index=idx, raw_data={"objective": (0, 0.0)}
)
# Same as above, except objective name should be getting inferred.
with self.assertRaisesRegex(ValueError, ".* contained an observation"):
ax_client.update_trial_data(trial_index=idx, raw_data=1.0)
ax_client.update_trial_data(trial_index=idx, raw_data={"m1": (1, 0.0)})
metrics_in_data = ax_client.experiment.fetch_data().df["metric_name"].values
self.assertIn("m1", metrics_in_data)
self.assertIn("objective", metrics_in_data)
self.assertEqual(ax_client.get_best_parameters()[0], params)
params2, idy = ax_client.get_next_trial()
ax_client.complete_trial(trial_index=idy, raw_data=(-1, 0.0))
self.assertEqual(ax_client.get_best_parameters()[0], params2)
params3, idx3 = ax_client.get_next_trial()
ax_client.complete_trial(
trial_index=idx3, raw_data=-2, metadata={"dummy": "test"}
)
self.assertEqual(ax_client.get_best_parameters()[0], params3)
self.assertEqual(
ax_client.experiment.trials.get(2).run_metadata.get("dummy"), "test"
)
best_trial_values = ax_client.get_best_parameters()[1]
self.assertEqual(best_trial_values[0], {"objective": -2.0})
self.assertTrue(math.isnan(best_trial_values[1]["objective"]["objective"]))
def test_abandon_trial(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
# An abandoned trial adds no data.
params, idx = ax_client.get_next_trial()
ax_client.abandon_trial(trial_index=idx)
data = ax_client.experiment.fetch_data()
self.assertEqual(len(data.df.index), 0)
# Can't update a completed trial.
params2, idx2 = ax_client.get_next_trial()
ax_client.complete_trial(trial_index=idx2, raw_data={"objective": (0, 0.0)})
with self.assertRaisesRegex(ValueError, ".* in a terminal state."):
ax_client.abandon_trial(trial_index=idx2)
def test_ttl_trial(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
# A ttl trial that ends adds no data.
params, idx = ax_client.get_next_trial(ttl_seconds=1)
self.assertTrue(ax_client.experiment.trials.get(idx).status.is_running)
time.sleep(1) # Wait for TTL to elapse.
self.assertTrue(ax_client.experiment.trials.get(idx).status.is_failed)
# Also make sure we can no longer complete the trial as it is failed.
with self.assertRaisesRegex(
ValueError, ".* has been marked FAILED, so it no longer expects data."
):
ax_client.complete_trial(trial_index=idx, raw_data={"objective": (0, 0.0)})
params2, idy = ax_client.get_next_trial(ttl_seconds=1)
ax_client.complete_trial(trial_index=idy, raw_data=(-1, 0.0))
self.assertEqual(ax_client.get_best_parameters()[0], params2)
def test_start_and_end_time_in_trial_completion(self):
start_time = current_timestamp_in_millis()
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
params, idx = ax_client.get_next_trial()
ax_client.complete_trial(
trial_index=idx,
raw_data=1.0,
metadata={
"start_time": start_time,
"end_time": current_timestamp_in_millis(),
},
)
dat = ax_client.experiment.fetch_data().df
self.assertGreater(dat["end_time"][0], dat["start_time"][0])
def test_fail_on_batch(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
batch_trial = ax_client.experiment.new_batch_trial(
generator_run=GeneratorRun(
arms=[
Arm(parameters={"x": 0, "y": 1}),
Arm(parameters={"x": 0, "y": 1}),
]
)
)
with self.assertRaises(NotImplementedError):
ax_client.complete_trial(batch_trial.index, 0)
def test_log_failure(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
_, idx = ax_client.get_next_trial()
ax_client.log_trial_failure(idx, metadata={"dummy": "test"})
self.assertTrue(ax_client.experiment.trials.get(idx).status.is_failed)
self.assertEqual(
ax_client.experiment.trials.get(idx).run_metadata.get("dummy"), "test"
)
with self.assertRaisesRegex(ValueError, ".* no longer expects"):
ax_client.complete_trial(idx, {})
def test_attach_trial_and_get_trial_parameters(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
params, idx = ax_client.attach_trial(parameters={"x": 0.0, "y": 1.0})
ax_client.complete_trial(trial_index=idx, raw_data=5)
self.assertEqual(ax_client.get_best_parameters()[0], params)
self.assertEqual(
ax_client.get_trial_parameters(trial_index=idx), {"x": 0, "y": 1}
)
with self.assertRaises(ValueError):
ax_client.get_trial_parameters(
trial_index=10
) # No trial #10 in experiment.
with self.assertRaisesRegex(ValueError, ".* is of type"):
ax_client.attach_trial({"x": 1, "y": 2})
def test_attach_trial_ttl_seconds(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
params, idx = ax_client.attach_trial(
parameters={"x": 0.0, "y": 1.0}, ttl_seconds=1
)
self.assertTrue(ax_client.experiment.trials.get(idx).status.is_running)
time.sleep(1) # Wait for TTL to elapse.
self.assertTrue(ax_client.experiment.trials.get(idx).status.is_failed)
# Also make sure we can no longer complete the trial as it is failed.
with self.assertRaisesRegex(
ValueError, ".* has been marked FAILED, so it no longer expects data."
):
ax_client.complete_trial(trial_index=idx, raw_data=5)
params2, idx2 = ax_client.attach_trial(
parameters={"x": 0.0, "y": 1.0}, ttl_seconds=1
)
ax_client.complete_trial(trial_index=idx2, raw_data=5)
self.assertEqual(ax_client.get_best_parameters()[0], params2)
self.assertEqual(
ax_client.get_trial_parameters(trial_index=idx2), {"x": 0, "y": 1}
)
def test_attach_trial_numpy(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
params, idx = ax_client.attach_trial(parameters={"x": 0.0, "y": 1.0})
ax_client.complete_trial(trial_index=idx, raw_data=np.int32(5))
self.assertEqual(ax_client.get_best_parameters()[0], params)
def test_relative_oc_without_sq(self):
"""Must specify status quo to have relative outcome constraint."""
ax_client = AxClient()
with self.assertRaises(ValueError):
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
objective_name="test_objective",
minimize=True,
outcome_constraints=["some_metric <= 4.0%"],
)
def test_recommended_parallelism(self):
ax_client = AxClient()
with self.assertRaisesRegex(ValueError, "No generation strategy"):
ax_client.get_max_parallelism()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
self.assertEqual(ax_client.get_max_parallelism(), [(5, 5), (-1, 3)])
self.assertEqual(
run_trials_using_recommended_parallelism(
ax_client, ax_client.get_max_parallelism(), 20
),
0,
)
# With incorrect parallelism setting, the 'need more data' error should
# still be raised.
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
with self.assertRaisesRegex(DataRequiredError, "All trials for current model "):
run_trials_using_recommended_parallelism(ax_client, [(6, 6), (-1, 3)], 20)
@patch.dict(sys.modules, {"ax.storage.sqa_store.structs": None})
@patch.dict(sys.modules, {"sqalchemy": None})
@patch("ax.service.ax_client.DBSettings", None)
def test_no_sqa(self):
# Make sure we couldn't import sqa_store.structs (this could happen when
# SQLAlchemy is not installed).
with self.assertRaises(ModuleNotFoundError):
import ax_client.storage.sqa_store.structs # noqa F401
# Make sure we can still import ax_client.
__import__("ax.service.ax_client")
AxClient() # Make sure we still can instantiate client w/o db settings.
# DBSettings should be defined in `ax_client` now, but incorrectly typed
# `db_settings` argument should still make instantiation fail.
with self.assertRaisesRegex(ValueError, "`db_settings` argument should "):
AxClient(db_settings="badly_typed_db_settings")
def test_plotting_validation(self):
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x3", "type": "fixed", "value": 2, "value_type": "int"}
]
)
with self.assertRaisesRegex(ValueError, ".* there are no trials"):
ax_client.get_contour_plot()
with self.assertRaisesRegex(ValueError, ".* there are no trials"):
ax_client.get_feature_importances()
ax_client.get_next_trial()
with self.assertRaisesRegex(ValueError, ".* less than 2 parameters"):
ax_client.get_contour_plot()
ax_client = AxClient()
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
]
)
ax_client.get_next_trial()
with self.assertRaisesRegex(ValueError, "If `param_x` is provided"):
ax_client.get_contour_plot(param_x="y")
with self.assertRaisesRegex(ValueError, "If `param_x` is provided"):
ax_client.get_contour_plot(param_y="y")
with self.assertRaisesRegex(ValueError, 'Parameter "x3"'):
ax_client.get_contour_plot(param_x="x3", param_y="x3")
with self.assertRaisesRegex(ValueError, 'Parameter "x4"'):
ax_client.get_contour_plot(param_x="x", param_y="x4")
with self.assertRaisesRegex(ValueError, 'Metric "nonexistent"'):
ax_client.get_contour_plot(
param_x="x", param_y="y", metric_name="nonexistent"
)
with self.assertRaisesRegex(UnsupportedPlotError, "Could not obtain contour"):
ax_client.get_contour_plot(
param_x="x", param_y="y", metric_name="objective"
)
with self.assertRaisesRegex(ValueError, "Could not obtain feature"):
ax_client.get_feature_importances()
def test_sqa_storage(self):
init_test_engine_and_session_factory(force_init=True)
config = SQAConfig()
encoder = Encoder(config=config)
decoder = Decoder(config=config)
db_settings = DBSettings(encoder=encoder, decoder=decoder)
ax_client = AxClient(db_settings=db_settings)
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
for _ in range(5):
parameters, trial_index = ax_client.get_next_trial()
ax_client.complete_trial(
trial_index=trial_index, raw_data=branin(*parameters.values())
)
gs = ax_client.generation_strategy
ax_client = AxClient(db_settings=db_settings)
ax_client.load_experiment_from_database("test_experiment")
# Trial #4 was completed after the last time the generation strategy
# generated candidates, so pre-save generation strategy was not
# "aware" of completion of trial #4. Post-restoration generation
# strategy is aware of it, however, since it gets restored with most
# up-to-date experiment data. Do adding trial #4 to the seen completed
# trials of pre-storage GS to check their equality otherwise.
gs._seen_trial_indices_by_status[TrialStatus.COMPLETED].add(4)
self.assertEqual(gs, ax_client.generation_strategy)
with self.assertRaises(ValueError):
# Overwriting existing experiment.
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
with self.assertRaises(ValueError):
# Overwriting existing experiment with overwrite flag with present
# DB settings. This should fail as we no longer allow overwriting
# experiments stored in the DB.
ax_client.create_experiment(
name="test_experiment",
parameters=[{"name": "x", "type": "range", "bounds": [-5.0, 10.0]}],
overwrite_existing_experiment=True,
)
# Original experiment should still be in DB and not have been overwritten.
self.assertEqual(len(ax_client.experiment.trials), 5)
def test_overwrite(self):
init_test_engine_and_session_factory(force_init=True)
ax_client = AxClient()
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
# Log a trial
parameters, trial_index = ax_client.get_next_trial()
ax_client.complete_trial(
trial_index=trial_index, raw_data=branin(*parameters.values())
)
with self.assertRaises(ValueError):
# Overwriting existing experiment.
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
)
# Overwriting existing experiment with overwrite flag.
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x1", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "x2", "type": "range", "bounds": [0.0, 15.0]},
],
overwrite_existing_experiment=True,
)
# There should be no trials, as we just put in a fresh experiment.
self.assertEqual(len(ax_client.experiment.trials), 0)
# Log a trial
parameters, trial_index = ax_client.get_next_trial()
self.assertIn("x1", parameters.keys())
self.assertIn("x2", parameters.keys())
ax_client.complete_trial(
trial_index=trial_index, raw_data=branin(*parameters.values())
)
def test_fixed_random_seed_reproducibility(self):
ax_client = AxClient(random_seed=239)
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
]
)
for _ in range(5):
params, idx = ax_client.get_next_trial()
ax_client.complete_trial(idx, branin(params.get("x"), params.get("y")))
trial_parameters_1 = [
t.arm.parameters for t in ax_client.experiment.trials.values()
]
ax_client = AxClient(random_seed=239)
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
]
)
for _ in range(5):
params, idx = ax_client.get_next_trial()
ax_client.complete_trial(idx, branin(params.get("x"), params.get("y")))
trial_parameters_2 = [
t.arm.parameters for t in ax_client.experiment.trials.values()
]
self.assertEqual(trial_parameters_1, trial_parameters_2)
def test_init_position_saved(self):
ax_client = AxClient(random_seed=239)
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
name="sobol_init_position_test",
)
for _ in range(4):
# For each generated trial, snapshot the client before generating it,
# then recreate client, regenerate the trial and compare the trial
# generated before and after snapshotting. If the state of Sobol is
# recorded correctly, the newly generated trial will be the same as
# the one generated before the snapshotting.
serialized = ax_client.to_json_snapshot()
params, idx = ax_client.get_next_trial()
ax_client = AxClient.from_json_snapshot(serialized)
with self.subTest(ax=ax_client, params=params, idx=idx):
new_params, new_idx = ax_client.get_next_trial()
self.assertEqual(params, new_params)
self.assertEqual(idx, new_idx)
self.assertEqual(
ax_client.experiment.trials[
idx
]._generator_run._model_state_after_gen["init_position"],
idx + 1,
)
ax_client.complete_trial(idx, branin(params.get("x"), params.get("y")))
def test_unnamed_experiment_snapshot(self):
ax_client = AxClient(random_seed=239)
ax_client.create_experiment(
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
]
)
serialized = ax_client.to_json_snapshot()
ax_client = AxClient.from_json_snapshot(serialized)
self.assertIsNone(ax_client.experiment._name)
@patch(
"ax.modelbridge.base.observations_from_data",
autospec=True,
return_value=([get_observation1()]),
)
@patch(
"ax.modelbridge.random.RandomModelBridge.get_training_data",
autospec=True,
return_value=([get_observation1()]),
)
@patch(
"ax.modelbridge.random.RandomModelBridge._predict",
autospec=True,
return_value=[get_observation1trans().data],
)
def test_get_model_predictions(self, _predict, _tr_data, _obs_from_data):
ax_client = AxClient()
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
objective_name="a",
)
ax_client.get_next_trial()
ax_client.experiment.trials[0].arm._name = "1_1"
self.assertEqual(ax_client.get_model_predictions(), {0: {"a": (9.0, 1.0)}})
def test_deprecated_save_load_method_errors(self):
ax_client = AxClient()
with self.assertRaises(NotImplementedError):
ax_client.save()
with self.assertRaises(NotImplementedError):
ax_client.load()
with self.assertRaises(NotImplementedError):
ax_client.load_experiment("test_experiment")
with self.assertRaises(NotImplementedError):
ax_client.get_recommended_max_parallelism()
def test_find_last_trial_with_parameterization(self):
ax_client = AxClient()
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
objective_name="a",
)
params, trial_idx = ax_client.get_next_trial()
found_trial_idx = ax_client._find_last_trial_with_parameterization(
parameterization=params
)
self.assertEqual(found_trial_idx, trial_idx)
# Check that it's indeed the _last_ trial with params that is found.
_, new_trial_idx = ax_client.attach_trial(parameters=params)
found_trial_idx = ax_client._find_last_trial_with_parameterization(
parameterization=params
)
self.assertEqual(found_trial_idx, new_trial_idx)
with self.assertRaisesRegex(ValueError, "No .* matches"):
found_trial_idx = ax_client._find_last_trial_with_parameterization(
parameterization={k: v + 1.0 for k, v in params.items()}
)
def test_verify_parameterization(self):
ax_client = AxClient()
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
objective_name="a",
)
params, trial_idx = ax_client.get_next_trial()
self.assertTrue(
ax_client.verify_trial_parameterization(
trial_index=trial_idx, parameterization=params
)
)
# Make sure it still works if ordering in the parameterization is diff.
self.assertTrue(
ax_client.verify_trial_parameterization(
trial_index=trial_idx,
parameterization={k: params[k] for k in reversed(list(params.keys()))},
)
)
self.assertFalse(
ax_client.verify_trial_parameterization(
trial_index=trial_idx,
parameterization={k: v + 1.0 for k, v in params.items()},
)
)
def test_tracking_metric_addition(self):
ax_client = AxClient()
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
objective_name="a",
)
params, trial_idx = ax_client.get_next_trial()
self.assertEqual(list(ax_client.experiment.metrics.keys()), ["a"])
ax_client.complete_trial(trial_index=trial_idx, raw_data={"a": 1.0, "b": 2.0})
self.assertEqual(list(ax_client.experiment.metrics.keys()), ["b", "a"])
@patch(
"ax.core.experiment.Experiment.new_trial",
side_effect=RuntimeError("cholesky_cpu error - bad matrix"),
)
def test_annotate_exception(self, _):
ax_client = AxClient()
ax_client.create_experiment(
name="test_experiment",
parameters=[
{"name": "x", "type": "range", "bounds": [-5.0, 10.0]},
{"name": "y", "type": "range", "bounds": [0.0, 15.0]},
],
minimize=True,
objective_name="a",
)
with self.assertRaisesRegex(
expected_exception=RuntimeError,
expected_regex="Cholesky errors typically occur",
):
ax_client.get_next_trial()
|
[
"ax.storage.sqa_store.encoder.Encoder",
"ax.core.metric.Metric",
"ax.metrics.branin.branin",
"ax.storage.sqa_store.decoder.Decoder",
"ax.core.parameter.ChoiceParameter",
"ax.utils.common.typeutils.not_none",
"ax.service.ax_client.AxClient.from_json_snapshot",
"ax.core.parameter.FixedParameter",
"numpy.int32",
"ax.modelbridge.generation_strategy.GenerationStep",
"math.isnan",
"math.ceil",
"unittest.mock.patch.dict",
"time.sleep",
"unittest.mock.patch",
"ax.utils.testing.modeling_stubs.get_observation1trans",
"ax.service.ax_client.AxClient",
"ax.utils.common.typeutils.checked_cast",
"ax.storage.sqa_store.structs.DBSettings",
"ax.core.parameter.RangeParameter",
"ax.utils.testing.modeling_stubs.get_observation1",
"ax.core.arm.Arm",
"ax.storage.sqa_store.db.init_test_engine_and_session_factory",
"ax.storage.sqa_store.sqa_config.SQAConfig",
"ax.utils.common.timeutils.current_timestamp_in_millis"
] |
[((4809, 4931), 'unittest.mock.patch', 'patch', (['"""ax.modelbridge.random.RandomModelBridge.feature_importances"""'], {'autospec': '(True)', 'return_value': "{'x': 0.9, 'y': 1.1}"}), "('ax.modelbridge.random.RandomModelBridge.feature_importances',\n autospec=True, return_value={'x': 0.9, 'y': 1.1})\n", (4814, 4931), False, 'from unittest.mock import patch\n'), ((26648, 26711), 'unittest.mock.patch.dict', 'patch.dict', (['sys.modules', "{'ax.storage.sqa_store.structs': None}"], {}), "(sys.modules, {'ax.storage.sqa_store.structs': None})\n", (26658, 26711), False, 'from unittest.mock import patch\n'), ((26717, 26761), 'unittest.mock.patch.dict', 'patch.dict', (['sys.modules', "{'sqalchemy': None}"], {}), "(sys.modules, {'sqalchemy': None})\n", (26727, 26761), False, 'from unittest.mock import patch\n'), ((26767, 26813), 'unittest.mock.patch', 'patch', (['"""ax.service.ax_client.DBSettings"""', 'None'], {}), "('ax.service.ax_client.DBSettings', None)\n", (26772, 26813), False, 'from unittest.mock import patch\n'), ((2952, 2962), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (2960, 2962), False, 'from ax.service.ax_client import AxClient\n'), ((5143, 5153), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (5151, 5153), False, 'from ax.service.ax_client import AxClient\n'), ((7109, 7119), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (7117, 7119), False, 'from ax.service.ax_client import AxClient\n'), ((12322, 12332), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (12330, 12332), False, 'from ax.service.ax_client import AxClient\n'), ((13072, 13082), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (13080, 13082), False, 'from ax.service.ax_client import AxClient\n'), ((13924, 13934), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (13932, 13934), False, 'from ax.service.ax_client import AxClient\n'), ((14533, 14580), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'enforce_sequential_optimization': '(False)'}), '(enforce_sequential_optimization=False)\n', (14541, 14580), False, 'from ax.service.ax_client import AxClient\n'), ((15197, 15207), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (15205, 15207), False, 'from ax.service.ax_client import AxClient\n'), ((17714, 17724), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (17722, 17724), False, 'from ax.service.ax_client import AxClient\n'), ((18581, 18591), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (18589, 18591), False, 'from ax.service.ax_client import AxClient\n'), ((19046, 19059), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (19056, 19059), False, 'import time\n'), ((19748, 19777), 'ax.utils.common.timeutils.current_timestamp_in_millis', 'current_timestamp_in_millis', ([], {}), '()\n', (19775, 19777), False, 'from ax.utils.common.timeutils import current_timestamp_in_millis\n'), ((19798, 19808), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (19806, 19808), False, 'from ax.service.ax_client import AxClient\n'), ((20528, 20538), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (20536, 20538), False, 'from ax.service.ax_client import AxClient\n'), ((21234, 21244), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (21242, 21244), False, 'from ax.service.ax_client import AxClient\n'), ((22011, 22021), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (22019, 22021), False, 'from ax.service.ax_client import AxClient\n'), ((22951, 22961), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (22959, 22961), False, 'from ax.service.ax_client import AxClient\n'), ((23422, 23435), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (23432, 23435), False, 'import time\n'), ((24243, 24253), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (24251, 24253), False, 'from ax.service.ax_client import AxClient\n'), ((24869, 24879), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (24877, 24879), False, 'from ax.service.ax_client import AxClient\n'), ((25424, 25434), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (25432, 25434), False, 'from ax.service.ax_client import AxClient\n'), ((26198, 26208), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (26206, 26208), False, 'from ax.service.ax_client import AxClient\n'), ((27185, 27195), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (27193, 27195), False, 'from ax.service.ax_client import AxClient\n'), ((27614, 27624), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (27622, 27624), False, 'from ax.service.ax_client import AxClient\n'), ((28205, 28215), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (28213, 28215), False, 'from ax.service.ax_client import AxClient\n'), ((29573, 29626), 'ax.storage.sqa_store.db.init_test_engine_and_session_factory', 'init_test_engine_and_session_factory', ([], {'force_init': '(True)'}), '(force_init=True)\n', (29609, 29626), False, 'from ax.storage.sqa_store.db import init_test_engine_and_session_factory\n'), ((29644, 29655), 'ax.storage.sqa_store.sqa_config.SQAConfig', 'SQAConfig', ([], {}), '()\n', (29653, 29655), False, 'from ax.storage.sqa_store.sqa_config import SQAConfig\n'), ((29674, 29696), 'ax.storage.sqa_store.encoder.Encoder', 'Encoder', ([], {'config': 'config'}), '(config=config)\n', (29681, 29696), False, 'from ax.storage.sqa_store.encoder import Encoder\n'), ((29715, 29737), 'ax.storage.sqa_store.decoder.Decoder', 'Decoder', ([], {'config': 'config'}), '(config=config)\n', (29722, 29737), False, 'from ax.storage.sqa_store.decoder import Decoder\n'), ((29760, 29804), 'ax.storage.sqa_store.structs.DBSettings', 'DBSettings', ([], {'encoder': 'encoder', 'decoder': 'decoder'}), '(encoder=encoder, decoder=decoder)\n', (29770, 29804), False, 'from ax.storage.sqa_store.structs import DBSettings\n'), ((29825, 29858), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'db_settings': 'db_settings'}), '(db_settings=db_settings)\n', (29833, 29858), False, 'from ax.service.ax_client import AxClient\n'), ((30438, 30471), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'db_settings': 'db_settings'}), '(db_settings=db_settings)\n', (30446, 30471), False, 'from ax.service.ax_client import AxClient\n'), ((32195, 32248), 'ax.storage.sqa_store.db.init_test_engine_and_session_factory', 'init_test_engine_and_session_factory', ([], {'force_init': '(True)'}), '(force_init=True)\n', (32231, 32248), False, 'from ax.storage.sqa_store.db import init_test_engine_and_session_factory\n'), ((32269, 32279), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (32277, 32279), False, 'from ax.service.ax_client import AxClient\n'), ((34081, 34106), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'random_seed': '(239)'}), '(random_seed=239)\n', (34089, 34106), False, 'from ax.service.ax_client import AxClient\n'), ((34636, 34661), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'random_seed': '(239)'}), '(random_seed=239)\n', (34644, 34661), False, 'from ax.service.ax_client import AxClient\n'), ((35297, 35322), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'random_seed': '(239)'}), '(random_seed=239)\n', (35305, 35322), False, 'from ax.service.ax_client import AxClient\n'), ((36797, 36822), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'random_seed': '(239)'}), '(random_seed=239)\n', (36805, 36822), False, 'from ax.service.ax_client import AxClient\n'), ((37122, 37161), 'ax.service.ax_client.AxClient.from_json_snapshot', 'AxClient.from_json_snapshot', (['serialized'], {}), '(serialized)\n', (37149, 37161), False, 'from ax.service.ax_client import AxClient\n'), ((37764, 37774), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (37772, 37774), False, 'from ax.service.ax_client import AxClient\n'), ((38352, 38362), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (38360, 38362), False, 'from ax.service.ax_client import AxClient\n'), ((38825, 38835), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (38833, 38835), False, 'from ax.service.ax_client import AxClient\n'), ((40014, 40024), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (40022, 40024), False, 'from ax.service.ax_client import AxClient\n'), ((41161, 41171), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (41169, 41171), False, 'from ax.service.ax_client import AxClient\n'), ((41995, 42005), 'ax.service.ax_client.AxClient', 'AxClient', ([], {}), '()\n', (42003, 42005), False, 'from ax.service.ax_client import AxClient\n'), ((1895, 1933), 'math.ceil', 'ceil', (['(num_trials / parallelism_setting)'], {}), '(num_trials / parallelism_setting)\n', (1899, 1933), False, 'from math import ceil\n'), ((4011, 4050), 'ax.service.ax_client.AxClient.from_json_snapshot', 'AxClient.from_json_snapshot', (['serialized'], {}), '(serialized)\n', (4038, 4050), False, 'from ax.service.ax_client import AxClient\n'), ((9719, 9823), 'ax.core.parameter.RangeParameter', 'RangeParameter', ([], {'name': '"""x"""', 'parameter_type': 'ParameterType.FLOAT', 'lower': '(0.001)', 'upper': '(0.1)', 'log_scale': '(True)'}), "(name='x', parameter_type=ParameterType.FLOAT, lower=0.001,\n upper=0.1, log_scale=True)\n", (9733, 9823), False, 'from ax.core.parameter import ChoiceParameter, FixedParameter, ParameterType, RangeParameter\n'), ((10028, 10127), 'ax.core.parameter.ChoiceParameter', 'ChoiceParameter', ([], {'name': '"""y"""', 'parameter_type': 'ParameterType.INT', 'values': '[1, 2, 3]', 'is_ordered': '(True)'}), "(name='y', parameter_type=ParameterType.INT, values=[1, 2, 3\n ], is_ordered=True)\n", (10043, 10127), False, 'from ax.core.parameter import ChoiceParameter, FixedParameter, ParameterType, RangeParameter\n'), ((10316, 10384), 'ax.core.parameter.FixedParameter', 'FixedParameter', ([], {'name': '"""x3"""', 'parameter_type': 'ParameterType.INT', 'value': '(2)'}), "(name='x3', parameter_type=ParameterType.INT, value=2)\n", (10330, 10384), False, 'from ax.core.parameter import ChoiceParameter, FixedParameter, ParameterType, RangeParameter\n'), ((10499, 10584), 'ax.core.parameter.RangeParameter', 'RangeParameter', ([], {'name': '"""x4"""', 'parameter_type': 'ParameterType.INT', 'lower': '(1.0)', 'upper': '(3.0)'}), "(name='x4', parameter_type=ParameterType.INT, lower=1.0,\n upper=3.0)\n", (10513, 10584), False, 'from ax.core.parameter import ChoiceParameter, FixedParameter, ParameterType, RangeParameter\n'), ((10725, 10825), 'ax.core.parameter.ChoiceParameter', 'ChoiceParameter', ([], {'name': '"""x5"""', 'parameter_type': 'ParameterType.STRING', 'values': "['one', 'two', 'three']"}), "(name='x5', parameter_type=ParameterType.STRING, values=[\n 'one', 'two', 'three'])\n", (10740, 10825), False, 'from ax.core.parameter import ChoiceParameter, FixedParameter, ParameterType, RangeParameter\n'), ((17599, 17657), 'math.isnan', 'math.isnan', (["best_trial_values[1]['objective']['objective']"], {}), "(best_trial_values[1]['objective']['objective'])\n", (17609, 17657), False, 'import math\n'), ((27505, 27552), 'ax.service.ax_client.AxClient', 'AxClient', ([], {'db_settings': '"""badly_typed_db_settings"""'}), "(db_settings='badly_typed_db_settings')\n", (27513, 27552), False, 'from ax.service.ax_client import AxClient\n'), ((36134, 36173), 'ax.service.ax_client.AxClient.from_json_snapshot', 'AxClient.from_json_snapshot', (['serialized'], {}), '(serialized)\n', (36161, 36173), False, 'from ax.service.ax_client import AxClient\n'), ((2742, 2850), 'unittest.mock.patch.dict', 'patch.dict', (['f"""{Models.__module__}.MODEL_KEY_TO_MODEL_SETUP"""', "{'GPEI': MODEL_KEY_TO_MODEL_SETUP['Sobol']}"], {}), "(f'{Models.__module__}.MODEL_KEY_TO_MODEL_SETUP', {'GPEI':\n MODEL_KEY_TO_MODEL_SETUP['Sobol']})\n", (2752, 2850), False, 'from unittest.mock import patch\n'), ((4467, 4485), 'ax.utils.testing.modeling_stubs.get_observation1', 'get_observation1', ([], {}), '()\n', (4483, 4485), False, 'from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans\n'), ((4622, 4640), 'ax.utils.testing.modeling_stubs.get_observation1', 'get_observation1', ([], {}), '()\n', (4638, 4640), False, 'from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans\n'), ((24648, 24659), 'numpy.int32', 'np.int32', (['(5)'], {}), '(5)\n', (24656, 24659), True, 'import numpy as np\n'), ((37329, 37347), 'ax.utils.testing.modeling_stubs.get_observation1', 'get_observation1', ([], {}), '()\n', (37345, 37347), False, 'from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans\n'), ((37484, 37502), 'ax.utils.testing.modeling_stubs.get_observation1', 'get_observation1', ([], {}), '()\n', (37500, 37502), False, 'from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans\n'), ((2427, 2459), 'ax.metrics.branin.branin', 'branin', (["params['x']", "params['y']"], {}), "(params['x'], params['y'])\n", (2433, 2459), False, 'from ax.metrics.branin import branin\n'), ((4767, 4790), 'ax.utils.testing.modeling_stubs.get_observation1trans', 'get_observation1trans', ([], {}), '()\n', (4788, 4790), False, 'from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans\n'), ((11053, 11079), 'ax.core.metric.Metric', 'Metric', ([], {'name': '"""some_metric"""'}), "(name='some_metric')\n", (11059, 11079), False, 'from ax.core.metric import Metric\n'), ((11360, 11386), 'ax.core.metric.Metric', 'Metric', ([], {'name': '"""some_metric"""'}), "(name='some_metric')\n", (11366, 11386), False, 'from ax.core.metric import Metric\n'), ((20297, 20326), 'ax.utils.common.timeutils.current_timestamp_in_millis', 'current_timestamp_in_millis', ([], {}), '()\n', (20324, 20326), False, 'from ax.utils.common.timeutils import current_timestamp_in_millis\n'), ((37629, 37652), 'ax.utils.testing.modeling_stubs.get_observation1trans', 'get_observation1trans', ([], {}), '()\n', (37650, 37652), False, 'from ax.utils.testing.modeling_stubs import get_observation1, get_observation1trans\n'), ((5552, 5591), 'ax.utils.common.typeutils.not_none', 'not_none', (['ax_client.generation_strategy'], {}), '(ax_client.generation_strategy)\n', (5560, 5591), False, 'from ax.utils.common.typeutils import checked_cast, not_none\n'), ((7455, 7494), 'ax.utils.common.typeutils.not_none', 'not_none', (['ax_client.generation_strategy'], {}), '(ax_client.generation_strategy)\n', (7463, 7494), False, 'from ax.utils.common.typeutils import checked_cast, not_none\n'), ((7855, 7904), 'ax.modelbridge.generation_strategy.GenerationStep', 'GenerationStep', ([], {'model': 'Models.SOBOL', 'num_trials': '(30)'}), '(model=Models.SOBOL, num_trials=30)\n', (7869, 7904), False, 'from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy\n'), ((11808, 11857), 'ax.modelbridge.generation_strategy.GenerationStep', 'GenerationStep', ([], {'model': 'Models.SOBOL', 'num_trials': '(30)'}), '(model=Models.SOBOL, num_trials=30)\n', (11822, 11857), False, 'from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy\n'), ((12820, 12832), 'ax.metrics.branin.branin', 'branin', (['x', 'y'], {}), '(x, y)\n', (12826, 12832), False, 'from ax.metrics.branin import branin\n'), ((3817, 3839), 'ax.utils.common.typeutils.checked_cast', 'checked_cast', (['float', 'x'], {}), '(float, x)\n', (3829, 3839), False, 'from ax.utils.common.typeutils import checked_cast, not_none\n'), ((3841, 3863), 'ax.utils.common.typeutils.checked_cast', 'checked_cast', (['float', 'y'], {}), '(float, y)\n', (3853, 3863), False, 'from ax.utils.common.typeutils import checked_cast, not_none\n'), ((20939, 20971), 'ax.core.arm.Arm', 'Arm', ([], {'parameters': "{'x': 0, 'y': 1}"}), "(parameters={'x': 0, 'y': 1})\n", (20942, 20971), False, 'from ax.core.arm import Arm\n'), ((20993, 21025), 'ax.core.arm.Arm', 'Arm', ([], {'parameters': "{'x': 0, 'y': 1}"}), "(parameters={'x': 0, 'y': 1})\n", (20996, 21025), False, 'from ax.core.arm import Arm\n'), ((6192, 6214), 'ax.utils.common.typeutils.checked_cast', 'checked_cast', (['float', 'x'], {}), '(float, x)\n', (6204, 6214), False, 'from ax.utils.common.typeutils import checked_cast, not_none\n'), ((6216, 6238), 'ax.utils.common.typeutils.checked_cast', 'checked_cast', (['float', 'y'], {}), '(float, y)\n', (6228, 6238), False, 'from ax.utils.common.typeutils import checked_cast, not_none\n'), ((13655, 13673), 'ax.metrics.branin.branin', 'branin', (['x', '(y / 2.0)'], {}), '(x, y / 2.0)\n', (13661, 13673), False, 'from ax.metrics.branin import branin\n'), ((13729, 13741), 'ax.metrics.branin.branin', 'branin', (['x', 'y'], {}), '(x, y)\n', (13735, 13741), False, 'from ax.metrics.branin import branin\n')]
|
from datetime import datetime
import pytest
import pytz
from pytz import timezone
from opennem.utils.dates import parse_date
from opennem.utils.timezone import is_aware
UTC = pytz.utc
class TestUtilDates(object):
def test_date_none(self):
with pytest.raises(ValueError) as excinfo:
parse_date(None)
assert "Require a datetime or string" in str(
excinfo
), "Empty string raises exception"
def test_date_empty(self):
with pytest.raises(ValueError) as excinfo:
parse_date("")
assert "Invalid date string passed" in str(
excinfo
), "Empty string raises exception"
def test_wem_date_comissioned(self):
subject = parse_date("1/11/08 0:00")
comparator = datetime(2008, 11, 1, 0, 0, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_nem_dispatch_interval_amiguous(self):
subject = parse_date("1/9/19 4:00")
comparator = datetime(2019, 9, 1, 4, 0, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_nem_dispatch_interval(self):
subject = parse_date("30/9/19 4:00")
comparator = datetime(2019, 9, 30, 4, 0, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_nem_dispatch_scada_interval(self):
subject = parse_date("2020/06/01 21:35:00", dayfirst=False)
comparator = datetime(2020, 6, 1, 21, 35, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_nem_settlementdate(self):
subject = parse_date("2020/10/07 10:15:00", dayfirst=False)
comparator = datetime(2020, 10, 7, 10, 15, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_nem_excel_formatted(self):
subject = parse_date("27/9/2019 2:55:00 pm")
comparator = datetime(2019, 9, 27, 14, 55, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_bom_date(self):
subject = parse_date("20201008133000", dayfirst=False)
comparator = datetime(2020, 10, 8, 13, 30, 0, 0)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is False, "Date has no timezone info"
def test_bom_date_utc(self):
subject = parse_date("20201008133000", dayfirst=False, is_utc=True)
comparator = datetime(2020, 10, 8, 13, 30, 0, 0, tzinfo=UTC)
assert subject == comparator, "Parses date correctly"
assert is_aware(subject) is True, "Date has timezone info"
|
[
"pytest.raises",
"datetime.datetime",
"opennem.utils.timezone.is_aware",
"opennem.utils.dates.parse_date"
] |
[((732, 758), 'opennem.utils.dates.parse_date', 'parse_date', (['"""1/11/08 0:00"""'], {}), "('1/11/08 0:00')\n", (742, 758), False, 'from opennem.utils.dates import parse_date\n'), ((780, 810), 'datetime.datetime', 'datetime', (['(2008)', '(11)', '(1)', '(0)', '(0)', '(0)'], {}), '(2008, 11, 1, 0, 0, 0)\n', (788, 810), False, 'from datetime import datetime\n'), ((1015, 1040), 'opennem.utils.dates.parse_date', 'parse_date', (['"""1/9/19 4:00"""'], {}), "('1/9/19 4:00')\n", (1025, 1040), False, 'from opennem.utils.dates import parse_date\n'), ((1062, 1091), 'datetime.datetime', 'datetime', (['(2019)', '(9)', '(1)', '(4)', '(0)', '(0)'], {}), '(2019, 9, 1, 4, 0, 0)\n', (1070, 1091), False, 'from datetime import datetime\n'), ((1287, 1313), 'opennem.utils.dates.parse_date', 'parse_date', (['"""30/9/19 4:00"""'], {}), "('30/9/19 4:00')\n", (1297, 1313), False, 'from opennem.utils.dates import parse_date\n'), ((1335, 1365), 'datetime.datetime', 'datetime', (['(2019)', '(9)', '(30)', '(4)', '(0)', '(0)'], {}), '(2019, 9, 30, 4, 0, 0)\n', (1343, 1365), False, 'from datetime import datetime\n'), ((1567, 1616), 'opennem.utils.dates.parse_date', 'parse_date', (['"""2020/06/01 21:35:00"""'], {'dayfirst': '(False)'}), "('2020/06/01 21:35:00', dayfirst=False)\n", (1577, 1616), False, 'from opennem.utils.dates import parse_date\n'), ((1638, 1669), 'datetime.datetime', 'datetime', (['(2020)', '(6)', '(1)', '(21)', '(35)', '(0)'], {}), '(2020, 6, 1, 21, 35, 0)\n', (1646, 1669), False, 'from datetime import datetime\n'), ((1862, 1911), 'opennem.utils.dates.parse_date', 'parse_date', (['"""2020/10/07 10:15:00"""'], {'dayfirst': '(False)'}), "('2020/10/07 10:15:00', dayfirst=False)\n", (1872, 1911), False, 'from opennem.utils.dates import parse_date\n'), ((1933, 1965), 'datetime.datetime', 'datetime', (['(2020)', '(10)', '(7)', '(10)', '(15)', '(0)'], {}), '(2020, 10, 7, 10, 15, 0)\n', (1941, 1965), False, 'from datetime import datetime\n'), ((2159, 2194), 'opennem.utils.dates.parse_date', 'parse_date', (['"""27/9/2019 2:55:00 pm"""'], {}), "('27/9/2019 2:55:00 pm')\n", (2169, 2194), False, 'from opennem.utils.dates import parse_date\n'), ((2216, 2248), 'datetime.datetime', 'datetime', (['(2019)', '(9)', '(27)', '(14)', '(55)', '(0)'], {}), '(2019, 9, 27, 14, 55, 0)\n', (2224, 2248), False, 'from datetime import datetime\n'), ((2431, 2475), 'opennem.utils.dates.parse_date', 'parse_date', (['"""20201008133000"""'], {'dayfirst': '(False)'}), "('20201008133000', dayfirst=False)\n", (2441, 2475), False, 'from opennem.utils.dates import parse_date\n'), ((2497, 2532), 'datetime.datetime', 'datetime', (['(2020)', '(10)', '(8)', '(13)', '(30)', '(0)', '(0)'], {}), '(2020, 10, 8, 13, 30, 0, 0)\n', (2505, 2532), False, 'from datetime import datetime\n'), ((2719, 2776), 'opennem.utils.dates.parse_date', 'parse_date', (['"""20201008133000"""'], {'dayfirst': '(False)', 'is_utc': '(True)'}), "('20201008133000', dayfirst=False, is_utc=True)\n", (2729, 2776), False, 'from opennem.utils.dates import parse_date\n'), ((2798, 2845), 'datetime.datetime', 'datetime', (['(2020)', '(10)', '(8)', '(13)', '(30)', '(0)', '(0)'], {'tzinfo': 'UTC'}), '(2020, 10, 8, 13, 30, 0, 0, tzinfo=UTC)\n', (2806, 2845), False, 'from datetime import datetime\n'), ((261, 286), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (274, 286), False, 'import pytest\n'), ((311, 327), 'opennem.utils.dates.parse_date', 'parse_date', (['None'], {}), '(None)\n', (321, 327), False, 'from opennem.utils.dates import parse_date\n'), ((491, 516), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (504, 516), False, 'import pytest\n'), ((541, 555), 'opennem.utils.dates.parse_date', 'parse_date', (['""""""'], {}), "('')\n", (551, 555), False, 'from opennem.utils.dates import parse_date\n'), ((889, 906), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (897, 906), False, 'from opennem.utils.timezone import is_aware\n'), ((1170, 1187), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (1178, 1187), False, 'from opennem.utils.timezone import is_aware\n'), ((1444, 1461), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (1452, 1461), False, 'from opennem.utils.timezone import is_aware\n'), ((1748, 1765), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (1756, 1765), False, 'from opennem.utils.timezone import is_aware\n'), ((2044, 2061), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (2052, 2061), False, 'from opennem.utils.timezone import is_aware\n'), ((2327, 2344), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (2335, 2344), False, 'from opennem.utils.timezone import is_aware\n'), ((2611, 2628), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (2619, 2628), False, 'from opennem.utils.timezone import is_aware\n'), ((2924, 2941), 'opennem.utils.timezone.is_aware', 'is_aware', (['subject'], {}), '(subject)\n', (2932, 2941), False, 'from opennem.utils.timezone import is_aware\n')]
|
import tornado, tornado.web, tornado.ioloop
import os
PATHS = { 'webroot': './html', 'modules': {} }
def check_for_node_modules( node_modules ):
if os.path.isdir( node_modules ):
for dirname in os.listdir( node_modules ):
path = os.path.join(node_modules, dirname)
for filename in os.listdir(path):
if filename.endswith('.js'):
PATHS[ 'modules' ][ filename ] = os.path.join( path, filename )
check_for_node_modules('/usr/local/lib/node_modules')
def get_main_page(server):
root = PATHS['webroot']
r = ['<html><head><title>index</title></head><body>']
r.append( '<ul>' )
files = os.listdir( root )
files.sort()
for name in files:
path = os.path.join( root, name )
if os.path.isfile( path ):
r.append( '<a href="%s"><li>%s</li></a>' %(name,name) )
r.append('</ul>')
r.append('</body></html>')
return ''.join(r)
def convert_python_html_document( data ):
'''
rewrites html document, converts python scripts into javascript.
example:
<script type="text/python">
print("hello world")
</script>
'''
doc = list()
script = None
for line in data.splitlines():
if line.strip().startswith('<script'):
if 'type="text/python"' in line:
doc.append( '<script type="text/javascript">')
script = list()
else:
doc.append( line )
elif line.strip() == '</script>':
if script:
#src = '\n'.join( script ) ## TODO fix '\n'
src = chr(10).join(script)
js = pythonjs.translator.to_javascript( src )
doc.append( js )
doc.append( line )
script = None
elif isinstance( script, list ):
script.append( line )
else:
doc.append( line )
return '\n'.join( doc )
class MainHandler( tornado.web.RequestHandler ):
def get(self, path=None):
print('path', path)
if not path:
self.write( get_main_page() )
elif path == 'pythonjs.js':
data = pythonjs.runtime.javascript
self.set_header("Content-Type", "text/javascript; charset=utf-8")
self.set_header("Content-Length", len(data))
self.write(data)
else:
local_path = os.path.join( PATHS['webroot'], path )
if os.path.isfile( local_path ):
data = open(local_path, 'r').read()
if path.endswith( '.html' ):
data = convert_python_html_document( data )
self.set_header("Content-Type", "text/html; charset=utf-8")
elif path.endswith( '.js' ):
self.set_header("Content-Type", "text/javascript; charset=utf-8")
self.set_header("Content-Length", len(data))
self.write( data )
elif path in PATHS['modules']:
data = open( PATHS['modules'][path], 'r' ).read()
self.set_header("Content-Type", "text/javascript; charset=utf-8")
#self.set_header("Content-Length", len(data)) ## TODO fix me, why do some bytes get clipped?
self.write( data )
elif path == 'favicon.ico':
self.write('')
else:
print('file not found!')
raise tornado.web.HTTPError(404)
handlers = [
('/', MainHandler)
]
app = tornado.web.Application( handlers )
app.listen( 8080 )
tornado.ioloop.IOLoop.instance().start()
|
[
"tornado.web.HTTPError",
"os.path.join",
"tornado.ioloop.IOLoop.instance",
"os.path.isdir",
"os.path.isfile",
"tornado.web.Application",
"os.listdir"
] |
[((2915, 2948), 'tornado.web.Application', 'tornado.web.Application', (['handlers'], {}), '(handlers)\n', (2938, 2948), False, 'import tornado, tornado.web, tornado.ioloop\n'), ((151, 178), 'os.path.isdir', 'os.path.isdir', (['node_modules'], {}), '(node_modules)\n', (164, 178), False, 'import os\n'), ((605, 621), 'os.listdir', 'os.listdir', (['root'], {}), '(root)\n', (615, 621), False, 'import os\n'), ((199, 223), 'os.listdir', 'os.listdir', (['node_modules'], {}), '(node_modules)\n', (209, 223), False, 'import os\n'), ((667, 691), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (679, 691), False, 'import os\n'), ((699, 719), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (713, 719), False, 'import os\n'), ((2970, 3002), 'tornado.ioloop.IOLoop.instance', 'tornado.ioloop.IOLoop.instance', ([], {}), '()\n', (3000, 3002), False, 'import tornado, tornado.web, tornado.ioloop\n'), ((237, 272), 'os.path.join', 'os.path.join', (['node_modules', 'dirname'], {}), '(node_modules, dirname)\n', (249, 272), False, 'import os\n'), ((292, 308), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (302, 308), False, 'import os\n'), ((2025, 2061), 'os.path.join', 'os.path.join', (["PATHS['webroot']", 'path'], {}), "(PATHS['webroot'], path)\n", (2037, 2061), False, 'import os\n'), ((2070, 2096), 'os.path.isfile', 'os.path.isfile', (['local_path'], {}), '(local_path)\n', (2084, 2096), False, 'import os\n'), ((381, 409), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (393, 409), False, 'import os\n'), ((2843, 2869), 'tornado.web.HTTPError', 'tornado.web.HTTPError', (['(404)'], {}), '(404)\n', (2864, 2869), False, 'import tornado, tornado.web, tornado.ioloop\n')]
|
"""
main.py
@author: ksuchak1990
Python script for running experiments with the enkf.
"""
# Imports
import numpy as np
from experiment_utils import Modeller, Visualiser
np.random.seed(42)
# Functions
# def testing():
# """
# Testing function
# Overall function that wraps around what we want to run at any specific
# time.
# """
# with open('results/data.json') as json_file:
# data = json.load(json_file)
# forecasts, analyses, observations = process_repeat_results(data)
# plot_all_results(forecasts, analyses, observations)
# plot_with_errors(forecasts, analyses, observations)
# run_repeat_combos(resume=True)
# run_repeat_combos_mt(4)
# testing()
# process_batch(read_time=True)
# d = {'station': 'Grand_Central'}
# Modeller.run_repeat_combos(resume=False)
# Modeller.run_for_endtime()
# Modeller.run_experiment_1()
# Modeller.run_all(ensemble_size=10)
# Modeller.run_enkf_benchmark(ensemble_size=50, pop_size=50)
# Visualiser.quick_plot()
# Modeller.run_experiment_1_1()
Modeller.run_model_collisions()
|
[
"experiment_utils.Modeller.run_model_collisions",
"numpy.random.seed"
] |
[((171, 189), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (185, 189), True, 'import numpy as np\n'), ((1036, 1067), 'experiment_utils.Modeller.run_model_collisions', 'Modeller.run_model_collisions', ([], {}), '()\n', (1065, 1067), False, 'from experiment_utils import Modeller, Visualiser\n')]
|
from base.base_train import BaseTrain
from tqdm import tqdm
import numpy as np
class ExampleTrainer(BaseTrain):
def __init__(self, sess, model, data, config,logger):
super(ExampleTrainer, self).__init__(sess, model, data, config,logger)
def train_epoch(self):
loop = tqdm(range(self.config.num_iter_per_epoch))
losses = []
accs = []
for _ in loop:
loss, acc = self.train_step()
losses.append(loss)
accs.append(acc)
loss = np.mean(losses)
print(loss)
acc = np.mean(accs)
cur_it = self.model.global_step_tensor.eval(self.sess)
summaries_dict = {
'loss': loss,
'acc': acc,
}
self.logger.summarize(cur_it, summaries_dict=summaries_dict)
def train_step(self):
batch_x, batch_y = next(self.data.next_batch(self.config.batch_size))
feed_dict = {self.model.x: batch_x, self.model.y: batch_y, self.model.is_training: True}
_, loss, acc = self.sess.run([self.model.train_step, self.model.sqm, self.model.accuracy],
feed_dict=feed_dict)
return loss, acc
|
[
"numpy.mean"
] |
[((518, 533), 'numpy.mean', 'np.mean', (['losses'], {}), '(losses)\n', (525, 533), True, 'import numpy as np\n'), ((568, 581), 'numpy.mean', 'np.mean', (['accs'], {}), '(accs)\n', (575, 581), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
import requests
from requests.compat import urljoin
from ngta import TestContextManager, parametrize, skip
from .base import RestBaseTestCase, RestNotFoundTestCase, randstr
import logging
logger = logging.getLogger(__name__)
testsuite_json_schema = {
"type": "object",
"properties": {
"id": {
"type": "integer"
},
"parent_id": {
"type": "integer"
},
"name": {
"type": "string"
},
"description": {
"type": ["string", "null"]
}
},
"required": ["id", "parent_id", "name", "description"]
}
class BaseTestCase(RestBaseTestCase):
def _post_and_validate(self, url, data, schema=None):
resp = self.session.post(url, json=self.parameters)
self.assertEqual(resp.status_code, requests.codes.created, msg="post response status code should be 201")
json = resp.json()
if schema:
self._validate_json(json, schema, "json should be validated successfully by schema.")
self.assertEqual(json["name"], data["name"], "name should be same.")
if "description" not in data:
self.assertIsNone(json["description"], "description should be None.")
else:
self.assertEqual(json["description"], data["description"], "description should be same.")
return json
def _get_and_validate_200(self, url, schema, expected_data):
resp = self.session.get(url)
self.assertEqual(resp.status_code, requests.codes.ok, msg="get response status code should be 200")
json = resp.json()
self._validate_json(json, schema, "json should be validated successfully by schema.")
json.pop("id", None)
self.assertEqual(json, expected_data, "get response and expected data should be same.")
def _get_and_validate_404(self, url, ):
resp = self.session.get(url)
self.assertEqual(resp.status_code, requests.codes.not_found, msg="get response status code should be 404")
def _delete_and_validate(self, url):
resp = self.session.delete(url)
self.assertEqual(resp.status_code, requests.codes.no_content, msg="delete response status code should be 204")
self._get_and_validate_404(url)
class TestSuitesTestCase(BaseTestCase):
def setUp(self):
self.fixture = TestContextManager.current_context().fixture
self.session = self.fixture.session
self.url = urljoin(self.session.resturl, "testsuites")
def test__get(self):
resp = self.session.get(self.url)
self.assertEqual(resp.status_code, requests.codes.method_not_allowed,
msg="get response status code should be 405")
@parametrize("name", default=randstr())
def test__post_without_description(self):
self._post_and_validate(self.url, self.parameters)
@parametrize("name", default=randstr())
@parametrize("description", default=randstr())
def test__post_with_description(self):
self._post_and_validate(self.url, self.parameters)
@skip("Not Implemented")
def test__post_with_name_too_long(self):
raise NotImplementedError
class TestSuiteNotFoundTestCase(RestNotFoundTestCase):
PATH = "testsuites"
|
[
"requests.compat.urljoin",
"ngta.TestContextManager.current_context",
"logging.getLogger",
"ngta.skip"
] |
[((223, 250), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (240, 250), False, 'import logging\n'), ((3098, 3121), 'ngta.skip', 'skip', (['"""Not Implemented"""'], {}), "('Not Implemented')\n", (3102, 3121), False, 'from ngta import TestContextManager, parametrize, skip\n'), ((2483, 2526), 'requests.compat.urljoin', 'urljoin', (['self.session.resturl', '"""testsuites"""'], {}), "(self.session.resturl, 'testsuites')\n", (2490, 2526), False, 'from requests.compat import urljoin\n'), ((2375, 2411), 'ngta.TestContextManager.current_context', 'TestContextManager.current_context', ([], {}), '()\n', (2409, 2411), False, 'from ngta import TestContextManager, parametrize, skip\n')]
|
#! /usr/bin/env python
"""Author: <NAME>
Helper functions to prepare and process data
Email: <EMAIL>
"""
from __future__ import division
import glob
import math
import errno
import os
import shutil
import numpy as np
import multiprocessing as mp
import insar.sario
from insar.log import get_log, log_runtime
logger = get_log()
def mkdir_p(path):
"""Emulates bash `mkdir -p`, in python style
Used for igrams directory creation
"""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def which(program):
"""Mimics UNIX which
Used from https://stackoverflow.com/a/377028"""
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
def downsample_im(image, rate=10):
"""Takes a numpy matrix of an image and returns a smaller version
Args:
image (ndarray) 2D array of an image
rate (int) the reduction rate to downsample
"""
return image[::rate, ::rate]
def floor_float(num, ndigits):
"""Like rounding to ndigits, but flooring
Used for .dem.rsc creation, because rounding to 12 sigfigs
causes the fortran routines to overstep the matrix and fail,
since 0.000277777778*3600 = 1.00000000079.. , but
0.000277777777*3600 = 0.99999999719
Example:
>>> floor_float(1/3600, 12)
0.000277777777
"""
return math.floor((10**ndigits) * num) / (10**ndigits)
def clip(image):
"""Convert float image to only range 0 to 1 (clips)"""
return np.clip(np.abs(image), 0, 1)
def log(image):
"""Converts magnitude amplitude image to log scale"""
if np.iscomplexobj(image):
image = np.abs(image)
return 20 * np.log10(image)
# Alias: convert
db = log
def percent_zero(filepath=None, arr=None):
"""Function to give the percentage of a file that is exactly zero
Used as a quality assessment check
Args:
filepath (str): path to file to check
arr (ndarray): pre-loaded array to check
Returns:
float: decimal from 0 to 1, ratio of zeros to total entries
Example:
>>> a = np.array([[1 + 1j, 0.0], [1, 0.0001]])
>>> print(percent_zero(arr=a))
0.25
"""
if filepath:
arr = insar.sario.load(filepath)
return (np.sum(arr == 0) / arr.size)
def _check_and_move(fp, zero_threshold, test, mv_dir):
"""Wrapper func for clean_files multiprocessing"""
logger.debug("Checking {}".format(fp))
pct = percent_zero(filepath=fp)
if pct > zero_threshold:
logger.info("Moving {} for having {:.2f}% zeros to {}".format(fp, 100 * pct, mv_dir))
if not test:
shutil.move(fp, mv_dir)
@log_runtime
def clean_files(ext, path=".", zero_threshold=0.50, test=True):
"""Move files of type ext from path with a high pct of zeros
Args:
ext (str): file extension to open. Must be loadable by sario.load
path (str): path of directory to search
zero_threshold (float): between 0 and 1, threshold to delete files
if they contain greater ratio of zeros
test (bool): If true, doesn't delete files, just lists
"""
file_glob = os.path.join(path, "*{}".format(ext))
logger.info("Searching {} for files with zero threshold {}".format(file_glob, zero_threshold))
# Make a folder to store the bad geos
mv_dir = os.path.join(path, 'bad_{}'.format(ext.replace('.', '')))
mkdir_p(mv_dir) if not test else logger.info("Test mode: not moving files.")
max_procs = mp.cpu_count() // 2
pool = mp.Pool(processes=max_procs)
results = [
pool.apply_async(_check_and_move, (fp, zero_threshold, test, mv_dir))
for fp in glob.glob(file_glob)
]
# Now ask for results so processes launch
[res.get() for res in results]
def split_array_into_blocks(data):
"""Takes a long rectangular array (like UAVSAR) and creates blocks
Useful to look at small data pieces at a time in dismph
Returns:
blocks (list[np.ndarray])
"""
rows, cols = data.shape
blocks = np.array_split(data, np.ceil(rows / cols))
return blocks
def split_and_save(filename):
"""Creates several files from one long data file
Saves them with same filename with .1,.2,.3... at end before ext
e.g. brazos_14937_17087-002_17088-003_0001d_s01_L090HH_01.int produces
brazos_14937_17087-002_17088-003_0001d_s01_L090HH_01.1.int
brazos_14937_17087-002_17088-003_0001d_s01_L090HH_01.2.int...
Output:
newpaths (list[str]): full paths to new files created
"""
data = insar.sario.load_file(filename)
blocks = split_array_into_blocks(data)
ext = insar.sario.get_file_ext(filename)
newpaths = []
for idx, block in enumerate(blocks, start=1):
fname = filename.replace(ext, ".{}{}".format(str(idx), ext))
print("Saving {}".format(fname))
insar.sario.save(fname, block)
newpaths.append(fname)
return newpaths
def combine_cor_amp(corfilename, save=True):
"""Takes a .cor file from UAVSAR (which doesn't contain amplitude),
and creates a new file with amplitude data interleaved for dishgt
dishgt brazos_14937_17087-002_17088-003_0001d_s01_L090HH_01_withamp.cor 3300 1 5000 1
where 3300 is number of columns/samples, and we want the first 5000 rows. the final
1 is needed for the contour interval to set a max of 1 for .cor data
Inputs:
corfilename (str): string filename of the .cor from UAVSAR
save (bool): True if you want to save the combined array
Returns:
cor_with_amp (np.ndarray) combined correlation + amplitude (as complex64)
outfilename (str): same name as corfilename, but _withamp.cor
Saves a new file under outfilename
Note: .ann and .int files must be in same directory as .cor
"""
ext = insar.sario.get_file_ext(corfilename)
assert ext == '.cor', 'corfilename must be a .cor file'
intfilename = corfilename.replace('.cor', '.int')
intdata = insar.sario.load_file(intfilename)
amp = np.abs(intdata)
cordata = insar.sario.load_file(corfilename)
# For dishgt, it expects the two matrices stacked [[amp]; [cor]]
cor_with_amp = np.vstack((amp, cordata))
outfilename = corfilename.replace('.cor', '_withamp.cor')
insar.sario.save(outfilename, cor_with_amp)
return cor_with_amp, outfilename
|
[
"numpy.abs",
"numpy.iscomplexobj",
"os.makedirs",
"numpy.sum",
"numpy.ceil",
"os.path.isdir",
"math.floor",
"os.path.isfile",
"insar.log.get_log",
"shutil.move",
"multiprocessing.Pool",
"glob.glob",
"numpy.log10",
"os.path.split",
"os.path.join",
"os.access",
"numpy.vstack",
"multiprocessing.cpu_count"
] |
[((319, 328), 'insar.log.get_log', 'get_log', ([], {}), '()\n', (326, 328), False, 'from insar.log import get_log, log_runtime\n'), ((830, 852), 'os.path.split', 'os.path.split', (['program'], {}), '(program)\n', (843, 852), False, 'import os\n'), ((2022, 2044), 'numpy.iscomplexobj', 'np.iscomplexobj', (['image'], {}), '(image)\n', (2037, 2044), True, 'import numpy as np\n'), ((3949, 3977), 'multiprocessing.Pool', 'mp.Pool', ([], {'processes': 'max_procs'}), '(processes=max_procs)\n', (3956, 3977), True, 'import multiprocessing as mp\n'), ((6476, 6491), 'numpy.abs', 'np.abs', (['intdata'], {}), '(intdata)\n', (6482, 6491), True, 'import numpy as np\n'), ((6630, 6655), 'numpy.vstack', 'np.vstack', (['(amp, cordata)'], {}), '((amp, cordata))\n', (6639, 6655), True, 'import numpy as np\n'), ((463, 480), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (474, 480), False, 'import os\n'), ((1773, 1804), 'math.floor', 'math.floor', (['(10 ** ndigits * num)'], {}), '(10 ** ndigits * num)\n', (1783, 1804), False, 'import math\n'), ((1918, 1931), 'numpy.abs', 'np.abs', (['image'], {}), '(image)\n', (1924, 1931), True, 'import numpy as np\n'), ((2062, 2075), 'numpy.abs', 'np.abs', (['image'], {}), '(image)\n', (2068, 2075), True, 'import numpy as np\n'), ((2092, 2107), 'numpy.log10', 'np.log10', (['image'], {}), '(image)\n', (2100, 2107), True, 'import numpy as np\n'), ((2678, 2694), 'numpy.sum', 'np.sum', (['(arr == 0)'], {}), '(arr == 0)\n', (2684, 2694), True, 'import numpy as np\n'), ((3918, 3932), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (3930, 3932), True, 'import multiprocessing as mp\n'), ((4485, 4505), 'numpy.ceil', 'np.ceil', (['(rows / cols)'], {}), '(rows / cols)\n', (4492, 4505), True, 'import numpy as np\n'), ((758, 779), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (772, 779), False, 'import os\n'), ((784, 809), 'os.access', 'os.access', (['fpath', 'os.X_OK'], {}), '(fpath, os.X_OK)\n', (793, 809), False, 'import os\n'), ((1013, 1040), 'os.path.join', 'os.path.join', (['path', 'program'], {}), '(path, program)\n', (1025, 1040), False, 'import os\n'), ((3054, 3077), 'shutil.move', 'shutil.move', (['fp', 'mv_dir'], {}), '(fp, mv_dir)\n', (3065, 3077), False, 'import shutil\n'), ((4090, 4110), 'glob.glob', 'glob.glob', (['file_glob'], {}), '(file_glob)\n', (4099, 4110), False, 'import glob\n'), ((549, 568), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (562, 568), False, 'import os\n')]
|
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the cluster command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestCluster(TestBrokerCommand):
def test_100_bind_utecl1(self):
for i in range(1, 5):
self.successtest(["cluster",
"--hostname", "evh%s.aqd-unittest.ms.com" % i,
"--personality=vulcan-10g-server-prod",
"--cluster", "utecl1"])
def test_101_verify_utecl1(self):
for i in range(1, 5):
command = "show host --hostname evh%s.aqd-unittest.ms.com" % i
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Primary Name: evh%s.aqd-unittest.ms.com" % i,
command)
self.matchoutput(out, "Member of ESX Cluster: utecl1", command)
self.matchclean(out, "Build Status: ready", command)
command = "show esx cluster --cluster utecl1"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ESX Cluster: utecl1", command)
for i in range(1, 5):
self.matchoutput(out, "Member: evh%d.aqd-unittest.ms.com "
"[node_index: %d]" % (i, i - 1), command)
def test_102_utecl1_manage_setup(self):
command = ["change_status", "--cluster", "utecl1", "--buildstatus", "ready"]
self.successtest(command)
command = "show esx cluster --cluster utecl1"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ESX Cluster: utecl1", command)
self.matchoutput(out, "Metacluster: utmc1", command)
self.matchoutput(out, "Environment: prod", command)
self.matchoutput(out, "Build Status: ready", command)
def test_103_utecl1_manage_no_justification(self):
# Manage command only checks hosts prod impact
command = ["manage", "--metacluster", "utmc1",
"--sandbox", "%s/utsandbox" % self.user, "--force"]
self.successtest(command)
def test_104_utecl1_manage_no_justification_back(self):
# Manage command only checks hosts prod impact
command = ["manage", "--metacluster", "utmc1",
"--domain", "unittest", "--skip_auto_compile", "--force"]
self.successtest(command)
command = ["change_status", "--cluster", "utecl1", "--buildstatus", "rebuild"] + self.valid_just_tcm
self.successtest(command)
def test_105_show_evh1_proto(self):
command = ["show_host", "--hostname", "evh1.aqd-unittest.ms.com",
"--format", "proto"]
host = self.protobuftest(command, expect=1)[0]
self.assertEqual(host.cluster, "utecl1")
def test_105_verify_cat(self):
cat_cluster_command = "cat --cluster utecl1"
cat_cluster_out = self.commandtest(cat_cluster_command.split())
m = self.searchoutput(cat_cluster_out,
r'include "(service/esx_management_server/ut.[ab]/client/config)";',
cat_cluster_command)
template = m.group(1)
cat_cluster_command = "cat --cluster utecl1 --data"
cat_cluster_out = self.commandtest(cat_cluster_command.split())
for i in range(1, 5):
host = "evh%s.aqd-unittest.ms.com" % i
self.searchoutput(cat_cluster_out,
r'"system/cluster/members" = list\([^\)]*'
r'"%s"[^\)]*\);' % host,
cat_cluster_command)
# Also verify that the host plenary was written correctly.
cat_host_command = ["cat", "--hostname", host]
cat_host_out = self.commandtest(cat_host_command)
self.matchoutput(cat_host_out,
'include "%s";' % template,
cat_host_command)
for i in range(1, 5):
command = "cat --hostname evh%s.aqd-unittest.ms.com" % i
out = self.commandtest(command.split())
self.searchoutput(out,
'include "cluster/utecl1/client";',
command)
command = "cat --hostname evh%s.aqd-unittest.ms.com --data" % i
out = self.commandtest(command.split())
self.matchoutput(out,
'"system/cluster/name" = "utecl1";',
command)
self.matchoutput(out,
'"system/cluster/metacluster/name" = "utmc1";',
command)
def test_110_bind_utecl2(self):
# test_rebind_esx_cluster will also bind evh1 to utecl2.
for i in [5]:
self.successtest(["cluster",
"--hostname", "evh%s.aqd-unittest.ms.com" % i,
"--personality=vulcan-10g-server-prod",
"--cluster", "utecl2"])
def test_120_duplicate_bind_utecl1(self):
self.successtest(["cluster",
"--hostname", "evh1.aqd-unittest.ms.com",
"--personality=vulcan-10g-server-prod",
"--cluster", "utecl1"])
def test_130_switching_archetype(self):
command = ["cluster", "--cluster=utecl1",
"--hostname=aquilon61.aqd-unittest.ms.com",
"--personality=vulcan-10g-server-prod"]
# Currently aquilon61 will be an "aquilon" archetype. Which is
# incompatible with vulcan-10g-server-prod...
out = self.notfoundtest(command)
# So, make it a compatible archetype and try again
command = ["reconfigure", "--hostname=aquilon61.aqd-unittest.ms.com",
"--personality=esx_server", "--archetype=vmhost",
"--osname", "esxi", "--osversion", "5.0.0",
"--buildstatus=build"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Personality vmhost/esx_server requires "
"cluster membership, please run 'aq cluster'.",
command)
# Now we have a problem - vmhosts do not compile without a cluster,
# which is a bug, but it means we cannot just switch the archetype. We
# need to delete & re-add the host instead. Sigh...
self.dsdb_expect_delete(self.net["hp_eth0"].usable[11])
self.statustest(["del_host", "--hostname", "aquilon61.aqd-unittest.ms.com"])
self.dsdb_expect_add("aquilon61.aqd-unittest.ms.com",
self.net["hp_eth0"].usable[11], "eth0",
self.net["hp_eth0"].usable[11].mac)
self.noouttest(["add_host", "--hostname", "aquilon61.aqd-unittest.ms.com",
"--archetype", "vmhost", "--personality", "esx_server",
"--osname", "esxi", "--osversion", "5.0.0",
"--machine", "ut9s03p11",
"--sandbox", "%s/utsandbox" % self.user,
"--ip", self.net["hp_eth0"].usable[11]])
self.dsdb_verify()
command = ["cluster", "--cluster=utecl1",
"--personality=vulcan-10g-server-prod",
"--hostname=aquilon61.aqd-unittest.ms.com"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Host aquilon61.aqd-unittest.ms.com sandbox "
"%s/utsandbox does not match ESX cluster utecl1 "
"domain unittest" % self.user,
command)
# Ah yes, we need it to be in the same sandbox
# using --force to bypass normal checks due to git status
# containing uncommitted files
command = ["manage", "--domain=unittest",
"--hostname=aquilon61.aqd-unittest.ms.com", "--force"]
self.successtest(command)
command = ["cluster", "--cluster=utecl1",
"--hostname=aquilon61.aqd-unittest.ms.com",
"--personality=vulcan-10g-server-prod"]
self.successtest(command)
def test_131_ipfromtype_setup(self):
# Setup resource group in a cluster, which has host in a bucket
# This is for --ipfromtype testing
ip = self.net["np_bucket2_vip"].network_address
command = ["show", "cluster", "--cluster", "utecl1"]
out = self.commandtest(command)
self.matchoutput(out,
"Member: aquilon61.aqd-unittest.ms.com",
command)
command = ["show", "host", "--host", "aquilon61.aqd-unittest.ms.com"]
out = self.commandtest(command)
self.matchoutput(out, "Bunker: bucket2.ut", command)
command = ["add", "resourcegroup", "--cluster", "utecl1", "--resourcegroup", "testnextip"]
self.successtest(command)
command = ["search", "network", "--type", "vip", "--exact_location", "--bunker", "bucket2.ut", "--fullinfo"]
self.noouttest(command)
command = ["search", "network", "--type", "vip", "--exact_location", "--bunker", "bucket2.np", "--fullinfo"]
out = self.commandtest(command)
self.matchoutput(out, "Bunker: bucket2.np", command)
self.matchoutput(out, "IP: {}".format(ip), command)
self.matchoutput(out, "Network: np_bucket2_vip", command)
self.matchoutput(out, "Network Type: vip", command)
def test_132_ipfromtype_resourcegroup_test(self):
ip = self.net["np_bucket2_vip"].usable[0]
service_addr = "testresgr.ms.com"
command = ["add", "service", "address", "--resourcegroup", "testnextip", "--service_address",
service_addr, "--name", "test", "--ipfromtype", "vip"]
self.dsdb_expect_add(service_addr, ip)
self.successtest(command)
command = ["show", "service", "address", "--name", "test",
"--resourcegroup", "testnextip"]
out = self.commandtest(command)
self.matchoutput(out, "Service Address: test", command)
self.matchoutput(out, "Resource Group testnextip",
command)
self.matchoutput(out, "Address: {} [{}]".format(service_addr, ip),
command)
self.dsdb_verify()
def test_133_ipfromtype_resourcegroup_restore(self):
ip1 = self.net["np_bucket2_vip"].usable[0]
self.dsdb_expect_delete(ip1)
command = ["del", "service", "address", "--name", "test",
"--resourcegroup", "testnextip"]
self.successtest(command)
self.dsdb_verify()
command = ["del", "resourcegroup", "--resourcegroup", "testnextip", "--cluster", "utecl1"]
self.successtest(command)
def test_135_switching_archetype_restore(self):
# Restore the host. Need to move to a more permissive cluster first.
command = ["cluster", "--cluster=utecl2",
"--hostname=aquilon61.aqd-unittest.ms.com"]
self.successtest(command)
# Check that both cluster plenaries were updated
command = ["cat", "--cluster", "utecl1"]
out = self.commandtest(command)
self.matchclean(out, "aquilon61.aqd-unittest.ms.com", command)
command = ["cat", "--cluster", "utecl2", "--data"]
out = self.commandtest(command)
self.searchoutput(out,
r'"system/cluster/members" = list\('
r'[^)]*"aquilon61.aqd-unittest.ms.com"[^)]*\);',
command)
# Now try to uncluster it...
command = ["uncluster", "--cluster=utecl2",
"--hostname=aquilon61.aqd-unittest.ms.com"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Host personality vulcan-10g-server-prod requires a cluster, "
"use --personality to change personality when "
"leaving the cluster.",
command)
# Oops.
command = ["uncluster", "--hostname=aquilon61.aqd-unittest.ms.com",
"--cluster=utecl2", "--personality=generic"]
out = self.successtest(command)
# using --force to bypass normal checks due to git status
# containing uncommitted files
command = ["manage", "--sandbox=%s/utsandbox" % self.user,
"--hostname=aquilon61.aqd-unittest.ms.com", "--force", "--skip_auto_compile"]
self.successtest(command)
osver = self.config.get("unittest", "linux_version_prev")
command = ["reconfigure", "--hostname=aquilon61.aqd-unittest.ms.com",
"--personality=inventory", "--archetype=aquilon",
"--osname=linux", "--osversion=%s" % osver,
"--buildstatus=build"]
self.successtest(command)
def test_140_bind_utmc4(self):
for i in range(1, 25):
host = "evh%s.aqd-unittest.ms.com" % (i + 50)
cluster = "utecl%d" % (5 + ((i - 1) // 4))
self.successtest(["cluster",
"--hostname", host, "--cluster", cluster])
def test_150_bind_storagecluster(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster=utstorage1"]
out = self.badrequesttest(command)
self.matchoutput(out, "Only hosts with archetype 'filer' can be added",
command)
command = ["cluster", "--hostname=filer1.ms.com",
"--cluster=utstorage1"]
self.successtest(command)
def test_160_bind_utmc7(self):
host = "evh10.aqd-unittest.ms.com"
cluster = "utecl11"
self.successtest(["cluster", "--hostname", host, "--cluster", cluster])
def test_170_bind_utmc8(self):
self.statustest(["cluster", "--hostname", "evh80.aqd-unittest.ms.com",
"--cluster", "utecl12"])
self.statustest(["cluster", "--hostname", "evh81.aqd-unittest.ms.com",
"--cluster", "utecl13"])
def test_175_bind_utmc9(self):
self.statustest(["cluster", "--hostname", "evh82.aqd-unittest.ms.com",
"--cluster", "utecl14"])
self.statustest(["cluster", "--hostname", "evh83.aqd-unittest.ms.com",
"--cluster", "utecl15"])
def test_200_missing_cluster(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster", "cluster-does-not-exist"]
out = self.notfoundtest(command)
self.matchoutput(out,
"Cluster cluster-does-not-exist not found.",
command)
def test_200_bad_location(self):
command = ["cluster", "--hostname=%s.ms.com" % self.aurora_with_node,
"--cluster", "utecl1"] + self.valid_just_tcm
out = self.badrequesttest(command)
self.matchoutput(out, "is not within cluster location", command)
def test_200_cluster_capacity(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster", "utecl3"]
out = self.badrequesttest(command)
self.matchoutput(out,
"ESX Cluster utecl3 has 1 hosts bound, which exceeds "
"the requested limit of 0.",
command)
def test_200_unmade_cluster(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster", "utecl4"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Please run `make cluster --cluster utecl4`",
command)
def test_200_missing_personality(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster", "utecl1",
"--personality", "personality-does-not-exist"]
out = self.notfoundtest(command)
self.matchoutput(out,
"Personality personality-does-not-exist, "
"archetype vmhost not found.",
command)
def test_200_missing_personality_stage(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster", "utecl1",
"--personality", "nostage"]
out = self.notfoundtest(command)
self.matchoutput(out,
"Personality vmhost/nostage does not have "
"stage current.",
command)
def test_200_bad_personality_stage(self):
command = ["cluster", "--hostname=evh9.aqd-unittest.ms.com",
"--cluster", "utecl1",
"--personality", "nostage",
"--personality_stage", "no-such-stage"]
out = self.badrequesttest(command)
self.matchoutput(out,
"'no-such-stage' is not a valid personality stage.",
command)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestCluster)
unittest.TextTestRunner(verbosity=2).run(suite)
|
[
"unittest.TextTestRunner",
"unittest.TestLoader",
"utils.import_depends"
] |
[((857, 879), 'utils.import_depends', 'utils.import_depends', ([], {}), '()\n', (877, 879), False, 'import utils\n'), ((17988, 18009), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (18007, 18009), False, 'import unittest\n'), ((18049, 18085), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (18072, 18085), False, 'import unittest\n')]
|
import pytest
from django.urls import reverse
from rest_framework import status
from api.applications.enums import ApplicationExportType, GoodsTypeCategory
from api.applications.models import OpenApplication, CountryOnApplication
from api.cases.enums import CaseTypeReferenceEnum
from api.goodstype.models import GoodsType
from api.goodstype.tests.factories import GoodsTypeFactory
from api.staticdata.countries.helpers import get_country
from api.staticdata.countries.models import Country
from test_helpers.clients import DataTestClient
class OpenDealerTests(DataTestClient):
url = reverse("applications:applications")
def test_create_draft_open_dealer_application_generates_goods(self):
data = {
"name": "Test",
"application_type": CaseTypeReferenceEnum.OIEL,
"export_type": ApplicationExportType.PERMANENT,
"goodstype_category": GoodsTypeCategory.DEALER,
}
response = self.client.post(self.url, data, **self.exporter_headers)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(OpenApplication.objects.count(), 1)
self.assertEqual(GoodsType.objects.filter(application=OpenApplication.objects.first()).count(), 14)
def test_all_eu_countries_added_dealer(self):
data = {
"name": "Test",
"export_type": ApplicationExportType.PERMANENT,
"application_type": CaseTypeReferenceEnum.OIEL,
"goodstype_category": GoodsTypeCategory.DEALER,
}
response = self.client.post(self.url, data, **self.exporter_headers)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
CountryOnApplication.objects.filter(application=OpenApplication.objects.first()).count(),
Country.exclude_special_countries.filter(is_eu=1).exclude(id="GB").count(),
)
def test_cannot_add_goodstypes_on_dealer_application(self):
application = self.create_draft_open_application(organisation=self.organisation)
application.goodstype_category = GoodsTypeCategory.DEALER
application.save()
initial_goods_count = GoodsType.objects.all().count()
url = reverse("applications:application_goodstypes", kwargs={"pk": application.id})
response = self.client.post(url, "", **self.exporter_headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(GoodsType.objects.all().count(), initial_goods_count)
def test_cannot_remove_goodstype_from_open_dealer_application(self):
self.create_draft_open_application(self.organisation)
application = self.create_draft_open_application(organisation=self.organisation)
application.goodstype_category = GoodsTypeCategory.DEALER
application.save()
goodstype = GoodsTypeFactory(application=application)
initial_goods_count = GoodsType.objects.all().count()
url = reverse(
"applications:application_goodstype", kwargs={"pk": application.id, "goodstype_pk": goodstype.id},
)
response = self.client.delete(url, **self.exporter_headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(GoodsType.objects.all().count(), initial_goods_count)
def test_cannot_change_countries_on_dealer_application(self):
application = self.create_draft_open_application(organisation=self.organisation)
application.goodstype_category = GoodsTypeCategory.DEALER
application.save()
initial_countries_count = CountryOnApplication.objects.filter(application=application).count()
data = {"countries": Country.objects.all()[:10].values_list("id", flat=True)}
url = reverse("applications:countries", kwargs={"pk": application.id})
response = self.client.post(url, data, **self.exporter_headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(CountryOnApplication.objects.filter(application=application).count(), initial_countries_count)
def test_cannot_change_countries_on_goodstype_on_dealer_application(self):
country_1 = get_country("ES")
country_2 = get_country("US")
country_3 = get_country("FR")
application = self.create_draft_open_application(organisation=self.organisation)
application.goodstype_category = GoodsTypeCategory.DEALER
application.save()
goodstype = GoodsType.objects.filter(application=application).first()
initial_countries_count = goodstype.countries.count()
data = {str(goodstype.id): [country_1.id, country_2.id, country_3.id]}
url = reverse("applications:application_goodstype_assign_countries", kwargs={"pk": application.id})
response = self.client.put(url, data, **self.exporter_headers)
goodstype.refresh_from_db()
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(goodstype.countries.count(), initial_countries_count)
|
[
"api.goodstype.tests.factories.GoodsTypeFactory",
"api.goodstype.models.GoodsType.objects.filter",
"api.staticdata.countries.helpers.get_country",
"api.applications.models.CountryOnApplication.objects.filter",
"api.staticdata.countries.models.Country.exclude_special_countries.filter",
"django.urls.reverse",
"api.staticdata.countries.models.Country.objects.all",
"api.applications.models.OpenApplication.objects.first",
"api.goodstype.models.GoodsType.objects.all",
"api.applications.models.OpenApplication.objects.count"
] |
[((591, 627), 'django.urls.reverse', 'reverse', (['"""applications:applications"""'], {}), "('applications:applications')\n", (598, 627), False, 'from django.urls import reverse\n'), ((2243, 2320), 'django.urls.reverse', 'reverse', (['"""applications:application_goodstypes"""'], {'kwargs': "{'pk': application.id}"}), "('applications:application_goodstypes', kwargs={'pk': application.id})\n", (2250, 2320), False, 'from django.urls import reverse\n'), ((2886, 2927), 'api.goodstype.tests.factories.GoodsTypeFactory', 'GoodsTypeFactory', ([], {'application': 'application'}), '(application=application)\n', (2902, 2927), False, 'from api.goodstype.tests.factories import GoodsTypeFactory\n'), ((3004, 3114), 'django.urls.reverse', 'reverse', (['"""applications:application_goodstype"""'], {'kwargs': "{'pk': application.id, 'goodstype_pk': goodstype.id}"}), "('applications:application_goodstype', kwargs={'pk': application.id,\n 'goodstype_pk': goodstype.id})\n", (3011, 3114), False, 'from django.urls import reverse\n'), ((3811, 3875), 'django.urls.reverse', 'reverse', (['"""applications:countries"""'], {'kwargs': "{'pk': application.id}"}), "('applications:countries', kwargs={'pk': application.id})\n", (3818, 3875), False, 'from django.urls import reverse\n'), ((4245, 4262), 'api.staticdata.countries.helpers.get_country', 'get_country', (['"""ES"""'], {}), "('ES')\n", (4256, 4262), False, 'from api.staticdata.countries.helpers import get_country\n'), ((4283, 4300), 'api.staticdata.countries.helpers.get_country', 'get_country', (['"""US"""'], {}), "('US')\n", (4294, 4300), False, 'from api.staticdata.countries.helpers import get_country\n'), ((4321, 4338), 'api.staticdata.countries.helpers.get_country', 'get_country', (['"""FR"""'], {}), "('FR')\n", (4332, 4338), False, 'from api.staticdata.countries.helpers import get_country\n'), ((4755, 4852), 'django.urls.reverse', 'reverse', (['"""applications:application_goodstype_assign_countries"""'], {'kwargs': "{'pk': application.id}"}), "('applications:application_goodstype_assign_countries', kwargs={'pk':\n application.id})\n", (4762, 4852), False, 'from django.urls import reverse\n'), ((1113, 1144), 'api.applications.models.OpenApplication.objects.count', 'OpenApplication.objects.count', ([], {}), '()\n', (1142, 1144), False, 'from api.applications.models import OpenApplication, CountryOnApplication\n'), ((2197, 2220), 'api.goodstype.models.GoodsType.objects.all', 'GoodsType.objects.all', ([], {}), '()\n', (2218, 2220), False, 'from api.goodstype.models import GoodsType\n'), ((2958, 2981), 'api.goodstype.models.GoodsType.objects.all', 'GoodsType.objects.all', ([], {}), '()\n', (2979, 2981), False, 'from api.goodstype.models import GoodsType\n'), ((3642, 3702), 'api.applications.models.CountryOnApplication.objects.filter', 'CountryOnApplication.objects.filter', ([], {'application': 'application'}), '(application=application)\n', (3677, 3702), False, 'from api.applications.models import OpenApplication, CountryOnApplication\n'), ((4542, 4591), 'api.goodstype.models.GoodsType.objects.filter', 'GoodsType.objects.filter', ([], {'application': 'application'}), '(application=application)\n', (4566, 4591), False, 'from api.goodstype.models import GoodsType\n'), ((2494, 2517), 'api.goodstype.models.GoodsType.objects.all', 'GoodsType.objects.all', ([], {}), '()\n', (2515, 2517), False, 'from api.goodstype.models import GoodsType\n'), ((3305, 3328), 'api.goodstype.models.GoodsType.objects.all', 'GoodsType.objects.all', ([], {}), '()\n', (3326, 3328), False, 'from api.goodstype.models import GoodsType\n'), ((4050, 4110), 'api.applications.models.CountryOnApplication.objects.filter', 'CountryOnApplication.objects.filter', ([], {'application': 'application'}), '(application=application)\n', (4085, 4110), False, 'from api.applications.models import OpenApplication, CountryOnApplication\n'), ((3740, 3761), 'api.staticdata.countries.models.Country.objects.all', 'Country.objects.all', ([], {}), '()\n', (3759, 3761), False, 'from api.staticdata.countries.models import Country\n'), ((1211, 1242), 'api.applications.models.OpenApplication.objects.first', 'OpenApplication.objects.first', ([], {}), '()\n', (1240, 1242), False, 'from api.applications.models import OpenApplication, CountryOnApplication\n'), ((1780, 1811), 'api.applications.models.OpenApplication.objects.first', 'OpenApplication.objects.first', ([], {}), '()\n', (1809, 1811), False, 'from api.applications.models import OpenApplication, CountryOnApplication\n'), ((1834, 1883), 'api.staticdata.countries.models.Country.exclude_special_countries.filter', 'Country.exclude_special_countries.filter', ([], {'is_eu': '(1)'}), '(is_eu=1)\n', (1874, 1883), False, 'from api.staticdata.countries.models import Country\n')]
|
#!/usr/bin/env python
from constants import *
import numpy as np
def regrid(self):
'''
Called in both firn_density_spin and firn_density_nospin
There are 3 subgrids in the regrid module. Grid 1 is the high resolution grid near the surface. Grid 2 is the lower resolution grid at greater depths; a user-defined number of nodes (self.c['nodestocombine']; refer as NTC here) are combined occasionally (every NTC time steps) to make one new node within grid 2. Grid 3 is at the bottom and has split up one grid 2 node back into a high-resolution grid (1 node into NTC nodes), which can be removed at each time step to keep the model Lagrangian.
the variable gridtrack keeps track of which subgrid each node is in.
'''
ind10 = np.where(self.gridtrack==1)[0] # all of the nodes in subgrid 1.
ind1 = np.where(self.gridtrack==1)[0][-1*self.c['nodestocombine']:] # the last NTC nodes of subgrid 1; will be combined into 1 node within subgrid 2.
ind1a = ind1[0]
ind1b = ind1[-1]
ind0 = ind1[0] - 1 # new last node of grid 1
### create the properties of the new subgrid 2 node
g2dz = np.array([np.sum(self.dz[ind1])])
g2mass = np.sum(self.mass[ind1])
g2rho = g2mass/g2dz
g2Tz0 = np.sum(self.Tz[ind1]*self.mass[ind1])
g2Tz = np.array([g2Tz0 / g2mass]) # Use a weighted average for temperature (effectively the enthalpy)
g2gt = 2 #gridtrack
g2age = np.mean(self.age[ind1])
# g2bm = np.mean(self.bdot_mean[ind1])
g2bm0 = np.sum(self.bdot_mean[ind1]*self.mass[ind1])
g2bm = np.array([g2bm0 / g2mass])
g2lwc = np.sum(self.LWC[ind1])
### split up the last node in grid 2 into NTC nodes. Each node retains the density, age, etc of the old subgrid 2 node.
g3dz = self.dz[-1]/self.nodestocombine * np.ones(self.nodestocombine)
g3rho = self.rho[-1] * np.ones(self.nodestocombine)
g3mass = g3rho * g3dz
g3gt = 3 * np.ones(self.nodestocombine)
g3Tz = self.Tz[-1]* np.ones(self.nodestocombine)
g3age = self.age[-1]*np.ones(self.nodestocombine)
g3bm = self.bdot_mean[-1]*np.ones(self.nodestocombine)
g3lwc = self.LWC[-1]/self.nodestocombine * np.ones(self.nodestocombine)
### combine the new and old nodes into the full grid.
self.dz = np.concatenate((self.dz[0:ind1a],g2dz,self.dz[ind1b+1:-1],g3dz))
self.z = self.dz.cumsum(axis=0)
self.z = np.concatenate(([0], self.z[:-1]))
self.rho = np.concatenate((self.rho[0:ind1a],g2rho,self.rho[ind1b+1:-1],g3rho))
self.Tz = np.concatenate((self.Tz[0:ind1a],g2Tz,self.Tz[ind1b+1:-1],g3Tz))
self.mass = np.concatenate((self.mass[0:ind1a],[g2mass],self.mass[ind1b+1:-1],g3mass))
self.sigma = self.mass * self.dx * GRAVITY
self.sigma = self.sigma.cumsum(axis = 0)
self.mass_sum = self.mass.cumsum(axis = 0)
self.age = np.concatenate((self.age[0:ind1a],[g2age],self.age[ind1b+1:-1],g3age))
self.bdot_mean = np.concatenate((self.bdot_mean[0:ind1a],g2bm,self.bdot_mean[ind1b+1:-1],g3bm))
self.LWC = np.concatenate((self.LWC[0:ind1a],[g2lwc],self.LWC[ind1b+1:-1],g3lwc))
self.gridtrack = np.concatenate((self.gridtrack[0:ind1a],[g2gt],self.gridtrack[ind1b+1:-1],g3gt))
if self.c['physGrain']:
#g2r2 = np.array([np.mean(self.r2)])
g2r2 = np.mean(self.r2[ind1]) # VV added
g3r2 = self.r2[-1]* np.ones(self.nodestocombine)
self.r2 = np.concatenate((self.r2[0:ind1a],[g2r2],self.r2[ind1b+1:-1],g3r2))
return self.dz, self.z, self.rho, self.Tz, self.mass, self.sigma, self.mass_sum, self.age, self.bdot_mean, self.LWC, self.gridtrack, self.r2
def init_regrid(self):
'''
Used in firn_density_spin for the initial regridding.
'''
grid1b = self.c['grid1bottom']
self.nodestocombine = self.c['nodestocombine']
ind1 = np.where(self.z<grid1b)[0]
ind2 = np.where(self.z>=grid1b)[0]
grid1z = self.z[ind1]
grid2z = self.z[ind2[0]::self.nodestocombine]
self.z = np.concatenate((grid1z,grid2z))
grid3z = self.z[-1] + np.cumsum(self.dz[-1*self.nodestocombine:])
self.z = np.concatenate((self.z,grid3z))
self.dz = np.diff(self.z)
self.dz = np.append(self.dz, self.dz[-1])
self.gridLen = len(self.z)
self.dx = np.ones(self.gridLen)
self.gridtrack = 2 * np.ones(self.gridLen)
self.gridtrack[ind1] = 1
self.gridtrack[-1*self.nodestocombine:] = 3
print('After regrid, grid length is', self.gridLen)
return self.nodestocombine, self.z, self.dz, self.gridLen, self.dx, self.gridtrack
|
[
"numpy.sum",
"numpy.ones",
"numpy.append",
"numpy.cumsum",
"numpy.mean",
"numpy.array",
"numpy.diff",
"numpy.where",
"numpy.concatenate"
] |
[((1191, 1214), 'numpy.sum', 'np.sum', (['self.mass[ind1]'], {}), '(self.mass[ind1])\n', (1197, 1214), True, 'import numpy as np\n'), ((1255, 1294), 'numpy.sum', 'np.sum', (['(self.Tz[ind1] * self.mass[ind1])'], {}), '(self.Tz[ind1] * self.mass[ind1])\n', (1261, 1294), True, 'import numpy as np\n'), ((1307, 1333), 'numpy.array', 'np.array', (['[g2Tz0 / g2mass]'], {}), '([g2Tz0 / g2mass])\n', (1315, 1333), True, 'import numpy as np\n'), ((1443, 1466), 'numpy.mean', 'np.mean', (['self.age[ind1]'], {}), '(self.age[ind1])\n', (1450, 1466), True, 'import numpy as np\n'), ((1525, 1571), 'numpy.sum', 'np.sum', (['(self.bdot_mean[ind1] * self.mass[ind1])'], {}), '(self.bdot_mean[ind1] * self.mass[ind1])\n', (1531, 1571), True, 'import numpy as np\n'), ((1584, 1610), 'numpy.array', 'np.array', (['[g2bm0 / g2mass]'], {}), '([g2bm0 / g2mass])\n', (1592, 1610), True, 'import numpy as np\n'), ((1625, 1647), 'numpy.sum', 'np.sum', (['self.LWC[ind1]'], {}), '(self.LWC[ind1])\n', (1631, 1647), True, 'import numpy as np\n'), ((2317, 2386), 'numpy.concatenate', 'np.concatenate', (['(self.dz[0:ind1a], g2dz, self.dz[ind1b + 1:-1], g3dz)'], {}), '((self.dz[0:ind1a], g2dz, self.dz[ind1b + 1:-1], g3dz))\n', (2331, 2386), True, 'import numpy as np\n'), ((2449, 2483), 'numpy.concatenate', 'np.concatenate', (['([0], self.z[:-1])'], {}), '(([0], self.z[:-1]))\n', (2463, 2483), True, 'import numpy as np\n'), ((2506, 2579), 'numpy.concatenate', 'np.concatenate', (['(self.rho[0:ind1a], g2rho, self.rho[ind1b + 1:-1], g3rho)'], {}), '((self.rho[0:ind1a], g2rho, self.rho[ind1b + 1:-1], g3rho))\n', (2520, 2579), True, 'import numpy as np\n'), ((2597, 2666), 'numpy.concatenate', 'np.concatenate', (['(self.Tz[0:ind1a], g2Tz, self.Tz[ind1b + 1:-1], g3Tz)'], {}), '((self.Tz[0:ind1a], g2Tz, self.Tz[ind1b + 1:-1], g3Tz))\n', (2611, 2666), True, 'import numpy as np\n'), ((2684, 2763), 'numpy.concatenate', 'np.concatenate', (['(self.mass[0:ind1a], [g2mass], self.mass[ind1b + 1:-1], g3mass)'], {}), '((self.mass[0:ind1a], [g2mass], self.mass[ind1b + 1:-1], g3mass))\n', (2698, 2763), True, 'import numpy as np\n'), ((2932, 3007), 'numpy.concatenate', 'np.concatenate', (['(self.age[0:ind1a], [g2age], self.age[ind1b + 1:-1], g3age)'], {}), '((self.age[0:ind1a], [g2age], self.age[ind1b + 1:-1], g3age))\n', (2946, 3007), True, 'import numpy as np\n'), ((3025, 3112), 'numpy.concatenate', 'np.concatenate', (['(self.bdot_mean[0:ind1a], g2bm, self.bdot_mean[ind1b + 1:-1], g3bm)'], {}), '((self.bdot_mean[0:ind1a], g2bm, self.bdot_mean[ind1b + 1:-1],\n g3bm))\n', (3039, 3112), True, 'import numpy as np\n'), ((3126, 3201), 'numpy.concatenate', 'np.concatenate', (['(self.LWC[0:ind1a], [g2lwc], self.LWC[ind1b + 1:-1], g3lwc)'], {}), '((self.LWC[0:ind1a], [g2lwc], self.LWC[ind1b + 1:-1], g3lwc))\n', (3140, 3201), True, 'import numpy as np\n'), ((3219, 3309), 'numpy.concatenate', 'np.concatenate', (['(self.gridtrack[0:ind1a], [g2gt], self.gridtrack[ind1b + 1:-1], g3gt)'], {}), '((self.gridtrack[0:ind1a], [g2gt], self.gridtrack[ind1b + 1:-\n 1], g3gt))\n', (3233, 3309), True, 'import numpy as np\n'), ((4135, 4167), 'numpy.concatenate', 'np.concatenate', (['(grid1z, grid2z)'], {}), '((grid1z, grid2z))\n', (4149, 4167), True, 'import numpy as np\n'), ((4268, 4300), 'numpy.concatenate', 'np.concatenate', (['(self.z, grid3z)'], {}), '((self.z, grid3z))\n', (4282, 4300), True, 'import numpy as np\n'), ((4322, 4337), 'numpy.diff', 'np.diff', (['self.z'], {}), '(self.z)\n', (4329, 4337), True, 'import numpy as np\n'), ((4360, 4391), 'numpy.append', 'np.append', (['self.dz', 'self.dz[-1]'], {}), '(self.dz, self.dz[-1])\n', (4369, 4391), True, 'import numpy as np\n'), ((4448, 4469), 'numpy.ones', 'np.ones', (['self.gridLen'], {}), '(self.gridLen)\n', (4455, 4469), True, 'import numpy as np\n'), ((753, 782), 'numpy.where', 'np.where', (['(self.gridtrack == 1)'], {}), '(self.gridtrack == 1)\n', (761, 782), True, 'import numpy as np\n'), ((1822, 1850), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (1829, 1850), True, 'import numpy as np\n'), ((1880, 1908), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (1887, 1908), True, 'import numpy as np\n'), ((1954, 1982), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (1961, 1982), True, 'import numpy as np\n'), ((2010, 2038), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (2017, 2038), True, 'import numpy as np\n'), ((2066, 2094), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (2073, 2094), True, 'import numpy as np\n'), ((2128, 2156), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (2135, 2156), True, 'import numpy as np\n'), ((2206, 2234), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (2213, 2234), True, 'import numpy as np\n'), ((3398, 3420), 'numpy.mean', 'np.mean', (['self.r2[ind1]'], {}), '(self.r2[ind1])\n', (3405, 3420), True, 'import numpy as np\n'), ((3511, 3582), 'numpy.concatenate', 'np.concatenate', (['(self.r2[0:ind1a], [g2r2], self.r2[ind1b + 1:-1], g3r2)'], {}), '((self.r2[0:ind1a], [g2r2], self.r2[ind1b + 1:-1], g3r2))\n', (3525, 3582), True, 'import numpy as np\n'), ((3942, 3967), 'numpy.where', 'np.where', (['(self.z < grid1b)'], {}), '(self.z < grid1b)\n', (3950, 3967), True, 'import numpy as np\n'), ((3991, 4017), 'numpy.where', 'np.where', (['(self.z >= grid1b)'], {}), '(self.z >= grid1b)\n', (3999, 4017), True, 'import numpy as np\n'), ((4202, 4247), 'numpy.cumsum', 'np.cumsum', (['self.dz[-1 * self.nodestocombine:]'], {}), '(self.dz[-1 * self.nodestocombine:])\n', (4211, 4247), True, 'import numpy as np\n'), ((4496, 4517), 'numpy.ones', 'np.ones', (['self.gridLen'], {}), '(self.gridLen)\n', (4503, 4517), True, 'import numpy as np\n'), ((832, 861), 'numpy.where', 'np.where', (['(self.gridtrack == 1)'], {}), '(self.gridtrack == 1)\n', (840, 861), True, 'import numpy as np\n'), ((1153, 1174), 'numpy.sum', 'np.sum', (['self.dz[ind1]'], {}), '(self.dz[ind1])\n', (1159, 1174), True, 'import numpy as np\n'), ((3464, 3492), 'numpy.ones', 'np.ones', (['self.nodestocombine'], {}), '(self.nodestocombine)\n', (3471, 3492), True, 'import numpy as np\n')]
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convenience functions for dealing with instances and instance templates."""
import collections
import re
from googlecloudsdk.api_lib.compute import alias_ip_range_utils
from googlecloudsdk.api_lib.compute import constants
from googlecloudsdk.api_lib.compute import csek_utils
from googlecloudsdk.api_lib.compute import image_utils
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.compute import scope as compute_scopes
from googlecloudsdk.command_lib.compute.instances import flags
from googlecloudsdk.core import log
def GetCpuRamFromCustomName(name):
"""Gets the CPU and memory specs from the custom machine type name.
Args:
name: the custom machine type name for the 'instance create' call
Returns:
A two-tuple with the number of cpu and amount of memory for the custom
machine type
custom_cpu, the number of cpu desired for the custom machine type instance
custom_memory_mib, the amount of ram desired in MiB for the custom machine
type instance
None for both variables otherwise
"""
check_custom = re.search('custom-([0-9]+)-([0-9]+)', name)
if check_custom:
custom_cpu = check_custom.group(1)
custom_memory_mib = check_custom.group(2)
return custom_cpu, custom_memory_mib
return None, None
def GetNameForCustom(custom_cpu, custom_memory_mib, ext=False):
"""Creates a custom machine type name from the desired CPU and memory specs.
Args:
custom_cpu: the number of cpu desired for the custom machine type
custom_memory_mib: the amount of ram desired in MiB for the custom machine
type instance
ext: extended custom machine type should be used if true
Returns:
The custom machine type name for the 'instance create' call
"""
machine_type = 'custom-{0}-{1}'.format(custom_cpu, custom_memory_mib)
if ext:
machine_type += '-ext'
return machine_type
def InterpretMachineType(machine_type, custom_cpu, custom_memory, ext=True):
"""Interprets the machine type for the instance.
Args:
machine_type: name of existing machine type, eg. n1-standard
custom_cpu: number of CPU cores for custom machine type,
custom_memory: amount of RAM memory in bytes for custom machine type,
ext: extended custom machine type should be used if true,
Returns:
A string representing the URL naming a machine-type.
Raises:
exceptions.RequiredArgumentException when only one of the two custom
machine type flags are used.
exceptions.InvalidArgumentException when both the machine type and
custom machine type flags are used to generate a new instance.
"""
# Setting the machine type
machine_type_name = constants.DEFAULT_MACHINE_TYPE
if machine_type:
machine_type_name = machine_type
# Setting the specs for the custom machine.
if custom_cpu or custom_memory or ext:
if not custom_cpu:
raise exceptions.RequiredArgumentException(
'--custom-cpu', 'Both [--custom-cpu] and [--custom-memory] must be '
'set to create a custom machine type instance.')
if not custom_memory:
raise exceptions.RequiredArgumentException(
'--custom-memory', 'Both [--custom-cpu] and [--custom-memory] must '
'be set to create a custom machine type instance.')
if machine_type:
raise exceptions.InvalidArgumentException(
'--machine-type', 'Cannot set both [--machine-type] and '
'[--custom-cpu]/[--custom-memory] for the same instance.')
custom_type_string = GetNameForCustom(
custom_cpu,
# converting from B to MiB.
int(custom_memory / (2 ** 20)),
ext)
# Updating the machine type that is set for the URIs
machine_type_name = custom_type_string
return machine_type_name
def CheckCustomCpuRamRatio(compute_client, project, zone, machine_type_name):
"""Checks that the CPU and memory ratio is a supported custom instance type.
Args:
compute_client: GCE API client,
project: a project,
zone: the zone of the instance(s) being created,
machine_type_name: The machine type of the instance being created.
Returns:
Nothing. Function acts as a bound checker, and will raise an exception from
within the function if needed.
Raises:
utils.RaiseToolException if a custom machine type ratio is out of bounds.
"""
messages = compute_client.messages
compute = compute_client.apitools_client
if 'custom' in machine_type_name:
mt_get_pb = messages.ComputeMachineTypesGetRequest(
machineType=machine_type_name,
project=project,
zone=zone)
mt_get_reqs = [(compute.machineTypes, 'Get', mt_get_pb)]
errors = []
# Makes a 'machine-types describe' request to check the bounds
_ = list(compute_client.MakeRequests(
requests=mt_get_reqs,
errors_to_collect=errors))
if errors:
utils.RaiseToolException(
errors,
error_message='Could not fetch machine type:')
def CreateServiceAccountMessages(messages, scopes, service_account):
"""Returns a list of ServiceAccount messages corresponding to scopes."""
silence_deprecation_warning = False
if scopes is None:
scopes = constants.DEFAULT_SCOPES
# if user provided --no-service-account, it is already verified that
# scopes == [] and thus service_account value will not be used
service_account_specified = service_account is not None
if service_account is None:
service_account = 'default'
accounts_to_scopes = collections.defaultdict(list)
for scope in scopes:
parts = scope.split('=')
if len(parts) == 1:
account = service_account
scope_uri = scope
elif len(parts) == 2:
account, scope_uri = parts
if service_account_specified:
raise exceptions.InvalidArgumentException(
'--scopes',
'It is illegal to mix old --scopes flag format '
'[--scopes {0}={1}] with [--service-account ACCOUNT] flag. Use '
'[--scopes {1} --service-account {2}] instead.'
.format(account, scope_uri, service_account))
# TODO(b/33688878) Remove support for this deprecated format
if not silence_deprecation_warning:
log.warning(
'Flag format --scopes [ACCOUNT=]SCOPE, [[ACCOUNT=]SCOPE, ...] is '
'deprecated and will be removed 24th Jan 2018. Use --scopes SCOPE'
'[, SCOPE...] --service-account ACCOUNT instead.')
silence_deprecation_warning = True # Do not warn again for each scope
else:
raise exceptions.ToolException(
'[{0}] is an illegal value for [--scopes]. Values must be of the '
'form [SCOPE] or [ACCOUNT=SCOPE].'.format(scope))
# Expands the scope if the user provided an alias like
# "compute-rw".
scope_uri = constants.SCOPES.get(scope_uri, [scope_uri])
accounts_to_scopes[account].extend(scope_uri)
res = []
for account, scopes in sorted(accounts_to_scopes.iteritems()):
res.append(messages.ServiceAccount(email=account,
scopes=sorted(scopes)))
return res
def CreateOnHostMaintenanceMessage(messages, maintenance_policy):
"""Create on-host-maintenance message for VM."""
if maintenance_policy:
on_host_maintenance = messages.Scheduling.OnHostMaintenanceValueValuesEnum(
maintenance_policy)
else:
on_host_maintenance = None
return on_host_maintenance
def CreateSchedulingMessage(
messages, maintenance_policy, preemptible, restart_on_failure):
"""Create scheduling message for VM."""
# Note: We always specify automaticRestart=False for preemptible VMs. This
# makes sense, since no-restart-on-failure is defined as "store-true", and
# thus can't be given an explicit value. Hence it either has its default
# value (in which case we override it for convenience's sake to the only
# setting that makes sense for preemptible VMs), or the user actually
# specified no-restart-on-failure, the only usable setting.
on_host_maintenance = CreateOnHostMaintenanceMessage(messages,
maintenance_policy)
if preemptible:
scheduling = messages.Scheduling(automaticRestart=False,
onHostMaintenance=on_host_maintenance,
preemptible=True)
else:
scheduling = messages.Scheduling(automaticRestart=restart_on_failure,
onHostMaintenance=on_host_maintenance)
return scheduling
def CreateMachineTypeUris(
resources, compute_client, project,
machine_type, custom_cpu, custom_memory, ext, instance_refs):
"""Create machine type URIs for given args and instance references."""
# The element at index i is the machine type URI for instance
# i. We build this list here because we want to delay work that
# requires API calls as much as possible. This leads to a better
# user experience because the tool can fail fast upon a spelling
# mistake instead of delaying the user by making API calls whose
# purpose has already been rendered moot by the spelling mistake.
machine_type_uris = []
# Setting the machine type
machine_type_name = InterpretMachineType(
machine_type, custom_cpu, custom_memory, ext)
for instance_ref in instance_refs:
# Check to see if the custom machine type ratio is supported
CheckCustomCpuRamRatio(compute_client,
project,
instance_ref.zone,
machine_type_name)
machine_type_uris.append(
resources.Parse(
machine_type_name,
collection='compute.machineTypes',
params={'zone': instance_ref.zone}).SelfLink())
return machine_type_uris
def CreateNetworkInterfaceMessage(resources,
compute_client,
network,
subnet,
private_network_ip,
no_address,
address,
instance_refs,
alias_ip_ranges_string=None,
no_public_dns=None,
public_dns=None,
no_public_ptr=None,
public_ptr=None,
no_public_ptr_domain=None,
public_ptr_domain=None):
"""Returns a new NetworkInterface message."""
# TODO(b/30460572): instance reference should have zone name, not zone URI.
region = utils.ZoneNameToRegionName(instance_refs[0].zone.split('/')[-1])
messages = compute_client.messages
network_interface = messages.NetworkInterface()
# By default interface is attached to default network. If network or subnet
# are specified they're used instead.
if subnet is not None:
subnet_ref = resources.Parse(
subnet,
collection='compute.subnetworks',
params={'region': region})
network_interface.subnetwork = subnet_ref.SelfLink()
if network is not None:
network_ref = resources.Parse(network, collection='compute.networks')
network_interface.network = network_ref.SelfLink()
elif subnet is None:
network_ref = resources.Parse(
constants.DEFAULT_NETWORK, collection='compute.networks')
network_interface.network = network_ref.SelfLink()
if private_network_ip is not None:
network_interface.networkIP = private_network_ip
if alias_ip_ranges_string:
network_interface.aliasIpRanges = (
alias_ip_range_utils.CreateAliasIpRangeMessagesFromString(
messages, True, alias_ip_ranges_string))
if not no_address:
access_config = messages.AccessConfig(
name=constants.DEFAULT_ACCESS_CONFIG_NAME,
type=messages.AccessConfig.TypeValueValuesEnum.ONE_TO_ONE_NAT)
# If the user provided an external IP, populate the access
# config with it.
# TODO(b/25278937): plays poorly when creating multiple instances
if len(instance_refs) == 1:
address_resource = flags.ExpandAddressFlag(
resources, compute_client, address, region)
if address_resource:
access_config.natIP = address_resource
if no_public_dns is True:
access_config.setPublicDns = False
elif public_dns is True:
access_config.setPublicDns = True
if no_public_ptr is True:
access_config.setPublicPtr = False
elif public_ptr is True:
access_config.setPublicPtr = True
if no_public_ptr_domain is not True and public_ptr_domain is not None:
access_config.publicPtrDomainName = public_ptr_domain
network_interface.accessConfigs = [access_config]
return network_interface
def CreateNetworkInterfaceMessages(
resources, compute_client, network_interface_arg, instance_refs):
"""Create network interface messages.
Args:
resources: generates resource references.
compute_client: creates resources.
network_interface_arg: CLI argument specyfying network interfaces.
instance_refs: reference to instances that will own the generated
interfaces.
Returns:
list, items are NetworkInterfaceMessages.
"""
result = []
if network_interface_arg:
for interface in network_interface_arg:
address = interface.get('address', None)
no_address = 'no-address' in interface
result.append(CreateNetworkInterfaceMessage(
resources, compute_client, interface.get('network', None),
interface.get('subnet', None),
interface.get('private-network-ip', None), no_address,
address, instance_refs, interface.get('aliases', None)))
return result
def ParseDiskResource(resources, name, zone, type_):
if type_ == compute_scopes.ScopeEnum.REGION:
return resources.Parse(
name,
collection='compute.regionDisks',
params={'region': utils.ZoneNameToRegionName(zone)})
else:
return resources.Parse(
name,
collection='compute.disks',
params={'zone': zone})
def CreatePersistentAttachedDiskMessages(
resources, compute_client, csek_keys, disks, instance_ref):
"""Returns a list of AttachedDisk messages and the boot disk's reference."""
disks_messages = []
boot_disk_ref = None
messages = compute_client.messages
compute = compute_client.apitools_client
for disk in disks:
name = disk['name']
# Resolves the mode.
mode_value = disk.get('mode', 'rw')
if mode_value == 'rw':
mode = messages.AttachedDisk.ModeValueValuesEnum.READ_WRITE
else:
mode = messages.AttachedDisk.ModeValueValuesEnum.READ_ONLY
boot = disk.get('boot') == 'yes'
auto_delete = disk.get('auto-delete') == 'yes'
if 'scope' in disk and disk['scope'] == 'regional':
scope = compute_scopes.ScopeEnum.REGION
else:
scope = compute_scopes.ScopeEnum.ZONE
disk_ref = ParseDiskResource(resources, name, instance_ref.zone, scope)
if boot:
boot_disk_ref = disk_ref
# TODO(user) drop test after CSEK goes GA
if csek_keys:
disk_key_or_none = csek_utils.MaybeLookupKeyMessage(
csek_keys, disk_ref, compute)
kwargs = {'diskEncryptionKey': disk_key_or_none}
else:
kwargs = {}
attached_disk = messages.AttachedDisk(
autoDelete=auto_delete,
boot=boot,
deviceName=disk.get('device-name'),
mode=mode,
source=disk_ref.SelfLink(),
type=messages.AttachedDisk.TypeValueValuesEnum.PERSISTENT,
**kwargs)
# The boot disk must end up at index 0.
if boot:
disks_messages = [attached_disk] + disks_messages
else:
disks_messages.append(attached_disk)
return disks_messages, boot_disk_ref
def CreatePersistentCreateDiskMessages(scope_prompter, compute_client,
resources, csek_keys, create_disks,
instance_ref):
"""Returns a list of AttachedDisk messages for newly creating disks.
Args:
scope_prompter: Scope prompter object,
compute_client: creates resources,
resources: parser of resources,
csek_keys: customer suplied encryption keys,
create_disks: disk objects - contains following properties
* name - the name of disk,
* mode - 'rw' (R/W), 'ro' (R/O) access mode,
* disk-size - the size of the disk,
* disk-type - the type of the disk (HDD or SSD),
* image - the name of the image to initialize from,
* image-family - the image family name,
* image-project - the project name that has the image,
* auto-delete - whether disks is deleted when VM is deleted,
* device-name - device name on VM.
instance_ref: reference to the instance that will own the new disks.
Returns:
list of API messages for attached disks
"""
disks_messages = []
messages = compute_client.messages
compute = compute_client.apitools_client
for disk in create_disks or []:
name = disk.get('name')
# Resolves the mode.
mode_value = disk.get('mode', 'rw')
if mode_value == 'rw':
mode = messages.AttachedDisk.ModeValueValuesEnum.READ_WRITE
else:
mode = messages.AttachedDisk.ModeValueValuesEnum.READ_ONLY
auto_delete_value = disk.get('auto-delete', 'yes')
auto_delete = auto_delete_value == 'yes'
disk_size_gb = utils.BytesToGb(disk.get('size'))
disk_type = disk.get('type')
if disk_type:
disk_type_ref = resources.Parse(disk_type,
collection='compute.diskTypes',
params={'zone': instance_ref.zone})
disk_type_uri = disk_type_ref.SelfLink()
else:
disk_type_ref = None
disk_type_uri = None
image_expander = image_utils.ImageExpander(scope_prompter.compute_client,
scope_prompter.resources)
image_uri, _ = image_expander.ExpandImageFlag(
user_project=scope_prompter.project,
image=disk.get('image'),
image_family=disk.get('image-family'),
image_project=disk.get('image-project'),
return_image_resource=False)
image_key = None
disk_key = None
if csek_keys:
image_key = csek_utils.MaybeLookupKeyMessagesByUri(csek_keys,
resources,
[image_uri],
compute)
if name:
disk_ref = resources.Parse(name,
collection='compute.disks',
params={'zone': instance_ref.zone})
disk_key = csek_utils.MaybeLookupKeyMessage(csek_keys, disk_ref,
compute)
create_disk = messages.AttachedDisk(
autoDelete=auto_delete,
boot=False,
deviceName=disk.get('device-name'),
initializeParams=messages.AttachedDiskInitializeParams(
diskName=name,
sourceImage=image_uri,
diskSizeGb=disk_size_gb,
diskType=disk_type_uri,
sourceImageEncryptionKey=image_key),
mode=mode,
type=messages.AttachedDisk.TypeValueValuesEnum.PERSISTENT,
diskEncryptionKey=disk_key)
disks_messages.append(create_disk)
return disks_messages
def CreateAcceleratorConfigMessages(msgs, accelerator_type_ref,
accelerator_count):
"""Returns a list of accelerator config messages.
Args:
msgs: tracked GCE API messages.
accelerator_type_ref: reference to the accelerator type.
accelerator_count: number of accelerators to attach to the VM.
Returns:
a list of accelerator config message that specifies the type and number of
accelerators to attach to an instance.
"""
accelerator_config = msgs.AcceleratorConfig(
acceleratorType=accelerator_type_ref.SelfLink(),
acceleratorCount=accelerator_count)
return [accelerator_config]
def CreateDefaultBootAttachedDiskMessage(
compute_client, resources, disk_type, disk_device_name, disk_auto_delete,
disk_size_gb, require_csek_key_create, image_uri, instance_ref,
csek_keys=None):
"""Returns an AttachedDisk message for creating a new boot disk."""
messages = compute_client.messages
compute = compute_client.apitools_client
if disk_type:
disk_type_ref = resources.Parse(disk_type,
collection='compute.diskTypes',
params={'zone': instance_ref.zone})
disk_type_uri = disk_type_ref.SelfLink()
else:
disk_type_ref = None
disk_type_uri = None
if csek_keys:
# If we're going to encrypt the boot disk make sure that we select
# a name predictably, instead of letting the API deal with name
# conflicts automatically.
#
# Note that when csek keys are being used we *always* want force this
# even if we don't have any encryption key for default disk name.
#
# Consider the case where the user's key file has a key for disk `foo-1`
# and no other disk. Assume she runs
# gcloud compute instances create foo --csek-key-file f \
# --no-require-csek-key-create
# and gcloud doesn't force the disk name to be `foo`. The API might
# select name `foo-1` for the new disk, but has no way of knowing
# that the user has a key file mapping for that disk name. That
# behavior violates the principle of least surprise.
#
# Instead it's better for gcloud to force a specific disk name in the
# instance create, and fail if that name isn't available.
effective_boot_disk_name = (
disk_device_name or instance_ref.Name())
disk_ref = resources.Parse(effective_boot_disk_name,
collection='compute.disks',
params={'zone': instance_ref.zone})
disk_key_or_none = csek_utils.MaybeToMessage(
csek_keys.LookupKey(disk_ref, require_csek_key_create),
compute)
[image_key_or_none] = csek_utils.MaybeLookupKeyMessagesByUri(
csek_keys, resources, [image_uri], compute)
kwargs_init_parms = {'sourceImageEncryptionKey': image_key_or_none}
kwargs_disk = {'diskEncryptionKey': disk_key_or_none}
else:
kwargs_disk = {}
kwargs_init_parms = {}
effective_boot_disk_name = disk_device_name
return messages.AttachedDisk(
autoDelete=disk_auto_delete,
boot=True,
deviceName=effective_boot_disk_name,
initializeParams=messages.AttachedDiskInitializeParams(
sourceImage=image_uri,
diskSizeGb=disk_size_gb,
diskType=disk_type_uri,
**kwargs_init_parms),
mode=messages.AttachedDisk.ModeValueValuesEnum.READ_WRITE,
type=messages.AttachedDisk.TypeValueValuesEnum.PERSISTENT,
**kwargs_disk)
def UseExistingBootDisk(disks):
"""Returns True if the user has specified an existing boot disk."""
return any(disk.get('boot') == 'yes' for disk in disks)
def CreateLocalSsdMessage(resources, messages, device_name, interface,
zone=None):
"""Create a message representing a local ssd."""
if zone:
disk_type_ref = resources.Parse('local-ssd',
collection='compute.diskTypes',
params={'zone': zone})
disk_type = disk_type_ref.SelfLink()
else:
disk_type = 'local-ssd'
maybe_interface_enum = (
messages.AttachedDisk.InterfaceValueValuesEnum(interface)
if interface else None)
return messages.AttachedDisk(
type=messages.AttachedDisk.TypeValueValuesEnum.SCRATCH,
autoDelete=True,
deviceName=device_name,
interface=maybe_interface_enum,
mode=messages.AttachedDisk.ModeValueValuesEnum.READ_WRITE,
initializeParams=messages.AttachedDiskInitializeParams(
diskType=disk_type),
)
|
[
"googlecloudsdk.calliope.exceptions.RequiredArgumentException",
"googlecloudsdk.core.log.warning",
"googlecloudsdk.api_lib.compute.csek_utils.MaybeLookupKeyMessagesByUri",
"googlecloudsdk.api_lib.compute.utils.RaiseToolException",
"googlecloudsdk.api_lib.compute.image_utils.ImageExpander",
"googlecloudsdk.api_lib.compute.constants.SCOPES.get",
"collections.defaultdict",
"googlecloudsdk.api_lib.compute.alias_ip_range_utils.CreateAliasIpRangeMessagesFromString",
"googlecloudsdk.api_lib.compute.utils.ZoneNameToRegionName",
"googlecloudsdk.calliope.exceptions.InvalidArgumentException",
"googlecloudsdk.api_lib.compute.csek_utils.MaybeLookupKeyMessage",
"googlecloudsdk.command_lib.compute.instances.flags.ExpandAddressFlag",
"re.search"
] |
[((1726, 1769), 're.search', 're.search', (['"""custom-([0-9]+)-([0-9]+)"""', 'name'], {}), "('custom-([0-9]+)-([0-9]+)', name)\n", (1735, 1769), False, 'import re\n'), ((6137, 6166), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (6160, 6166), False, 'import collections\n'), ((7436, 7480), 'googlecloudsdk.api_lib.compute.constants.SCOPES.get', 'constants.SCOPES.get', (['scope_uri', '[scope_uri]'], {}), '(scope_uri, [scope_uri])\n', (7456, 7480), False, 'from googlecloudsdk.api_lib.compute import constants\n'), ((12304, 12405), 'googlecloudsdk.api_lib.compute.alias_ip_range_utils.CreateAliasIpRangeMessagesFromString', 'alias_ip_range_utils.CreateAliasIpRangeMessagesFromString', (['messages', '(True)', 'alias_ip_ranges_string'], {}), '(messages, True,\n alias_ip_ranges_string)\n', (12361, 12405), False, 'from googlecloudsdk.api_lib.compute import alias_ip_range_utils\n'), ((18586, 18673), 'googlecloudsdk.api_lib.compute.image_utils.ImageExpander', 'image_utils.ImageExpander', (['scope_prompter.compute_client', 'scope_prompter.resources'], {}), '(scope_prompter.compute_client, scope_prompter.\n resources)\n', (18611, 18673), False, 'from googlecloudsdk.api_lib.compute import image_utils\n'), ((22939, 23025), 'googlecloudsdk.api_lib.compute.csek_utils.MaybeLookupKeyMessagesByUri', 'csek_utils.MaybeLookupKeyMessagesByUri', (['csek_keys', 'resources', '[image_uri]', 'compute'], {}), '(csek_keys, resources, [image_uri],\n compute)\n', (22977, 23025), False, 'from googlecloudsdk.api_lib.compute import csek_utils\n'), ((3528, 3688), 'googlecloudsdk.calliope.exceptions.RequiredArgumentException', 'exceptions.RequiredArgumentException', (['"""--custom-cpu"""', '"""Both [--custom-cpu] and [--custom-memory] must be set to create a custom machine type instance."""'], {}), "('--custom-cpu',\n 'Both [--custom-cpu] and [--custom-memory] must be set to create a custom machine type instance.'\n )\n", (3564, 3688), False, 'from googlecloudsdk.calliope import exceptions\n'), ((3742, 3905), 'googlecloudsdk.calliope.exceptions.RequiredArgumentException', 'exceptions.RequiredArgumentException', (['"""--custom-memory"""', '"""Both [--custom-cpu] and [--custom-memory] must be set to create a custom machine type instance."""'], {}), "('--custom-memory',\n 'Both [--custom-cpu] and [--custom-memory] must be set to create a custom machine type instance.'\n )\n", (3778, 3905), False, 'from googlecloudsdk.calliope import exceptions\n'), ((3954, 4112), 'googlecloudsdk.calliope.exceptions.InvalidArgumentException', 'exceptions.InvalidArgumentException', (['"""--machine-type"""', '"""Cannot set both [--machine-type] and [--custom-cpu]/[--custom-memory] for the same instance."""'], {}), "('--machine-type',\n 'Cannot set both [--machine-type] and [--custom-cpu]/[--custom-memory] for the same instance.'\n )\n", (3989, 4112), False, 'from googlecloudsdk.calliope import exceptions\n'), ((5513, 5592), 'googlecloudsdk.api_lib.compute.utils.RaiseToolException', 'utils.RaiseToolException', (['errors'], {'error_message': '"""Could not fetch machine type:"""'}), "(errors, error_message='Could not fetch machine type:')\n", (5537, 5592), False, 'from googlecloudsdk.api_lib.compute import utils\n'), ((12816, 12883), 'googlecloudsdk.command_lib.compute.instances.flags.ExpandAddressFlag', 'flags.ExpandAddressFlag', (['resources', 'compute_client', 'address', 'region'], {}), '(resources, compute_client, address, region)\n', (12839, 12883), False, 'from googlecloudsdk.command_lib.compute.instances import flags\n'), ((15848, 15910), 'googlecloudsdk.api_lib.compute.csek_utils.MaybeLookupKeyMessage', 'csek_utils.MaybeLookupKeyMessage', (['csek_keys', 'disk_ref', 'compute'], {}), '(csek_keys, disk_ref, compute)\n', (15880, 15910), False, 'from googlecloudsdk.api_lib.compute import csek_utils\n'), ((19056, 19142), 'googlecloudsdk.api_lib.compute.csek_utils.MaybeLookupKeyMessagesByUri', 'csek_utils.MaybeLookupKeyMessagesByUri', (['csek_keys', 'resources', '[image_uri]', 'compute'], {}), '(csek_keys, resources, [image_uri],\n compute)\n', (19094, 19142), False, 'from googlecloudsdk.api_lib.compute import csek_utils\n'), ((19519, 19581), 'googlecloudsdk.api_lib.compute.csek_utils.MaybeLookupKeyMessage', 'csek_utils.MaybeLookupKeyMessage', (['csek_keys', 'disk_ref', 'compute'], {}), '(csek_keys, disk_ref, compute)\n', (19551, 19581), False, 'from googlecloudsdk.api_lib.compute import csek_utils\n'), ((6842, 7042), 'googlecloudsdk.core.log.warning', 'log.warning', (['"""Flag format --scopes [ACCOUNT=]SCOPE, [[ACCOUNT=]SCOPE, ...] is deprecated and will be removed 24th Jan 2018. Use --scopes SCOPE[, SCOPE...] --service-account ACCOUNT instead."""'], {}), "(\n 'Flag format --scopes [ACCOUNT=]SCOPE, [[ACCOUNT=]SCOPE, ...] is deprecated and will be removed 24th Jan 2018. Use --scopes SCOPE[, SCOPE...] --service-account ACCOUNT instead.'\n )\n", (6853, 7042), False, 'from googlecloudsdk.core import log\n'), ((14647, 14679), 'googlecloudsdk.api_lib.compute.utils.ZoneNameToRegionName', 'utils.ZoneNameToRegionName', (['zone'], {}), '(zone)\n', (14673, 14679), False, 'from googlecloudsdk.api_lib.compute import utils\n')]
|
import pandas as pd
from src.mapping.columns.column_label_catalog import ColumnLabelCatalog
from src.mapping.columns.column_name_classifier import ColumnNameClassifier
def test_full_name():
data = {
"full_name": ["<NAME>", "<NAME>", "<NAME>"],
"ssn": ["444-44-4444", "555-55-5555", "777-77-7777"],
"other": ["a", "b", "c"],
}
df = pd.DataFrame(data)
print(df)
id_info = ColumnNameClassifier.get_id_info_from_df(df)
print(id_info)
assert len(id_info) == 2
assert ColumnLabelCatalog.FULL_NAME in id_info
assert ColumnLabelCatalog.SSN in id_info
def test_name_parts():
data = {
"first_name": ["Bob", "Sally", "Aniket"],
"middle_name": ["aaa", "bbb", "ccc"],
"last_name": ["Johnson", "Sallyson", "Prada"],
"ssn": ["444-44-4444", "555-55-5555", "777-77-7777"],
"other": ["a", "b", "c"],
}
df = pd.DataFrame(data)
print("Separate columns for names")
id_info = ColumnNameClassifier.get_id_info_from_df(df)
print(id_info)
assert len(id_info) == 4
assert id_info[ColumnLabelCatalog.FIRST_NAME].table_column_name == "first_name"
|
[
"pandas.DataFrame",
"src.mapping.columns.column_name_classifier.ColumnNameClassifier.get_id_info_from_df"
] |
[((370, 388), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (382, 388), True, 'import pandas as pd\n'), ((417, 461), 'src.mapping.columns.column_name_classifier.ColumnNameClassifier.get_id_info_from_df', 'ColumnNameClassifier.get_id_info_from_df', (['df'], {}), '(df)\n', (457, 461), False, 'from src.mapping.columns.column_name_classifier import ColumnNameClassifier\n'), ((906, 924), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (918, 924), True, 'import pandas as pd\n'), ((979, 1023), 'src.mapping.columns.column_name_classifier.ColumnNameClassifier.get_id_info_from_df', 'ColumnNameClassifier.get_id_info_from_df', (['df'], {}), '(df)\n', (1019, 1023), False, 'from src.mapping.columns.column_name_classifier import ColumnNameClassifier\n')]
|
# (C) Copyright 2007-2021 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
""" The default resource manager. """
# Enthought library imports.
from traits.api import Dict, HasTraits, Str, provides
# Local imports.
from .i_resource_manager import IResourceManager
from .i_resource_protocol import IResourceProtocol
@provides(IResourceManager)
class ResourceManager(HasTraits):
""" The default resource manager. """
#### 'IResourceManager' interface #########################################
# The protocols used by the manager to resolve resource URLs.
resource_protocols = Dict(Str, IResourceProtocol)
###########################################################################
# 'IResourceManager' interface.
###########################################################################
#### Trait initializers ###################################################
def _resource_protocols_default(self):
""" Trait initializer. """
# We do the import(s) here in case somebody wants a resource manager
# that doesn't use the default protocol(s).
from .file_resource_protocol import FileResourceProtocol
from .http_resource_protocol import HTTPResourceProtocol
from .package_resource_protocol import PackageResourceProtocol
resource_protocols = {
"file": FileResourceProtocol(),
"http": HTTPResourceProtocol(),
"pkgfile": PackageResourceProtocol(),
}
return resource_protocols
#### Methods ##############################################################
def file(self, url):
""" Return a readable file-like object for the specified url. """
protocol_name, address = url.split("://")
protocol = self.resource_protocols.get(protocol_name)
if protocol is None:
raise ValueError("unknown protocol in URL %s" % url)
return protocol.file(address)
|
[
"traits.api.provides",
"traits.api.Dict"
] |
[((651, 677), 'traits.api.provides', 'provides', (['IResourceManager'], {}), '(IResourceManager)\n', (659, 677), False, 'from traits.api import Dict, HasTraits, Str, provides\n'), ((927, 955), 'traits.api.Dict', 'Dict', (['Str', 'IResourceProtocol'], {}), '(Str, IResourceProtocol)\n', (931, 955), False, 'from traits.api import Dict, HasTraits, Str, provides\n')]
|
import command_module
command_manager = command_module.CommandManager("192.168.0.1", 3000)
command_manager.start()
command_manager.stop()
|
[
"command_module.CommandManager"
] |
[((41, 91), 'command_module.CommandManager', 'command_module.CommandManager', (['"""192.168.0.1"""', '(3000)'], {}), "('192.168.0.1', 3000)\n", (70, 91), False, 'import command_module\n')]
|
# Based on work by <NAME> (https://bitbucket.org/reuteras/kippo/)
from __future__ import division, absolute_import
import socket
from cowrie.shell.command import HoneyPotCommand
commands = {}
class command_netstat(HoneyPotCommand):
def show_version(self):
self.write('net-tools 1.60\n')
self.write('netstat 1.42 (2001-04-15)\n')
self.write('<NAME>, <NAME>, <NAME>, <NAME>, <NAME> and others\n')
self.write('+NEW_ADDRT +RTF_IRTT +RTF_REJECT +FW_MASQUERADE +I18N\n')
self.write('AF: (inet) +UNIX +INET +INET6 +IPX +AX25 +NETROM +X25 +ATALK +ECONET +ROSE\n')
self.write('HW: +ETHER +ARC +SLIP +PPP +TUNNEL +TR +AX25 +NETROM +X25 +FR +ROSE +ASH +SIT +FDDI +HIPPI +HDLC/LAPB +EUI64\n')
def show_help(self):
self.write("""
usage: netstat [-vWeenNcCF] [<Af>] -r netstat {-V|--version|-h|--help}
netstat [-vWnNcaeol] [<Socket> ...]
netstat { [-vWeenNac] -i | [-cWnNe] -M | -s }
-r, --route display routing table
-i, --interfaces display interface table
-g, --groups display multicast group memberships
-s, --statistics display networking statistics (like SNMP)
-M, --masquerade display masqueraded connections
-v, --verbose be verbose
-W, --wide don\'t truncate IP addresses
-n, --numeric don\'t resolve names
--numeric-hosts don\'t resolve host names
--numeric-ports don\'t resolve port names
--numeric-users don\'t resolve user names
-N, --symbolic resolve hardware names
-e, --extend display other/more information
-p, --programs display PID/Program name for sockets
-c, --continuous continuous listing
-l, --listening display listening server sockets
-o, --timers display timers
-F, --fib display Forwarding Information Base (default)
-C, --cache display routing cache instead of FIB
<Socket>={-t|--tcp} {-u|--udp} {-w|--raw} {-x|--unix} --ax25 --ipx --netrom
<AF>=Use \'-6|-4\' or \'-A <af>\' or \'--<af>\'; default: inet
List of possible address families (which support routing):
inet (DARPA Internet) inet6 (IPv6) ax25 (AMPR AX.25)
netrom (AMPR NET/ROM) ipx (Novell IPX) ddp (Appletalk DDP)
x25 (CCITT X.25)
""")
def do_netstat_route(self):
self.write("""Kernel IP routing table
Destination Gateway Genmask Flags MSS Window irtt Iface\n""")
if self.show_numeric:
default = "default"
lgateway = "0.0.0.0"
else:
default = "0.0.0.0"
lgateway = "*"
destination = self.protocol.kippoIP.rsplit('.', 1)[0] + ".0"
gateway = self.protocol.kippoIP.rsplit('.', 1)[0] + ".1"
l1 = "%s%s0.0.0.0 UG 0 0 0 eth0" % \
('{:<16}'.format(default),
'{:<16}'.format(gateway))
l2 = "%s%s2192.168.127.12 U 0 0 0 eth0" % \
('{:<16}'.format(destination),
'{:<16}'.format(lgateway))
self.write('{0}\n'.format(l1))
self.write('{0}\n'.format(l2))
def do_netstat_normal(self):
self.write("""Active Internet connections (w/o servers)
Proto Recv-Q Send-Q Local Address Foreign Address State\n""")
s_name = self.protocol.hostname
c_port = str(self.protocol.realClientPort)
if self.show_numeric:
s_port = "22"
c_name = str(self.protocol.clientIP)
s_name = str(self.protocol.kippoIP)
else:
s_port = "ssh"
try:
c_name = socket.gethostbyaddr(self.protocol.clientIP)[0][:17]
except:
c_name = self.protocol.clientIP
if self.show_listen or self.show_all:
self.write("tcp 0 0 *:ssh *:* LISTEN\n")
if not self.show_listen or self.show_all:
l = 'tcp 0 308 %s:%s%s%s:%s%s%s' % \
(s_name, s_port, " " * (24 - len(s_name + s_port) - 1),
c_name, c_port, " " * (24 - len(c_name + c_port) - 1),
"ESTABLISHED")
self.write('{0}\n'.format(l))
if self.show_listen or self.show_all:
self.write("tcp6 0 0 [::]:ssh [::]:* LISTEN\n")
self.write("""Active UNIX domain sockets (only servers)
Proto RefCnt Flags Type State I-Node Path\n""")
if self.show_listen:
self.write("""unix 2 [ ACC ] STREAM LISTENING 8969 /var/run/acpid.socket
unix 2 [ ACC ] STREAM LISTENING 6807 @/com/ubuntu/upstart
unix 2 [ ACC ] STREAM LISTENING 7299 /var/run/dbus/system_bus_socket
unix 2 [ ACC ] SEQPACKET LISTENING 7159 /run/udev/control\n""")
elif self.show_all:
self.write("""unix 2 [ ACC ] STREAM LISTENING 8969 /var/run/acpid.socket
unix 4 [ ] DGRAM 7445 /dev/log
unix 2 [ ACC ] STREAM LISTENING 6807 @/com/ubuntu/upstart
unix 2 [ ACC ] STREAM LISTENING 7299 /var/run/dbus/system_bus_socket
unix 2 [ ACC ] SEQPACKET LISTENING 7159 /run/udev/control
unix 3 [ ] STREAM CONNECTED 7323
unix 3 [ ] STREAM CONNECTED 7348 /var/run/dbus/system_bus_socket
unix 3 [ ] STREAM CONNECTED 7330
unix 2 [ ] DGRAM 8966
unix 3 [ ] STREAM CONNECTED 7424 /var/run/dbus/system_bus_socket
unix 3 [ ] STREAM CONNECTED 7140
unix 3 [ ] STREAM CONNECTED 7145 @/com/ubuntu/upstart
unix 3 [ ] DGRAM 7199
unix 3 [ ] STREAM CONNECTED 7347
unix 3 [ ] STREAM CONNECTED 8594
unix 3 [ ] STREAM CONNECTED 7331
unix 3 [ ] STREAM CONNECTED 7364 @/com/ubuntu/upstart
unix 3 [ ] STREAM CONNECTED 7423
unix 3 [ ] DGRAM 7198
unix 2 [ ] DGRAM 9570
unix 3 [ ] STREAM CONNECTED 8619 @/com/ubuntu/upstart\n""")
else:
self.write("""unix 4 [ ] DGRAM 7445 /dev/log
unix 3 [ ] STREAM CONNECTED 7323
unix 3 [ ] STREAM CONNECTED 7348 /var/run/dbus/system_bus_socket
unix 3 [ ] STREAM CONNECTED 7330
unix 2 [ ] DGRAM 8966
unix 3 [ ] STREAM CONNECTED 7424 /var/run/dbus/system_bus_socket
unix 3 [ ] STREAM CONNECTED 7140
unix 3 [ ] STREAM CONNECTED 7145 @/com/ubuntu/upstart
unix 3 [ ] DGRAM 7199
unix 3 [ ] STREAM CONNECTED 7347
unix 3 [ ] STREAM CONNECTED 8594
unix 3 [ ] STREAM CONNECTED 7331
unix 3 [ ] STREAM CONNECTED 7364 @/com/ubuntu/upstart
unix 3 [ ] STREAM CONNECTED 7423
unix 3 [ ] DGRAM 7198
unix 2 [ ] DGRAM 9570
unix 3 [ ] STREAM CONNECTED 8619 @/com/ubuntu/upstart\n""")
def call(self):
self.show_all = False
self.show_numeric = False
self.show_listen = False
func = self.do_netstat_normal
for x in self.args:
if x.startswith('-') and x.count('a'):
self.show_all = True
if x.startswith('-') and x.count('n'):
self.show_numeric = True
if x.startswith('-') and x.count('l'):
self.show_listen = True
if x.startswith('-') and x.count('r'):
func = self.do_netstat_route
if x.startswith('-') and x.count('h'):
func = self.show_help
if x.startswith('-') and x.count('V'):
func = self.show_version
func()
# Definitions
commands['/bin/netstat'] = command_netstat
|
[
"socket.gethostbyaddr"
] |
[((3832, 3876), 'socket.gethostbyaddr', 'socket.gethostbyaddr', (['self.protocol.clientIP'], {}), '(self.protocol.clientIP)\n', (3852, 3876), False, 'import socket\n')]
|
from dataclasses import dataclass
from functools import partial
from logging import Logger
from flo.lamb import FunctionOrLambda, as_fcn, Function, R, UNASSIGNED
from typing import *
R1 = TypeVar('R1')
@dataclass(init=False)
class Attempt(Generic[R]):
_result: R
_exception: Exception
def __init__(self, *, result: R = UNASSIGNED, exception: Exception = UNASSIGNED):
self._result = result
self._exception = exception
def result(self) -> R:
if self._result is UNASSIGNED:
raise self._exception
return self._result
def exception(self) -> Optional[Exception]:
if self._exception is UNASSIGNED:
return None
return self._exception
def succeeded(self) -> bool:
return self._result is not UNASSIGNED
def failed(self) -> bool:
return self._exception is not UNASSIGNED
def and_then(self, fcn: FunctionOrLambda, **kwargs) -> 'Attempt[R1]':
if self.succeeded():
return try_(fcn, self.result(), **kwargs)
else:
return self
def or_else(self, default: R = None, log: Logger = None) -> R:
if self.succeeded():
return self.result()
if log is not None:
log.exception(str(self._exception), exc_info=self._exception)
return default
def try_(fcn: FunctionOrLambda, *args, **kwargs) -> Attempt[R]:
f = as_fcn(fcn)
try:
return Attempt(result=f(*args, **kwargs))
except Exception as e:
return Attempt(exception=e)
def as_try(fcn: FunctionOrLambda, *args, **kwargs) -> Function:
return partial(try_, fcn, *args, **kwargs)
|
[
"flo.lamb.as_fcn",
"functools.partial",
"dataclasses.dataclass"
] |
[((207, 228), 'dataclasses.dataclass', 'dataclass', ([], {'init': '(False)'}), '(init=False)\n', (216, 228), False, 'from dataclasses import dataclass\n'), ((1410, 1421), 'flo.lamb.as_fcn', 'as_fcn', (['fcn'], {}), '(fcn)\n', (1416, 1421), False, 'from flo.lamb import FunctionOrLambda, as_fcn, Function, R, UNASSIGNED\n'), ((1621, 1656), 'functools.partial', 'partial', (['try_', 'fcn', '*args'], {}), '(try_, fcn, *args, **kwargs)\n', (1628, 1656), False, 'from functools import partial\n')]
|
# Copyright 2019, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from celery import signals
from oteltrace import Pin, config
from oteltrace.pin import _OTEL_PIN_NAME
from oteltrace.ext import AppTypes
from .constants import APP
from .signals import (
trace_prerun,
trace_postrun,
trace_before_publish,
trace_after_publish,
trace_failure,
trace_retry,
)
def patch_app(app, pin=None):
"""Attach the Pin class to the application and connect
our handlers to Celery signals.
"""
if getattr(app, '__opentelemetry_patch', False):
return
setattr(app, '__opentelemetry_patch', True)
# attach the PIN object
pin = pin or Pin(
service=config.celery['worker_service_name'],
app=APP,
app_type=AppTypes.worker,
_config=config.celery,
)
pin.onto(app)
# connect to the Signal framework
signals.task_prerun.connect(trace_prerun)
signals.task_postrun.connect(trace_postrun)
signals.before_task_publish.connect(trace_before_publish)
signals.after_task_publish.connect(trace_after_publish)
signals.task_failure.connect(trace_failure)
signals.task_retry.connect(trace_retry)
return app
def unpatch_app(app):
"""Remove the Pin instance from the application and disconnect
our handlers from Celery signal framework.
"""
if not getattr(app, '__opentelemetry_patch', False):
return
setattr(app, '__opentelemetry_patch', False)
pin = Pin.get_from(app)
if pin is not None:
delattr(app, _OTEL_PIN_NAME)
signals.task_prerun.disconnect(trace_prerun)
signals.task_postrun.disconnect(trace_postrun)
signals.before_task_publish.disconnect(trace_before_publish)
signals.after_task_publish.disconnect(trace_after_publish)
signals.task_failure.disconnect(trace_failure)
signals.task_retry.disconnect(trace_retry)
|
[
"celery.signals.after_task_publish.connect",
"celery.signals.task_retry.connect",
"oteltrace.Pin.get_from",
"celery.signals.task_prerun.disconnect",
"celery.signals.task_failure.connect",
"celery.signals.before_task_publish.connect",
"celery.signals.task_prerun.connect",
"celery.signals.task_postrun.disconnect",
"celery.signals.after_task_publish.disconnect",
"celery.signals.before_task_publish.disconnect",
"celery.signals.task_failure.disconnect",
"oteltrace.Pin",
"celery.signals.task_postrun.connect",
"celery.signals.task_retry.disconnect"
] |
[((1406, 1447), 'celery.signals.task_prerun.connect', 'signals.task_prerun.connect', (['trace_prerun'], {}), '(trace_prerun)\n', (1433, 1447), False, 'from celery import signals\n'), ((1452, 1495), 'celery.signals.task_postrun.connect', 'signals.task_postrun.connect', (['trace_postrun'], {}), '(trace_postrun)\n', (1480, 1495), False, 'from celery import signals\n'), ((1500, 1557), 'celery.signals.before_task_publish.connect', 'signals.before_task_publish.connect', (['trace_before_publish'], {}), '(trace_before_publish)\n', (1535, 1557), False, 'from celery import signals\n'), ((1562, 1617), 'celery.signals.after_task_publish.connect', 'signals.after_task_publish.connect', (['trace_after_publish'], {}), '(trace_after_publish)\n', (1596, 1617), False, 'from celery import signals\n'), ((1622, 1665), 'celery.signals.task_failure.connect', 'signals.task_failure.connect', (['trace_failure'], {}), '(trace_failure)\n', (1650, 1665), False, 'from celery import signals\n'), ((1670, 1709), 'celery.signals.task_retry.connect', 'signals.task_retry.connect', (['trace_retry'], {}), '(trace_retry)\n', (1696, 1709), False, 'from celery import signals\n'), ((2003, 2020), 'oteltrace.Pin.get_from', 'Pin.get_from', (['app'], {}), '(app)\n', (2015, 2020), False, 'from oteltrace import Pin, config\n'), ((2087, 2131), 'celery.signals.task_prerun.disconnect', 'signals.task_prerun.disconnect', (['trace_prerun'], {}), '(trace_prerun)\n', (2117, 2131), False, 'from celery import signals\n'), ((2136, 2182), 'celery.signals.task_postrun.disconnect', 'signals.task_postrun.disconnect', (['trace_postrun'], {}), '(trace_postrun)\n', (2167, 2182), False, 'from celery import signals\n'), ((2187, 2247), 'celery.signals.before_task_publish.disconnect', 'signals.before_task_publish.disconnect', (['trace_before_publish'], {}), '(trace_before_publish)\n', (2225, 2247), False, 'from celery import signals\n'), ((2252, 2310), 'celery.signals.after_task_publish.disconnect', 'signals.after_task_publish.disconnect', (['trace_after_publish'], {}), '(trace_after_publish)\n', (2289, 2310), False, 'from celery import signals\n'), ((2315, 2361), 'celery.signals.task_failure.disconnect', 'signals.task_failure.disconnect', (['trace_failure'], {}), '(trace_failure)\n', (2346, 2361), False, 'from celery import signals\n'), ((2366, 2408), 'celery.signals.task_retry.disconnect', 'signals.task_retry.disconnect', (['trace_retry'], {}), '(trace_retry)\n', (2395, 2408), False, 'from celery import signals\n'), ((1199, 1311), 'oteltrace.Pin', 'Pin', ([], {'service': "config.celery['worker_service_name']", 'app': 'APP', 'app_type': 'AppTypes.worker', '_config': 'config.celery'}), "(service=config.celery['worker_service_name'], app=APP, app_type=\n AppTypes.worker, _config=config.celery)\n", (1202, 1311), False, 'from oteltrace import Pin, config\n')]
|
# test_joiner.py
"""Unit tests for lta/joiner.py."""
from lta.joiner import join_smart, join_smart_url
def test_join_smart_empty():
"""Test join_smart functionality."""
assert join_smart([]) == "."
def test_join_smart_many_relative():
"""Test join_smart functionality."""
assert join_smart(["data", "exp", "IceCube"]) == "data/exp/IceCube"
def test_join_smart_many_absolute():
"""Test join_smart functionality."""
assert join_smart(["/data", "/exp", "/IceCube"]) == "/data/exp/IceCube"
def test_join_smart_many_absolute_trailing_slashes():
"""Test join_smart functionality."""
assert join_smart(["/data/", "/exp/", "/IceCube/"]) == "/data/exp/IceCube/"
def test_join_smart_url():
"""Test join_smart functionality."""
CORRECT = "gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/mnt/lfss/jade-lta/bundler_out/fdd3c3865d1011eb97bb6224ddddaab7.zip"
assert join_smart_url(["gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/",
"/mnt/lfss/jade-lta/bundler_out/fdd3c3865d1011eb97bb6224ddddaab7.zip"]) == CORRECT
def test_join_smart_url_with_path_and_basename():
"""Test join_smart functionality."""
CORRECT = "gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/data/exp/IceCube/2015/filtered/level2/0320/fdd3c3865d1011eb97bb6224ddddaab7.zip"
assert join_smart_url(["gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/",
"/data/exp/IceCube/2015/filtered/level2/0320",
"fdd3c3865d1011eb97bb6224ddddaab7.zip"]) == CORRECT
def test_join_smart_desy_bundle_path():
"""Test join_smart functionality."""
CORRECT = "/pnfs/ifh.de/acs/icecube/archive/data/exp/IceCube/2015/filtered/level2/0320/604b6c80659c11eb8ad66224ddddaab7.zip"
assert join_smart(["/pnfs/ifh.de/acs/icecube/archive",
"/data/exp/IceCube/2015/filtered/level2/0320",
"604b6c80659c11eb8ad66224ddddaab7.zip"]) == CORRECT
assert join_smart(["/pnfs/ifh.de/acs/icecube/archive/",
"/data/exp/IceCube/2015/filtered/level2/0320",
"604b6c80659c11eb8ad66224ddddaab7.zip"]) == CORRECT
assert join_smart(["/pnfs/ifh.de/acs/icecube/archive",
"/data/exp/IceCube/2015/filtered/level2/0320/",
"604b6c80659c11eb8ad66224ddddaab7.zip"]) == CORRECT
assert join_smart(["/pnfs/ifh.de/acs/icecube/archive/",
"/data/exp/IceCube/2015/filtered/level2/0320/",
"604b6c80659c11eb8ad66224ddddaab7.zip"]) == CORRECT
assert join_smart(["/pnfs/ifh.de/acs/icecube/archive",
"/data/exp/IceCube/2015/filtered/level2/0320",
"/604b6c80659c11eb8ad66224ddddaab7.zip"]) == CORRECT
|
[
"lta.joiner.join_smart_url",
"lta.joiner.join_smart"
] |
[((186, 200), 'lta.joiner.join_smart', 'join_smart', (['[]'], {}), '([])\n', (196, 200), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((298, 336), 'lta.joiner.join_smart', 'join_smart', (["['data', 'exp', 'IceCube']"], {}), "(['data', 'exp', 'IceCube'])\n", (308, 336), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((449, 490), 'lta.joiner.join_smart', 'join_smart', (["['/data', '/exp', '/IceCube']"], {}), "(['/data', '/exp', '/IceCube'])\n", (459, 490), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((621, 665), 'lta.joiner.join_smart', 'join_smart', (["['/data/', '/exp/', '/IceCube/']"], {}), "(['/data/', '/exp/', '/IceCube/'])\n", (631, 665), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((923, 1093), 'lta.joiner.join_smart_url', 'join_smart_url', (["['gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/',\n '/mnt/lfss/jade-lta/bundler_out/fdd3c3865d1011eb97bb6224ddddaab7.zip']"], {}), "([\n 'gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/',\n '/mnt/lfss/jade-lta/bundler_out/fdd3c3865d1011eb97bb6224ddddaab7.zip'])\n", (937, 1093), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((1392, 1582), 'lta.joiner.join_smart_url', 'join_smart_url', (["['gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n 'fdd3c3865d1011eb97bb6224ddddaab7.zip']"], {}), "([\n 'gsiftp://gridftp.zeuthen.desy.de:2811/pnfs/ifh.de/acs/icecube/archive/',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n 'fdd3c3865d1011eb97bb6224ddddaab7.zip'])\n", (1406, 1582), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((1857, 2000), 'lta.joiner.join_smart', 'join_smart', (["['/pnfs/ifh.de/acs/icecube/archive',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n '604b6c80659c11eb8ad66224ddddaab7.zip']"], {}), "(['/pnfs/ifh.de/acs/icecube/archive',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n '604b6c80659c11eb8ad66224ddddaab7.zip'])\n", (1867, 2000), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((2061, 2205), 'lta.joiner.join_smart', 'join_smart', (["['/pnfs/ifh.de/acs/icecube/archive/',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n '604b6c80659c11eb8ad66224ddddaab7.zip']"], {}), "(['/pnfs/ifh.de/acs/icecube/archive/',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n '604b6c80659c11eb8ad66224ddddaab7.zip'])\n", (2071, 2205), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((2266, 2410), 'lta.joiner.join_smart', 'join_smart', (["['/pnfs/ifh.de/acs/icecube/archive',\n '/data/exp/IceCube/2015/filtered/level2/0320/',\n '604b6c80659c11eb8ad66224ddddaab7.zip']"], {}), "(['/pnfs/ifh.de/acs/icecube/archive',\n '/data/exp/IceCube/2015/filtered/level2/0320/',\n '604b6c80659c11eb8ad66224ddddaab7.zip'])\n", (2276, 2410), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((2471, 2616), 'lta.joiner.join_smart', 'join_smart', (["['/pnfs/ifh.de/acs/icecube/archive/',\n '/data/exp/IceCube/2015/filtered/level2/0320/',\n '604b6c80659c11eb8ad66224ddddaab7.zip']"], {}), "(['/pnfs/ifh.de/acs/icecube/archive/',\n '/data/exp/IceCube/2015/filtered/level2/0320/',\n '604b6c80659c11eb8ad66224ddddaab7.zip'])\n", (2481, 2616), False, 'from lta.joiner import join_smart, join_smart_url\n'), ((2677, 2821), 'lta.joiner.join_smart', 'join_smart', (["['/pnfs/ifh.de/acs/icecube/archive',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n '/604b6c80659c11eb8ad66224ddddaab7.zip']"], {}), "(['/pnfs/ifh.de/acs/icecube/archive',\n '/data/exp/IceCube/2015/filtered/level2/0320',\n '/604b6c80659c11eb8ad66224ddddaab7.zip'])\n", (2687, 2821), False, 'from lta.joiner import join_smart, join_smart_url\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import itertools
import json
import logging
import os
import subprocess
import sys
import tempfile
import zipfile
CACHE_DIR = "buck-cache"
class CacheEntry(object):
pass
def get_cache_entry(path):
with zipfile.ZipFile(path) as f:
entry_map = {os.path.basename(n): n for n in f.namelist()}
entry = CacheEntry()
entry.target = f.read(entry_map["TARGET"]).strip()
entry.rule_key = f.read(entry_map["RULE_KEY"]).strip()
entry.deps = json.loads(f.read(entry_map["DEPS"]))
entry.path = path
return entry
def get_cache_inventory():
inventory = {}
for item in os.listdir(CACHE_DIR):
entry = get_cache_entry(os.path.join(CACHE_DIR, item))
inventory[entry.target] = entry
return inventory
def get_missing_cache_entries(inventory):
"""
Find and return all entries missing in the cache.
"""
missing_entries = {}
for entry in inventory.itervalues():
if not os.path.exists(entry.path):
missing_entries[entry.target] = entry
return missing_entries
def clear_cache():
subprocess.check_call(["rm", "-rf", CACHE_DIR])
def clear_output():
subprocess.check_call(["rm", "-rf", "buck-out"])
def run_buck(buck, *args):
logging.info("Running {} {}".format(buck, " ".join(args)))
# Always create a temp file, in case we need to serialize the
# arguments to it.
with tempfile.NamedTemporaryFile() as f:
# Point cache to a known location.
args.append("--config")
args.append("cache.dir=" + CACHE_DIR)
# If the command would be too long, put the args into a file and
# execute that.
if len(args) > 30:
for arg in args:
f.write(arg)
f.write(os.linesep)
f.flush()
args = ["@" + f.name]
return subprocess.check_output([buck] + list(args))
def preorder_traversal(roots, deps, callback):
"""
Execute the given callback during a preorder traversal of the graph.
"""
# Keep track of all the nodes processed.
seen = set()
def traverse(node, callback, chain):
# Make sure we only visit nodes once.
if node in seen:
return
seen.add(node)
# Run the callback with the current node and the chain of parent nodes we
# traversed to find it.
callback(node, chain)
# Recurse on depednencies, making sure to update the visiter chain.
for dep in deps[node]:
traverse(dep, callback, chain=chain + [node])
# Traverse starting from all the roots.
for root in roots:
traverse(root, callback, [])
def build(buck, targets):
"""
Verify that each of the actions the run when building the given targets
run correctly using a top-down build.
"""
# Now run a build to populate the cache.
logging.info("Running a build to populate the cache")
run_buck(buck, "build", *targets)
# Find all targets reachable via the UI.
out = run_buck(buck, "audit", "dependencies", "--transitive", *targets)
ui_targets = set(out.splitlines())
ui_targets.update(targets)
# Grab an inventory of the cache and use it to form a dependency map.
cache_inventory = get_cache_inventory()
dependencies = {n.target: n.deps for n in cache_inventory.itervalues()}
# Keep track of all the processed nodes so we can print progress info.
processed = set()
# The callback to run for each build rule.
def handle(current, chain):
logging.info(
"Processing {} ({}/{})".format(
current, len(processed), len(dependencies.keys())
)
)
processed.add(current)
# Empty the previous builds output.
logging.info("Removing output from previous build")
clear_output()
# Remove the cache entry for this target.
entry = cache_inventory[current]
os.remove(entry.path)
logging.info(" removed {} => {}".format(current, entry.path))
# Now run the build using the closest UI visible ancestor target.
logging.info("Running the build to check " + current)
for node in itertools.chain([current], reversed(chain)):
if node in ui_targets:
run_buck(buck, "build", "--just-build", current, node)
break
else:
assert False, "couldn't find target in UI: " + node
# We should *always* end with a full cache.
logging.info("Verifying cache...")
missing = get_missing_cache_entries(cache_inventory)
assert len(missing) == 0, "\n".join(sorted(missing.keys()))
preorder_traversal(targets, dependencies, handle)
def test(buck, targets):
"""
Test that we can run tests when pulling from the cache.
"""
# Find all test targets.
test_targets = set()
out = run_buck(buck, "targets", "--json", *targets)
for info in json.loads(out):
if info["buck.type"].endswith("_test"):
test_targets.add("//" + info["buck.base_path"] + ":" + info["name"])
if not test_targets:
raise Exception("no test targets")
# Now run a build to populate the cache.
logging.info("Running a build to populate the cache")
run_buck(buck, "build", *test_targets)
# Empty the build output.
logging.info("Removing output from build")
clear_output()
# Now run the test
run_buck(buck, "test", *test_targets)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("--buck", default="buck")
parser.add_argument("command", choices=("build", "test"))
parser.add_argument("targets", metavar="target", nargs="+")
args = parser.parse_args(argv[1:])
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s %(message)s",
datefmt="%m/%d/%Y %I:%M:%S %p",
)
# Resolve any aliases in the top-level targets.
out = run_buck(args.buck, "targets", *args.targets)
targets = set(out.splitlines())
# Clear the cache and output directories to start with a clean slate.
logging.info("Clearing output and cache")
run_buck(args.buck, "clean")
clear_output()
clear_cache()
# Run the subcommand
if args.command == "build":
build(args.buck, targets)
elif args.command == "test":
test(args.buck, targets)
else:
raise Exception("unknown command: " + args.command)
sys.exit(main(sys.argv))
|
[
"tempfile.NamedTemporaryFile",
"os.remove",
"zipfile.ZipFile",
"argparse.ArgumentParser",
"json.loads",
"logging.basicConfig",
"os.path.basename",
"os.path.exists",
"logging.info",
"os.path.join",
"os.listdir",
"subprocess.check_call"
] |
[((1245, 1266), 'os.listdir', 'os.listdir', (['CACHE_DIR'], {}), '(CACHE_DIR)\n', (1255, 1266), False, 'import os\n'), ((1720, 1767), 'subprocess.check_call', 'subprocess.check_call', (["['rm', '-rf', CACHE_DIR]"], {}), "(['rm', '-rf', CACHE_DIR])\n", (1741, 1767), False, 'import subprocess\n'), ((1794, 1842), 'subprocess.check_call', 'subprocess.check_call', (["['rm', '-rf', 'buck-out']"], {}), "(['rm', '-rf', 'buck-out'])\n", (1815, 1842), False, 'import subprocess\n'), ((3513, 3566), 'logging.info', 'logging.info', (['"""Running a build to populate the cache"""'], {}), "('Running a build to populate the cache')\n", (3525, 3566), False, 'import logging\n'), ((5596, 5611), 'json.loads', 'json.loads', (['out'], {}), '(out)\n', (5606, 5611), False, 'import json\n'), ((5860, 5913), 'logging.info', 'logging.info', (['"""Running a build to populate the cache"""'], {}), "('Running a build to populate the cache')\n", (5872, 5913), False, 'import logging\n'), ((5992, 6034), 'logging.info', 'logging.info', (['"""Removing output from build"""'], {}), "('Removing output from build')\n", (6004, 6034), False, 'import logging\n'), ((6151, 6176), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6174, 6176), False, 'import argparse\n'), ((6397, 6506), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(message)s"""', 'datefmt': '"""%m/%d/%Y %I:%M:%S %p"""'}), "(level=logging.INFO, format='%(asctime)s %(message)s',\n datefmt='%m/%d/%Y %I:%M:%S %p')\n", (6416, 6506), False, 'import logging\n'), ((6758, 6799), 'logging.info', 'logging.info', (['"""Clearing output and cache"""'], {}), "('Clearing output and cache')\n", (6770, 6799), False, 'import logging\n'), ((829, 850), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (844, 850), False, 'import zipfile\n'), ((2034, 2063), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2061, 2063), False, 'import tempfile\n'), ((4410, 4461), 'logging.info', 'logging.info', (['"""Removing output from previous build"""'], {}), "('Removing output from previous build')\n", (4422, 4461), False, 'import logging\n'), ((4585, 4606), 'os.remove', 'os.remove', (['entry.path'], {}), '(entry.path)\n', (4594, 4606), False, 'import os\n'), ((4761, 4814), 'logging.info', 'logging.info', (["('Running the build to check ' + current)"], {}), "('Running the build to check ' + current)\n", (4773, 4814), False, 'import logging\n'), ((5147, 5181), 'logging.info', 'logging.info', (['"""Verifying cache..."""'], {}), "('Verifying cache...')\n", (5159, 5181), False, 'import logging\n'), ((878, 897), 'os.path.basename', 'os.path.basename', (['n'], {}), '(n)\n', (894, 897), False, 'import os\n'), ((1300, 1329), 'os.path.join', 'os.path.join', (['CACHE_DIR', 'item'], {}), '(CACHE_DIR, item)\n', (1312, 1329), False, 'import os\n'), ((1589, 1615), 'os.path.exists', 'os.path.exists', (['entry.path'], {}), '(entry.path)\n', (1603, 1615), False, 'import os\n')]
|
"""
Misc Utility functions
"""
import os
import logging
import datetime
import numpy as np
from collections import OrderedDict
def recursive_glob(rootdir=".", suffix=""):
"""Performs recursive glob with given suffix and rootdir
:param rootdir is the root directory
:param suffix is the suffix to be searched
"""
return [
os.path.join(looproot, filename)
for looproot, _, filenames in os.walk(rootdir)
for filename in filenames
if filename.endswith(suffix)
]
def alpha_blend(input_image, segmentation_mask, alpha=0.5):
"""Alpha Blending utility to overlay RGB masks on RBG images
:param input_image is a np.ndarray with 3 channels
:param segmentation_mask is a np.ndarray with 3 channels
:param alpha is a float value
"""
blended = np.zeros(input_image.size, dtype=np.float32)
blended = input_image * alpha + segmentation_mask * (1 - alpha)
return blended
def convert_state_dict(state_dict):
"""Converts a state dict saved from a dataParallel module to normal
module state_dict inplace
:param state_dict is the loaded DataParallel model_state
"""
if not next(iter(state_dict)).startswith("module."):
return state_dict
new_state_dict = OrderedDict()
for k, v in state_dict.items():
name = k[7:] # remove `module.`
new_state_dict[name] = v
return new_state_dict
def get_logger(logdir):
logger = logging.getLogger('ptsemseg')
ts = str(datetime.datetime.now()).split('.')[0].replace(" ", "_")
ts = ts.replace(":", "_").replace("-","_")
file_path = os.path.join(logdir, 'run_{}.log'.format(ts))
hdlr = logging.FileHandler(file_path)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
return logger
|
[
"logging.FileHandler",
"os.walk",
"numpy.zeros",
"datetime.datetime.now",
"logging.Formatter",
"collections.OrderedDict",
"os.path.join",
"logging.getLogger"
] |
[((838, 882), 'numpy.zeros', 'np.zeros', (['input_image.size'], {'dtype': 'np.float32'}), '(input_image.size, dtype=np.float32)\n', (846, 882), True, 'import numpy as np\n'), ((1295, 1308), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1306, 1308), False, 'from collections import OrderedDict\n'), ((1484, 1513), 'logging.getLogger', 'logging.getLogger', (['"""ptsemseg"""'], {}), "('ptsemseg')\n", (1501, 1513), False, 'import logging\n'), ((1704, 1734), 'logging.FileHandler', 'logging.FileHandler', (['file_path'], {}), '(file_path)\n', (1723, 1734), False, 'import logging\n'), ((1751, 1809), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s %(message)s"""'], {}), "('%(asctime)s %(levelname)s %(message)s')\n", (1768, 1809), False, 'import logging\n'), ((360, 392), 'os.path.join', 'os.path.join', (['looproot', 'filename'], {}), '(looproot, filename)\n', (372, 392), False, 'import os\n'), ((431, 447), 'os.walk', 'os.walk', (['rootdir'], {}), '(rootdir)\n', (438, 447), False, 'import os\n'), ((1527, 1550), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1548, 1550), False, 'import datetime\n')]
|
import datetime
from unittest import mock
import boto3
import pytest
from freezegun import freeze_time
from moto import mock_s3
import tests.test_utils as test_utils
from great_expectations.core import ExpectationSuiteValidationResult
from great_expectations.core.expectation_validation_result import (
ExpectationSuiteValidationResult,
)
from great_expectations.data_context.store import ValidationsStore
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
ValidationResultIdentifier,
)
from great_expectations.util import gen_directory_tree_str
from tests.core.usage_statistics.util import (
usage_stats_exceptions_exist,
usage_stats_invalid_messages_exist,
)
@freeze_time("09/26/2019 13:42:41")
@mock_s3
@pytest.mark.filterwarnings(
"ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"
)
def test_ValidationsStore_with_TupleS3StoreBackend():
bucket = "test_validation_store_bucket"
prefix = "test/prefix"
# create a bucket in Moto's mock AWS environment
conn = boto3.resource("s3", region_name="us-east-1")
conn.create_bucket(Bucket=bucket)
# First, demonstrate that we pick up default configuration including from an S3TupleS3StoreBackend
my_store = ValidationsStore(
store_backend={
"class_name": "TupleS3StoreBackend",
"bucket": bucket,
"prefix": prefix,
}
)
with pytest.raises(TypeError):
my_store.get("not_a_ValidationResultIdentifier")
ns_1 = ValidationResultIdentifier(
expectation_suite_identifier=ExpectationSuiteIdentifier(
expectation_suite_name="asset.quarantine",
),
run_id="20191007T151224.1234Z_prod_100",
batch_identifier="batch_id",
)
my_store.set(ns_1, ExpectationSuiteValidationResult(success=True))
assert my_store.get(ns_1) == ExpectationSuiteValidationResult(
success=True, statistics={}, results=[]
)
ns_2 = ValidationResultIdentifier(
expectation_suite_identifier=ExpectationSuiteIdentifier(
expectation_suite_name="asset.quarantine",
),
run_id="20191007T151224.1234Z_prod_200",
batch_identifier="batch_id",
)
my_store.set(ns_2, ExpectationSuiteValidationResult(success=False))
assert my_store.get(ns_2) == ExpectationSuiteValidationResult(
success=False, statistics={}, results=[]
)
# Verify that internals are working as expected, including the default filepath
assert {
s3_object_info["Key"]
for s3_object_info in boto3.client("s3").list_objects_v2(
Bucket=bucket, Prefix=prefix
)["Contents"]
} == {
"test/prefix/.ge_store_backend_id",
"test/prefix/asset/quarantine/20191007T151224.1234Z_prod_100/20190926T134241.000000Z/batch_id.json",
"test/prefix/asset/quarantine/20191007T151224.1234Z_prod_200/20190926T134241.000000Z/batch_id.json",
}
print(my_store.list_keys())
assert set(my_store.list_keys()) == {
ns_1,
ns_2,
}
"""
What does this test and why?
A Store should be able to report it's store_backend_id
which is set when the StoreBackend is instantiated.
"""
# Check that store_backend_id exists can be read
assert my_store.store_backend_id is not None
# Check that store_backend_id is a valid UUID
assert test_utils.validate_uuid4(my_store.store_backend_id)
@freeze_time("09/26/2019 13:42:41")
def test_ValidationsStore_with_InMemoryStoreBackend():
my_store = ValidationsStore(
store_backend={
"module_name": "great_expectations.data_context.store",
"class_name": "InMemoryStoreBackend",
}
)
with pytest.raises(TypeError):
my_store.get("not_a_ValidationResultIdentifier")
ns_1 = ValidationResultIdentifier.from_tuple(
(
"a",
"b",
"c",
"quarantine",
datetime.datetime.now(datetime.timezone.utc),
"prod-100",
)
)
my_store.set(ns_1, ExpectationSuiteValidationResult(success=True))
assert my_store.get(ns_1) == ExpectationSuiteValidationResult(
success=True, statistics={}, results=[]
)
ns_2 = ValidationResultIdentifier.from_tuple(
(
"a",
"b",
"c",
"quarantine",
datetime.datetime.now(datetime.timezone.utc),
"prod-200",
)
)
my_store.set(ns_2, ExpectationSuiteValidationResult(success=False))
assert my_store.get(ns_2) == ExpectationSuiteValidationResult(
success=False, statistics={}, results=[]
)
assert set(my_store.list_keys()) == {
ns_1,
ns_2,
}
"""
What does this test and why?
A Store should be able to report it's store_backend_id
which is set when the StoreBackend is instantiated.
"""
# Check that store_backend_id exists can be read
assert my_store.store_backend_id is not None
# Check that store_backend_id is a valid UUID
assert test_utils.validate_uuid4(my_store.store_backend_id)
@freeze_time("09/26/2019 13:42:41")
@pytest.mark.filterwarnings(
"ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"
)
def test_ValidationsStore_with_TupleFileSystemStoreBackend(tmp_path_factory):
path = str(
tmp_path_factory.mktemp(
"test_ValidationResultStore_with_TupleFileSystemStoreBackend__dir"
)
)
project_path = str(tmp_path_factory.mktemp("my_dir"))
my_store = ValidationsStore(
store_backend={
"module_name": "great_expectations.data_context.store",
"class_name": "TupleFilesystemStoreBackend",
"base_directory": "my_store/",
},
runtime_environment={"root_directory": path},
)
with pytest.raises(TypeError):
my_store.get("not_a_ValidationResultIdentifier")
ns_1 = ValidationResultIdentifier(
expectation_suite_identifier=ExpectationSuiteIdentifier("asset.quarantine"),
run_id="prod-100",
batch_identifier="batch_id",
)
my_store.set(ns_1, ExpectationSuiteValidationResult(success=True))
assert my_store.get(ns_1) == ExpectationSuiteValidationResult(
success=True, statistics={}, results=[]
)
ns_2 = ValidationResultIdentifier.from_tuple(
(
"asset",
"quarantine",
"prod-20",
datetime.datetime.now(datetime.timezone.utc),
"batch_id",
)
)
my_store.set(ns_2, ExpectationSuiteValidationResult(success=False))
assert my_store.get(ns_2) == ExpectationSuiteValidationResult(
success=False, statistics={}, results=[]
)
print(my_store.list_keys())
assert set(my_store.list_keys()) == {
ns_1,
ns_2,
}
print(gen_directory_tree_str(path))
assert (
gen_directory_tree_str(path)
== """\
test_ValidationResultStore_with_TupleFileSystemStoreBackend__dir0/
my_store/
.ge_store_backend_id
asset/
quarantine/
prod-100/
20190926T134241.000000Z/
batch_id.json
prod-20/
20190926T134241.000000Z/
batch_id.json
"""
)
"""
What does this test and why?
A Store should be able to report it's store_backend_id
which is set when the StoreBackend is instantiated.
"""
# Check that store_backend_id exists can be read
assert my_store.store_backend_id is not None
# Check that store_backend_id is a valid UUID
assert test_utils.validate_uuid4(my_store.store_backend_id)
# Check that another store with the same configuration shares the same store_backend_id
my_store_duplicate = ValidationsStore(
store_backend={
"module_name": "great_expectations.data_context.store",
"class_name": "TupleFilesystemStoreBackend",
"base_directory": "my_store/",
},
runtime_environment={"root_directory": path},
)
assert my_store.store_backend_id == my_store_duplicate.store_backend_id
@pytest.mark.filterwarnings(
"ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"
)
def test_ValidationsStore_with_DatabaseStoreBackend(sa):
# Use sqlite so we don't require postgres for this test.
connection_kwargs = {"drivername": "sqlite"}
# First, demonstrate that we pick up default configuration
my_store = ValidationsStore(
store_backend={
"class_name": "DatabaseStoreBackend",
"credentials": connection_kwargs,
}
)
with pytest.raises(TypeError):
my_store.get("not_a_ValidationResultIdentifier")
ns_1 = ValidationResultIdentifier(
expectation_suite_identifier=ExpectationSuiteIdentifier(
expectation_suite_name="asset.quarantine",
),
run_id="20191007T151224.1234Z_prod_100",
batch_identifier="batch_id",
)
my_store.set(ns_1, ExpectationSuiteValidationResult(success=True))
assert my_store.get(ns_1) == ExpectationSuiteValidationResult(
success=True, statistics={}, results=[]
)
ns_2 = ValidationResultIdentifier(
expectation_suite_identifier=ExpectationSuiteIdentifier(
expectation_suite_name="asset.quarantine",
),
run_id="20191007T151224.1234Z_prod_200",
batch_identifier="batch_id",
)
my_store.set(ns_2, ExpectationSuiteValidationResult(success=False))
assert my_store.get(ns_2) == ExpectationSuiteValidationResult(
success=False, statistics={}, results=[]
)
assert set(my_store.list_keys()) == {
ns_1,
ns_2,
}
"""
What does this test and why?
A Store should be able to report it's store_backend_id
which is set when the StoreBackend is instantiated.
"""
# Check that store_backend_id exists can be read
assert my_store.store_backend_id is not None
# Check that store_backend_id is a valid UUID
assert test_utils.validate_uuid4(my_store.store_backend_id)
@mock.patch(
"great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"
)
@pytest.mark.filterwarnings(
"ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"
)
def test_instantiation_with_test_yaml_config(
mock_emit, caplog, empty_data_context_stats_enabled
):
empty_data_context_stats_enabled.test_yaml_config(
yaml_config="""
module_name: great_expectations.data_context.store.validations_store
class_name: ValidationsStore
store_backend:
class_name: TupleFilesystemStoreBackend
base_directory: uncommitted/validations/
"""
)
assert mock_emit.call_count == 1
# Substitute current anonymized name since it changes for each run
anonymized_name = mock_emit.call_args_list[0][0][0]["event_payload"][
"anonymized_name"
]
assert mock_emit.call_args_list == [
mock.call(
{
"event": "data_context.test_yaml_config",
"event_payload": {
"anonymized_name": anonymized_name,
"parent_class": "ValidationsStore",
"anonymized_store_backend": {
"parent_class": "TupleFilesystemStoreBackend"
},
},
"success": True,
}
),
]
# Confirm that logs do not contain any exceptions or invalid messages
assert not usage_stats_exceptions_exist(messages=caplog.messages)
assert not usage_stats_invalid_messages_exist(messages=caplog.messages)
|
[
"tests.test_utils.validate_uuid4",
"boto3.client",
"tests.core.usage_statistics.util.usage_stats_exceptions_exist",
"unittest.mock.patch",
"great_expectations.data_context.store.ValidationsStore",
"pytest.raises",
"boto3.resource",
"great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult",
"great_expectations.util.gen_directory_tree_str",
"tests.core.usage_statistics.util.usage_stats_invalid_messages_exist",
"great_expectations.data_context.types.resource_identifiers.ExpectationSuiteIdentifier",
"unittest.mock.call",
"pytest.mark.filterwarnings",
"freezegun.freeze_time",
"datetime.datetime.now"
] |
[((736, 770), 'freezegun.freeze_time', 'freeze_time', (['"""09/26/2019 13:42:41"""'], {}), "('09/26/2019 13:42:41')\n", (747, 770), False, 'from freezegun import freeze_time\n'), ((781, 936), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"""'], {}), "(\n 'ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers'\n )\n", (807, 936), False, 'import pytest\n'), ((3528, 3562), 'freezegun.freeze_time', 'freeze_time', (['"""09/26/2019 13:42:41"""'], {}), "('09/26/2019 13:42:41')\n", (3539, 3562), False, 'from freezegun import freeze_time\n'), ((5219, 5253), 'freezegun.freeze_time', 'freeze_time', (['"""09/26/2019 13:42:41"""'], {}), "('09/26/2019 13:42:41')\n", (5230, 5253), False, 'from freezegun import freeze_time\n'), ((5255, 5410), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"""'], {}), "(\n 'ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers'\n )\n", (5281, 5410), False, 'import pytest\n'), ((8338, 8493), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"""'], {}), "(\n 'ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers'\n )\n", (8364, 8493), False, 'import pytest\n'), ((10357, 10466), 'unittest.mock.patch', 'mock.patch', (['"""great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit"""'], {}), "(\n 'great_expectations.core.usage_statistics.usage_statistics.UsageStatisticsHandler.emit'\n )\n", (10367, 10466), False, 'from unittest import mock\n'), ((10464, 10619), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers"""'], {}), "(\n 'ignore:String run_ids are deprecated*:DeprecationWarning:great_expectations.data_context.types.resource_identifiers'\n )\n", (10490, 10619), False, 'import pytest\n'), ((1123, 1168), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {'region_name': '"""us-east-1"""'}), "('s3', region_name='us-east-1')\n", (1137, 1168), False, 'import boto3\n'), ((1326, 1435), 'great_expectations.data_context.store.ValidationsStore', 'ValidationsStore', ([], {'store_backend': "{'class_name': 'TupleS3StoreBackend', 'bucket': bucket, 'prefix': prefix}"}), "(store_backend={'class_name': 'TupleS3StoreBackend',\n 'bucket': bucket, 'prefix': prefix})\n", (1342, 1435), False, 'from great_expectations.data_context.store import ValidationsStore\n'), ((3472, 3524), 'tests.test_utils.validate_uuid4', 'test_utils.validate_uuid4', (['my_store.store_backend_id'], {}), '(my_store.store_backend_id)\n', (3497, 3524), True, 'import tests.test_utils as test_utils\n'), ((3633, 3767), 'great_expectations.data_context.store.ValidationsStore', 'ValidationsStore', ([], {'store_backend': "{'module_name': 'great_expectations.data_context.store', 'class_name':\n 'InMemoryStoreBackend'}"}), "(store_backend={'module_name':\n 'great_expectations.data_context.store', 'class_name':\n 'InMemoryStoreBackend'})\n", (3649, 3767), False, 'from great_expectations.data_context.store import ValidationsStore\n'), ((5163, 5215), 'tests.test_utils.validate_uuid4', 'test_utils.validate_uuid4', (['my_store.store_backend_id'], {}), '(my_store.store_backend_id)\n', (5188, 5215), True, 'import tests.test_utils as test_utils\n'), ((5703, 5925), 'great_expectations.data_context.store.ValidationsStore', 'ValidationsStore', ([], {'store_backend': "{'module_name': 'great_expectations.data_context.store', 'class_name':\n 'TupleFilesystemStoreBackend', 'base_directory': 'my_store/'}", 'runtime_environment': "{'root_directory': path}"}), "(store_backend={'module_name':\n 'great_expectations.data_context.store', 'class_name':\n 'TupleFilesystemStoreBackend', 'base_directory': 'my_store/'},\n runtime_environment={'root_directory': path})\n", (5719, 5925), False, 'from great_expectations.data_context.store import ValidationsStore\n'), ((7807, 7859), 'tests.test_utils.validate_uuid4', 'test_utils.validate_uuid4', (['my_store.store_backend_id'], {}), '(my_store.store_backend_id)\n', (7832, 7859), True, 'import tests.test_utils as test_utils\n'), ((7978, 8200), 'great_expectations.data_context.store.ValidationsStore', 'ValidationsStore', ([], {'store_backend': "{'module_name': 'great_expectations.data_context.store', 'class_name':\n 'TupleFilesystemStoreBackend', 'base_directory': 'my_store/'}", 'runtime_environment': "{'root_directory': path}"}), "(store_backend={'module_name':\n 'great_expectations.data_context.store', 'class_name':\n 'TupleFilesystemStoreBackend', 'base_directory': 'my_store/'},\n runtime_environment={'root_directory': path})\n", (7994, 8200), False, 'from great_expectations.data_context.store import ValidationsStore\n'), ((8736, 8844), 'great_expectations.data_context.store.ValidationsStore', 'ValidationsStore', ([], {'store_backend': "{'class_name': 'DatabaseStoreBackend', 'credentials': connection_kwargs}"}), "(store_backend={'class_name': 'DatabaseStoreBackend',\n 'credentials': connection_kwargs})\n", (8752, 8844), False, 'from great_expectations.data_context.store import ValidationsStore\n'), ((10301, 10353), 'tests.test_utils.validate_uuid4', 'test_utils.validate_uuid4', (['my_store.store_backend_id'], {}), '(my_store.store_backend_id)\n', (10326, 10353), True, 'import tests.test_utils as test_utils\n'), ((1503, 1527), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1516, 1527), False, 'import pytest\n'), ((1872, 1918), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)'}), '(success=True)\n', (1904, 1918), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((1953, 2026), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)', 'statistics': '{}', 'results': '[]'}), '(success=True, statistics={}, results=[])\n', (1985, 2026), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((2328, 2375), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)'}), '(success=False)\n', (2360, 2375), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((2410, 2484), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)', 'statistics': '{}', 'results': '[]'}), '(success=False, statistics={}, results=[])\n', (2442, 2484), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((3819, 3843), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (3832, 3843), False, 'import pytest\n'), ((4161, 4207), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)'}), '(success=True)\n', (4193, 4207), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((4242, 4315), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)', 'statistics': '{}', 'results': '[]'}), '(success=True, statistics={}, results=[])\n', (4274, 4315), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((4589, 4636), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)'}), '(success=False)\n', (4621, 4636), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((4671, 4745), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)', 'statistics': '{}', 'results': '[]'}), '(success=False, statistics={}, results=[])\n', (4703, 4745), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((5994, 6018), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6007, 6018), False, 'import pytest\n'), ((6295, 6341), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)'}), '(success=True)\n', (6327, 6341), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((6376, 6449), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)', 'statistics': '{}', 'results': '[]'}), '(success=True, statistics={}, results=[])\n', (6408, 6449), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((6716, 6763), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)'}), '(success=False)\n', (6748, 6763), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((6798, 6872), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)', 'statistics': '{}', 'results': '[]'}), '(success=False, statistics={}, results=[])\n', (6830, 6872), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((7007, 7035), 'great_expectations.util.gen_directory_tree_str', 'gen_directory_tree_str', (['path'], {}), '(path)\n', (7029, 7035), False, 'from great_expectations.util import gen_directory_tree_str\n'), ((7058, 7086), 'great_expectations.util.gen_directory_tree_str', 'gen_directory_tree_str', (['path'], {}), '(path)\n', (7080, 7086), False, 'from great_expectations.util import gen_directory_tree_str\n'), ((8900, 8924), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (8913, 8924), False, 'import pytest\n'), ((9269, 9315), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)'}), '(success=True)\n', (9301, 9315), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((9350, 9423), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(True)', 'statistics': '{}', 'results': '[]'}), '(success=True, statistics={}, results=[])\n', (9382, 9423), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((9725, 9772), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)'}), '(success=False)\n', (9757, 9772), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((9807, 9881), 'great_expectations.core.expectation_validation_result.ExpectationSuiteValidationResult', 'ExpectationSuiteValidationResult', ([], {'success': '(False)', 'statistics': '{}', 'results': '[]'}), '(success=False, statistics={}, results=[])\n', (9839, 9881), False, 'from great_expectations.core.expectation_validation_result import ExpectationSuiteValidationResult\n'), ((11821, 11875), 'tests.core.usage_statistics.util.usage_stats_exceptions_exist', 'usage_stats_exceptions_exist', ([], {'messages': 'caplog.messages'}), '(messages=caplog.messages)\n', (11849, 11875), False, 'from tests.core.usage_statistics.util import usage_stats_exceptions_exist, usage_stats_invalid_messages_exist\n'), ((11891, 11951), 'tests.core.usage_statistics.util.usage_stats_invalid_messages_exist', 'usage_stats_invalid_messages_exist', ([], {'messages': 'caplog.messages'}), '(messages=caplog.messages)\n', (11925, 11951), False, 'from tests.core.usage_statistics.util import usage_stats_exceptions_exist, usage_stats_invalid_messages_exist\n'), ((1663, 1732), 'great_expectations.data_context.types.resource_identifiers.ExpectationSuiteIdentifier', 'ExpectationSuiteIdentifier', ([], {'expectation_suite_name': '"""asset.quarantine"""'}), "(expectation_suite_name='asset.quarantine')\n", (1689, 1732), False, 'from great_expectations.data_context.types.resource_identifiers import ExpectationSuiteIdentifier, ValidationResultIdentifier\n'), ((2118, 2187), 'great_expectations.data_context.types.resource_identifiers.ExpectationSuiteIdentifier', 'ExpectationSuiteIdentifier', ([], {'expectation_suite_name': '"""asset.quarantine"""'}), "(expectation_suite_name='asset.quarantine')\n", (2144, 2187), False, 'from great_expectations.data_context.types.resource_identifiers import ExpectationSuiteIdentifier, ValidationResultIdentifier\n'), ((4052, 4096), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (4073, 4096), False, 'import datetime\n'), ((4480, 4524), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (4501, 4524), False, 'import datetime\n'), ((6154, 6200), 'great_expectations.data_context.types.resource_identifiers.ExpectationSuiteIdentifier', 'ExpectationSuiteIdentifier', (['"""asset.quarantine"""'], {}), "('asset.quarantine')\n", (6180, 6200), False, 'from great_expectations.data_context.types.resource_identifiers import ExpectationSuiteIdentifier, ValidationResultIdentifier\n'), ((6607, 6651), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (6628, 6651), False, 'import datetime\n'), ((9060, 9129), 'great_expectations.data_context.types.resource_identifiers.ExpectationSuiteIdentifier', 'ExpectationSuiteIdentifier', ([], {'expectation_suite_name': '"""asset.quarantine"""'}), "(expectation_suite_name='asset.quarantine')\n", (9086, 9129), False, 'from great_expectations.data_context.types.resource_identifiers import ExpectationSuiteIdentifier, ValidationResultIdentifier\n'), ((9515, 9584), 'great_expectations.data_context.types.resource_identifiers.ExpectationSuiteIdentifier', 'ExpectationSuiteIdentifier', ([], {'expectation_suite_name': '"""asset.quarantine"""'}), "(expectation_suite_name='asset.quarantine')\n", (9541, 9584), False, 'from great_expectations.data_context.types.resource_identifiers import ExpectationSuiteIdentifier, ValidationResultIdentifier\n'), ((11275, 11526), 'unittest.mock.call', 'mock.call', (["{'event': 'data_context.test_yaml_config', 'event_payload': {\n 'anonymized_name': anonymized_name, 'parent_class': 'ValidationsStore',\n 'anonymized_store_backend': {'parent_class':\n 'TupleFilesystemStoreBackend'}}, 'success': True}"], {}), "({'event': 'data_context.test_yaml_config', 'event_payload': {\n 'anonymized_name': anonymized_name, 'parent_class': 'ValidationsStore',\n 'anonymized_store_backend': {'parent_class':\n 'TupleFilesystemStoreBackend'}}, 'success': True})\n", (11284, 11526), False, 'from unittest import mock\n'), ((2657, 2675), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (2669, 2675), False, 'import boto3\n')]
|
# -*- coding: utf-8 -*-
import unittest
from lrucache_interview import DoublyLinkedList, DoublyLinkedListNode
class TestDoublyLinkedList(unittest.TestCase):
def test_remove_item_when_head(self):
llist = DoublyLinkedList()
first_node = llist.new_list_node(key="a", value=1234)
llist.set_head(first_node)
second_node = llist.new_list_node(key="b", value=1234)
llist.set_head(second_node)
# (b -> a -> None)
assert llist.head == second_node
assert llist.tail == first_node
second_node.remove()
# (a -> None)
assert llist.head == first_node
assert llist.tail == first_node
def test_remove_item_when_tail(self):
llist = DoublyLinkedList()
first_node = llist.new_list_node(key="a", value=1234)
llist.set_head(first_node)
second_node = llist.new_list_node(key="b", value=1234)
llist.set_head(second_node)
# (b -> a -> None)
assert llist.head == second_node
assert llist.tail == first_node
first_node.remove()
# (b -> None)
assert llist.head == second_node
assert llist.tail == second_node
def test_remove_item_when_middle(self):
llist = DoublyLinkedList()
tail_node = llist.new_list_node(key="a", value=1234)
llist.set_head(tail_node)
middle_node = llist.new_list_node(key="b", value=1234)
llist.set_head(middle_node)
head_node = llist.new_list_node(key="c", value=1234)
llist.set_head(head_node)
# (c -> b -> a -> None)
assert llist.head == head_node
assert llist.tail == tail_node
assert llist.head.next == middle_node
middle_node.remove()
# (c -> a -> None)
assert llist.head == head_node
assert llist.tail == tail_node
assert llist.head.next == tail_node
|
[
"lrucache_interview.DoublyLinkedList"
] |
[((219, 237), 'lrucache_interview.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (235, 237), False, 'from lrucache_interview import DoublyLinkedList, DoublyLinkedListNode\n'), ((741, 759), 'lrucache_interview.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (757, 759), False, 'from lrucache_interview import DoublyLinkedList, DoublyLinkedListNode\n'), ((1266, 1284), 'lrucache_interview.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (1282, 1284), False, 'from lrucache_interview import DoublyLinkedList, DoublyLinkedListNode\n')]
|
#! /usr/bin/env python3
# Copyright (c) 2020 Snoop Project <<EMAIL>>
"Самотестирование сети"
import speedtest
import sys
from colorama import Fore, Style, init
from rich.panel import Panel
from rich.style import Style as STL
from rich.console import Console
init(autoreset=True)
console2 = Console()
def nettest():
with console2.status("[cyan]Ожидайте, идёт самотестирование сети..."):
servers = []
threads = None
try:
s = speedtest.Speedtest()
s.get_servers(servers)
s.get_best_server()
s.download(threads=threads)
s.upload(threads=threads)
a = s.results.dict()
d = round(a.get("download") / 1_000_000, 2)
u = round(a.get("upload") / 1_000_000, 2)
p = round(a.get("ping"))
v4 = a.get("client")
# Скорость загрузки.
try:
if d < 3: d = f"Download: [bold red]{d}[/bold red] Мбит/с"
elif 3 <= d <= 5.5: d = f"Download: [yellow]{d}[/yellow] Мбит/с"
elif d > 5.5: d = f"Download: [bold green]{d}[/bold green] Мбит/с"
except:
d = f"Download: [bold red]Сбой[/bold red]"
# Скорость выгрузки.
try:
if u < 0.8: u = f"Upload: [bold red]{u}[/bold red] Мбит/с"
elif 0.8 <= u <= 1.5: u = f"Upload: [yellow]{u}[/yellow] Мбит/с"
elif u > 1.5: u = f"Upload: [bold green]{u}[/bold green] Мбит/с"
except:
u = f"Upload: [bold red]Сбой[/bold red]"
# Ping.
try:
if p >= 250: p = f"Ping: [bold red]{p}[/bold red] мс"
elif 60 <= p < 250: p = f"Ping: [yellow]{p}[/yellow] мс"
elif p < 60: p = f"Ping: [bold green]{p}[/bold green] мс"
except:
p = f"Ping: [bold red]Сбой[/bold red]"
# Результат.
console2.print(Panel.fit(f"{d}\n{u}\n{p}\n\nВаш ip: {v4.get('ip')}\nПровайдер: {v4.get('isp')}\nЛокация: {v4.get('country')}",
title="🌐 Тест сети", style=STL(color="cyan")))
console2.log("[cyan]--> завершен")
except Exception:
console2.print(f"[bold red]Нет сети?!\nТест будет пропущен...")
|
[
"colorama.init",
"speedtest.Speedtest",
"rich.style.Style",
"rich.console.Console"
] |
[((261, 281), 'colorama.init', 'init', ([], {'autoreset': '(True)'}), '(autoreset=True)\n', (265, 281), False, 'from colorama import Fore, Style, init\n'), ((293, 302), 'rich.console.Console', 'Console', ([], {}), '()\n', (300, 302), False, 'from rich.console import Console\n'), ((467, 488), 'speedtest.Speedtest', 'speedtest.Speedtest', ([], {}), '()\n', (486, 488), False, 'import speedtest\n'), ((2103, 2120), 'rich.style.Style', 'STL', ([], {'color': '"""cyan"""'}), "(color='cyan')\n", (2106, 2120), True, 'from rich.style import Style as STL\n')]
|
import network_2 as network
import link
import threading
from time import sleep
import sys
##configuration parameters
router_queue_size = 0 #0 means unlimited
simulation_time = 1 #give the network sufficient time to execute transfers
if __name__ == '__main__':
object_L = [] #keeps track of objects, so we can kill their threads at the end
#create network hosts
host_1 = network.Host('H1')
object_L.append(host_1)
host_2 = network.Host('H2')
object_L.append(host_2)
#create routers and cost tables for reaching neighbors
cost_D = {'H1': {0: 1}, 'RB': {1: 1}} # {neighbor: {interface: cost}}
router_a = network.Router(name='RA',
cost_D = cost_D,
max_queue_size=router_queue_size)
object_L.append(router_a)
cost_D = {'H2': {1: 3}, 'RA': {0: 1}} # {neighbor: {interface: cost}}
router_b = network.Router(name='RB',
cost_D = cost_D,
max_queue_size=router_queue_size)
object_L.append(router_b)
#create a Link Layer to keep track of links between network nodes
link_layer = link.LinkLayer()
object_L.append(link_layer)
#add all the links - need to reflect the connectivity in cost_D tables above
link_layer.add_link(link.Link(host_1, 0, router_a, 0))
link_layer.add_link(link.Link(router_a, 1, router_b, 0))
link_layer.add_link(link.Link(router_b, 1, host_2, 0))
#start all the objects
thread_L = []
for obj in object_L:
thread_L.append(threading.Thread(name=obj.__str__(), target=obj.run))
for t in thread_L:
t.start()
## compute routing tables
router_a.send_routes(1) #one update starts the routing process
sleep(simulation_time) #let the tables converge
# router_a.rt_tbl_D = router_b.rt_tbl_D
print("Converged routing tables")
print("Router A table: " + str(router_a.rt_tbl_D))
print("Router B table: " + str(router_b.rt_tbl_D))
for obj in object_L:
if str(type(obj)) == "<class 'network.Router'>":
obj.print_routes()
#send packet from host 1 to host 2
host_1.udt_send('H2', '0', 'MESSAGE_FROM_H1')
sleep(simulation_time)
host_2.udt_send('H1', '0', 'RETURN_FROM_H2')
sleep(simulation_time)
#join all threads
for o in object_L:
o.stop = True
for t in thread_L:
t.join()
print("All simulation threads joined")
|
[
"network_2.Host",
"time.sleep",
"link.Link",
"link.LinkLayer",
"network_2.Router"
] |
[((402, 420), 'network_2.Host', 'network.Host', (['"""H1"""'], {}), "('H1')\n", (414, 420), True, 'import network_2 as network\n'), ((464, 482), 'network_2.Host', 'network.Host', (['"""H2"""'], {}), "('H2')\n", (476, 482), True, 'import network_2 as network\n'), ((665, 739), 'network_2.Router', 'network.Router', ([], {'name': '"""RA"""', 'cost_D': 'cost_D', 'max_queue_size': 'router_queue_size'}), "(name='RA', cost_D=cost_D, max_queue_size=router_queue_size)\n", (679, 739), True, 'import network_2 as network\n'), ((928, 1002), 'network_2.Router', 'network.Router', ([], {'name': '"""RB"""', 'cost_D': 'cost_D', 'max_queue_size': 'router_queue_size'}), "(name='RB', cost_D=cost_D, max_queue_size=router_queue_size)\n", (942, 1002), True, 'import network_2 as network\n'), ((1189, 1205), 'link.LinkLayer', 'link.LinkLayer', ([], {}), '()\n', (1203, 1205), False, 'import link\n'), ((1812, 1834), 'time.sleep', 'sleep', (['simulation_time'], {}), '(simulation_time)\n', (1817, 1834), False, 'from time import sleep\n'), ((2271, 2293), 'time.sleep', 'sleep', (['simulation_time'], {}), '(simulation_time)\n', (2276, 2293), False, 'from time import sleep\n'), ((2349, 2371), 'time.sleep', 'sleep', (['simulation_time'], {}), '(simulation_time)\n', (2354, 2371), False, 'from time import sleep\n'), ((1348, 1381), 'link.Link', 'link.Link', (['host_1', '(0)', 'router_a', '(0)'], {}), '(host_1, 0, router_a, 0)\n', (1357, 1381), False, 'import link\n'), ((1408, 1443), 'link.Link', 'link.Link', (['router_a', '(1)', 'router_b', '(0)'], {}), '(router_a, 1, router_b, 0)\n', (1417, 1443), False, 'import link\n'), ((1470, 1503), 'link.Link', 'link.Link', (['router_b', '(1)', 'host_2', '(0)'], {}), '(router_b, 1, host_2, 0)\n', (1479, 1503), False, 'import link\n')]
|
# Owner(s): ["oncall: distributed"]
import sys
import torch
from torch import distributed as dist
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from torch.nn import Linear, Module
from torch.optim import SGD
from torch.testing._internal.common_distributed import skip_if_lt_x_gpu
from torch.testing._internal.common_fsdp import (
FSDPTest,
)
from torch.testing._internal.common_utils import (
TEST_WITH_DEV_DBG_ASAN,
instantiate_parametrized_tests,
parametrize,
run_tests,
subtest,
)
if not dist.is_available():
print("Distributed not available, skipping tests", file=sys.stderr)
sys.exit(0)
if TEST_WITH_DEV_DBG_ASAN:
print(
"Skip dev-asan as torch + multiprocessing spawn have known issues",
file=sys.stderr,
)
sys.exit(0)
class TestInput(FSDPTest):
@property
def world_size(self):
return 1
@skip_if_lt_x_gpu(1)
@parametrize("input_cls", [subtest(dict, name="dict"), subtest(list, name="list")])
def test_input_type(self, input_cls):
"""Test FSDP with input being a list or a dict, only single GPU."""
class Model(Module):
def __init__(self):
super().__init__()
self.layer = Linear(4, 4)
def forward(self, input):
if isinstance(input, list):
input = input[0]
else:
assert isinstance(input, dict), input
input = input["in"]
return self.layer(input)
model = FSDP(Model()).cuda()
optim = SGD(model.parameters(), lr=0.1)
for _ in range(5):
in_data = torch.rand(64, 4).cuda()
in_data.requires_grad = True
if input_cls is list:
in_data = [in_data]
else:
self.assertTrue(input_cls is dict)
in_data = {"in": in_data}
out = model(in_data)
out.sum().backward()
optim.step()
optim.zero_grad()
instantiate_parametrized_tests(TestInput)
if __name__ == "__main__":
run_tests()
|
[
"torch.testing._internal.common_utils.instantiate_parametrized_tests",
"torch.testing._internal.common_utils.subtest",
"torch.rand",
"torch.testing._internal.common_utils.run_tests",
"sys.exit",
"torch.nn.Linear",
"torch.distributed.is_available",
"torch.testing._internal.common_distributed.skip_if_lt_x_gpu"
] |
[((2057, 2098), 'torch.testing._internal.common_utils.instantiate_parametrized_tests', 'instantiate_parametrized_tests', (['TestInput'], {}), '(TestInput)\n', (2087, 2098), False, 'from torch.testing._internal.common_utils import TEST_WITH_DEV_DBG_ASAN, instantiate_parametrized_tests, parametrize, run_tests, subtest\n'), ((541, 560), 'torch.distributed.is_available', 'dist.is_available', ([], {}), '()\n', (558, 560), True, 'from torch import distributed as dist\n'), ((638, 649), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (646, 649), False, 'import sys\n'), ((800, 811), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (808, 811), False, 'import sys\n'), ((904, 923), 'torch.testing._internal.common_distributed.skip_if_lt_x_gpu', 'skip_if_lt_x_gpu', (['(1)'], {}), '(1)\n', (920, 923), False, 'from torch.testing._internal.common_distributed import skip_if_lt_x_gpu\n'), ((2131, 2142), 'torch.testing._internal.common_utils.run_tests', 'run_tests', ([], {}), '()\n', (2140, 2142), False, 'from torch.testing._internal.common_utils import TEST_WITH_DEV_DBG_ASAN, instantiate_parametrized_tests, parametrize, run_tests, subtest\n'), ((955, 981), 'torch.testing._internal.common_utils.subtest', 'subtest', (['dict'], {'name': '"""dict"""'}), "(dict, name='dict')\n", (962, 981), False, 'from torch.testing._internal.common_utils import TEST_WITH_DEV_DBG_ASAN, instantiate_parametrized_tests, parametrize, run_tests, subtest\n'), ((983, 1009), 'torch.testing._internal.common_utils.subtest', 'subtest', (['list'], {'name': '"""list"""'}), "(list, name='list')\n", (990, 1009), False, 'from torch.testing._internal.common_utils import TEST_WITH_DEV_DBG_ASAN, instantiate_parametrized_tests, parametrize, run_tests, subtest\n'), ((1256, 1268), 'torch.nn.Linear', 'Linear', (['(4)', '(4)'], {}), '(4, 4)\n', (1262, 1268), False, 'from torch.nn import Linear, Module\n'), ((1686, 1703), 'torch.rand', 'torch.rand', (['(64)', '(4)'], {}), '(64, 4)\n', (1696, 1703), False, 'import torch\n')]
|
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag
from recipe.serializers import TagSerializer
# We're using a viewset for the tag api endpoint, which means
# that we can specify which viewset we want with the "-"
TAGS_URL = reverse('recipe:tag-list')
class PrivateTagsApiTest(TestCase):
"""Test the authorized user tags API"""
def setUp(self):
"""Setup the api client and authenticated user to list tags"""
self.user = get_user_model().objects.create_user(
email='<EMAIL>',
password='<PASSWORD>'
)
self.client = APIClient()
self.client.force_authenticate(self.user)
def test_create_tag_successful(self):
"""Test creating a new tag"""
payload = {'name': 'Test tag'}
self.client.post(TAGS_URL, payload)
exists = Tag.objects.filter(
user=self.user,
name=payload['name']
)
self.assertTrue(exists)
def test_create_tag_invalid(self):
"""Test creating a new tag with invalid payload"""
payload = {'name': ''}
response = self.client.post(TAGS_URL, payload)
# Make sure this isn't actually added
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_retrieve_tags(self):
"""Test retrieving tags"""
# Create the tags to list for current user
Tag.objects.create(user=self.user, name='Vegan')
Tag.objects.create(user=self.user, name='Dessert')
response = self.client.get(TAGS_URL)
# Order reversed alphabetically
tags = Tag.objects.all().order_by('-name')
# Serialzes the tags and thereby also manages the ordering
# many=true indicates that we're dealing with many objects
serializer = TagSerializer(tags, many=True)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Make sure the lists from the serializer and we added are the same,
# so same items and same reversed ordering
self.assertEqual(response.data, serializer.data)
def test_tags_limited_to_user(self):
"""Test that tags returned are for current authenticated user"""
# Create a new user so we can assign a tag it, which should
# not be part of the returned tags from this class's User
other_user = get_user_model().objects.create_user(
email='<EMAIL>',
password='<PASSWORD>'
)
# Add tag to other user
Tag.objects.create(user=other_user, name="Fruity")
# Add a tag to the authenticated user to make sure only the new one is listed
new_tag = Tag.objects.create(user=self.user, name="Comfort Food")
response = self.client.get(TAGS_URL)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], new_tag.name)
class PublicTagsApiTest(TestCase):
"""Test the publicly available tags API"""
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test that the login is required for retrieving tags"""
response = self.client.get(TAGS_URL)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
|
[
"core.models.Tag.objects.create",
"core.models.Tag.objects.filter",
"django.contrib.auth.get_user_model",
"django.urls.reverse",
"recipe.serializers.TagSerializer",
"core.models.Tag.objects.all",
"rest_framework.test.APIClient"
] |
[((392, 418), 'django.urls.reverse', 'reverse', (['"""recipe:tag-list"""'], {}), "('recipe:tag-list')\n", (399, 418), False, 'from django.urls import reverse\n'), ((748, 759), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (757, 759), False, 'from rest_framework.test import APIClient\n'), ((992, 1048), 'core.models.Tag.objects.filter', 'Tag.objects.filter', ([], {'user': 'self.user', 'name': "payload['name']"}), "(user=self.user, name=payload['name'])\n", (1010, 1048), False, 'from core.models import Tag\n'), ((1552, 1600), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""Vegan"""'}), "(user=self.user, name='Vegan')\n", (1570, 1600), False, 'from core.models import Tag\n'), ((1609, 1659), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""Dessert"""'}), "(user=self.user, name='Dessert')\n", (1627, 1659), False, 'from core.models import Tag\n'), ((1954, 1984), 'recipe.serializers.TagSerializer', 'TagSerializer', (['tags'], {'many': '(True)'}), '(tags, many=True)\n', (1967, 1984), False, 'from recipe.serializers import TagSerializer\n'), ((2659, 2709), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'other_user', 'name': '"""Fruity"""'}), "(user=other_user, name='Fruity')\n", (2677, 2709), False, 'from core.models import Tag\n'), ((2815, 2870), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""Comfort Food"""'}), "(user=self.user, name='Comfort Food')\n", (2833, 2870), False, 'from core.models import Tag\n'), ((3225, 3236), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (3234, 3236), False, 'from rest_framework.test import APIClient\n'), ((1762, 1779), 'core.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (1777, 1779), False, 'from core.models import Tag\n'), ((614, 630), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (628, 630), False, 'from django.contrib.auth import get_user_model\n'), ((2508, 2524), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2522, 2524), False, 'from django.contrib.auth import get_user_model\n')]
|
import logging
import os
import subprocess
from cekit.builder import Builder
from cekit.errors import CekitError
LOGGER = logging.getLogger('cekit')
class PodmanBuilder(Builder):
"""This class represents podman builder in build mode."""
def __init__(self, params):
super(PodmanBuilder, self).__init__('podman', params)
@staticmethod
def dependencies(params=None):
deps = {}
deps['podman'] = {
'package': 'podman',
'executable': 'podman'
}
return deps
def run(self):
"""Build container image using podman."""
tags = self.params.tags
cmd = ["/usr/bin/podman", "build"]
if not tags:
tags = self.generator.get_tags()
if self.params.pull:
cmd.append('--pull-always')
if not self.params.no_squash:
cmd.append('--squash')
# Custom tags for the container image
LOGGER.debug("Building image with tags: '{}'".format("', '".join(tags)))
for tag in tags:
cmd.extend(["-t", tag])
LOGGER.info("Building container image...")
cmd.append(os.path.join(self.target, 'image'))
LOGGER.debug("Running Podman build: '{}'".format(" ".join(cmd)))
try:
subprocess.check_call(cmd)
LOGGER.info("Image built and available under following tags: {}".format(", ".join(tags)))
except:
raise CekitError("Image build failed, see logs above.")
|
[
"cekit.errors.CekitError",
"subprocess.check_call",
"os.path.join",
"logging.getLogger"
] |
[((124, 150), 'logging.getLogger', 'logging.getLogger', (['"""cekit"""'], {}), "('cekit')\n", (141, 150), False, 'import logging\n'), ((1157, 1191), 'os.path.join', 'os.path.join', (['self.target', '"""image"""'], {}), "(self.target, 'image')\n", (1169, 1191), False, 'import os\n'), ((1293, 1319), 'subprocess.check_call', 'subprocess.check_call', (['cmd'], {}), '(cmd)\n', (1314, 1319), False, 'import subprocess\n'), ((1457, 1506), 'cekit.errors.CekitError', 'CekitError', (['"""Image build failed, see logs above."""'], {}), "('Image build failed, see logs above.')\n", (1467, 1506), False, 'from cekit.errors import CekitError\n')]
|
import pytest
from pyhamtools.frequency import freq_to_band
from pyhamtools.consts import LookupConventions as const
class Test_utils_freq_to_band():
def test_hf_frequencies(self):
assert freq_to_band(137) == {"band" : 2190, "mode":const.CW}
assert freq_to_band(1805) == {"band" : 160, "mode":const.CW}
assert freq_to_band(1838) == {"band" : 160, "mode":const.DIGITAL}
assert freq_to_band(1870) == {"band" : 160, "mode":const.LSB}
assert freq_to_band(3500) == {"band" : 80, "mode":const.CW}
assert freq_to_band(3580) == {"band" : 80, "mode":const.DIGITAL}
assert freq_to_band(3799) == {"band" : 80, "mode":const.LSB}
assert freq_to_band(5200) == {"band" : 60, "mode":None}
assert freq_to_band(7000) == {"band" : 40, "mode":const.CW}
assert freq_to_band(7044) == {"band" : 40, "mode":const.DIGITAL}
assert freq_to_band(7139) == {"band" : 40, "mode":const.LSB}
assert freq_to_band(10100) == {"band" : 30, "mode":const.CW}
assert freq_to_band(10141) == {"band" : 30, "mode":const.DIGITAL}
assert freq_to_band(14000) == {"band" : 20, "mode":const.CW}
assert freq_to_band(14070) == {"band" : 20, "mode":const.DIGITAL}
assert freq_to_band(14349) == {"band" : 20, "mode":const.USB}
assert freq_to_band(18068) == {"band" : 17, "mode":const.CW}
assert freq_to_band(18096) == {"band" : 17, "mode":const.DIGITAL}
assert freq_to_band(18250) == {"band" : 17, "mode":const.USB}
assert freq_to_band(21000) == {"band" : 15, "mode":const.CW}
assert freq_to_band(21070) == {"band" : 15, "mode":const.DIGITAL}
assert freq_to_band(21449) == {"band" : 15, "mode":const.USB}
assert freq_to_band(24890) == {"band" : 12, "mode":const.CW}
assert freq_to_band(24916) == {"band" : 12, "mode":const.DIGITAL}
assert freq_to_band(24965) == {"band" : 12, "mode":const.USB}
assert freq_to_band(28000) == {"band" : 10, "mode":const.CW}
assert freq_to_band(28070) == {"band" : 10, "mode":const.DIGITAL}
assert freq_to_band(28500) == {"band" : 10, "mode":const.USB}
assert freq_to_band(50000) == {"band" : 6, "mode":const.CW}
assert freq_to_band(50100) == {"band" : 6, "mode":const.USB}
assert freq_to_band(50500) == {"band" : 6, "mode":const.DIGITAL}
def test_vhf_frequencies(self):
assert freq_to_band(70001) == {"band" : 4, "mode":None}
assert freq_to_band(144000) == {"band" : 2, "mode":const.CW}
assert freq_to_band(144150) == {"band" : 2, "mode":const.USB}
assert freq_to_band(144400) == {"band" : 2, "mode":None}
assert freq_to_band(220000) == {"band" : 1.25, "mode":None}
def test_uhf_frequencies(self):
assert freq_to_band(420000) == {"band" : 0.7, "mode":None}
assert freq_to_band(902000) == {"band" : 0.33, "mode":None}
assert freq_to_band(1200000) == {"band" : 0.23, "mode":None}
def test_shf_frequencies(self):
assert freq_to_band(2390000) == {"band" : 0.13, "mode":None}
assert freq_to_band(3300000) == {"band" : 0.09, "mode":None}
assert freq_to_band(5650000) == {"band" : 0.053, "mode":None}
assert freq_to_band(10000000) == {"band" : 0.03, "mode":None}
assert freq_to_band(24000000) == {"band" : 0.0125, "mode":None}
assert freq_to_band(47000000) == {"band" : 0.0063, "mode":None}
with pytest.raises(KeyError):
freq_to_band(16304)
def test_ft_frequencies(self):
assert freq_to_band(1840) == {"band": 160, "mode": const.DIGITAL} #FT8
assert freq_to_band(3573) == {"band": 80, "mode": const.DIGITAL} #FT8
assert freq_to_band(7074) == {"band": 40, "mode": const.DIGITAL} #FT8
assert freq_to_band(10136) == {"band": 30, "mode": const.DIGITAL} #FT8
assert freq_to_band(14074) == {"band": 20, "mode": const.DIGITAL} #FT8
assert freq_to_band(18100) == {"band": 17, "mode": const.DIGITAL} #FT8
assert freq_to_band(21074) == {"band": 15, "mode": const.DIGITAL} #FT8
assert freq_to_band(24915) == {"band": 12, "mode": const.DIGITAL} #FT8
assert freq_to_band(28074) == {"band": 10, "mode": const.DIGITAL} #FT8
assert freq_to_band(50313) == {"band": 6, "mode": const.DIGITAL} #FT8
assert freq_to_band(144174.5) == {"band": 2, "mode": const.DIGITAL} #FT8
|
[
"pytest.raises",
"pyhamtools.frequency.freq_to_band"
] |
[((202, 219), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(137)'], {}), '(137)\n', (214, 219), False, 'from pyhamtools.frequency import freq_to_band\n'), ((272, 290), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(1805)'], {}), '(1805)\n', (284, 290), False, 'from pyhamtools.frequency import freq_to_band\n'), ((341, 359), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(1838)'], {}), '(1838)\n', (353, 359), False, 'from pyhamtools.frequency import freq_to_band\n'), ((415, 433), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(1870)'], {}), '(1870)\n', (427, 433), False, 'from pyhamtools.frequency import freq_to_band\n'), ((486, 504), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(3500)'], {}), '(3500)\n', (498, 504), False, 'from pyhamtools.frequency import freq_to_band\n'), ((554, 572), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(3580)'], {}), '(3580)\n', (566, 572), False, 'from pyhamtools.frequency import freq_to_band\n'), ((627, 645), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(3799)'], {}), '(3799)\n', (639, 645), False, 'from pyhamtools.frequency import freq_to_band\n'), ((697, 715), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(5200)'], {}), '(5200)\n', (709, 715), False, 'from pyhamtools.frequency import freq_to_band\n'), ((762, 780), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(7000)'], {}), '(7000)\n', (774, 780), False, 'from pyhamtools.frequency import freq_to_band\n'), ((830, 848), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(7044)'], {}), '(7044)\n', (842, 848), False, 'from pyhamtools.frequency import freq_to_band\n'), ((903, 921), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(7139)'], {}), '(7139)\n', (915, 921), False, 'from pyhamtools.frequency import freq_to_band\n'), ((973, 992), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(10100)'], {}), '(10100)\n', (985, 992), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1042, 1061), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(10141)'], {}), '(10141)\n', (1054, 1061), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1117, 1136), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(14000)'], {}), '(14000)\n', (1129, 1136), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1186, 1205), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(14070)'], {}), '(14070)\n', (1198, 1205), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1260, 1279), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(14349)'], {}), '(14349)\n', (1272, 1279), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1331, 1350), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(18068)'], {}), '(18068)\n', (1343, 1350), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1400, 1419), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(18096)'], {}), '(18096)\n', (1412, 1419), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1474, 1493), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(18250)'], {}), '(18250)\n', (1486, 1493), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1545, 1564), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(21000)'], {}), '(21000)\n', (1557, 1564), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1614, 1633), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(21070)'], {}), '(21070)\n', (1626, 1633), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1688, 1707), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(21449)'], {}), '(21449)\n', (1700, 1707), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1759, 1778), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(24890)'], {}), '(24890)\n', (1771, 1778), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1828, 1847), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(24916)'], {}), '(24916)\n', (1840, 1847), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1902, 1921), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(24965)'], {}), '(24965)\n', (1914, 1921), False, 'from pyhamtools.frequency import freq_to_band\n'), ((1973, 1992), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(28000)'], {}), '(28000)\n', (1985, 1992), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2042, 2061), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(28070)'], {}), '(28070)\n', (2054, 2061), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2116, 2135), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(28500)'], {}), '(28500)\n', (2128, 2135), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2187, 2206), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(50000)'], {}), '(50000)\n', (2199, 2206), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2255, 2274), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(50100)'], {}), '(50100)\n', (2267, 2274), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2324, 2343), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(50500)'], {}), '(50500)\n', (2336, 2343), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2434, 2453), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(70001)'], {}), '(70001)\n', (2446, 2453), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2499, 2519), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(144000)'], {}), '(144000)\n', (2511, 2519), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2568, 2588), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(144150)'], {}), '(144150)\n', (2580, 2588), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2638, 2658), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(144400)'], {}), '(144400)\n', (2650, 2658), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2704, 2724), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(220000)'], {}), '(220000)\n', (2716, 2724), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2809, 2829), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(420000)'], {}), '(420000)\n', (2821, 2829), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2877, 2897), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(902000)'], {}), '(902000)\n', (2889, 2897), False, 'from pyhamtools.frequency import freq_to_band\n'), ((2946, 2967), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(1200000)'], {}), '(1200000)\n', (2958, 2967), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3052, 3073), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(2390000)'], {}), '(2390000)\n', (3064, 3073), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3122, 3143), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(3300000)'], {}), '(3300000)\n', (3134, 3143), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3192, 3213), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(5650000)'], {}), '(5650000)\n', (3204, 3213), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3263, 3285), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(10000000)'], {}), '(10000000)\n', (3275, 3285), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3334, 3356), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(24000000)'], {}), '(24000000)\n', (3346, 3356), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3407, 3429), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(47000000)'], {}), '(47000000)\n', (3419, 3429), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3478, 3501), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (3491, 3501), False, 'import pytest\n'), ((3515, 3534), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(16304)'], {}), '(16304)\n', (3527, 3534), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3586, 3604), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(1840)'], {}), '(1840)\n', (3598, 3604), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3665, 3683), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(3573)'], {}), '(3573)\n', (3677, 3683), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3743, 3761), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(7074)'], {}), '(7074)\n', (3755, 3761), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3821, 3840), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(10136)'], {}), '(10136)\n', (3833, 3840), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3900, 3919), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(14074)'], {}), '(14074)\n', (3912, 3919), False, 'from pyhamtools.frequency import freq_to_band\n'), ((3979, 3998), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(18100)'], {}), '(18100)\n', (3991, 3998), False, 'from pyhamtools.frequency import freq_to_band\n'), ((4058, 4077), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(21074)'], {}), '(21074)\n', (4070, 4077), False, 'from pyhamtools.frequency import freq_to_band\n'), ((4137, 4156), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(24915)'], {}), '(24915)\n', (4149, 4156), False, 'from pyhamtools.frequency import freq_to_band\n'), ((4216, 4235), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(28074)'], {}), '(28074)\n', (4228, 4235), False, 'from pyhamtools.frequency import freq_to_band\n'), ((4295, 4314), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(50313)'], {}), '(50313)\n', (4307, 4314), False, 'from pyhamtools.frequency import freq_to_band\n'), ((4373, 4395), 'pyhamtools.frequency.freq_to_band', 'freq_to_band', (['(144174.5)'], {}), '(144174.5)\n', (4385, 4395), False, 'from pyhamtools.frequency import freq_to_band\n')]
|
from firmware_variables import privileges, get_boot_order, get_boot_entry, LoadOption
def doit():
with privileges():
order = get_boot_order()
for entry_id in order:
raw = get_boot_entry(entry_id)
load_option = LoadOption.from_bytes(raw)
print("0x{:04X} {}".format(entry_id, load_option))
assert load_option.to_bytes() == raw
if __name__ == "__main__":
doit()
|
[
"firmware_variables.privileges",
"firmware_variables.get_boot_entry",
"firmware_variables.get_boot_order",
"firmware_variables.LoadOption.from_bytes"
] |
[((109, 121), 'firmware_variables.privileges', 'privileges', ([], {}), '()\n', (119, 121), False, 'from firmware_variables import privileges, get_boot_order, get_boot_entry, LoadOption\n'), ((139, 155), 'firmware_variables.get_boot_order', 'get_boot_order', ([], {}), '()\n', (153, 155), False, 'from firmware_variables import privileges, get_boot_order, get_boot_entry, LoadOption\n'), ((205, 229), 'firmware_variables.get_boot_entry', 'get_boot_entry', (['entry_id'], {}), '(entry_id)\n', (219, 229), False, 'from firmware_variables import privileges, get_boot_order, get_boot_entry, LoadOption\n'), ((256, 282), 'firmware_variables.LoadOption.from_bytes', 'LoadOption.from_bytes', (['raw'], {}), '(raw)\n', (277, 282), False, 'from firmware_variables import privileges, get_boot_order, get_boot_entry, LoadOption\n')]
|
import numpy as np
import os
import torch
import subprocess
import matplotlib.pyplot as plt
import time
import pickle
import re
def save_pickle(features, labels, path, name):
features, labels = np.array(features).astype(np.float32), np.array(labels).astype(np.float32)
x = time.asctime()
filename = name + '_' + str(re.sub('[ ]', '_', x) + '.pkl')
dir = os.path.join(path, filename)
with open(dir, "wb") as f:
pickle.dump([features, labels], f)
def heatmap2D(features):
x1, x2 = features[:, 0], features[:, 1]
heatmap, xedges, yedges = np.histogram2d(x1, x2, bins=50)
extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]
plt.clf()
plt.imshow(heatmap.T, extent=extent, origin='lower')
plt.show()
def int_tuple(s):
return tuple(int(i) for i in s.split(','))
def find_nan(variable, var_name):
variable_n = variable.data.cpu().numpy()
if np.isnan(variable_n).any():
exit('%s has nan' % var_name)
def get_gpu_memory():
torch.cuda.synchronize()
opts = [
'nvidia-smi', '-q', '--gpu=' + str(1), '|', 'grep', '"Used GPU Memory"'
]
cmd = str.join(' ', opts)
ps = subprocess.Popen(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = ps.communicate()[0].decode('utf-8')
output = output.split("\n")[0].split(":")
consumed_mem = int(output[1].strip().split(" ")[0])
return consumed_mem
def get_total_norm(parameters, norm_type=2):
if norm_type == float('inf'):
total_norm = max(p.grad.data.abs().max() for p in parameters)
else:
total_norm = 0
for p in parameters:
try:
param_norm = p.grad.data.norm(norm_type)
total_norm += param_norm**norm_type
total_norm = total_norm**(1. / norm_type)
except:
continue
return total_norm
def get_dset_path(dset_name, dset_type):
_dir = os.path.dirname(__file__)
_dir = _dir.split("/")[:-1]
_dir = "/".join(_dir)
return os.path.join(_dir, 'datasets', dset_name, dset_type)
def bool_flag(s):
if s == '1':
return True
elif s == '0':
return False
msg = 'Invalid value "%s" for bool flag (should be 0 or 1)'
raise ValueError(msg % s)
def Save_Network(PATH, epoch, net, optimizer):
state = {
'epoch': epoch,
'state_dict': net.state_dict(),
'optimizer': optimizer.state_dict(),
}
torch.save(state, PATH)
def Load_Network(PATH, net, optimizer):
state = torch.load(PATH)
net.load_state_dict(state['state_dict'])
optimizer.load_state_dict(state['optimizer'])
def plot_all(prediction, actual, title, model_name, idx):
pred1, pred2, pred3 = prediction[:, 0], prediction[:, 1], prediction[:, 2]
actual1, actual2, actual3 = actual[:, 0], actual[:, 1], actual[:, 2]
x = np.arange(1, len(pred1)+1)
fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex=True, figsize=(10,10))
fig.suptitle(title)
ax1.plot(x, pred1 , label='predicted')
ax1.plot(x, actual1, label='actual' )
ax1.set_ylabel('dx [mm]')
ax2.plot(x, pred2, label='predicted')
ax2.plot(x, actual2, label='actual' )
ax2.set_ylabel('dy [mm]')
ax3.plot(x, pred3, label='predicted')
ax3.plot(x, actual3, label='actual' )
ax3.set_ylabel('d_theta [radians]')
ax3.set_xlabel('time step')
ax1.legend()
# plt.show()
dir = r'E:\CarProject\NewCode_Project\plot'
checkpoint_path = os.path.join(dir, model_name + '_' + str(idx) + '.png')
plt.savefig(checkpoint_path, bbox_inches='tight')
plt.close()
def plot_live(prediction, actual, title): # input: array[[dx,dy.d_theta],...] shape:(num_of_samples,3)
plt.gcf().clear()
pred1, pred2, pred3 = prediction[:, 0], prediction[:, 1], prediction[:, 2]
actual1, actual2, actual3 = actual[:, 0], actual[:, 1], actual[:, 2]
x = np.arange(1, len(pred1)+1)
fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex=True, figsize=(10,10))
fig.suptitle(title)
ax1.plot(x, pred1, label='predicted')
ax1.plot(x, actual1, label='actual')
ax1.set_ylabel('dx [mm]')
ax2.plot(x, pred2)
ax2.plot(x, actual2)
ax2.set_ylabel('dy [mm]')
ax3.plot(x, pred3)
ax3.plot(x, actual3)
ax3.set_ylabel('d_theta [radians]')
ax3.set_xlabel('time step')
ax1.legend()
plt.draw()
plt.pause(0.00000000001)
def save_states(checkpoint_state, path):
s = checkpoint_state['state']
sp = checkpoint_state['state_predict']
with open(path, "wb") as f:
pickle.dump([s, sp], f)
def loss_graph(train_loss, val_loss):
plt.plot(range(len(train_loss)), train_loss, label="train_loss")
plt.plot(range(len(val_loss)), val_loss, label="train_loss")
plt.legend()
plt.savefig('lossVSvalidation.png')
plt.show()
def predict_batch(x, model):
x_min = -120
x_max = 120
dx_min = -25
dx_max = 25
dy_min = -50
dy_max = 50
dtheta_min = -1.4
dtheta_max = 1.4
action_nor = (x - x_min) / (x_max - x_min)
action_nor = torch.tensor(action_nor, dtype=torch.float)
with torch.no_grad():
action_nor = action_nor.cuda().unsqueeze(1)
prediction = model(action_nor)
prediction = prediction.detach().cpu().numpy()
prediction[:,0] = prediction[:,0] * ((dx_max) - (dx_min)) + dx_min
prediction[:,1] = prediction[:,1] * ((dy_max) - (dy_min)) + dy_min
prediction[:,2] = prediction[:,2] * ((dtheta_max) - (dtheta_min)) + dtheta_min
return prediction
def plot_loss(train_loss, val_loss, loss_plot_name):
plt.figure(figsize=(10, 7))
plt.plot(train_loss, color='orange', label='train loss')
plt.plot(val_loss, color='red', label='validataion loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.savefig(loss_plot_name, bbox_inches='tight')
plt.close()
|
[
"torch.cuda.synchronize",
"matplotlib.pyplot.savefig",
"pickle.dump",
"matplotlib.pyplot.clf",
"numpy.isnan",
"matplotlib.pyplot.figure",
"torch.no_grad",
"os.path.join",
"time.asctime",
"numpy.histogram2d",
"matplotlib.pyplot.imshow",
"os.path.dirname",
"torch.load",
"matplotlib.pyplot.close",
"matplotlib.pyplot.draw",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.pause",
"re.sub",
"subprocess.Popen",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.plot",
"torch.save",
"numpy.array",
"matplotlib.pyplot.xlabel",
"torch.tensor"
] |
[((293, 307), 'time.asctime', 'time.asctime', ([], {}), '()\n', (305, 307), False, 'import time\n'), ((384, 412), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (396, 412), False, 'import os\n'), ((593, 624), 'numpy.histogram2d', 'np.histogram2d', (['x1', 'x2'], {'bins': '(50)'}), '(x1, x2, bins=50)\n', (607, 624), True, 'import numpy as np\n'), ((691, 700), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (698, 700), True, 'import matplotlib.pyplot as plt\n'), ((706, 758), 'matplotlib.pyplot.imshow', 'plt.imshow', (['heatmap.T'], {'extent': 'extent', 'origin': '"""lower"""'}), "(heatmap.T, extent=extent, origin='lower')\n", (716, 758), True, 'import matplotlib.pyplot as plt\n'), ((764, 774), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (772, 774), True, 'import matplotlib.pyplot as plt\n'), ((1034, 1058), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (1056, 1058), False, 'import torch\n'), ((1202, 1290), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), '(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess\n .STDOUT)\n', (1218, 1290), False, 'import subprocess\n'), ((2008, 2033), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2023, 2033), False, 'import os\n'), ((2106, 2158), 'os.path.join', 'os.path.join', (['_dir', '"""datasets"""', 'dset_name', 'dset_type'], {}), "(_dir, 'datasets', dset_name, dset_type)\n", (2118, 2158), False, 'import os\n'), ((2552, 2575), 'torch.save', 'torch.save', (['state', 'PATH'], {}), '(state, PATH)\n', (2562, 2575), False, 'import torch\n'), ((2632, 2648), 'torch.load', 'torch.load', (['PATH'], {}), '(PATH)\n', (2642, 2648), False, 'import torch\n'), ((3027, 3076), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {'sharex': '(True)', 'figsize': '(10, 10)'}), '(3, 1, sharex=True, figsize=(10, 10))\n', (3039, 3076), True, 'import matplotlib.pyplot as plt\n'), ((3666, 3715), 'matplotlib.pyplot.savefig', 'plt.savefig', (['checkpoint_path'], {'bbox_inches': '"""tight"""'}), "(checkpoint_path, bbox_inches='tight')\n", (3677, 3715), True, 'import matplotlib.pyplot as plt\n'), ((3721, 3732), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3730, 3732), True, 'import matplotlib.pyplot as plt\n'), ((4080, 4129), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {'sharex': '(True)', 'figsize': '(10, 10)'}), '(3, 1, sharex=True, figsize=(10, 10))\n', (4092, 4129), True, 'import matplotlib.pyplot as plt\n'), ((4512, 4522), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (4520, 4522), True, 'import matplotlib.pyplot as plt\n'), ((4528, 4544), 'matplotlib.pyplot.pause', 'plt.pause', (['(1e-11)'], {}), '(1e-11)\n', (4537, 4544), True, 'import matplotlib.pyplot as plt\n'), ((4924, 4936), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4934, 4936), True, 'import matplotlib.pyplot as plt\n'), ((4942, 4977), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""lossVSvalidation.png"""'], {}), "('lossVSvalidation.png')\n", (4953, 4977), True, 'import matplotlib.pyplot as plt\n'), ((4983, 4993), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4991, 4993), True, 'import matplotlib.pyplot as plt\n'), ((5242, 5285), 'torch.tensor', 'torch.tensor', (['action_nor'], {'dtype': 'torch.float'}), '(action_nor, dtype=torch.float)\n', (5254, 5285), False, 'import torch\n'), ((5770, 5797), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (5780, 5797), True, 'import matplotlib.pyplot as plt\n'), ((5803, 5859), 'matplotlib.pyplot.plot', 'plt.plot', (['train_loss'], {'color': '"""orange"""', 'label': '"""train loss"""'}), "(train_loss, color='orange', label='train loss')\n", (5811, 5859), True, 'import matplotlib.pyplot as plt\n'), ((5865, 5922), 'matplotlib.pyplot.plot', 'plt.plot', (['val_loss'], {'color': '"""red"""', 'label': '"""validataion loss"""'}), "(val_loss, color='red', label='validataion loss')\n", (5873, 5922), True, 'import matplotlib.pyplot as plt\n'), ((5928, 5948), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (5938, 5948), True, 'import matplotlib.pyplot as plt\n'), ((5954, 5972), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (5964, 5972), True, 'import matplotlib.pyplot as plt\n'), ((5978, 5990), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5988, 5990), True, 'import matplotlib.pyplot as plt\n'), ((5996, 6044), 'matplotlib.pyplot.savefig', 'plt.savefig', (['loss_plot_name'], {'bbox_inches': '"""tight"""'}), "(loss_plot_name, bbox_inches='tight')\n", (6007, 6044), True, 'import matplotlib.pyplot as plt\n'), ((6050, 6061), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6059, 6061), True, 'import matplotlib.pyplot as plt\n'), ((454, 488), 'pickle.dump', 'pickle.dump', (['[features, labels]', 'f'], {}), '([features, labels], f)\n', (465, 488), False, 'import pickle\n'), ((4718, 4741), 'pickle.dump', 'pickle.dump', (['[s, sp]', 'f'], {}), '([s, sp], f)\n', (4729, 4741), False, 'import pickle\n'), ((5296, 5311), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5309, 5311), False, 'import torch\n'), ((937, 957), 'numpy.isnan', 'np.isnan', (['variable_n'], {}), '(variable_n)\n', (945, 957), True, 'import numpy as np\n'), ((3844, 3853), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3851, 3853), True, 'import matplotlib.pyplot as plt\n'), ((209, 227), 'numpy.array', 'np.array', (['features'], {}), '(features)\n', (217, 227), True, 'import numpy as np\n'), ((248, 264), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (256, 264), True, 'import numpy as np\n'), ((341, 362), 're.sub', 're.sub', (['"""[ ]"""', '"""_"""', 'x'], {}), "('[ ]', '_', x)\n", (347, 362), False, 'import re\n')]
|
import logging
logger = logging.getLogger('base')
def create_model(opt):
model = opt['model']
if model == 'sr':
from .SR_model import SRModel as M
elif model == 'srgan':
from .SRGAN_model import SRGANModel as M
elif model == 'srragan':
from .SRRaGAN_model import SRRaGANModel as M
elif model == 'sftgan':
from .SFTGAN_ACD_model import SFTGAN_ACD_Model as M
else:
raise NotImplementedError('Model [{:s}] not recognized.'.format(model))
m = M(opt)
logger.info('Model [{:s}] is created.'.format(m.__class__.__name__))
return m
|
[
"logging.getLogger"
] |
[((24, 49), 'logging.getLogger', 'logging.getLogger', (['"""base"""'], {}), "('base')\n", (41, 49), False, 'import logging\n')]
|
from libsaas import http, parsers
from libsaas.services import base
class FilesResource(base.RESTResource):
path = 'files'
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
class Files(FilesResource):
@base.apimethod
def get(self, start=None, limit=None):
"""
Returns data about all files.
Upstream documentation:
https://developers.pipedrive.com/v1#methods-Files
"""
params = base.get_params(None, locals())
return http.Request('GET', self.get_url(), params), parsers.parse_json
class File(FilesResource):
pass
|
[
"libsaas.services.base.MethodNotSupported"
] |
[((184, 209), 'libsaas.services.base.MethodNotSupported', 'base.MethodNotSupported', ([], {}), '()\n', (207, 209), False, 'from libsaas.services import base\n')]
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from ctypes.util import find_library
from ctypes import c_void_p, c_long, c_uint32, c_char_p, c_byte, c_ulong, c_bool
from ctypes import CDLL, string_at, cast, POINTER, byref
import ctypes
from .._ffi import FFIEngineError, buffer_from_bytes, byte_string_from_buffer
from ..errors import LibraryNotFoundError
__all__ = [
'CFHelpers',
'CoreFoundation',
]
core_foundation_path = find_library('CoreFoundation')
if not core_foundation_path:
raise LibraryNotFoundError('The library CoreFoundation could not be found')
CoreFoundation = CDLL(core_foundation_path, use_errno=True)
CFIndex = c_long
CFStringEncoding = c_uint32
CFArray = c_void_p
CFData = c_void_p
CFString = c_void_p
CFNumber = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFTypeID = c_ulong
CFBoolean = c_void_p
CFNumberType = c_uint32
CFTypeRef = POINTER(CFType)
CFArrayRef = POINTER(CFArray)
CFDataRef = POINTER(CFData)
CFStringRef = POINTER(CFString)
CFNumberRef = POINTER(CFNumber)
CFBooleanRef = POINTER(CFBoolean)
CFDictionaryRef = POINTER(CFDictionary)
CFErrorRef = POINTER(CFError)
CFAllocatorRef = c_void_p
CFDictionaryKeyCallBacks = c_void_p
CFDictionaryValueCallBacks = c_void_p
CFArrayCallBacks = c_void_p
pointer_p = POINTER(c_void_p)
try:
CoreFoundation.CFDataGetLength.argtypes = [
CFDataRef
]
CoreFoundation.CFDataGetLength.restype = CFIndex
CoreFoundation.CFDataGetBytePtr.argtypes = [
CFDataRef
]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDataCreate.argtypes = [
CFAllocatorRef,
c_char_p,
CFIndex
]
CoreFoundation.CFDataCreate.restype = CFDataRef
CoreFoundation.CFDictionaryCreate.argtypes = [
CFAllocatorRef,
CFStringRef,
CFTypeRef,
CFIndex,
CFDictionaryKeyCallBacks,
CFDictionaryValueCallBacks
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
CoreFoundation.CFDictionaryGetCount.argtypes = [
CFDictionaryRef
]
CoreFoundation.CFDictionaryGetCount.restype = CFIndex
CoreFoundation.CFStringGetCStringPtr.argtypes = [
CFStringRef,
CFStringEncoding
]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
CFStringEncoding
]
CoreFoundation.CFStringGetCString.restype = c_bool
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
CFStringEncoding
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
CoreFoundation.CFNumberCreate.argtypes = [
CFAllocatorRef,
CFNumberType,
c_void_p
]
CoreFoundation.CFNumberCreate.restype = CFNumberRef
CoreFoundation.CFCopyTypeIDDescription.argtypes = [
CFTypeID
]
CoreFoundation.CFCopyTypeIDDescription.restype = CFStringRef
CoreFoundation.CFRelease.argtypes = [
CFTypeRef
]
CoreFoundation.CFRelease.restype = None
CoreFoundation.CFErrorCopyDescription.argtypes = [
CFErrorRef
]
CoreFoundation.CFErrorCopyDescription.restype = CFStringRef
CoreFoundation.CFErrorGetDomain.argtypes = [
CFErrorRef
]
CoreFoundation.CFErrorGetDomain.restype = CFStringRef
CoreFoundation.CFErrorGetCode.argtypes = [
CFErrorRef
]
CoreFoundation.CFErrorGetCode.restype = CFIndex
CoreFoundation.CFBooleanGetValue.argtypes = [
CFBooleanRef
]
CoreFoundation.CFBooleanGetValue.restype = c_byte
CoreFoundation.CFDictionaryGetTypeID.argtypes = []
CoreFoundation.CFDictionaryGetTypeID.restype = CFTypeID
CoreFoundation.CFNumberGetTypeID.argtypes = []
CoreFoundation.CFNumberGetTypeID.restype = CFTypeID
CoreFoundation.CFStringGetTypeID.argtypes = []
CoreFoundation.CFStringGetTypeID.restype = CFTypeID
CoreFoundation.CFDataGetTypeID.argtypes = []
CoreFoundation.CFDataGetTypeID.restype = CFTypeID
CoreFoundation.CFArrayCreate.argtypes = [
CFAllocatorRef,
POINTER(c_void_p),
CFIndex,
CFArrayCallBacks
]
CoreFoundation.CFArrayCreate.restype = CFArrayRef
CoreFoundation.CFArrayGetCount.argtypes = [
CFArrayRef
]
CoreFoundation.CFArrayGetCount.restype = CFIndex
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [
CFArrayRef,
CFIndex
]
CoreFoundation.CFArrayGetValueAtIndex.restype = CFTypeRef
CoreFoundation.CFNumberGetType.argtypes = [
CFNumberRef
]
CoreFoundation.CFNumberGetType.restype = CFNumberType
CoreFoundation.CFNumberGetValue.argtypes = [
CFNumberRef,
CFNumberType,
c_void_p
]
CoreFoundation.CFNumberGetValue.restype = c_bool
CoreFoundation.CFDictionaryGetKeysAndValues.argtypes = [
CFDictionaryRef,
pointer_p,
pointer_p
]
CoreFoundation.CFDictionaryGetKeysAndValues.restype = CFIndex
CoreFoundation.CFGetTypeID.argtypes = [
CFTypeRef
]
CoreFoundation.CFGetTypeID.restype = CFTypeID
setattr(CoreFoundation, 'kCFAllocatorDefault', CFAllocatorRef.in_dll(CoreFoundation, 'kCFAllocatorDefault'))
setattr(CoreFoundation, 'kCFBooleanTrue', CFTypeRef.in_dll(CoreFoundation, 'kCFBooleanTrue'))
kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeDictionaryKeyCallBacks')
kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeDictionaryValueCallBacks')
kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks')
except (AttributeError):
raise FFIEngineError('Error initializing ctypes')
setattr(CoreFoundation, 'CFDataRef', CFDataRef)
setattr(CoreFoundation, 'CFErrorRef', CFErrorRef)
setattr(CoreFoundation, 'CFArrayRef', CFArrayRef)
kCFNumberCFIndexType = CFNumberType(14)
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
def _cast_pointer_p(value):
"""
Casts a value to a pointer of a pointer
:param value:
A ctypes object
:return:
A POINTER(c_void_p) object
"""
return cast(value, pointer_p)
class CFHelpers():
"""
Namespace for core foundation helpers
"""
_native_map = {}
@classmethod
def register_native_mapping(cls, type_id, callback):
"""
Register a function to convert a core foundation data type into its
equivalent in python
:param type_id:
The CFTypeId for the type
:param callback:
A callback to pass the CFType object to
"""
cls._native_map[int(type_id)] = callback
@staticmethod
def cf_number_to_number(value):
"""
Converts a CFNumber object to a python float or integer
:param value:
The CFNumber object
:return:
A python number (float or integer)
"""
type_ = CoreFoundation.CFNumberGetType(_cast_pointer_p(value))
c_type = {
1: c_byte, # kCFNumberSInt8Type
2: ctypes.c_short, # kCFNumberSInt16Type
3: ctypes.c_int32, # kCFNumberSInt32Type
4: ctypes.c_int64, # kCFNumberSInt64Type
5: ctypes.c_float, # kCFNumberFloat32Type
6: ctypes.c_double, # kCFNumberFloat64Type
7: c_byte, # kCFNumberCharType
8: ctypes.c_short, # kCFNumberShortType
9: ctypes.c_int, # kCFNumberIntType
10: c_long, # kCFNumberLongType
11: ctypes.c_longlong, # kCFNumberLongLongType
12: ctypes.c_float, # kCFNumberFloatType
13: ctypes.c_double, # kCFNumberDoubleType
14: c_long, # kCFNumberCFIndexType
15: ctypes.c_int, # kCFNumberNSIntegerType
16: ctypes.c_double, # kCFNumberCGFloatType
}[type_]
output = c_type(0)
CoreFoundation.CFNumberGetValue(_cast_pointer_p(value), type_, byref(output))
return output.value
@staticmethod
def cf_dictionary_to_dict(dictionary):
"""
Converts a CFDictionary object into a python dictionary
:param dictionary:
The CFDictionary to convert
:return:
A python dict
"""
dict_length = CoreFoundation.CFDictionaryGetCount(dictionary)
keys = (CFTypeRef * dict_length)()
values = (CFTypeRef * dict_length)()
CoreFoundation.CFDictionaryGetKeysAndValues(
dictionary,
_cast_pointer_p(keys),
_cast_pointer_p(values)
)
output = {}
for index in range(0, dict_length):
output[CFHelpers.native(keys[index])] = CFHelpers.native(values[index])
return output
@classmethod
def native(cls, value):
"""
Converts a CF* object into its python equivalent
:param value:
The CF* object to convert
:return:
The native python object
"""
type_id = CoreFoundation.CFGetTypeID(value)
if type_id in cls._native_map:
return cls._native_map[type_id](value)
else:
return value
@staticmethod
def cf_string_to_unicode(value):
"""
Creates a python unicode string from a CFString object
:param value:
The CFString to convert
:return:
A python unicode string
"""
string = CoreFoundation.CFStringGetCStringPtr(
_cast_pointer_p(value),
kCFStringEncodingUTF8
)
if string is None:
buffer = buffer_from_bytes(1024)
result = CoreFoundation.CFStringGetCString(
_cast_pointer_p(value),
buffer,
1024,
kCFStringEncodingUTF8
)
if not result:
raise OSError('Error copying C string from CFStringRef')
string = byte_string_from_buffer(buffer)
if string is not None:
string = string.decode('utf-8')
return string
@staticmethod
def cf_string_from_unicode(string):
"""
Creates a CFStringRef object from a unicode string
:param string:
The unicode string to create the CFString object from
:return:
A CFStringRef
"""
return CoreFoundation.CFStringCreateWithCString(
CoreFoundation.kCFAllocatorDefault,
string.encode('utf-8'),
kCFStringEncodingUTF8
)
@staticmethod
def cf_data_to_bytes(value):
"""
Extracts a bytestring from a CFData object
:param value:
A CFData object
:return:
A byte string
"""
start = CoreFoundation.CFDataGetBytePtr(value)
num_bytes = CoreFoundation.CFDataGetLength(value)
return string_at(start, num_bytes)
@staticmethod
def cf_data_from_bytes(bytes_):
"""
Creates a CFDataRef object from a byte string
:param bytes_:
The data to create the CFData object from
:return:
A CFDataRef
"""
return CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault,
bytes_,
len(bytes_)
)
@staticmethod
def cf_dictionary_from_pairs(pairs):
"""
Creates a CFDictionaryRef object from a list of 2-element tuples
representing the key and value. Each key should be a CFStringRef and each
value some sort of CF* type.
:param pairs:
A list of 2-element tuples
:return:
A CFDictionaryRef
"""
length = len(pairs)
keys = []
values = []
for pair in pairs:
key, value = pair
keys.append(key)
values.append(value)
keys = (CFStringRef * length)(*keys)
values = (CFTypeRef * length)(*values)
return CoreFoundation.CFDictionaryCreate(
CoreFoundation.kCFAllocatorDefault,
_cast_pointer_p(byref(keys)),
_cast_pointer_p(byref(values)),
length,
kCFTypeDictionaryKeyCallBacks,
kCFTypeDictionaryValueCallBacks
)
@staticmethod
def cf_array_from_list(values):
"""
Creates a CFArrayRef object from a list of CF* type objects.
:param values:
A list of CF* type object
:return:
A CFArrayRef
"""
length = len(values)
values = (CFTypeRef * length)(*values)
return CoreFoundation.CFArrayCreate(
CoreFoundation.kCFAllocatorDefault,
_cast_pointer_p(byref(values)),
length,
kCFTypeArrayCallBacks
)
@staticmethod
def cf_number_from_integer(integer):
"""
Creates a CFNumber object from an integer
:param integer:
The integer to create the CFNumber for
:return:
A CFNumber
"""
integer_as_long = c_long(integer)
return CoreFoundation.CFNumberCreate(
CoreFoundation.kCFAllocatorDefault,
kCFNumberCFIndexType,
byref(integer_as_long)
)
|
[
"ctypes.util.find_library",
"ctypes.string_at",
"ctypes.byref",
"ctypes.cast",
"ctypes.c_long",
"ctypes.c_void_p.in_dll",
"ctypes.CDLL",
"ctypes.POINTER"
] |
[((490, 520), 'ctypes.util.find_library', 'find_library', (['"""CoreFoundation"""'], {}), "('CoreFoundation')\n", (502, 520), False, 'from ctypes.util import find_library\n'), ((648, 690), 'ctypes.CDLL', 'CDLL', (['core_foundation_path'], {'use_errno': '(True)'}), '(core_foundation_path, use_errno=True)\n', (652, 690), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((952, 967), 'ctypes.POINTER', 'POINTER', (['CFType'], {}), '(CFType)\n', (959, 967), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((981, 997), 'ctypes.POINTER', 'POINTER', (['CFArray'], {}), '(CFArray)\n', (988, 997), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1010, 1025), 'ctypes.POINTER', 'POINTER', (['CFData'], {}), '(CFData)\n', (1017, 1025), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1040, 1057), 'ctypes.POINTER', 'POINTER', (['CFString'], {}), '(CFString)\n', (1047, 1057), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1072, 1089), 'ctypes.POINTER', 'POINTER', (['CFNumber'], {}), '(CFNumber)\n', (1079, 1089), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1105, 1123), 'ctypes.POINTER', 'POINTER', (['CFBoolean'], {}), '(CFBoolean)\n', (1112, 1123), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1142, 1163), 'ctypes.POINTER', 'POINTER', (['CFDictionary'], {}), '(CFDictionary)\n', (1149, 1163), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1177, 1193), 'ctypes.POINTER', 'POINTER', (['CFError'], {}), '(CFError)\n', (1184, 1193), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((1335, 1352), 'ctypes.POINTER', 'POINTER', (['c_void_p'], {}), '(c_void_p)\n', (1342, 1352), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((5502, 5566), 'ctypes.c_void_p.in_dll', 'c_void_p.in_dll', (['CoreFoundation', '"""kCFTypeDictionaryKeyCallBacks"""'], {}), "(CoreFoundation, 'kCFTypeDictionaryKeyCallBacks')\n", (5517, 5566), False, 'from ctypes import c_void_p, c_long, c_uint32, c_char_p, c_byte, c_ulong, c_bool\n'), ((5605, 5671), 'ctypes.c_void_p.in_dll', 'c_void_p.in_dll', (['CoreFoundation', '"""kCFTypeDictionaryValueCallBacks"""'], {}), "(CoreFoundation, 'kCFTypeDictionaryValueCallBacks')\n", (5620, 5671), False, 'from ctypes import c_void_p, c_long, c_uint32, c_char_p, c_byte, c_ulong, c_bool\n'), ((5700, 5756), 'ctypes.c_void_p.in_dll', 'c_void_p.in_dll', (['CoreFoundation', '"""kCFTypeArrayCallBacks"""'], {}), "(CoreFoundation, 'kCFTypeArrayCallBacks')\n", (5715, 5756), False, 'from ctypes import c_void_p, c_long, c_uint32, c_char_p, c_byte, c_ulong, c_bool\n'), ((6273, 6295), 'ctypes.cast', 'cast', (['value', 'pointer_p'], {}), '(value, pointer_p)\n', (6277, 6295), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((4228, 4245), 'ctypes.POINTER', 'POINTER', (['c_void_p'], {}), '(c_void_p)\n', (4235, 4245), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((11129, 11156), 'ctypes.string_at', 'string_at', (['start', 'num_bytes'], {}), '(start, num_bytes)\n', (11138, 11156), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((13331, 13346), 'ctypes.c_long', 'c_long', (['integer'], {}), '(integer)\n', (13337, 13346), False, 'from ctypes import c_void_p, c_long, c_uint32, c_char_p, c_byte, c_ulong, c_bool\n'), ((8188, 8201), 'ctypes.byref', 'byref', (['output'], {}), '(output)\n', (8193, 8201), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((13487, 13509), 'ctypes.byref', 'byref', (['integer_as_long'], {}), '(integer_as_long)\n', (13492, 13509), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((12347, 12358), 'ctypes.byref', 'byref', (['keys'], {}), '(keys)\n', (12352, 12358), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((12389, 12402), 'ctypes.byref', 'byref', (['values'], {}), '(values)\n', (12394, 12402), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n'), ((12973, 12986), 'ctypes.byref', 'byref', (['values'], {}), '(values)\n', (12978, 12986), False, 'from ctypes import CDLL, string_at, cast, POINTER, byref\n')]
|
# -*-coding:utf-8 -*
import json
from lib_Partage_BSS import utils
from lib_Partage_BSS.exceptions.NameException import NameException
from lib_Partage_BSS.models.GlobalModel import GlobalModel
class COS(GlobalModel):
"""
Classe représentant une classe de service dans Partage
:ivar _zimbraDumpsterEnabled: ...
:ivar _zimbraExternalSharingEnabled: ...
:ivar _zimbraFeatureBriefcasesEnabled: ...
:ivar _zimbraFeatureCalendarEnabled: ...
:ivar _zimbraFeatureChangePasswordEnabled: ...
:ivar _zimbraFeatureContactsEnabled: ...
:ivar _zimbraFeatureConversationsEnabled: ...
:ivar _zimbraFeatureDistributionListFolderEnabled: ...
:ivar _zimbraFeatureExportFolderEnabled: ...
:ivar _zimbraFeatureFiltersEnabled: ...
:ivar _zimbraFeatureFlaggingEnabled: ...
:ivar _zimbraFeatureGalAutoCompleteEnabled: ...
:ivar _zimbraFeatureGalEnabled: ...
:ivar _zimbraFeatureGroupCalendarEnabled: ...
:ivar _zimbraFeatureHtmlComposeEnabled: ...
:ivar _zimbraFeatureIdentitiesEnabled: ...
:ivar _zimbraFeatureImapDataSourceEnabled: ...
:ivar _zimbraFeatureImportFolderEnabled: ...
:ivar _zimbraFeatureMailEnabled: ...
:ivar _zimbraFeatureMailForwardingEnabled: ...
:ivar _zimbraFeatureMailPriorityEnabled: ...
:ivar _zimbraFeatureMailSendLaterEnabled: ...
:ivar _zimbraFeatureManageZimlets: ...
:ivar _zimbraFeatureMAPIConnectorEnabled: ...
:ivar _zimbraFeatureMobileSyncEnabled: ...
:ivar _zimbraFeatureNewMailNotificationEnabled: ...
:ivar _zimbraFeatureOptionsEnabled: ...
:ivar _zimbraFeatureOutOfOfficeReplyEnabled: ...
:ivar _zimbraFeaturePop3DataSourceEnabled: ...
:ivar _zimbraFeatureReadReceiptsEnabled: ...
:ivar _zimbraFeatureSavedSearchesEnabled: ...
:ivar _zimbraFeatureSharingEnabled: ...
:ivar _zimbraFeatureSkinChangeEnabled: ...
:ivar _zimbraFeatureTaggingEnabled: ...
:ivar _zimbraFeatureTasksEnabled: ...
:ivar _zimbraId: identifiant de la classe de service
:ivar _zimbraImapEnabled: ...
:ivar _zimbraMailQuota: quota de la classe de service
:ivar _zimbraNotes: commentaire
:ivar _zimbraPop3Enabled: ...
:ivar _zimbraPublicSharingEnabled: ...
:ivar _zimbraZimletAvailableZimlets: Array ...
"""
def __init__(self, name):
GlobalModel.__init__(self, name)
for attr in ['zimbraDumpsterEnabled','zimbraExternalSharingEnabled','zimbraFeatureCalendarEnabled','zimbraFeatureChangePasswordEnabled',
'zimbraFeatureContactsEnabled','zimbraFeatureConversationsEnabled','zimbraFeatureDistributionListFolderEnabled',
'zimbraFeatureExportFolderEnabled','zimbraFeatureFiltersEnabled','zimbraFeatureFlaggingEnabled','zimbraFeatureGalAutoCompleteEnabled',
'zimbraFeatureGalEnabled','zimbraFeatureGroupCalendarEnabled','zimbraFeatureHtmlComposeEnabled','zimbraFeatureIdentitiesEnabled',
'zimbraFeatureImapDataSourceEnabled','zimbraFeatureImportFolderEnabled','zimbraFeatureMailEnabled','zimbraFeatureMailForwardingEnabled',
'zimbraFeatureMailPriorityEnabled','zimbraFeatureMailSendLaterEnabled','zimbraFeatureManageZimlets','zimbraFeatureMAPIConnectorEnabled',
'zimbraFeatureMobileSyncEnabled','zimbraFeatureNewMailNotificationEnabled','zimbraFeatureOptionsEnabled','zimbraFeatureOutOfOfficeReplyEnabled',
'zimbraFeaturePop3DataSourceEnabled','zimbraFeatureReadReceiptsEnabled','zimbraFeatureSavedSearchesEnabled','zimbraFeatureSharingEnabled',
'zimbraFeatureSkinChangeEnabled','zimbraFeatureTaggingEnabled','zimbraFeatureTasksEnabled','zimbraId','zimbraImapEnabled','_zimbraMailQuota',
'zimbraNotes','zimbraPop3Enabled','zimbraPublicSharingEnabled','zimbraZimletAvailableZimlets']:
setattr(self, attr, None)
def fillCOS(self, listOfAttr):
if not isinstance(listOfAttr, dict):
raise TypeError
for key, value in listOfAttr.items():
setattr(self, key, value)
|
[
"lib_Partage_BSS.models.GlobalModel.GlobalModel.__init__"
] |
[((2313, 2345), 'lib_Partage_BSS.models.GlobalModel.GlobalModel.__init__', 'GlobalModel.__init__', (['self', 'name'], {}), '(self, name)\n', (2333, 2345), False, 'from lib_Partage_BSS.models.GlobalModel import GlobalModel\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import hilbert
from src import XMitt
plt.ion()
exper_file = 'src/experiments/canope_setup.toml'
xmitt = XMitt(exper_file, 1.)
p_sca = []
#for i in range(3):
xmitt.generate_realization()
xmitt.surface_realization()
p_sca.append(xmitt.ping_surface())
p_sca = np.array(p_sca)
fig, ax = plt.subplots()
ax.plot(xmitt.t_a, 20 * np.log10(np.abs(hilbert(p_sca))).T)
ax.grid()
|
[
"src.XMitt",
"matplotlib.pyplot.ion",
"numpy.array",
"scipy.signal.hilbert",
"matplotlib.pyplot.subplots"
] |
[((108, 117), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (115, 117), True, 'import matplotlib.pyplot as plt\n'), ((176, 198), 'src.XMitt', 'XMitt', (['exper_file', '(1.0)'], {}), '(exper_file, 1.0)\n', (181, 198), False, 'from src import XMitt\n'), ((331, 346), 'numpy.array', 'np.array', (['p_sca'], {}), '(p_sca)\n', (339, 346), True, 'import numpy as np\n'), ((358, 372), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (370, 372), True, 'import matplotlib.pyplot as plt\n'), ((413, 427), 'scipy.signal.hilbert', 'hilbert', (['p_sca'], {}), '(p_sca)\n', (420, 427), False, 'from scipy.signal import hilbert\n')]
|
# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""JSON-related utilities."""
# TODO(kitching): Consider moving this to the cros.factory.utils directory.
import datetime
import inspect
import json
import logging
import traceback
# This is ISO 8601 format of date/time/datetime. If you want to change this,
# you have to also change the FastStringParseDate/Time/Datetime function
# and isoformat() below.
FORMAT_DATETIME = '%Y-%m-%dT%H:%M:%S.%fZ'
FORMAT_DATE = '%Y-%m-%d'
FORMAT_TIME = '%H:%M:%S.%f'
def FastStringParseDate(date_string):
"""Parses the date_string with FORMAT_DATE to datetime.date"""
if len(date_string) != 10 or date_string[4] != '-' or date_string[7] != '-':
raise ValueError('Wrong format string: %s' % date_string)
return datetime.date(
int(date_string[0:4]),
int(date_string[5:7]),
int(date_string[8:10]))
def FastStringParseTime(date_string):
"""Parses the date_string with FORMAT_TIME to datetime.time"""
if (len(date_string) != 15 or date_string[2] != ':' or
date_string[5] != ':' or date_string[8] != '.'):
raise ValueError('Wrong format string: %s' % date_string)
return datetime.time(
int(date_string[0:2]),
int(date_string[3:5]),
int(date_string[6:8]),
int(date_string[9:15]))
def FastStringParseDatetime(date_string):
"""Parses the date_string with FORMAT_DATETIME to datetime.datetime"""
if len(date_string) != 27 or date_string[10] != 'T' or date_string[26] != 'Z':
raise ValueError('Wrong format string: %s' % date_string)
return datetime.datetime.combine(
FastStringParseDate(date_string[0:10]),
FastStringParseTime(date_string[11:26]))
class JSONEncoder(json.JSONEncoder):
def default(self, obj): # pylint: disable=method-hidden, arguments-differ
"""Handler for serializing objects during conversion to JSON.
Outputs datetime, date, and time objects with enough metadata to restore
as their former objects when deserialized.
"""
if isinstance(obj, Serializable):
dct = obj.ToDict()
dct['__type__'] = obj.__class__.__name__
return dct
if isinstance(obj, datetime.datetime):
assert obj.tzinfo is None
# obj.isoformat() will ignore microsecond if obj.microsecond is 0.
return {
'__type__': 'datetime',
'value': obj.isoformat() + (
'.000000Z' if obj.microsecond == 0 else 'Z')}
if isinstance(obj, datetime.date):
return {
'__type__': 'date',
'value': obj.isoformat()}
if isinstance(obj, datetime.time):
assert obj.tzinfo is None
# obj.isoformat() will ignore microsecond if obj.microsecond is 0.
return {
'__type__': 'time',
'value': obj.isoformat() + (
'.000000' if obj.microsecond == 0 else '')}
if inspect.istraceback(obj):
tb = ''.join(traceback.format_tb(obj))
return tb.strip()
if isinstance(obj, Exception):
return 'Exception: %s' % str(obj)
# Base class default method may raise TypeError.
try:
return json.JSONEncoder.default(self, obj)
except TypeError:
return str(obj)
class JSONDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
self._class_registry = kwargs.pop('class_registry', {})
json.JSONDecoder.__init__(
self, object_hook=self.object_hook, *args, **kwargs)
def object_hook(self, dct): # pylint: disable=method-hidden
"""Handler for deserializing objects after conversion to JSON.
Restores datetime, date, and time objects using the metadata output from
matching JSONDecoder class.
"""
if dct.get('__type__') in self._class_registry:
return self._class_registry[dct['__type__']].FromDict(dct)
# TODO(kitching): Remove legacy __datetime__, __date__, and __time__ checks.
if dct.get('__type__') == 'datetime' or '__datetime__' in dct:
try:
return FastStringParseDatetime(dct['value'])
except ValueError:
logging.warning('Fast strptime failed: %s', dct['value'])
return datetime.datetime.strptime(dct['value'], FORMAT_DATETIME)
if dct.get('__type__') == 'date' or '__date__' in dct:
try:
return FastStringParseDate(dct['value'])
except ValueError:
logging.warning('Fast strptime failed: %s', dct['value'])
return datetime.datetime.strptime(dct['value'], FORMAT_DATE).date()
if dct.get('__type__') == 'time' or '__time__' in dct:
try:
return FastStringParseTime(dct['value'])
except ValueError:
logging.warning('Fast strptime failed: %s', dct['value'])
return datetime.datetime.strptime(dct['value'], FORMAT_TIME).time()
return dct
# Class registry maps class name => class reference for Serializable subclasses.
_class_registry = {}
encoder = JSONEncoder()
decoder = JSONDecoder(class_registry=_class_registry)
class SerializableMeta(type):
"""Metaclass to collect Serializable classes into a class registry."""
def __new__(cls, name, bases, class_dict):
mcs = type.__new__(cls, name, bases, class_dict)
if mcs.__name__ in _class_registry:
raise RuntimeError('Multiple serializable classes with name "%s"'
% mcs.__name__)
_class_registry[mcs.__name__] = mcs
return mcs
class Serializable(metaclass=SerializableMeta):
"""Superclass to allow object serialization and deserialization.
Usage (note order of the classes in the inheritance list):
class MyClass(json_utils.Serializable, MyBaseClass):
def __init__(self, my_data):
self.my_data = my_data
def ToDict(self):
return {'my_data': self.my_data}
@classmethod
def FromDict(self, dct):
return MyClass(dct['my_data'])
"""
def Serialize(self):
"""Serializes this object to a JSON string."""
return encoder.encode(self)
@classmethod
def Deserialize(cls, json_string):
"""Deserializes the JSON string into its corresponding Python object."""
ret = decoder.decode(json_string)
if not isinstance(ret, cls):
raise ValueError('Given JSON string does not contain "%s" instance'
% cls.__name__)
return ret
def ToDict(self):
"""Returns the dictionary equivalent of the object."""
raise NotImplementedError
@classmethod
def FromDict(cls, dct):
"""Returns the object from its dictionary equivalent."""
raise NotImplementedError
def Deserialize(json_string):
"""Deserializes any JSON string using json_utils's class registry."""
return decoder.decode(json_string)
def WalkJSONPath(json_path, data):
"""Retrieves part of a Python dictionary by walking a JSONPath-like pattern.
Uses a simplified version of jq's JSON querying language to select
to select information out of the dictionary. The supported operators
are "." and "[]".
Example:
{'hello': {'world': [100, 200]}}
".hello.world[0]" ==> 100
".hello.world[-1]" ==> 200
".hello.world" ==> [100, 200]
"." ==> {'hello': {'world': [100, 200]}}
"""
def ChompNextPart(json_path):
"""Splits the JSON path into the next operator, and everything else."""
dict_operator_pos = json_path.find('.', 1)
list_operator_pos = json_path.find('[', 1)
if dict_operator_pos == -1:
dict_operator_pos = len(json_path)
if list_operator_pos == -1:
list_operator_pos = len(json_path)
cut = min(dict_operator_pos, list_operator_pos)
return json_path[:cut], json_path[cut:]
if not json_path:
return data
current, left = ChompNextPart(json_path)
try:
if current == '.':
return WalkJSONPath(left, data)
if current.startswith('.'):
return WalkJSONPath(left, data[current[1:]])
if current.startswith('['):
return WalkJSONPath(left, data[int(current[1:-1])])
except (KeyError, TypeError):
raise ValueError('Could not access %s' % json_path)
else:
raise ValueError('Invalid syntax found at %s' % json_path)
|
[
"json.JSONDecoder.__init__",
"logging.warning",
"traceback.format_tb",
"datetime.datetime.strptime",
"inspect.istraceback",
"json.JSONEncoder.default"
] |
[((2940, 2964), 'inspect.istraceback', 'inspect.istraceback', (['obj'], {}), '(obj)\n', (2959, 2964), False, 'import inspect\n'), ((3409, 3487), 'json.JSONDecoder.__init__', 'json.JSONDecoder.__init__', (['self', '*args'], {'object_hook': 'self.object_hook'}), '(self, *args, object_hook=self.object_hook, **kwargs)\n', (3434, 3487), False, 'import json\n'), ((3186, 3221), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'obj'], {}), '(self, obj)\n', (3210, 3221), False, 'import json\n'), ((2985, 3009), 'traceback.format_tb', 'traceback.format_tb', (['obj'], {}), '(obj)\n', (3004, 3009), False, 'import traceback\n'), ((4108, 4165), 'logging.warning', 'logging.warning', (['"""Fast strptime failed: %s"""', "dct['value']"], {}), "('Fast strptime failed: %s', dct['value'])\n", (4123, 4165), False, 'import logging\n'), ((4181, 4238), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["dct['value']", 'FORMAT_DATETIME'], {}), "(dct['value'], FORMAT_DATETIME)\n", (4207, 4238), False, 'import datetime\n'), ((4391, 4448), 'logging.warning', 'logging.warning', (['"""Fast strptime failed: %s"""', "dct['value']"], {}), "('Fast strptime failed: %s', dct['value'])\n", (4406, 4448), False, 'import logging\n'), ((4677, 4734), 'logging.warning', 'logging.warning', (['"""Fast strptime failed: %s"""', "dct['value']"], {}), "('Fast strptime failed: %s', dct['value'])\n", (4692, 4734), False, 'import logging\n'), ((4464, 4517), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["dct['value']", 'FORMAT_DATE'], {}), "(dct['value'], FORMAT_DATE)\n", (4490, 4517), False, 'import datetime\n'), ((4750, 4803), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["dct['value']", 'FORMAT_TIME'], {}), "(dct['value'], FORMAT_TIME)\n", (4776, 4803), False, 'import datetime\n')]
|
# -*- codinwig: utf-8 -*-
"""
wikipedia text fetcher
<NAME>
<EMAIL>
(c) PageKicker 2014
"""
import argparse
import codecs
import sys
import time
import wikipedia
parser = argparse.ArgumentParser()
parser.add_argument("--infile", help = "seed file", default = 'test')
parser.add_argument("--lang", help="wiki language bigram", default = 'en')
parser.add_argument("--request_type", help="request type", default = 'sum')
parser.add_argument("--outfile", help = "path to outfile", default = 'outfile')
parser.add_argument("--summary", help = "true or false", action = "store_true")
parser.add_argument("--pagehits", help = "path to list of page hits", default = 'pagehits')
parser.add_argument("--mediawiki_api_url", help = "true or false", default = 'http://en.wikipedia.org/w/api.php')
args = parser.parse_args()
input_file = args.infile
output_file = args.outfile
pagehits = args.pagehits
lang = args.lang
summary = args.summary
request_type = args.request_type
mediawiki_api_url = args.mediawiki_api_url
wikipedia.set_lang(lang)
test = 'mw url is ' + mediawiki_api_url
print(test)
file1 = open(input_file, 'r')
file3 = codecs.open(pagehits,'w','utf-8')
for line in file1:
print(line)
try:
seedhits = wikipedia.search(line)
except:
wikipedia.exceptions.DisambiguationError
wikipedia.exceptions.WikipediaException
continue
for i in seedhits:
file3.write(i+'\n')
file3.close
|
[
"wikipedia.search",
"codecs.open",
"wikipedia.set_lang",
"argparse.ArgumentParser"
] |
[((174, 199), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (197, 199), False, 'import argparse\n'), ((1008, 1032), 'wikipedia.set_lang', 'wikipedia.set_lang', (['lang'], {}), '(lang)\n', (1026, 1032), False, 'import wikipedia\n'), ((1125, 1160), 'codecs.open', 'codecs.open', (['pagehits', '"""w"""', '"""utf-8"""'], {}), "(pagehits, 'w', 'utf-8')\n", (1136, 1160), False, 'import codecs\n'), ((1222, 1244), 'wikipedia.search', 'wikipedia.search', (['line'], {}), '(line)\n', (1238, 1244), False, 'import wikipedia\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# File: w2v_to_numpy.py
# Convert binary W2V file to
import sys, struct
import cPickle, gzip
import numpy as np
def save_numpy_w2v(M, vocab, outfname):
assert M.shape[0] == len(vocab)
f = open(outfname, 'wb')
vocSize, embSize = (M.shape[0], M.shape[1])
print >> f, '{:d} {:d}'.format(M.shape[0], M.shape[1])
for i in xrange(len(vocab)):
f.write('{} '.format(vocab[i].encode('utf-8')))
f.write(struct.pack('{:d}f'.format(embSize), *tuple(M[i])))
f.write('\n')
f.close()
def save_numpy_hidden(H, b, outfname):
assert H.shape[1] == b.shape[0]
f = open(outfname, 'wb')
print >> f, '{:d} {:d}'.format(H.shape[0], H.shape[1])
for i in xrange(H.shape[0]):
f.write(struct.pack('{:d}f'.format(H.shape[1]), *tuple(H[i])))
f.write(struct.pack('{:d}f'.format(b.shape[0]), *tuple(b)))
f.close()
def save_numpy_output(H, b, outfname):
f = open(outfname, 'wb')
print >> f, '{:d}'.format(H.shape[0])
f.write(struct.pack('{:d}f'.format(H.shape[0]), *tuple(H)))
f.write(struct.pack('1f', b))
f.close()
|
[
"struct.pack"
] |
[((1093, 1113), 'struct.pack', 'struct.pack', (['"""1f"""', 'b'], {}), "('1f', b)\n", (1104, 1113), False, 'import sys, struct\n')]
|
import enum
import os
# Used to import null _pylumi name on ReadTheDocs
try:
import _pylumi
from _pylumi import (
UNKNOWN_KEY,
UNKNOWN_BOOL_VALUE,
UNKNOWN_NUMBER_VALUE,
UNKNOWN_STRING_VALUE,
UNKNOWN_ARRAY_VALUE,
UNKNOWN_ASSET_VALUE,
UNKNOWN_ARCHIVE_VALUE,
UNKNOWN_OBJECT_VALUE,
UNKNOWN_NULL_VALUE,
DIFF_ADD,
DIFF_ADD_REPLACE,
DIFF_DELETE,
DIFF_DELETE_REPLACE,
DIFF_UPDATE,
DIFF_UPDATE_REPLACE,
DiffKind,
UnknownValue,
PylumiError,
PylumiGoError,
ContextError,
ProviderError,
)
except ImportError:
if not os.getenv("READTHEDOCS"):
raise
_pylumi = None
UNKNOWN_KEY = None
UNKNOWN_BOOL_VALUE = None
UNKNOWN_NUMBER_VALUE = None
UNKNOWN_STRING_VALUE = None
UNKNOWN_ARRAY_VALUE = None
UNKNOWN_ASSET_VALUE = None
UNKNOWN_ARCHIVE_VALUE = None
UNKNOWN_OBJECT_VALUE = None
UNKNOWN_NULL_VALUE = None
DIFF_ADD = None
DIFF_ADD_REPLACE = None
DIFF_DELETE = None
DIFF_DELETE_REPLACE = None
DIFF_UPDATE = None
DIFF_UPDATE_REPLACE = None
class UnknownValue(enum.Enum):
"""
Enum for the UNKNOWN_*_VALUE values
"""
BOOL = UNKNOWN_BOOL_VALUE
NUMBER = UNKNOWN_NUMBER_VALUE
STRING = UNKNOWN_STRING_VALUE
ARRAY = UNKNOWN_ARRAY_VALUE
ASSET = UNKNOWN_ASSET_VALUE
ARCHIVE = UNKNOWN_ARCHIVE_VALUE
OBJECT = UNKNOWN_OBJECT_VALUE
NULL_ = UNKNOWN_NULL_VALUE
class DiffKind(enum.Enum):
"""
Enum for diff types
"""
ADD = DIFF_ADD
ADD_REPLACE = DIFF_ADD_REPLACE
DELETE = DIFF_DELETE
DELETE_REPLACE = DIFF_DELETE_REPLACE
UPDATE = DIFF_UPDATE
UPDATE_REPLACE = DIFF_UPDATE_REPLACE
class PylumiError(Exception):
"""
Base class for pylumi errors
"""
class PylumiGoError(PylumiError):
"""
Pylumi error originating from go
"""
class ContextError(PylumiGoError):
"""
Error relating to a pylumi context
"""
class ProviderError(PylumiGoError):
"""
Error relating to a pylumi provider
"""
|
[
"os.getenv"
] |
[((690, 714), 'os.getenv', 'os.getenv', (['"""READTHEDOCS"""'], {}), "('READTHEDOCS')\n", (699, 714), False, 'import os\n')]
|
from __future__ import absolute_import
import time
from logging import getLogger
from typing import Union, List, Dict, Any
from unittest import TestCase
from upath import getp
logger = getLogger(__name__)
def getfp(nested_input: Union[List, Dict], path: Union[str, list], separator: str = '.', default_value=None) -> Any:
"""
get value from dict path
:param nested_input
:param path:
:param separator:
:param default_value:
:return:
"""
path_list = path if isinstance(path, list) else path.split(separator)
current_val = nested_input
for key in path_list:
try:
if key.isdecimal() and isinstance(current_val, list):
if int(key) < len(current_val):
key = int(key)
else:
logger.info(
f"Can't retrieve value from list in path {path}, key {key}, list index out of range")
return default_value
current_val = current_val[key]
except KeyError:
logger.info(f"Can't retrieve value from dict in path {path}, key {key}, key not found")
return default_value
except TypeError:
logger.info(f"Path {path} can't be resolved, stuck at key {key}")
return default_value
return current_val
class GetFromPathTests(TestCase):
"""
basic tests
"""
def nest_dict(self, dict_to_nest: dict, level: int, key_prefix: str, path: str):
"""
generate a nested structure
:param dict_to_nest:
:param level:
:param key_prefix:
:param path:
:return:
"""
if level == 0:
return path
dict_to_nest[key_prefix + str(level)] = {}
path += "." + key_prefix + str(level) if path else key_prefix + str(level)
return self.nest_dict(dict_to_nest[key_prefix + str(level)], level - 1, key_prefix, path)
def test_both_versions_give_the_same_results_and_display_speed_comparison(self):
nested_dict = {}
path = self.nest_dict(nested_dict, 100, "path", "")
now = time.time()
result_c = getp(nested_dict, path)
c_ext_time = time.time() - now
now = time.time()
result_python = getfp(nested_dict, path)
python_time = time.time() - now
logger.error(f"C extension is {python_time / c_ext_time} times faster")
self.assertEqual(result_python, result_c)
def test_get_integer_from_path(self):
result_c = getp({"test": 1}, "test")
self.assertEqual(result_c, 1)
def test_get_from_path_with_list(self):
result_c = getp({"test": {"test2": [{"test3": 1}]}}, "test.test2.0.test3")
self.assertEqual(result_c, 1)
def test_get_from_path_with_different_separator(self):
result_c = getp({"test": {"test.2": [{"test3": 1}]}}, "test/test.2/0/test3", "/")
self.assertEqual(result_c, 1)
def test_path_not_found_set_default(self):
result_c = getp({"test": {"test2": [{"test3": 7}]}}, "test/test.2/0/test3", "/", 1)
self.assertEqual(result_c, 1)
def test_no_arguments_raises_exception(self):
with self.assertRaises(Exception):
getp()
def test_one_argument_raises_exception(self):
with self.assertRaises(Exception):
getp({"test": 2})
|
[
"upath.getp",
"logging.getLogger",
"time.time"
] |
[((188, 207), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (197, 207), False, 'from logging import getLogger\n'), ((2135, 2146), 'time.time', 'time.time', ([], {}), '()\n', (2144, 2146), False, 'import time\n'), ((2166, 2189), 'upath.getp', 'getp', (['nested_dict', 'path'], {}), '(nested_dict, path)\n', (2170, 2189), False, 'from upath import getp\n'), ((2244, 2255), 'time.time', 'time.time', ([], {}), '()\n', (2253, 2255), False, 'import time\n'), ((2538, 2563), 'upath.getp', 'getp', (["{'test': 1}", '"""test"""'], {}), "({'test': 1}, 'test')\n", (2542, 2563), False, 'from upath import getp\n'), ((2666, 2729), 'upath.getp', 'getp', (["{'test': {'test2': [{'test3': 1}]}}", '"""test.test2.0.test3"""'], {}), "({'test': {'test2': [{'test3': 1}]}}, 'test.test2.0.test3')\n", (2670, 2729), False, 'from upath import getp\n'), ((2847, 2917), 'upath.getp', 'getp', (["{'test': {'test.2': [{'test3': 1}]}}", '"""test/test.2/0/test3"""', '"""/"""'], {}), "({'test': {'test.2': [{'test3': 1}]}}, 'test/test.2/0/test3', '/')\n", (2851, 2917), False, 'from upath import getp\n'), ((3023, 3095), 'upath.getp', 'getp', (["{'test': {'test2': [{'test3': 7}]}}", '"""test/test.2/0/test3"""', '"""/"""', '(1)'], {}), "({'test': {'test2': [{'test3': 7}]}}, 'test/test.2/0/test3', '/', 1)\n", (3027, 3095), False, 'from upath import getp\n'), ((2211, 2222), 'time.time', 'time.time', ([], {}), '()\n', (2220, 2222), False, 'import time\n'), ((2327, 2338), 'time.time', 'time.time', ([], {}), '()\n', (2336, 2338), False, 'import time\n'), ((3240, 3246), 'upath.getp', 'getp', ([], {}), '()\n', (3244, 3246), False, 'from upath import getp\n'), ((3353, 3370), 'upath.getp', 'getp', (["{'test': 2}"], {}), "({'test': 2})\n", (3357, 3370), False, 'from upath import getp\n')]
|
from django.db import models
from django.utils.crypto import get_random_string
import datetime
# Create your models here.
def user_profile_pic_location(instance, filename):
return "users/%s/%s" % (instance.usr_name, filename)
class AppUsers(models.Model):
usr_id = models.AutoField(primary_key=True)
usr_name = models.CharField(max_length=200, unique=True)
usr_password = models.CharField(max_length=200)
usr_email = models.CharField(max_length=200)
phone_number = models.CharField(max_length=20)
birth_date = models.DateField()
profile_picture = models.ImageField(null=True, upload_to=user_profile_pic_location)
def __str__(self):
return str(self.usr_name)
@staticmethod
def get_user(user_name):
users = AppUsers.objects.filter(usr_name=user_name)
if users.count() == 0:
return None
return users[0]
@staticmethod
def get_picture(picture):
try:
url = picture.url
return url
except Exception:
return ""
class FriendList(models.Model):
friendship_owner = models.ForeignKey(
AppUsers,
related_name='current_friendships',
on_delete=models.CASCADE
)
friend_with = models.ForeignKey(
AppUsers,
related_name='involved_in_friendships',
on_delete=models.CASCADE
)
def __str__(self):
return str(self.friendship_owner)+" is friend with "+str(self.friend_with)
'''
In the moment a user performs a successful log in
a session will be created for that user, being
granted a unique session token
'''
class UserSession(models.Model):
session_id = models.AutoField(primary_key=True)
user = models.ForeignKey(
AppUsers,
related_name='current_sessions',
on_delete=models.CASCADE
)
started_at = models.DateTimeField()
token = models.CharField(max_length=400, unique=True)
def __str__(self):
return "Started at: "+str(self.started_at)+" with token: "+str(self.token)
@staticmethod
def generate_session_token():
while True:
generated_token = get_random_string(15)
if UserSession.objects.filter(token=generated_token):
continue
return generated_token
@staticmethod
def get_user_session(user):
sessions = UserSession.objects.filter(user=user)
if sessions.count() == 0:
return None
return sessions[0]
@staticmethod
def create_session(user):
user_session = UserSession()
user_session.user = user
user_session.started_at = datetime.datetime.now()
user_session.token = user_session.generate_session_token()
session_token = user_session.token
user_session.save()
return session_token
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.ImageField",
"django.db.models.DateField",
"django.db.models.DateTimeField",
"django.utils.crypto.get_random_string",
"datetime.datetime.now"
] |
[((277, 311), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (293, 311), False, 'from django.db import models\n'), ((327, 372), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'unique': '(True)'}), '(max_length=200, unique=True)\n', (343, 372), False, 'from django.db import models\n'), ((392, 424), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (408, 424), False, 'from django.db import models\n'), ((441, 473), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (457, 473), False, 'from django.db import models\n'), ((493, 524), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (509, 524), False, 'from django.db import models\n'), ((542, 560), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (558, 560), False, 'from django.db import models\n'), ((583, 648), 'django.db.models.ImageField', 'models.ImageField', ([], {'null': '(True)', 'upload_to': 'user_profile_pic_location'}), '(null=True, upload_to=user_profile_pic_location)\n', (600, 648), False, 'from django.db import models\n'), ((1114, 1208), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AppUsers'], {'related_name': '"""current_friendships"""', 'on_delete': 'models.CASCADE'}), "(AppUsers, related_name='current_friendships', on_delete=\n models.CASCADE)\n", (1131, 1208), False, 'from django.db import models\n'), ((1252, 1349), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AppUsers'], {'related_name': '"""involved_in_friendships"""', 'on_delete': 'models.CASCADE'}), "(AppUsers, related_name='involved_in_friendships',\n on_delete=models.CASCADE)\n", (1269, 1349), False, 'from django.db import models\n'), ((1687, 1721), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1703, 1721), False, 'from django.db import models\n'), ((1733, 1824), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AppUsers'], {'related_name': '"""current_sessions"""', 'on_delete': 'models.CASCADE'}), "(AppUsers, related_name='current_sessions', on_delete=\n models.CASCADE)\n", (1750, 1824), False, 'from django.db import models\n'), ((1867, 1889), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (1887, 1889), False, 'from django.db import models\n'), ((1902, 1947), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(400)', 'unique': '(True)'}), '(max_length=400, unique=True)\n', (1918, 1947), False, 'from django.db import models\n'), ((2652, 2675), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2673, 2675), False, 'import datetime\n'), ((2158, 2179), 'django.utils.crypto.get_random_string', 'get_random_string', (['(15)'], {}), '(15)\n', (2175, 2179), False, 'from django.utils.crypto import get_random_string\n')]
|
import sys
import mpi
from math import *
from io import BytesIO as StringIO
from SpheralCompiledPackages import *
from MaterialPropertiesLib import SpheralMaterialPropertiesLib
from spheralDimensions import spheralDimensions
dims = spheralDimensions()
#-------------------------------------------------------------------------------
# Define help strings for our constructors.
#-------------------------------------------------------------------------------
expectedUsageString = """
IvanoviSALEDamageModel is constructed with the following arguments (any
default values listed in parens):
materialName : (optional) label for the material in data base to
lookup the Weibull parameters
units : (optional) scale kWeibull/mWeibull lookup from
material name to given units
nodeList : (required) the FluidNodeList this model should be
applied to
kernel : (required) the interpolation kernel to use
minPlasticFailure : min plastic strain for shearing plastic failure to
start
plasticFailurePressureSlope : slope for plastic strain shearing failure law
plasticFailurePressureOffset : intercept for plastic strain shearing failure
law
tensileFailureStress : threshold stress for tensile failure to start
crackGrowthMultiplier : (0.4) crack growth rate in units of longitudinal
sound speed
damageCouplingAlgorithm : (DirectDamage) how should damaged points couple
criticalDamageThreshold : (4.0) prevent any nodes where Trace(D_i) exceeds
criticalDamageThreshold from setting timestep
mask : (1 on all points) a field of flags: a node with
zero implies no flaws (and therefore no damage)
on that point
"""
#-------------------------------------------------------------------------------
# IvanoviSALEDamageModel
#-------------------------------------------------------------------------------
IvanoviSALEDamageModelGenString = """
class IvanoviSALEDamageModel%(dim)s(CXXIvanoviSALEDamageModel%(dim)s):
'''%(help)s'''
def __init__(self, *args_in, **kwargs):
args = list(args_in)
# The material library values are in CGS, so build a units object for
# conversions.
cgs = PhysicalConstants(0.01, # unit length in m
0.001, # unit mass in kg
1.0) # unit time in sec
# Arguments needed to build the damage model.
damage_kwargs = {"nodeList" : None,
"kernel" : None,
"minPlasticFailure" : None,
"plasticFailurePressureSlope" : None,
"plasticFailurePressureOffset" : None,
"tensileFailureStress" : None,
"crackGrowthMultiplier" : 0.4,
"damageCouplingAlgorithm" : DirectDamage,
"criticalDamageThreshold" : 4.0,
"mask" : None}
# Extra arguments for our convenient constructor.
convenient_kwargs = {"materialName" : None,
"units" : None}
# Check the input arguments.
validKeys = damage_kwargs.keys() + convenient_kwargs.keys()
for argname in kwargs:
if not argname in validKeys:
raise ValueError, ("ERROR: argument %%s not a valid option.\\n" %% argname +
expectedUsageString)
# Did the user try any convenient constructor operations?
if ((len(args) > 0 and type(args[0]) == str) or
"materialName" in kwargs):
if len(args) > 0 and type(args[0]) == str:
materialName = args[0]
del args[0]
else:
materialName = kwargs["materialName"]
del kwargs["materialName"]
if not materialName in SpheralMaterialPropertiesLib:
raise ValueError, (("ERROR: material %%s is not in the library of material values.\\n" %% materialName) +
expectedUsageString)
matprops = SpheralMaterialPropertiesLib[materialName]
if not ("IvanovDamageModel" in matprops):
raise ValueError, (("ERROR : material %%s does not provide the required values for the Ivanov damage model.\\n" %% materialName) +
expectedUsageString)
damage_kwargs["minPlasticFailure"] = matprops["IvanovDamageModel"]["epsfb"]
damage_kwargs["plasticFailurePressureSlope"] = matprops["IvanovDamageModel"]["B"]
damage_kwargs["plasticFailurePressureOffset"] = matprops["IvanovDamageModel"]["Pc"]
damage_kwargs["tensileFailureStress"] = matprops["IvanovDamageModel"]["Yt"]
# Any attempt to specify units?
units = None
if "units" in kwargs:
units = kwargs["units"]
del kwargs["units"]
elif len(args) > 1 and isinstance(args[1], PhysicalConstants):
units = args[1]
del args[1]
if units:
lconv = cgs.unitLengthMeters / units.unitLengthMeters
mconv = cgs.unitMassKg / units.unitMassKg
tconv = cgs.unitTimeSec / units.unitTimeSec
Pconv = mconv/(lconv*tconv*tconv)
damage_kwargs["plasticFailurePressureSlope"] /= Pconv
damage_kwargs["plasticFailurePressureOffset"] *= Pconv
damage_kwargs["tensileFailureStress"] *= Pconv
# Process remaining user arguments.
kwarg_order = ["nodeList",
"kernel",
"minPlasticFailure",
"plasticFailurePressureSlope",
"plasticFailurePressureOffset",
"tensileFailureStress",
"crackGrowthMultiplier",
"damageCouplingAlgorithm",
"damageInCompression",
"criticalDamageThreshold",
"mask"]
for iarg, argval in enumerate(args):
damage_kwargs[kward_order[iarg]] = argval
# Process any keyword arguments. Note we already removed any deprecated keywords.
for argname in kwargs:
if argname in damage_kwargs:
damage_kwargs[argname] = kwargs[argname]
else:
raise ValueError, (("ERROR : unknown kwarg %%s.\\n" %% argname) + expectedUsageString)
# If no mask was provided, deafult to all points active
if damage_kwargs["mask"] is None:
damage_kwargs["mask"] = IntField%(dim)s("damage mask", damage_kwargs["nodeList"], 1)
# Build the damage model.
CXXIvanoviSALEDamageModel%(dim)s.__init__(self, **damage_kwargs)
return
"""
#-------------------------------------------------------------------------------
# Make 'em
#-------------------------------------------------------------------------------
for dim in dims:
exec("from SpheralCompiledPackages import IvanoviSALEDamageModel%id as CXXIvanoviSALEDamageModel%id" % (dim, dim))
# Capture the full class help string
save_stdout = sys.stdout
ss = StringIO()
sys.stdout = ss
eval("help(CXXIvanoviSALEDamageModel%id)" % dim)
sys.stdout = save_stdout
ss.seek(0)
class_help = ss.read()
exec(IvanoviSALEDamageModelGenString % {"dim": "%id" % dim,
"help": (expectedUsageString + "\n\n Class help:\n\n" + class_help)})
|
[
"spheralDimensions.spheralDimensions",
"io.BytesIO"
] |
[((233, 252), 'spheralDimensions.spheralDimensions', 'spheralDimensions', ([], {}), '()\n', (250, 252), False, 'from spheralDimensions import spheralDimensions\n'), ((7817, 7827), 'io.BytesIO', 'StringIO', ([], {}), '()\n', (7825, 7827), True, 'from io import BytesIO as StringIO\n')]
|
# -*- coding: utf-8 -*-
import os
from simmate.workflow_engine import ErrorHandler
from simmate.calculators.vasp.inputs import Incar
class LongVector(ErrorHandler):
"""
This a simple error handler that is active when VASP finds an issue with the
rotation matrix.
"""
is_monitor = True
filename_to_check = "vasp.out"
possible_error_messages = [
"One of the lattice vectors is very long (>50 A), but AMIN"
]
def correct(self, directory: str) -> str:
# load the INCAR file to view the current settings
incar_filename = os.path.join(directory, "INCAR")
incar = Incar.from_file(incar_filename)
# make the fix
incar["AMIN"] = 0.01
correction = "switched AMIN to 0.01"
# rewrite the INCAR with new settings
incar.to_file(incar_filename)
return correction
|
[
"simmate.calculators.vasp.inputs.Incar.from_file",
"os.path.join"
] |
[((583, 615), 'os.path.join', 'os.path.join', (['directory', '"""INCAR"""'], {}), "(directory, 'INCAR')\n", (595, 615), False, 'import os\n'), ((632, 663), 'simmate.calculators.vasp.inputs.Incar.from_file', 'Incar.from_file', (['incar_filename'], {}), '(incar_filename)\n', (647, 663), False, 'from simmate.calculators.vasp.inputs import Incar\n')]
|
import copy
import json
import os
from flask import Blueprint
from flask import redirect
from flask import render_template
from flask import url_for
from shopyo.api.assets import get_static
class ModuleHelp:
def __init__(self, dunderfile, dundername):
self.dirpath = os.path.dirname(os.path.abspath(dunderfile))
self.info = {}
self._context = {}
with open(self.dirpath + "/info.json") as f:
self.info = json.load(f)
self.blueprint_str = "{}_blueprint".format(self.info["module_name"])
self.blueprint = Blueprint(
"{}".format(self.info["module_name"]),
dundername,
template_folder="templates",
url_prefix=self.info["url_prefix"],
)
self._context.update({"info": self.info})
def render(self, filename, **kwargs):
"""
renders file.html found in module/templates/module/file.html
"""
return render_template(
"{}/{}".format(self.info["module_name"], filename), **kwargs
)
def redirect_url(self, url, **kwargs):
return redirect(url_for(url, **kwargs))
def context(self):
return copy.deepcopy(self._context)
def method(self, methodname):
return "{}.{}".format(self.info["module_name"], methodname)
def get_self_static(self, filename):
module_parent = os.path.dirname(self.dirpath)
module_folder = self.dirpath
module_parent = os.path.normpath(module_parent)
module_parent = os.path.basename(module_parent)
module_folder = os.path.normpath(module_folder)
module_folder = os.path.basename(module_folder)
print(module_parent, module_parent)
if module_parent.startswith("box__"):
boxormodule = f"{module_parent}/{module_folder}"
else:
boxormodule = module_folder
return get_static(boxormodule=boxormodule, filename=filename)
|
[
"copy.deepcopy",
"os.path.abspath",
"json.load",
"os.path.basename",
"shopyo.api.assets.get_static",
"os.path.dirname",
"flask.url_for",
"os.path.normpath"
] |
[((1190, 1218), 'copy.deepcopy', 'copy.deepcopy', (['self._context'], {}), '(self._context)\n', (1203, 1218), False, 'import copy\n'), ((1388, 1417), 'os.path.dirname', 'os.path.dirname', (['self.dirpath'], {}), '(self.dirpath)\n', (1403, 1417), False, 'import os\n'), ((1480, 1511), 'os.path.normpath', 'os.path.normpath', (['module_parent'], {}), '(module_parent)\n', (1496, 1511), False, 'import os\n'), ((1536, 1567), 'os.path.basename', 'os.path.basename', (['module_parent'], {}), '(module_parent)\n', (1552, 1567), False, 'import os\n'), ((1593, 1624), 'os.path.normpath', 'os.path.normpath', (['module_folder'], {}), '(module_folder)\n', (1609, 1624), False, 'import os\n'), ((1649, 1680), 'os.path.basename', 'os.path.basename', (['module_folder'], {}), '(module_folder)\n', (1665, 1680), False, 'import os\n'), ((1902, 1956), 'shopyo.api.assets.get_static', 'get_static', ([], {'boxormodule': 'boxormodule', 'filename': 'filename'}), '(boxormodule=boxormodule, filename=filename)\n', (1912, 1956), False, 'from shopyo.api.assets import get_static\n'), ((299, 326), 'os.path.abspath', 'os.path.abspath', (['dunderfile'], {}), '(dunderfile)\n', (314, 326), False, 'import os\n'), ((456, 468), 'json.load', 'json.load', (['f'], {}), '(f)\n', (465, 468), False, 'import json\n'), ((1127, 1149), 'flask.url_for', 'url_for', (['url'], {}), '(url, **kwargs)\n', (1134, 1149), False, 'from flask import url_for\n')]
|
"""This module provides admin list and access level information."""
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division
)
from sopel.module import commands
@commands('botadmins', 'admins')
def admin_list(bot, trigger):
"""Provide the list of bot admins."""
admins = bot.config.core.admin_accounts
if len(admins) == 0:
bot.reply('There are no bot admins')
return
admins = ['You' if admin == trigger.account else admin for admin in admins]
admins_str = ', '.join(admins[:-1]) + ' and ' + admins[-1]
bot.reply('The bot\'s admins are: ' + admins_str)
@commands('accesslevel', 'access')
def access_level(bot, trigger):
"""Tell user what access level they have for the bot."""
if trigger.account == bot.config.core.owner_account:
level = 'Owner'
elif trigger.account in bot.config.core.admin_accounts:
level = 'Admin'
else:
level = 'User'
bot.say('The access level for {} is {}.'.format(trigger.nick, level))
|
[
"sopel.module.commands"
] |
[((210, 241), 'sopel.module.commands', 'commands', (['"""botadmins"""', '"""admins"""'], {}), "('botadmins', 'admins')\n", (218, 241), False, 'from sopel.module import commands\n'), ((644, 677), 'sopel.module.commands', 'commands', (['"""accesslevel"""', '"""access"""'], {}), "('accesslevel', 'access')\n", (652, 677), False, 'from sopel.module import commands\n')]
|
import re
import json
import os
from unidecode import unidecode
from malaya.text.tatabahasa import permulaan, hujung
from malaya.text.rules import rules_normalizer
from malaya.function import (
check_file,
load_graph,
generate_session,
nodes_session,
)
from malaya.text.function import pad_sentence_batch, case_of
from malaya.text.regex import _expressions, _money, _date
from malaya.model.abstract import Abstract
from malaya.preprocessing import Tokenizer
from malaya.text.bpe import YTTMEncoder
from malaya.path import STEMMER_VOCAB
from herpetologist import check_type
def _classification_textcleaning_stemmer(string, stemmer):
string = re.sub(
'http\\S+|www.\\S+',
'',
' '.join(
[i for i in string.split() if i.find('#') < 0 and i.find('@') < 0]
),
)
string = unidecode(string).replace('.', ' . ').replace(',', ' , ')
string = re.sub('[^A-Za-z ]+', ' ', string)
string = re.sub(r'[ ]+', ' ', string.lower()).strip()
string = [rules_normalizer.get(w, w) for w in string.split()]
string = [(stemmer.stem(word), word) for word in string]
return ' '.join([word[0] for word in string if len(word[0]) > 1])
class Sastrawi:
def __init__(self, factory):
self.sastrawi_stemmer = factory.create_stemmer()
@check_type
def stem(self, string: str):
return self.sastrawi_stemmer.stem(string)
class Naive:
def __init__(self, tokenizer):
self._tokenizer = tokenizer
def stem_word(self, word):
hujung_result = [v for k, v in hujung.items() if word.endswith(k)]
if len(hujung_result):
hujung_result = max(hujung_result, key=len)
if len(hujung_result):
word = word[: -len(hujung_result)]
permulaan_result = [
v for k, v in permulaan.items() if word.startswith(k)
]
if len(permulaan_result):
permulaan_result = max(permulaan_result, key=len)
if len(permulaan_result):
word = word[len(permulaan_result):]
return word
@check_type
def stem(self, string: str):
result = []
tokenized = self._tokenizer(string)
for no, word in enumerate(tokenized):
if word in '~@#$%^&*()_+{}|[:"\'];<>,.?/-':
result.append(word)
elif (
re.findall(_money, word.lower())
or re.findall(_date, word.lower())
or re.findall(_expressions['time'], word.lower())
or re.findall(_expressions['hashtag'], word.lower())
or re.findall(_expressions['url'], word.lower())
or re.findall(_expressions['user'], word.lower())
):
result.append(word)
else:
result.append(self.stem_word(word))
return ' '.join(result)
class DeepStemmer(Abstract):
def __init__(
self, input_nodes, output_nodes, sess, bpe, tokenizer
):
self._input_nodes = input_nodes
self._output_nodes = output_nodes
self._sess = sess
self._bpe = bpe
self._tokenizer = tokenizer
@check_type
def stem(self, string: str, beam_search: bool = False):
"""
Stem a string, this also include lemmatization.
Parameters
----------
string : str
beam_search : bool, (optional=False)
If True, use beam search decoder, else use greedy decoder.
Returns
-------
result: str
"""
tokenized = self._tokenizer(string)
result, batch, actual, mapping = [], [], [], {}
for no, word in enumerate(tokenized):
if word in '~@#$%^&*()_+{}|[:"\'];<>,.?/-':
result.append(word)
elif (
re.findall(_money, word.lower())
or re.findall(_date, word.lower())
or re.findall(_expressions['time'], word.lower())
or re.findall(_expressions['hashtag'], word.lower())
or re.findall(_expressions['url'], word.lower())
or re.findall(_expressions['user'], word.lower())
):
result.append(word)
else:
mapping[len(batch)] = no
result.append('REPLACE-ME')
actual.append(word)
batch.append(word.lower())
if len(batch):
batch = self._bpe.bpe.encode(batch, output_type=self._bpe.mode)
batch = [i + [1] for i in batch]
batch = pad_sentence_batch(batch, 0)[0]
if beam_search:
output = 'beam'
else:
output = 'greedy'
r = self._execute(
inputs=[batch],
input_labels=['Placeholder'],
output_labels=[output],
)
output = r[output].tolist()
for no, o in enumerate(output):
predicted = list(dict.fromkeys(o))
predicted = (
self._bpe.bpe.decode(predicted)[0]
.replace('<EOS>', '')
.replace('<PAD>', '')
)
predicted = case_of(actual[no])(predicted)
result[mapping[no]] = predicted
return ' '.join(result)
@check_type
def naive():
"""
Load stemming model using startswith and endswith naively using regex patterns.
Returns
-------
result : malaya.stem.Naive class
"""
tokenizer = Tokenizer().tokenize
return Naive(tokenizer=tokenizer)
@check_type
def sastrawi():
"""
Load stemming model using Sastrawi, this also include lemmatization.
Returns
-------
result: malaya.stem.Sastrawi class
"""
try:
from Sastrawi.Stemmer.StemmerFactory import StemmerFactory
except BaseException:
raise ModuleNotFoundError(
'PySastrawi not installed. Please install it by `pip install PySastrawi` and try again.'
)
return Sastrawi(StemmerFactory())
@check_type
def deep_model(quantized: bool = False, **kwargs):
"""
Load LSTM + Bahdanau Attention stemming model, this also include lemmatization.
Original size 41.6MB, quantized size 10.6MB .
Parameters
----------
quantized : bool, optional (default=False)
if True, will load 8-bit quantized model.
Quantized model not necessary faster, totally depends on the machine.
Returns
-------
result: malaya.stem.DeepStemmer class
"""
path = check_file(
file='lstm-bahdanau',
module='stem',
keys={'model': 'model.pb', 'vocab': STEMMER_VOCAB},
quantized=quantized,
**kwargs,
)
g = load_graph(path['model'], **kwargs)
inputs = ['Placeholder']
outputs = []
bpe = YTTMEncoder(vocab_file=path['vocab'], id_mode=True)
input_nodes, output_nodes = nodes_session(
g,
inputs,
outputs,
extra={
'greedy': 'import/decode_1/greedy:0',
'beam': 'import/decode_2/beam:0',
},
)
tokenizer = Tokenizer().tokenize
return DeepStemmer(
input_nodes=input_nodes,
output_nodes=output_nodes,
sess=generate_session(graph=g, **kwargs),
bpe=bpe,
tokenizer=tokenizer,
)
|
[
"malaya.text.function.case_of",
"malaya.text.function.pad_sentence_batch",
"unidecode.unidecode",
"malaya.preprocessing.Tokenizer",
"malaya.function.nodes_session",
"malaya.text.tatabahasa.hujung.items",
"malaya.function.check_file",
"malaya.text.tatabahasa.permulaan.items",
"malaya.text.rules.rules_normalizer.get",
"malaya.text.bpe.YTTMEncoder",
"malaya.function.generate_session",
"Sastrawi.Stemmer.StemmerFactory.StemmerFactory",
"re.sub",
"malaya.function.load_graph"
] |
[((910, 944), 're.sub', 're.sub', (['"""[^A-Za-z ]+"""', '""" """', 'string'], {}), "('[^A-Za-z ]+', ' ', string)\n", (916, 944), False, 'import re\n'), ((6577, 6711), 'malaya.function.check_file', 'check_file', ([], {'file': '"""lstm-bahdanau"""', 'module': '"""stem"""', 'keys': "{'model': 'model.pb', 'vocab': STEMMER_VOCAB}", 'quantized': 'quantized'}), "(file='lstm-bahdanau', module='stem', keys={'model': 'model.pb',\n 'vocab': STEMMER_VOCAB}, quantized=quantized, **kwargs)\n", (6587, 6711), False, 'from malaya.function import check_file, load_graph, generate_session, nodes_session\n'), ((6763, 6798), 'malaya.function.load_graph', 'load_graph', (["path['model']"], {}), "(path['model'], **kwargs)\n", (6773, 6798), False, 'from malaya.function import check_file, load_graph, generate_session, nodes_session\n'), ((6855, 6906), 'malaya.text.bpe.YTTMEncoder', 'YTTMEncoder', ([], {'vocab_file': "path['vocab']", 'id_mode': '(True)'}), "(vocab_file=path['vocab'], id_mode=True)\n", (6866, 6906), False, 'from malaya.text.bpe import YTTMEncoder\n'), ((6939, 7056), 'malaya.function.nodes_session', 'nodes_session', (['g', 'inputs', 'outputs'], {'extra': "{'greedy': 'import/decode_1/greedy:0', 'beam': 'import/decode_2/beam:0'}"}), "(g, inputs, outputs, extra={'greedy':\n 'import/decode_1/greedy:0', 'beam': 'import/decode_2/beam:0'})\n", (6952, 7056), False, 'from malaya.function import check_file, load_graph, generate_session, nodes_session\n'), ((1017, 1043), 'malaya.text.rules.rules_normalizer.get', 'rules_normalizer.get', (['w', 'w'], {}), '(w, w)\n', (1037, 1043), False, 'from malaya.text.rules import rules_normalizer\n'), ((5548, 5559), 'malaya.preprocessing.Tokenizer', 'Tokenizer', ([], {}), '()\n', (5557, 5559), False, 'from malaya.preprocessing import Tokenizer\n'), ((6059, 6075), 'Sastrawi.Stemmer.StemmerFactory.StemmerFactory', 'StemmerFactory', ([], {}), '()\n', (6073, 6075), False, 'from Sastrawi.Stemmer.StemmerFactory import StemmerFactory\n'), ((7144, 7155), 'malaya.preprocessing.Tokenizer', 'Tokenizer', ([], {}), '()\n', (7153, 7155), False, 'from malaya.preprocessing import Tokenizer\n'), ((7271, 7306), 'malaya.function.generate_session', 'generate_session', ([], {'graph': 'g'}), '(graph=g, **kwargs)\n', (7287, 7306), False, 'from malaya.function import check_file, load_graph, generate_session, nodes_session\n'), ((1565, 1579), 'malaya.text.tatabahasa.hujung.items', 'hujung.items', ([], {}), '()\n', (1577, 1579), False, 'from malaya.text.tatabahasa import permulaan, hujung\n'), ((1829, 1846), 'malaya.text.tatabahasa.permulaan.items', 'permulaan.items', ([], {}), '()\n', (1844, 1846), False, 'from malaya.text.tatabahasa import permulaan, hujung\n'), ((4571, 4599), 'malaya.text.function.pad_sentence_batch', 'pad_sentence_batch', (['batch', '(0)'], {}), '(batch, 0)\n', (4589, 4599), False, 'from malaya.text.function import pad_sentence_batch, case_of\n'), ((839, 856), 'unidecode.unidecode', 'unidecode', (['string'], {}), '(string)\n', (848, 856), False, 'from unidecode import unidecode\n'), ((5231, 5250), 'malaya.text.function.case_of', 'case_of', (['actual[no]'], {}), '(actual[no])\n', (5238, 5250), False, 'from malaya.text.function import pad_sentence_batch, case_of\n')]
|
import json
import gramex.cache
import pandas as pd
from . import TestGramex
from gramex.http import FOUND
from pandas.util.testing import assert_frame_equal as afe
class TestFunctionHandler(TestGramex):
def test_args(self):
etag = {'headers': {'Etag': True}}
text = '{"args": [0, 1], "kwargs": {"a": "a", "b": "b"}}'
self.check('/func/args', text=text, **etag)
self.check('/func/args-split', text=text, **etag)
text = '{"args": ["abc", 1], "kwargs": {"a": "abc", "b": 1}}'
self.check('/func/args-variable', text=text, **etag)
self.check('/func/handler', text='{"args": ["Handler"], "kwargs": {}', **etag)
self.check('/func/handler-null', text='{"args": [], "kwargs": {}', **etag)
self.check('/func/composite',
text='{"args": [0, "Handler"], "kwargs": {"a": "a", "handler": "Handler"}}',
**etag)
text = '{"args": [0, "Handler"], "kwargs": {"a": {"b": 1}, "handler": "Handler"}}'
self.check('/func/compositenested', text=text, **etag)
self.check('/func/compositenested-split', text=text, **etag)
self.check('/func/compositenested-variable', text=text, **etag)
self.check('/func/dumpx?x=1&x=2', text='{"args": [["1", "2"]], "kwargs": {}}', **etag)
def test_async(self):
etag = {'headers': {'Etag': True}}
text = '{"args": [0, 1], "kwargs": {"a": "a", "b": "b"}}'
self.check('/func/async/args', text=text, **etag)
self.check('/func/async/args-split', text=text, **etag)
self.check('/func/async/http', text='{"args": [["1", "2"]], "kwargs": {}}', **etag)
self.check('/func/async/http2',
text='{"args": [["1"]], "kwargs": {}}{"args": [["2"]], "kwargs": {}}', **etag)
self.check('/func/async/calc',
text='[[250,250,250],[250,250,250],[250,250,250],[250,250,250]]', **etag)
def test_json(self):
self.check('/func/numpytypes')
def test_iterator(self):
no_etag = {'headers': {'Etag': False}}
self.check('/func/iterator?x=1&x=2&x=3', text='123', **no_etag)
self.check('/func/iterator/async?x=1&x=2&x=3', text='123', **no_etag)
def test_redirect(self):
r = self.get('/func/redirect', allow_redirects=False)
self.assertEqual(r.headers.get('Location'), '/dir/index/')
self.assertEqual(r.headers.get('Increment'), '1')
r = self.get('/func/redirect?next=/abc', allow_redirects=False)
self.assertEqual(r.headers.get('Location'), '/abc')
self.assertEqual(r.headers.get('Increment'), '2')
r = self.get('/func/redirect', headers={'NEXT': '/abc'}, allow_redirects=False)
self.assertEqual(r.headers.get('Location'), '/abc')
self.assertEqual(r.headers.get('Increment'), '3')
r = self.get('/func/redirect?next=/def', headers={'NEXT': '/abc'}, allow_redirects=False)
self.assertEqual(r.headers.get('Location'), '/def')
self.assertEqual(r.headers.get('Increment'), '4')
def test_path_args(self):
self.check('/func/path_args/高/兴', text='["\\u9ad8", "\\u5174"]')
def test_methods(self):
self.check('/func/methods', method='get', code=405)
self.check('/func/methods', method='delete', code=405)
for method in ['post', 'put']:
r = self.get('/func/methods', method=method,
headers={'NEXT': '/abc'}, allow_redirects=False)
self.assertEqual(r.status_code, FOUND)
self.assertEqual(r.headers.get('Location'), '/abc')
class TestWrapper(TestGramex):
def test_config_kwargs(self):
self.check('/func/power?y=3', text='9.0')
self.check('/func/power?y=3&x=3', text='27.0')
def test_yielder(self):
self.check('/func/yielder?i=a&i=b&i=c', text='abc')
def test_add_handler_get(self):
self.check('/func/total/40/2', text='42.0')
self.check('/func/total/40/2?items=10', text='52.0')
self.check('/func/total/40/2?items=10&items=10', text='62.0')
self.check('/func/name_age/johndoe/age/42', text='johndoe is 42 years old.')
self.check('/func/name_age', text='alpha is 10 years old.')
self.check('/func/name_age?name=johndoe&age=42', text='johndoe is 42 years old.')
# In case of multiple kwargs, the last parameter is picked
self.check('/func/name_age?name=x&name=y&age=1&age=2', text='y is 2 years old.')
# When type hints are violated:
self.check('/func/hints?name=johndoe&age=42.3', code=500)
# When multiple arguments are passed:
self.check('/func/total?items=1&items=2&items=3', text='6.0')
self.check('/func/multilist?items=1&items=2&items=3&start=1', text='7.0')
# Positional args with types
self.check('/func/strtotal?items=a&items=b&items=c', text='abc')
# Test native types. Note: "i=false" won't work -- use "i=" since it's a np.bool8
# Note: datetimes must be quoted, since they'll be read as JSON usually.
self.check(
'/func/nativetypes?a=3&b=1.5&c=false&d=d&e=null&f=3&g=1.5&h=h&i=',
text=''.join(['3', '1.5', 'false', 'd', '', '3', '1.5', 'h', 'false',
'"2020-01-01T00:00:00+00:00"', '{"a":3,"b":1.5}', '[3,1.5]']))
self.check('/func/greet', text='Hello, Stranger!')
self.check('/func/greet?name=gramex', text='Hello, gramex!')
self.check('/func/multilist?items=1&items=2&items=3&start=1', text='7.0')
sales = self.check('/func/sales').json()
afe(pd.DataFrame(sales), gramex.cache.open('sales.xlsx', rel=True))
self.check('/func/content/003.json',
text='{"x":3}',
headers={'Content-Type': 'application/json'})
self.check('/func/content/003.txt',
text='x=3',
headers={'Content-Type': 'text/plain'})
def test_add_handler_post(self):
self.check(
'/func/name_age', method='post', data={'name': 'johndoe', 'age': '42'},
text='johndoe is 42 years old.')
self.check(
'/func/name_age', method='post', data=json.dumps({'name': 'johndoe', 'age': '42'}),
request_headers={'Content-Type': 'application/json'},
text='johndoe is 42 years old.')
# When type hints are violated:
self.check('/func/hints', method='post', data={'name': 'johndoe', 'age': '42.3'},
code=500)
# Check typecasting
self.check(
'/func/nativetypes', method='post',
data=json.dumps({'a': 3, 'b': 1.5, 'c': False, 'd': 'd', 'e': None, 'f': 3,
'g': 1.5, 'h': 'h', 'i': False}),
request_headers={'Content-Type': 'application/json'},
text=''.join(['3', '1.5', 'false', 'd', '', '3', '1.5', 'h', 'false',
'"2020-01-01T00:00:00+00:00"', '{"a":3,"b":1.5}', '[3,1.5]']))
self.check('/func/greet', text='Hello, Stranger!')
# Check if POSTing url params and path args works
self.check('/func/name_age?name=johndoe&age=42', method='post',
text='johndoe is 42 years old.')
self.check('/func/name_age/johndoe/age/42', text='johndoe is 42 years old.')
def test_add_handler_delete(self):
self.check('/func/total/40/2?items=10&items=20', text='72.0', method='delete')
|
[
"pandas.DataFrame",
"json.dumps"
] |
[((5597, 5616), 'pandas.DataFrame', 'pd.DataFrame', (['sales'], {}), '(sales)\n', (5609, 5616), True, 'import pandas as pd\n'), ((6197, 6241), 'json.dumps', 'json.dumps', (["{'name': 'johndoe', 'age': '42'}"], {}), "({'name': 'johndoe', 'age': '42'})\n", (6207, 6241), False, 'import json\n'), ((6626, 6733), 'json.dumps', 'json.dumps', (["{'a': 3, 'b': 1.5, 'c': False, 'd': 'd', 'e': None, 'f': 3, 'g': 1.5, 'h':\n 'h', 'i': False}"], {}), "({'a': 3, 'b': 1.5, 'c': False, 'd': 'd', 'e': None, 'f': 3, 'g':\n 1.5, 'h': 'h', 'i': False})\n", (6636, 6733), False, 'import json\n')]
|
import cv2
import matplotlib.pyplot as plt
from QuadTreeCodec import QuadTreeCodec
from TiledCodec import TiledCodec, BitStream
import numpy as np
# TODO Comment
class CodecFactory:
def __init__(self):
self.quad_size = 128
def create_codec(self):
return QuadTreeCodec(quad_tree_size=self.quad_size, min_cell_snr=80, debug=False)
def get_codec_shape(self):
return [self.quad_size, self.quad_size]
# TODO Comment
class DepthImageCodec(TiledCodec):
def __init__(self):
TiledCodec.__init__(self, CodecFactory())
def compress(self, image: np.ndarray):
super().compress(image)
def uncompress(self) -> np.ndarray:
return super().uncompress()
def encode(self, stream: BitStream):
return super().encode(stream)
def decode(self, stream: BitStream):
super().decode(stream)
# TODO Comment
def main():
im = cv2.imread('bgExampleDepth.tif')[:, :, 1]
tc = DepthImageCodec()
tc.compress(im)
uncompressed = tc.uncompress()
plt.figure()
plt.imshow(im)
plt.figure()
plt.imshow(uncompressed)
plt.pause(0.001)
plt.waitforbuttonpress()
|
[
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.waitforbuttonpress",
"cv2.imread",
"matplotlib.pyplot.figure",
"QuadTreeCodec.QuadTreeCodec",
"matplotlib.pyplot.pause"
] |
[((1034, 1046), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1044, 1046), True, 'import matplotlib.pyplot as plt\n'), ((1051, 1065), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (1061, 1065), True, 'import matplotlib.pyplot as plt\n'), ((1070, 1082), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1080, 1082), True, 'import matplotlib.pyplot as plt\n'), ((1087, 1111), 'matplotlib.pyplot.imshow', 'plt.imshow', (['uncompressed'], {}), '(uncompressed)\n', (1097, 1111), True, 'import matplotlib.pyplot as plt\n'), ((1116, 1132), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.001)'], {}), '(0.001)\n', (1125, 1132), True, 'import matplotlib.pyplot as plt\n'), ((1137, 1161), 'matplotlib.pyplot.waitforbuttonpress', 'plt.waitforbuttonpress', ([], {}), '()\n', (1159, 1161), True, 'import matplotlib.pyplot as plt\n'), ((281, 355), 'QuadTreeCodec.QuadTreeCodec', 'QuadTreeCodec', ([], {'quad_tree_size': 'self.quad_size', 'min_cell_snr': '(80)', 'debug': '(False)'}), '(quad_tree_size=self.quad_size, min_cell_snr=80, debug=False)\n', (294, 355), False, 'from QuadTreeCodec import QuadTreeCodec\n'), ((906, 938), 'cv2.imread', 'cv2.imread', (['"""bgExampleDepth.tif"""'], {}), "('bgExampleDepth.tif')\n", (916, 938), False, 'import cv2\n')]
|
"""
Copyright Astronomer, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Unittest module to test Operators.
Requires the unittest, pytest, and requests-mock Python libraries.
"""
import logging
import math
import pathlib
import unittest.mock
from airflow.models import DAG, DagRun
from airflow.models import TaskInstance as TI
from airflow.providers.postgres.hooks.postgres import PostgresHook
from airflow.utils import timezone
from airflow.utils.session import create_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType
# Import Operator
import astro.sql as aql
from astro.sql.table import Table
from tests.operators import utils as test_utils
log = logging.getLogger(__name__)
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
def drop_table(table_name, postgres_conn):
cursor = postgres_conn.cursor()
cursor.execute(f"DROP TABLE IF EXISTS {table_name} CASCADE;")
postgres_conn.commit()
cursor.close()
postgres_conn.close()
class TestPostgresMergeOperator(unittest.TestCase):
"""
Test Postgres Merge Operator.
"""
cwd = pathlib.Path(__file__).parent
@classmethod
def setUpClass(cls):
super().setUpClass()
def setUp(self):
super().setUp()
self.clear_run()
self.addCleanup(self.clear_run)
self.dag = DAG(
"test_dag",
default_args={
"owner": "airflow",
"start_date": DEFAULT_DATE,
},
)
self.main_table = Table(
table_name="merge_test_1", conn_id="postgres_conn", database="pagila"
)
self.merge_table = Table(
table_name="merge_test_2", conn_id="postgres_conn", database="pagila"
)
aql.load_file(
path=str(self.cwd) + "/../data/homes_merge_1.csv",
output_table=self.main_table,
).operator.execute({"run_id": "foo"})
aql.load_file(
path=str(self.cwd) + "/../data/homes_merge_2.csv",
output_table=self.merge_table,
).operator.execute({"run_id": "foo"})
def clear_run(self):
self.run = False
def tearDown(self):
super().tearDown()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
def create_and_run_task(self, decorator_func, op_args, op_kwargs):
with self.dag:
f = decorator_func(*op_args, **op_kwargs)
test_utils.run_dag(self.dag)
return f
def test_merge_basic_single_key(self):
hook = PostgresHook(schema="pagila", postgres_conn_id="postgres_conn")
hook.run(
sql="ALTER TABLE tmp_astro.merge_test_1 ADD CONSTRAINT airflow UNIQUE (list)"
)
a = aql.merge(
target_table=self.main_table,
merge_table=self.merge_table,
merge_keys=["list"],
target_columns=["list"],
merge_columns=["list"],
conflict_strategy="ignore",
)
a.execute({"run_id": "foo"})
df = hook.get_pandas_df(sql="SELECT * FROM tmp_astro.merge_test_1")
assert df.age.to_list()[:-1] == [60.0, 12.0, 41.0, 22.0]
assert math.isnan(df.age.to_list()[-1])
assert df.taxes.to_list()[:-1] == [3167.0, 4033.0, 1471.0, 3204.0]
assert math.isnan(df.taxes.to_list()[-1])
assert df.taxes.to_list()[:-1] == [3167.0, 4033.0, 1471.0, 3204.0]
assert df.list.to_list() == [160, 180, 132, 140, 240]
assert df.sell.to_list()[:-1] == [142, 175, 129, 138]
assert math.isnan(df.taxes.to_list()[-1])
def test_merge_basic_ignore(self):
hook = PostgresHook(schema="pagila", postgres_conn_id="postgres_conn")
hook.run(
sql="ALTER TABLE tmp_astro.merge_test_1 ADD CONSTRAINT airflow UNIQUE (list,sell)"
)
a = aql.merge(
target_table=self.main_table,
merge_table=self.merge_table,
merge_keys=["list", "sell"],
target_columns=["list", "sell"],
merge_columns=["list", "sell"],
conflict_strategy="ignore",
)
a.execute({"run_id": "foo"})
df = hook.get_pandas_df(sql="SELECT * FROM tmp_astro.merge_test_1")
assert df.age.to_list()[:-1] == [60.0, 12.0, 41.0, 22.0]
assert math.isnan(df.age.to_list()[-1])
assert df.taxes.to_list()[:-1] == [3167.0, 4033.0, 1471.0, 3204.0]
assert math.isnan(df.taxes.to_list()[-1])
assert df.taxes.to_list()[:-1] == [3167.0, 4033.0, 1471.0, 3204.0]
assert df.list.to_list() == [160, 180, 132, 140, 240]
assert df.sell.to_list() == [142, 175, 129, 138, 232]
def test_merge_basic_update(self):
hook = PostgresHook(schema="pagila", postgres_conn_id="postgres_conn")
hook.run(
sql="ALTER TABLE tmp_astro.merge_test_1 ADD CONSTRAINT airflow UNIQUE (list,sell)"
)
a = aql.merge(
target_table=self.main_table,
merge_table=self.merge_table,
merge_keys=["list", "sell"],
target_columns=["list", "sell", "taxes"],
merge_columns=["list", "sell", "age"],
conflict_strategy="update",
)
a.execute({"run_id": "foo"})
df = hook.get_pandas_df(sql="SELECT * FROM tmp_astro.merge_test_1")
assert df.taxes.to_list() == [1, 1, 1, 1, 1]
assert df.age.to_list()[:-1] == [60.0, 12.0, 41.0, 22.0]
assert math.isnan(df.age.to_list()[-1])
|
[
"astro.sql.table.Table",
"airflow.utils.timezone.datetime",
"airflow.utils.session.create_session",
"pathlib.Path",
"airflow.models.DAG",
"tests.operators.utils.run_dag",
"airflow.providers.postgres.hooks.postgres.PostgresHook",
"astro.sql.merge",
"logging.getLogger"
] |
[((1186, 1213), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1203, 1213), False, 'import logging\n'), ((1229, 1258), 'airflow.utils.timezone.datetime', 'timezone.datetime', (['(2016)', '(1)', '(1)'], {}), '(2016, 1, 1)\n', (1246, 1258), False, 'from airflow.utils import timezone\n'), ((1593, 1615), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1605, 1615), False, 'import pathlib\n'), ((1825, 1903), 'airflow.models.DAG', 'DAG', (['"""test_dag"""'], {'default_args': "{'owner': 'airflow', 'start_date': DEFAULT_DATE}"}), "('test_dag', default_args={'owner': 'airflow', 'start_date': DEFAULT_DATE})\n", (1828, 1903), False, 'from airflow.models import DAG, DagRun\n'), ((2012, 2088), 'astro.sql.table.Table', 'Table', ([], {'table_name': '"""merge_test_1"""', 'conn_id': '"""postgres_conn"""', 'database': '"""pagila"""'}), "(table_name='merge_test_1', conn_id='postgres_conn', database='pagila')\n", (2017, 2088), False, 'from astro.sql.table import Table\n'), ((2139, 2215), 'astro.sql.table.Table', 'Table', ([], {'table_name': '"""merge_test_2"""', 'conn_id': '"""postgres_conn"""', 'database': '"""pagila"""'}), "(table_name='merge_test_2', conn_id='postgres_conn', database='pagila')\n", (2144, 2215), False, 'from astro.sql.table import Table\n'), ((2971, 2999), 'tests.operators.utils.run_dag', 'test_utils.run_dag', (['self.dag'], {}), '(self.dag)\n', (2989, 2999), True, 'from tests.operators import utils as test_utils\n'), ((3076, 3139), 'airflow.providers.postgres.hooks.postgres.PostgresHook', 'PostgresHook', ([], {'schema': '"""pagila"""', 'postgres_conn_id': '"""postgres_conn"""'}), "(schema='pagila', postgres_conn_id='postgres_conn')\n", (3088, 3139), False, 'from airflow.providers.postgres.hooks.postgres import PostgresHook\n'), ((3270, 3445), 'astro.sql.merge', 'aql.merge', ([], {'target_table': 'self.main_table', 'merge_table': 'self.merge_table', 'merge_keys': "['list']", 'target_columns': "['list']", 'merge_columns': "['list']", 'conflict_strategy': '"""ignore"""'}), "(target_table=self.main_table, merge_table=self.merge_table,\n merge_keys=['list'], target_columns=['list'], merge_columns=['list'],\n conflict_strategy='ignore')\n", (3279, 3445), True, 'import astro.sql as aql\n'), ((4177, 4240), 'airflow.providers.postgres.hooks.postgres.PostgresHook', 'PostgresHook', ([], {'schema': '"""pagila"""', 'postgres_conn_id': '"""postgres_conn"""'}), "(schema='pagila', postgres_conn_id='postgres_conn')\n", (4189, 4240), False, 'from airflow.providers.postgres.hooks.postgres import PostgresHook\n'), ((4377, 4576), 'astro.sql.merge', 'aql.merge', ([], {'target_table': 'self.main_table', 'merge_table': 'self.merge_table', 'merge_keys': "['list', 'sell']", 'target_columns': "['list', 'sell']", 'merge_columns': "['list', 'sell']", 'conflict_strategy': '"""ignore"""'}), "(target_table=self.main_table, merge_table=self.merge_table,\n merge_keys=['list', 'sell'], target_columns=['list', 'sell'],\n merge_columns=['list', 'sell'], conflict_strategy='ignore')\n", (4386, 4576), True, 'import astro.sql as aql\n'), ((5258, 5321), 'airflow.providers.postgres.hooks.postgres.PostgresHook', 'PostgresHook', ([], {'schema': '"""pagila"""', 'postgres_conn_id': '"""postgres_conn"""'}), "(schema='pagila', postgres_conn_id='postgres_conn')\n", (5270, 5321), False, 'from airflow.providers.postgres.hooks.postgres import PostgresHook\n'), ((5457, 5672), 'astro.sql.merge', 'aql.merge', ([], {'target_table': 'self.main_table', 'merge_table': 'self.merge_table', 'merge_keys': "['list', 'sell']", 'target_columns': "['list', 'sell', 'taxes']", 'merge_columns': "['list', 'sell', 'age']", 'conflict_strategy': '"""update"""'}), "(target_table=self.main_table, merge_table=self.merge_table,\n merge_keys=['list', 'sell'], target_columns=['list', 'sell', 'taxes'],\n merge_columns=['list', 'sell', 'age'], conflict_strategy='update')\n", (5466, 5672), True, 'import astro.sql as aql\n'), ((2703, 2719), 'airflow.utils.session.create_session', 'create_session', ([], {}), '()\n', (2717, 2719), False, 'from airflow.utils.session import create_session\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2020 CERN.
# Copyright (C) 2018-2020 RERO.
#
# Invenio-Circulation is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Invenio Circulation custom transitions."""
from flask import current_app
from invenio_db import db
from invenio_circulation.proxies import current_circulation
from ..api import can_be_requested, get_available_item_by_doc_pid, \
get_document_pid_by_item_pid, get_pending_loans_by_doc_pid, \
is_item_at_desk_available_for_checkout
from ..errors import ItemDoNotMatchError, ItemNotAvailableError, \
LoanMaxExtensionError, RecordCannotBeRequestedError, \
TransitionConditionsFailedError, TransitionConstraintsViolationError
from ..transitions.base import Transition
from ..transitions.conditions import is_same_location
def _ensure_valid_loan_duration(loan, initial_loan):
"""Validate start and end dates for a loan."""
loan.setdefault("start_date", loan["transaction_date"])
if not loan.get("end_date"):
get_loan_duration = current_app.config["CIRCULATION_POLICIES"][
"checkout"
]["duration_default"]
duration = get_loan_duration(loan, initial_loan)
loan["end_date"] = loan["start_date"] + duration
is_duration_valid = current_app.config["CIRCULATION_POLICIES"]["checkout"][
"duration_validate"
]
if not is_duration_valid(loan):
msg = "The loan duration from '{0}' to '{1}' is not valid.".format(
loan["start_date"].isoformat(), loan["end_date"].isoformat()
)
raise TransitionConstraintsViolationError(description=msg)
def _ensure_item_attached_to_loan(loan):
"""Validate that an item is attached to a loan."""
if not loan.get("item_pid"):
msg = "No item assigned to loan '{0}'.".format(loan.id)
raise TransitionConditionsFailedError(description=msg)
def ensure_same_item(f):
"""Validate that the item PID exists and cannot be changed."""
def inner(self, loan, initial_loan, **kwargs):
item_pid = kwargs.get("item_pid")
if item_pid:
if not current_app.config["CIRCULATION_ITEM_EXISTS"](item_pid):
msg = "Item '{0}:{1}' not found in the system".format(
item_pid["type"], item_pid["value"]
)
raise ItemNotAvailableError(description=msg)
wrong_pid_value = loan.get("item_pid") and \
item_pid["value"] != loan["item_pid"]["value"]
wrong_pid_type = loan.get("item_pid") and \
item_pid["type"] != loan["item_pid"]["type"]
if wrong_pid_value or wrong_pid_type:
msg = (
"Cannot change item '{0}:{1}' while performing an "
"action on this loan".format(
item_pid["type"], item_pid["value"]
)
)
raise ItemDoNotMatchError(description=msg)
return f(self, loan, initial_loan, **kwargs)
return inner
def _update_document_pending_request_for_item(item_pid, **kwargs):
"""Update pending loans on a Document with no Item attached yet.
:param item_pid: a dict containing `value` and `type` fields to
uniquely identify the item.
"""
document_pid = get_document_pid_by_item_pid(item_pid)
for pending_loan in get_pending_loans_by_doc_pid(document_pid):
pending_loan["item_pid"] = item_pid
pending_loan.commit()
db.session.commit()
current_circulation.loan_indexer().index(pending_loan)
def _ensure_same_location(item_pid, location_pid, destination, error_msg):
"""Validate that item location is same as given location."""
if not is_same_location(item_pid, location_pid):
error_msg += " Transition to '{}' has failed.".format(destination)
raise TransitionConditionsFailedError(description=error_msg)
def _ensure_not_same_location(item_pid, location_pid, destination, error_msg):
"""Validate that item location is not the same as given location."""
if is_same_location(item_pid, location_pid):
error_msg += " Transition to '{}' has failed.".format(destination)
raise TransitionConditionsFailedError(description=error_msg)
def _validate_item_pickup_transaction_locations(loan, destination, **kwargs):
"""Validate the loan item, pickup and transaction locations."""
item_location_pid = \
current_app.config["CIRCULATION_ITEM_LOCATION_RETRIEVER"](
loan["item_pid"])
kwargs["item_location_pid"] = item_location_pid
validate_item_pickup_transaction_locations = current_app.config[
"CIRCULATION_LOAN_LOCATIONS_VALIDATION"]
if not validate_item_pickup_transaction_locations(
loan, destination, **kwargs):
raise TransitionConditionsFailedError()
def _get_item_location(item_pid):
"""Retrieve Item location based on PID."""
return current_app.config["CIRCULATION_ITEM_LOCATION_RETRIEVER"](item_pid)
def _ensure_default_pickup_location(loan, context):
"""Set default pickup location if no one."""
if not context.get("pickup_location_pid") \
or "pickup_location_pid" not in loan:
loan['pickup_location_pid'] = _get_item_location(loan['item_pid'])
class ToItemOnLoan(Transition):
"""Action to checkout."""
def before(self, loan, initial_loan, **kwargs):
"""Validate checkout action."""
super().before(loan, initial_loan, **kwargs)
self.ensure_item_is_available_for_checkout(loan)
_ensure_default_pickup_location(loan, kwargs)
_ensure_valid_loan_duration(loan, initial_loan)
class ItemAtDeskToItemOnLoan(Transition):
"""Check-out action to perform a loan when item ready at desk."""
def before(self, loan, initial_loan, **kwargs):
"""Validate checkout action."""
super().before(loan, initial_loan, **kwargs)
self.ensure_at_desk_item_is_available_for_checkout(loan)
_ensure_default_pickup_location(loan, kwargs)
_ensure_valid_loan_duration(loan, initial_loan)
def ensure_at_desk_item_is_available_for_checkout(self, loan):
"""Validate that an item at desk is available for checkout."""
self._check_item_before_availability(loan)
# patron_pid is mandatory for next steps
if 'patron_pid' not in loan:
msg = "Patron not set for loan with pid '{}'".format(loan['pid'])
raise TransitionConstraintsViolationError(description=msg)
is_available = is_item_at_desk_available_for_checkout(
loan['item_pid'],
loan['patron_pid']
)
if not is_available:
raise ItemNotAvailableError(
item_pid=loan['item_pid'], transition=self.dest)
def check_request_on_document(f):
"""Decorator to check if the request is on document."""
def inner(self, loan, initial_loan, **kwargs):
document_pid = kwargs.get("document_pid")
if document_pid and not kwargs.get("item_pid"):
if not can_be_requested(loan):
msg = "Cannot create a request for the document '{}'".format(
loan.get("document_pid")
)
raise RecordCannotBeRequestedError(description=msg)
if self.assign_item:
available_item_pid = get_available_item_by_doc_pid(
document_pid
)
if available_item_pid:
kwargs["item_pid"] = available_item_pid
if kwargs.get("item_pid") and not kwargs.get("pickup_location_pid"):
# if no pickup location was specified in the request,
# assign a default one
kwargs["pickup_location_pid"] = _get_item_location(
kwargs["item_pid"]
)
return f(self, loan, initial_loan, **kwargs)
return inner
class CreatedToPending(Transition):
"""Action to request to loan an item."""
def __init__(
self, src, dest, trigger="next", permission_factory=None, **kwargs
):
"""Constructor."""
super().__init__(
src,
dest,
trigger=trigger,
permission_factory=permission_factory,
**kwargs
)
self.assign_item = kwargs.get("assign_item", True)
@check_request_on_document
def before(self, loan, initial_loan, **kwargs):
"""Check if the loan request can be created."""
super().before(loan, initial_loan, **kwargs)
if not can_be_requested(loan):
msg = "Cannot create a request for the loan '{}'".format(loan)
raise RecordCannotBeRequestedError(description=msg)
class PendingToItemAtDesk(Transition):
"""Validate pending request to prepare the item at desk of its location."""
def before(self, loan, initial_loan, **kwargs):
"""Validate if the item is for this location or should transit."""
super().before(loan, initial_loan, **kwargs)
# check if a request on document has no item attached
_ensure_item_attached_to_loan(loan)
# validate the item, pickup and transaction locations of the loan
_validate_item_pickup_transaction_locations(loan, self.dest, **kwargs)
class PendingToItemInTransitPickup(Transition):
"""Validate pending request to send the item to the pickup location."""
def before(self, loan, initial_loan, **kwargs):
"""Validate if the item is for this location or should transit."""
super().before(loan, initial_loan, **kwargs)
# check if a request on document has no item attached
_ensure_item_attached_to_loan(loan)
# validate the item, pickup and transaction locations of the loan
_validate_item_pickup_transaction_locations(loan, self.dest, **kwargs)
class ItemOnLoanToItemOnLoan(Transition):
"""Extend action to perform a item loan extension."""
def update_extension_count(self, loan):
"""Check number of extensions and update it."""
extension_count = loan.get("extension_count", 0)
extension_count += 1
get_extension_max_count_func = current_app.config[
"CIRCULATION_POLICIES"
]["extension"]["max_count"]
extension_max_count = get_extension_max_count_func(loan)
if extension_count > extension_max_count:
raise LoanMaxExtensionError(
loan_pid=loan["pid"], extension_count=extension_max_count
)
loan["extension_count"] = extension_count
def update_loan_end_date(self, loan, initial_loan):
"""Update the end date of the extended loan."""
get_extension_duration_func = current_app.config[
"CIRCULATION_POLICIES"
]["extension"]["duration_default"]
duration = get_extension_duration_func(loan, initial_loan)
should_extend_from_end_date = current_app.config[
"CIRCULATION_POLICIES"
]["extension"]["from_end_date"]
if not should_extend_from_end_date:
# extend from the transaction_date instead
loan["end_date"] = loan["transaction_date"]
loan["end_date"] += duration
@ensure_same_item
def before(self, loan, initial_loan, **kwargs):
"""Validate extension action."""
super().before(loan, initial_loan, **kwargs)
self.update_extension_count(loan)
self.update_loan_end_date(loan, initial_loan)
class ItemOnLoanToItemInTransitHouse(Transition):
"""Check-in action when returning an item not to its belonging location."""
@ensure_same_item
def before(self, loan, initial_loan, **kwargs):
"""Validate check-in action."""
super().before(loan, initial_loan, **kwargs)
_ensure_not_same_location(
loan["item_pid"],
loan["transaction_location_pid"],
self.dest,
error_msg="Item should be returned (already in house).",
)
class ItemOnLoanToItemReturned(Transition):
"""Check-in action when returning an item to its belonging location."""
def __init__(
self, src, dest, trigger="next", permission_factory=None, **kwargs
):
"""Constructor."""
super().__init__(
src,
dest,
trigger=trigger,
permission_factory=permission_factory,
**kwargs
)
self.assign_item = kwargs.get("assign_item", True)
@ensure_same_item
def before(self, loan, initial_loan, **kwargs):
"""Validate check-in action."""
super().before(loan, initial_loan, **kwargs)
_ensure_same_location(
loan["item_pid"],
loan["transaction_location_pid"],
self.dest,
error_msg="Item should be in transit to house.",
)
# set end loan date as transaction date when completing loan
loan["end_date"] = loan["transaction_date"]
def after(self, loan, initial_loan):
"""Check for pending requests on this item after check-in."""
super().after(loan, initial_loan)
if self.assign_item:
_update_document_pending_request_for_item(loan["item_pid"])
class ItemInTransitHouseToItemReturned(Transition):
"""Check-in action when returning an item to its belonging location."""
def __init__(
self, src, dest, trigger="next", permission_factory=None, **kwargs
):
"""Constructor."""
super().__init__(
src,
dest,
trigger=trigger,
permission_factory=permission_factory,
**kwargs
)
self.assign_item = kwargs.get("assign_item", True)
@ensure_same_item
def before(self, loan, initial_loan, **kwargs):
"""Validate check-in action."""
super().before(loan, initial_loan, **kwargs)
_ensure_same_location(
loan["item_pid"],
loan["transaction_location_pid"],
self.dest,
error_msg="Item should be in transit to house.",
)
def after(self, loan, initial_loan):
"""Check for pending requests on this item after check-in."""
super().after(loan, initial_loan)
if self.assign_item:
_update_document_pending_request_for_item(loan["item_pid"])
class ToCancelled(Transition):
"""When cancelling a loan, ensure that the item is not changed."""
@ensure_same_item
def before(self, loan, initial_loan, **kwargs):
"""Validate cancel action."""
super().before(loan, initial_loan, **kwargs)
|
[
"invenio_db.db.session.commit",
"invenio_circulation.proxies.current_circulation.loan_indexer"
] |
[((3574, 3593), 'invenio_db.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3591, 3593), False, 'from invenio_db import db\n'), ((3602, 3636), 'invenio_circulation.proxies.current_circulation.loan_indexer', 'current_circulation.loan_indexer', ([], {}), '()\n', (3634, 3636), False, 'from invenio_circulation.proxies import current_circulation\n')]
|
import asana
# credentials
API_KEY = 'YOUR_API_KEY HERE'
OLD_PROJECT_ID = 123
NEW_PROJECT_ID = 1234
# connecting to API
client = asana.Client.access_token(API_KEY)
# check if user is connected
me = client.users.me()
print("Hello " + me['name'])
# get new workspace id
new_workspace_id = client.projects.find_by_id(NEW_PROJECT_ID)['workspace']['id']
# get all tasks from old project
tasks = client.tasks.find_all({'project': OLD_PROJECT_ID})
# Method to update old task data for new task
def update_old_task(task_data):
# necessary deletions
del task_data['workspace']
del task_data['hearted']
del task_data['resource_type']
del task_data['created_at']
del task_data['modified_at']
del task_data['completed_at']
del task_data['memberships']
del task_data['hearts']
del task_data['num_hearts']
del task_data['likes']
del task_data['num_likes']
del task_data['assignee_status']
# update task to new project
task_data['projects'] = [NEW_PROJECT_ID]
return task_data
# loop through task
for task in tasks:
# get detailed task information
task_obj = client.tasks.find_by_id(task['id'])
# remove unwanted items
task_obj = update_old_task(task_obj)
print(task_obj)
# create new task
client.tasks.create_in_workspace(new_workspace_id, task_obj)
|
[
"asana.Client.access_token"
] |
[((131, 165), 'asana.Client.access_token', 'asana.Client.access_token', (['API_KEY'], {}), '(API_KEY)\n', (156, 165), False, 'import asana\n')]
|
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from PIL import ImageTk, Image
import PIL
import pyaudio
import pandas as pd
import numpy
from tkinter import *
import random
import speech_recognition as sr
from nltk import *
import pyttsx3
import math
def speak(command):
engine = pyttsx3.init() # engine initialized
engine.setProperty("rate", 200)
engine.say(command)
engine.runAndWait()
engine.stop
def recognize():
r = sr.Recognizer()
while True:
try:
with sr.Microphone() as source2: # here i have set the microphone as source2
print("Speak something : ")
# activated microphone
audio = r.listen(source2, timeout=1, phrase_time_limit=5)
MyText = r.recognize_google(audio)
r.adjust_for_ambient_noise(source2, duration=0.2)
print("Thinking....wait ")
MyText = MyText.lower() # convert the text in lower case
inp_val = MyText
print("user says " + MyText) # input by the user
if "credit required" in MyText:
speak("You must have 80 credits")
elif "submit" in MyText:
speak("Okay sir")
submit()
elif "what you can do":
info()
elif "stop" in MyText:
print("Bye Bye")
break
except sr.RequestError as e:
print("Not possile now")
speak("Welcome to Course Recommender!")
def submit():
sentiment_Result = [""]
msg = inp_val.get()
sentiment = SentimentIntensityAnalyzer()
print(msg)
print()
ss = sentiment.polarity_scores(msg)
temp = ss.items()
print(temp)
temp1 = list(ss.values())
print(temp1[0])
print('{0}: {1}, '.format(msg, ss), end='')
print(ss)
sentiment_Result.append(ss)
print()
credit = inp_Entry_Credit.get()
if temp1[0] < 0.4:
speak("You can apply courses of your interest!")
if inp_credit_vals.get() < min_credit:
required_Credit = min_credit - inp_credit_vals.get()
speak("You are not Eligible for MIT Courses!")
print("You have ", end=" ")
print(required_Credit, end=" ")
print("more to")
speak("You have to gain")
speak(required_Credit)
speak("more credits")
# speak(csvFile['MITCourse'])
speak("you can go with")
if inp_val.get() in machineLearning:
print("You can go with -->", end="")
print(random.choices(csvFile['MachineLearning']))
speak(random.choices(csvFile['MachineLearning']))
elif msg in DataAnalysis:
print(random.choices(csvFile['DataAnalysis']))
speak(random.choices(csvFile['DataAnalysis']))
elif msg in DataStructure:
print(random.choices(csvFile['Datastrucure']))
speak(random.choices(csvFile['Datastrucure']))
elif msg in DataVisualization:
print(random.choices(csvFile['DataVisualization']))
speak(random.choices(csvFile['DataVisualization']))
elif msg in Marketing:
print(random.choices(csvFile['Marketing']))
speak(random.choices(csvFile['Marketing']))
elif msg in ContentMarketing:
print(random.choices(csvFile['ContentMarketing']))
speak(random.choices(csvFile['ContentMarketing']))
elif msg in WebDeveloment:
print(random.choices(csvFile['WebDeveloment']))
speak(random.choices(csvFile['WebDeveloment']))
elif msg in Architecture:
print(random.choices(csvFile['Architecture']))
speak(random.choices(csvFile['Architecture']))
elif msg in GraphicDesign:
print(random.choices(csvFile['Graphicdesign']))
speak(random.choices(csvFile['Graphicdesign']))
elif msg in CloudComputing:
print(random.choices(csvFile['Cloudcomputing']))
speak(random.choices(csvFile['Cloudcomputing']))
else:
print("Not understood")
speak("I'm not able to understand")
else:
if inp_credit_vals.get() < min_credit:
required_Credit = min_credit - inp_credit_vals.get()
speak("You are not Eligible for MIT Courses!")
print("You have ", end=" ")
print(required_Credit, end=" ")
print("more to")
speak("You have to gain")
speak(required_Credit)
speak("more credits")
speak("You can enter other course as per your interest ")
else:
speak(csvFile['MITCourse'])
print(csvFile['MITCourse'])
def info():
speak("I can recommend you a good course as per your inerest, i can also check that if student is eligible for MIT courses or not")
print("RUNNNNNNNNN")
csvFile = pd.read_csv("Book411.csv")
course = csvFile['MachineLearning']
print("********")
print(csvFile.head())
machineLearning = ["ML", "Machine Learning", "AI", "ml", "machine learning"]
DataAnalysis = ["data analysys", "DataAnalysis", "Data Analysis"]
DataVisualization = ["Data Visualization",
"data visualization", "datavisualization"]
GraphicDesign = ["Graphic Design", "graphicdesign", "GraphicDesign"]
DataStructure = ["datastructure", "DataStructure",
"data structure", "Data Structure"]
CloudComputing = ["Cloud Computing", "Cloudcomputing"]
Marketing = ["Marketing", "marketting"]
ContentMarketing = ["ContentMarketing", "Content Marketting"]
SocialMediaMarketting = ["Social Media Marketing", "social media marketting"]
WebDeveloment = ["Web", "Web development", "Web Development"]
Architecture = ["Architecture", "architecture"]
machineLearningCourse = ["ML for everybody",
"ML and Datastrucure", "Ml with Piyush"]
course_credits = ['7', '8', '9', '10']
root = Tk()
load = PIL.Image.open("G:\\VSCODE\\NLP_LAB\\1.jpg")
render = ImageTk.PhotoImage(load)
img = Label(root, image=render)
img.place(x=0, y=0)
root.geometry("800x450")
lab = Label(text="Course Recommender",
font="timesnewroman 22", padx=12, pady=44)
lab.grid(row=1, column=10)
root.title("Course Recommender")
inp_val = StringVar()
inp_credit_vals = IntVar()
# x = inp_credit_vals.get()
Course_Credit = 10
min_credit = 80
# y = int(inp_credit_vals.get())
# required_Credit = min_credit - y
button1 = Button(text="MIT Courses", activebackground="red", command="")
button1.grid(row=20, column=10)
photo = PhotoImage(file=r"G:\\VSCODE\\NLP_LAB\\mic.png")
photo = photo.subsample(4)
button2 = Button(root, text="Click To Speak", image=photo,
activebackground="red", command=recognize)
button2.grid(row=3, column=19)
msg = inp_val.get()
print(msg)
frame = Frame(root, width=5, height=5)
frame.grid(row=12, column=10)
lab = Label(frame, text="Course Eligibility", wraplength=200, font="20")
lab.grid(row=12, column=10)
inp_entry = Entry(root, textvariable=inp_val,
insertbackground="skyblue", width=30, bg='red')
inp_entry.grid(row=3, column=10)
inp_Entry_Credit = Entry(root, textvariable=inp_credit_vals,
width=10, insertbackground="skyblue", bg="red")
inp_Entry_Credit.grid(row=16, column=10, pady=12)
label1 = Label(text="Your Interest")
label2 = Label(text="Enter Credits")
label1.grid(row=3)
label2.grid(row=16)
button = Button(text="Submit", activebackground="red", command=submit)
button.grid(row=5, column=10, pady=12)
root.mainloop()
|
[
"PIL.ImageTk.PhotoImage",
"pyttsx3.init",
"nltk.sentiment.vader.SentimentIntensityAnalyzer",
"pandas.read_csv",
"random.choices",
"PIL.Image.open",
"speech_recognition.Microphone",
"speech_recognition.Recognizer"
] |
[((5327, 5353), 'pandas.read_csv', 'pd.read_csv', (['"""Book411.csv"""'], {}), "('Book411.csv')\n", (5338, 5353), True, 'import pandas as pd\n'), ((6420, 6464), 'PIL.Image.open', 'PIL.Image.open', (['"""G:\\\\VSCODE\\\\NLP_LAB\\\\1.jpg"""'], {}), "('G:\\\\VSCODE\\\\NLP_LAB\\\\1.jpg')\n", (6434, 6464), False, 'import PIL\n'), ((6475, 6499), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['load'], {}), '(load)\n', (6493, 6499), False, 'from PIL import ImageTk, Image\n'), ((316, 330), 'pyttsx3.init', 'pyttsx3.init', ([], {}), '()\n', (328, 330), False, 'import pyttsx3\n'), ((488, 503), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (501, 503), True, 'import speech_recognition as sr\n'), ((1737, 1765), 'nltk.sentiment.vader.SentimentIntensityAnalyzer', 'SentimentIntensityAnalyzer', ([], {}), '()\n', (1763, 1765), False, 'from nltk.sentiment.vader import SentimentIntensityAnalyzer\n'), ((553, 568), 'speech_recognition.Microphone', 'sr.Microphone', ([], {}), '()\n', (566, 568), True, 'import speech_recognition as sr\n'), ((2777, 2819), 'random.choices', 'random.choices', (["csvFile['MachineLearning']"], {}), "(csvFile['MachineLearning'])\n", (2791, 2819), False, 'import random\n'), ((2844, 2886), 'random.choices', 'random.choices', (["csvFile['MachineLearning']"], {}), "(csvFile['MachineLearning'])\n", (2858, 2886), False, 'import random\n'), ((2952, 2991), 'random.choices', 'random.choices', (["csvFile['DataAnalysis']"], {}), "(csvFile['DataAnalysis'])\n", (2966, 2991), False, 'import random\n'), ((3016, 3055), 'random.choices', 'random.choices', (["csvFile['DataAnalysis']"], {}), "(csvFile['DataAnalysis'])\n", (3030, 3055), False, 'import random\n'), ((3122, 3161), 'random.choices', 'random.choices', (["csvFile['Datastrucure']"], {}), "(csvFile['Datastrucure'])\n", (3136, 3161), False, 'import random\n'), ((3186, 3225), 'random.choices', 'random.choices', (["csvFile['Datastrucure']"], {}), "(csvFile['Datastrucure'])\n", (3200, 3225), False, 'import random\n'), ((3296, 3340), 'random.choices', 'random.choices', (["csvFile['DataVisualization']"], {}), "(csvFile['DataVisualization'])\n", (3310, 3340), False, 'import random\n'), ((3365, 3409), 'random.choices', 'random.choices', (["csvFile['DataVisualization']"], {}), "(csvFile['DataVisualization'])\n", (3379, 3409), False, 'import random\n'), ((3472, 3508), 'random.choices', 'random.choices', (["csvFile['Marketing']"], {}), "(csvFile['Marketing'])\n", (3486, 3508), False, 'import random\n'), ((3533, 3569), 'random.choices', 'random.choices', (["csvFile['Marketing']"], {}), "(csvFile['Marketing'])\n", (3547, 3569), False, 'import random\n'), ((3639, 3682), 'random.choices', 'random.choices', (["csvFile['ContentMarketing']"], {}), "(csvFile['ContentMarketing'])\n", (3653, 3682), False, 'import random\n'), ((3707, 3750), 'random.choices', 'random.choices', (["csvFile['ContentMarketing']"], {}), "(csvFile['ContentMarketing'])\n", (3721, 3750), False, 'import random\n'), ((3817, 3857), 'random.choices', 'random.choices', (["csvFile['WebDeveloment']"], {}), "(csvFile['WebDeveloment'])\n", (3831, 3857), False, 'import random\n'), ((3882, 3922), 'random.choices', 'random.choices', (["csvFile['WebDeveloment']"], {}), "(csvFile['WebDeveloment'])\n", (3896, 3922), False, 'import random\n'), ((3988, 4027), 'random.choices', 'random.choices', (["csvFile['Architecture']"], {}), "(csvFile['Architecture'])\n", (4002, 4027), False, 'import random\n'), ((4052, 4091), 'random.choices', 'random.choices', (["csvFile['Architecture']"], {}), "(csvFile['Architecture'])\n", (4066, 4091), False, 'import random\n'), ((4158, 4198), 'random.choices', 'random.choices', (["csvFile['Graphicdesign']"], {}), "(csvFile['Graphicdesign'])\n", (4172, 4198), False, 'import random\n'), ((4223, 4263), 'random.choices', 'random.choices', (["csvFile['Graphicdesign']"], {}), "(csvFile['Graphicdesign'])\n", (4237, 4263), False, 'import random\n'), ((4331, 4372), 'random.choices', 'random.choices', (["csvFile['Cloudcomputing']"], {}), "(csvFile['Cloudcomputing'])\n", (4345, 4372), False, 'import random\n'), ((4397, 4438), 'random.choices', 'random.choices', (["csvFile['Cloudcomputing']"], {}), "(csvFile['Cloudcomputing'])\n", (4411, 4438), False, 'import random\n')]
|
from xml.etree import ElementTree
from os import path
from io import open
import os
from utils.ospathex import list_files
def remap_output(csproj_path):
# ElementTree.register_namespace('xmlns', 'http://schemas.microsoft.com/developer/msbuild/2003')
tree = ElementTree.parse(csproj_path)
root = tree.getroot()
namespace = {'ns':'{http://schemas.microsoft.com/developer/msbuild/2003}'}
property_groups_els = [elements for elements in root.findall('%(ns)sPropertyGroup'%namespace) if not elements.find('%(ns)sOutputPath' % namespace) == None]
for property_group_els in property_groups_els:
isdebug = 'Debug' in property_group_els.get('Condition')
output_path_els = property_group_els.find('%(ns)sOutputPath' % namespace)
source_path = output_path_els.text
target_path = find_relpath(csproj_path, 'ThinkGeo', isdebug);
output_path_els.text = target_path
projectrefs_els = [elements for elements in root.findall('%(ns)sItemGroup/%(ns)sProjectReference'%namespace)]
for projectref_els in projectrefs_els:
private_element = projectref_els.find('%(ns)sPrivate'%namespace)
if private_element == None:
private_element = ElementTree.Element('%(ns)sPrivate'%namespace)
projectref_els.append(private_element)
private_element.text = str(False)
ElementTree.register_namespace('', 'http://schemas.microsoft.com/developer/msbuild/2003')
tree.write(csproj_path, xml_declaration=True, encoding='utf-8')
print('Remap', path.basename(csproj_path), 'completed.')
def find_relpath(csproj_path, target_basename, debug = True):
rel_path = ''
source_path = path.dirname(csproj_path)
while not path.basename(source_path) == target_basename:
source_path = path.dirname(source_path)
rel_path = rel_path + '..\\'
target_path = path.join(rel_path, r'MapSuiteGisEditor\MapSuiteGisEditor\MapSuiteGisEditor\bin', debug and 'Debug' or 'Release')
return target_path
def need_remap(csproj_path):
return csproj_path.endswith('ForGisEditor-Windows.csproj')
# dirname = r'E:\ThinkGeo\MapSuite\MapSuite'
dirname = r'E:\ThinkGeo\MapSuite\MapSuite'
for csproj_path in list_files(dirname, '.csproj'):
need_remap = csproj_path.endswith('ForGisEditor-Windows.csproj')
if need_remap:
remap_output(csproj_path)
print('Task complete.')
|
[
"xml.etree.ElementTree.parse",
"xml.etree.ElementTree.register_namespace",
"os.path.basename",
"os.path.dirname",
"xml.etree.ElementTree.Element",
"os.path.join",
"utils.ospathex.list_files"
] |
[((2212, 2242), 'utils.ospathex.list_files', 'list_files', (['dirname', '""".csproj"""'], {}), "(dirname, '.csproj')\n", (2222, 2242), False, 'from utils.ospathex import list_files\n'), ((266, 296), 'xml.etree.ElementTree.parse', 'ElementTree.parse', (['csproj_path'], {}), '(csproj_path)\n', (283, 296), False, 'from xml.etree import ElementTree\n'), ((1360, 1453), 'xml.etree.ElementTree.register_namespace', 'ElementTree.register_namespace', (['""""""', '"""http://schemas.microsoft.com/developer/msbuild/2003"""'], {}), "('',\n 'http://schemas.microsoft.com/developer/msbuild/2003')\n", (1390, 1453), False, 'from xml.etree import ElementTree\n'), ((1683, 1708), 'os.path.dirname', 'path.dirname', (['csproj_path'], {}), '(csproj_path)\n', (1695, 1708), False, 'from os import path\n'), ((1874, 1997), 'os.path.join', 'path.join', (['rel_path', '"""MapSuiteGisEditor\\\\MapSuiteGisEditor\\\\MapSuiteGisEditor\\\\bin"""', "(debug and 'Debug' or 'Release')"], {}), "(rel_path,\n 'MapSuiteGisEditor\\\\MapSuiteGisEditor\\\\MapSuiteGisEditor\\\\bin', debug and\n 'Debug' or 'Release')\n", (1883, 1997), False, 'from os import path\n'), ((1537, 1563), 'os.path.basename', 'path.basename', (['csproj_path'], {}), '(csproj_path)\n', (1550, 1563), False, 'from os import path\n'), ((1792, 1817), 'os.path.dirname', 'path.dirname', (['source_path'], {}), '(source_path)\n', (1804, 1817), False, 'from os import path\n'), ((1215, 1263), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (["('%(ns)sPrivate' % namespace)"], {}), "('%(ns)sPrivate' % namespace)\n", (1234, 1263), False, 'from xml.etree import ElementTree\n'), ((1723, 1749), 'os.path.basename', 'path.basename', (['source_path'], {}), '(source_path)\n', (1736, 1749), False, 'from os import path\n')]
|
import getpass
import os
import time
import colored
import requests
from colored import stylize
from dotenv import load_dotenv
# Helper for API interaction with Silver Peak Edge Connect
from silverpeak_python_sdk import EdgeConnect
# Console text highlight color parameters
red_text = colored.fg("red") + colored.attr("bold")
green_text = colored.fg("green") + colored.attr("bold")
blue_text = colored.fg("steel_blue_1b") + colored.attr("bold")
orange_text = colored.fg("dark_orange") + colored.attr("bold")
def ec_assign_orch(
ec_ip,
orchestrator,
account,
accountKey,
ec_user="admin",
ec_pass="<PASSWORD>",
tag="",
orch_check="n",
):
print(tag)
ec = EdgeConnect(ec_ip)
ec.login(ec_user, ec_pass)
# Retrieve current Orchestrator and Account configured on Edge Connect
print(stylize("########## CHECKING CURRENT STATUS ##########", orange_text))
try:
current_orchestrator = ec.get_orchestrator()
print("Current Orchestrator: " + stylize(current_orchestrator, blue_text))
except:
print(
"Could not retrieve current Orchestrator for {0} due to {1}".format(
ec_ip, Exception
)
)
try:
current_reg_status = ec.register_sp_portal_status()
print("Current Account: " + stylize(current_reg_status["account"], blue_text))
except:
print(
"Could not retrieve current registration information for {0} due to {1}".format(
ec_ip, Exception
)
)
# New Orchestrator and Account to be configured on Edge Connect
print("The new Orch will be: " + stylize(orchestrator, blue_text))
print("The new Account will be: " + stylize(account, blue_text))
# Assign new Orchestrator and Account Details
print(stylize("########## CONFIGURING ORCH AND ACCOUNT ##########", orange_text))
try:
ec.assign_orchestrator(orchestrator)
except:
print(
"Could not assign new Orchestrator {0} to Edge Connect {1}".format(
orchestrator, ec_ip
)
)
try:
if tag == "":
ec.register_sp_portal(accountKey, account)
else:
ec.register_sp_portal(accountKey, account, site=tag)
except:
print(
"Could not assign new account {0} to Edge Connect {1}".format(
account, ec_ip
)
)
time.sleep(7)
print(stylize("########## SAVING CHANGES ##########", orange_text))
ec.save_changes()
time.sleep(7)
if orch_check == "y":
# Wait for up to 40 seconds for appliance reach new Orchestrator, checking every 10 seconds
print(stylize("########## WAITING FOR REGISTRATION ##########", orange_text))
reachable = "unknown"
i = 0
while True:
i = i + 1
if (reachable != "Reachable") and (i < 4):
try:
reachable = ec.get_orchestrator()[orchestrator]["webSocket"]
if reachable != "Reachable":
print(
"ECV to Orchestrator web socket status: {0}, waiting 10s for next attempt".format(
reachable
)
)
time.sleep(10)
except:
print(
"Could not get status from Edge Connect {0} (attempt: {1})".format(
ec_ip, i
)
)
else:
print("Not registered with Orchestrator after 4 attempts, moving on...")
break
else:
pass
try:
# Retrieve new status before logging out
print(stylize("########## CHECKING NEW STATUS ##########", orange_text))
current_orchestrator = ec.get_orchestrator()
print("Current Orchestrator: " + stylize(current_orchestrator, blue_text))
current_reg_status = ec.register_sp_portal_status()
print("Current Account: " + stylize(current_reg_status["account"], blue_text))
except:
print(
"Could not retrieve current status from Edge Connect {0} before logging out".format(
ec_ip
)
)
ec.logout()
if __name__ == "__main__":
# Load environment variables
load_dotenv()
# Set Orchestrator and Account Details from .env
orchestrator = str(os.getenv("ORCH_URL"))
account = os.getenv("ACCOUNT")
accountKey = os.getenv("ACCOUNT_KEY")
# Set custom Edge Connect Credentials, otherwise defaults to admin/admin
ec_default_creds = input(
"Are default credentials in use for the Edge Connect(s)? (y/n): "
)
if ec_default_creds == "n":
print(stylize("Enter Edge Connect Credentials:", blue_text))
ec_user = getpass.getuser()
ec_pass = getpass.getpass()
else:
pass
# Enter IP address of single Edge Connect
ec_ip = input(
"Please enter IP address of Edge Connect to Migrate (e.g. 10.1.30.100): "
)
# Login to Edge Connect
if ec_default_creds == "y":
ec_assign_orch(ec_ip, orchestrator, account, accountKey)
else:
ec_assign_orch(ec_ip, orchestrator, account, accountKey, ec_user, ec_pass)
|
[
"getpass.getuser",
"getpass.getpass",
"colored.fg",
"time.sleep",
"dotenv.load_dotenv",
"silverpeak_python_sdk.EdgeConnect",
"colored.stylize",
"colored.attr",
"os.getenv"
] |
[((288, 305), 'colored.fg', 'colored.fg', (['"""red"""'], {}), "('red')\n", (298, 305), False, 'import colored\n'), ((308, 328), 'colored.attr', 'colored.attr', (['"""bold"""'], {}), "('bold')\n", (320, 328), False, 'import colored\n'), ((342, 361), 'colored.fg', 'colored.fg', (['"""green"""'], {}), "('green')\n", (352, 361), False, 'import colored\n'), ((364, 384), 'colored.attr', 'colored.attr', (['"""bold"""'], {}), "('bold')\n", (376, 384), False, 'import colored\n'), ((397, 424), 'colored.fg', 'colored.fg', (['"""steel_blue_1b"""'], {}), "('steel_blue_1b')\n", (407, 424), False, 'import colored\n'), ((427, 447), 'colored.attr', 'colored.attr', (['"""bold"""'], {}), "('bold')\n", (439, 447), False, 'import colored\n'), ((462, 487), 'colored.fg', 'colored.fg', (['"""dark_orange"""'], {}), "('dark_orange')\n", (472, 487), False, 'import colored\n'), ((490, 510), 'colored.attr', 'colored.attr', (['"""bold"""'], {}), "('bold')\n", (502, 510), False, 'import colored\n'), ((697, 715), 'silverpeak_python_sdk.EdgeConnect', 'EdgeConnect', (['ec_ip'], {}), '(ec_ip)\n', (708, 715), False, 'from silverpeak_python_sdk import EdgeConnect\n'), ((2441, 2454), 'time.sleep', 'time.sleep', (['(7)'], {}), '(7)\n', (2451, 2454), False, 'import time\n'), ((2555, 2568), 'time.sleep', 'time.sleep', (['(7)'], {}), '(7)\n', (2565, 2568), False, 'import time\n'), ((4412, 4425), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (4423, 4425), False, 'from dotenv import load_dotenv\n'), ((4540, 4560), 'os.getenv', 'os.getenv', (['"""ACCOUNT"""'], {}), "('ACCOUNT')\n", (4549, 4560), False, 'import os\n'), ((4578, 4602), 'os.getenv', 'os.getenv', (['"""ACCOUNT_KEY"""'], {}), "('ACCOUNT_KEY')\n", (4587, 4602), False, 'import os\n'), ((833, 902), 'colored.stylize', 'stylize', (['"""########## CHECKING CURRENT STATUS ##########"""', 'orange_text'], {}), "('########## CHECKING CURRENT STATUS ##########', orange_text)\n", (840, 902), False, 'from colored import stylize\n'), ((1817, 1891), 'colored.stylize', 'stylize', (['"""########## CONFIGURING ORCH AND ACCOUNT ##########"""', 'orange_text'], {}), "('########## CONFIGURING ORCH AND ACCOUNT ##########', orange_text)\n", (1824, 1891), False, 'from colored import stylize\n'), ((2466, 2526), 'colored.stylize', 'stylize', (['"""########## SAVING CHANGES ##########"""', 'orange_text'], {}), "('########## SAVING CHANGES ##########', orange_text)\n", (2473, 2526), False, 'from colored import stylize\n'), ((4503, 4524), 'os.getenv', 'os.getenv', (['"""ORCH_URL"""'], {}), "('ORCH_URL')\n", (4512, 4524), False, 'import os\n'), ((4911, 4928), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (4926, 4928), False, 'import getpass\n'), ((4947, 4964), 'getpass.getpass', 'getpass.getpass', ([], {}), '()\n', (4962, 4964), False, 'import getpass\n'), ((1653, 1685), 'colored.stylize', 'stylize', (['orchestrator', 'blue_text'], {}), '(orchestrator, blue_text)\n', (1660, 1685), False, 'from colored import stylize\n'), ((1727, 1754), 'colored.stylize', 'stylize', (['account', 'blue_text'], {}), '(account, blue_text)\n', (1734, 1754), False, 'from colored import stylize\n'), ((2710, 2780), 'colored.stylize', 'stylize', (['"""########## WAITING FOR REGISTRATION ##########"""', 'orange_text'], {}), "('########## WAITING FOR REGISTRATION ##########', orange_text)\n", (2717, 2780), False, 'from colored import stylize\n'), ((3808, 3873), 'colored.stylize', 'stylize', (['"""########## CHECKING NEW STATUS ##########"""', 'orange_text'], {}), "('########## CHECKING NEW STATUS ##########', orange_text)\n", (3815, 3873), False, 'from colored import stylize\n'), ((4838, 4891), 'colored.stylize', 'stylize', (['"""Enter Edge Connect Credentials:"""', 'blue_text'], {}), "('Enter Edge Connect Credentials:', blue_text)\n", (4845, 4891), False, 'from colored import stylize\n'), ((1007, 1047), 'colored.stylize', 'stylize', (['current_orchestrator', 'blue_text'], {}), '(current_orchestrator, blue_text)\n', (1014, 1047), False, 'from colored import stylize\n'), ((1319, 1368), 'colored.stylize', 'stylize', (["current_reg_status['account']", 'blue_text'], {}), "(current_reg_status['account'], blue_text)\n", (1326, 1368), False, 'from colored import stylize\n'), ((3969, 4009), 'colored.stylize', 'stylize', (['current_orchestrator', 'blue_text'], {}), '(current_orchestrator, blue_text)\n', (3976, 4009), False, 'from colored import stylize\n'), ((4107, 4156), 'colored.stylize', 'stylize', (["current_reg_status['account']", 'blue_text'], {}), "(current_reg_status['account'], blue_text)\n", (4114, 4156), False, 'from colored import stylize\n'), ((3340, 3354), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (3350, 3354), False, 'import time\n')]
|
#!/usr/bin/env python3
"""
Author : <NAME> <<EMAIL>>
Date : 2021-09-22
Purpose: concatenate a file
"""
import argparse
import os
import sys
import re
# pylint: disable=W0105,missing-function-docstring,unspecified-encoding,consider-using-with
# flake8: noqa
# --------------------------------------------------
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='concatenate a file',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('file',
metavar='FILE',
type=argparse.FileType('rt'),
nargs='+',
help='one or more file names')
parser.add_argument('-n',
'--number',
help='print line numbers',
action='store_true')
parser.add_argument('-o',
'--outfile',
help='Output',
metavar='FILE',
type=str,
default=sys.stdout)
return parser.parse_args()
# --------------------------------------------------
def main():
"""A file to concatenate files and print them"""
args = get_args()
filenames = args.file
# numbers = args.number
outfile = args.outfile
cat = []
tac = []
# iterate over imputed files
for filename in filenames:
linenum = 1 # counter
lines = filename.readlines() # read lines of file
# or each file get each line
for line in lines:
cat.append(line)
for o in reversed(cat):
tac.append(o.strip("\n"))
if os.path.isfile(outfile):
#print('is file'):
f = open(outfile,"w")
print("Output is in file:", outfile)
for element in tac:
element += "\n"
f.write(element)
f.close()
elif outfile != sys.stdout:
f = open(outfile,'x')
print("Output is in file:", outfile)
for element in tac:
# print(element)
f.write(element)
f.close()
else:
print(*tac, sep="")
# --------------------------------------------------
if __name__ == '__main__':
main()
|
[
"os.path.isfile",
"argparse.ArgumentParser",
"argparse.FileType"
] |
[((383, 501), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""concatenate a file"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='concatenate a file', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (406, 501), False, 'import argparse\n'), ((1722, 1745), 'os.path.isfile', 'os.path.isfile', (['outfile'], {}), '(outfile)\n', (1736, 1745), False, 'import os\n'), ((616, 639), 'argparse.FileType', 'argparse.FileType', (['"""rt"""'], {}), "('rt')\n", (633, 639), False, 'import argparse\n')]
|
import importlib.util
import os
from elasticdl.python.common.log_utils import default_logger as logger
from elasticdl.python.worker.prediction_outputs_processor import (
BasePredictionOutputsProcessor,
)
def load_module(module_file):
spec = importlib.util.spec_from_file_location(module_file, module_file)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
def load_model_from_module(model_def, model_module, model_params):
model_def_name = model_def.split(".")[-1]
if model_def_name in model_module:
custom_model_name = model_def_name
else:
raise ValueError(
"Cannot find the custom model function/class "
"in model definition files"
)
if model_params:
model_params_dict = get_dict_from_params_str(model_params)
return model_module[custom_model_name](**model_params_dict)
else:
return model_module[custom_model_name]()
def get_dict_from_params_str(params_str):
"""Get the dictionary of kv pairs in a string separated
by semi-colon."""
if params_str:
kvs = params_str.split(";")
params_dict = {}
for kv in kvs:
k, v = kv.strip().split("=")
params_dict[k] = eval(v)
return params_dict
else:
return None
def get_module_file_path(model_zoo, spec_key):
"""Get the path to module file from model zoo and the spec string.
For example, if `model_zoo = "model_zoo"` and
`spec_key = "test_module.custom_model"`, the function returns
"model_zoo/test_module.py".
"""
return os.path.join(model_zoo, "/".join(spec_key.split(".")[:-1]) + ".py")
def _get_spec_value(spec_key, model_zoo, default_module, required=False):
"""Get the value to the given spec key.
Notes:
* If the dot-splitted spec key (e.g. "test_module.custom_model"
is splitted into "test_module" and "custom_model") is of length 1
(e.g. `spec_key` is "custom_model"), return the value in the
specified `default_module`.
* If the spec key does not exist in the module, return `None`.
"""
spec_key_items = spec_key.split(".")
spec_key_base = spec_key_items[-1]
if len(spec_key_items) == 1:
spec_key_module = default_module
else:
spec_key_module = load_module(
get_module_file_path(model_zoo, spec_key)
).__dict__
spec_value = (
spec_key_module[spec_key_base]
if spec_key_base in spec_key_module
else None
)
if required and spec_value is None:
raise Exception(
"Missing required spec key %s in the module: %s"
% (spec_key_base, spec_key)
)
return spec_value
def get_model_spec(
model_zoo,
model_def,
model_params,
dataset_fn,
loss,
optimizer,
eval_metrics_fn,
prediction_outputs_processor,
):
"""Get the model spec items in a tuple.
The model spec tuple contains the following items in order:
* The model object instantiated with parameters specified
in `model_params`,
* The `dataset_fn`,
* The `loss`,
* The `optimizer`,
* The `eval_metrics_fn`,
* The `prediction_outputs_processor`. Note that it will print
warning if it's not inherited from `BasePredictionOutputsProcessor`.
"""
model_def_module_file = get_module_file_path(model_zoo, model_def)
default_module = load_module(model_def_module_file).__dict__
model = load_model_from_module(model_def, default_module, model_params)
prediction_outputs_processor = _get_spec_value(
prediction_outputs_processor, model_zoo, default_module
)
if prediction_outputs_processor and not isinstance(
prediction_outputs_processor, BasePredictionOutputsProcessor
):
logger.warning(
"prediction_outputs_processor is not "
"inherited from BasePredictionOutputsProcessor. "
"Prediction outputs may not be processed correctly."
)
return (
model,
_get_spec_value(dataset_fn, model_zoo, default_module, required=True),
_get_spec_value(loss, model_zoo, default_module, required=True),
_get_spec_value(optimizer, model_zoo, default_module, required=True),
_get_spec_value(
eval_metrics_fn, model_zoo, default_module, required=True
),
prediction_outputs_processor,
)
def save_checkpoint_to_file(pb_model, file_name):
encoded_model = pb_model.SerializeToString()
with open(file_name, "wb") as f:
f.write(encoded_model)
def load_from_checkpoint_file(file_name):
from elasticdl.proto import elasticdl_pb2
pb_model = elasticdl_pb2.Model()
with open(file_name, "rb") as f:
pb_model.ParseFromString(f.read())
return pb_model
def find_layer(model, layer_class):
"""
Find all layers in model that are instances of layer_class
"""
layers = []
for layer in model.layers:
if isinstance(layer, layer_class):
layers.append(layer)
elif hasattr(layer, "layers"):
# search in nested layers
layers += find_layer(layer, layer_class)
return layers
def get_non_embedding_trainable_vars(model, embedding_layers):
"""
Get trainable variables which are not from ElasticDL embedding layers.
"""
embedding_items = []
for layer in embedding_layers:
embedding_items.extend(layer.trainable_variables)
non_embedding_trainable_vars = []
for var in model.trainable_variables:
is_embedding_item = False
for embedding_item in embedding_items:
if var is embedding_item:
is_embedding_item = True
break
if not is_embedding_item:
non_embedding_trainable_vars.append(var)
return non_embedding_trainable_vars
|
[
"elasticdl.proto.elasticdl_pb2.Model",
"elasticdl.python.common.log_utils.default_logger.warning"
] |
[((4718, 4739), 'elasticdl.proto.elasticdl_pb2.Model', 'elasticdl_pb2.Model', ([], {}), '()\n', (4737, 4739), False, 'from elasticdl.proto import elasticdl_pb2\n'), ((3831, 3992), 'elasticdl.python.common.log_utils.default_logger.warning', 'logger.warning', (['"""prediction_outputs_processor is not inherited from BasePredictionOutputsProcessor. Prediction outputs may not be processed correctly."""'], {}), "(\n 'prediction_outputs_processor is not inherited from BasePredictionOutputsProcessor. Prediction outputs may not be processed correctly.'\n )\n", (3845, 3992), True, 'from elasticdl.python.common.log_utils import default_logger as logger\n')]
|
from django.contrib.auth.models import User
from django.test import TestCase
from checklist.models import (
Bookmark,
Category,
Checklist,
Comment,
Follow,
FollowChecklist,
Item,
Notification,
Upvote,
)
from .helper_methods import (
create_bookmark_upvote,
create_category_if_not_exists,
create_checklist,
create_comment,
create_item,
create_notif,
create_user_if_not_exists,
)
# test classes
class TestChecklistModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.category = create_category_if_not_exists("Category_test")
cls.checklist = create_checklist(
title="New Checklist 1",
content="Test content",
user=cls.user,
category=cls.category,
)
def test_object_creation(self):
self.assertTrue(isinstance(TestChecklistModel.checklist, Checklist))
def test_str_method(self):
self.assertEqual(TestChecklistModel.checklist.__str__(), "New Checklist 1")
def test_get_absolute_url(self):
self.assertEqual(
TestChecklistModel.checklist.get_absolute_url(),
"/checklist/" + str(TestChecklistModel.user.id) + "/",
)
class TestItemModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.category = create_category_if_not_exists("Category_test")
cls.checklist = create_checklist(
title="New Checklist 1",
content="Test content",
user=cls.user,
category=cls.category,
)
cls.item = create_item(title="New Item 1", checklist=cls.checklist)
def test_object_creation(self):
self.assertTrue(isinstance(TestItemModel.item, Item))
def test_get_absolute_url(self):
self.assertEqual(
TestItemModel.item.get_absolute_url(),
"/checklist/item/" + str(TestItemModel.item.id) + "/view/",
)
class TestUpvoteModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.category = create_category_if_not_exists("Category_test")
cls.checklist = create_checklist(
title="New Checklist 1",
content="Test content",
user=cls.user,
category=cls.category,
)
cls.upvote = create_bookmark_upvote(
user=cls.user, checklist=cls.checklist, if_bookmark=False
)
def test_object_creation(self):
self.assertTrue(isinstance(TestUpvoteModel.upvote, Upvote))
def test_str_method(self):
self.assertEqual(
TestUpvoteModel.upvote.__str__(),
TestUpvoteModel.user.username + " - " + TestUpvoteModel.checklist.title,
)
class TestBookmarkModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user(username="testuser", password="<PASSWORD>")
cls.user.save()
cls.category = Category.objects.create(name="Category_test")
cls.checklist = Checklist.objects.create(
title="New Checklist 1",
content="Test content",
author=cls.user,
category=cls.category,
)
cls.bookmark = create_bookmark_upvote(
user=cls.user, checklist=cls.checklist, if_bookmark=True
)
def test_object_creation(self):
self.assertTrue(isinstance(TestBookmarkModel.bookmark, Bookmark))
def test_str_method(self):
self.assertEqual(
TestBookmarkModel.bookmark.__str__(),
TestBookmarkModel.user.username + " - " + TestBookmarkModel.checklist.title,
)
class TestCategoryModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.category = Category.objects.create(name="Category_test")
def test_object_creation(self):
self.assertTrue(isinstance(TestCategoryModel.category, Category))
def test_str_method(self):
self.assertEqual(
TestCategoryModel.category.__str__(),
TestCategoryModel.category.name,
)
class TestFollowModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user1 = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.user2 = create_user_if_not_exists(username="testuser2", password="<PASSWORD>")
def test_object_creation(self):
follow_obj = Follow.objects.create(
fromUser=TestFollowModel.user1, toUser=TestFollowModel.user2
)
self.assertTrue(isinstance(follow_obj, Follow))
class TestFollowChecklistModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.category = Category.objects.create(name="Category_test")
cls.checklist = Checklist.objects.create(
title="New Checklist 1",
content="Test content",
author=cls.user,
category=cls.category,
)
def test_object_creation(self):
follow_obj = FollowChecklist.objects.create(
fromUser=TestFollowChecklistModel.user,
toChecklist=TestFollowChecklistModel.checklist,
)
self.assertTrue(isinstance(follow_obj, FollowChecklist))
class TestNotificationModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user1 = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.user2 = create_user_if_not_exists(username="testuser2", password="<PASSWORD>")
cls.category = Category.objects.create(name="Category_test")
cls.checklist = Checklist.objects.create(
title="New Checklist 1",
content="Test content",
author=cls.user1,
category=cls.category,
)
def test_object_creation(self):
notif_obj = create_notif(
fromUser=TestNotificationModel.user1,
toUser=TestNotificationModel.user2,
notif_type=2,
)
self.assertTrue(isinstance(notif_obj, Notification))
class TestCommentModel(TestCase):
@classmethod
def setUpTestData(cls):
cls.user1 = create_user_if_not_exists(username="testuser", password="<PASSWORD>")
cls.user2 = create_user_if_not_exists(username="testuser2", password="<PASSWORD>")
cls.category = Category.objects.create(name="Category_test")
cls.checklist = Checklist.objects.create(
title="New Checklist 1",
content="Test content",
author=cls.user1,
category=cls.category,
)
def test_object_creation(self):
comment_obj = create_comment(
checklist=TestCommentModel.checklist,
user=TestCommentModel.user2,
body="Test comment",
)
self.assertTrue(isinstance(comment_obj, Comment))
|
[
"checklist.models.Checklist.objects.create",
"django.contrib.auth.models.User.objects.create_user",
"checklist.models.Follow.objects.create",
"checklist.models.Category.objects.create",
"checklist.models.FollowChecklist.objects.create"
] |
[((3074, 3142), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""testuser"""', 'password': '"""<PASSWORD>"""'}), "(username='testuser', password='<PASSWORD>')\n", (3098, 3142), False, 'from django.contrib.auth.models import User\n'), ((3191, 3236), 'checklist.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""Category_test"""'}), "(name='Category_test')\n", (3214, 3236), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((3261, 3378), 'checklist.models.Checklist.objects.create', 'Checklist.objects.create', ([], {'title': '"""New Checklist 1"""', 'content': '"""Test content"""', 'author': 'cls.user', 'category': 'cls.category'}), "(title='New Checklist 1', content='Test content',\n author=cls.user, category=cls.category)\n", (3285, 3378), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((3984, 4029), 'checklist.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""Category_test"""'}), "(name='Category_test')\n", (4007, 4029), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((4623, 4711), 'checklist.models.Follow.objects.create', 'Follow.objects.create', ([], {'fromUser': 'TestFollowModel.user1', 'toUser': 'TestFollowModel.user2'}), '(fromUser=TestFollowModel.user1, toUser=\n TestFollowModel.user2)\n', (4644, 4711), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((4986, 5031), 'checklist.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""Category_test"""'}), "(name='Category_test')\n", (5009, 5031), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((5057, 5174), 'checklist.models.Checklist.objects.create', 'Checklist.objects.create', ([], {'title': '"""New Checklist 1"""', 'content': '"""Test content"""', 'author': 'cls.user', 'category': 'cls.category'}), "(title='New Checklist 1', content='Test content',\n author=cls.user, category=cls.category)\n", (5081, 5174), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((5288, 5410), 'checklist.models.FollowChecklist.objects.create', 'FollowChecklist.objects.create', ([], {'fromUser': 'TestFollowChecklistModel.user', 'toChecklist': 'TestFollowChecklistModel.checklist'}), '(fromUser=TestFollowChecklistModel.user,\n toChecklist=TestFollowChecklistModel.checklist)\n', (5318, 5410), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((5797, 5842), 'checklist.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""Category_test"""'}), "(name='Category_test')\n", (5820, 5842), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((5868, 5986), 'checklist.models.Checklist.objects.create', 'Checklist.objects.create', ([], {'title': '"""New Checklist 1"""', 'content': '"""Test content"""', 'author': 'cls.user1', 'category': 'cls.category'}), "(title='New Checklist 1', content='Test content',\n author=cls.user1, category=cls.category)\n", (5892, 5986), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((6593, 6638), 'checklist.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""Category_test"""'}), "(name='Category_test')\n", (6616, 6638), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n'), ((6664, 6782), 'checklist.models.Checklist.objects.create', 'Checklist.objects.create', ([], {'title': '"""New Checklist 1"""', 'content': '"""Test content"""', 'author': 'cls.user1', 'category': 'cls.category'}), "(title='New Checklist 1', content='Test content',\n author=cls.user1, category=cls.category)\n", (6688, 6782), False, 'from checklist.models import Bookmark, Category, Checklist, Comment, Follow, FollowChecklist, Item, Notification, Upvote\n')]
|
#!/usr/bin/env python3
import argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='calculation radius of gyration using MDAnalysis')
## args
parser.add_argument('-i', '--input', default='traj.trr', nargs='?',
help='input trajectory file')
parser.add_argument('-s', '--structure', default='topol.tpr', nargs='?',
help='.tpr structure file')
parser.add_argument('-select', '--select', nargs='?',
help='selection of each molecule')
parser.add_argument('-nmol', '--nmol', nargs='?', type=int,
help='# molecules')
parser.add_argument('-b', '--begin', default=-1, nargs='?', type=int,
help='begining frame (-1: last half trajectory)')
parser.add_argument('-o', '--output', default='pol.rg', nargs='?',
help='output filename for Rg files')
parser.add_argument('args', nargs=argparse.REMAINDER)
parser.add_argument('-v', '--version', action='version', version='%(prog)s 0.1')
## read args
args = parser.parse_args()
## Check arguments for log
print(" input arguments: {0}".format(args))
## import modules
import sys
sys.path.append('/home/htjung/Utility/python/')
import hjung
from hjung import *
import MDAnalysis as mda
import numpy as np
## timer
start_proc, start_prof = hjung.time.init()
# read trajectory
u = mda.Universe(args.structure,args.input)
n_frames = len(u.trajectory)
skip_frames = 0
if args.begin == -1:
skip_frames = int(n_frames/2)
print(" skip {} frames".format(skip_frames))
else:
skip_frames = args.begin
if args.begin >= n_frames:
raise ValueError("wrong args.begin because of > n_frames")
n_frames = n_frames - skip_frames
atomtxt = open(args.select).read()
#hjung.polymer.check_traj_connectivity(u,str(atomtxt),args.nmol,1.8,'random')
select_mol = u.select_atoms(str(atomtxt))
if len(select_mol)%args.nmol != 0:
raise ValueError("wrong # molecules, (args.nmol, select_mol) {} {} ".format(args.nmol, len(select_mol)))
n_deg = int(len(select_mol)/args.nmol)
print("assume {} atoms you select per molecule".format(n_deg))
# calculation of Rg
data_rg = np.zeros((n_frames,args.nmol))
i_frame = 0
imod = hjung.time.process_init()
for ts in u.trajectory[skip_frames:]:
for i_mol in range(args.nmol):
mol = select_mol.atoms[n_deg*i_mol:n_deg*(i_mol+1)]
data_rg[i_frame,i_mol] = mol.radius_of_gyration()
i_frame = i_frame + 1
imod = hjung.time.process_print(i_frame, n_frames, imod)
# save raw rg data file
np.savetxt(args.output, data_rg,
header='Rg for each molecules (mean = {} +- {}) with {} frames'.format(np.mean(data_rg),np.std(data_rg),n_frames), fmt='%f', comments='# ')
np.save(args.output, data_rg)
print("average Rg = {} +- {}".format(np.mean(data_rg),np.std(data_rg)))
# save avg file
data_rg_tavg = np.column_stack((np.mean(data_rg, axis=0),np.std(data_rg, axis=0)))
np.savetxt(args.output+'.avg', data_rg_tavg,
header='averaged Rg for each molecule with {} frames'.format(n_frames), fmt='%f', comments='# ')
## timer
hjung.time.end_print(start_proc, start_prof)
|
[
"sys.path.append",
"hjung.time.init",
"numpy.save",
"argparse.ArgumentParser",
"hjung.time.end_print",
"numpy.std",
"hjung.time.process_init",
"numpy.zeros",
"MDAnalysis.Universe",
"hjung.time.process_print",
"numpy.mean"
] |
[((52, 204), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter', 'description': '"""calculation radius of gyration using MDAnalysis"""'}), "(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, description=\n 'calculation radius of gyration using MDAnalysis')\n", (75, 204), False, 'import argparse\n'), ((1109, 1156), 'sys.path.append', 'sys.path.append', (['"""/home/htjung/Utility/python/"""'], {}), "('/home/htjung/Utility/python/')\n", (1124, 1156), False, 'import sys\n'), ((1276, 1293), 'hjung.time.init', 'hjung.time.init', ([], {}), '()\n', (1291, 1293), False, 'import hjung\n'), ((1320, 1360), 'MDAnalysis.Universe', 'mda.Universe', (['args.structure', 'args.input'], {}), '(args.structure, args.input)\n', (1332, 1360), True, 'import MDAnalysis as mda\n'), ((2105, 2136), 'numpy.zeros', 'np.zeros', (['(n_frames, args.nmol)'], {}), '((n_frames, args.nmol))\n', (2113, 2136), True, 'import numpy as np\n'), ((2157, 2182), 'hjung.time.process_init', 'hjung.time.process_init', ([], {}), '()\n', (2180, 2182), False, 'import hjung\n'), ((2652, 2681), 'numpy.save', 'np.save', (['args.output', 'data_rg'], {}), '(args.output, data_rg)\n', (2659, 2681), True, 'import numpy as np\n'), ((3019, 3063), 'hjung.time.end_print', 'hjung.time.end_print', (['start_proc', 'start_prof'], {}), '(start_proc, start_prof)\n', (3039, 3063), False, 'import hjung\n'), ((2397, 2446), 'hjung.time.process_print', 'hjung.time.process_print', (['i_frame', 'n_frames', 'imod'], {}), '(i_frame, n_frames, imod)\n', (2421, 2446), False, 'import hjung\n'), ((2720, 2736), 'numpy.mean', 'np.mean', (['data_rg'], {}), '(data_rg)\n', (2727, 2736), True, 'import numpy as np\n'), ((2737, 2752), 'numpy.std', 'np.std', (['data_rg'], {}), '(data_rg)\n', (2743, 2752), True, 'import numpy as np\n'), ((2807, 2831), 'numpy.mean', 'np.mean', (['data_rg'], {'axis': '(0)'}), '(data_rg, axis=0)\n', (2814, 2831), True, 'import numpy as np\n'), ((2832, 2855), 'numpy.std', 'np.std', (['data_rg'], {'axis': '(0)'}), '(data_rg, axis=0)\n', (2838, 2855), True, 'import numpy as np\n'), ((2582, 2598), 'numpy.mean', 'np.mean', (['data_rg'], {}), '(data_rg)\n', (2589, 2598), True, 'import numpy as np\n'), ((2599, 2614), 'numpy.std', 'np.std', (['data_rg'], {}), '(data_rg)\n', (2605, 2614), True, 'import numpy as np\n')]
|
"""
Various tensorflow utilities
"""
import numpy as np
import tensorflow as tf
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.python.ops import variables
import functools
def passthrough(obj, value): return value
try:
variables.Variable._build_initializer_expr=passthrough
except: # older versions of TF don't have this
pass
def int_shape(x):
return list(map(int, x.get_shape()))
def concat_elu(x):
""" like concatenated ReLU (http://arxiv.org/abs/1603.05201), but then with ELU """
axis = len(x.get_shape()) - 1
return tf.nn.elu(tf.concat([x, -x], axis))
def log_sum_exp(x):
""" numerically stable log_sum_exp implementation that prevents overflow """
axis = len(x.get_shape()) - 1
m = tf.reduce_max(x, axis)
m2 = tf.reduce_max(x, axis, keep_dims=True)
return m + tf.log(tf.reduce_sum(tf.exp(x - m2), axis))
def log_prob_from_logits(x):
""" numerically stable log_softmax implementation that prevents overflow """
axis = len(x.get_shape()) - 1
m = tf.reduce_max(x, axis, keep_dims=True)
return x - m - tf.log(tf.reduce_sum(tf.exp(x - m), axis, keep_dims=True))
def discretized_mix_logistic_loss(x, l, sum_all=True):
""" log-likelihood for mixture of discretized logistics, assumes the data has been rescaled to [-1,1] interval """
xs = int_shape(
x) # true image (i.e. labels) to regress to, e.g. (B,32,32,3)
ls = int_shape(l) # predicted distribution, e.g. (B,32,32,100)
# here and below: unpacking the params of the mixture of logistics
nr_mix = int(ls[-1] / 10)
logit_probs = l[:, :, :, :nr_mix]
l = tf.reshape(l[:, :, :, nr_mix:], xs + [nr_mix * 3])
means = l[:, :, :, :, :nr_mix]
log_scales = tf.maximum(l[:, :, :, :, nr_mix:2 * nr_mix], -7.)
coeffs = tf.nn.tanh(l[:, :, :, :, 2 * nr_mix:3 * nr_mix])
# here and below: getting the means and adjusting them based on preceding
# sub-pixels
x = tf.reshape(x, xs + [1]) + tf.zeros(xs + [nr_mix])
m2 = tf.reshape(means[:, :, :, 1, :] + coeffs[:, :, :, 0, :]
* x[:, :, :, 0, :], [xs[0], xs[1], xs[2], 1, nr_mix])
m3 = tf.reshape(means[:, :, :, 2, :] + coeffs[:, :, :, 1, :] * x[:, :, :, 0, :] +
coeffs[:, :, :, 2, :] * x[:, :, :, 1, :], [xs[0], xs[1], xs[2], 1, nr_mix])
means = tf.concat([tf.reshape(means[:, :, :, 0, :], [
xs[0], xs[1], xs[2], 1, nr_mix]), m2, m3], 3)
centered_x = x - means
inv_stdv = tf.exp(-log_scales)
plus_in = inv_stdv * (centered_x + 1. / 255.)
cdf_plus = tf.nn.sigmoid(plus_in)
min_in = inv_stdv * (centered_x - 1. / 255.)
cdf_min = tf.nn.sigmoid(min_in)
# log probability for edge case of 0 (before scaling)
log_cdf_plus = plus_in - tf.nn.softplus(plus_in)
# log probability for edge case of 255 (before scaling)
log_one_minus_cdf_min = -tf.nn.softplus(min_in)
cdf_delta = cdf_plus - cdf_min # probability for all other cases
mid_in = inv_stdv * centered_x
# log probability in the center of the bin, to be used in extreme cases
# (not actually used in our code)
log_pdf_mid = mid_in - log_scales - 2. * tf.nn.softplus(mid_in)
# now select the right output: left edge case, right edge case, normal
# case, extremely low prob case (doesn't actually happen for us)
# this is what we are really doing, but using the robust version below for extreme cases in other applications and to avoid NaN issue with tf.select()
# log_probs = tf.select(x < -0.999, log_cdf_plus, tf.select(x > 0.999, log_one_minus_cdf_min, tf.log(cdf_delta)))
# robust version, that still works if probabilities are below 1e-5 (which never happens in our code)
# tensorflow backpropagates through tf.select() by multiplying with zero instead of selecting: this requires use to use some ugly tricks to avoid potential NaNs
# the 1e-12 in tf.maximum(cdf_delta, 1e-12) is never actually used as output, it's purely there to get around the tf.select() gradient issue
# if the probability on a sub-pixel is below 1e-5, we use an approximation
# based on the assumption that the log-density is constant in the bin of
# the observed sub-pixel value
log_probs = tf.where(x < -0.999, log_cdf_plus, tf.where(x > 0.999, log_one_minus_cdf_min,
tf.where(cdf_delta > 1e-5, tf.log(tf.maximum(cdf_delta, 1e-12)), log_pdf_mid - np.log(127.5))))
log_probs = tf.reduce_sum(log_probs, 3) + log_prob_from_logits(logit_probs)
if sum_all:
return -tf.reduce_sum(log_sum_exp(log_probs))
else:
return -tf.reduce_sum(log_sum_exp(log_probs), [1, 2])
def discretized_mix_logistic_loss_per_chn(x, lr, lg, lb, sum_all=True):
""" log-likelihood for mixture of discretized logistics, assumes the data has been rescaled to [-1,1] interval """
xs = int_shape(x) # true image (i.e. labels) to regress to, e.g. (B,32,32,3)
ls = int_shape(lr) # predicted distribution, e.g. (B,32,32,100)
# here and below: unpacking the params of the mixture of logistics
nr_mix = int(ls[-1] / 3)
logit_probs = lr[:, :, :, :nr_mix]
means = tf.concat([lr[:, :, :, None, nr_mix:nr_mix*2], lg[:, :, :, None, nr_mix:nr_mix*2], lb[:, :, :, None, nr_mix:nr_mix*2],], axis=-2)
log_scales = tf.concat([lr[:, :, :, None, nr_mix*2:nr_mix*3], lg[:, :, :, None, nr_mix*2:nr_mix*3], lb[:, :, :, None, nr_mix*2:nr_mix*3],], axis=-2)
log_scales = tf.maximum(log_scales, -7.)
x = tf.reshape(x, xs + [1]) + tf.zeros(xs + [nr_mix])
centered_x = x - means
inv_stdv = tf.exp(-log_scales)
plus_in = inv_stdv * (centered_x + 1. / 255.)
cdf_plus = tf.nn.sigmoid(plus_in)
min_in = inv_stdv * (centered_x - 1. / 255.)
cdf_min = tf.nn.sigmoid(min_in)
# log probability for edge case of 0 (before scaling)
log_cdf_plus = plus_in - tf.nn.softplus(plus_in)
# log probability for edge case of 255 (before scaling)
log_one_minus_cdf_min = -tf.nn.softplus(min_in)
cdf_delta = cdf_plus - cdf_min # probability for all other cases
mid_in = inv_stdv * centered_x
# log probability in the center of the bin, to be used in extreme cases
# (not actually used in our code)
log_pdf_mid = mid_in - log_scales - 2. * tf.nn.softplus(mid_in)
# now select the right output: left edge case, right edge case, normal
# case, extremely low prob case (doesn't actually happen for us)
# this is what we are really doing, but using the robust version below for extreme cases in other applications and to avoid NaN issue with tf.select()
# log_probs = tf.select(x < -0.999, log_cdf_plus, tf.select(x > 0.999, log_one_minus_cdf_min, tf.log(cdf_delta)))
# robust version, that still works if probabilities are below 1e-5 (which never happens in our code)
# tensorflow backpropagates through tf.select() by multiplying with zero instead of selecting: this requires use to use some ugly tricks to avoid potential NaNs
# the 1e-12 in tf.maximum(cdf_delta, 1e-12) is never actually used as output, it's purely there to get around the tf.select() gradient issue
# if the probability on a sub-pixel is below 1e-5, we use an approximation
# based on the assumption that the log-density is constant in the bin of
# the observed sub-pixel value
log_probs = tf.where(x < -0.999, log_cdf_plus, tf.where(x > 0.999, log_one_minus_cdf_min,
tf.where(cdf_delta > 1e-5, tf.log(tf.maximum(cdf_delta, 1e-12)), log_pdf_mid - np.log(127.5))))
log_probs = tf.reduce_sum(log_probs, 3) + log_prob_from_logits(logit_probs)
if sum_all:
return -tf.reduce_sum(log_sum_exp(log_probs))
else:
return -tf.reduce_sum(log_sum_exp(log_probs), [1, 2])
def sample_from_discretized_mix_logistic(l, nr_mix):
ls = int_shape(l)
xs = ls[:-1] + [3]
# unpack parameters
logit_probs = l[:, :, :, :nr_mix]
l = tf.reshape(l[:, :, :, nr_mix:], xs + [nr_mix * 3])
# sample mixture indicator from softmax
sel = tf.one_hot(tf.argmax(logit_probs - tf.log(-tf.log(tf.random_uniform(
logit_probs.get_shape(), minval=1e-5, maxval=1. - 1e-5))), 3), depth=nr_mix, dtype=tf.float32)
sel = tf.reshape(sel, xs[:-1] + [1, nr_mix])
# select logistic parameters
means = tf.reduce_sum(l[:, :, :, :, :nr_mix] * sel, 4)
log_scales = tf.maximum(tf.reduce_sum(
l[:, :, :, :, nr_mix:2 * nr_mix] * sel, 4), -7.)
coeffs = tf.reduce_sum(tf.nn.tanh(
l[:, :, :, :, 2 * nr_mix:3 * nr_mix]) * sel, 4)
# sample from logistic & clip to interval
# we don't actually round to the nearest 8bit value when sampling
u = tf.random_uniform(means.get_shape(), minval=1e-5, maxval=1. - 1e-5)
x = means + tf.exp(log_scales) * (tf.log(u) - tf.log(1. - u))
x0 = tf.minimum(tf.maximum(x[:, :, :, 0], -1.), 1.)
x1 = tf.minimum(tf.maximum(
x[:, :, :, 1] + coeffs[:, :, :, 0] * x0, -1.), 1.)
x2 = tf.minimum(tf.maximum(
x[:, :, :, 2] + coeffs[:, :, :, 1] * x0 + coeffs[:, :, :, 2] * x1, -1.), 1.)
return tf.concat([tf.reshape(x0, xs[:-1] + [1]), tf.reshape(x1, xs[:-1] + [1]), tf.reshape(x2, xs[:-1] + [1])], 3)
def get_var_maybe_avg(var_name, ema, **kwargs):
''' utility for retrieving polyak averaged params '''
v = tf.get_variable(var_name, **kwargs)
if ema is not None:
v = ema.average(v)
return v
def get_vars_maybe_avg(var_names, ema, **kwargs):
''' utility for retrieving polyak averaged params '''
vars = []
for vn in var_names:
vars.append(get_var_maybe_avg(vn, ema, **kwargs))
return vars
def adam_updates(params, cost_or_grads, lr=0.001, mom1=0.9, mom2=0.999, eps=1e-8):
''' Adam optimizer '''
updates = []
if type(cost_or_grads) is not list:
grads = tf.gradients(cost_or_grads, params)
else:
grads = cost_or_grads
t = tf.Variable(1., 'adam_t')
for p, g in zip(params, grads):
mg = tf.Variable(tf.zeros(p.get_shape()), p.name + '_adam_mg')
if mom1 > 0:
v = tf.Variable(tf.zeros(p.get_shape()), p.name + '_adam_v')
v_t = mom1 * v + (1. - mom1) * g
v_hat = v_t / (1. - tf.pow(mom1, t))
updates.append(v.assign(v_t))
else:
v_hat = g
mg_t = mom2 * mg + (1. - mom2) * tf.square(g)
mg_hat = mg_t / (1. - tf.pow(mom2, t))
g_t = v_hat / tf.sqrt(mg_hat + eps)
p_t = p - lr * g_t
updates.append(mg.assign(mg_t))
updates.append(p.assign(p_t))
updates.append(t.assign_add(1))
return tf.group(*updates)
def get_name(layer_name, counters):
''' utlity for keeping track of layer names '''
if not layer_name in counters:
counters[layer_name] = 0
name = layer_name + '_' + str(counters[layer_name])
counters[layer_name] += 1
return name
@add_arg_scope
def dense(x, num_units, nonlinearity=None, init_scale=1., counters={}, init=False, ema=None, **kwargs):
''' fully connected layer '''
name = get_name('dense', counters)
with tf.variable_scope(name):
if init:
# data based initialization of parameters
V = tf.get_variable('V', [int(x.get_shape()[
1]), num_units], tf.float32, tf.random_normal_initializer(0, 0.05), trainable=True)
V_norm = tf.nn.l2_normalize(V.initialized_value(), [0])
x_init = tf.matmul(x, V_norm)
m_init, v_init = tf.nn.moments(x_init, [0])
scale_init = init_scale / tf.sqrt(v_init + 1e-10)
g = tf.get_variable('g', dtype=tf.float32,
initializer=scale_init, trainable=True)
b = tf.get_variable('b', dtype=tf.float32,
initializer=-m_init * scale_init, trainable=True)
x_init = tf.reshape(
scale_init, [1, num_units]) * (x_init - tf.reshape(m_init, [1, num_units]))
if nonlinearity is not None:
x_init = nonlinearity(x_init)
return x_init
else:
V, g, b = get_vars_maybe_avg(['V', 'g', 'b'], ema)
# tf.assert_variables_initialized([V, g, b])
# use weight normalization (Salimans & Kingma, 2016)
x = tf.matmul(x, V)
scaler = g / tf.sqrt(tf.reduce_sum(tf.square(V), [0]))
x = tf.reshape(scaler, [1, num_units]) * \
x + tf.reshape(b, [1, num_units])
# apply nonlinearity
if nonlinearity is not None:
x = nonlinearity(x)
return x
@add_arg_scope
def conv2d(x, num_filters, filter_size=[3, 3], stride=[1, 1], pad='SAME', nonlinearity=None, init_scale=1., counters={}, init=False, ema=None, **kwargs):
''' convolutional layer '''
name = get_name('conv2d', counters)
with tf.variable_scope(name):
if init:
# data based initialization of parameters
V = tf.get_variable('V', filter_size + [int(x.get_shape()[-1]), num_filters],
tf.float32, tf.random_normal_initializer(0, 0.05), trainable=True)
V_norm = tf.nn.l2_normalize(V.initialized_value(), [0, 1, 2])
x_init = tf.nn.conv2d(x, V_norm, [1] + stride + [1], pad)
m_init, v_init = tf.nn.moments(x_init, [0, 1, 2])
scale_init = init_scale / tf.sqrt(v_init + 1e-8)
g = tf.get_variable('g', dtype=tf.float32,
initializer=scale_init, trainable=True)
b = tf.get_variable('b', dtype=tf.float32,
initializer=-m_init * scale_init, trainable=True)
x_init = tf.reshape(scale_init, [
1, 1, 1, num_filters]) * (x_init - tf.reshape(m_init, [1, 1, 1, num_filters]))
if nonlinearity is not None:
x_init = nonlinearity(x_init)
return x_init
else:
V, g, b = get_vars_maybe_avg(['V', 'g', 'b'], ema)
# tf.assert_variables_initialized([V, g, b])
# use weight normalization (Salimans & Kingma, 2016)
W = tf.reshape(g, [1, 1, 1, num_filters]) * \
tf.nn.l2_normalize(V, [0, 1, 2])
# calculate convolutional layer output
x = tf.nn.bias_add(tf.nn.conv2d(x, W, [1] + stride + [1], pad), b)
# apply nonlinearity
if nonlinearity is not None:
x = nonlinearity(x)
return x
@add_arg_scope
def deconv2d(x, num_filters, filter_size=[3, 3], stride=[1, 1], pad='SAME', nonlinearity=None, init_scale=1., counters={}, init=False, ema=None, **kwargs):
''' transposed convolutional layer '''
name = get_name('deconv2d', counters)
xs = int_shape(x)
if pad == 'SAME':
target_shape = [xs[0], xs[1] * stride[0],
xs[2] * stride[1], num_filters]
else:
target_shape = [xs[0], xs[1] * stride[0] + filter_size[0] -
1, xs[2] * stride[1] + filter_size[1] - 1, num_filters]
with tf.variable_scope(name):
if init:
# data based initialization of parameters
V = tf.get_variable('V', filter_size + [num_filters, int(x.get_shape(
)[-1])], tf.float32, tf.random_normal_initializer(0, 0.05), trainable=True)
V_norm = tf.nn.l2_normalize(V.initialized_value(), [0, 1, 3])
x_init = tf.nn.conv2d_transpose(x, V_norm, target_shape, [
1] + stride + [1], padding=pad)
m_init, v_init = tf.nn.moments(x_init, [0, 1, 2])
scale_init = init_scale / tf.sqrt(v_init + 1e-8)
g = tf.get_variable('g', dtype=tf.float32,
initializer=scale_init, trainable=True)
b = tf.get_variable('b', dtype=tf.float32,
initializer=-m_init * scale_init, trainable=True)
x_init = tf.reshape(scale_init, [
1, 1, 1, num_filters]) * (x_init - tf.reshape(m_init, [1, 1, 1, num_filters]))
if nonlinearity is not None:
x_init = nonlinearity(x_init)
return x_init
else:
V, g, b = get_vars_maybe_avg(['V', 'g', 'b'], ema)
# tf.assert_variables_initialized([V, g, b])
# use weight normalization (Salimans & Kingma, 2016)
W = tf.reshape(g, [1, 1, num_filters, 1]) * \
tf.nn.l2_normalize(V, [0, 1, 3])
# calculate convolutional layer output
x = tf.nn.conv2d_transpose(
x, W, target_shape, [1] + stride + [1], padding=pad)
x = tf.nn.bias_add(x, b)
# apply nonlinearity
if nonlinearity is not None:
x = nonlinearity(x)
return x
@add_arg_scope
def nin(x, num_units, **kwargs):
""" a network in network layer (1x1 CONV) """
s = int_shape(x)
x = tf.reshape(x, [np.prod(s[:-1]), s[-1]])
x = dense(x, num_units, **kwargs)
return tf.reshape(x, s[:-1] + [num_units])
''' meta-layer consisting of multiple base layers '''
@add_arg_scope
def gated_resnet(x, a=None, h=None, nonlinearity=concat_elu, conv=conv2d, init=False, counters={}, ema=None, dropout_p=0., **kwargs):
xs = int_shape(x)
num_filters = xs[-1]
c1 = conv(nonlinearity(x), num_filters)
if a is not None: # add short-cut connection if auxiliary input 'a' is given
c1 += nin(nonlinearity(a), num_filters)
c1 = nonlinearity(c1)
if dropout_p > 0:
c1 = tf.nn.dropout(c1, keep_prob=1. - dropout_p)
c2 = conv(c1, num_filters * 2, init_scale=0.1)
# add projection of h vector if included: conditional generation
if h is not None:
with tf.variable_scope(get_name('conditional_weights', counters)):
hw = get_var_maybe_avg('hw', ema, shape=[int_shape(h)[-1], 2 * num_filters], dtype=tf.float32,
initializer=tf.random_normal_initializer(0, 0.05), trainable=True)
if init:
hw = hw.initialized_value()
c2 += tf.reshape(tf.matmul(h, hw), [xs[0], 1, 1, 2 * num_filters])
# Is this 3,2 or 2,3 ?
a, b = tf.split(c2, 2, 3)
c3 = a * tf.nn.sigmoid(b)
return x + c3
''' utilities for shifting the image around, efficient alternative to masking convolutions '''
def down_shift(x, step=1):
xs = int_shape(x)
return tf.concat([tf.zeros([xs[0], step, xs[2], xs[3]]), x[:, :xs[1] - step, :, :]], 1)
def right_shift(x, step=1):
xs = int_shape(x)
return tf.concat([tf.zeros([xs[0], xs[1], step, xs[3]]), x[:, :, :xs[2] - step, :]], 2)
def left_shift(x, step=1):
xs = int_shape(x)
return tf.concat([x[:, :, step:, :], tf.zeros([xs[0], xs[1], step, xs[3]]),], 2)
@add_arg_scope
def down_shifted_conv2d(x, num_filters, filter_size=[2, 3], stride=[1, 1], **kwargs):
x = tf.pad(x, [[0, 0], [filter_size[0] - 1, 0],
[int((filter_size[1] - 1) / 2), int((filter_size[1] - 1) / 2)], [0, 0]])
return conv2d(x, num_filters, filter_size=filter_size, pad='VALID', stride=stride, **kwargs)
@add_arg_scope
def down_shifted_deconv2d(x, num_filters, filter_size=[2, 3], stride=[1, 1], **kwargs):
x = deconv2d(x, num_filters, filter_size=filter_size,
pad='VALID', stride=stride, **kwargs)
xs = int_shape(x)
return x[:, :(xs[1] - filter_size[0] + 1), int((filter_size[1] - 1) / 2):(xs[2] - int((filter_size[1] - 1) / 2)), :]
@add_arg_scope
def down_right_shifted_conv2d(x, num_filters, filter_size=[2, 2], stride=[1, 1], **kwargs):
x = tf.pad(x, [[0, 0], [filter_size[0] - 1, 0],
[filter_size[1] - 1, 0], [0, 0]])
return conv2d(x, num_filters, filter_size=filter_size, pad='VALID', stride=stride, **kwargs)
@add_arg_scope
def down_right_shifted_deconv2d(x, num_filters, filter_size=[2, 2], stride=[1, 1], **kwargs):
x = deconv2d(x, num_filters, filter_size=filter_size,
pad='VALID', stride=stride, **kwargs)
xs = int_shape(x)
return x[:, :(xs[1] - filter_size[0] + 1):, :(xs[2] - filter_size[1] + 1), :]
def causal_shift_nin(x, num_filters, **kwargs):
chns = int_shape(x)[-1]
assert chns % 4 == 0
left, upleft, up, upright = tf.split(x, 4, axis=-1)
return nin(
tf.concat(
[right_shift(left), right_shift(down_shift(upleft)), down_shift(up), down_shift(left_shift(upleft))],
axis=-1
),
num_filters,
**kwargs
)
from tensorflow.python.framework import function
@add_arg_scope
def mem_saving_causal_shift_nin(x, num_filters, init, counters, **kwargs):
if init:
return causal_shift_nin(x, num_filters, init=init, counters=counters, **kwargs)
shps = int_shape(x)
@function.Defun(tf.float32)
def go(ix):
tf.get_variable_scope().reuse_variables()
ix.set_shape(shps)
return causal_shift_nin(ix, num_filters, init=init, counters=counters, **kwargs)
temp = go(x)
temp.set_shape([shps[0], shps[1], shps[2], num_filters])
return temp
import functools
@functools.lru_cache(maxsize=32)
def get_causal_mask(canvas_size, rate=1):
causal_mask = np.zeros([canvas_size, canvas_size], dtype=np.float32)
for i in range(canvas_size):
causal_mask[i, :i] = 1.
causal_mask = tf.constant(causal_mask, dtype=tf.float32)
if rate > 1:
dim = int(np.sqrt(canvas_size))
causal_mask = tf.reshape(causal_mask, [canvas_size, dim, dim, 1])
causal_mask = -tf.nn.max_pool(-causal_mask, [1, rate, rate, 1], [1, rate, rate, 1], 'SAME')
causal_mask = tf.reshape(causal_mask, [1, canvas_size, -1])
return causal_mask
def causal_attention(key, mixin, query, downsample=1, use_pos_enc=False):
bs, nr_chns = int_shape(key)[0], int_shape(key)[-1]
if downsample > 1:
pool_shape = [1, downsample, downsample, 1]
key = tf.nn.max_pool(key, pool_shape, pool_shape, 'SAME')
mixin = tf.nn.max_pool(mixin, pool_shape, pool_shape, 'SAME')
xs = int_shape(mixin)
if use_pos_enc:
pos1 = tf.range(0., xs[1]) / xs[1]
pos2 = tf.range(0., xs[2]) / xs[1]
mixin = tf.concat([
mixin,
tf.tile(pos1[None, :, None, None], [xs[0], 1, xs[2], 1]),
tf.tile(pos2[None, None, :, None], [xs[0], xs[2], 1, 1]),
], axis=3)
mixin_chns = int_shape(mixin)[-1]
canvas_size = int(np.prod(int_shape(key)[1:-1]))
canvas_size_q = int(np.prod(int_shape(query)[1:-1]))
causal_mask = get_causal_mask(canvas_size_q, downsample)
dot = tf.matmul(
tf.reshape(query, [bs, canvas_size_q, nr_chns]),
tf.reshape(key, [bs, canvas_size, nr_chns]),
transpose_b=True
) - (1. - causal_mask) * 1e10
dot = dot - tf.reduce_max(dot, axis=-1, keep_dims=True)
causal_exp_dot = tf.exp(dot / np.sqrt(nr_chns).astype(np.float32)) * causal_mask
causal_probs = causal_exp_dot / (tf.reduce_sum(causal_exp_dot, axis=-1, keep_dims=True) + 1e-6)
mixed = tf.matmul(
causal_probs,
tf.reshape(mixin, [bs, canvas_size, mixin_chns])
)
return tf.reshape(mixed, int_shape(query)[:-1] + [mixin_chns])
def non_cached_get_causal_mask(canvas_size, causal_unit):
assert causal_unit == 1
ones = tf.ones([canvas_size, canvas_size], dtype=tf.float32)
lt = tf.matrix_band_part(ones, -1, 0) - tf.matrix_diag(tf.ones([canvas_size,], dtype=tf.float32))
return lt[None, ...]
def mem_saving_causal_attention(_key, _mixin, _query, causal_unit=1):
# @function.Defun(tf.float32, tf.float32, tf.float32)
def go(key, mixin, query,):
key.set_shape(int_shape(_key))
mixin.set_shape(int_shape(_mixin))
query.set_shape(int_shape(_query))
bs, nr_chns = int_shape(key)[0], int_shape(key)[-1]
mixin_chns = int_shape(mixin)[-1]
canvas_size = int(np.prod(int_shape(key)[1:-1]))
causal_mask = non_cached_get_causal_mask(canvas_size, causal_unit=causal_unit)
dot = tf.matmul(
tf.reshape(query, [bs, canvas_size, nr_chns]),
tf.reshape(key, [bs, canvas_size, nr_chns]),
transpose_b=True
) - (1. - causal_mask) * 1e10
dot = dot - tf.reduce_max(dot, axis=-1, keep_dims=True)
causal_exp_dot = tf.exp(dot / np.sqrt(nr_chns).astype(np.float32)) * causal_mask
causal_probs = causal_exp_dot / (tf.reduce_sum(causal_exp_dot, axis=-1, keep_dims=True) + 1e-6)
mixed = tf.matmul(
causal_probs,
tf.reshape(mixin, [bs, canvas_size, mixin_chns])
)
return tf.reshape(mixed, int_shape(mixin))
temp = go(_key, _mixin, _query)
temp.set_shape(int_shape(_mixin))
return temp
|
[
"tensorflow.reduce_sum",
"tensorflow.matrix_band_part",
"tensorflow.square",
"tensorflow.nn.tanh",
"tensorflow.maximum",
"tensorflow.reshape",
"tensorflow.nn.l2_normalize",
"tensorflow.get_variable_scope",
"tensorflow.matmul",
"tensorflow.Variable",
"tensorflow.nn.conv2d",
"tensorflow.reduce_max",
"tensorflow.split",
"tensorflow.sqrt",
"tensorflow.nn.conv2d_transpose",
"numpy.prod",
"tensorflow.get_variable",
"tensorflow.nn.moments",
"tensorflow.pad",
"tensorflow.concat",
"tensorflow.variable_scope",
"tensorflow.exp",
"tensorflow.gradients",
"tensorflow.nn.bias_add",
"tensorflow.ones",
"tensorflow.range",
"tensorflow.constant",
"tensorflow.nn.max_pool",
"tensorflow.tile",
"tensorflow.group",
"tensorflow.random_normal_initializer",
"tensorflow.log",
"tensorflow.python.framework.function.Defun",
"numpy.log",
"numpy.zeros",
"tensorflow.pow",
"tensorflow.zeros",
"tensorflow.nn.softplus",
"functools.lru_cache",
"tensorflow.nn.sigmoid",
"tensorflow.nn.dropout",
"numpy.sqrt"
] |
[((21746, 21777), 'functools.lru_cache', 'functools.lru_cache', ([], {'maxsize': '(32)'}), '(maxsize=32)\n', (21765, 21777), False, 'import functools\n'), ((797, 819), 'tensorflow.reduce_max', 'tf.reduce_max', (['x', 'axis'], {}), '(x, axis)\n', (810, 819), True, 'import tensorflow as tf\n'), ((830, 868), 'tensorflow.reduce_max', 'tf.reduce_max', (['x', 'axis'], {'keep_dims': '(True)'}), '(x, axis, keep_dims=True)\n', (843, 868), True, 'import tensorflow as tf\n'), ((1089, 1127), 'tensorflow.reduce_max', 'tf.reduce_max', (['x', 'axis'], {'keep_dims': '(True)'}), '(x, axis, keep_dims=True)\n', (1102, 1127), True, 'import tensorflow as tf\n'), ((1700, 1750), 'tensorflow.reshape', 'tf.reshape', (['l[:, :, :, nr_mix:]', '(xs + [nr_mix * 3])'], {}), '(l[:, :, :, nr_mix:], xs + [nr_mix * 3])\n', (1710, 1750), True, 'import tensorflow as tf\n'), ((1805, 1855), 'tensorflow.maximum', 'tf.maximum', (['l[:, :, :, :, nr_mix:2 * nr_mix]', '(-7.0)'], {}), '(l[:, :, :, :, nr_mix:2 * nr_mix], -7.0)\n', (1815, 1855), True, 'import tensorflow as tf\n'), ((1869, 1917), 'tensorflow.nn.tanh', 'tf.nn.tanh', (['l[:, :, :, :, 2 * nr_mix:3 * nr_mix]'], {}), '(l[:, :, :, :, 2 * nr_mix:3 * nr_mix])\n', (1879, 1917), True, 'import tensorflow as tf\n'), ((2084, 2197), 'tensorflow.reshape', 'tf.reshape', (['(means[:, :, :, 1, :] + coeffs[:, :, :, 0, :] * x[:, :, :, 0, :])', '[xs[0], xs[1], xs[2], 1, nr_mix]'], {}), '(means[:, :, :, 1, :] + coeffs[:, :, :, 0, :] * x[:, :, :, 0, :],\n [xs[0], xs[1], xs[2], 1, nr_mix])\n', (2094, 2197), True, 'import tensorflow as tf\n'), ((2225, 2381), 'tensorflow.reshape', 'tf.reshape', (['(means[:, :, :, 2, :] + coeffs[:, :, :, 1, :] * x[:, :, :, 0, :] + coeffs[:,\n :, :, 2, :] * x[:, :, :, 1, :])', '[xs[0], xs[1], xs[2], 1, nr_mix]'], {}), '(means[:, :, :, 2, :] + coeffs[:, :, :, 1, :] * x[:, :, :, 0, :] +\n coeffs[:, :, :, 2, :] * x[:, :, :, 1, :], [xs[0], xs[1], xs[2], 1, nr_mix])\n', (2235, 2381), True, 'import tensorflow as tf\n'), ((2571, 2590), 'tensorflow.exp', 'tf.exp', (['(-log_scales)'], {}), '(-log_scales)\n', (2577, 2590), True, 'import tensorflow as tf\n'), ((2658, 2680), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['plus_in'], {}), '(plus_in)\n', (2671, 2680), True, 'import tensorflow as tf\n'), ((2746, 2767), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['min_in'], {}), '(min_in)\n', (2759, 2767), True, 'import tensorflow as tf\n'), ((5310, 5449), 'tensorflow.concat', 'tf.concat', (['[lr[:, :, :, None, nr_mix:nr_mix * 2], lg[:, :, :, None, nr_mix:nr_mix * 2],\n lb[:, :, :, None, nr_mix:nr_mix * 2]]'], {'axis': '(-2)'}), '([lr[:, :, :, None, nr_mix:nr_mix * 2], lg[:, :, :, None, nr_mix:\n nr_mix * 2], lb[:, :, :, None, nr_mix:nr_mix * 2]], axis=-2)\n', (5319, 5449), True, 'import tensorflow as tf\n'), ((5458, 5609), 'tensorflow.concat', 'tf.concat', (['[lr[:, :, :, None, nr_mix * 2:nr_mix * 3], lg[:, :, :, None, nr_mix * 2:\n nr_mix * 3], lb[:, :, :, None, nr_mix * 2:nr_mix * 3]]'], {'axis': '(-2)'}), '([lr[:, :, :, None, nr_mix * 2:nr_mix * 3], lg[:, :, :, None, \n nr_mix * 2:nr_mix * 3], lb[:, :, :, None, nr_mix * 2:nr_mix * 3]], axis=-2)\n', (5467, 5609), True, 'import tensorflow as tf\n'), ((5612, 5640), 'tensorflow.maximum', 'tf.maximum', (['log_scales', '(-7.0)'], {}), '(log_scales, -7.0)\n', (5622, 5640), True, 'import tensorflow as tf\n'), ((5745, 5764), 'tensorflow.exp', 'tf.exp', (['(-log_scales)'], {}), '(-log_scales)\n', (5751, 5764), True, 'import tensorflow as tf\n'), ((5832, 5854), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['plus_in'], {}), '(plus_in)\n', (5845, 5854), True, 'import tensorflow as tf\n'), ((5920, 5941), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['min_in'], {}), '(min_in)\n', (5933, 5941), True, 'import tensorflow as tf\n'), ((8157, 8207), 'tensorflow.reshape', 'tf.reshape', (['l[:, :, :, nr_mix:]', '(xs + [nr_mix * 3])'], {}), '(l[:, :, :, nr_mix:], xs + [nr_mix * 3])\n', (8167, 8207), True, 'import tensorflow as tf\n'), ((8448, 8486), 'tensorflow.reshape', 'tf.reshape', (['sel', '(xs[:-1] + [1, nr_mix])'], {}), '(sel, xs[:-1] + [1, nr_mix])\n', (8458, 8486), True, 'import tensorflow as tf\n'), ((8534, 8580), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(l[:, :, :, :, :nr_mix] * sel)', '(4)'], {}), '(l[:, :, :, :, :nr_mix] * sel, 4)\n', (8547, 8580), True, 'import tensorflow as tf\n'), ((9552, 9587), 'tensorflow.get_variable', 'tf.get_variable', (['var_name'], {}), '(var_name, **kwargs)\n', (9567, 9587), True, 'import tensorflow as tf\n'), ((10165, 10191), 'tensorflow.Variable', 'tf.Variable', (['(1.0)', '"""adam_t"""'], {}), "(1.0, 'adam_t')\n", (10176, 10191), True, 'import tensorflow as tf\n'), ((10878, 10896), 'tensorflow.group', 'tf.group', (['*updates'], {}), '(*updates)\n', (10886, 10896), True, 'import tensorflow as tf\n'), ((17527, 17562), 'tensorflow.reshape', 'tf.reshape', (['x', '(s[:-1] + [num_units])'], {}), '(x, s[:-1] + [num_units])\n', (17537, 17562), True, 'import tensorflow as tf\n'), ((18722, 18740), 'tensorflow.split', 'tf.split', (['c2', '(2)', '(3)'], {}), '(c2, 2, 3)\n', (18730, 18740), True, 'import tensorflow as tf\n'), ((20167, 20244), 'tensorflow.pad', 'tf.pad', (['x', '[[0, 0], [filter_size[0] - 1, 0], [filter_size[1] - 1, 0], [0, 0]]'], {}), '(x, [[0, 0], [filter_size[0] - 1, 0], [filter_size[1] - 1, 0], [0, 0]])\n', (20173, 20244), True, 'import tensorflow as tf\n'), ((20838, 20861), 'tensorflow.split', 'tf.split', (['x', '(4)'], {'axis': '(-1)'}), '(x, 4, axis=-1)\n', (20846, 20861), True, 'import tensorflow as tf\n'), ((21410, 21436), 'tensorflow.python.framework.function.Defun', 'function.Defun', (['tf.float32'], {}), '(tf.float32)\n', (21424, 21436), False, 'from tensorflow.python.framework import function\n'), ((21840, 21894), 'numpy.zeros', 'np.zeros', (['[canvas_size, canvas_size]'], {'dtype': 'np.float32'}), '([canvas_size, canvas_size], dtype=np.float32)\n', (21848, 21894), True, 'import numpy as np\n'), ((21981, 22023), 'tensorflow.constant', 'tf.constant', (['causal_mask'], {'dtype': 'tf.float32'}), '(causal_mask, dtype=tf.float32)\n', (21992, 22023), True, 'import tensorflow as tf\n'), ((22282, 22327), 'tensorflow.reshape', 'tf.reshape', (['causal_mask', '[1, canvas_size, -1]'], {}), '(causal_mask, [1, canvas_size, -1])\n', (22292, 22327), True, 'import tensorflow as tf\n'), ((24008, 24061), 'tensorflow.ones', 'tf.ones', (['[canvas_size, canvas_size]'], {'dtype': 'tf.float32'}), '([canvas_size, canvas_size], dtype=tf.float32)\n', (24015, 24061), True, 'import tensorflow as tf\n'), ((620, 644), 'tensorflow.concat', 'tf.concat', (['[x, -x]', 'axis'], {}), '([x, -x], axis)\n', (629, 644), True, 'import tensorflow as tf\n'), ((2024, 2047), 'tensorflow.reshape', 'tf.reshape', (['x', '(xs + [1])'], {}), '(x, xs + [1])\n', (2034, 2047), True, 'import tensorflow as tf\n'), ((2050, 2073), 'tensorflow.zeros', 'tf.zeros', (['(xs + [nr_mix])'], {}), '(xs + [nr_mix])\n', (2058, 2073), True, 'import tensorflow as tf\n'), ((2857, 2880), 'tensorflow.nn.softplus', 'tf.nn.softplus', (['plus_in'], {}), '(plus_in)\n', (2871, 2880), True, 'import tensorflow as tf\n'), ((2972, 2994), 'tensorflow.nn.softplus', 'tf.nn.softplus', (['min_in'], {}), '(min_in)\n', (2986, 2994), True, 'import tensorflow as tf\n'), ((4597, 4624), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['log_probs', '(3)'], {}), '(log_probs, 3)\n', (4610, 4624), True, 'import tensorflow as tf\n'), ((5651, 5674), 'tensorflow.reshape', 'tf.reshape', (['x', '(xs + [1])'], {}), '(x, xs + [1])\n', (5661, 5674), True, 'import tensorflow as tf\n'), ((5677, 5700), 'tensorflow.zeros', 'tf.zeros', (['(xs + [nr_mix])'], {}), '(xs + [nr_mix])\n', (5685, 5700), True, 'import tensorflow as tf\n'), ((6031, 6054), 'tensorflow.nn.softplus', 'tf.nn.softplus', (['plus_in'], {}), '(plus_in)\n', (6045, 6054), True, 'import tensorflow as tf\n'), ((6146, 6168), 'tensorflow.nn.softplus', 'tf.nn.softplus', (['min_in'], {}), '(min_in)\n', (6160, 6168), True, 'import tensorflow as tf\n'), ((7771, 7798), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['log_probs', '(3)'], {}), '(log_probs, 3)\n', (7784, 7798), True, 'import tensorflow as tf\n'), ((8610, 8666), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(l[:, :, :, :, nr_mix:2 * nr_mix] * sel)', '(4)'], {}), '(l[:, :, :, :, nr_mix:2 * nr_mix] * sel, 4)\n', (8623, 8666), True, 'import tensorflow as tf\n'), ((9063, 9094), 'tensorflow.maximum', 'tf.maximum', (['x[:, :, :, 0]', '(-1.0)'], {}), '(x[:, :, :, 0], -1.0)\n', (9073, 9094), True, 'import tensorflow as tf\n'), ((9120, 9177), 'tensorflow.maximum', 'tf.maximum', (['(x[:, :, :, 1] + coeffs[:, :, :, 0] * x0)', '(-1.0)'], {}), '(x[:, :, :, 1] + coeffs[:, :, :, 0] * x0, -1.0)\n', (9130, 9177), True, 'import tensorflow as tf\n'), ((9213, 9300), 'tensorflow.maximum', 'tf.maximum', (['(x[:, :, :, 2] + coeffs[:, :, :, 1] * x0 + coeffs[:, :, :, 2] * x1)', '(-1.0)'], {}), '(x[:, :, :, 2] + coeffs[:, :, :, 1] * x0 + coeffs[:, :, :, 2] *\n x1, -1.0)\n', (9223, 9300), True, 'import tensorflow as tf\n'), ((10078, 10113), 'tensorflow.gradients', 'tf.gradients', (['cost_or_grads', 'params'], {}), '(cost_or_grads, params)\n', (10090, 10113), True, 'import tensorflow as tf\n'), ((11376, 11399), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (11393, 11399), True, 'import tensorflow as tf\n'), ((13203, 13226), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (13220, 13226), True, 'import tensorflow as tf\n'), ((15482, 15505), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (15499, 15505), True, 'import tensorflow as tf\n'), ((18067, 18111), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['c1'], {'keep_prob': '(1.0 - dropout_p)'}), '(c1, keep_prob=1.0 - dropout_p)\n', (18080, 18111), True, 'import tensorflow as tf\n'), ((18755, 18771), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['b'], {}), '(b)\n', (18768, 18771), True, 'import tensorflow as tf\n'), ((22108, 22159), 'tensorflow.reshape', 'tf.reshape', (['causal_mask', '[canvas_size, dim, dim, 1]'], {}), '(causal_mask, [canvas_size, dim, dim, 1])\n', (22118, 22159), True, 'import tensorflow as tf\n'), ((22584, 22635), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['key', 'pool_shape', 'pool_shape', '"""SAME"""'], {}), "(key, pool_shape, pool_shape, 'SAME')\n", (22598, 22635), True, 'import tensorflow as tf\n'), ((22653, 22706), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['mixin', 'pool_shape', 'pool_shape', '"""SAME"""'], {}), "(mixin, pool_shape, pool_shape, 'SAME')\n", (22667, 22706), True, 'import tensorflow as tf\n'), ((23487, 23530), 'tensorflow.reduce_max', 'tf.reduce_max', (['dot'], {'axis': '(-1)', 'keep_dims': '(True)'}), '(dot, axis=-1, keep_dims=True)\n', (23500, 23530), True, 'import tensorflow as tf\n'), ((23778, 23826), 'tensorflow.reshape', 'tf.reshape', (['mixin', '[bs, canvas_size, mixin_chns]'], {}), '(mixin, [bs, canvas_size, mixin_chns])\n', (23788, 23826), True, 'import tensorflow as tf\n'), ((24072, 24104), 'tensorflow.matrix_band_part', 'tf.matrix_band_part', (['ones', '(-1)', '(0)'], {}), '(ones, -1, 0)\n', (24091, 24104), True, 'import tensorflow as tf\n'), ((2423, 2489), 'tensorflow.reshape', 'tf.reshape', (['means[:, :, :, 0, :]', '[xs[0], xs[1], xs[2], 1, nr_mix]'], {}), '(means[:, :, :, 0, :], [xs[0], xs[1], xs[2], 1, nr_mix])\n', (2433, 2489), True, 'import tensorflow as tf\n'), ((3264, 3286), 'tensorflow.nn.softplus', 'tf.nn.softplus', (['mid_in'], {}), '(mid_in)\n', (3278, 3286), True, 'import tensorflow as tf\n'), ((6438, 6460), 'tensorflow.nn.softplus', 'tf.nn.softplus', (['mid_in'], {}), '(mid_in)\n', (6452, 6460), True, 'import tensorflow as tf\n'), ((8711, 8759), 'tensorflow.nn.tanh', 'tf.nn.tanh', (['l[:, :, :, :, 2 * nr_mix:3 * nr_mix]'], {}), '(l[:, :, :, :, 2 * nr_mix:3 * nr_mix])\n', (8721, 8759), True, 'import tensorflow as tf\n'), ((8992, 9010), 'tensorflow.exp', 'tf.exp', (['log_scales'], {}), '(log_scales)\n', (8998, 9010), True, 'import tensorflow as tf\n'), ((9334, 9363), 'tensorflow.reshape', 'tf.reshape', (['x0', '(xs[:-1] + [1])'], {}), '(x0, xs[:-1] + [1])\n', (9344, 9363), True, 'import tensorflow as tf\n'), ((9365, 9394), 'tensorflow.reshape', 'tf.reshape', (['x1', '(xs[:-1] + [1])'], {}), '(x1, xs[:-1] + [1])\n', (9375, 9394), True, 'import tensorflow as tf\n'), ((9396, 9425), 'tensorflow.reshape', 'tf.reshape', (['x2', '(xs[:-1] + [1])'], {}), '(x2, xs[:-1] + [1])\n', (9406, 9425), True, 'import tensorflow as tf\n'), ((10699, 10720), 'tensorflow.sqrt', 'tf.sqrt', (['(mg_hat + eps)'], {}), '(mg_hat + eps)\n', (10706, 10720), True, 'import tensorflow as tf\n'), ((11740, 11760), 'tensorflow.matmul', 'tf.matmul', (['x', 'V_norm'], {}), '(x, V_norm)\n', (11749, 11760), True, 'import tensorflow as tf\n'), ((11791, 11817), 'tensorflow.nn.moments', 'tf.nn.moments', (['x_init', '[0]'], {}), '(x_init, [0])\n', (11804, 11817), True, 'import tensorflow as tf\n'), ((11898, 11976), 'tensorflow.get_variable', 'tf.get_variable', (['"""g"""'], {'dtype': 'tf.float32', 'initializer': 'scale_init', 'trainable': '(True)'}), "('g', dtype=tf.float32, initializer=scale_init, trainable=True)\n", (11913, 11976), True, 'import tensorflow as tf\n'), ((12027, 12119), 'tensorflow.get_variable', 'tf.get_variable', (['"""b"""'], {'dtype': 'tf.float32', 'initializer': '(-m_init * scale_init)', 'trainable': '(True)'}), "('b', dtype=tf.float32, initializer=-m_init * scale_init,\n trainable=True)\n", (12042, 12119), True, 'import tensorflow as tf\n'), ((12616, 12631), 'tensorflow.matmul', 'tf.matmul', (['x', 'V'], {}), '(x, V)\n', (12625, 12631), True, 'import tensorflow as tf\n'), ((13589, 13637), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x', 'V_norm', '([1] + stride + [1])', 'pad'], {}), '(x, V_norm, [1] + stride + [1], pad)\n', (13601, 13637), True, 'import tensorflow as tf\n'), ((13668, 13700), 'tensorflow.nn.moments', 'tf.nn.moments', (['x_init', '[0, 1, 2]'], {}), '(x_init, [0, 1, 2])\n', (13681, 13700), True, 'import tensorflow as tf\n'), ((13780, 13858), 'tensorflow.get_variable', 'tf.get_variable', (['"""g"""'], {'dtype': 'tf.float32', 'initializer': 'scale_init', 'trainable': '(True)'}), "('g', dtype=tf.float32, initializer=scale_init, trainable=True)\n", (13795, 13858), True, 'import tensorflow as tf\n'), ((13909, 14001), 'tensorflow.get_variable', 'tf.get_variable', (['"""b"""'], {'dtype': 'tf.float32', 'initializer': '(-m_init * scale_init)', 'trainable': '(True)'}), "('b', dtype=tf.float32, initializer=-m_init * scale_init,\n trainable=True)\n", (13924, 14001), True, 'import tensorflow as tf\n'), ((15849, 15934), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (['x', 'V_norm', 'target_shape', '([1] + stride + [1])'], {'padding': 'pad'}), '(x, V_norm, target_shape, [1] + stride + [1], padding=pad\n )\n', (15871, 15934), True, 'import tensorflow as tf\n'), ((16006, 16038), 'tensorflow.nn.moments', 'tf.nn.moments', (['x_init', '[0, 1, 2]'], {}), '(x_init, [0, 1, 2])\n', (16019, 16038), True, 'import tensorflow as tf\n'), ((16118, 16196), 'tensorflow.get_variable', 'tf.get_variable', (['"""g"""'], {'dtype': 'tf.float32', 'initializer': 'scale_init', 'trainable': '(True)'}), "('g', dtype=tf.float32, initializer=scale_init, trainable=True)\n", (16133, 16196), True, 'import tensorflow as tf\n'), ((16247, 16339), 'tensorflow.get_variable', 'tf.get_variable', (['"""b"""'], {'dtype': 'tf.float32', 'initializer': '(-m_init * scale_init)', 'trainable': '(True)'}), "('b', dtype=tf.float32, initializer=-m_init * scale_init,\n trainable=True)\n", (16262, 16339), True, 'import tensorflow as tf\n'), ((17031, 17106), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (['x', 'W', 'target_shape', '([1] + stride + [1])'], {'padding': 'pad'}), '(x, W, target_shape, [1] + stride + [1], padding=pad)\n', (17053, 17106), True, 'import tensorflow as tf\n'), ((17142, 17162), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['x', 'b'], {}), '(x, b)\n', (17156, 17162), True, 'import tensorflow as tf\n'), ((17451, 17466), 'numpy.prod', 'np.prod', (['s[:-1]'], {}), '(s[:-1])\n', (17458, 17466), True, 'import numpy as np\n'), ((18630, 18646), 'tensorflow.matmul', 'tf.matmul', (['h', 'hw'], {}), '(h, hw)\n', (18639, 18646), True, 'import tensorflow as tf\n'), ((18967, 19004), 'tensorflow.zeros', 'tf.zeros', (['[xs[0], step, xs[2], xs[3]]'], {}), '([xs[0], step, xs[2], xs[3]])\n', (18975, 19004), True, 'import tensorflow as tf\n'), ((19116, 19153), 'tensorflow.zeros', 'tf.zeros', (['[xs[0], xs[1], step, xs[3]]'], {}), '([xs[0], xs[1], step, xs[3]])\n', (19124, 19153), True, 'import tensorflow as tf\n'), ((19281, 19318), 'tensorflow.zeros', 'tf.zeros', (['[xs[0], xs[1], step, xs[3]]'], {}), '([xs[0], xs[1], step, xs[3]])\n', (19289, 19318), True, 'import tensorflow as tf\n'), ((22063, 22083), 'numpy.sqrt', 'np.sqrt', (['canvas_size'], {}), '(canvas_size)\n', (22070, 22083), True, 'import numpy as np\n'), ((22184, 22260), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['(-causal_mask)', '[1, rate, rate, 1]', '[1, rate, rate, 1]', '"""SAME"""'], {}), "(-causal_mask, [1, rate, rate, 1], [1, rate, rate, 1], 'SAME')\n", (22198, 22260), True, 'import tensorflow as tf\n'), ((22773, 22793), 'tensorflow.range', 'tf.range', (['(0.0)', 'xs[1]'], {}), '(0.0, xs[1])\n', (22781, 22793), True, 'import tensorflow as tf\n'), ((22817, 22837), 'tensorflow.range', 'tf.range', (['(0.0)', 'xs[2]'], {}), '(0.0, xs[2])\n', (22825, 22837), True, 'import tensorflow as tf\n'), ((23306, 23353), 'tensorflow.reshape', 'tf.reshape', (['query', '[bs, canvas_size_q, nr_chns]'], {}), '(query, [bs, canvas_size_q, nr_chns])\n', (23316, 23353), True, 'import tensorflow as tf\n'), ((23364, 23407), 'tensorflow.reshape', 'tf.reshape', (['key', '[bs, canvas_size, nr_chns]'], {}), '(key, [bs, canvas_size, nr_chns])\n', (23374, 23407), True, 'import tensorflow as tf\n'), ((23657, 23711), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['causal_exp_dot'], {'axis': '(-1)', 'keep_dims': '(True)'}), '(causal_exp_dot, axis=-1, keep_dims=True)\n', (23670, 23711), True, 'import tensorflow as tf\n'), ((24122, 24162), 'tensorflow.ones', 'tf.ones', (['[canvas_size]'], {'dtype': 'tf.float32'}), '([canvas_size], dtype=tf.float32)\n', (24129, 24162), True, 'import tensorflow as tf\n'), ((24990, 25033), 'tensorflow.reduce_max', 'tf.reduce_max', (['dot'], {'axis': '(-1)', 'keep_dims': '(True)'}), '(dot, axis=-1, keep_dims=True)\n', (25003, 25033), True, 'import tensorflow as tf\n'), ((25309, 25357), 'tensorflow.reshape', 'tf.reshape', (['mixin', '[bs, canvas_size, mixin_chns]'], {}), '(mixin, [bs, canvas_size, mixin_chns])\n', (25319, 25357), True, 'import tensorflow as tf\n'), ((906, 920), 'tensorflow.exp', 'tf.exp', (['(x - m2)'], {}), '(x - m2)\n', (912, 920), True, 'import tensorflow as tf\n'), ((1169, 1182), 'tensorflow.exp', 'tf.exp', (['(x - m)'], {}), '(x - m)\n', (1175, 1182), True, 'import tensorflow as tf\n'), ((9014, 9023), 'tensorflow.log', 'tf.log', (['u'], {}), '(u)\n', (9020, 9023), True, 'import tensorflow as tf\n'), ((9026, 9041), 'tensorflow.log', 'tf.log', (['(1.0 - u)'], {}), '(1.0 - u)\n', (9032, 9041), True, 'import tensorflow as tf\n'), ((10615, 10627), 'tensorflow.square', 'tf.square', (['g'], {}), '(g)\n', (10624, 10627), True, 'import tensorflow as tf\n'), ((10659, 10674), 'tensorflow.pow', 'tf.pow', (['mom2', 't'], {}), '(mom2, t)\n', (10665, 10674), True, 'import tensorflow as tf\n'), ((11594, 11631), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(0)', '(0.05)'], {}), '(0, 0.05)\n', (11622, 11631), True, 'import tensorflow as tf\n'), ((11857, 11880), 'tensorflow.sqrt', 'tf.sqrt', (['(v_init + 1e-10)'], {}), '(v_init + 1e-10)\n', (11864, 11880), True, 'import tensorflow as tf\n'), ((12171, 12209), 'tensorflow.reshape', 'tf.reshape', (['scale_init', '[1, num_units]'], {}), '(scale_init, [1, num_units])\n', (12181, 12209), True, 'import tensorflow as tf\n'), ((12777, 12806), 'tensorflow.reshape', 'tf.reshape', (['b', '[1, num_units]'], {}), '(b, [1, num_units])\n', (12787, 12806), True, 'import tensorflow as tf\n'), ((13437, 13474), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(0)', '(0.05)'], {}), '(0, 0.05)\n', (13465, 13474), True, 'import tensorflow as tf\n'), ((13740, 13763), 'tensorflow.sqrt', 'tf.sqrt', (['(v_init + 1e-08)'], {}), '(v_init + 1e-08)\n', (13747, 13763), True, 'import tensorflow as tf\n'), ((14053, 14099), 'tensorflow.reshape', 'tf.reshape', (['scale_init', '[1, 1, 1, num_filters]'], {}), '(scale_init, [1, 1, 1, num_filters])\n', (14063, 14099), True, 'import tensorflow as tf\n'), ((14530, 14567), 'tensorflow.reshape', 'tf.reshape', (['g', '[1, 1, 1, num_filters]'], {}), '(g, [1, 1, 1, num_filters])\n', (14540, 14567), True, 'import tensorflow as tf\n'), ((14589, 14621), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (['V', '[0, 1, 2]'], {}), '(V, [0, 1, 2])\n', (14607, 14621), True, 'import tensorflow as tf\n'), ((14708, 14751), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x', 'W', '([1] + stride + [1])', 'pad'], {}), '(x, W, [1] + stride + [1], pad)\n', (14720, 14751), True, 'import tensorflow as tf\n'), ((15697, 15734), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(0)', '(0.05)'], {}), '(0, 0.05)\n', (15725, 15734), True, 'import tensorflow as tf\n'), ((16078, 16101), 'tensorflow.sqrt', 'tf.sqrt', (['(v_init + 1e-08)'], {}), '(v_init + 1e-08)\n', (16085, 16101), True, 'import tensorflow as tf\n'), ((16391, 16437), 'tensorflow.reshape', 'tf.reshape', (['scale_init', '[1, 1, 1, num_filters]'], {}), '(scale_init, [1, 1, 1, num_filters])\n', (16401, 16437), True, 'import tensorflow as tf\n'), ((16868, 16905), 'tensorflow.reshape', 'tf.reshape', (['g', '[1, 1, num_filters, 1]'], {}), '(g, [1, 1, num_filters, 1])\n', (16878, 16905), True, 'import tensorflow as tf\n'), ((16927, 16959), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (['V', '[0, 1, 3]'], {}), '(V, [0, 1, 3])\n', (16945, 16959), True, 'import tensorflow as tf\n'), ((21463, 21486), 'tensorflow.get_variable_scope', 'tf.get_variable_scope', ([], {}), '()\n', (21484, 21486), True, 'import tensorflow as tf\n'), ((22907, 22963), 'tensorflow.tile', 'tf.tile', (['pos1[None, :, None, None]', '[xs[0], 1, xs[2], 1]'], {}), '(pos1[None, :, None, None], [xs[0], 1, xs[2], 1])\n', (22914, 22963), True, 'import tensorflow as tf\n'), ((22978, 23034), 'tensorflow.tile', 'tf.tile', (['pos2[None, None, :, None]', '[xs[0], xs[2], 1, 1]'], {}), '(pos2[None, None, :, None], [xs[0], xs[2], 1, 1])\n', (22985, 23034), True, 'import tensorflow as tf\n'), ((24779, 24824), 'tensorflow.reshape', 'tf.reshape', (['query', '[bs, canvas_size, nr_chns]'], {}), '(query, [bs, canvas_size, nr_chns])\n', (24789, 24824), True, 'import tensorflow as tf\n'), ((24843, 24886), 'tensorflow.reshape', 'tf.reshape', (['key', '[bs, canvas_size, nr_chns]'], {}), '(key, [bs, canvas_size, nr_chns])\n', (24853, 24886), True, 'import tensorflow as tf\n'), ((25168, 25222), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['causal_exp_dot'], {'axis': '(-1)', 'keep_dims': '(True)'}), '(causal_exp_dot, axis=-1, keep_dims=True)\n', (25181, 25222), True, 'import tensorflow as tf\n'), ((4516, 4544), 'tensorflow.maximum', 'tf.maximum', (['cdf_delta', '(1e-12)'], {}), '(cdf_delta, 1e-12)\n', (4526, 4544), True, 'import tensorflow as tf\n'), ((4561, 4574), 'numpy.log', 'np.log', (['(127.5)'], {}), '(127.5)\n', (4567, 4574), True, 'import numpy as np\n'), ((7690, 7718), 'tensorflow.maximum', 'tf.maximum', (['cdf_delta', '(1e-12)'], {}), '(cdf_delta, 1e-12)\n', (7700, 7718), True, 'import tensorflow as tf\n'), ((7735, 7748), 'numpy.log', 'np.log', (['(127.5)'], {}), '(127.5)\n', (7741, 7748), True, 'import numpy as np\n'), ((10475, 10490), 'tensorflow.pow', 'tf.pow', (['mom1', 't'], {}), '(mom1, t)\n', (10481, 10490), True, 'import tensorflow as tf\n'), ((12240, 12274), 'tensorflow.reshape', 'tf.reshape', (['m_init', '[1, num_units]'], {}), '(m_init, [1, num_units])\n', (12250, 12274), True, 'import tensorflow as tf\n'), ((12717, 12751), 'tensorflow.reshape', 'tf.reshape', (['scaler', '[1, num_units]'], {}), '(scaler, [1, num_units])\n', (12727, 12751), True, 'import tensorflow as tf\n'), ((14146, 14188), 'tensorflow.reshape', 'tf.reshape', (['m_init', '[1, 1, 1, num_filters]'], {}), '(m_init, [1, 1, 1, num_filters])\n', (14156, 14188), True, 'import tensorflow as tf\n'), ((16484, 16526), 'tensorflow.reshape', 'tf.reshape', (['m_init', '[1, 1, 1, num_filters]'], {}), '(m_init, [1, 1, 1, num_filters])\n', (16494, 16526), True, 'import tensorflow as tf\n'), ((18490, 18527), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(0)', '(0.05)'], {}), '(0, 0.05)\n', (18518, 18527), True, 'import tensorflow as tf\n'), ((12680, 12692), 'tensorflow.square', 'tf.square', (['V'], {}), '(V)\n', (12689, 12692), True, 'import tensorflow as tf\n'), ((23568, 23584), 'numpy.sqrt', 'np.sqrt', (['nr_chns'], {}), '(nr_chns)\n', (23575, 23584), True, 'import numpy as np\n'), ((25075, 25091), 'numpy.sqrt', 'np.sqrt', (['nr_chns'], {}), '(nr_chns)\n', (25082, 25091), True, 'import numpy as np\n')]
|
"""This module builds a LALR(1) transition-table for lalr_parser.py
For now, shift/reduce conflicts are automatically resolved as shifts.
"""
# Author: <NAME> (2017)
# Email : <EMAIL>
import logging
from collections import defaultdict
from array import array
from ..utils import classify, classify_bool, bfs, fzset
from ..common import GrammarError, is_terminal
from .grammar_analysis import GrammarAnalyzer
class Action:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __repr__(self):
return str(self)
Shift = Action('Shift')
Reduce = Action('Reduce')
class ParseTable:
def __init__(self, states, start_state, end_state):
self.states = states
self.start_state = start_state
self.end_state = end_state
class ArrayParseTable(ParseTable):
"""
Stores the parse table in a array of 16bit ints with all the states
packed one after the other in the following structure:
num_tokens [ token_id atom ]*
The atom has its highest bit set if it's a reduce action.
"""
class StateProxy(object):
__slots__ = ('context', 'offset', 'cache')
def __init__(self, context, offset):
self.context = context
self.offset = offset
# self.cache = {}
def keys(self):
ofs = self.offset
table = self.context.table
inv_tokens = {v:k for k,v in self.context.tokens.items()}
length = table[ofs]
ofs += 1
while length > 0:
yield inv_tokens[ table[ofs] ]
ofs += 2
length -= 1
def values(self):
for token in self.keys():
yield self[token]
def items(self):
for token in self.keys():
yield (token, self[token])
def __getitem__(self, token):
# if token in self.cache:
# return self.cache[token]
token_idx = self.context.tokens[token]
table = self.context.table
ofs = self.offset
# tokens are sorted so use a binary search to find it
lo = 0
hi = table[ofs]
ofs += 1
while lo < hi:
mid = (lo+hi)//2
value = table[ofs + mid*2]
if value < token_idx:
lo = mid + 1
elif value > token_idx:
hi = mid
else:
# self.cache[token] = self._unpack(table[ofs + mid*2 + 1])
return self._unpack(table[ofs + mid*2 + 1])
return self.cache[token]
else:
raise KeyError(token)
def _unpack(self, atom):
if atom & 0x8000:
atom &= ~0x8000
return Reduce, self.context.idx_rules[atom]
else:
return Shift, atom
def __init__(self, states, start_state, end_state):
states_idx = dict( (s, i) for i,s in enumerate(states.keys()) )
self.start_state = states_idx[start_state]
self.end_state = states_idx[end_state]
self.table = table = array('H') # unsigned 16bit ints
self.tokens = tokens = {}
idx_tokens = []
rules = {}
self.idx_rules = idx_rules = []
self.states = {}
for state, productions in states.items():
# Create state proxy for the current offset
self.states[ states_idx[state] ] = ArrayParseTable.StateProxy(self, len(table))
# Prefix with the length of the state
table.append(len(productions))
# Sort the tokens based on the assigned index, indexing those
# that we haven't seen before
subtokens = []
for token in productions:
if token not in tokens:
tokens[token] = len(idx_tokens)
idx_tokens.append(token)
subtokens.append(tokens[token])
subtokens.sort()
for token_idx in subtokens:
token = idx_tokens[token_idx]
value = productions[token]
table.append(token_idx)
if value[0] is Shift:
table.append(states_idx[ value[1] ])
else:
if value[1] not in rules:
rules[ value[1] ] = len(idx_rules)
idx_rules.append(value[1])
ptr = rules[ value[1] ] | 0x8000
table.append(ptr)
class IntParseTable(ParseTable):
@classmethod
def from_ParseTable(cls, parse_table):
enum = list(parse_table.states)
state_to_idx = {s:i for i,s in enumerate(enum)}
int_states = {}
for s, la in parse_table.states.items():
la = {k:(v[0], state_to_idx[v[1]]) if v[0] is Shift else v
for k,v in la.items()}
int_states[ state_to_idx[s] ] = la
start_state = state_to_idx[parse_table.start_state]
end_state = state_to_idx[parse_table.end_state]
return cls(int_states, start_state, end_state)
class LALR_Analyzer(GrammarAnalyzer):
def compute_lookahead(self):
self.end_states = []
self.states = {}
def step(state):
lookahead = defaultdict(list)
sat, unsat = classify_bool(state, lambda rp: rp.is_satisfied)
for rp in sat:
for term in self.FOLLOW.get(rp.rule.origin, ()):
lookahead[term].append((Reduce, rp.rule))
d = classify(unsat, lambda rp: rp.next)
for sym, rps in d.items():
rps = {rp.advance(sym) for rp in rps}
for rp in set(rps):
if not rp.is_satisfied and not is_terminal(rp.next):
rps |= self.expand_rule(rp.next)
new_state = fzset(rps)
lookahead[sym].append((Shift, new_state))
if sym == '$END':
self.end_states.append( new_state )
yield new_state
for k, v in lookahead.items():
if len(v) > 1:
if self.debug:
logging.warn("Shift/reduce conflict for %s: %s. Resolving as shift.", k, v)
for x in v:
# XXX resolving shift/reduce into shift, like PLY
# Give a proper warning
if x[0] is Shift:
lookahead[k] = [x]
for k, v in lookahead.items():
if not len(v) == 1:
raise GrammarError("Collision in %s: %s" %(k, ', '.join(['\n * %s: %s' % x for x in v])))
self.states[state] = {k:v[0] for k, v in lookahead.items()}
for _ in bfs([self.start_state], step):
pass
self.end_state ,= self.end_states
self._parse_table = ParseTable(self.states, self.start_state, self.end_state)
if self.debug:
self.parse_table = self._parse_table
elif self.parser_conf.parsetable_class:
self.parse_table = self.parser_conf.parsetable_class(self.states, self.start_state, self.end_state)
else:
self.parse_table = IntParseTable.from_ParseTable(self._parse_table)
|
[
"collections.defaultdict",
"logging.warn",
"array.array"
] |
[((3228, 3238), 'array.array', 'array', (['"""H"""'], {}), "('H')\n", (3233, 3238), False, 'from array import array\n'), ((5408, 5425), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5419, 5425), False, 'from collections import defaultdict\n'), ((6321, 6396), 'logging.warn', 'logging.warn', (['"""Shift/reduce conflict for %s: %s. Resolving as shift."""', 'k', 'v'], {}), "('Shift/reduce conflict for %s: %s. Resolving as shift.', k, v)\n", (6333, 6396), False, 'import logging\n')]
|
#
# Copyright 2019 BrainPad Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
import os
import re
from cliboa.scenario.aws import BaseS3
from cliboa.scenario.validator import EssentialParameters
class S3Download(BaseS3):
"""
Download from S3
"""
def __init__(self):
super().__init__()
self._prefix = ""
self._delimiter = ""
self._src_pattern = None
self._dest_dir = "."
def prefix(self, prefix):
self._prefix = prefix
def delimiter(self, delimiter):
self._delimiter = delimiter
def src_pattern(self, src_pattern):
self._src_pattern = src_pattern
def dest_dir(self, dest_dir):
self._dest_dir = dest_dir
def execute(self, *args):
super().execute()
valid = EssentialParameters(self.__class__.__name__, [self._src_pattern])
valid()
client = self._s3_client()
p = client.get_paginator("list_objects")
for page in p.paginate(
Bucket=self._bucket, Delimiter=self._delimiter, Prefix=self._prefix
):
for c in page.get("Contents", []):
filename = c.get("Key")
rec = re.compile(self._src_pattern)
if not rec.fullmatch(filename):
continue
dest_path = os.path.join(self._dest_dir, os.path.basename(filename))
client.download_file(self._bucket, filename, dest_path)
|
[
"os.path.basename",
"re.compile",
"cliboa.scenario.validator.EssentialParameters"
] |
[((1346, 1411), 'cliboa.scenario.validator.EssentialParameters', 'EssentialParameters', (['self.__class__.__name__', '[self._src_pattern]'], {}), '(self.__class__.__name__, [self._src_pattern])\n', (1365, 1411), False, 'from cliboa.scenario.validator import EssentialParameters\n'), ((1745, 1774), 're.compile', 're.compile', (['self._src_pattern'], {}), '(self._src_pattern)\n', (1755, 1774), False, 'import re\n'), ((1909, 1935), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (1925, 1935), False, 'import os\n')]
|
import mongoengine as db
import datetime
import bson
class Collection(db.DynamicDocument):
"""
A collection of precomuted workflows such as datasets, ..
This is a dynamic document, so it will accept any number of
extra fields (expandable and uncontrolled schema)
"""
collection = db.StringField(required=True) # , choices=['dataset', '?'])
name = db.StringField(required=True) # Example 'water'
meta = {
'collection': 'collections', # DB collection/table name
'indexes': [
{'fields': ('collection', 'name'), 'unique': True}
]
}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Molecule(db.DynamicDocument):
"""
The molecule DB collection is managed by pymongo, so far
"""
name = db.StringField()
symbols = db.ListField()
molecular_formula = db.StringField()
molecule_hash = db.StringField()
geometry = db.ListField()
real = db.ListField()
fragments = db.DynamicField()
def create_hash(self):
""" TODO: create a special hash before saving"""
return ''
def save(self, *args, **kwargs):
"""Override save to add molecule_hash"""
# self.molecule_hash = self.create_hash()
return super(Molecule, self).save(*args, **kwargs)
def __str__(self):
return str(self.id)
meta = {
'collection': 'molecules',
'indexes': [
{'fields': ('molecule_hash', 'molecular_formula'),
'unique': False
} # TODO: what is unique?
]
}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Options(db.DynamicDocument):
"""
Options are unique for a specific program and name
"""
# TODO: pull choices from const config
program = db.StringField(required=True) #, choices=['rdkit', 'psi4', 'geometric', 'torsiondrive'])
# "default is reserved, insert on start
# option_name = db.StringField(required=True)
name = db.StringField(required=True)
meta = {
'indexes': [
{'fields': ('program', 'name'), 'unique': True}
]
}
def __str__(self):
return str(self.id)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class BaseResult(db.DynamicDocument):
"""
Abstract Base class for Results and Procedures
"""
# queue related
task_queue_id = db.StringField() # ObjectId, reference task_queue but without validation
status = db.StringField(required=True, choices=['COMPLETE', 'INCOMPLETE', 'ERROR'])
meta = {
'abstract': True,
# 'allow_inheritance': True,
'indexes': [
'status'
]
}
def save(self, *args, **kwargs):
"""Override save to set defaults"""
if not self.status:
self.status = 'INCOMPLETE'
return super(BaseResult, self).save(*args, **kwargs)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Result(BaseResult):
"""
Hold the result of an atomic single calculation
"""
# uniquely identifying a result
program = db.StringField(required=True) # example "rdkit", is it the same as program in options?
driver = db.StringField(required=True) # example "gradient"
method = db.StringField(required=True) # example "uff"
basis = db.StringField()
molecule = db.ReferenceField(Molecule, required=True) # or LazyReferenceField if only ID is needed?
# options = db.ReferenceField(Options) # ** has to be a FK or empty, can't be a string
options = db.StringField()
# output related
properties = db.DynamicField() # accept any, no validation
return_result = db.DynamicField() # better performance than db.ListField(db.FloatField())
provenance = db.DynamicField() # or an Embedded Documents with a structure?
# {"creator": "rdkit", "version": "2018.03.4",
# "routine": "rdkit.Chem.AllChem.UFFGetMoleculeForceField",
# "cpu": "Intel(R) Core(TM) i7-8650U CPU @ 1.90GHz", "hostname": "x1-carbon6", "username": "doaa",
# "wall_time": 0.14191770553588867},
schema_name = db.StringField() #default="qc_ret_data_output")
schema_version = db.IntField() # or String?
meta = {
'collection': 'results',
'indexes': [
{'fields': ('program', 'driver', 'method', 'basis',
'molecule', 'options'), 'unique': True},
]
}
# not used yet
# or use pre_save
def _save(self, *args, **kwargs):
"""Override save to handle options"""
if not isinstance(self.options, Options):
# self.options = Options.objects(program=self.program, option_name='default')\
# .modify(upsert=True, new=True, option_name='default')
self.options = Options.objects(program=self.program, option_name='default').first()
if not self.options:
self.options = Options(program=self.program, option_name='default').save()
# self.options.save()
return super(Result, self).save(*args, **kwargs)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Procedure(BaseResult):
"""
A procedure is a group of related results applied to a list of molecules
TODO: this looks exactly like results except those attributes listed here
"""
procedure = db.StringField(required=True)
# choices=['undefined', 'optimization', 'torsiondrive'])
# Todo: change name to be different from results program
program = db.StringField(required=True) # example: 'Geometric'
options = db.ReferenceField(Options) # options of the procedure
qc_meta = db.DynamicField() # --> all inside results
meta = {
'collection': 'procedure',
'allow_inheritance': True,
'indexes': [
# TODO: needs a unique index, + molecule?
{'fields': ('procedure', 'program'), 'unique': False} # TODO: check
]
}
# ================== Types of Procedures ================== #
class OptimizationProcedure(Procedure):
"""
An Optimization procedure
"""
procedure = db.StringField(default='optimization', required=True)
# initial_molecule = db.ReferenceField(Molecule) # always load with select_related
# final_molecule = db.ReferenceField(Molecule)
# output
# trajectory = db.ListField(Result)
meta = {
'indexes': [
# {'fields': ('initial_molecule', 'procedure_type', 'procedure_program'), 'unique': False} # TODO: check
]
}
class TorsiondriveProcedure(Procedure):
"""
An torsion drive procedure
"""
procedure = db.StringField(default='torsiondrive', required=True)
# TODO: add more fields
meta = {
'indexes': [
]
}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Spec(db.DynamicEmbeddedDocument):
""" The spec of a task in the queue
This is an embedded document, meaning that it will be embedded
in the task_queue collection and won't be stored as a seperate
collection/table --> for faster parsing
"""
function = db.StringField()
args = db.DynamicField() # fast, can take any structure
kwargs = db.DynamicField()
class TaskQueue(db.DynamicDocument):
"""A queue of tasks corresponding to a procedure"""
# spec = db.EmbeddedDocumentField(Spec, default=Spec)
spec = db.DynamicField()
# others
hooks = db.ListField(db.DynamicField()) # ??
tag = db.StringField(default=None)
parser = db.StringField(default='')
status = db.StringField(default='WAITING')
# choices=['RUNNING', 'WAITING', 'ERROR', 'COMPLETE'])
created_on = db.DateTimeField(required=True, default=datetime.datetime.now)
modified_on = db.DateTimeField(required=True, default=datetime.datetime.now)
base_result = db.GenericLazyReferenceField(dbref=True) # GenericLazyReferenceField() # can reference Results or any Procedure
meta = {
'indexes': [
'-created_on',
'status',
# {'fields': ("status", "tag", "hash_index"), 'unique': False}
{'fields': ("base_result",), 'unique': True} # new
]
# 'indexes': [
# '$function', # text index, not needed
# '#function', # hash index
# ('title', '-rating'), # rating is descending, direction only for multi-indices
# {
# 'fields': ('spec.function', 'tag'),
# 'unique': True
# }
# ]
}
# override to simplify the generic reference field
def to_json(self):
data = self.to_mongo()
data['base_result'] = data['base_result']['_ref']
return bson.json_util.dumps(data)
def save(self, *args, **kwargs):
"""Override save to update modified_on"""
self.modified_on = datetime.datetime.now()
return super(TaskQueue, self).save(*args, **kwargs)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ServiceQueue(db.DynamicDocument):
meta = {
'indexes': [
'status',
{'fields': ("status", "tag", "hash_index"), 'unique': False}
]
}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class User(db.DynamicDocument):
username = db.StringField(required=True, unique=True)
password = db.BinaryField(required=True)
permissions = db.ListField()
meta = {
'indexes': ['username']
}
class QueueManagers(db.DynamicDocument):
name = db.StringField(unique=True)
meta = {
'indexes': ['name']
}
|
[
"mongoengine.ReferenceField",
"mongoengine.DynamicField",
"bson.json_util.dumps",
"mongoengine.BinaryField",
"datetime.datetime.now",
"mongoengine.DateTimeField",
"mongoengine.IntField",
"mongoengine.StringField",
"mongoengine.ListField",
"mongoengine.GenericLazyReferenceField"
] |
[((320, 349), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (334, 349), True, 'import mongoengine as db\n'), ((392, 421), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (406, 421), True, 'import mongoengine as db\n'), ((822, 838), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (836, 838), True, 'import mongoengine as db\n'), ((853, 867), 'mongoengine.ListField', 'db.ListField', ([], {}), '()\n', (865, 867), True, 'import mongoengine as db\n'), ((892, 908), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (906, 908), True, 'import mongoengine as db\n'), ((929, 945), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (943, 945), True, 'import mongoengine as db\n'), ((961, 975), 'mongoengine.ListField', 'db.ListField', ([], {}), '()\n', (973, 975), True, 'import mongoengine as db\n'), ((987, 1001), 'mongoengine.ListField', 'db.ListField', ([], {}), '()\n', (999, 1001), True, 'import mongoengine as db\n'), ((1018, 1035), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (1033, 1035), True, 'import mongoengine as db\n'), ((1846, 1875), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (1860, 1875), True, 'import mongoengine as db\n'), ((2040, 2069), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (2054, 2069), True, 'import mongoengine as db\n'), ((2457, 2473), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (2471, 2473), True, 'import mongoengine as db\n'), ((2544, 2618), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)', 'choices': "['COMPLETE', 'INCOMPLETE', 'ERROR']"}), "(required=True, choices=['COMPLETE', 'INCOMPLETE', 'ERROR'])\n", (2558, 2618), True, 'import mongoengine as db\n'), ((3188, 3217), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (3202, 3217), True, 'import mongoengine as db\n'), ((3289, 3318), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (3303, 3318), True, 'import mongoengine as db\n'), ((3354, 3383), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (3368, 3383), True, 'import mongoengine as db\n'), ((3413, 3429), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (3427, 3429), True, 'import mongoengine as db\n'), ((3445, 3487), 'mongoengine.ReferenceField', 'db.ReferenceField', (['Molecule'], {'required': '(True)'}), '(Molecule, required=True)\n', (3462, 3487), True, 'import mongoengine as db\n'), ((3642, 3658), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (3656, 3658), True, 'import mongoengine as db\n'), ((3698, 3715), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (3713, 3715), True, 'import mongoengine as db\n'), ((3765, 3782), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (3780, 3782), True, 'import mongoengine as db\n'), ((3857, 3874), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (3872, 3874), True, 'import mongoengine as db\n'), ((4216, 4232), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (4230, 4232), True, 'import mongoengine as db\n'), ((4285, 4298), 'mongoengine.IntField', 'db.IntField', ([], {}), '()\n', (4296, 4298), True, 'import mongoengine as db\n'), ((5480, 5509), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (5494, 5509), True, 'import mongoengine as db\n'), ((5678, 5707), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)'}), '(required=True)\n', (5692, 5707), True, 'import mongoengine as db\n'), ((5746, 5772), 'mongoengine.ReferenceField', 'db.ReferenceField', (['Options'], {}), '(Options)\n', (5763, 5772), True, 'import mongoengine as db\n'), ((5816, 5833), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (5831, 5833), True, 'import mongoengine as db\n'), ((6289, 6342), 'mongoengine.StringField', 'db.StringField', ([], {'default': '"""optimization"""', 'required': '(True)'}), "(default='optimization', required=True)\n", (6303, 6342), True, 'import mongoengine as db\n'), ((6817, 6870), 'mongoengine.StringField', 'db.StringField', ([], {'default': '"""torsiondrive"""', 'required': '(True)'}), "(default='torsiondrive', required=True)\n", (6831, 6870), True, 'import mongoengine as db\n'), ((7318, 7334), 'mongoengine.StringField', 'db.StringField', ([], {}), '()\n', (7332, 7334), True, 'import mongoengine as db\n'), ((7346, 7363), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (7361, 7363), True, 'import mongoengine as db\n'), ((7411, 7428), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (7426, 7428), True, 'import mongoengine as db\n'), ((7594, 7611), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (7609, 7611), True, 'import mongoengine as db\n'), ((7686, 7714), 'mongoengine.StringField', 'db.StringField', ([], {'default': 'None'}), '(default=None)\n', (7700, 7714), True, 'import mongoengine as db\n'), ((7728, 7754), 'mongoengine.StringField', 'db.StringField', ([], {'default': '""""""'}), "(default='')\n", (7742, 7754), True, 'import mongoengine as db\n'), ((7768, 7801), 'mongoengine.StringField', 'db.StringField', ([], {'default': '"""WAITING"""'}), "(default='WAITING')\n", (7782, 7801), True, 'import mongoengine as db\n'), ((7903, 7965), 'mongoengine.DateTimeField', 'db.DateTimeField', ([], {'required': '(True)', 'default': 'datetime.datetime.now'}), '(required=True, default=datetime.datetime.now)\n', (7919, 7965), True, 'import mongoengine as db\n'), ((7984, 8046), 'mongoengine.DateTimeField', 'db.DateTimeField', ([], {'required': '(True)', 'default': 'datetime.datetime.now'}), '(required=True, default=datetime.datetime.now)\n', (8000, 8046), True, 'import mongoengine as db\n'), ((8066, 8106), 'mongoengine.GenericLazyReferenceField', 'db.GenericLazyReferenceField', ([], {'dbref': '(True)'}), '(dbref=True)\n', (8094, 8106), True, 'import mongoengine as db\n'), ((9558, 9600), 'mongoengine.StringField', 'db.StringField', ([], {'required': '(True)', 'unique': '(True)'}), '(required=True, unique=True)\n', (9572, 9600), True, 'import mongoengine as db\n'), ((9616, 9645), 'mongoengine.BinaryField', 'db.BinaryField', ([], {'required': '(True)'}), '(required=True)\n', (9630, 9645), True, 'import mongoengine as db\n'), ((9664, 9678), 'mongoengine.ListField', 'db.ListField', ([], {}), '()\n', (9676, 9678), True, 'import mongoengine as db\n'), ((9786, 9813), 'mongoengine.StringField', 'db.StringField', ([], {'unique': '(True)'}), '(unique=True)\n', (9800, 9813), True, 'import mongoengine as db\n'), ((7651, 7668), 'mongoengine.DynamicField', 'db.DynamicField', ([], {}), '()\n', (7666, 7668), True, 'import mongoengine as db\n'), ((8951, 8977), 'bson.json_util.dumps', 'bson.json_util.dumps', (['data'], {}), '(data)\n', (8971, 8977), False, 'import bson\n'), ((9093, 9116), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (9114, 9116), False, 'import datetime\n')]
|
from ctypes import byref, c_int, sizeof
from numba.cuda.cudadrv.driver import host_to_device, device_to_host, driver
from numba.cuda.cudadrv import devices, drvapi
from numba.cuda.testing import unittest, CUDATestCase
from numba.cuda.testing import skip_on_cudasim
ptx1 = '''
.version 1.4
.target sm_10, map_f64_to_f32
.entry _Z10helloworldPi (
.param .u64 __cudaparm__Z10helloworldPi_A)
{
.reg .u32 %r<3>;
.reg .u64 %rd<6>;
.loc 14 4 0
$LDWbegin__Z10helloworldPi:
.loc 14 6 0
cvt.s32.u16 %r1, %tid.x;
ld.param.u64 %rd1, [__cudaparm__Z10helloworldPi_A];
cvt.u64.u16 %rd2, %tid.x;
mul.lo.u64 %rd3, %rd2, 4;
add.u64 %rd4, %rd1, %rd3;
st.global.s32 [%rd4+0], %r1;
.loc 14 7 0
exit;
$LDWend__Z10helloworldPi:
} // _Z10helloworldPi
'''
ptx2 = '''
.version 3.0
.target sm_20
.address_size 64
.file 1 "/tmp/tmpxft_000012c7_00000000-9_testcuda.cpp3.i"
.file 2 "testcuda.cu"
.entry _Z10helloworldPi(
.param .u64 _Z10helloworldPi_param_0
)
{
.reg .s32 %r<3>;
.reg .s64 %rl<5>;
ld.param.u64 %rl1, [_Z10helloworldPi_param_0];
cvta.to.global.u64 %rl2, %rl1;
.loc 2 6 1
mov.u32 %r1, %tid.x;
mul.wide.u32 %rl3, %r1, 4;
add.s64 %rl4, %rl2, %rl3;
st.global.u32 [%rl4], %r1;
.loc 2 7 2
ret;
}
'''
@skip_on_cudasim('CUDA Driver API unsupported in the simulator')
class TestCudaDriver(CUDATestCase):
def setUp(self):
self.assertTrue(len(devices.gpus) > 0)
self.context = devices.get_context()
device = self.context.device
ccmajor, _ = device.compute_capability
if ccmajor >= 2:
self.ptx = ptx2
else:
self.ptx = ptx1
def tearDown(self):
del self.context
def test_cuda_driver_basic(self):
module = self.context.create_module_ptx(self.ptx)
function = module.get_function('_Z10helloworldPi')
array = (c_int * 100)()
memory = self.context.memalloc(sizeof(array))
host_to_device(memory, array, sizeof(array))
function = function.configure((1,), (100,))
function(memory)
device_to_host(array, memory, sizeof(array))
for i, v in enumerate(array):
self.assertEqual(i, v)
module.unload()
def test_cuda_driver_stream_operations(self):
module = self.context.create_module_ptx(self.ptx)
function = module.get_function('_Z10helloworldPi')
array = (c_int * 100)()
stream = self.context.create_stream()
with stream.auto_synchronize():
memory = self.context.memalloc(sizeof(array))
host_to_device(memory, array, sizeof(array), stream=stream)
function = function.configure((1,), (100,), stream=stream)
function(memory)
device_to_host(array, memory, sizeof(array), stream=stream)
for i, v in enumerate(array):
self.assertEqual(i, v)
def test_cuda_driver_default_stream(self):
# Test properties of the default stream
ds = self.context.get_default_stream()
self.assertIn("Default CUDA stream", repr(ds))
self.assertEqual(0, int(ds))
# bool(stream) is the check that is done in memcpy to decide if async
# version should be used. So the default (0) stream should be true-ish
# even though 0 is usually false-ish in Python.
self.assertTrue(ds)
self.assertFalse(ds.external)
def test_cuda_driver_legacy_default_stream(self):
# Test properties of the legacy default stream
ds = self.context.get_legacy_default_stream()
self.assertIn("Legacy default CUDA stream", repr(ds))
self.assertEqual(1, int(ds))
self.assertTrue(ds)
self.assertFalse(ds.external)
def test_cuda_driver_per_thread_default_stream(self):
# Test properties of the per-thread default stream
ds = self.context.get_per_thread_default_stream()
self.assertIn("Per-thread default CUDA stream", repr(ds))
self.assertEqual(2, int(ds))
self.assertTrue(ds)
self.assertFalse(ds.external)
def test_cuda_driver_stream(self):
# Test properties of non-default streams
s = self.context.create_stream()
self.assertIn("CUDA stream", repr(s))
self.assertNotIn("Default", repr(s))
self.assertNotIn("External", repr(s))
self.assertNotEqual(0, int(s))
self.assertTrue(s)
self.assertFalse(s.external)
def test_cuda_driver_external_stream(self):
# Test properties of a stream created from an external stream object.
# We use the driver API directly to create a stream, to emulate an
# external library creating a stream
handle = drvapi.cu_stream()
driver.cuStreamCreate(byref(handle), 0)
ptr = handle.value
s = self.context.create_external_stream(ptr)
self.assertIn("External CUDA stream", repr(s))
# Ensure neither "Default" nor "default"
self.assertNotIn("efault", repr(s))
self.assertEqual(ptr, int(s))
self.assertTrue(s)
self.assertTrue(s.external)
def test_cuda_driver_occupancy(self):
module = self.context.create_module_ptx(self.ptx)
function = module.get_function('_Z10helloworldPi')
value = self.context.get_active_blocks_per_multiprocessor(function, 128, 128)
self.assertTrue(value > 0)
def b2d(bs): return bs
grid, block = self.context.get_max_potential_block_size(function, b2d, 128, 128)
self.assertTrue(grid > 0)
self.assertTrue(block > 0)
if __name__ == '__main__':
unittest.main()
|
[
"numba.cuda.testing.skip_on_cudasim",
"numba.cuda.cudadrv.drvapi.cu_stream",
"numba.cuda.testing.unittest.main",
"ctypes.byref",
"ctypes.sizeof",
"numba.cuda.cudadrv.devices.get_context"
] |
[((1331, 1394), 'numba.cuda.testing.skip_on_cudasim', 'skip_on_cudasim', (['"""CUDA Driver API unsupported in the simulator"""'], {}), "('CUDA Driver API unsupported in the simulator')\n", (1346, 1394), False, 'from numba.cuda.testing import skip_on_cudasim\n'), ((5686, 5701), 'numba.cuda.testing.unittest.main', 'unittest.main', ([], {}), '()\n', (5699, 5701), False, 'from numba.cuda.testing import unittest, CUDATestCase\n'), ((1522, 1543), 'numba.cuda.cudadrv.devices.get_context', 'devices.get_context', ([], {}), '()\n', (1541, 1543), False, 'from numba.cuda.cudadrv import devices, drvapi\n'), ((4786, 4804), 'numba.cuda.cudadrv.drvapi.cu_stream', 'drvapi.cu_stream', ([], {}), '()\n', (4802, 4804), False, 'from numba.cuda.cudadrv import devices, drvapi\n'), ((2002, 2015), 'ctypes.sizeof', 'sizeof', (['array'], {}), '(array)\n', (2008, 2015), False, 'from ctypes import byref, c_int, sizeof\n'), ((2056, 2069), 'ctypes.sizeof', 'sizeof', (['array'], {}), '(array)\n', (2062, 2069), False, 'from ctypes import byref, c_int, sizeof\n'), ((2188, 2201), 'ctypes.sizeof', 'sizeof', (['array'], {}), '(array)\n', (2194, 2201), False, 'from ctypes import byref, c_int, sizeof\n'), ((2860, 2873), 'ctypes.sizeof', 'sizeof', (['array'], {}), '(array)\n', (2866, 2873), False, 'from ctypes import byref, c_int, sizeof\n'), ((4835, 4848), 'ctypes.byref', 'byref', (['handle'], {}), '(handle)\n', (4840, 4848), False, 'from ctypes import byref, c_int, sizeof\n'), ((2633, 2646), 'ctypes.sizeof', 'sizeof', (['array'], {}), '(array)\n', (2639, 2646), False, 'from ctypes import byref, c_int, sizeof\n'), ((2690, 2703), 'ctypes.sizeof', 'sizeof', (['array'], {}), '(array)\n', (2696, 2703), False, 'from ctypes import byref, c_int, sizeof\n')]
|
# coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class MessageEvaluation(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
MessageEvaluation - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'contact_column': 'str',
'contact_address': 'str',
'wrapup_code_id': 'str',
'timestamp': 'datetime'
}
self.attribute_map = {
'contact_column': 'contactColumn',
'contact_address': 'contactAddress',
'wrapup_code_id': 'wrapupCodeId',
'timestamp': 'timestamp'
}
self._contact_column = None
self._contact_address = None
self._wrapup_code_id = None
self._timestamp = None
@property
def contact_column(self):
"""
Gets the contact_column of this MessageEvaluation.
:return: The contact_column of this MessageEvaluation.
:rtype: str
"""
return self._contact_column
@contact_column.setter
def contact_column(self, contact_column):
"""
Sets the contact_column of this MessageEvaluation.
:param contact_column: The contact_column of this MessageEvaluation.
:type: str
"""
self._contact_column = contact_column
@property
def contact_address(self):
"""
Gets the contact_address of this MessageEvaluation.
:return: The contact_address of this MessageEvaluation.
:rtype: str
"""
return self._contact_address
@contact_address.setter
def contact_address(self, contact_address):
"""
Sets the contact_address of this MessageEvaluation.
:param contact_address: The contact_address of this MessageEvaluation.
:type: str
"""
self._contact_address = contact_address
@property
def wrapup_code_id(self):
"""
Gets the wrapup_code_id of this MessageEvaluation.
:return: The wrapup_code_id of this MessageEvaluation.
:rtype: str
"""
return self._wrapup_code_id
@wrapup_code_id.setter
def wrapup_code_id(self, wrapup_code_id):
"""
Sets the wrapup_code_id of this MessageEvaluation.
:param wrapup_code_id: The wrapup_code_id of this MessageEvaluation.
:type: str
"""
self._wrapup_code_id = wrapup_code_id
@property
def timestamp(self):
"""
Gets the timestamp of this MessageEvaluation.
Date time is represented as an ISO-8601 string. For example: yyyy-MM-ddTHH:mm:ss[.mmm]Z
:return: The timestamp of this MessageEvaluation.
:rtype: datetime
"""
return self._timestamp
@timestamp.setter
def timestamp(self, timestamp):
"""
Sets the timestamp of this MessageEvaluation.
Date time is represented as an ISO-8601 string. For example: yyyy-MM-ddTHH:mm:ss[.mmm]Z
:param timestamp: The timestamp of this MessageEvaluation.
:type: datetime
"""
self._timestamp = timestamp
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_json(self):
"""
Returns the model as raw JSON
"""
return json.dumps(sanitize_for_serialization(self.to_dict()))
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"six.iteritems"
] |
[((4409, 4438), 'six.iteritems', 'iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (4418, 4438), False, 'from six import iteritems\n')]
|
import pandas as pd
import numpy as np
from prophet import Prophet
from prophet.diagnostics import cross_validation
import plotly.express as px
from statistics import NormalDist
class ProphetNewsvendor:
def __init__(self):
pass
def fit(**kwargs):
"""Calculate mean & standard deviation from cross validation results
Args:
**kwargs : facbook prophet cross_validation parameters
Returns:
tuple: residual mean, residual standard deviation, residuals
"""
df_cv = cross_validation(**kwargs)
df_cv['Residuals'] = df_cv.yhat - df_cv.y
mean_residual = df_cv.Residuals.mean()
std_residual = df_cv.Residuals.std()
return mean_residual, std_residual, df_cv.Residuals
def plot_residuals(residuals:pd.Series):
"""Plot histogram of residuals.
Note: Assumption of normal distributed residuals
Args:
residuals (pd.Series): previously calculated residuals
"""
px.histogram(residuals).show()
def applynewsvendor(mean:float, mean_res:float, std:float, price:float, cost:float):
"""calculate newsvendor quantity from forecast and previously calculated forecast error statistics
Note: Assumption of normal distributed residuals
Args:
mean (float): forecast
mean_res (float): mean error of forecast derived by cross_validation
std (float): standard deviation of forecast derived by cross_validation
price (float): sales price of item
cost (float): variable cost of item
Returns:
float: newsvendor quantity
"""
newsvendor_result = mean + mean_res + std * NormalDist().inv_cdf((price - cost) / price)
return newsvendor_result
|
[
"statistics.NormalDist",
"plotly.express.histogram",
"prophet.diagnostics.cross_validation"
] |
[((542, 568), 'prophet.diagnostics.cross_validation', 'cross_validation', ([], {}), '(**kwargs)\n', (558, 568), False, 'from prophet.diagnostics import cross_validation\n'), ((1015, 1038), 'plotly.express.histogram', 'px.histogram', (['residuals'], {}), '(residuals)\n', (1027, 1038), True, 'import plotly.express as px\n'), ((1730, 1742), 'statistics.NormalDist', 'NormalDist', ([], {}), '()\n', (1740, 1742), False, 'from statistics import NormalDist\n')]
|
import getpass
from unittest import TestCase
from dvc.remote.ssh import RemoteSSH
class TestRemoteSSH(TestCase):
def test_user(self):
url = 'ssh://127.0.0.1:/path/to/dir'
config = {'url': url}
remote = RemoteSSH(None, config)
self.assertEqual(remote.user, getpass.getuser())
user = 'test1'
url = 'ssh://{}@127.0.0.1:/path/to/dir'.format(user)
config = {'url': url}
remote = RemoteSSH(None, config)
self.assertEqual(remote.user, user)
user = 'test2'
config['user'] = user
remote = RemoteSSH(None, config)
self.assertEqual(remote.user, user)
def test_url(self):
user = 'test'
host = '172.16.58.3'
port = 1234
path = '/path/to/dir'
# URL ssh://[user@]host.xz[:port]/path
url = 'ssh://{}@{}:{}{}'.format(user, host, port, path)
config = {'url': url}
remote = RemoteSSH(None, config)
self.assertEqual(remote.user, user)
self.assertEqual(remote.host, host)
self.assertEqual(remote.port, port)
self.assertEqual(remote.prefix, path)
# SCP-like URL ssh://[user@]host.xz:path
url = 'ssh://{}@{}:{}'.format(user, host, path)
config = {'url': url}
remote = RemoteSSH(None, config)
self.assertEqual(remote.user, user)
self.assertEqual(remote.host, host)
self.assertEqual(remote.port, remote.DEFAULT_PORT)
self.assertEqual(remote.prefix, path)
def test_port(self):
url = 'ssh://127.0.0.1/path/to/dir'
config = {'url': url}
remote = RemoteSSH(None, config)
self.assertEqual(remote.port, remote.DEFAULT_PORT)
port = 1234
url = 'ssh://127.0.0.1:{}/path/to/dir'.format(port)
config = {'url': url}
remote = RemoteSSH(None, config)
self.assertEqual(remote.port, port)
port = 4321
config['port'] = port
remote = RemoteSSH(None, config)
self.assertEqual(remote.port, port)
|
[
"dvc.remote.ssh.RemoteSSH",
"getpass.getuser"
] |
[((234, 257), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (243, 257), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((447, 470), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (456, 470), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((586, 609), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (595, 609), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((940, 963), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (949, 963), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((1297, 1320), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (1306, 1320), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((1632, 1655), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (1641, 1655), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((1843, 1866), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (1852, 1866), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((1979, 2002), 'dvc.remote.ssh.RemoteSSH', 'RemoteSSH', (['None', 'config'], {}), '(None, config)\n', (1988, 2002), False, 'from dvc.remote.ssh import RemoteSSH\n'), ((296, 313), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (311, 313), False, 'import getpass\n')]
|
from pypy.interpreter.mixedmodule import MixedModule
from pypy.interpreter.error import OperationError
class Module(MixedModule):
"""Operator Builtin Module. """
# HACK! override loaders to be able to access different operations
# under same name. I.e., operator.eq == operator.__eq__
def __init__(self, space, w_name):
def create_lambda(name, alsoname):
return lambda space : self.getdictvalue(space, space.wrap(alsoname))
MixedModule.__init__(self, space, w_name)
for name, alsoname in self.mapping.iteritems():
self.loaders[name] = create_lambda(name, alsoname)
appleveldefs = {}
app_names = ['__delslice__', '__getslice__', '__repeat__', '__setslice__',
'attrgetter', 'countOf', 'delslice', 'getslice', 'indexOf',
'isMappingType', 'isNumberType', 'isSequenceType',
'itemgetter','repeat', 'setslice',
]
for name in app_names:
appleveldefs[name] = 'app_operator.%s' % name
interp_names = ['index', 'abs', 'add',
'and_', 'concat', 'contains', 'delitem', 'div', 'eq', 'floordiv',
'ge', 'getitem', 'gt', 'inv',
'invert', 'is_', 'is_not', 'isCallable', 'le',
'lshift', 'lt', 'mod', 'mul',
'ne', 'neg', 'not_', 'or_',
'pos', 'pow', 'rshift', 'setitem', 'sequenceIncludes',
'sub', 'truediv', 'truth', 'xor']
interpleveldefs = {}
for name in interp_names:
interpleveldefs[name] = 'interp_operator.%s' % name
mapping = {
'__abs__' : 'abs',
'__add__' : 'add',
'__and__' : 'and_',
'__concat__' : 'concat',
'__contains__' : 'contains',
'sequenceIncludes' : 'contains',
'__delitem__' : 'delitem',
'__div__' : 'div',
'__eq__' : 'eq',
'__floordiv__' : 'floordiv',
'__ge__' : 'ge',
'__getitem__' : 'getitem',
'__gt__' : 'gt',
'__inv__' : 'inv',
'__invert__' : 'invert',
'__le__' : 'le',
'__lshift__' : 'lshift',
'__lt__' : 'lt',
'__mod__' : 'mod',
'__mul__' : 'mul',
'__ne__' : 'ne',
'__neg__' : 'neg',
'__not__' : 'not_',
'__or__' : 'or_',
'__pos__' : 'pos',
'__pow__' : 'pow',
'__rshift__' : 'rshift',
'__setitem__' : 'setitem',
'__sub__' : 'sub',
'__truediv__' : 'truediv',
'__xor__' : 'xor',
}
|
[
"pypy.interpreter.mixedmodule.MixedModule.__init__"
] |
[((482, 523), 'pypy.interpreter.mixedmodule.MixedModule.__init__', 'MixedModule.__init__', (['self', 'space', 'w_name'], {}), '(self, space, w_name)\n', (502, 523), False, 'from pypy.interpreter.mixedmodule import MixedModule\n')]
|
"""
NCL_proj_3.py
=============
This script illustrates the following concepts:
- Drawing filled contours over an orthographic map
- Changing the center latitude and longitude for an orthographic projection
- Turning off map fill
See following URLs to see the reproduced NCL plot & script:
- Original NCL script: https://www.ncl.ucar.edu/Applications/Scripts/proj_3.ncl
- Original NCL plot: https://www.ncl.ucar.edu/Applications/Images/proj_3_lg.png
"""
###############################################################################
# Import packages:
import numpy as np
import xarray as xr
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import geocat.datafiles as gdf
from geocat.viz import util as gvutil
###############################################################################
# Read in data:
# Open a netCDF data file using xarray default engine and load the data into xarrays
ds = xr.open_dataset(gdf.get("netcdf_files/atmos.nc"), decode_times=False)
t = ds.TS.isel(time=0)
###############################################################################
# Fix the artifact of not-shown-data around 0 and 360-degree longitudes
wrap_t = gvutil.xr_add_cyclic_longitudes(t, "lon")
###############################################################################
#Plot:
# Generate figure (set its size (width, height) in inches)
fig = plt.figure(figsize=(10, 10))
# Generate axes using Cartopy and draw coastlines with
ax = plt.axes(
projection=ccrs.Orthographic(central_longitude=-120, central_latitude=50))
# Set extent to include latitudes between 0 and 90, and longitude between
# 0 and -180 only
ax.set_extent([0, -180, 0, 90], ccrs.PlateCarree())
ax.set_global()
ax.coastlines(linewidths=0.5)
# Plot data and add a colorbar
temp = wrap_t.plot.contourf(ax=ax,
transform=ccrs.PlateCarree(),
levels=11,
cmap='coolwarm',
add_colorbar=False)
cbar_ticks = np.arange(210, 311, 10)
cbar = plt.colorbar(temp,
orientation='horizontal',
shrink=0.75,
pad=0.05,
extendrect=True,
ticks=cbar_ticks)
cbar.ax.tick_params(labelsize=10)
# Use geocat.viz.util convenience function to add titles to left and right
# of the plot axis.
gvutil.set_titles_and_labels(ax,
maintitle="Example of Orthogonal Projection",
lefttitle="Surface Temperature",
righttitle="K")
# Show the plot
plt.show()
|
[
"matplotlib.pyplot.show",
"geocat.viz.util.set_titles_and_labels",
"geocat.viz.util.xr_add_cyclic_longitudes",
"geocat.datafiles.get",
"matplotlib.pyplot.colorbar",
"matplotlib.pyplot.figure",
"numpy.arange",
"cartopy.crs.PlateCarree",
"cartopy.crs.Orthographic"
] |
[((1184, 1225), 'geocat.viz.util.xr_add_cyclic_longitudes', 'gvutil.xr_add_cyclic_longitudes', (['t', '"""lon"""'], {}), "(t, 'lon')\n", (1215, 1225), True, 'from geocat.viz import util as gvutil\n'), ((1380, 1408), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (1390, 1408), True, 'import matplotlib.pyplot as plt\n'), ((2021, 2044), 'numpy.arange', 'np.arange', (['(210)', '(311)', '(10)'], {}), '(210, 311, 10)\n', (2030, 2044), True, 'import numpy as np\n'), ((2052, 2158), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['temp'], {'orientation': '"""horizontal"""', 'shrink': '(0.75)', 'pad': '(0.05)', 'extendrect': '(True)', 'ticks': 'cbar_ticks'}), "(temp, orientation='horizontal', shrink=0.75, pad=0.05,\n extendrect=True, ticks=cbar_ticks)\n", (2064, 2158), True, 'import matplotlib.pyplot as plt\n'), ((2386, 2522), 'geocat.viz.util.set_titles_and_labels', 'gvutil.set_titles_and_labels', (['ax'], {'maintitle': '"""Example of Orthogonal Projection"""', 'lefttitle': '"""Surface Temperature"""', 'righttitle': '"""K"""'}), "(ax, maintitle=\n 'Example of Orthogonal Projection', lefttitle='Surface Temperature',\n righttitle='K')\n", (2414, 2522), True, 'from geocat.viz import util as gvutil\n'), ((2618, 2628), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2626, 2628), True, 'import matplotlib.pyplot as plt\n'), ((945, 977), 'geocat.datafiles.get', 'gdf.get', (['"""netcdf_files/atmos.nc"""'], {}), "('netcdf_files/atmos.nc')\n", (952, 977), True, 'import geocat.datafiles as gdf\n'), ((1684, 1702), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (1700, 1702), True, 'import cartopy.crs as ccrs\n'), ((1495, 1557), 'cartopy.crs.Orthographic', 'ccrs.Orthographic', ([], {'central_longitude': '(-120)', 'central_latitude': '(50)'}), '(central_longitude=-120, central_latitude=50)\n', (1512, 1557), True, 'import cartopy.crs as ccrs\n'), ((1855, 1873), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (1871, 1873), True, 'import cartopy.crs as ccrs\n')]
|
# -*- coding: utf-8 -*-
"""
Spectral Distributions of the Luminous Efficiency Functions
===========================================================
Defines the spectral distributions of the luminous efficiency functions.
The luminous efficiency data is in the form of a *dict* of
:class:`colour.SpectralDistribution` classes as follows::
{'name': SpectralDistribution, ..., 'name': SpectralDistribution}
The following luminous efficiency functions are available:
- CIE 1924 Photopic Standard Observer
- Judd Modified CIE 1951 Photopic Standard Observer
- Judd-Vos Modified CIE 1978 Photopic Standard Observer
- CIE 1964 Photopic 10 Degree Standard Observer
- CIE 2008 2 Degree Physiologically Relevant LEF
- CIE 2008 10 Degree Physiologically Relevant LEF
- CIE 1951 Scotopic Standard Observer
Notes
-----
- The luminous efficiency functions are provided at 1 nm interval.
- The mesopic luminous efficiency function is calculated using the
*CIE 1924 Photopic Standard Observer* and
*CIE 1951 Scotopic Standard Observer* luminous efficiency functions with
the :func:`colour.sd_mesopic_luminous_efficiency_function` definition and
the data from :attr:`colour.colorimetry.datasets.lefs.DATA_MESOPIC_X`
attribute that defines weighting factors dependent on the photopic
luminance :math:`L_p`.
References
----------
- :cite:`CVRLq` : CVRL. (n.d.). Luminous efficiency. Retrieved April 19,
2014, from http://www.cvrl.org/lumindex.htm
- :cite:`CVRLs` : CVRL. (n.d.). Older CIE Standards. Retrieved February 24,
2014, from http://cvrl.ioo.ucl.ac.uk/cie.htm
- :cite:`Wikipedia2005d` : Wikipedia. (2005). Mesopic weighting function.
Retrieved June 20, 2014, from
http://en.wikipedia.org/wiki/Mesopic_vision#Mesopic_weighting_function
"""
from functools import partial
from colour.colorimetry import SpectralDistribution
from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '<EMAIL>'
__status__ = 'Production'
__all__ = [
'DATA_LEFS_PHOTOPIC', 'SDS_LEFS_PHOTOPIC', 'DATA_LEFS_SCOTOPIC',
'SDS_LEFS_SCOTOPIC', 'SDS_LEFS', 'DATA_MESOPIC_X'
]
DATA_LEFS_PHOTOPIC = {
'CIE 1924 Photopic Standard Observer': {
360: 0.0000039170000,
361: 0.0000043935810,
362: 0.0000049296040,
363: 0.0000055321360,
364: 0.0000062082450,
365: 0.0000069650000,
366: 0.0000078132190,
367: 0.0000087673360,
368: 0.0000098398440,
369: 0.0000110432300,
370: 0.0000123900000,
371: 0.0000138864100,
372: 0.0000155572800,
373: 0.0000174429600,
374: 0.0000195837500,
375: 0.0000220200000,
376: 0.0000248396500,
377: 0.0000280412600,
378: 0.0000315310400,
379: 0.0000352152100,
380: 0.0000390000000,
381: 0.0000428264000,
382: 0.0000469146000,
383: 0.0000515896000,
384: 0.0000571764000,
385: 0.0000640000000,
386: 0.0000723442100,
387: 0.0000822122400,
388: 0.0000935081600,
389: 0.0001061361000,
390: 0.0001200000000,
391: 0.0001349840000,
392: 0.0001514920000,
393: 0.0001702080000,
394: 0.0001918160000,
395: 0.0002170000000,
396: 0.0002469067000,
397: 0.0002812400000,
398: 0.0003185200000,
399: 0.0003572667000,
400: 0.0003960000000,
401: 0.0004337147000,
402: 0.0004730240000,
403: 0.0005178760000,
404: 0.0005722187000,
405: 0.0006400000000,
406: 0.0007245600000,
407: 0.0008255000000,
408: 0.0009411600000,
409: 0.0010698800000,
410: 0.0012100000000,
411: 0.0013620910000,
412: 0.0015307520000,
413: 0.0017203680000,
414: 0.0019353230000,
415: 0.0021800000000,
416: 0.0024548000000,
417: 0.0027640000000,
418: 0.0031178000000,
419: 0.0035264000000,
420: 0.0040000000000,
421: 0.0045462400000,
422: 0.0051593200000,
423: 0.0058292800000,
424: 0.0065461600000,
425: 0.0073000000000,
426: 0.0080865070000,
427: 0.0089087200000,
428: 0.0097676800000,
429: 0.0106644300000,
430: 0.0116000000000,
431: 0.0125731700000,
432: 0.0135827200000,
433: 0.0146296800000,
434: 0.0157150900000,
435: 0.0168400000000,
436: 0.0180073600000,
437: 0.0192144800000,
438: 0.0204539200000,
439: 0.0217182400000,
440: 0.0230000000000,
441: 0.0242946100000,
442: 0.0256102400000,
443: 0.0269585700000,
444: 0.0283512500000,
445: 0.0298000000000,
446: 0.0313108300000,
447: 0.0328836800000,
448: 0.0345211200000,
449: 0.0362257100000,
450: 0.0380000000000,
451: 0.0398466700000,
452: 0.0417680000000,
453: 0.0437660000000,
454: 0.0458426700000,
455: 0.0480000000000,
456: 0.0502436800000,
457: 0.0525730400000,
458: 0.0549805600000,
459: 0.0574587200000,
460: 0.0600000000000,
461: 0.0626019700000,
462: 0.0652775200000,
463: 0.0680420800000,
464: 0.0709110900000,
465: 0.0739000000000,
466: 0.0770160000000,
467: 0.0802664000000,
468: 0.0836668000000,
469: 0.0872328000000,
470: 0.0909800000000,
471: 0.0949175500000,
472: 0.0990458400000,
473: 0.1033674000000,
474: 0.1078846000000,
475: 0.1126000000000,
476: 0.1175320000000,
477: 0.1226744000000,
478: 0.1279928000000,
479: 0.1334528000000,
480: 0.1390200000000,
481: 0.1446764000000,
482: 0.1504693000000,
483: 0.1564619000000,
484: 0.1627177000000,
485: 0.1693000000000,
486: 0.1762431000000,
487: 0.1835581000000,
488: 0.1912735000000,
489: 0.1994180000000,
490: 0.2080200000000,
491: 0.2171199000000,
492: 0.2267345000000,
493: 0.2368571000000,
494: 0.2474812000000,
495: 0.2586000000000,
496: 0.2701849000000,
497: 0.2822939000000,
498: 0.2950505000000,
499: 0.3085780000000,
500: 0.3230000000000,
501: 0.3384021000000,
502: 0.3546858000000,
503: 0.3716986000000,
504: 0.3892875000000,
505: 0.4073000000000,
506: 0.4256299000000,
507: 0.4443096000000,
508: 0.4633944000000,
509: 0.4829395000000,
510: 0.5030000000000,
511: 0.5235693000000,
512: 0.5445120000000,
513: 0.5656900000000,
514: 0.5869653000000,
515: 0.6082000000000,
516: 0.6293456000000,
517: 0.6503068000000,
518: 0.6708752000000,
519: 0.6908424000000,
520: 0.7100000000000,
521: 0.7281852000000,
522: 0.7454636000000,
523: 0.7619694000000,
524: 0.7778368000000,
525: 0.7932000000000,
526: 0.8081104000000,
527: 0.8224962000000,
528: 0.8363068000000,
529: 0.8494916000000,
530: 0.8620000000000,
531: 0.8738108000000,
532: 0.8849624000000,
533: 0.8954936000000,
534: 0.9054432000000,
535: 0.9148501000000,
536: 0.9237348000000,
537: 0.9320924000000,
538: 0.9399226000000,
539: 0.9472252000000,
540: 0.9540000000000,
541: 0.9602561000000,
542: 0.9660074000000,
543: 0.9712606000000,
544: 0.9760225000000,
545: 0.9803000000000,
546: 0.9840924000000,
547: 0.9874182000000,
548: 0.9903128000000,
549: 0.9928116000000,
550: 0.9949501000000,
551: 0.9967108000000,
552: 0.9980983000000,
553: 0.9991120000000,
554: 0.9997482000000,
555: 1.0000000000000,
556: 0.9998567000000,
557: 0.9993046000000,
558: 0.9983255000000,
559: 0.9968987000000,
560: 0.9950000000000,
561: 0.9926005000000,
562: 0.9897426000000,
563: 0.9864444000000,
564: 0.9827241000000,
565: 0.9786000000000,
566: 0.9740837000000,
567: 0.9691712000000,
568: 0.9638568000000,
569: 0.9581349000000,
570: 0.9520000000000,
571: 0.9454504000000,
572: 0.9384992000000,
573: 0.9311628000000,
574: 0.9234576000000,
575: 0.9154000000000,
576: 0.9070064000000,
577: 0.8982772000000,
578: 0.8892048000000,
579: 0.8797816000000,
580: 0.8700000000000,
581: 0.8598613000000,
582: 0.8493920000000,
583: 0.8386220000000,
584: 0.8275813000000,
585: 0.8163000000000,
586: 0.8047947000000,
587: 0.7930820000000,
588: 0.7811920000000,
589: 0.7691547000000,
590: 0.7570000000000,
591: 0.7447541000000,
592: 0.7324224000000,
593: 0.7200036000000,
594: 0.7074965000000,
595: 0.6949000000000,
596: 0.6822192000000,
597: 0.6694716000000,
598: 0.6566744000000,
599: 0.6438448000000,
600: 0.6310000000000,
601: 0.6181555000000,
602: 0.6053144000000,
603: 0.5924756000000,
604: 0.5796379000000,
605: 0.5668000000000,
606: 0.5539611000000,
607: 0.5411372000000,
608: 0.5283528000000,
609: 0.5156323000000,
610: 0.5030000000000,
611: 0.4904688000000,
612: 0.4780304000000,
613: 0.4656776000000,
614: 0.4534032000000,
615: 0.4412000000000,
616: 0.4290800000000,
617: 0.4170360000000,
618: 0.4050320000000,
619: 0.3930320000000,
620: 0.3810000000000,
621: 0.3689184000000,
622: 0.3568272000000,
623: 0.3447768000000,
624: 0.3328176000000,
625: 0.3210000000000,
626: 0.3093381000000,
627: 0.2978504000000,
628: 0.2865936000000,
629: 0.2756245000000,
630: 0.2650000000000,
631: 0.2547632000000,
632: 0.2448896000000,
633: 0.2353344000000,
634: 0.2260528000000,
635: 0.2170000000000,
636: 0.2081616000000,
637: 0.1995488000000,
638: 0.1911552000000,
639: 0.1829744000000,
640: 0.1750000000000,
641: 0.1672235000000,
642: 0.1596464000000,
643: 0.1522776000000,
644: 0.1451259000000,
645: 0.1382000000000,
646: 0.1315003000000,
647: 0.1250248000000,
648: 0.1187792000000,
649: 0.1127691000000,
650: 0.1070000000000,
651: 0.1014762000000,
652: 0.0961886400000,
653: 0.0911229600000,
654: 0.0862648500000,
655: 0.0816000000000,
656: 0.0771206400000,
657: 0.0728255200000,
658: 0.0687100800000,
659: 0.0647697600000,
660: 0.0610000000000,
661: 0.0573962100000,
662: 0.0539550400000,
663: 0.0506737600000,
664: 0.0475496500000,
665: 0.0445800000000,
666: 0.0417587200000,
667: 0.0390849600000,
668: 0.0365638400000,
669: 0.0342004800000,
670: 0.0320000000000,
671: 0.0299626100000,
672: 0.0280766400000,
673: 0.0263293600000,
674: 0.0247080500000,
675: 0.0232000000000,
676: 0.0218007700000,
677: 0.0205011200000,
678: 0.0192810800000,
679: 0.0181206900000,
680: 0.0170000000000,
681: 0.0159037900000,
682: 0.0148371800000,
683: 0.0138106800000,
684: 0.0128347800000,
685: 0.0119200000000,
686: 0.0110683100000,
687: 0.0102733900000,
688: 0.0095333110000,
689: 0.0088461570000,
690: 0.0082100000000,
691: 0.0076237810000,
692: 0.0070854240000,
693: 0.0065914760000,
694: 0.0061384850000,
695: 0.0057230000000,
696: 0.0053430590000,
697: 0.0049957960000,
698: 0.0046764040000,
699: 0.0043800750000,
700: 0.0041020000000,
701: 0.0038384530000,
702: 0.0035890990000,
703: 0.0033542190000,
704: 0.0031340930000,
705: 0.0029290000000,
706: 0.0027381390000,
707: 0.0025598760000,
708: 0.0023932440000,
709: 0.0022372750000,
710: 0.0020910000000,
711: 0.0019535870000,
712: 0.0018245800000,
713: 0.0017035800000,
714: 0.0015901870000,
715: 0.0014840000000,
716: 0.0013844960000,
717: 0.0012912680000,
718: 0.0012040920000,
719: 0.0011227440000,
720: 0.0010470000000,
721: 0.0009765896000,
722: 0.0009111088000,
723: 0.0008501332000,
724: 0.0007932384000,
725: 0.0007400000000,
726: 0.0006900827000,
727: 0.0006433100000,
728: 0.0005994960000,
729: 0.0005584547000,
730: 0.0005200000000,
731: 0.0004839136000,
732: 0.0004500528000,
733: 0.0004183452000,
734: 0.0003887184000,
735: 0.0003611000000,
736: 0.0003353835000,
737: 0.0003114404000,
738: 0.0002891656000,
739: 0.0002684539000,
740: 0.0002492000000,
741: 0.0002313019000,
742: 0.0002146856000,
743: 0.0001992884000,
744: 0.0001850475000,
745: 0.0001719000000,
746: 0.0001597781000,
747: 0.0001486044000,
748: 0.0001383016000,
749: 0.0001287925000,
750: 0.0001200000000,
751: 0.0001118595000,
752: 0.0001043224000,
753: 0.0000973356000,
754: 0.0000908458700,
755: 0.0000848000000,
756: 0.0000791466700,
757: 0.0000738580000,
758: 0.0000689160000,
759: 0.0000643026700,
760: 0.0000600000000,
761: 0.0000559818700,
762: 0.0000522256000,
763: 0.0000487184000,
764: 0.0000454474700,
765: 0.0000424000000,
766: 0.0000395610400,
767: 0.0000369151200,
768: 0.0000344486800,
769: 0.0000321481600,
770: 0.0000300000000,
771: 0.0000279912500,
772: 0.0000261135600,
773: 0.0000243602400,
774: 0.0000227246100,
775: 0.0000212000000,
776: 0.0000197785500,
777: 0.0000184528500,
778: 0.0000172168700,
779: 0.0000160645900,
780: 0.0000149900000,
781: 0.0000139872800,
782: 0.0000130515500,
783: 0.0000121781800,
784: 0.0000113625400,
785: 0.0000106000000,
786: 0.0000098858770,
787: 0.0000092173040,
788: 0.0000085923620,
789: 0.0000080091330,
790: 0.0000074657000,
791: 0.0000069595670,
792: 0.0000064879950,
793: 0.0000060486990,
794: 0.0000056393960,
795: 0.0000052578000,
796: 0.0000049017710,
797: 0.0000045697200,
798: 0.0000042601940,
799: 0.0000039717390,
800: 0.0000037029000,
801: 0.0000034521630,
802: 0.0000032183020,
803: 0.0000030003000,
804: 0.0000027971390,
805: 0.0000026078000,
806: 0.0000024312200,
807: 0.0000022665310,
808: 0.0000021130130,
809: 0.0000019699430,
810: 0.0000018366000,
811: 0.0000017122300,
812: 0.0000015962280,
813: 0.0000014880900,
814: 0.0000013873140,
815: 0.0000012934000,
816: 0.0000012058200,
817: 0.0000011241430,
818: 0.0000010480090,
819: 0.0000009770578,
820: 0.0000009109300,
821: 0.0000008492513,
822: 0.0000007917212,
823: 0.0000007380904,
824: 0.0000006881098,
825: 0.0000006415300,
826: 0.0000005980895,
827: 0.0000005575746,
828: 0.0000005198080,
829: 0.0000004846123,
830: 0.0000004518100
},
'Judd Modified CIE 1951 Photopic Standard Observer': {
370: 0.0001,
380: 0.0004,
390: 0.0015,
400: 0.0045,
410: 0.0093,
420: 0.0175,
430: 0.0273,
440: 0.0379,
450: 0.0468,
460: 0.0600,
470: 0.0910,
480: 0.1390,
490: 0.2080,
500: 0.3230,
510: 0.5030,
520: 0.7100,
530: 0.8620,
540: 0.9540,
550: 0.9950,
560: 0.9950,
570: 0.9520,
580: 0.8700,
590: 0.7570,
600: 0.6310,
610: 0.5030,
620: 0.3810,
630: 0.2650,
640: 0.1750,
650: 0.1070,
660: 0.0610,
670: 0.0320,
680: 0.0170,
690: 0.0082,
700: 0.0041,
710: 0.0021,
720: 0.0011,
730: 0.0005,
740: 0.0002,
750: 0.0001,
760: 0.0001,
770: 0.0000
},
'Judd-Vos Modified CIE 1978 Photopic Standard Observer': {
380: 0.0002000000,
381: 0.0002282100,
382: 0.0002610900,
383: 0.0002993600,
384: 0.0003438700,
385: 0.0003955600,
386: 0.0004554400,
387: 0.0005246200,
388: 0.0006042800,
389: 0.0006956500,
390: 0.0008000000,
391: 0.0009163500,
392: 0.0010477000,
393: 0.0011955000,
394: 0.0013611000,
395: 0.0015457000,
396: 0.0017508000,
397: 0.0018776000,
398: 0.0022273000,
399: 0.0025011000,
400: 0.0028000000,
401: 0.0031159000,
402: 0.0034576000,
403: 0.0038268000,
404: 0.0042256000,
405: 0.0046562000,
406: 0.0051216000,
407: 0.0056248000,
408: 0.0061695000,
409: 0.0067597000,
410: 0.0074000000,
411: 0.0081451000,
412: 0.0089555000,
413: 0.0098322000,
414: 0.0107740000,
415: 0.0117790000,
416: 0.0128420000,
417: 0.0139560000,
418: 0.0151110000,
419: 0.0162970000,
420: 0.0175000000,
421: 0.0185820000,
422: 0.0196450000,
423: 0.0206830000,
424: 0.0216940000,
425: 0.0226780000,
426: 0.0236360000,
427: 0.0245720000,
428: 0.0254900000,
429: 0.0263970000,
430: 0.0273000000,
431: 0.0283350000,
432: 0.0293830000,
433: 0.0304420000,
434: 0.0315100000,
435: 0.0325840000,
436: 0.0336610000,
437: 0.0347350000,
438: 0.0358030000,
439: 0.0368600000,
440: 0.0379000000,
441: 0.0388380000,
442: 0.0397520000,
443: 0.0406460000,
444: 0.0415240000,
445: 0.0423910000,
446: 0.0432520000,
447: 0.0441160000,
448: 0.0449900000,
449: 0.0458810000,
450: 0.0468000000,
451: 0.0477430000,
452: 0.0487330000,
453: 0.0497850000,
454: 0.0509100000,
455: 0.0521220000,
456: 0.0534350000,
457: 0.0548640000,
458: 0.0564240000,
459: 0.0581310000,
460: 0.0600000000,
461: 0.0626019700,
462: 0.0652775200,
463: 0.0680420800,
464: 0.0709110900,
465: 0.0739000000,
466: 0.0770160000,
467: 0.0802664000,
468: 0.0836668000,
469: 0.0872328000,
470: 0.0909800000,
471: 0.0949175500,
472: 0.0990458400,
473: 0.1033674000,
474: 0.1078846000,
475: 0.1126000000,
476: 0.1175320000,
477: 0.1226744000,
478: 0.1279928000,
479: 0.1334528000,
480: 0.1390200000,
481: 0.1446764000,
482: 0.1504693000,
483: 0.1564619000,
484: 0.1627177000,
485: 0.1693000000,
486: 0.1762431000,
487: 0.1835581000,
488: 0.1912735000,
489: 0.1994180000,
490: 0.2080200000,
491: 0.2171199000,
492: 0.2267345000,
493: 0.2368571000,
494: 0.2474812000,
495: 0.2586000000,
496: 0.2701849000,
497: 0.2822939000,
498: 0.2950505000,
499: 0.3085780000,
500: 0.3230000000,
501: 0.3384021000,
502: 0.3546858000,
503: 0.3716986000,
504: 0.3892875000,
505: 0.4073000000,
506: 0.4256299000,
507: 0.4443096000,
508: 0.4633944000,
509: 0.4829395000,
510: 0.5030000000,
511: 0.5235693000,
512: 0.5445120000,
513: 0.5656900000,
514: 0.5869653000,
515: 0.6082000000,
516: 0.6293456000,
517: 0.6503068000,
518: 0.6708752000,
519: 0.6908424000,
520: 0.7100000000,
521: 0.7281852000,
522: 0.7454636000,
523: 0.7619694000,
524: 0.7778368000,
525: 0.7932000000,
526: 0.8081104000,
527: 0.8224962000,
528: 0.8363068000,
529: 0.8494916000,
530: 0.8620000000,
531: 0.8738108000,
532: 0.8849624000,
533: 0.8954936000,
534: 0.9054432000,
535: 0.9148501000,
536: 0.9237348000,
537: 0.9320924000,
538: 0.9399226000,
539: 0.9472252000,
540: 0.9540000000,
541: 0.9602561000,
542: 0.9660074000,
543: 0.9712606000,
544: 0.9760225000,
545: 0.9803000000,
546: 0.9840924000,
547: 0.9874182000,
548: 0.9903128000,
549: 0.9928116000,
550: 0.9949501000,
551: 0.9967108000,
552: 0.9980983000,
553: 0.9991120000,
554: 0.9997482000,
555: 1.0000000000,
556: 0.9998567000,
557: 0.9993046000,
558: 0.9983255000,
559: 0.9968987000,
560: 0.9950000000,
561: 0.9926005000,
562: 0.9897426000,
563: 0.9864444000,
564: 0.9827241000,
565: 0.9786000000,
566: 0.9740837000,
567: 0.9691712000,
568: 0.9638568000,
569: 0.9581349000,
570: 0.9520000000,
571: 0.9454504000,
572: 0.9384992000,
573: 0.9311628000,
574: 0.9234576000,
575: 0.9154000000,
576: 0.9070064000,
577: 0.8982772000,
578: 0.8892048000,
579: 0.8797816000,
580: 0.8700000000,
581: 0.8598613000,
582: 0.8493920000,
583: 0.8386220000,
584: 0.8275813000,
585: 0.8163000000,
586: 0.8047947000,
587: 0.7930820000,
588: 0.7811920000,
589: 0.7691547000,
590: 0.7570000000,
591: 0.7447541000,
592: 0.7324224000,
593: 0.7200036000,
594: 0.7074965000,
595: 0.6949000000,
596: 0.6822192000,
597: 0.6694716000,
598: 0.6566744000,
599: 0.6438448000,
600: 0.6310000000,
601: 0.6181555000,
602: 0.6053144000,
603: 0.5924756000,
604: 0.5796379000,
605: 0.5668000000,
606: 0.5539611000,
607: 0.5411372000,
608: 0.5283528000,
609: 0.5156323000,
610: 0.5030000000,
611: 0.4904688000,
612: 0.4780304000,
613: 0.4656776000,
614: 0.4534032000,
615: 0.4412000000,
616: 0.4290800000,
617: 0.4170360000,
618: 0.4050320000,
619: 0.3930320000,
620: 0.3810000000,
621: 0.3689184000,
622: 0.3568272000,
623: 0.3447768000,
624: 0.3328176000,
625: 0.3210000000,
626: 0.3093381000,
627: 0.2978504000,
628: 0.2865936000,
629: 0.2756245000,
630: 0.2650000000,
631: 0.2547632000,
632: 0.2448896000,
633: 0.2353344000,
634: 0.2260528000,
635: 0.2170000000,
636: 0.2081616000,
637: 0.1995488000,
638: 0.1911552000,
639: 0.1829744000,
640: 0.1750000000,
641: 0.1672235000,
642: 0.1596464000,
643: 0.1522776000,
644: 0.1451259000,
645: 0.1382000000,
646: 0.1315003000,
647: 0.1250248000,
648: 0.1187792000,
649: 0.1127691000,
650: 0.1070000000,
651: 0.1014762000,
652: 0.0961886400,
653: 0.0911229600,
654: 0.0862648500,
655: 0.0816000000,
656: 0.0771206400,
657: 0.0728255200,
658: 0.0687100800,
659: 0.0647697600,
660: 0.0610000000,
661: 0.0573962100,
662: 0.0539550400,
663: 0.0506737600,
664: 0.0475496500,
665: 0.0445800000,
666: 0.0417587200,
667: 0.0390849600,
668: 0.0365638400,
669: 0.0342004800,
670: 0.0320000000,
671: 0.0299626100,
672: 0.0280766400,
673: 0.0263293600,
674: 0.0247080500,
675: 0.0232000000,
676: 0.0218007700,
677: 0.0205011200,
678: 0.0192810800,
679: 0.0181206900,
680: 0.0170000000,
681: 0.0159037900,
682: 0.0148371800,
683: 0.0138106800,
684: 0.0128347800,
685: 0.0119200000,
686: 0.0110683100,
687: 0.0102733900,
688: 0.0095333110,
689: 0.0088461570,
690: 0.0082100000,
691: 0.0076237810,
692: 0.0070854240,
693: 0.0065914760,
694: 0.0061384850,
695: 0.0057230000,
696: 0.0053430590,
697: 0.0049957960,
698: 0.0046764040,
699: 0.0043800750,
700: 0.0041020000,
701: 0.0038384530,
702: 0.0035890990,
703: 0.0033542190,
704: 0.0031340930,
705: 0.0029290000,
706: 0.0027381390,
707: 0.0025598760,
708: 0.0023932440,
709: 0.0022372750,
710: 0.0020910000,
711: 0.0019535870,
712: 0.0018245800,
713: 0.0017035800,
714: 0.0015901870,
715: 0.0014840000,
716: 0.0013844960,
717: 0.0012912680,
718: 0.0012040920,
719: 0.0011227440,
720: 0.0010470000,
721: 0.0009765896,
722: 0.0009111088,
723: 0.0008501332,
724: 0.0007932384,
725: 0.0007400000,
726: 0.0006900827,
727: 0.0006433100,
728: 0.0005994960,
729: 0.0005584547,
730: 0.0005200000,
731: 0.0004839136,
732: 0.0004500528,
733: 0.0004183452,
734: 0.0003887184,
735: 0.0003611000,
736: 0.0003353835,
737: 0.0003114404,
738: 0.0002891656,
739: 0.0002684539,
740: 0.0002492000,
741: 0.0002313019,
742: 0.0002146856,
743: 0.0001992884,
744: 0.0001850475,
745: 0.0001719000,
746: 0.0001597781,
747: 0.0001486044,
748: 0.0001383016,
749: 0.0001287925,
750: 0.0001200000,
751: 0.0001118595,
752: 0.0001043224,
753: 0.0000973356,
754: 0.0000908459,
755: 0.0000848000,
756: 0.0000791467,
757: 0.0000738580,
758: 0.0000689160,
759: 0.0000643027,
760: 0.0000600000,
761: 0.0000559819,
762: 0.0000522256,
763: 0.0000487184,
764: 0.0000454475,
765: 0.0000424000,
766: 0.0000395610,
767: 0.0000369151,
768: 0.0000344487,
769: 0.0000321482,
770: 0.0000300000,
771: 0.0000279913,
772: 0.0000261136,
773: 0.0000243602,
774: 0.0000227246,
775: 0.0000212000,
776: 0.0000197786,
777: 0.0000184529,
778: 0.0000172169,
779: 0.0000160646,
780: 0.0000149900
},
'CIE 1964 Photopic 10 Degree Standard Observer': {
360: 0.000000013398,
361: 0.000000020294,
362: 0.000000030560,
363: 0.000000045740,
364: 0.000000068050,
365: 0.000000100650,
366: 0.000000147980,
367: 0.000000216270,
368: 0.000000314200,
369: 0.000000453700,
370: 0.000000651100,
371: 0.000000928800,
372: 0.000001317500,
373: 0.000001857200,
374: 0.000002602000,
375: 0.000003625000,
376: 0.000005019000,
377: 0.000006907000,
378: 0.000009449000,
379: 0.000012848000,
380: 0.000017364000,
381: 0.000023327000,
382: 0.000031150000,
383: 0.000041350000,
384: 0.000054560000,
385: 0.000071560000,
386: 0.000093300000,
387: 0.000120870000,
388: 0.000155640000,
389: 0.000199200000,
390: 0.000253400000,
391: 0.000320200000,
392: 0.000402400000,
393: 0.000502300000,
394: 0.000623200000,
395: 0.000768500000,
396: 0.000941700000,
397: 0.001147800000,
398: 0.001390300000,
399: 0.001674000000,
400: 0.002004400000,
401: 0.002386000000,
402: 0.002822000000,
403: 0.003319000000,
404: 0.003880000000,
405: 0.004509000000,
406: 0.005209000000,
407: 0.005985000000,
408: 0.006833000000,
409: 0.007757000000,
410: 0.008756000000,
411: 0.009816000000,
412: 0.010918000000,
413: 0.012058000000,
414: 0.013237000000,
415: 0.014456000000,
416: 0.015717000000,
417: 0.017025000000,
418: 0.018399000000,
419: 0.019848000000,
420: 0.021391000000,
421: 0.022992000000,
422: 0.024598000000,
423: 0.026213000000,
424: 0.027841000000,
425: 0.029497000000,
426: 0.031195000000,
427: 0.032927000000,
428: 0.034738000000,
429: 0.036654000000,
430: 0.038676000000,
431: 0.040792000000,
432: 0.042946000000,
433: 0.045114000000,
434: 0.047333000000,
435: 0.049602000000,
436: 0.051934000000,
437: 0.054337000000,
438: 0.056822000000,
439: 0.059399000000,
440: 0.062077000000,
441: 0.064737000000,
442: 0.067285000000,
443: 0.069764000000,
444: 0.072218000000,
445: 0.074704000000,
446: 0.077272000000,
447: 0.079979000000,
448: 0.082874000000,
449: 0.086000000000,
450: 0.089456000000,
451: 0.092947000000,
452: 0.096275000000,
453: 0.099535000000,
454: 0.102829000000,
455: 0.106256000000,
456: 0.109901000000,
457: 0.113835000000,
458: 0.118167000000,
459: 0.122932000000,
460: 0.128201000000,
461: 0.133457000000,
462: 0.138323000000,
463: 0.143042000000,
464: 0.147787000000,
465: 0.152761000000,
466: 0.158102000000,
467: 0.163941000000,
468: 0.170362000000,
469: 0.177425000000,
470: 0.185190000000,
471: 0.193025000000,
472: 0.200313000000,
473: 0.207156000000,
474: 0.213644000000,
475: 0.219940000000,
476: 0.226170000000,
477: 0.232467000000,
478: 0.239025000000,
479: 0.245997000000,
480: 0.253589000000,
481: 0.261876000000,
482: 0.270643000000,
483: 0.279645000000,
484: 0.288694000000,
485: 0.297665000000,
486: 0.306469000000,
487: 0.315035000000,
488: 0.323335000000,
489: 0.331366000000,
490: 0.339133000000,
491: 0.347860000000,
492: 0.358326000000,
493: 0.370001000000,
494: 0.382464000000,
495: 0.395379000000,
496: 0.408482000000,
497: 0.421588000000,
498: 0.434619000000,
499: 0.447601000000,
500: 0.460777000000,
501: 0.474340000000,
502: 0.488200000000,
503: 0.502340000000,
504: 0.516740000000,
505: 0.531360000000,
506: 0.546190000000,
507: 0.561180000000,
508: 0.576290000000,
509: 0.591500000000,
510: 0.606741000000,
511: 0.622150000000,
512: 0.637830000000,
513: 0.653710000000,
514: 0.669680000000,
515: 0.685660000000,
516: 0.701550000000,
517: 0.717230000000,
518: 0.732570000000,
519: 0.747460000000,
520: 0.761757000000,
521: 0.775340000000,
522: 0.788220000000,
523: 0.800460000000,
524: 0.812140000000,
525: 0.823330000000,
526: 0.834120000000,
527: 0.844600000000,
528: 0.854870000000,
529: 0.865040000000,
530: 0.875211000000,
531: 0.885370000000,
532: 0.895370000000,
533: 0.905150000000,
534: 0.914650000000,
535: 0.923810000000,
536: 0.932550000000,
537: 0.940810000000,
538: 0.948520000000,
539: 0.955600000000,
540: 0.961988000000,
541: 0.967540000000,
542: 0.972230000000,
543: 0.976170000000,
544: 0.979460000000,
545: 0.982200000000,
546: 0.984520000000,
547: 0.986520000000,
548: 0.988320000000,
549: 0.990020000000,
550: 0.991761000000,
551: 0.993530000000,
552: 0.995230000000,
553: 0.996770000000,
554: 0.998090000000,
555: 0.999110000000,
556: 0.999770000000,
557: 1.000000000000,
558: 0.999710000000,
559: 0.998850000000,
560: 0.997340000000,
561: 0.995260000000,
562: 0.992740000000,
563: 0.989750000000,
564: 0.986300000000,
565: 0.982380000000,
566: 0.977980000000,
567: 0.973110000000,
568: 0.967740000000,
569: 0.961890000000,
570: 0.955552000000,
571: 0.948601000000,
572: 0.940981000000,
573: 0.932798000000,
574: 0.924158000000,
575: 0.915175000000,
576: 0.905954000000,
577: 0.896608000000,
578: 0.887249000000,
579: 0.877986000000,
580: 0.868934000000,
581: 0.860164000000,
582: 0.851519000000,
583: 0.842963000000,
584: 0.834393000000,
585: 0.825623000000,
586: 0.816764000000,
587: 0.807544000000,
588: 0.797947000000,
589: 0.787893000000,
590: 0.777405000000,
591: 0.766490000000,
592: 0.755309000000,
593: 0.743845000000,
594: 0.732190000000,
595: 0.720353000000,
596: 0.708281000000,
597: 0.696055000000,
598: 0.683621000000,
599: 0.671048000000,
600: 0.658341000000,
601: 0.645545000000,
602: 0.632718000000,
603: 0.619815000000,
604: 0.606887000000,
605: 0.593878000000,
606: 0.580781000000,
607: 0.567653000000,
608: 0.554490000000,
609: 0.541228000000,
610: 0.527963000000,
611: 0.514634000000,
612: 0.501363000000,
613: 0.488124000000,
614: 0.474935000000,
615: 0.461834000000,
616: 0.448823000000,
617: 0.435917000000,
618: 0.423153000000,
619: 0.410526000000,
620: 0.398057000000,
621: 0.385835000000,
622: 0.373951000000,
623: 0.362311000000,
624: 0.350863000000,
625: 0.339554000000,
626: 0.328309000000,
627: 0.317118000000,
628: 0.305936000000,
629: 0.294737000000,
630: 0.283493000000,
631: 0.272222000000,
632: 0.260990000000,
633: 0.249877000000,
634: 0.238946000000,
635: 0.228254000000,
636: 0.217853000000,
637: 0.207780000000,
638: 0.198072000000,
639: 0.188748000000,
640: 0.179828000000,
641: 0.171285000000,
642: 0.163059000000,
643: 0.155151000000,
644: 0.147535000000,
645: 0.140211000000,
646: 0.133170000000,
647: 0.126400000000,
648: 0.119892000000,
649: 0.113640000000,
650: 0.107633000000,
651: 0.101870000000,
652: 0.096347000000,
653: 0.091063000000,
654: 0.086010000000,
655: 0.081187000000,
656: 0.076583000000,
657: 0.072198000000,
658: 0.068024000000,
659: 0.064052000000,
660: 0.060281000000,
661: 0.056697000000,
662: 0.053292000000,
663: 0.050059000000,
664: 0.046998000000,
665: 0.044096000000,
666: 0.041345000000,
667: 0.038750700000,
668: 0.036297800000,
669: 0.033983200000,
670: 0.031800400000,
671: 0.029739500000,
672: 0.027791800000,
673: 0.025955100000,
674: 0.024226300000,
675: 0.022601700000,
676: 0.021077900000,
677: 0.019650500000,
678: 0.018315300000,
679: 0.017068600000,
680: 0.015905100000,
681: 0.014818300000,
682: 0.013800800000,
683: 0.012849500000,
684: 0.011960700000,
685: 0.011130300000,
686: 0.010355500000,
687: 0.009633200000,
688: 0.008959900000,
689: 0.008332400000,
690: 0.007748800000,
691: 0.007204600000,
692: 0.006697500000,
693: 0.006225100000,
694: 0.005785000000,
695: 0.005375100000,
696: 0.004994100000,
697: 0.004639200000,
698: 0.004309300000,
699: 0.004002800000,
700: 0.003717740000,
701: 0.003452620000,
702: 0.003205830000,
703: 0.002976230000,
704: 0.002762810000,
705: 0.002564560000,
706: 0.002380480000,
707: 0.002209710000,
708: 0.002051320000,
709: 0.001904490000,
710: 0.001768470000,
711: 0.001642360000,
712: 0.001525350000,
713: 0.001416720000,
714: 0.001315950000,
715: 0.001222390000,
716: 0.001135550000,
717: 0.001054940000,
718: 0.000980140000,
719: 0.000910660000,
720: 0.000846190000,
721: 0.000786290000,
722: 0.000730680000,
723: 0.000678990000,
724: 0.000631010000,
725: 0.000586440000,
726: 0.000545110000,
727: 0.000506720000,
728: 0.000471110000,
729: 0.000438050000,
730: 0.000407410000,
731: 0.000378962000,
732: 0.000352543000,
733: 0.000328001000,
734: 0.000305208000,
735: 0.000284041000,
736: 0.000264375000,
737: 0.000246109000,
738: 0.000229143000,
739: 0.000213376000,
740: 0.000198730000,
741: 0.000185115000,
742: 0.000172454000,
743: 0.000160678000,
744: 0.000149730000,
745: 0.000139550000,
746: 0.000130086000,
747: 0.000121290000,
748: 0.000113106000,
749: 0.000105501000,
750: 0.000098428000,
751: 0.000091853000,
752: 0.000085738000,
753: 0.000080048000,
754: 0.000074751000,
755: 0.000069819000,
756: 0.000065222000,
757: 0.000060939000,
758: 0.000056942000,
759: 0.000053217000,
760: 0.000049737000,
761: 0.000046491000,
762: 0.000043464000,
763: 0.000040635000,
764: 0.000038000000,
765: 0.000035540500,
766: 0.000033244800,
767: 0.000031100600,
768: 0.000029099000,
769: 0.000027230700,
770: 0.000025486000,
771: 0.000023856100,
772: 0.000022333200,
773: 0.000020910400,
774: 0.000019580800,
775: 0.000018338400,
776: 0.000017177700,
777: 0.000016093400,
778: 0.000015080000,
779: 0.000014133600,
780: 0.000013249000,
781: 0.000012422600,
782: 0.000011649900,
783: 0.000010927700,
784: 0.000010251900,
785: 0.000009619600,
786: 0.000009028100,
787: 0.000008474000,
788: 0.000007954800,
789: 0.000007468600,
790: 0.000007012800,
791: 0.000006585800,
792: 0.000006185700,
793: 0.000005810700,
794: 0.000005459000,
795: 0.000005129800,
796: 0.000004820600,
797: 0.000004531200,
798: 0.000004259100,
799: 0.000004004200,
800: 0.000003764730,
801: 0.000003539950,
802: 0.000003329140,
803: 0.000003131150,
804: 0.000002945290,
805: 0.000002770810,
806: 0.000002607050,
807: 0.000002453290,
808: 0.000002308940,
809: 0.000002173380,
810: 0.000002046130,
811: 0.000001926620,
812: 0.000001814400,
813: 0.000001708950,
814: 0.000001609880,
815: 0.000001516770,
816: 0.000001429210,
817: 0.000001346860,
818: 0.000001269450,
819: 0.000001196620,
820: 0.000001128090,
821: 0.000001063680,
822: 0.000001003130,
823: 0.000000946220,
824: 0.000000892630,
825: 0.000000842160,
826: 0.000000794640,
827: 0.000000749780,
828: 0.000000707440,
829: 0.000000667480,
830: 0.000000629700
},
'CIE 2008 2 Degree Physiologically Relevant LEF': {
390: 4.14616e-04,
391: 5.02833e-04,
392: 6.08499e-04,
393: 7.34444e-04,
394: 8.83739e-04,
395: 1.05965e-03,
396: 1.26553e-03,
397: 1.50475e-03,
398: 1.78049e-03,
399: 2.09557e-03,
400: 2.45219e-03,
401: 2.85222e-03,
402: 3.29912e-03,
403: 3.79747e-03,
404: 4.35277e-03,
405: 4.97172e-03,
406: 5.66101e-03,
407: 6.42161e-03,
408: 7.25031e-03,
409: 8.14017e-03,
410: 9.07986e-03,
411: 1.00561e-02,
412: 1.10646e-02,
413: 1.21052e-02,
414: 1.31801e-02,
415: 1.42938e-02,
416: 1.54500e-02,
417: 1.66409e-02,
418: 1.78530e-02,
419: 1.90702e-02,
420: 2.02737e-02,
421: 2.14481e-02,
422: 2.26004e-02,
423: 2.37479e-02,
424: 2.49125e-02,
425: 2.61211e-02,
426: 2.73992e-02,
427: 2.87499e-02,
428: 3.01691e-02,
429: 3.16514e-02,
430: 3.31904e-02,
431: 3.47791e-02,
432: 3.64149e-02,
433: 3.80957e-02,
434: 3.98184e-02,
435: 4.15794e-02,
436: 4.33710e-02,
437: 4.51718e-02,
438: 4.69542e-02,
439: 4.86872e-02,
440: 5.03366e-02,
441: 5.18761e-02,
442: 5.33222e-02,
443: 5.47060e-02,
444: 5.60634e-02,
445: 5.74339e-02,
446: 5.88511e-02,
447: 6.03081e-02,
448: 6.17864e-02,
449: 6.32657e-02,
450: 6.47235e-02,
451: 6.61475e-02,
452: 6.75726e-02,
453: 6.90493e-02,
454: 7.06328e-02,
455: 7.23834e-02,
456: 7.43596e-02,
457: 7.65938e-02,
458: 7.91144e-02,
459: 8.19535e-02,
460: 8.51482e-02,
461: 8.87266e-02,
462: 9.26601e-02,
463: 9.68972e-02,
464: 1.01375e-01,
465: 1.06014e-01,
466: 1.10738e-01,
467: 1.15511e-01,
468: 1.20312e-01,
469: 1.25116e-01,
470: 1.29896e-01,
471: 1.34630e-01,
472: 1.39331e-01,
473: 1.44023e-01,
474: 1.48737e-01,
475: 1.53507e-01,
476: 1.58364e-01,
477: 1.63320e-01,
478: 1.68376e-01,
479: 1.73537e-01,
480: 1.78805e-01,
481: 1.84182e-01,
482: 1.89656e-01,
483: 1.95210e-01,
484: 2.00826e-01,
485: 2.06483e-01,
486: 2.12183e-01,
487: 2.18028e-01,
488: 2.24159e-01,
489: 2.30730e-01,
490: 2.37916e-01,
491: 2.45871e-01,
492: 2.54602e-01,
493: 2.64076e-01,
494: 2.74249e-01,
495: 2.85068e-01,
496: 2.96484e-01,
497: 3.08501e-01,
498: 3.21139e-01,
499: 3.34418e-01,
500: 3.48354e-01,
501: 3.62960e-01,
502: 3.78228e-01,
503: 3.94136e-01,
504: 4.10658e-01,
505: 4.27760e-01,
506: 4.45399e-01,
507: 4.63540e-01,
508: 4.82138e-01,
509: 5.01143e-01,
510: 5.20497e-01,
511: 5.40139e-01,
512: 5.60021e-01,
513: 5.80097e-01,
514: 6.00317e-01,
515: 6.20626e-01,
516: 6.40940e-01,
517: 6.61077e-01,
518: 6.80813e-01,
519: 6.99904e-01,
520: 7.18089e-01,
521: 7.35159e-01,
522: 7.51182e-01,
523: 7.66314e-01,
524: 7.80735e-01,
525: 7.94645e-01,
526: 8.08207e-01,
527: 8.21382e-01,
528: 8.34070e-01,
529: 8.46171e-01,
530: 8.57580e-01,
531: 8.68241e-01,
532: 8.78306e-01,
533: 8.87991e-01,
534: 8.97521e-01,
535: 9.07135e-01,
536: 9.16995e-01,
537: 9.26929e-01,
538: 9.36673e-01,
539: 9.45948e-01,
540: 9.54468e-01,
541: 9.61983e-01,
542: 9.68439e-01,
543: 9.73829e-01,
544: 9.78152e-01,
545: 9.81411e-01,
546: 9.83667e-01,
547: 9.85208e-01,
548: 9.86381e-01,
549: 9.87536e-01,
550: 9.89023e-01,
551: 9.91081e-01,
552: 9.93491e-01,
553: 9.95917e-01,
554: 9.98021e-01,
555: 9.99461e-01,
556: 9.99993e-01,
557: 9.99756e-01,
558: 9.98984e-01,
559: 9.97912e-01,
560: 9.96774e-01,
561: 9.95736e-01,
562: 9.94711e-01,
563: 9.93553e-01,
564: 9.92116e-01,
565: 9.90255e-01,
566: 9.87860e-01,
567: 9.84932e-01,
568: 9.81504e-01,
569: 9.77603e-01,
570: 9.73261e-01,
571: 9.68476e-01,
572: 9.63137e-01,
573: 9.57106e-01,
574: 9.50254e-01,
575: 9.42457e-01,
576: 9.33690e-01,
577: 9.24289e-01,
578: 9.14671e-01,
579: 9.05233e-01,
580: 8.96361e-01,
581: 8.88307e-01,
582: 8.80846e-01,
583: 8.73645e-01,
584: 8.66376e-01,
585: 8.58720e-01,
586: 8.50430e-01,
587: 8.41505e-01,
588: 8.32011e-01,
589: 8.22015e-01,
590: 8.11587e-01,
591: 8.00787e-01,
592: 7.89652e-01,
593: 7.78205e-01,
594: 7.66473e-01,
595: 7.54479e-01,
596: 7.42247e-01,
597: 7.29823e-01,
598: 7.17252e-01,
599: 7.04582e-01,
600: 6.91855e-01,
601: 6.79101e-01,
602: 6.66285e-01,
603: 6.53359e-01,
604: 6.40281e-01,
605: 6.27007e-01,
606: 6.13515e-01,
607: 5.99849e-01,
608: 5.86068e-01,
609: 5.72226e-01,
610: 5.58375e-01,
611: 5.44554e-01,
612: 5.30767e-01,
613: 5.17013e-01,
614: 5.03289e-01,
615: 4.89595e-01,
616: 4.75944e-01,
617: 4.62396e-01,
618: 4.49015e-01,
619: 4.35862e-01,
620: 4.22990e-01,
621: 4.10415e-01,
622: 3.98036e-01,
623: 3.85730e-01,
624: 3.73391e-01,
625: 3.60924e-01,
626: 3.48286e-01,
627: 3.35570e-01,
628: 3.22896e-01,
629: 3.10370e-01,
630: 2.98086e-01,
631: 2.86116e-01,
632: 2.74482e-01,
633: 2.63195e-01,
634: 2.52263e-01,
635: 2.41690e-01,
636: 2.31481e-01,
637: 2.21638e-01,
638: 2.12162e-01,
639: 2.03054e-01,
640: 1.94312e-01,
641: 1.85923e-01,
642: 1.77827e-01,
643: 1.69965e-01,
644: 1.62284e-01,
645: 1.54740e-01,
646: 1.47308e-01,
647: 1.40017e-01,
648: 1.32901e-01,
649: 1.25991e-01,
650: 1.19312e-01,
651: 1.12882e-01,
652: 1.06711e-01,
653: 1.00805e-01,
654: 9.51665e-02,
655: 8.97959e-02,
656: 8.46904e-02,
657: 7.98401e-02,
658: 7.52337e-02,
659: 7.08606e-02,
660: 6.67104e-02,
661: 6.27736e-02,
662: 5.90418e-02,
663: 5.55070e-02,
664: 5.21614e-02,
665: 4.89970e-02,
666: 4.60058e-02,
667: 4.31788e-02,
668: 4.05075e-02,
669: 3.79838e-02,
670: 3.55998e-02,
671: 3.33486e-02,
672: 3.12233e-02,
673: 2.92178e-02,
674: 2.73260e-02,
675: 2.55422e-02,
676: 2.38612e-02,
677: 2.22786e-02,
678: 2.07902e-02,
679: 1.93919e-02,
680: 1.80794e-02,
681: 1.68482e-02,
682: 1.56919e-02,
683: 1.46045e-02,
684: 1.35806e-02,
685: 1.26157e-02,
686: 1.17070e-02,
687: 1.08561e-02,
688: 1.00648e-02,
689: 9.33338e-03,
690: 8.66128e-03,
691: 8.04605e-03,
692: 7.48113e-03,
693: 6.95999e-03,
694: 6.47707e-03,
695: 6.02768e-03,
696: 5.60817e-03,
697: 5.21669e-03,
698: 4.85179e-03,
699: 4.51201e-03,
700: 4.19594e-03,
701: 3.90206e-03,
702: 3.62837e-03,
703: 3.37301e-03,
704: 3.13432e-03,
705: 2.91086e-03,
706: 2.70153e-03,
707: 2.50580e-03,
708: 2.32323e-03,
709: 2.15333e-03,
710: 1.99556e-03,
711: 1.84932e-03,
712: 1.71398e-03,
713: 1.58890e-03,
714: 1.47345e-03,
715: 1.36702e-03,
716: 1.26895e-03,
717: 1.17842e-03,
718: 1.09464e-03,
719: 1.01694e-03,
720: 9.44727e-04,
721: 8.77517e-04,
722: 8.15044e-04,
723: 7.57076e-04,
724: 7.03376e-04,
725: 6.53705e-04,
726: 6.07805e-04,
727: 5.65344e-04,
728: 5.26005e-04,
729: 4.89506e-04,
730: 4.55597e-04,
731: 4.24055e-04,
732: 3.94686e-04,
733: 3.67318e-04,
734: 3.41794e-04,
735: 3.17974e-04,
736: 2.95744e-04,
737: 2.75056e-04,
738: 2.55864e-04,
739: 2.38114e-04,
740: 2.21745e-04,
741: 2.06671e-04,
742: 1.92747e-04,
743: 1.79831e-04,
744: 1.67802e-04,
745: 1.56557e-04,
746: 1.46017e-04,
747: 1.36153e-04,
748: 1.26945e-04,
749: 1.18367e-04,
750: 1.10393e-04,
751: 1.02991e-04,
752: 9.61184e-05,
753: 8.97332e-05,
754: 8.37969e-05,
755: 7.82744e-05,
756: 7.31331e-05,
757: 6.83414e-05,
758: 6.38704e-05,
759: 5.96939e-05,
760: 5.57886e-05,
761: 5.21351e-05,
762: 4.87218e-05,
763: 4.55385e-05,
764: 4.25744e-05,
765: 3.98188e-05,
766: 3.72588e-05,
767: 3.48747e-05,
768: 3.26477e-05,
769: 3.05614e-05,
770: 2.86018e-05,
771: 2.67584e-05,
772: 2.50294e-05,
773: 2.34137e-05,
774: 2.19091e-05,
775: 2.05126e-05,
776: 1.92190e-05,
777: 1.80180e-05,
778: 1.68990e-05,
779: 1.58531e-05,
780: 1.48724e-05,
781: 1.39509e-05,
782: 1.30853e-05,
783: 1.22733e-05,
784: 1.15123e-05,
785: 1.08000e-05,
786: 1.01336e-05,
787: 9.50992e-06,
788: 8.92563e-06,
789: 8.37785e-06,
790: 7.86392e-06,
791: 7.38154e-06,
792: 6.92910e-06,
793: 6.50514e-06,
794: 6.10822e-06,
795: 5.73694e-06,
796: 5.38983e-06,
797: 5.06527e-06,
798: 4.76167e-06,
799: 4.47756e-06,
800: 4.21160e-06,
801: 3.96246e-06,
802: 3.72867e-06,
803: 3.50888e-06,
804: 3.30187e-06,
805: 3.10656e-06,
806: 2.92212e-06,
807: 2.74821e-06,
808: 2.58456e-06,
809: 2.43087e-06,
810: 2.28679e-06,
811: 2.15191e-06,
812: 2.02566e-06,
813: 1.90746e-06,
814: 1.79679e-06,
815: 1.69315e-06,
816: 1.59603e-06,
817: 1.50490e-06,
818: 1.41925e-06,
819: 1.33860e-06,
820: 1.26256e-06,
821: 1.19077e-06,
822: 1.12303e-06,
823: 1.05915e-06,
824: 9.98951e-07,
825: 9.42251e-07,
826: 8.88880e-07,
827: 8.38669e-07,
828: 7.91454e-07,
829: 7.47077e-07,
830: 7.05386e-07
},
'CIE 2008 10 Degree Physiologically Relevant LEF': {
390: 4.07678e-04,
391: 4.97777e-04,
392: 6.06475e-04,
393: 7.37004e-04,
394: 8.92939e-04,
395: 1.07817e-03,
396: 1.29682e-03,
397: 1.55316e-03,
398: 1.85146e-03,
399: 2.19579e-03,
400: 2.58977e-03,
401: 3.03680e-03,
402: 3.54193e-03,
403: 4.11142e-03,
404: 4.75262e-03,
405: 5.47421e-03,
406: 6.28503e-03,
407: 7.18807e-03,
408: 8.18179e-03,
409: 9.26042e-03,
410: 1.04130e-02,
411: 1.16264e-02,
412: 1.28988e-02,
413: 1.42344e-02,
414: 1.56408e-02,
415: 1.71297e-02,
416: 1.87127e-02,
417: 2.03839e-02,
418: 2.21294e-02,
419: 2.39299e-02,
420: 2.57613e-02,
421: 2.76016e-02,
422: 2.94551e-02,
423: 3.13388e-02,
424: 3.32758e-02,
425: 3.52955e-02,
426: 3.74271e-02,
427: 3.96714e-02,
428: 4.20200e-02,
429: 4.44617e-02,
430: 4.69823e-02,
431: 4.95674e-02,
432: 5.22122e-02,
433: 5.49139e-02,
434: 5.76692e-02,
435: 6.04743e-02,
436: 6.33220e-02,
437: 6.61927e-02,
438: 6.90619e-02,
439: 7.19019e-02,
440: 7.46829e-02,
441: 7.73845e-02,
442: 8.00360e-02,
443: 8.26852e-02,
444: 8.53875e-02,
445: 8.82054e-02,
446: 9.11893e-02,
447: 9.43104e-02,
448: 9.75135e-02,
449: 1.00735e-01,
450: 1.03903e-01,
451: 1.06964e-01,
452: 1.09968e-01,
453: 1.12999e-01,
454: 1.16154e-01,
455: 1.19539e-01,
456: 1.23250e-01,
457: 1.27305e-01,
458: 1.31696e-01,
459: 1.36418e-01,
460: 1.41459e-01,
461: 1.46800e-01,
462: 1.52400e-01,
463: 1.58202e-01,
464: 1.64140e-01,
465: 1.70137e-01,
466: 1.76123e-01,
467: 1.82090e-01,
468: 1.88046e-01,
469: 1.94006e-01,
470: 1.99986e-01,
471: 2.06005e-01,
472: 2.12098e-01,
473: 2.18304e-01,
474: 2.24669e-01,
475: 2.31243e-01,
476: 2.38074e-01,
477: 2.45180e-01,
478: 2.52568e-01,
479: 2.60248e-01,
480: 2.68227e-01,
481: 2.76501e-01,
482: 2.85004e-01,
483: 2.93647e-01,
484: 3.02332e-01,
485: 3.10944e-01,
486: 3.19410e-01,
487: 3.27868e-01,
488: 3.36526e-01,
489: 3.45618e-01,
490: 3.55402e-01,
491: 3.66089e-01,
492: 3.77586e-01,
493: 3.89696e-01,
494: 4.02195e-01,
495: 4.14823e-01,
496: 4.27354e-01,
497: 4.39821e-01,
498: 4.52336e-01,
499: 4.65030e-01,
500: 4.78048e-01,
501: 4.91517e-01,
502: 5.05422e-01,
503: 5.19706e-01,
504: 5.34301e-01,
505: 5.49134e-01,
506: 5.64130e-01,
507: 5.79242e-01,
508: 5.94426e-01,
509: 6.09639e-01,
510: 6.24830e-01,
511: 6.39966e-01,
512: 6.55094e-01,
513: 6.70290e-01,
514: 6.85638e-01,
515: 7.01229e-01,
516: 7.17110e-01,
517: 7.33092e-01,
518: 7.48904e-01,
519: 7.64253e-01,
520: 7.78820e-01,
521: 7.92341e-01,
522: 8.04851e-01,
523: 8.16475e-01,
524: 8.27352e-01,
525: 8.37636e-01,
526: 8.47465e-01,
527: 8.56887e-01,
528: 8.65924e-01,
529: 8.74604e-01,
530: 8.82955e-01,
531: 8.91027e-01,
532: 8.98949e-01,
533: 9.06875e-01,
534: 9.14965e-01,
535: 9.23386e-01,
536: 9.32232e-01,
537: 9.41286e-01,
538: 9.50238e-01,
539: 9.58765e-01,
540: 9.66532e-01,
541: 9.73250e-01,
542: 9.78842e-01,
543: 9.83287e-01,
544: 9.86572e-01,
545: 9.88689e-01,
546: 9.89706e-01,
547: 9.89985e-01,
548: 9.89962e-01,
549: 9.90073e-01,
550: 9.90750e-01,
551: 9.92283e-01,
552: 9.94384e-01,
553: 9.96622e-01,
554: 9.98565e-01,
555: 9.99778e-01,
556: 9.99944e-01,
557: 9.99220e-01,
558: 9.97879e-01,
559: 9.96193e-01,
560: 9.94430e-01,
561: 9.92783e-01,
562: 9.91158e-01,
563: 9.89392e-01,
564: 9.87329e-01,
565: 9.84813e-01,
566: 9.81725e-01,
567: 9.78071e-01,
568: 9.73886e-01,
569: 9.69203e-01,
570: 9.64055e-01,
571: 9.58441e-01,
572: 9.52238e-01,
573: 9.45297e-01,
574: 9.37477e-01,
575: 9.28649e-01,
576: 9.18795e-01,
577: 9.08301e-01,
578: 8.97635e-01,
579: 8.87240e-01,
580: 8.77536e-01,
581: 8.68792e-01,
582: 8.60747e-01,
583: 8.53023e-01,
584: 8.45253e-01,
585: 8.37084e-01,
586: 8.28241e-01,
587: 8.18732e-01,
588: 8.08635e-01,
589: 7.98030e-01,
590: 7.86995e-01,
591: 7.75604e-01,
592: 7.63900e-01,
593: 7.51916e-01,
594: 7.39683e-01,
595: 7.27231e-01,
596: 7.14588e-01,
597: 7.01793e-01,
598: 6.88887e-01,
599: 6.75910e-01,
600: 6.62904e-01,
601: 6.49891e-01,
602: 6.36841e-01,
603: 6.23709e-01,
604: 6.10454e-01,
605: 5.97037e-01,
606: 5.83440e-01,
607: 5.69704e-01,
608: 5.55889e-01,
609: 5.42047e-01,
610: 5.28230e-01,
611: 5.14475e-01,
612: 5.00788e-01,
613: 4.87169e-01,
614: 4.73616e-01,
615: 4.60131e-01,
616: 4.46726e-01,
617: 4.33459e-01,
618: 4.20392e-01,
619: 4.07581e-01,
620: 3.95076e-01,
621: 3.82889e-01,
622: 3.70919e-01,
623: 3.59045e-01,
624: 3.47162e-01,
625: 3.35179e-01,
626: 3.23056e-01,
627: 3.10886e-01,
628: 2.98784e-01,
629: 2.86853e-01,
630: 2.75181e-01,
631: 2.63834e-01,
632: 2.52833e-01,
633: 2.42183e-01,
634: 2.31890e-01,
635: 2.21956e-01,
636: 2.12383e-01,
637: 2.03170e-01,
638: 1.94318e-01,
639: 1.85825e-01,
640: 1.77688e-01,
641: 1.69893e-01,
642: 1.62382e-01,
643: 1.55099e-01,
644: 1.47992e-01,
645: 1.41020e-01,
646: 1.34161e-01,
647: 1.27440e-01,
648: 1.20889e-01,
649: 1.14534e-01,
650: 1.08400e-01,
651: 1.02501e-01,
652: 9.68459e-02,
653: 9.14394e-02,
654: 8.62832e-02,
655: 8.13769e-02,
656: 7.67171e-02,
657: 7.22940e-02,
658: 6.80970e-02,
659: 6.41155e-02,
660: 6.03398e-02,
661: 5.67605e-02,
662: 5.33699e-02,
663: 5.01603e-02,
664: 4.71241e-02,
665: 4.42538e-02,
666: 4.15421e-02,
667: 3.89804e-02,
668: 3.65609e-02,
669: 3.42760e-02,
670: 3.21185e-02,
671: 3.00819e-02,
672: 2.81600e-02,
673: 2.63470e-02,
674: 2.46373e-02,
675: 2.30257e-02,
676: 2.15074e-02,
677: 2.00784e-02,
678: 1.87347e-02,
679: 1.74727e-02,
680: 1.62884e-02,
681: 1.51777e-02,
682: 1.41347e-02,
683: 1.31541e-02,
684: 1.22309e-02,
685: 1.13611e-02,
686: 1.05419e-02,
687: 9.77505e-03,
688: 9.06196e-03,
689: 8.40296e-03,
690: 7.79746e-03,
691: 7.24323e-03,
692: 6.73438e-03,
693: 6.26500e-03,
694: 5.83009e-03,
695: 5.42539e-03,
696: 5.04763e-03,
697: 4.69514e-03,
698: 4.36659e-03,
699: 4.06069e-03,
700: 3.77614e-03,
701: 3.51158e-03,
702: 3.26521e-03,
703: 3.03534e-03,
704: 2.82050e-03,
705: 2.61937e-03,
706: 2.43096e-03,
707: 2.25480e-03,
708: 2.09049e-03,
709: 1.93759e-03,
710: 1.79560e-03,
711: 1.66399e-03,
712: 1.54220e-03,
713: 1.42964e-03,
714: 1.32575e-03,
715: 1.22998e-03,
716: 1.14173e-03,
717: 1.06027e-03,
718: 9.84885e-04,
719: 9.14970e-04,
720: 8.49990e-04,
721: 7.89516e-04,
722: 7.33304e-04,
723: 6.81146e-04,
724: 6.32829e-04,
725: 5.88138e-04,
726: 5.46839e-04,
727: 5.08635e-04,
728: 4.73240e-04,
729: 4.40402e-04,
730: 4.09893e-04,
731: 3.81514e-04,
732: 3.55090e-04,
733: 3.30467e-04,
734: 3.07503e-04,
735: 2.86072e-04,
736: 2.66072e-04,
737: 2.47459e-04,
738: 2.30192e-04,
739: 2.14223e-04,
740: 1.99495e-04,
741: 1.85934e-04,
742: 1.73407e-04,
743: 1.61786e-04,
744: 1.50964e-04,
745: 1.40847e-04,
746: 1.31364e-04,
747: 1.22490e-04,
748: 1.14206e-04,
749: 1.06489e-04,
750: 9.93144e-05,
751: 9.26551e-05,
752: 8.64722e-05,
753: 8.07278e-05,
754: 7.53872e-05,
755: 7.04188e-05,
756: 6.57934e-05,
757: 6.14825e-05,
758: 5.74601e-05,
759: 5.37027e-05,
760: 5.01893e-05,
761: 4.69024e-05,
762: 4.38317e-05,
763: 4.09678e-05,
764: 3.83012e-05,
765: 3.58222e-05,
766: 3.35190e-05,
767: 3.13742e-05,
768: 2.93707e-05,
769: 2.74938e-05,
770: 2.57308e-05,
771: 2.40725e-05,
772: 2.25170e-05,
773: 2.10635e-05,
774: 1.97099e-05,
775: 1.84535e-05,
776: 1.72898e-05,
777: 1.62093e-05,
778: 1.52026e-05,
779: 1.42617e-05,
780: 1.33795e-05,
781: 1.25504e-05,
782: 1.17717e-05,
783: 1.10412e-05,
784: 1.03566e-05,
785: 9.71580e-06,
786: 9.11632e-06,
787: 8.55520e-06,
788: 8.02956e-06,
789: 7.53677e-06,
790: 7.07442e-06,
791: 6.64046e-06,
792: 6.23344e-06,
793: 5.85204e-06,
794: 5.49496e-06,
795: 5.16095e-06,
796: 4.84869e-06,
797: 4.55671e-06,
798: 4.28358e-06,
799: 4.02799e-06,
800: 3.78873e-06,
801: 3.56460e-06,
802: 3.35428e-06,
803: 3.15656e-06,
804: 2.97033e-06,
805: 2.79463e-06,
806: 2.62870e-06,
807: 2.47225e-06,
808: 2.32503e-06,
809: 2.18677e-06,
810: 2.05715e-06,
811: 1.93581e-06,
812: 1.82224e-06,
813: 1.71591e-06,
814: 1.61636e-06,
815: 1.52311e-06,
816: 1.43575e-06,
817: 1.35377e-06,
818: 1.27671e-06,
819: 1.20417e-06,
820: 1.13576e-06,
821: 1.07118e-06,
822: 1.01024e-06,
823: 9.52778e-07,
824: 8.98622e-07,
825: 8.47617e-07,
826: 7.99605e-07,
827: 7.54436e-07,
828: 7.11962e-07,
829: 6.72042e-07,
830: 6.34538e-07
}
}
SDS_LEFS_PHOTOPIC = LazyCaseInsensitiveMapping({
'CIE 1924 Photopic Standard Observer':
partial(
SpectralDistribution,
DATA_LEFS_PHOTOPIC['CIE 1924 Photopic Standard Observer'],
name='CIE 1924 Photopic Standard Observer'),
'Judd Modified CIE 1951 Photopic Standard Observer':
partial(
SpectralDistribution,
DATA_LEFS_PHOTOPIC[
'Judd Modified CIE 1951 Photopic Standard Observer'],
name='Judd Modified CIE 1951 Photopic Standard Observer'),
'Judd-Vos Modified CIE 1978 Photopic Standard Observer':
partial(
SpectralDistribution,
DATA_LEFS_PHOTOPIC[
'Judd-Vos Modified CIE 1978 Photopic Standard Observer'],
name='Judd-Vos Modified CIE 1978 Photopic Standard Observer'),
'CIE 1964 Photopic 10 Degree Standard Observer':
partial(
SpectralDistribution,
DATA_LEFS_PHOTOPIC[
'CIE 1964 Photopic 10 Degree Standard Observer'],
name='CIE 1964 Photopic 10 Degree Standard Observer',
strict_name='CIE 1964 Photopic 10$^\\circ$ Standard Observer'),
'CIE 2008 2 Degree Physiologically Relevant LEF':
partial(
SpectralDistribution,
DATA_LEFS_PHOTOPIC[
'CIE 2008 2 Degree Physiologically Relevant LEF'],
name='CIE 2008 2 Degree Physiologically Relevant LEF',
strict_name='CIE 2008 2$^\\circ$ Physiologically Relevant LEF'),
'CIE 2008 10 Degree Physiologically Relevant LEF':
partial(
SpectralDistribution,
DATA_LEFS_PHOTOPIC[
'CIE 2008 10 Degree Physiologically Relevant LEF'],
name='CIE 2008 10 Degree Physiologically Relevant LEF',
strict_name='CIE 2008 10$^\\circ$ Physiologically Relevant LEF')
})
SDS_LEFS_PHOTOPIC.__doc__ = """
Spectral distributions of the photopic luminous efficiency functions.
References
----------
:cite:`CVRLq`, :cite:`CVRLs`
SDS_LEFS_PHOTOPIC : LazyCaseInsensitiveMapping
**{'CIE 1924 Photopic Standard Observer',
'Judd Modified CIE 1951 Photopic Standard Observer',
'Judd-Vos Modified CIE 1978 Photopic Standard Observer',
'CIE 1964 Photopic 10 Degree Standard Observer',
'CIE 2008 2 Degree Physiologically Relevant LEF',
'CIE 2008 10 Degree Physiologically Relevant LEF'}**
Aliases:
- 'cie_2_1924': 'CIE 1931 2 Degree Standard Observer'
- 'cie_10_1964': 'CIE 1964 Photopic 10 Degree Standard Observer'
"""
SDS_LEFS_PHOTOPIC['cie_2_1924'] = (
SDS_LEFS_PHOTOPIC['CIE 1924 Photopic Standard Observer'])
SDS_LEFS_PHOTOPIC['cie_10_1964'] = (
SDS_LEFS_PHOTOPIC['CIE 1964 Photopic 10 Degree Standard Observer'])
DATA_LEFS_SCOTOPIC = {
'CIE 1951 Scotopic Standard Observer': {
380: 0.0005890000,
381: 0.0006650000,
382: 0.0007520000,
383: 0.0008540000,
384: 0.0009720000,
385: 0.0011080000,
386: 0.0012680000,
387: 0.0014530000,
388: 0.0016680000,
389: 0.0019180000,
390: 0.0022090000,
391: 0.0025470000,
392: 0.0029390000,
393: 0.0033940000,
394: 0.0039210000,
395: 0.0045300000,
396: 0.0052400000,
397: 0.0060500000,
398: 0.0069800000,
399: 0.0080600000,
400: 0.0092900000,
401: 0.0107000000,
402: 0.0123100000,
403: 0.0141300000,
404: 0.0161900000,
405: 0.0185200000,
406: 0.0211300000,
407: 0.0240500000,
408: 0.0273000000,
409: 0.0308900000,
410: 0.0348400000,
411: 0.0391600000,
412: 0.0439000000,
413: 0.0490000000,
414: 0.0545000000,
415: 0.0604000000,
416: 0.0668000000,
417: 0.0736000000,
418: 0.0808000000,
419: 0.0885000000,
420: 0.0966000000,
421: 0.1052000000,
422: 0.1141000000,
423: 0.1235000000,
424: 0.1334000000,
425: 0.1436000000,
426: 0.1541000000,
427: 0.1651000000,
428: 0.1764000000,
429: 0.1879000000,
430: 0.1998000000,
431: 0.2119000000,
432: 0.2243000000,
433: 0.2369000000,
434: 0.2496000000,
435: 0.2625000000,
436: 0.2755000000,
437: 0.2886000000,
438: 0.3017000000,
439: 0.3149000000,
440: 0.3281000000,
441: 0.3412000000,
442: 0.3543000000,
443: 0.3673000000,
444: 0.3803000000,
445: 0.3931000000,
446: 0.4060000000,
447: 0.4180000000,
448: 0.4310000000,
449: 0.4430000000,
450: 0.4550000000,
451: 0.4670000000,
452: 0.4790000000,
453: 0.4900000000,
454: 0.5020000000,
455: 0.5130000000,
456: 0.5240000000,
457: 0.5350000000,
458: 0.5460000000,
459: 0.5570000000,
460: 0.5670000000,
461: 0.5780000000,
462: 0.5880000000,
463: 0.5990000000,
464: 0.6100000000,
465: 0.6200000000,
466: 0.6310000000,
467: 0.6420000000,
468: 0.6530000000,
469: 0.6640000000,
470: 0.6760000000,
471: 0.6870000000,
472: 0.6990000000,
473: 0.7100000000,
474: 0.7220000000,
475: 0.7340000000,
476: 0.7450000000,
477: 0.7570000000,
478: 0.7690000000,
479: 0.7810000000,
480: 0.7930000000,
481: 0.8050000000,
482: 0.8170000000,
483: 0.8280000000,
484: 0.8400000000,
485: 0.8510000000,
486: 0.8620000000,
487: 0.8730000000,
488: 0.8840000000,
489: 0.8940000000,
490: 0.9040000000,
491: 0.9140000000,
492: 0.9230000000,
493: 0.9320000000,
494: 0.9410000000,
495: 0.9490000000,
496: 0.9570000000,
497: 0.9640000000,
498: 0.9700000000,
499: 0.9760000000,
500: 0.9820000000,
501: 0.9860000000,
502: 0.9900000000,
503: 0.9940000000,
504: 0.9970000000,
505: 0.9980000000,
506: 1.0000000000,
507: 1.0000000000,
508: 1.0000000000,
509: 0.9980000000,
510: 0.9970000000,
511: 0.9940000000,
512: 0.9900000000,
513: 0.9860000000,
514: 0.9810000000,
515: 0.9750000000,
516: 0.9680000000,
517: 0.9610000000,
518: 0.9530000000,
519: 0.9440000000,
520: 0.9350000000,
521: 0.9250000000,
522: 0.9150000000,
523: 0.9040000000,
524: 0.8920000000,
525: 0.8800000000,
526: 0.8670000000,
527: 0.8540000000,
528: 0.8400000000,
529: 0.8260000000,
530: 0.8110000000,
531: 0.7960000000,
532: 0.7810000000,
533: 0.7650000000,
534: 0.7490000000,
535: 0.7330000000,
536: 0.7170000000,
537: 0.7000000000,
538: 0.6830000000,
539: 0.6670000000,
540: 0.6500000000,
541: 0.6330000000,
542: 0.6160000000,
543: 0.5990000000,
544: 0.5810000000,
545: 0.5640000000,
546: 0.5480000000,
547: 0.5310000000,
548: 0.5140000000,
549: 0.4970000000,
550: 0.4810000000,
551: 0.4650000000,
552: 0.4480000000,
553: 0.4330000000,
554: 0.4170000000,
555: 0.4020000000,
556: 0.3864000000,
557: 0.3715000000,
558: 0.3569000000,
559: 0.3427000000,
560: 0.3288000000,
561: 0.3151000000,
562: 0.3018000000,
563: 0.2888000000,
564: 0.2762000000,
565: 0.2639000000,
566: 0.2519000000,
567: 0.2403000000,
568: 0.2291000000,
569: 0.2182000000,
570: 0.2076000000,
571: 0.1974000000,
572: 0.1876000000,
573: 0.1782000000,
574: 0.1690000000,
575: 0.1602000000,
576: 0.1517000000,
577: 0.1436000000,
578: 0.1358000000,
579: 0.1284000000,
580: 0.1212000000,
581: 0.1143000000,
582: 0.1078000000,
583: 0.1015000000,
584: 0.0956000000,
585: 0.0899000000,
586: 0.0845000000,
587: 0.0793000000,
588: 0.0745000000,
589: 0.0699000000,
590: 0.0655000000,
591: 0.0613000000,
592: 0.0574000000,
593: 0.0537000000,
594: 0.0502000000,
595: 0.0469000000,
596: 0.0438000000,
597: 0.0409000000,
598: 0.0381600000,
599: 0.0355800000,
600: 0.0331500000,
601: 0.0308700000,
602: 0.0287400000,
603: 0.0267400000,
604: 0.0248700000,
605: 0.0231200000,
606: 0.0214700000,
607: 0.0199400000,
608: 0.0185100000,
609: 0.0171800000,
610: 0.0159300000,
611: 0.0147700000,
612: 0.0136900000,
613: 0.0126900000,
614: 0.0117500000,
615: 0.0108800000,
616: 0.0100700000,
617: 0.0093200000,
618: 0.0086200000,
619: 0.0079700000,
620: 0.0073700000,
621: 0.0068200000,
622: 0.0063000000,
623: 0.0058200000,
624: 0.0053800000,
625: 0.0049700000,
626: 0.0045900000,
627: 0.0042400000,
628: 0.0039130000,
629: 0.0036130000,
630: 0.0033350000,
631: 0.0030790000,
632: 0.0028420000,
633: 0.0026230000,
634: 0.0024210000,
635: 0.0022350000,
636: 0.0020620000,
637: 0.0019030000,
638: 0.0017570000,
639: 0.0016210000,
640: 0.0014970000,
641: 0.0013820000,
642: 0.0012760000,
643: 0.0011780000,
644: 0.0010880000,
645: 0.0010050000,
646: 0.0009280000,
647: 0.0008570000,
648: 0.0007920000,
649: 0.0007320000,
650: 0.0006770000,
651: 0.0006260000,
652: 0.0005790000,
653: 0.0005360000,
654: 0.0004960000,
655: 0.0004590000,
656: 0.0004250000,
657: 0.0003935000,
658: 0.0003645000,
659: 0.0003377000,
660: 0.0003129000,
661: 0.0002901000,
662: 0.0002689000,
663: 0.0002493000,
664: 0.0002313000,
665: 0.0002146000,
666: 0.0001991000,
667: 0.0001848000,
668: 0.0001716000,
669: 0.0001593000,
670: 0.0001480000,
671: 0.0001375000,
672: 0.0001277000,
673: 0.0001187000,
674: 0.0001104000,
675: 0.0001026000,
676: 0.0000954000,
677: 0.0000888000,
678: 0.0000826000,
679: 0.0000769000,
680: 0.0000715000,
681: 0.0000666000,
682: 0.0000620000,
683: 0.0000578000,
684: 0.0000538000,
685: 0.0000501000,
686: 0.0000467000,
687: 0.0000436000,
688: 0.0000406000,
689: 0.0000378900,
690: 0.0000353300,
691: 0.0000329500,
692: 0.0000307500,
693: 0.0000287000,
694: 0.0000267900,
695: 0.0000250100,
696: 0.0000233600,
697: 0.0000218200,
698: 0.0000203800,
699: 0.0000190500,
700: 0.0000178000,
701: 0.0000166400,
702: 0.0000155600,
703: 0.0000145400,
704: 0.0000136000,
705: 0.0000127300,
706: 0.0000119100,
707: 0.0000111400,
708: 0.0000104300,
709: 0.0000097600,
710: 0.0000091400,
711: 0.0000085600,
712: 0.0000080200,
713: 0.0000075100,
714: 0.0000070400,
715: 0.0000066000,
716: 0.0000061800,
717: 0.0000058000,
718: 0.0000054400,
719: 0.0000051000,
720: 0.0000047800,
721: 0.0000044900,
722: 0.0000042100,
723: 0.0000039510,
724: 0.0000037090,
725: 0.0000034820,
726: 0.0000032700,
727: 0.0000030700,
728: 0.0000028840,
729: 0.0000027100,
730: 0.0000025460,
731: 0.0000023930,
732: 0.0000022500,
733: 0.0000021150,
734: 0.0000019890,
735: 0.0000018700,
736: 0.0000017590,
737: 0.0000016550,
738: 0.0000015570,
739: 0.0000014660,
740: 0.0000013790,
741: 0.0000012990,
742: 0.0000012230,
743: 0.0000011510,
744: 0.0000010840,
745: 0.0000010220,
746: 0.0000009620,
747: 0.0000009070,
748: 0.0000008550,
749: 0.0000008060,
750: 0.0000007600,
751: 0.0000007160,
752: 0.0000006750,
753: 0.0000006370,
754: 0.0000006010,
755: 0.0000005670,
756: 0.0000005350,
757: 0.0000005050,
758: 0.0000004770,
759: 0.0000004500,
760: 0.0000004250,
761: 0.0000004010,
762: 0.0000003790,
763: 0.0000003580,
764: 0.0000003382,
765: 0.0000003196,
766: 0.0000003021,
767: 0.0000002855,
768: 0.0000002699,
769: 0.0000002552,
770: 0.0000002413,
771: 0.0000002282,
772: 0.0000002159,
773: 0.0000002042,
774: 0.0000001932,
775: 0.0000001829,
776: 0.0000001731,
777: 0.0000001638,
778: 0.0000001551,
779: 0.0000001468,
780: 0.0000001390,
}
}
SDS_LEFS_SCOTOPIC = LazyCaseInsensitiveMapping({
'CIE 1951 Scotopic Standard Observer':
partial(
SpectralDistribution,
DATA_LEFS_SCOTOPIC['CIE 1951 Scotopic Standard Observer'],
name='CIE 1951 Scotopic Standard Observer')
})
SDS_LEFS_SCOTOPIC.__doc__ = """
Spectral distributions of the scotopic luminous efficiency functions.
References
----------
:cite:`CVRLs`
SDS_LEFS_SCOTOPIC : LazyCaseInsensitiveMapping
**{'CIE 1951 Scotopic Standard Observer', }**
Aliases:
- 'cie_1951': 'CIE 1951 Scotopic Standard Observer'
"""
SDS_LEFS_SCOTOPIC['cie_1951'] = (
SDS_LEFS_SCOTOPIC['CIE 1951 Scotopic Standard Observer'])
SDS_LEFS = LazyCaseInsensitiveMapping(SDS_LEFS_PHOTOPIC)
SDS_LEFS.__doc__ = """
Spectral distributions of the luminous efficiency functions.
References
----------
:cite:`CVRLq`, :cite:`CVRLs`, :cite:`Wikipedia2005d`
SDS_LEFS : LazyCaseInsensitiveMapping
**{'CIE 1924 Photopic Standard Observer',
'Judd Modified CIE 1951 Photopic Standard Observer',
'Judd-Vos Modified CIE 1978 Photopic Standard Observer',
'CIE 1964 Photopic 10 Degree Standard Observer',
'CIE 2008 2 Degree Physiologically Relevant LEF',
'CIE 2008 10 Degree Physiologically Relevant LEF',
'CIE 1951 Scotopic Standard Observer'}**
"""
SDS_LEFS.update(SDS_LEFS_SCOTOPIC)
DATA_MESOPIC_X = {
0.01:
CaseInsensitiveMapping({
'Blue Heavy': CaseInsensitiveMapping({
'MOVE': 0.13,
'LRC': 0.04
}),
'Red Heavy': CaseInsensitiveMapping({
'MOVE': 0.00,
'LRC': 0.01
})
}),
0.1:
CaseInsensitiveMapping({
'Blue Heavy': CaseInsensitiveMapping({
'MOVE': 0.42,
'LRC': 0.28
}),
'Red Heavy': CaseInsensitiveMapping({
'MOVE': 0.34,
'LRC': 0.11
})
}),
1.0:
CaseInsensitiveMapping({
'Blue Heavy': CaseInsensitiveMapping({
'MOVE': 0.70,
'LRC': 1.00
}),
'Red Heavy': CaseInsensitiveMapping({
'MOVE': 0.68,
'LRC': 1.00
})
}),
10:
CaseInsensitiveMapping({
'Blue Heavy': CaseInsensitiveMapping({
'MOVE': 0.98,
'LRC': 1.00
}),
'Red Heavy': CaseInsensitiveMapping({
'MOVE': 0.98,
'LRC': 1.00
})
})
}
"""
Weighting factors for the mesopic luminous efficiency function calculation.
DATA_MESOPIC_X : CaseInsensitiveMapping
"""
|
[
"functools.partial",
"colour.utilities.CaseInsensitiveMapping",
"colour.utilities.LazyCaseInsensitiveMapping"
] |
[((79529, 79574), 'colour.utilities.LazyCaseInsensitiveMapping', 'LazyCaseInsensitiveMapping', (['SDS_LEFS_PHOTOPIC'], {}), '(SDS_LEFS_PHOTOPIC)\n', (79555, 79574), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((65271, 65413), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_PHOTOPIC['CIE 1924 Photopic Standard Observer']"], {'name': '"""CIE 1924 Photopic Standard Observer"""'}), "(SpectralDistribution, DATA_LEFS_PHOTOPIC[\n 'CIE 1924 Photopic Standard Observer'], name=\n 'CIE 1924 Photopic Standard Observer')\n", (65278, 65413), False, 'from functools import partial\n'), ((65507, 65677), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_PHOTOPIC['Judd Modified CIE 1951 Photopic Standard Observer']"], {'name': '"""Judd Modified CIE 1951 Photopic Standard Observer"""'}), "(SpectralDistribution, DATA_LEFS_PHOTOPIC[\n 'Judd Modified CIE 1951 Photopic Standard Observer'], name=\n 'Judd Modified CIE 1951 Photopic Standard Observer')\n", (65514, 65677), False, 'from functools import partial\n'), ((65792, 65970), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_PHOTOPIC['Judd-Vos Modified CIE 1978 Photopic Standard Observer']"], {'name': '"""Judd-Vos Modified CIE 1978 Photopic Standard Observer"""'}), "(SpectralDistribution, DATA_LEFS_PHOTOPIC[\n 'Judd-Vos Modified CIE 1978 Photopic Standard Observer'], name=\n 'Judd-Vos Modified CIE 1978 Photopic Standard Observer')\n", (65799, 65970), False, 'from functools import partial\n'), ((66077, 66307), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_PHOTOPIC['CIE 1964 Photopic 10 Degree Standard Observer']"], {'name': '"""CIE 1964 Photopic 10 Degree Standard Observer"""', 'strict_name': '"""CIE 1964 Photopic 10$^\\\\circ$ Standard Observer"""'}), "(SpectralDistribution, DATA_LEFS_PHOTOPIC[\n 'CIE 1964 Photopic 10 Degree Standard Observer'], name=\n 'CIE 1964 Photopic 10 Degree Standard Observer', strict_name=\n 'CIE 1964 Photopic 10$^\\\\circ$ Standard Observer')\n", (66084, 66307), False, 'from functools import partial\n'), ((66422, 66655), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_PHOTOPIC['CIE 2008 2 Degree Physiologically Relevant LEF']"], {'name': '"""CIE 2008 2 Degree Physiologically Relevant LEF"""', 'strict_name': '"""CIE 2008 2$^\\\\circ$ Physiologically Relevant LEF"""'}), "(SpectralDistribution, DATA_LEFS_PHOTOPIC[\n 'CIE 2008 2 Degree Physiologically Relevant LEF'], name=\n 'CIE 2008 2 Degree Physiologically Relevant LEF', strict_name=\n 'CIE 2008 2$^\\\\circ$ Physiologically Relevant LEF')\n", (66429, 66655), False, 'from functools import partial\n'), ((66771, 67007), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_PHOTOPIC['CIE 2008 10 Degree Physiologically Relevant LEF']"], {'name': '"""CIE 2008 10 Degree Physiologically Relevant LEF"""', 'strict_name': '"""CIE 2008 10$^\\\\circ$ Physiologically Relevant LEF"""'}), "(SpectralDistribution, DATA_LEFS_PHOTOPIC[\n 'CIE 2008 10 Degree Physiologically Relevant LEF'], name=\n 'CIE 2008 10 Degree Physiologically Relevant LEF', strict_name=\n 'CIE 2008 10$^\\\\circ$ Physiologically Relevant LEF')\n", (66778, 67007), False, 'from functools import partial\n'), ((78942, 79084), 'functools.partial', 'partial', (['SpectralDistribution', "DATA_LEFS_SCOTOPIC['CIE 1951 Scotopic Standard Observer']"], {'name': '"""CIE 1951 Scotopic Standard Observer"""'}), "(SpectralDistribution, DATA_LEFS_SCOTOPIC[\n 'CIE 1951 Scotopic Standard Observer'], name=\n 'CIE 1951 Scotopic Standard Observer')\n", (78949, 79084), False, 'from functools import partial\n'), ((80273, 80324), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.13, 'LRC': 0.04}"], {}), "({'MOVE': 0.13, 'LRC': 0.04})\n", (80295, 80324), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((80397, 80447), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.0, 'LRC': 0.01}"], {}), "({'MOVE': 0.0, 'LRC': 0.01})\n", (80419, 80447), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((80575, 80626), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.42, 'LRC': 0.28}"], {}), "({'MOVE': 0.42, 'LRC': 0.28})\n", (80597, 80626), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((80699, 80750), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.34, 'LRC': 0.11}"], {}), "({'MOVE': 0.34, 'LRC': 0.11})\n", (80721, 80750), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((80877, 80926), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.7, 'LRC': 1.0}"], {}), "({'MOVE': 0.7, 'LRC': 1.0})\n", (80899, 80926), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((81001, 81051), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.68, 'LRC': 1.0}"], {}), "({'MOVE': 0.68, 'LRC': 1.0})\n", (81023, 81051), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((81178, 81228), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.98, 'LRC': 1.0}"], {}), "({'MOVE': 0.98, 'LRC': 1.0})\n", (81200, 81228), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n'), ((81302, 81352), 'colour.utilities.CaseInsensitiveMapping', 'CaseInsensitiveMapping', (["{'MOVE': 0.98, 'LRC': 1.0}"], {}), "({'MOVE': 0.98, 'LRC': 1.0})\n", (81324, 81352), False, 'from colour.utilities import CaseInsensitiveMapping, LazyCaseInsensitiveMapping\n')]
|
# Generated by Django 3.1.5 on 2021-05-08 12:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('hospital', '0023_auto_20210507_1439'),
]
operations = [
migrations.CreateModel(
name='CovidVaccination',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='PatientVaccination', to='hospital.patient')),
('vaccine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='Vaccine', to='hospital.medicines')),
],
),
]
|
[
"django.db.models.ForeignKey",
"django.db.models.AutoField"
] |
[((374, 467), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (390, 467), False, 'from django.db import migrations, models\n'), ((494, 619), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""PatientVaccination"""', 'to': '"""hospital.patient"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='PatientVaccination', to='hospital.patient')\n", (511, 619), False, 'from django.db import migrations, models\n'), ((645, 761), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""Vaccine"""', 'to': '"""hospital.medicines"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='Vaccine', to='hospital.medicines')\n", (662, 761), False, 'from django.db import migrations, models\n')]
|
from rl_coach.agents.ppo_agent import PPOAgentParameters
from rl_coach.architectures.layers import Dense
from rl_coach.base_parameters import VisualizationParameters, PresetValidationParameters, DistributedCoachSynchronizationType
from rl_coach.core_types import TrainingSteps, EnvironmentEpisodes, EnvironmentSteps
from rl_coach.environments.environment import SingleLevelSelection
from rl_coach.environments.gym_environment import GymVectorEnvironment, mujoco_v2
from rl_coach.filters.filter import InputFilter
from rl_coach.filters.observation.observation_normalization_filter import ObservationNormalizationFilter
from rl_coach.graph_managers.basic_rl_graph_manager import BasicRLGraphManager
from rl_coach.graph_managers.graph_manager import ScheduleParameters
####################
# Graph Scheduling #
####################
schedule_params = ScheduleParameters()
schedule_params.improve_steps = TrainingSteps(10000000000)
schedule_params.steps_between_evaluation_periods = EnvironmentSteps(2000)
schedule_params.evaluation_steps = EnvironmentEpisodes(1)
schedule_params.heatup_steps = EnvironmentSteps(0)
#########
# Agent #
#########
agent_params = PPOAgentParameters()
agent_params.network_wrappers['actor'].learning_rate = 0.001
agent_params.network_wrappers['critic'].learning_rate = 0.001
agent_params.network_wrappers['actor'].input_embedders_parameters['observation'].scheme = [Dense(64)]
agent_params.network_wrappers['actor'].middleware_parameters.scheme = [Dense(64)]
agent_params.network_wrappers['critic'].input_embedders_parameters['observation'].scheme = [Dense(64)]
agent_params.network_wrappers['critic'].middleware_parameters.scheme = [Dense(64)]
agent_params.input_filter = InputFilter()
agent_params.input_filter.add_observation_filter('observation', 'normalize', ObservationNormalizationFilter())
# Distributed Coach synchronization type.
agent_params.algorithm.distributed_coach_synchronization_type = DistributedCoachSynchronizationType.SYNC
###############
# Environment #
###############
env_params = GymVectorEnvironment()
env_params.level = '../automated_deep_compression/ADC.py:DistillerWrapperEnvironment'
vis_params = VisualizationParameters()
vis_params.dump_parameters_documentation = False
vis_params.render = True
vis_params.native_rendering = True
vis_params.dump_signals_to_csv_every_x_episodes = 1
graph_manager = BasicRLGraphManager(agent_params=agent_params, env_params=env_params,
schedule_params=schedule_params, vis_params=vis_params)
|
[
"rl_coach.architectures.layers.Dense",
"rl_coach.base_parameters.VisualizationParameters",
"rl_coach.core_types.TrainingSteps",
"rl_coach.agents.ppo_agent.PPOAgentParameters",
"rl_coach.environments.gym_environment.GymVectorEnvironment",
"rl_coach.core_types.EnvironmentSteps",
"rl_coach.graph_managers.graph_manager.ScheduleParameters",
"rl_coach.graph_managers.basic_rl_graph_manager.BasicRLGraphManager",
"rl_coach.core_types.EnvironmentEpisodes",
"rl_coach.filters.observation.observation_normalization_filter.ObservationNormalizationFilter",
"rl_coach.filters.filter.InputFilter"
] |
[((848, 868), 'rl_coach.graph_managers.graph_manager.ScheduleParameters', 'ScheduleParameters', ([], {}), '()\n', (866, 868), False, 'from rl_coach.graph_managers.graph_manager import ScheduleParameters\n'), ((901, 927), 'rl_coach.core_types.TrainingSteps', 'TrainingSteps', (['(10000000000)'], {}), '(10000000000)\n', (914, 927), False, 'from rl_coach.core_types import TrainingSteps, EnvironmentEpisodes, EnvironmentSteps\n'), ((979, 1001), 'rl_coach.core_types.EnvironmentSteps', 'EnvironmentSteps', (['(2000)'], {}), '(2000)\n', (995, 1001), False, 'from rl_coach.core_types import TrainingSteps, EnvironmentEpisodes, EnvironmentSteps\n'), ((1037, 1059), 'rl_coach.core_types.EnvironmentEpisodes', 'EnvironmentEpisodes', (['(1)'], {}), '(1)\n', (1056, 1059), False, 'from rl_coach.core_types import TrainingSteps, EnvironmentEpisodes, EnvironmentSteps\n'), ((1091, 1110), 'rl_coach.core_types.EnvironmentSteps', 'EnvironmentSteps', (['(0)'], {}), '(0)\n', (1107, 1110), False, 'from rl_coach.core_types import TrainingSteps, EnvironmentEpisodes, EnvironmentSteps\n'), ((1157, 1177), 'rl_coach.agents.ppo_agent.PPOAgentParameters', 'PPOAgentParameters', ([], {}), '()\n', (1175, 1177), False, 'from rl_coach.agents.ppo_agent import PPOAgentParameters\n'), ((1701, 1714), 'rl_coach.filters.filter.InputFilter', 'InputFilter', ([], {}), '()\n', (1712, 1714), False, 'from rl_coach.filters.filter import InputFilter\n'), ((2036, 2058), 'rl_coach.environments.gym_environment.GymVectorEnvironment', 'GymVectorEnvironment', ([], {}), '()\n', (2056, 2058), False, 'from rl_coach.environments.gym_environment import GymVectorEnvironment, mujoco_v2\n'), ((2159, 2184), 'rl_coach.base_parameters.VisualizationParameters', 'VisualizationParameters', ([], {}), '()\n', (2182, 2184), False, 'from rl_coach.base_parameters import VisualizationParameters, PresetValidationParameters, DistributedCoachSynchronizationType\n'), ((2362, 2491), 'rl_coach.graph_managers.basic_rl_graph_manager.BasicRLGraphManager', 'BasicRLGraphManager', ([], {'agent_params': 'agent_params', 'env_params': 'env_params', 'schedule_params': 'schedule_params', 'vis_params': 'vis_params'}), '(agent_params=agent_params, env_params=env_params,\n schedule_params=schedule_params, vis_params=vis_params)\n', (2381, 2491), False, 'from rl_coach.graph_managers.basic_rl_graph_manager import BasicRLGraphManager\n'), ((1393, 1402), 'rl_coach.architectures.layers.Dense', 'Dense', (['(64)'], {}), '(64)\n', (1398, 1402), False, 'from rl_coach.architectures.layers import Dense\n'), ((1475, 1484), 'rl_coach.architectures.layers.Dense', 'Dense', (['(64)'], {}), '(64)\n', (1480, 1484), False, 'from rl_coach.architectures.layers import Dense\n'), ((1578, 1587), 'rl_coach.architectures.layers.Dense', 'Dense', (['(64)'], {}), '(64)\n', (1583, 1587), False, 'from rl_coach.architectures.layers import Dense\n'), ((1661, 1670), 'rl_coach.architectures.layers.Dense', 'Dense', (['(64)'], {}), '(64)\n', (1666, 1670), False, 'from rl_coach.architectures.layers import Dense\n'), ((1792, 1824), 'rl_coach.filters.observation.observation_normalization_filter.ObservationNormalizationFilter', 'ObservationNormalizationFilter', ([], {}), '()\n', (1822, 1824), False, 'from rl_coach.filters.observation.observation_normalization_filter import ObservationNormalizationFilter\n')]
|
from kubernetes import client, config
from ruamel import yaml
from pathlib import Path
from zipfile import ZipFile
QHUB_SUPPORT_LOG_FILE = "./qhub-support-logs.zip"
def create_support_subcommand(subparser):
subparser = subparser.add_parser("support")
subparser.add_argument(
"-c", "--config", help="qhub configuration yaml file", required=True
)
subparser.add_argument(
"-o", "--output", default=QHUB_SUPPORT_LOG_FILE, help="output filename"
)
subparser.set_defaults(func=handle_support)
def handle_support(args):
config.load_kube_config()
v1 = client.CoreV1Api()
namespace = get_config_namespace(config=args.config)
pods = v1.list_namespaced_pod(namespace=namespace)
for pod in pods.items:
Path(f"./log/{namespace}").mkdir(parents=True, exist_ok=True)
path = Path(f"./log/{namespace}/{pod.metadata.name}.txt")
with path.open(mode="wt") as file:
try:
file.write(
"%s\t%s\t%s\n"
% (
pod.status.pod_ip,
namespace,
pod.metadata.name,
)
)
# some pods are running multiple containers
containers = [
_.name if len(pod.spec.containers) > 1 else None
for _ in pod.spec.containers
]
for container in containers:
if container is not None:
file.write(f"Container: {container}\n")
file.write(
v1.read_namespaced_pod_log(
name=pod.metadata.name,
namespace=namespace,
container=container,
)
)
except client.exceptions.ApiException as e:
file.write("%s not available" % pod.metadata.name)
raise e
with ZipFile(QHUB_SUPPORT_LOG_FILE, "w") as zip:
for file in list(Path(f"./log/{namespace}").glob("*.txt")):
print(file)
zip.write(file)
def get_config_namespace(config):
config_filename = Path(config)
if not config_filename.is_file():
raise ValueError(
f"passed in configuration filename={config_filename} must exist"
)
with config_filename.open() as f:
config = yaml.safe_load(f.read())
return config["namespace"]
|
[
"kubernetes.config.load_kube_config",
"pathlib.Path",
"zipfile.ZipFile",
"kubernetes.client.CoreV1Api"
] |
[((566, 591), 'kubernetes.config.load_kube_config', 'config.load_kube_config', ([], {}), '()\n', (589, 591), False, 'from kubernetes import client, config\n'), ((602, 620), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (618, 620), False, 'from kubernetes import client, config\n'), ((2253, 2265), 'pathlib.Path', 'Path', (['config'], {}), '(config)\n', (2257, 2265), False, 'from pathlib import Path\n'), ((848, 898), 'pathlib.Path', 'Path', (['f"""./log/{namespace}/{pod.metadata.name}.txt"""'], {}), "(f'./log/{namespace}/{pod.metadata.name}.txt')\n", (852, 898), False, 'from pathlib import Path\n'), ((2031, 2066), 'zipfile.ZipFile', 'ZipFile', (['QHUB_SUPPORT_LOG_FILE', '"""w"""'], {}), "(QHUB_SUPPORT_LOG_FILE, 'w')\n", (2038, 2066), False, 'from zipfile import ZipFile\n'), ((771, 797), 'pathlib.Path', 'Path', (['f"""./log/{namespace}"""'], {}), "(f'./log/{namespace}')\n", (775, 797), False, 'from pathlib import Path\n'), ((2100, 2126), 'pathlib.Path', 'Path', (['f"""./log/{namespace}"""'], {}), "(f'./log/{namespace}')\n", (2104, 2126), False, 'from pathlib import Path\n')]
|
from JumpscalePortalClassic.portal.macrolib import div_base
def main(j, args, params, *other_args):
return div_base.macro(j, args, params)
def match(j, args, params, tags, tasklet):
return True
|
[
"JumpscalePortalClassic.portal.macrolib.div_base.macro"
] |
[((117, 148), 'JumpscalePortalClassic.portal.macrolib.div_base.macro', 'div_base.macro', (['j', 'args', 'params'], {}), '(j, args, params)\n', (131, 148), False, 'from JumpscalePortalClassic.portal.macrolib import div_base\n')]
|
from collections import defaultdict
class Solution:
def accountsMerge(self, accounts: List[List[str]]) -> List[List[str]]:
graph=defaultdict(set)
email_name={}
for account in accounts:
name = account[0]
for email in account[1:]:
graph[email].add(account[1])
graph[account[1]].add(email)
email_name[email]=name
res=[]
seen=set()
for email in email_name:
if email not in seen:
que=[email]
seen.add(email)
sub=[email]
while que:
node=que.pop(0)
for nei in graph[node]:
if nei not in seen:
seen.add(nei)
que.append(nei)
sub.append(nei)
res.append([email_name[email]]+sorted(sub))
return res
|
[
"collections.defaultdict"
] |
[((142, 158), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (153, 158), False, 'from collections import defaultdict\n')]
|
from __future__ import print_function
import os
import re
from glob import glob
import numpy as np
import tensorflow as tf
from keras.utils.data_utils import get_file
def get_filename(key):
"""Rename tensor name to the corresponding Keras layer weight name.
# Arguments
key: tensor name in TF (determined by tf.variable_scope)
"""
filename = str(key)
filename = filename.replace('/', '_')
filename = filename.replace('xception_65_', '')
filename = filename.replace('decoder_','',1)
filename = filename.replace('BatchNorm','BN')
if 'Momentum' in filename:
return None
if 'entry_flow' in filename or 'exit_flow' in filename:
filename = filename.replace('_unit_1_xception_module','')
elif 'middle_flow' in filename:
filename = filename.replace('_block1','')
filename = filename.replace('_xception_module','')
# from TF to Keras naming
filename = filename.replace('_weights', '_kernel')
filename = filename.replace('_biases', '_bias')
return filename + '.npy'
def extract_tensors_from_checkpoint_file(filename, output_folder='weights'):
"""Extract tensors from a TF checkpoint file.
# Arguments
filename: TF checkpoint file
output_folder: where to save the output numpy array files
"""
if not os.path.exists(output_folder):
os.makedirs(output_folder)
reader = tf.train.NewCheckpointReader(filename)
for key in reader.get_variable_to_shape_map():
# convert tensor name into the corresponding Keras layer weight name and save
filename = get_filename(key)
if filename:
path = os.path.join(output_folder, get_filename(key))
arr = reader.get_tensor(key)
np.save(path, arr)
print("tensor_name: ", key)
CKPT_URL = 'http://download.tensorflow.org/models/deeplabv3_pascal_trainval_2018_01_04.tar.gz'
MODEL_DIR = 'models'
MODEL_SUBDIR = 'deeplabv3_pascal_trainval'
if not os.path.exists(MODEL_DIR):
os.makedirs(MODEL_DIR)
checkpoint_tar = get_file(
'deeplabv3_pascal_trainval_2018_01_04.tar.gz',
CKPT_URL,
extract=True,
cache_subdir='',
cache_dir=MODEL_DIR)
checkpoint_file = os.path.join(MODEL_DIR,MODEL_SUBDIR, 'model.ckpt')
extract_tensors_from_checkpoint_file(checkpoint_file)
|
[
"numpy.save",
"os.makedirs",
"os.path.exists",
"keras.utils.data_utils.get_file",
"tensorflow.train.NewCheckpointReader",
"os.path.join"
] |
[((2059, 2181), 'keras.utils.data_utils.get_file', 'get_file', (['"""deeplabv3_pascal_trainval_2018_01_04.tar.gz"""', 'CKPT_URL'], {'extract': '(True)', 'cache_subdir': '""""""', 'cache_dir': 'MODEL_DIR'}), "('deeplabv3_pascal_trainval_2018_01_04.tar.gz', CKPT_URL, extract=\n True, cache_subdir='', cache_dir=MODEL_DIR)\n", (2067, 2181), False, 'from keras.utils.data_utils import get_file\n'), ((2217, 2268), 'os.path.join', 'os.path.join', (['MODEL_DIR', 'MODEL_SUBDIR', '"""model.ckpt"""'], {}), "(MODEL_DIR, MODEL_SUBDIR, 'model.ckpt')\n", (2229, 2268), False, 'import os\n'), ((1407, 1445), 'tensorflow.train.NewCheckpointReader', 'tf.train.NewCheckpointReader', (['filename'], {}), '(filename)\n', (1435, 1445), True, 'import tensorflow as tf\n'), ((1988, 2013), 'os.path.exists', 'os.path.exists', (['MODEL_DIR'], {}), '(MODEL_DIR)\n', (2002, 2013), False, 'import os\n'), ((2019, 2041), 'os.makedirs', 'os.makedirs', (['MODEL_DIR'], {}), '(MODEL_DIR)\n', (2030, 2041), False, 'import os\n'), ((1327, 1356), 'os.path.exists', 'os.path.exists', (['output_folder'], {}), '(output_folder)\n', (1341, 1356), False, 'import os\n'), ((1366, 1392), 'os.makedirs', 'os.makedirs', (['output_folder'], {}), '(output_folder)\n', (1377, 1392), False, 'import os\n'), ((1761, 1779), 'numpy.save', 'np.save', (['path', 'arr'], {}), '(path, arr)\n', (1768, 1779), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 24 14:40:41 2019
@author
"""
def create_cell_cnn_inceptionv3():
from keras.applications.inception_v3 import InceptionV3
from keras import layers
from keras import models
input_tensor = layers.Input(shape=(224, 224, 3)) # this assumes K.image_data_format() == 'channels_last'
base_model = InceptionV3(input_tensor=input_tensor, weights='imagenet', include_top=False)
x = layers.Flatten()(base_model.output)
x = layers.Dense(1024, activation='relu')(x)
x = layers.Dense(147, activation='relu')(x)
head_model = models.Model(input=base_model.input, output = x)
head_model.summary()
return head_model
|
[
"keras.layers.Flatten",
"keras.models.Model",
"keras.layers.Dense",
"keras.applications.inception_v3.InceptionV3",
"keras.layers.Input"
] |
[((255, 288), 'keras.layers.Input', 'layers.Input', ([], {'shape': '(224, 224, 3)'}), '(shape=(224, 224, 3))\n', (267, 288), False, 'from keras import layers\n'), ((363, 440), 'keras.applications.inception_v3.InceptionV3', 'InceptionV3', ([], {'input_tensor': 'input_tensor', 'weights': '"""imagenet"""', 'include_top': '(False)'}), "(input_tensor=input_tensor, weights='imagenet', include_top=False)\n", (374, 440), False, 'from keras.applications.inception_v3 import InceptionV3\n'), ((604, 650), 'keras.models.Model', 'models.Model', ([], {'input': 'base_model.input', 'output': 'x'}), '(input=base_model.input, output=x)\n', (616, 650), False, 'from keras import models\n'), ((449, 465), 'keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (463, 465), False, 'from keras import layers\n'), ((493, 530), 'keras.layers.Dense', 'layers.Dense', (['(1024)'], {'activation': '"""relu"""'}), "(1024, activation='relu')\n", (505, 530), False, 'from keras import layers\n'), ((542, 578), 'keras.layers.Dense', 'layers.Dense', (['(147)'], {'activation': '"""relu"""'}), "(147, activation='relu')\n", (554, 578), False, 'from keras import layers\n')]
|
import logging
from pyAitu import executor, Bot, Dispatcher
from pyAitu.models import Message, Options, Form, Header, FormClosed, ItemInfo
API_TOKEN = 'YOUR_API_TOKEN'
bot = Bot(token=API_TOKEN)
dp = Dispatcher(bot)
logging.basicConfig(level=logging.INFO)
@dp.message_handler()
async def send_ui(message: Message):
item_info = ItemInfo(
content_id="item_info_id",
title="Item info title",
subtitle="Subtitle for item info",
options=Options(
title_lines_count=2,
subtitle_lines_count=3
)
)
header = Header(
_type="toolbar",
title="Title",
options=Options(closeable=True)
)
form = Form(_id="lol", header=header, content=item_info, options=Options(fullscreen=True))
await bot.send_form(message.chat.id, form=form)
@dp.form_closed_handler()
async def get_form_closed(fc: FormClosed):
await bot.send_message(fc.chat.id, "form closed")
if __name__ == '__main__':
executor.start_polling(dp)
|
[
"pyAitu.models.Options",
"logging.basicConfig",
"pyAitu.executor.start_polling",
"pyAitu.Dispatcher",
"pyAitu.Bot"
] |
[((176, 196), 'pyAitu.Bot', 'Bot', ([], {'token': 'API_TOKEN'}), '(token=API_TOKEN)\n', (179, 196), False, 'from pyAitu import executor, Bot, Dispatcher\n'), ((202, 217), 'pyAitu.Dispatcher', 'Dispatcher', (['bot'], {}), '(bot)\n', (212, 217), False, 'from pyAitu import executor, Bot, Dispatcher\n'), ((219, 258), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (238, 258), False, 'import logging\n'), ((986, 1012), 'pyAitu.executor.start_polling', 'executor.start_polling', (['dp'], {}), '(dp)\n', (1008, 1012), False, 'from pyAitu import executor, Bot, Dispatcher\n'), ((473, 525), 'pyAitu.models.Options', 'Options', ([], {'title_lines_count': '(2)', 'subtitle_lines_count': '(3)'}), '(title_lines_count=2, subtitle_lines_count=3)\n', (480, 525), False, 'from pyAitu.models import Message, Options, Form, Header, FormClosed, ItemInfo\n'), ((651, 674), 'pyAitu.models.Options', 'Options', ([], {'closeable': '(True)'}), '(closeable=True)\n', (658, 674), False, 'from pyAitu.models import Message, Options, Form, Header, FormClosed, ItemInfo\n'), ((750, 774), 'pyAitu.models.Options', 'Options', ([], {'fullscreen': '(True)'}), '(fullscreen=True)\n', (757, 774), False, 'from pyAitu.models import Message, Options, Form, Header, FormClosed, ItemInfo\n')]
|
#!/usr/bin/env python
#
# License: BSD
# https://raw.githubusercontent.com/splintered-reality/py_trees/devel/LICENSE
#
##############################################################################
# Documentation
##############################################################################
"""
A library of fundamental behaviours for use.
"""
##############################################################################
# Imports
##############################################################################
import copy
import functools
import operator
import typing
from py_trees import behaviour
from py_trees import blackboard
from py_trees import common
from py_trees import meta
##############################################################################
# Function Behaviours
##############################################################################
def success(self):
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
self.feedback_message = "success"
return common.Status.SUCCESS
def failure(self):
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
self.feedback_message = "failure"
return common.Status.FAILURE
def running(self):
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
self.feedback_message = "running"
return common.Status.RUNNING
def dummy(self):
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
self.feedback_message = "crash test dummy"
return common.Status.RUNNING
Success = meta.create_behaviour_from_function(success)
"""
Do nothing but tick over with :data:`~py_trees.common.Status.SUCCESS`.
"""
Failure = meta.create_behaviour_from_function(failure)
"""
Do nothing but tick over with :data:`~py_trees.common.Status.FAILURE`.
"""
Running = meta.create_behaviour_from_function(running)
"""
Do nothing but tick over with :data:`~py_trees.common.Status.RUNNING`.
"""
Dummy = meta.create_behaviour_from_function(dummy)
"""
Crash test dummy used for anything dangerous.
"""
##############################################################################
# Standalone Behaviours
##############################################################################
class Periodic(behaviour.Behaviour):
"""
Simply periodically rotates it's status over the
:data:`~py_trees.common.Status.RUNNING`, :data:`~py_trees.common.Status.SUCCESS`,
:data:`~py_trees.common.Status.FAILURE` states.
That is, :data:`~py_trees.common.Status.RUNNING` for N ticks,
:data:`~py_trees.common.Status.SUCCESS` for N ticks,
:data:`~py_trees.common.Status.FAILURE` for N ticks...
Args:
name (:obj:`str`): name of the behaviour
n (:obj:`int`): period value (in ticks)
.. note:: It does not reset the count when initialising.
"""
def __init__(self, name, n):
super(Periodic, self).__init__(name)
self.count = 0
self.period = n
self.response = common.Status.RUNNING
def update(self):
self.count += 1
if self.count > self.period:
if self.response == common.Status.FAILURE:
if self.logger.level < 1:
self.feedback_message = "flip to running"
self.response = common.Status.RUNNING
elif self.response == common.Status.RUNNING:
if self.logger.level < 1:
self.feedback_message = "flip to success"
self.response = common.Status.SUCCESS
else:
if self.logger.level < 1:
self.feedback_message = "flip to failure"
self.response = common.Status.FAILURE
self.count = 0
else:
if self.logger.level < 1:
self.feedback_message = "constant"
return self.response
class StatusSequence(behaviour.Behaviour):
"""
Cycle through the specified sequence of states.
Args:
name: name of the behaviour
sequence: list of status values to cycle through
eventually: status to use eventually, None to re-cycle the sequence
"""
def __init__(
self,
name: str,
sequence: typing.List[common.Status],
eventually: typing.Optional[common.Status]
):
super(StatusSequence, self).__init__(name)
self.sequence = sequence
self.eventually = eventually
self.current_sequence = copy.copy(sequence)
def update(self):
if self.current_sequence:
status = self.current_sequence.pop(0)
elif self.eventually is not None:
status = self.eventually
else:
self.current_sequence = copy.copy(self.sequence)
status = self.current_sequence.pop(0)
return status
class SuccessEveryN(behaviour.Behaviour):
"""
This behaviour updates it's status with :data:`~py_trees.common.Status.SUCCESS`
once every N ticks, :data:`~py_trees.common.Status.FAILURE` otherwise.
Args:
name (:obj:`str`): name of the behaviour
n (:obj:`int`): trigger success on every n'th tick
.. tip::
Use with decorators to change the status value as desired, e.g.
:meth:`py_trees.decorators.FailureIsRunning`
"""
def __init__(self, name, n):
super(SuccessEveryN, self).__init__(name)
self.count = 0
self.every_n = n
def update(self):
self.count += 1
if self.logger.level < 1:
self.logger.debug("%s.update()][%s]" % (self.__class__.__name__, self.count))
if self.count % self.every_n == 0:
if self.logger.level < 1:
self.feedback_message = "now"
return common.Status.SUCCESS
else:
if self.logger.level < 1:
self.feedback_message = "not yet"
return common.Status.FAILURE
class TickCounter(behaviour.Behaviour):
"""
A useful utility behaviour for demos and tests. Simply
ticks with :data:`~py_trees.common.Status.RUNNING` for
the specified number of ticks before returning the
requested completion status (:data:`~py_trees.common.Status.SUCCESS`
or :data:`~py_trees.common.Status.FAILURE`).
This behaviour will reset the tick counter when initialising.
Args:
name: name of the behaviour
duration: number of ticks to run
completion_status: status to switch to once the counter has expired
"""
def __init__(
self,
duration: int,
name=common.Name.AUTO_GENERATED,
completion_status: common.Status=common.Status.SUCCESS
):
super().__init__(name=name)
self.completion_status = completion_status
self.duration = duration
self.counter = 0
def initialise(self):
"""
Reset the tick counter.
"""
self.counter = 0
def update(self):
"""
Increment the tick counter and return the appropriate status for this behaviour
based on the tick count.
Returns
:data:`~py_trees.common.Status.RUNNING` while not expired, the given completion status otherwise
"""
self.counter += 1
if self.counter <= self.duration:
return common.Status.RUNNING
else:
return self.completion_status
class Count(behaviour.Behaviour):
"""
A counting behaviour that updates its status at each tick depending on
the value of the counter. The status will move through the states in order -
:data:`~py_trees.common.Status.FAILURE`, :data:`~py_trees.common.Status.RUNNING`,
:data:`~py_trees.common.Status.SUCCESS`.
This behaviour is useful for simple testing and demo scenarios.
Args:
name (:obj:`str`): name of the behaviour
fail_until (:obj:`int`): set status to :data:`~py_trees.common.Status.FAILURE` until the counter reaches this value
running_until (:obj:`int`): set status to :data:`~py_trees.common.Status.RUNNING` until the counter reaches this value
success_until (:obj:`int`): set status to :data:`~py_trees.common.Status.SUCCESS` until the counter reaches this value
reset (:obj:`bool`): whenever invalidated (usually by a sequence reinitialising, or higher priority interrupting)
Attributes:
count (:obj:`int`): a simple counter which increments every tick
"""
def __init__(self, name="Count", fail_until=3, running_until=5, success_until=6, reset=True):
super(Count, self).__init__(name)
self.count = 0
self.fail_until = fail_until
self.running_until = running_until
self.success_until = success_until
self.number_count_resets = 0
self.number_updated = 0
self.reset = reset
def terminate(self, new_status):
if self.logger.level < 1:
self.logger.debug("%s.terminate(%s->%s)" % (self.__class__.__name__, self.status, new_status))
# reset only if udpate got us into an invalid state
if new_status == common.Status.INVALID and self.reset:
self.count = 0
self.number_count_resets += 1
self.feedback_message = ""
def update(self):
self.number_updated += 1
self.count += 1
if self.count <= self.fail_until:
if self.logger.level < 1:
self.logger.debug("%s.update()[%s: failure]" % (self.__class__.__name__, self.count))
self.feedback_message = "failing"
return common.Status.FAILURE
elif self.count <= self.running_until:
if self.logger.level < 1:
self.logger.debug("%s.update()[%s: running]" % (self.__class__.__name__, self.count))
self.feedback_message = "running"
return common.Status.RUNNING
elif self.count <= self.success_until:
if self.logger.level < 1:
self.logger.debug("%s.update()[%s: success]" % (self.__class__.__name__, self.count))
self.feedback_message = "success"
return common.Status.SUCCESS
else:
if self.logger.level < 1:
self.logger.debug("%s.update()[%s: failure]" % (self.__class__.__name__, self.count))
self.feedback_message = "failing forever more"
return common.Status.FAILURE
def __repr__(self):
"""
Simple string representation of the object.
Returns:
:obj:`str`: string representation
"""
s = "%s\n" % self.name
s += " Status : %s\n" % self.status
s += " Count : %s\n" % self.count
s += " Resets : %s\n" % self.number_count_resets
s += " Updates: %s\n" % self.number_updated
return s
##############################################################################
# Blackboard Behaviours
##############################################################################
class BlackboardToStatus(behaviour.Behaviour):
"""
This behaviour reverse engineers the :class:`~py_trees.decorators.StatusToBlackboard`
decorator. Used in conjuction with that decorator, this behaviour can be used to
reflect the status of a decision elsewhere in the tree.
.. note::
A word of caution. The consequences of a behaviour's status should be discernable
upon inspection of the tree graph. If using StatusToBlackboard
and BlackboardToStatus to reflect a behaviour's status across a tree,
this is no longer true. The graph of the tree communicates the local consequences,
but not the reflected consequences at the point BlackboardToStatus is used. A
recommendation, use this class only where other options are infeasible or impractical.
Args:
variable_name: name of the variable look for, may be nested, e.g. battery.percentage
name: name of the behaviour
Raises:
KeyError: if the variable doesn't exist
TypeError: if the variable isn't of type :py:data:`~py_trees.common.Status`
"""
def __init__(
self,
variable_name: str,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED
):
super().__init__(name=name)
name_components = variable_name.split('.')
self.key = name_components[0]
self.key_attributes = '.'.join(name_components[1:]) # empty string if no other parts
self.variable_name = variable_name
self.blackboard = self.attach_blackboard_client()
self.blackboard.register_key(key=self.key, access=common.Access.READ)
def update(self) -> common.Status:
"""
Check for existence.
Returns:
:data:`~py_trees.common.Status.SUCCESS` if key found, :data:`~py_trees.common.Status.FAILURE` otherwise.
"""
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
# raises a KeyError if the variable doesn't exist
status = self.blackboard.get(self.variable_name)
if type(status) != common.Status:
raise TypeError(f"{self.variable_name} is not of type py_trees.common.Status")
if self.logger.level < 1:
self.feedback_message = f"{self.variable_name}: {status}"
return status
class CheckBlackboardVariableExists(behaviour.Behaviour):
"""
Check the blackboard to verify if a specific variable (key-value pair)
exists. This is non-blocking, so will always tick with
status :data:`~py_trees.common.Status.FAILURE`
:data:`~py_trees.common.Status.SUCCESS`.
.. seealso::
:class:`~py_trees.behaviours.WaitForBlackboardVariable` for
the blocking counterpart to this behaviour.
Args:
variable_name: name of the variable look for, may be nested, e.g. battery.percentage
name: name of the behaviour
"""
def __init__(
self,
variable_name: str,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED
):
super().__init__(name=name)
self.variable_name = variable_name
name_components = variable_name.split('.')
self.key = name_components[0]
self.key_attributes = '.'.join(name_components[1:]) # empty string if no other parts
self.blackboard = self.attach_blackboard_client()
self.blackboard.register_key(key=self.key, access=common.Access.READ)
def update(self) -> common.Status:
"""
Check for existence.
Returns:
:data:`~py_trees.common.Status.SUCCESS` if key found, :data:`~py_trees.common.Status.FAILURE` otherwise.
"""
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
try:
unused_value = self.blackboard.get(self.variable_name)
if self.logger.level < 1:
self.feedback_message = "variable '{}' found".format(self.variable_name)
return common.Status.SUCCESS
except KeyError:
if self.logger.level < 1:
self.feedback_message = "variable '{}' not found".format(self.variable_name)
return common.Status.FAILURE
class WaitForBlackboardVariable(CheckBlackboardVariableExists):
"""
Wait for the blackboard variable to become available on the blackboard.
This is blocking, so it will tick with
status :data:`~py_trees.common.Status.SUCCESS` if the variable is found,
and :data:`~py_trees.common.Status.RUNNING` otherwise.
.. seealso::
:class:`~py_trees.behaviours.CheckBlackboardVariableExists` for
the non-blocking counterpart to this behaviour.
Args:
variable_name: name of the variable to wait for, may be nested, e.g. battery.percentage
name: name of the behaviour
"""
def __init__(
self,
variable_name: str,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED
):
super().__init__(name=name, variable_name=variable_name)
def update(self) -> common.Status:
"""
Check for existence, wait otherwise.
Returns:
:data:`~py_trees.common.Status.SUCCESS` if key found, :data:`~py_trees.common.Status.RUNNING` otherwise.
"""
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
new_status = super().update()
# CheckBlackboardExists only returns SUCCESS || FAILURE
if new_status == common.Status.SUCCESS:
if self.logger.level < 1:
self.feedback_message = "'{}' found".format(self.key)
return common.Status.SUCCESS
else: # new_status == common.Status.FAILURE
if self.logger.level < 1:
self.feedback_message = "waiting for key '{}'...".format(self.key)
return common.Status.RUNNING
class UnsetBlackboardVariable(behaviour.Behaviour):
"""
Unset the specified variable (key-value pair) from the blackboard.
This always returns
:data:`~py_trees.common.Status.SUCCESS` regardless of whether
the variable was already present or not.
Args:
key: unset this key-value pair
name: name of the behaviour
"""
def __init__(self,
key: str,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED,
):
super().__init__(name=name)
self.key = key
self.blackboard = self.attach_blackboard_client()
self.blackboard.register_key(key=self.key, access=common.Access.WRITE)
def update(self) -> common.Status:
"""
Unset and always return success.
Returns:
:data:`~py_trees.common.Status.SUCCESS`
"""
if self.logger.level < 1:
if self.blackboard.unset(self.key):
self.feedback_message = "'{}' found and removed".format(self.key)
else:
self.feedback_message = "'{}' not found, nothing to remove"
return common.Status.SUCCESS
class SetBlackboardVariable(behaviour.Behaviour):
"""
Set the specified variable on the blackboard.
Args:
variable_name: name of the variable to set, may be nested, e.g. battery.percentage
variable_value: value of the variable to set
overwrite: when False, do not set the variable if it already exists
name: name of the behaviour
"""
def __init__(
self,
variable_name: str,
variable_value: typing.Union[typing.Any, typing.Callable[[], typing.Any]],
overwrite: bool = True,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED,
):
super().__init__(name=name)
self.variable_name = variable_name
name_components = variable_name.split('.')
self.key = name_components[0]
self.key_attributes = '.'.join(name_components[1:]) # empty string if no other parts
self.blackboard = self.attach_blackboard_client()
self.blackboard.register_key(key=self.key, access=common.Access.WRITE)
self.variable_value_generator = variable_value if callable(variable_value) else lambda: variable_value
self.overwrite = overwrite
def update(self) -> common.Status:
"""
Always return success.
Returns:
:data:`~py_trees.common.Status.FAILURE` if no overwrite requested and the variable exists, :data:`~py_trees.common.Status.SUCCESS` otherwise
"""
if self.blackboard.set(
self.variable_name,
self.variable_value_generator(),
overwrite=self.overwrite
):
return common.Status.SUCCESS
else:
return common.Status.FAILURE
class CheckBlackboardVariableValue(behaviour.Behaviour):
"""
Inspect a blackboard variable and if it exists, check that it
meets the specified criteria (given by operation type and expected value).
This is non-blocking, so it will always tick with
:data:`~py_trees.common.Status.SUCCESS` or
:data:`~py_trees.common.Status.FAILURE`.
Args:
check: a comparison expression to check against
name: name of the behaviour
.. note::
If the variable does not yet exist on the blackboard, the behaviour will
return with status :data:`~py_trees.common.Status.FAILURE`.
.. tip::
The python `operator module`_ includes many useful comparison operations.
"""
def __init__(
self,
check: common.ComparisonExpression,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED
):
super().__init__(name=name)
self.check = check
name_components = self.check.variable.split('.')
self.key = name_components[0]
self.key_attributes = '.'.join(name_components[1:]) # empty string if no other parts
self.blackboard = self.attach_blackboard_client()
self.blackboard.register_key(key=self.key, access=common.Access.READ)
def update(self):
"""
Check for existence, or the appropriate match on the expected value.
Returns:
:class:`~py_trees.common.Status`: :data:`~py_trees.common.Status.FAILURE` if not matched, :data:`~py_trees.common.Status.SUCCESS` otherwise.
"""
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
try:
value = self.blackboard.get(self.key)
if self.key_attributes:
try:
value = operator.attrgetter(self.key_attributes)(value)
except AttributeError:
if self.logger.level < 1:
self.feedback_message = 'blackboard key-value pair exists, but the value does not have the requested nested attributes [{}]'.format(self.variable_name)
return common.Status.FAILURE
except KeyError:
if self.logger.level < 1:
self.feedback_message = "key '{}' does not yet exist on the blackboard".format(self.check.variable)
return common.Status.FAILURE
success = self.check.operator(value, self.check.value)
if success:
if self.logger.level < 1:
self.feedback_message = "'%s' comparison succeeded [v: %s][e: %s]" % (self.check.variable, value, self.check.value)
return common.Status.SUCCESS
else:
if self.logger.level < 1:
self.feedback_message = "'%s' comparison failed [v: %s][e: %s]" % (self.check.variable, value, self.check.value)
return common.Status.FAILURE
class WaitForBlackboardVariableValue(CheckBlackboardVariableValue):
"""
Inspect a blackboard variable and if it exists, check that it
meets the specified criteria (given by operation type and expected value).
This is blocking, so it will always tick with
:data:`~py_trees.common.Status.SUCCESS` or
:data:`~py_trees.common.Status.RUNNING`.
.. seealso::
:class:`~py_trees.behaviours.CheckBlackboardVariableValue` for
the non-blocking counterpart to this behaviour.
.. note::
If the variable does not yet exist on the blackboard, the behaviour will
return with status :data:`~py_trees.common.Status.RUNNING`.
Args:
check: a comparison expression to check against
name: name of the behaviour
"""
def __init__(
self,
check: common.ComparisonExpression,
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED
):
super().__init__(
check=check,
name=name
)
def update(self):
"""
Check for existence, or the appropriate match on the expected value.
Returns:
:class:`~py_trees.common.Status`: :data:`~py_trees.common.Status.FAILURE` if not matched, :data:`~py_trees.common.Status.SUCCESS` otherwise.
"""
new_status = super().update()
if new_status == common.Status.FAILURE:
return common.Status.RUNNING
else:
return new_status
class CheckBlackboardVariableValues(behaviour.Behaviour):
"""
Apply a logical operation across a set of blackboard variable checks.
This is non-blocking, so will always tick with status
:data:`~py_trees.common.Status.FAILURE` or
:data:`~py_trees.common.Status.SUCCESS`.
Args:
checks: a list of comparison checks to apply to blackboard variables
logical_operator: a logical check to apply across the results of the blackboard variable checks
name: name of the behaviour
namespace: optionally store results of the checks (boolean) under this namespace
.. tip::
The python `operator module`_ includes many useful logical operators, e.g. operator.xor.
Raises:
ValueError if less than two variable checks are specified (insufficient for logical operations)
"""
def __init__(
self,
checks: typing.List[common.ComparisonExpression],
operator: typing.Callable[[bool, bool], bool],
name: typing.Union[str, common.Name]=common.Name.AUTO_GENERATED,
namespace: typing.Optional[str]=None,
):
super().__init__(name=name)
self.checks = checks
self.operator = operator
self.blackboard = self.attach_blackboard_client()
if len(checks) < 2:
raise ValueError("Must be at least two variables to operate on [only {} provided]".format(len(checks)))
for check in self.checks:
self.blackboard.register_key(
key=blackboard.Blackboard.key(check.variable),
access=common.Access.READ
)
self.blackboard_results = None
if namespace is not None:
self.blackboard_results = self.attach_blackboard_client(namespace=namespace)
for counter in range(1, len(self.checks) + 1):
self.blackboard_results.register_key(
key=str(counter),
access=common.Access.WRITE
)
def update(self) -> common.Status:
"""
Applies comparison checks on each variable and a logical check across the
complete set of variables.
Returns:
:data:`~py_trees.common.Status.FAILURE` if key retrieval or logical checks failed, :data:`~py_trees.common.Status.SUCCESS` otherwise.
"""
if self.logger.level < 1:
self.logger.debug("%s.update()" % self.__class__.__name__)
results = []
for check in self.checks:
try:
value = self.blackboard.get(check.variable)
except KeyError:
if self.logger.level < 1:
self.feedback_message = "variable '{}' does not yet exist on the blackboard".format(check.variable)
return common.Status.FAILURE
results.append(check.operator(value, check.value))
if self.blackboard_results is not None:
for counter in range(1, len(results) + 1):
self.blackboard_results.set(str(counter), results[counter - 1])
logical_result = functools.reduce(self.operator, results)
if logical_result:
if self.logger.level < 1:
self.feedback_message = "[{}]".format(
"|".join(["T" if result else "F" for result in results])
)
return common.Status.SUCCESS
else:
if self.logger.level < 1:
self.feedback_message = "[{}]".format(
"|".join(["T" if result else "F" for result in results])
)
return common.Status.FAILURE
|
[
"py_trees.meta.create_behaviour_from_function",
"py_trees.blackboard.Blackboard.key",
"copy.copy",
"operator.attrgetter",
"functools.reduce"
] |
[((1667, 1711), 'py_trees.meta.create_behaviour_from_function', 'meta.create_behaviour_from_function', (['success'], {}), '(success)\n', (1702, 1711), False, 'from py_trees import meta\n'), ((1802, 1846), 'py_trees.meta.create_behaviour_from_function', 'meta.create_behaviour_from_function', (['failure'], {}), '(failure)\n', (1837, 1846), False, 'from py_trees import meta\n'), ((1937, 1981), 'py_trees.meta.create_behaviour_from_function', 'meta.create_behaviour_from_function', (['running'], {}), '(running)\n', (1972, 1981), False, 'from py_trees import meta\n'), ((2070, 2112), 'py_trees.meta.create_behaviour_from_function', 'meta.create_behaviour_from_function', (['dummy'], {}), '(dummy)\n', (2105, 2112), False, 'from py_trees import meta\n'), ((4583, 4602), 'copy.copy', 'copy.copy', (['sequence'], {}), '(sequence)\n', (4592, 4602), False, 'import copy\n'), ((27479, 27519), 'functools.reduce', 'functools.reduce', (['self.operator', 'results'], {}), '(self.operator, results)\n', (27495, 27519), False, 'import functools\n'), ((4839, 4863), 'copy.copy', 'copy.copy', (['self.sequence'], {}), '(self.sequence)\n', (4848, 4863), False, 'import copy\n'), ((25912, 25953), 'py_trees.blackboard.Blackboard.key', 'blackboard.Blackboard.key', (['check.variable'], {}), '(check.variable)\n', (25937, 25953), False, 'from py_trees import blackboard\n'), ((21803, 21843), 'operator.attrgetter', 'operator.attrgetter', (['self.key_attributes'], {}), '(self.key_attributes)\n', (21822, 21843), False, 'import operator\n')]
|
#!/usr/bin/env python3
import cv2
import terminator as t
#
# Camera, HUD overlays, sounds...
#
T800_CAMERA = 0
HUD_VIDEO = "overlay1.mp4"
HUD_SOUND = "overlay1.mp3"
WINDOW_NAME = "T800 Vision"
#
# Overlay HUD on visual input...
#
hud = t.HeadsUpDisplay(HUD_VIDEO, HUD_SOUND) # Access HUD analysis...
cam = t.TerminatorVision(T800_CAMERA, hud.get_shape()) # Access T800 visual cortex...
while True:
camFrame = cam.read()
hudFrame = hud.read()
out = cv2.addWeighted(camFrame, 1.0, hudFrame, 1.0, 0) # Overlay HUD.
# Display the resulting frame...
cv2.imshow(WINDOW_NAME, out)
if cv2.waitKey(1) == 27:
break
#
# When everything done, release visual input and HUD...
#
cam.release()
hud.release()
cv2.destroyAllWindows()
|
[
"cv2.waitKey",
"terminator.HeadsUpDisplay",
"cv2.imshow",
"cv2.addWeighted",
"cv2.destroyAllWindows"
] |
[((240, 278), 'terminator.HeadsUpDisplay', 't.HeadsUpDisplay', (['HUD_VIDEO', 'HUD_SOUND'], {}), '(HUD_VIDEO, HUD_SOUND)\n', (256, 278), True, 'import terminator as t\n'), ((736, 759), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (757, 759), False, 'import cv2\n'), ((466, 514), 'cv2.addWeighted', 'cv2.addWeighted', (['camFrame', '(1.0)', 'hudFrame', '(1.0)', '(0)'], {}), '(camFrame, 1.0, hudFrame, 1.0, 0)\n', (481, 514), False, 'import cv2\n'), ((573, 601), 'cv2.imshow', 'cv2.imshow', (['WINDOW_NAME', 'out'], {}), '(WINDOW_NAME, out)\n', (583, 601), False, 'import cv2\n'), ((610, 624), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (621, 624), False, 'import cv2\n')]
|
import copy
import sys
if sys.version_info < (3,):
range = xrange
import numpy as np
import pandas as pd
import scipy.stats as ss
from patsy import dmatrices, dmatrix, demo_data
from .. import families as fam
from .. import tsm as tsm
from .. import data_check as dc
from .kalman import *
class DAR(tsm.TSM):
""" Inherits time series methods from TSM class.
**** DYNAMIC AUTOREGRESSIVE MODEL ****
Parameters
----------
ar : int
Number of autoregressive lags
data : pd.DataFrame
Field to specify the data that will be used
"""
def __init__(self, data, ar, integ=0, target=None):
# Initialize TSM object
super(DAR, self).__init__('DAR')
# Latent Variable information
self.ar = ar
self.integ = integ
self.target = target
self.model_name = "DAR(" + str(self.ar) + ", integrated=" + str(self.integ) + ")"
self.max_lag = self.ar
self._z_hide = 0 # Whether to cutoff latent variables from results table
self.supported_methods = ["MLE", "PML", "Laplace", "M-H", "BBVI"]
self.default_method = "MLE"
self.multivariate_model = False
# Format the data
self.data_original = data.copy()
self.data, self.data_name, self.is_pandas, self.index = dc.data_check(data,target)
self.data = self.data.astype(np.float) # treat as float for Cython
self.data_original_nondf = self.data.copy()
# Difference data
for order in range(0, self.integ):
self.data = np.diff(self.data)
self.data_name = "Differenced " + self.data_name
self.X = self._ar_matrix()
self.data = self.data[self.max_lag:]
self.y = self.data
self.y_name = self.data_name
self._create_latent_variables()
self.z_no = len(self.latent_variables.z_list)
def _ar_matrix(self):
""" Creates Autoregressive matrix
Returns
----------
X : np.ndarray
Autoregressive Matrix
"""
Y = np.array(self.data[self.max_lag:self.data.shape[0]])
X = np.ones(Y.shape[0])
if self.ar != 0:
for i in range(0, self.ar):
X = np.vstack((X,self.data[(self.max_lag-i-1):-i-1]))
return X.T
def _create_latent_variables(self):
""" Creates model latent variables
Returns
----------
None (changes model attributes)
"""
self.latent_variables.add_z('Sigma^2 irregular', fam.Flat(transform='exp'), fam.Normal(0,3))
self.latent_variables.add_z('Constant', fam.Flat(transform=None), fam.Normal(0,3))
for parm in range(1,self.ar+1):
self.latent_variables.add_z('Sigma^2 AR(' + str(parm) + ')', fam.Flat(transform='exp'), fam.Normal(0,3))
def _forecast_model(self,beta,Z,h):
""" Creates forecasted states and variances
Parameters
----------
beta : np.ndarray
Contains untransformed starting values for latent variables
Returns
----------
a : np.ndarray
Forecasted states
P : np.ndarray
Variance of forecasted states
"""
T, _, R, Q, H = self._ss_matrices(beta)
return dl_univariate_kalman_fcst(self.data,Z,H,T,Q,R,0.0,h)
def _model(self,data,beta):
""" Creates the structure of the model
Parameters
----------
data : np.array
Contains the time series
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
a,P,K,F,v : np.array
Filted states, filtered variances, Kalman gains, F matrix, residuals
"""
T, Z, R, Q, H = self._ss_matrices(beta)
return dl_univariate_kalman(data,Z,H,T,Q,R,0.0)
def _ss_matrices(self,beta):
""" Creates the state space matrices required
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
T, Z, R, Q, H : np.array
State space matrices used in KFS algorithm
"""
T = np.identity(self.z_no-1)
H = np.identity(1)*self.latent_variables.z_list[0].prior.transform(beta[0])
Z = self.X
R = np.identity(self.z_no-1)
Q = np.identity(self.z_no-1)
for i in range(0,self.z_no-1):
Q[i][i] = self.latent_variables.z_list[i+1].prior.transform(beta[i+1])
return T, Z, R, Q, H
def neg_loglik(self,beta):
""" Creates the negative log marginal likelihood of the model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
The negative log logliklihood of the model
"""
_, _, _, F, v = self._model(self.y,beta)
loglik = 0.0
for i in range(0,self.y.shape[0]):
loglik += np.linalg.slogdet(F[:,:,i])[1] + np.dot(v[i],np.dot(np.linalg.pinv(F[:,:,i]),v[i]))
return -(-((self.y.shape[0]/2)*np.log(2*np.pi))-0.5*loglik.T[0].sum())
def plot_predict(self, h=5, past_values=20, intervals=True, **kwargs):
""" Makes forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
past_values : int (default : 20)
How many past observations to show on the forecast graph?
intervals : Boolean
Would you like to show 95% prediction intervals for the forecast?
Returns
----------
- Plot of the forecast
"""
import matplotlib.pyplot as plt
import seaborn as sns
figsize = kwargs.get('figsize',(10,7))
if self.latent_variables.estimated is False:
raise Exception("No latent variables estimated!")
else:
y_holder = self.y.copy() # holds past data and predicted data to create AR matrix
full_X = self.X.copy()
full_X = np.append(full_X,np.array([np.append(1.0, y_holder[-self.ar:][::-1])]), axis=0)
Z = full_X
# Construct Z matrix
for step in range(h):
a, P = self._forecast_model(self.latent_variables.get_z_values(),Z,step)
new_value = np.dot(Z[-1,:],a[:,self.y.shape[0]+step])
y_holder = np.append(y_holder, new_value)
Z = np.append(Z, np.array([np.append(1.0, y_holder[-self.ar:][::-1])]), axis=0)
# Retrieve data, dates and (transformed) latent variables
a, P = self._forecast_model(self.latent_variables.get_z_values(),Z,h)
smoothed_series = np.zeros(self.y.shape[0]+h)
series_variance = np.zeros(self.y.shape[0]+h)
for t in range(self.y.shape[0]+h):
smoothed_series[t] = np.dot(Z[t],a[:,t])
series_variance[t] = np.dot(np.dot(Z[t],P[:,:,t]),Z[t].T) + self.latent_variables.z_list[0].prior.transform(self.latent_variables.get_z_values()[0])
date_index = self.shift_dates(h)
plot_values = smoothed_series[-h-past_values:]
forecasted_values = smoothed_series[-h:]
lower = forecasted_values - 1.98*np.power(series_variance[-h:],0.5)
upper = forecasted_values + 1.98*np.power(series_variance[-h:],0.5)
lower = np.append(plot_values[-h-1],lower)
upper = np.append(plot_values[-h-1],upper)
plot_index = date_index[-h-past_values:]
plt.figure(figsize=figsize)
if intervals == True:
plt.fill_between(date_index[-h-1:], lower, upper, alpha=0.2)
plt.plot(plot_index,plot_values)
plt.title("Forecast for " + self.y_name)
plt.xlabel("Time")
plt.ylabel(self.y_name)
plt.show()
def plot_fit(self,intervals=False,**kwargs):
""" Plots the fit of the model
Parameters
----------
intervals : Boolean
Whether to plot 95% confidence interval of states
Returns
----------
None (plots data and the fit)
"""
import matplotlib.pyplot as plt
import seaborn as sns
figsize = kwargs.get('figsize',(10,7))
series_type = kwargs.get('series_type','Smoothed')
if self.latent_variables.estimated is False:
raise Exception("No latent variables estimated!")
else:
date_index = copy.deepcopy(self.index)
date_index = date_index[self.integ+self.ar:]
if series_type == 'Smoothed':
mu, V = self.smoothed_state(self.data,self.latent_variables.get_z_values())
elif series_type == 'Filtered':
mu, V, _, _, _ = self._model(self.data,self.latent_variables.get_z_values())
else:
mu, V = self.smoothed_state(self.data,self.latent_variables.get_z_values())
# Create smoothed/filtered aggregate series
_, Z, _, _, _ = self._ss_matrices(self.latent_variables.get_z_values())
smoothed_series = np.zeros(self.y.shape[0])
for t in range(0,self.y.shape[0]):
smoothed_series[t] = np.dot(Z[t],mu[:,t])
plt.figure(figsize=figsize)
plt.subplot(self.z_no+1, 1, 1)
plt.title(self.y_name + " Raw and " + series_type)
plt.plot(date_index,self.data,label='Data')
plt.plot(date_index,smoothed_series,label=series_type,c='black')
plt.legend(loc=2)
for coef in range(0,self.z_no-1):
V_coef = V[0][coef][:-1]
plt.subplot(self.z_no+1, 1, 2+coef)
plt.title("Beta " + self.latent_variables.z_list[1+coef].name)
if intervals == True:
alpha =[0.15*i/float(100) for i in range(50,12,-2)]
plt.fill_between(date_index[5:], mu[coef,0:mu.shape[1]-1][5:] + 1.98*np.sqrt(V_coef[5:]), mu[coef,0:mu.shape[1]-1][5:] - 1.98*np.sqrt(V_coef[5:]), alpha=0.15,label='95% C.I.')
plt.plot(date_index,mu[coef,0:mu.shape[1]-1],label='Data')
plt.legend(loc=2)
plt.subplot(self.z_no+1, 1, self.z_no+1)
plt.title("Measurement Error")
plt.plot(date_index,self.data-smoothed_series,label='Irregular')
plt.legend(loc=2)
plt.show()
def predict(self, h=5):
""" Makes forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
Returns
----------
- pd.DataFrame with predictions
"""
if self.latent_variables.estimated is False:
raise Exception("No latent variables estimated!")
else:
y_holder = self.y.copy() # holds past data and predicted data to create AR matrix
full_X = self.X.copy()
full_X = np.append(full_X,np.array([np.append(1.0, y_holder[-self.ar:][::-1])]), axis=0)
Z = full_X
for step in range(h):
a, P = self._forecast_model(self.latent_variables.get_z_values(),Z,step)
new_value = np.dot(Z[-1,:],a[:,self.y.shape[0]+step])
y_holder = np.append(y_holder, new_value)
Z = np.append(Z, np.array([np.append(1.0, y_holder[-self.ar:][::-1])]), axis=0)
date_index = self.shift_dates(h)
result = pd.DataFrame(y_holder[-h:])
result.rename(columns={0:self.y_name}, inplace=True)
result.index = date_index[-h:]
return result
def predict_is(self, h=5, fit_once=True):
""" Makes dynamic in-sample predictions with the estimated model
Parameters
----------
h : int (default : 5)
How many steps would you like to forecast?
fit_once : boolean
(default: True) Fits only once before the in-sample prediction; if False, fits after every new datapoint
Returns
----------
- pd.DataFrame with predicted values
"""
predictions = []
for t in range(0,h):
data1 = self.data_original_nondf[:-h+t]
x = DAR(data=data1, ar=self.ar, integ=self.integ)
if fit_once is False:
x.fit(printer=False)
if t == 0:
if fit_once is True:
x.fit(printer=False)
saved_lvs = x.latent_variables
predictions = x.predict(1)
else:
if fit_once is True:
x.latent_variables = saved_lvs
predictions = pd.concat([predictions,x.predict(1)])
predictions.rename(columns={0:self.y_name}, inplace=True)
predictions.index = self.index[-h:]
return predictions
def plot_predict_is(self, h=5, **kwargs):
""" Plots forecasts with the estimated model against data
(Simulated prediction with data)
Parameters
----------
h : int (default : 5)
How many steps to forecast
Returns
----------
- Plot of the forecast against data
"""
import matplotlib.pyplot as plt
import seaborn as sns
figsize = kwargs.get('figsize',(10,7))
plt.figure(figsize=figsize)
predictions = self.predict_is(h)
data = self.data[-h:]
plt.plot(predictions.index,data,label='Data')
plt.plot(predictions.index,predictions,label='Predictions',c='black')
plt.title(self.y_name)
plt.legend(loc=2)
plt.show()
def simulation_smoother(self,beta):
""" Koopman's simulation smoother - simulates from states given
model parameters and observations
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
- A simulated state evolution
"""
T, Z, R, Q, H = self._ss_matrices(beta)
# Generate e_t+ and n_t+
rnd_h = np.random.normal(0,np.sqrt(H),self.data.shape[0]+1)
q_dist = ss.multivariate_normal([0.0, 0.0], Q)
rnd_q = q_dist.rvs(self.data.shape[0]+1)
# Generate a_t+ and y_t+
a_plus = np.zeros((T.shape[0],self.data.shape[0]+1))
a_plus[0,0] = np.mean(self.data[0:5])
y_plus = np.zeros(self.data.shape[0])
for t in range(0,self.data.shape[0]+1):
if t == 0:
a_plus[:,t] = np.dot(T,a_plus[:,t]) + rnd_q[t,:]
y_plus[t] = np.dot(Z,a_plus[:,t]) + rnd_h[t]
else:
if t != self.data.shape[0]:
a_plus[:,t] = np.dot(T,a_plus[:,t-1]) + rnd_q[t,:]
y_plus[t] = np.dot(Z,a_plus[:,t]) + rnd_h[t]
alpha_hat, _ = self.smoothed_state(self.data,beta)
alpha_hat_plus, _ = self.smoothed_state(y_plus,beta)
alpha_tilde = alpha_hat - alpha_hat_plus + a_plus
return alpha_tilde
def smoothed_state(self,data,beta):
""" Creates the negative log marginal likelihood of the model
Parameters
----------
data : np.array
Data to be smoothed
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
- Smoothed states
"""
T, Z, R, Q, H = self._ss_matrices(beta)
alpha, V = dl_univariate_KFS(data,Z,H,T,Q,R,0.0)
return alpha, V
|
[
"matplotlib.pyplot.title",
"numpy.ones",
"matplotlib.pyplot.figure",
"numpy.mean",
"matplotlib.pyplot.fill_between",
"numpy.linalg.pinv",
"pandas.DataFrame",
"numpy.power",
"numpy.identity",
"numpy.append",
"copy.deepcopy",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"scipy.stats.multivariate_normal",
"matplotlib.pyplot.ylabel",
"numpy.dot",
"numpy.vstack",
"matplotlib.pyplot.subplot",
"numpy.log",
"matplotlib.pyplot.plot",
"numpy.zeros",
"numpy.diff",
"numpy.array",
"numpy.linalg.slogdet",
"matplotlib.pyplot.xlabel",
"numpy.sqrt"
] |
[((2067, 2119), 'numpy.array', 'np.array', (['self.data[self.max_lag:self.data.shape[0]]'], {}), '(self.data[self.max_lag:self.data.shape[0]])\n', (2075, 2119), True, 'import numpy as np\n'), ((2132, 2151), 'numpy.ones', 'np.ones', (['Y.shape[0]'], {}), '(Y.shape[0])\n', (2139, 2151), True, 'import numpy as np\n'), ((4274, 4300), 'numpy.identity', 'np.identity', (['(self.z_no - 1)'], {}), '(self.z_no - 1)\n', (4285, 4300), True, 'import numpy as np\n'), ((4421, 4447), 'numpy.identity', 'np.identity', (['(self.z_no - 1)'], {}), '(self.z_no - 1)\n', (4432, 4447), True, 'import numpy as np\n'), ((4467, 4493), 'numpy.identity', 'np.identity', (['(self.z_no - 1)'], {}), '(self.z_no - 1)\n', (4478, 4493), True, 'import numpy as np\n'), ((13747, 13774), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (13757, 13774), True, 'import matplotlib.pyplot as plt\n'), ((13854, 13901), 'matplotlib.pyplot.plot', 'plt.plot', (['predictions.index', 'data'], {'label': '"""Data"""'}), "(predictions.index, data, label='Data')\n", (13862, 13901), True, 'import matplotlib.pyplot as plt\n'), ((13908, 13980), 'matplotlib.pyplot.plot', 'plt.plot', (['predictions.index', 'predictions'], {'label': '"""Predictions"""', 'c': '"""black"""'}), "(predictions.index, predictions, label='Predictions', c='black')\n", (13916, 13980), True, 'import matplotlib.pyplot as plt\n'), ((13986, 14008), 'matplotlib.pyplot.title', 'plt.title', (['self.y_name'], {}), '(self.y_name)\n', (13995, 14008), True, 'import matplotlib.pyplot as plt\n'), ((14017, 14034), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(2)'}), '(loc=2)\n', (14027, 14034), True, 'import matplotlib.pyplot as plt\n'), ((14046, 14056), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (14054, 14056), True, 'import matplotlib.pyplot as plt\n'), ((14621, 14658), 'scipy.stats.multivariate_normal', 'ss.multivariate_normal', (['[0.0, 0.0]', 'Q'], {}), '([0.0, 0.0], Q)\n', (14643, 14658), True, 'import scipy.stats as ss\n'), ((14759, 14805), 'numpy.zeros', 'np.zeros', (['(T.shape[0], self.data.shape[0] + 1)'], {}), '((T.shape[0], self.data.shape[0] + 1))\n', (14767, 14805), True, 'import numpy as np\n'), ((14826, 14849), 'numpy.mean', 'np.mean', (['self.data[0:5]'], {}), '(self.data[0:5])\n', (14833, 14849), True, 'import numpy as np\n'), ((14867, 14895), 'numpy.zeros', 'np.zeros', (['self.data.shape[0]'], {}), '(self.data.shape[0])\n', (14875, 14895), True, 'import numpy as np\n'), ((1561, 1579), 'numpy.diff', 'np.diff', (['self.data'], {}), '(self.data)\n', (1568, 1579), True, 'import numpy as np\n'), ((4311, 4325), 'numpy.identity', 'np.identity', (['(1)'], {}), '(1)\n', (4322, 4325), True, 'import numpy as np\n'), ((6925, 6954), 'numpy.zeros', 'np.zeros', (['(self.y.shape[0] + h)'], {}), '(self.y.shape[0] + h)\n', (6933, 6954), True, 'import numpy as np\n'), ((6983, 7012), 'numpy.zeros', 'np.zeros', (['(self.y.shape[0] + h)'], {}), '(self.y.shape[0] + h)\n', (6991, 7012), True, 'import numpy as np\n'), ((7622, 7659), 'numpy.append', 'np.append', (['plot_values[-h - 1]', 'lower'], {}), '(plot_values[-h - 1], lower)\n', (7631, 7659), True, 'import numpy as np\n'), ((7677, 7714), 'numpy.append', 'np.append', (['plot_values[-h - 1]', 'upper'], {}), '(plot_values[-h - 1], upper)\n', (7686, 7714), True, 'import numpy as np\n'), ((7779, 7806), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (7789, 7806), True, 'import matplotlib.pyplot as plt\n'), ((7943, 7976), 'matplotlib.pyplot.plot', 'plt.plot', (['plot_index', 'plot_values'], {}), '(plot_index, plot_values)\n', (7951, 7976), True, 'import matplotlib.pyplot as plt\n'), ((7988, 8028), 'matplotlib.pyplot.title', 'plt.title', (["('Forecast for ' + self.y_name)"], {}), "('Forecast for ' + self.y_name)\n", (7997, 8028), True, 'import matplotlib.pyplot as plt\n'), ((8041, 8059), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (8051, 8059), True, 'import matplotlib.pyplot as plt\n'), ((8072, 8095), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['self.y_name'], {}), '(self.y_name)\n', (8082, 8095), True, 'import matplotlib.pyplot as plt\n'), ((8108, 8118), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8116, 8118), True, 'import matplotlib.pyplot as plt\n'), ((8755, 8780), 'copy.deepcopy', 'copy.deepcopy', (['self.index'], {}), '(self.index)\n', (8768, 8780), False, 'import copy\n'), ((9391, 9416), 'numpy.zeros', 'np.zeros', (['self.y.shape[0]'], {}), '(self.y.shape[0])\n', (9399, 9416), True, 'import numpy as np\n'), ((9536, 9563), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (9546, 9563), True, 'import matplotlib.pyplot as plt\n'), ((9590, 9622), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(self.z_no + 1)', '(1)', '(1)'], {}), '(self.z_no + 1, 1, 1)\n', (9601, 9622), True, 'import matplotlib.pyplot as plt\n'), ((9633, 9683), 'matplotlib.pyplot.title', 'plt.title', (["(self.y_name + ' Raw and ' + series_type)"], {}), "(self.y_name + ' Raw and ' + series_type)\n", (9642, 9683), True, 'import matplotlib.pyplot as plt\n'), ((9699, 9744), 'matplotlib.pyplot.plot', 'plt.plot', (['date_index', 'self.data'], {'label': '"""Data"""'}), "(date_index, self.data, label='Data')\n", (9707, 9744), True, 'import matplotlib.pyplot as plt\n'), ((9755, 9822), 'matplotlib.pyplot.plot', 'plt.plot', (['date_index', 'smoothed_series'], {'label': 'series_type', 'c': '"""black"""'}), "(date_index, smoothed_series, label=series_type, c='black')\n", (9763, 9822), True, 'import matplotlib.pyplot as plt\n'), ((9832, 9849), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(2)'}), '(loc=2)\n', (9842, 9849), True, 'import matplotlib.pyplot as plt\n'), ((10531, 10575), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(self.z_no + 1)', '(1)', '(self.z_no + 1)'], {}), '(self.z_no + 1, 1, self.z_no + 1)\n', (10542, 10575), True, 'import matplotlib.pyplot as plt\n'), ((10584, 10614), 'matplotlib.pyplot.title', 'plt.title', (['"""Measurement Error"""'], {}), "('Measurement Error')\n", (10593, 10614), True, 'import matplotlib.pyplot as plt\n'), ((10627, 10695), 'matplotlib.pyplot.plot', 'plt.plot', (['date_index', '(self.data - smoothed_series)'], {'label': '"""Irregular"""'}), "(date_index, self.data - smoothed_series, label='Irregular')\n", (10635, 10695), True, 'import matplotlib.pyplot as plt\n'), ((10704, 10721), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(2)'}), '(loc=2)\n', (10714, 10721), True, 'import matplotlib.pyplot as plt\n'), ((10738, 10748), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10746, 10748), True, 'import matplotlib.pyplot as plt\n'), ((11862, 11889), 'pandas.DataFrame', 'pd.DataFrame', (['y_holder[-h:]'], {}), '(y_holder[-h:])\n', (11874, 11889), True, 'import pandas as pd\n'), ((14571, 14581), 'numpy.sqrt', 'np.sqrt', (['H'], {}), '(H)\n', (14578, 14581), True, 'import numpy as np\n'), ((2238, 2292), 'numpy.vstack', 'np.vstack', (['(X, self.data[self.max_lag - i - 1:-i - 1])'], {}), '((X, self.data[self.max_lag - i - 1:-i - 1]))\n', (2247, 2292), True, 'import numpy as np\n'), ((6537, 6583), 'numpy.dot', 'np.dot', (['Z[-1, :]', 'a[:, self.y.shape[0] + step]'], {}), '(Z[-1, :], a[:, self.y.shape[0] + step])\n', (6543, 6583), True, 'import numpy as np\n'), ((6606, 6636), 'numpy.append', 'np.append', (['y_holder', 'new_value'], {}), '(y_holder, new_value)\n', (6615, 6636), True, 'import numpy as np\n'), ((7095, 7116), 'numpy.dot', 'np.dot', (['Z[t]', 'a[:, t]'], {}), '(Z[t], a[:, t])\n', (7101, 7116), True, 'import numpy as np\n'), ((7857, 7919), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['date_index[-h - 1:]', 'lower', 'upper'], {'alpha': '(0.2)'}), '(date_index[-h - 1:], lower, upper, alpha=0.2)\n', (7873, 7919), True, 'import matplotlib.pyplot as plt\n'), ((9502, 9524), 'numpy.dot', 'np.dot', (['Z[t]', 'mu[:, t]'], {}), '(Z[t], mu[:, t])\n', (9508, 9524), True, 'import numpy as np\n'), ((9958, 9997), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(self.z_no + 1)', '(1)', '(2 + coef)'], {}), '(self.z_no + 1, 1, 2 + coef)\n', (9969, 9997), True, 'import matplotlib.pyplot as plt\n'), ((10010, 10074), 'matplotlib.pyplot.title', 'plt.title', (["('Beta ' + self.latent_variables.z_list[1 + coef].name)"], {}), "('Beta ' + self.latent_variables.z_list[1 + coef].name)\n", (10019, 10074), True, 'import matplotlib.pyplot as plt\n'), ((10398, 10461), 'matplotlib.pyplot.plot', 'plt.plot', (['date_index', 'mu[coef, 0:mu.shape[1] - 1]'], {'label': '"""Data"""'}), "(date_index, mu[coef, 0:mu.shape[1] - 1], label='Data')\n", (10406, 10461), True, 'import matplotlib.pyplot as plt\n'), ((10473, 10490), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(2)'}), '(loc=2)\n', (10483, 10490), True, 'import matplotlib.pyplot as plt\n'), ((11598, 11644), 'numpy.dot', 'np.dot', (['Z[-1, :]', 'a[:, self.y.shape[0] + step]'], {}), '(Z[-1, :], a[:, self.y.shape[0] + step])\n', (11604, 11644), True, 'import numpy as np\n'), ((11667, 11697), 'numpy.append', 'np.append', (['y_holder', 'new_value'], {}), '(y_holder, new_value)\n', (11676, 11697), True, 'import numpy as np\n'), ((5124, 5153), 'numpy.linalg.slogdet', 'np.linalg.slogdet', (['F[:, :, i]'], {}), '(F[:, :, i])\n', (5141, 5153), True, 'import numpy as np\n'), ((7487, 7522), 'numpy.power', 'np.power', (['series_variance[-h:]', '(0.5)'], {}), '(series_variance[-h:], 0.5)\n', (7495, 7522), True, 'import numpy as np\n'), ((7567, 7602), 'numpy.power', 'np.power', (['series_variance[-h:]', '(0.5)'], {}), '(series_variance[-h:], 0.5)\n', (7575, 7602), True, 'import numpy as np\n'), ((14998, 15021), 'numpy.dot', 'np.dot', (['T', 'a_plus[:, t]'], {}), '(T, a_plus[:, t])\n', (15004, 15021), True, 'import numpy as np\n'), ((15061, 15084), 'numpy.dot', 'np.dot', (['Z', 'a_plus[:, t]'], {}), '(Z, a_plus[:, t])\n', (15067, 15084), True, 'import numpy as np\n'), ((5176, 5202), 'numpy.linalg.pinv', 'np.linalg.pinv', (['F[:, :, i]'], {}), '(F[:, :, i])\n', (5190, 5202), True, 'import numpy as np\n'), ((5247, 5264), 'numpy.log', 'np.log', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (5253, 5264), True, 'import numpy as np\n'), ((6276, 6317), 'numpy.append', 'np.append', (['(1.0)', 'y_holder[-self.ar:][::-1]'], {}), '(1.0, y_holder[-self.ar:][::-1])\n', (6285, 6317), True, 'import numpy as np\n'), ((7159, 7183), 'numpy.dot', 'np.dot', (['Z[t]', 'P[:, :, t]'], {}), '(Z[t], P[:, :, t])\n', (7165, 7183), True, 'import numpy as np\n'), ((11370, 11411), 'numpy.append', 'np.append', (['(1.0)', 'y_holder[-self.ar:][::-1]'], {}), '(1.0, y_holder[-self.ar:][::-1])\n', (11379, 11411), True, 'import numpy as np\n'), ((15190, 15217), 'numpy.dot', 'np.dot', (['T', 'a_plus[:, t - 1]'], {}), '(T, a_plus[:, t - 1])\n', (15196, 15217), True, 'import numpy as np\n'), ((15259, 15282), 'numpy.dot', 'np.dot', (['Z', 'a_plus[:, t]'], {}), '(Z, a_plus[:, t])\n', (15265, 15282), True, 'import numpy as np\n'), ((6680, 6721), 'numpy.append', 'np.append', (['(1.0)', 'y_holder[-self.ar:][::-1]'], {}), '(1.0, y_holder[-self.ar:][::-1])\n', (6689, 6721), True, 'import numpy as np\n'), ((11741, 11782), 'numpy.append', 'np.append', (['(1.0)', 'y_holder[-self.ar:][::-1]'], {}), '(1.0, y_holder[-self.ar:][::-1])\n', (11750, 11782), True, 'import numpy as np\n'), ((10274, 10293), 'numpy.sqrt', 'np.sqrt', (['V_coef[5:]'], {}), '(V_coef[5:])\n', (10281, 10293), True, 'import numpy as np\n'), ((10331, 10350), 'numpy.sqrt', 'np.sqrt', (['V_coef[5:]'], {}), '(V_coef[5:])\n', (10338, 10350), True, 'import numpy as np\n')]
|
import pandas as pd
import numpy as np
import tensorflow as tf
import sys
sys.path.append("/data")
csv = pd.read_csv("bmi.csv")
csv["height"] = csv["height"] / 200
csv["weight"] = csv["weight"] / 100
bclass = {"thin": [1, 0, 0], "normal": [0, 1, 0], "fat": [0, 0, 1]}
csv["label_pat"] = csv["label"].apply(lambda x: np.array(bclass[x]))
test_csv = csv[15000:20000]
test_pat = test_csv[["weight", "height"]]
test_ans = list(test_csv["label_pat"])
x = tf.placeholder(tf.float32, [None, 2])
y_ = tf.placeholder(tf.float32, [None, 3])
W = tf.Variable(tf.zeros([2, 3]))
b = tf.Variable(tf.zeros([3]))
y = tf.nn.softmax(tf.matmul(x, W) + b)
cross_entropy = -tf.reduce_sum(y_ * tf.log(y))
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(cross_entropy)
predict = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(predict, tf.float32))
sess = tf.Session()
tw = tf.summary.FileWriter("log_dir", graph=sess.graph)
tf.train.write_graph(sess.graph_def, '/data/scripts/study_case/pbtxt_files', 'ml_plactice_tbbmi.pbtxt')
sess.run(tf.initialize_all_variables())
'''inserted code'''
from scripts.utils.tf_utils import TensorFlowScheduler
scheduler = TensorFlowScheduler(name="ml_plactice.ch5.tb-bmi")
'''inserted code'''
step = 0
while True:
i = (step * 100) % 14000
rows = csv[1 + i: 1 + i + 100]
x_pat = rows[["weight", "height"]]
y_ans = list(rows["label_pat"])
fd = {x: x_pat, y_: y_ans}
_, loss = sess.run([train, cross_entropy], feed_dict=fd)
if step % 500 == 0:
cre = sess.run(cross_entropy, feed_dict=fd)
acc = sess.run(accuracy, feed_dict={x: test_pat, y_: test_ans})
# print("step=", step, "cre=", cre, "acc=", acc)
step += 1
'''inserted code'''
scheduler.loss_checker(loss)
scheduler.check_time()
'''inserted code'''
|
[
"sys.path.append",
"tensorflow.log",
"pandas.read_csv",
"tensorflow.argmax",
"tensorflow.Session",
"tensorflow.placeholder",
"tensorflow.train.write_graph",
"tensorflow.summary.FileWriter",
"tensorflow.zeros",
"tensorflow.cast",
"tensorflow.initialize_all_variables",
"numpy.array",
"tensorflow.matmul",
"tensorflow.train.GradientDescentOptimizer",
"scripts.utils.tf_utils.TensorFlowScheduler"
] |
[((75, 99), 'sys.path.append', 'sys.path.append', (['"""/data"""'], {}), "('/data')\n", (90, 99), False, 'import sys\n'), ((106, 128), 'pandas.read_csv', 'pd.read_csv', (['"""bmi.csv"""'], {}), "('bmi.csv')\n", (117, 128), True, 'import pandas as pd\n'), ((453, 490), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 2]'], {}), '(tf.float32, [None, 2])\n', (467, 490), True, 'import tensorflow as tf\n'), ((496, 533), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 3]'], {}), '(tf.float32, [None, 3])\n', (510, 533), True, 'import tensorflow as tf\n'), ((699, 738), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['(0.01)'], {}), '(0.01)\n', (732, 738), True, 'import tensorflow as tf\n'), ((900, 912), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (910, 912), True, 'import tensorflow as tf\n'), ((918, 968), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['"""log_dir"""'], {'graph': 'sess.graph'}), "('log_dir', graph=sess.graph)\n", (939, 968), True, 'import tensorflow as tf\n'), ((969, 1076), 'tensorflow.train.write_graph', 'tf.train.write_graph', (['sess.graph_def', '"""/data/scripts/study_case/pbtxt_files"""', '"""ml_plactice_tbbmi.pbtxt"""'], {}), "(sess.graph_def, '/data/scripts/study_case/pbtxt_files',\n 'ml_plactice_tbbmi.pbtxt')\n", (989, 1076), True, 'import tensorflow as tf\n'), ((1202, 1252), 'scripts.utils.tf_utils.TensorFlowScheduler', 'TensorFlowScheduler', ([], {'name': '"""ml_plactice.ch5.tb-bmi"""'}), "(name='ml_plactice.ch5.tb-bmi')\n", (1221, 1252), False, 'from scripts.utils.tf_utils import TensorFlowScheduler\n'), ((551, 567), 'tensorflow.zeros', 'tf.zeros', (['[2, 3]'], {}), '([2, 3])\n', (559, 567), True, 'import tensorflow as tf\n'), ((585, 598), 'tensorflow.zeros', 'tf.zeros', (['[3]'], {}), '([3])\n', (593, 598), True, 'import tensorflow as tf\n'), ((801, 816), 'tensorflow.argmax', 'tf.argmax', (['y', '(1)'], {}), '(y, 1)\n', (810, 816), True, 'import tensorflow as tf\n'), ((818, 834), 'tensorflow.argmax', 'tf.argmax', (['y_', '(1)'], {}), '(y_, 1)\n', (827, 834), True, 'import tensorflow as tf\n'), ((862, 890), 'tensorflow.cast', 'tf.cast', (['predict', 'tf.float32'], {}), '(predict, tf.float32)\n', (869, 890), True, 'import tensorflow as tf\n'), ((1083, 1112), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ([], {}), '()\n', (1110, 1112), True, 'import tensorflow as tf\n'), ((317, 336), 'numpy.array', 'np.array', (['bclass[x]'], {}), '(bclass[x])\n', (325, 336), True, 'import numpy as np\n'), ((618, 633), 'tensorflow.matmul', 'tf.matmul', (['x', 'W'], {}), '(x, W)\n', (627, 633), True, 'import tensorflow as tf\n'), ((676, 685), 'tensorflow.log', 'tf.log', (['y'], {}), '(y)\n', (682, 685), True, 'import tensorflow as tf\n')]
|
from e2e.scripts.config import MOCK_IDP_BASE_URL
from e2e.scripts import config
import pytest
from api_test_utils.oauth_helper import OauthHelper
from api_test_utils.apigee_api_trace import ApigeeApiTraceDebug
from api_test_utils.apigee_api_apps import ApigeeApiDeveloperApps
from api_test_utils.apigee_api_products import ApigeeApiProducts
import asyncio
@pytest.fixture(scope="class")
async def test_app_and_product():
apigee_product = ApigeeApiProducts()
apigee_product2 = ApigeeApiProducts()
await apigee_product.create_new_product()
await apigee_product.update_proxies([config.SERVICE_NAME])
await apigee_product2.create_new_product()
await apigee_product2.update_proxies([config.SERVICE_NAME])
apigee_app = ApigeeApiDeveloperApps()
await apigee_app.create_new_app()
# Set default JWT Testing resource url
await apigee_app.set_custom_attributes(
{
"jwks-resource-url": "https://raw.githubusercontent.com/NHSDigital/"
"identity-service-jwks/main/jwks/internal-dev/"
"9baed6f4-1361-4a8e-8531-1f8426e3aba8.json"
}
)
await apigee_app.add_api_product(
api_products=[apigee_product.name, apigee_product2.name]
)
[
await product.update_ratelimits(
quota=60000,
quota_interval="1",
quota_time_unit="minute",
rate_limit="1000ps",
)
for product in [apigee_product, apigee_product2]
]
yield apigee_product, apigee_product2, apigee_app
await apigee_app.destroy_app()
await apigee_product.destroy_product()
await apigee_product2.destroy_product()
@pytest.fixture(scope="class")
def event_loop(request):
loop = asyncio.new_event_loop()
yield loop
loop.close()
@pytest.mark.asyncio
class TestClientCredentialsHappyCases:
@pytest.mark.apm_1701
@pytest.mark.happy_path
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes, expected_filtered_scopes",
[
# Scenario 1: one product with valid scope
(
["urn:nhsd:apim:app:level3:personal-demographics"],
[],
["urn:nhsd:apim:app:level3:personal-demographics"],
),
# Scenario 2: one product with valid scope, one product with invalid scope
(
["urn:nhsd:apim:app:level3:personal-demographics-service"],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:personal-demographics-service"],
),
# Scenario 3: multiple products with valid scopes
(
["urn:nhsd:apim:app:level3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
),
# Scenario 4: one product with multiple valid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
[
"urn:nhsd:apim:app:level3:personal-demographics",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
),
# Scenario 5: multiple products with multiple valid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
[
"urn:nhsd:apim:app:level3:personal-demographics",
"urn:nhsd:apim:app:level3:ambulance-analytics",
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
),
# Scenario 6: one product with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:app:level3:ambulance-analytics",
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
],
[],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
),
# Scenario 7: multiple products with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:app:level3:ambulance-analytics",
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
],
[
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:example-2",
],
[
"urn:nhsd:apim:app:level3:ambulance-analytics",
"urn:nhsd:apim:app:level3:example-1",
],
),
# Scenario 8: one product with valid scope with trailing and leading spaces
(
[" urn:nhsd:apim:app:level3:ambulance-analytics "],
[],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
),
],
)
async def test_valid_application_restricted_scope_combination(
self,
product_1_scopes,
product_2_scopes,
expected_filtered_scopes,
test_app_and_product,
):
test_product, test_product2, test_app = test_app_and_product
apigee_trace = ApigeeApiTraceDebug(proxy=config.SERVICE_NAME)
await test_product.update_scopes(product_1_scopes)
await test_product2.update_scopes(product_2_scopes)
jwt = self.oauth.create_jwt(kid="test-1", client_id=test_app.client_id)
await apigee_trace.start_trace()
resp = await self.oauth.get_token_response(
grant_type="client_credentials", _jwt=jwt
)
application_scope = await apigee_trace.get_apigee_variable_from_trace(
name="apigee.application_restricted_scopes"
)
assert (
application_scope is not None
), "variable apigee.user_restricted_scopes not found in the trace"
application_scope = application_scope.split(" ")
assert list(resp["body"].keys()) == [
"access_token",
"expires_in",
"token_type",
"issued_at",
]
assert resp["status_code"] == 200
assert application_scope.sort() == expected_filtered_scopes.sort()
@pytest.mark.asyncio
class TestClientCredentialsErrorCases:
@pytest.mark.apm_1701
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes",
[
# Scenario 1: multiple products with no scopes
([], []),
# Scenario 2: one product with test_user_restricted_scope_combinationinvalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service"], []),
# Scenario 3: multiple products with invalid scopes
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
["urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics"],
),
# Scenario 4: one product with multiple invalid scopes
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
[],
),
# Scenario 5: multiple products with multiple invalid scopes
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:example-2",
],
),
# Scenario 6: one product with invalid scope (wrong formation)
(["ThisDoesNotExist"], []),
# Scenario 7: one product with invalid scope (special caracters)
(["#£$?!&%*.;@~_-"], []),
# Scenario 8: one product with invalid scope (empty string)
([""], []),
# Scenario 8: one product with invalid scope (None object)
([None], []),
# Scenario 9: one product with invalid scope (missing colon)
(["urn:nshd:apim:app:level3personal-demographics-service"], []),
],
)
def test_error_application_restricted_scope_combination(
self, product_1_scopes, product_2_scopes, test_app_and_product, event_loop
):
test_product, test_product2, test_app = test_app_and_product
event_loop.run_until_complete(test_product.update_scopes(product_1_scopes))
event_loop.run_until_complete(test_product2.update_scopes(product_2_scopes))
resp = event_loop.run_until_complete(
self.oauth.get_token_response(
grant_type="client_credentials",
_jwt=self.oauth.create_jwt(kid="test-1", client_id=test_app.client_id),
)
)
assert resp["status_code"] == 401
assert resp["body"] == {
"error": "unauthorized_client",
"error_description": "you have tried to requests authorization but your "
"application is not configured to use this authorization grant type",
}
@pytest.mark.asyncio
class TestAuthorizationCodeCis2HappyCases:
@pytest.mark.apm_1701
@pytest.mark.happy_path
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes, expected_filtered_scopes",
[
# Scenario 1: one product with valid scope
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
[],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
# Scenario 2: one product with valid scope, one product with invalid scope
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
# Scenario 3: multiple products with valid scopes
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
["urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics"],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
),
# Scenario 4: one product with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
[],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
),
# Scenario 5: multiple products with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:example-2",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:example-2",
],
),
# Scenario 6: one product with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
# Scenario 7: multiple products with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
],
),
# Scenario 8: one product with valid scope with trailing and leading spaces
(
[" urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service "],
[],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
],
)
async def test_cis2_user_restricted_scope_combination(
self,
product_1_scopes,
product_2_scopes,
expected_filtered_scopes,
test_app_and_product,
helper,
):
test_product, test_product2, test_app = test_app_and_product
await test_product.update_scopes(product_1_scopes)
await test_product2.update_scopes(product_2_scopes)
apigee_trace = ApigeeApiTraceDebug(proxy=config.SERVICE_NAME)
callback_url = await test_app.get_callback_url()
oauth = OauthHelper(test_app.client_id, test_app.client_secret, callback_url)
apigee_trace.add_trace_filter(
header_name="Auto-Test-Header", header_value="flow-callback"
)
await apigee_trace.start_trace()
assert helper.check_endpoint(
verb="POST",
endpoint=f"{config.OAUTH_URL}/token",
expected_status_code=200,
expected_response=[
"access_token",
"expires_in",
"refresh_count",
"refresh_token",
"refresh_token_expires_in",
"sid",
"token_type",
],
data={
"client_id": test_app.get_client_id(),
"client_secret": test_app.get_client_secret(),
"redirect_uri": callback_url,
"grant_type": "authorization_code",
"code": await oauth.get_authenticated_with_simulated_auth(),
},
)
user_restricted_scopes = await apigee_trace.get_apigee_variable_from_trace(
name="apigee.user_restricted_scopes"
)
assert (
user_restricted_scopes is not None
), "variable apigee.user_restricted_scopes not found in the trace"
user_restricted_scopes = user_restricted_scopes.split(" ")
assert expected_filtered_scopes.sort() == user_restricted_scopes.sort()
@pytest.mark.asyncio
class TestAuthorizationCodeCis2ErrorCases:
@pytest.mark.apm_1701
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes",
[
# Scenario 1: multiple products with no scopes
([], []),
# Scenario 2: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service"], []),
# Scenario 3: multiple products with invalid scopes
(
["urn:nhsd:apim:app:level3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
),
# Scenario 4: one product with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
),
# Scenario 5: multiple products with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
),
# Scenario 6: one product with invalid scope (wrong formation)
(["ThisDoesNotExist"], []),
# Scenario 7: one product with invalid scope (special characters)
(["#£$?!&%*.;@~_-"], []),
# Scenario 8: one product with invalid scope (empty string)
([""], []),
# Scenario 8: one product with invalid scope (None object)
([None], []),
# Scenario 9: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user:aal3personal-demographics-service"], []),
],
)
@pytest.mark.parametrize("auth_method", [(None)])
async def test_cis2_error_user_restricted_scope_combination(
self, product_1_scopes, product_2_scopes, test_app_and_product, helper, auth_code_nhs_cis2
):
test_product, test_product2, test_app = test_app_and_product
# Given
expected_status_code = 401
expected_error = "unauthorized_client"
expected_error_description = "you have tried to requests authorization but your application is not configured to use this authorization grant type"
# When
await test_product.update_scopes(product_1_scopes)
await test_product2.update_scopes(product_2_scopes)
state = await auth_code_nhs_cis2.get_state(self.oauth, test_app)
# Make simulated auth request to authenticate and Make initial callback request
auth_code = await auth_code_nhs_cis2.make_auth_request(self.oauth, state)
await auth_code_nhs_cis2.make_callback_request(self.oauth, state, auth_code)
response = auth_code_nhs_cis2.response
# Then
assert expected_status_code == response["status_code"]
assert expected_error == response["body"]["error"]
assert expected_error_description == response["body"]["error_description"]
@pytest.mark.asyncio
class TestTokenExchangeCis2ErrorCases:
@pytest.mark.token_exchange
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes",
[
# Scenario 1: multiple products with no scopes
([], []),
# Scenario 2: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service"], []),
# Scenario 3: multiple products with invalid scopes
(
["urn:nhsd:apim:app:level3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
),
# Scenario 4: one product with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
),
# Scenario 5: multiple products with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
),
# Scenario 6: one product with invalid scope (wrong formation)
(["ThisDoesNotExist"], []),
# Scenario 7: one product with invalid scope (special characters)
(["#£$?!&%*.;@~_-"], []),
# Scenario 8: one product with invalid scope (empty string)
([""], []),
# Scenario 8: one product with invalid scope (None object)
([None], []),
# Scenario 9: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user:aal3personal-demographics-service"], []),
],
)
async def test_cis2_token_exchange_error_user_restricted_scope_combination(
self, get_token_cis2_token_exchange
):
expected_status_code = 401
expected_error = "unauthorized_client"
expected_error_description = (
"you have tried to requests authorization but your "
"application is not configured to use this authorization grant type"
)
# When
resp = get_token_cis2_token_exchange
# Then
assert expected_status_code == resp["status_code"]
assert expected_error == resp["body"]["error"]
assert expected_error_description == resp["body"]["error_description"]
@pytest.mark.asyncio
class TestTokenExchangeCis2HappyCases:
@pytest.mark.token_exchange
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes, expected_filtered_scopes",
[
# Scenario 1: one product with valid scope
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
[],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
# Scenario 2: one product with valid scope, one product with invalid scope
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
# Scenario 3: multiple products with valid scopes
(
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
["urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics"],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
),
# Scenario 4: one product with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
[],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
),
# Scenario 5: multiple products with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:example-2",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics",
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:user-nhs-id:aal3:example-2",
],
),
# Scenario 6: one product with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
# Scenario 7: multiple products with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
[
"urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service",
"urn:nhsd:apim:user-nhs-id:aal3:example-1",
],
),
# Scenario 8: one product with valid scope with trailing and leading spaces
(
[" urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service "],
[],
["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"],
),
],
)
async def test_cis2_token_exchange_user_restricted_scope_combination(
self,
expected_filtered_scopes,
apigee_start_trace,
get_token_cis2_token_exchange,
):
expected_status_code = 200
expected_expires_in = "599"
expected_token_type = "Bearer"
expected_issued_token_type = "urn:ietf:params:oauth:token-type:access_token"
# When
resp = get_token_cis2_token_exchange
apigee_trace = apigee_start_trace
filtered_scopes = await apigee_trace.get_apigee_variable_from_trace(
name="apigee.user_restricted_scopes"
)
assert (
filtered_scopes is not None
), "variable apigee.user_restricted_scopes not found in the trace"
filtered_scopes = filtered_scopes.split(" ")
# Then
assert expected_status_code == resp["status_code"], resp["body"]
assert "access_token" in resp["body"]
assert expected_expires_in == resp["body"]["expires_in"]
assert expected_token_type == resp["body"]["token_type"]
assert expected_issued_token_type == resp["body"]["issued_token_type"]
assert expected_filtered_scopes.sort() == filtered_scopes.sort()
@pytest.mark.asyncio
class TestClientCredentialsRemoveExternalScopes:
@pytest.mark.parametrize(
"external_scope",
[
# passing in external scopes via form params
"invavlid scope",
"$£$12vdg@@fd",
" external scope",
[
"urn:nhsd:apim:user:aal3personal-demographics-service",
"urn:nhsd:apim:app:level3:example-2",
],
],
)
async def test_client_credentials_flow_remove_external_scopes(
self, test_app_and_product, external_scope
):
product_scope = ["urn:nhsd:apim:app:level3:personal-demographics"]
test_product, test_product2, test_app = test_app_and_product
await test_product.update_scopes(product_scope)
await test_product2.update_scopes(product_scope)
jwt = self.oauth.create_jwt(kid="test-1", client_id=test_app.client_id)
data = {
"scope": external_scope,
"grant_type": "client_credentials",
"client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
"client_assertion": jwt,
}
resp = await self.oauth.get_token_response(
grant_type="client_credentials", data=data
)
assert list(resp["body"].keys()) == [
"access_token",
"expires_in",
"token_type",
"issued_at",
]
assert resp["status_code"] == 200
@pytest.mark.asyncio
class TestTokenExchangeRemoveExternalScopes:
@pytest.mark.parametrize(
"external_scope",
[
# passing in external scopes via form params
"invavlid scope",
"$£$12vdg@@fd",
" external scope",
[
"urn:nhsd:apim:user:aal3personal-demographics-service",
"urn:nhsd:apim:app:level3:example-2",
],
],
)
async def test_token_exchange_remove_external_scopes(self, external_scope):
client_assertion_jwt = self.oauth.create_jwt(kid="test-1")
id_token_jwt = self.oauth.create_id_token_jwt()
data = {
"scope": external_scope,
"grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
"subject_token_type": "urn:ietf:params:oauth:token-type:id_token",
"client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
"subject_token": id_token_jwt,
"client_assertion": client_assertion_jwt,
}
resp = await self.oauth.get_token_response(
grant_type="token_exchange", data=data
)
assert resp["status_code"] == 200
@pytest.mark.asyncio
class TestAuthorizationCodeRemoveExternalScopes:
@pytest.mark.parametrize(
"external_scope",
[
# passing in external scopes via form params
"invavlid scope",
"$£$12vdg@@fd",
" external scope",
[
"urn:nhsd:apim:user:aal3personal-demographics-service",
"urn:nhsd:apim:app:level3:example-2",
],
],
)
async def test_authorization_code_flow_remove_external_scopes(
self, test_app_and_product, helper, external_scope
):
product_scope = ["urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service"]
test_product, test_product2, test_app = test_app_and_product
await test_product.update_scopes(product_scope)
await test_product2.update_scopes(product_scope)
callback_url = await test_app.get_callback_url()
oauth = OauthHelper(test_app.client_id, test_app.client_secret, callback_url)
assert helper.check_endpoint(
verb="POST",
endpoint=f"{config.OAUTH_URL}/token",
expected_status_code=200,
expected_response=[
"access_token",
"expires_in",
"refresh_count",
"refresh_token",
"refresh_token_expires_in",
"sid",
"token_type",
],
data={
"scope": external_scope,
"client_id": test_app.get_client_id(),
"client_secret": test_app.get_client_secret(),
"redirect_uri": callback_url,
"grant_type": "authorization_code",
"code": await oauth.get_authenticated_with_simulated_auth(),
},
)
@pytest.mark.asyncio
class TestTokenExchangeNhsLoginHappyCases:
@pytest.mark.token_exchange
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes, expected_filtered_scopes",
[
# Scenario 1: one product with valid scope
(
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
[],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
# Scenario 2: one product with valid scope, one product with invalid scope
(
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
# Scenario 3: multiple products with valid scopes
(
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
["urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics"],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service,"
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics"
],
),
# Scenario 4: one product with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
],
[],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
],
),
# Scenario 5: multiple products with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-login:P9:example-1",
"urn:nhsd:apim:user-nhs-login:P9:example-2",
],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
"urn:nhsd:apim:user-nhs-login:P9:example-1",
"urn:nhsd:apim:user-nhs-login:P9:example-2",
],
),
# Scenario 6: one product with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
# Scenario 7: multiple products with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-login:P9:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:example-1",
],
),
# Scenario 8: one product with valid scope with trailing and leading spaces
(
[" urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service "],
[],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
],
)
async def test_nhs_login_token_exchange_user_restricted_scope_combination(
self,
apigee_start_trace,
get_token_nhs_login_token_exchange,
expected_filtered_scopes,
):
expected_status_code = 200
expected_expires_in = "599"
expected_token_type = "Bearer"
expected_issued_token_type = "urn:ietf:params:oauth:token-type:access_token"
# When
resp = get_token_nhs_login_token_exchange
apigee_trace = apigee_start_trace
filtered_scopes = await apigee_trace.get_apigee_variable_from_trace(
name="apigee.user_restricted_scopes"
)
assert (
filtered_scopes is not None
), "variable apigee.user_restricted_scopes not found in the trace"
filtered_scopes = filtered_scopes.split(" ")
# Then
assert expected_status_code == resp["status_code"], resp["body"]
assert "access_token" in resp["body"]
assert expected_expires_in == resp["body"]["expires_in"]
assert expected_token_type == resp["body"]["token_type"]
assert expected_issued_token_type == resp["body"]["issued_token_type"]
assert expected_filtered_scopes.sort() == filtered_scopes.sort()
@pytest.mark.asyncio
class TestTokenExchangeNhsLoginErrorCases:
@pytest.mark.token_exchange
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes",
[
# Scenario 1: multiple products with no scopes
([], []),
# Scenario 2: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-login:P0:personal-demographics-service"], []),
# Scenario 3: multiple products with invalid scopes
(
["urn:nhsd:apim:app:level3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
),
# Scenario 4: one product with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
),
# Scenario 5: multiple products with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
),
# Scenario 6: one product with invalid scope (wrong formation)
(["ThisDoesNotExist"], []),
# Scenario 7: one product with invalid scope (special characters)
(["#£$?!&%*.;@~_-"], []),
# Scenario 8: one product with invalid scope (empty string)
([""], []),
# Scenario 8: one product with invalid scope (None object)
([None], []),
# Scenario 9: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-login:P0personal-demographics-service"], []),
],
)
async def test_nhs_login_token_exchange_error_user_restricted_scope_combination(
self, get_token_nhs_login_token_exchange
):
expected_status_code = 401
expected_error = "unauthorized_client"
expected_error_description = (
"you have tried to requests authorization but your "
"application is not configured to use this authorization grant type"
)
# When
resp = get_token_nhs_login_token_exchange
# Then
assert expected_status_code == resp["status_code"]
assert expected_error == resp["body"]["error"]
assert expected_error_description == resp["body"]["error_description"]
@pytest.mark.asyncio
class TestAuthorizationCodeNhsLoginHappyCases:
@pytest.mark.happy_path
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes, expected_filtered_scopes",
[
# Scenario 1: one product with valid scope
(
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
[],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
# Scenario 2: one product with valid scope, one product with invalid scope
(
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
# Scenario 3: multiple products with valid scopes
(
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
["urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics"],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service,"
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics"
],
),
# Scenario 4: one product with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
],
[],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
],
),
# Scenario 5: multiple products with multiple valid scopes
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-login:P9:example-1",
"urn:nhsd:apim:user-nhs-login:P9:example-2",
],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics",
"urn:nhsd:apim:user-nhs-login:P9:example-1",
"urn:nhsd:apim:user-nhs-login:P9:example-2",
],
),
# Scenario 6: one product with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
# Scenario 7: multiple products with multiple scopes (valid and invalid)
(
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:user-nhs-login:P9:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
[
"urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service",
"urn:nhsd:apim:user-nhs-login:P9:example-1",
],
),
# Scenario 8: one product with valid scope with trailing and leading spaces
(
[" urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service "],
[],
["urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service"],
),
],
)
@pytest.mark.parametrize("auth_method", [('P9')])
async def test_nhs_login_user_restricted_scope_combination(
self,
product_1_scopes,
product_2_scopes,
expected_filtered_scopes,
test_app_and_product,
helper,
auth_code_nhs_login,
):
test_product, test_product2, test_app = test_app_and_product
await test_product.update_scopes(product_1_scopes)
await test_product2.update_scopes(product_2_scopes)
apigee_trace = ApigeeApiTraceDebug(proxy=config.SERVICE_NAME)
callback_url = await test_app.get_callback_url()
state = await auth_code_nhs_login.get_state(self.oauth, test_app)
auth_code = await auth_code_nhs_login.make_auth_request(self.oauth, state)
await apigee_trace.start_trace()
await auth_code_nhs_login.make_callback_request(self.oauth, state, auth_code)
user_restricted_scopes = await apigee_trace.get_apigee_variable_from_trace(
name="apigee.user_restricted_scopes"
)
assert (
user_restricted_scopes is not None
), "variable apigee.user_restricted_scopes not found in the trace"
user_restricted_scopes = user_restricted_scopes.split(" ")
assert expected_filtered_scopes.sort() == user_restricted_scopes.sort()
@pytest.mark.asyncio
class TestAuthorizationCodeNhsLoginErrorCases:
@pytest.mark.errors
@pytest.mark.parametrize(
"product_1_scopes, product_2_scopes",
[
# Scenario 1: multiple products with no scopes
([], []),
# Scenario 2: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-login:P0:personal-demographics-service"], []),
# Scenario 3: multiple products with invalid scopes
(
["urn:nhsd:apim:app:level3:personal-demographics-service"],
["urn:nhsd:apim:app:level3:ambulance-analytics"],
),
# Scenario 4: one product with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[],
),
# Scenario 5: multiple products with multiple invalid scopes
(
[
"urn:nhsd:apim:app:level3:personal-demographics-service",
"urn:nhsd:apim:app:level3:ambulance-analytics",
],
[
"urn:nhsd:apim:app:level3:example-1",
"urn:nhsd:apim:app:level3:example-2",
],
),
# Scenario 6: one product with invalid scope (wrong formation)
(["ThisDoesNotExist"], []),
# Scenario 7: one product with invalid scope (special characters)
(["#£$?!&%*.;@~_-"], []),
# Scenario 8: one product with invalid scope (empty string)
([""], []),
# Scenario 8: one product with invalid scope (None object)
([None], []),
# Scenario 9: one product with invalid scope, one product with no scope
(["urn:nhsd:apim:user-nhs-login:P0personal-demographics-service"], []),
],
)
@pytest.mark.parametrize("auth_method", [("P9")])
async def test_nhs_login_user_restricted_error_scope_combination(
self, product_1_scopes, product_2_scopes, test_app_and_product, helper, auth_code_nhs_login
):
test_product, test_product2, test_app = test_app_and_product
expected_status_code = 401
expected_error = "unauthorized_client"
expected_error_description = (
"you have tried to requests authorization but your "
"application is not configured to use this authorization grant type"
)
await test_product.update_scopes(product_1_scopes)
await test_product2.update_scopes(product_2_scopes)
state = await auth_code_nhs_login.get_state(self.oauth, test_app)
# Make simulated auth request to authenticate and Make initial callback request
auth_code = await auth_code_nhs_login.make_auth_request(self.oauth, state)
await auth_code_nhs_login.make_callback_request(self.oauth, state, auth_code)
response = auth_code_nhs_login.response
assert expected_status_code == response["status_code"]
assert expected_error == response["body"]["error"]
assert expected_error_description == response["body"]["error_description"]
|
[
"api_test_utils.oauth_helper.OauthHelper",
"pytest.fixture",
"api_test_utils.apigee_api_products.ApigeeApiProducts",
"api_test_utils.apigee_api_trace.ApigeeApiTraceDebug",
"pytest.mark.parametrize",
"api_test_utils.apigee_api_apps.ApigeeApiDeveloperApps",
"asyncio.new_event_loop"
] |
[((359, 388), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""'}), "(scope='class')\n", (373, 388), False, 'import pytest\n'), ((1658, 1687), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""'}), "(scope='class')\n", (1672, 1687), False, 'import pytest\n'), ((444, 463), 'api_test_utils.apigee_api_products.ApigeeApiProducts', 'ApigeeApiProducts', ([], {}), '()\n', (461, 463), False, 'from api_test_utils.apigee_api_products import ApigeeApiProducts\n'), ((486, 505), 'api_test_utils.apigee_api_products.ApigeeApiProducts', 'ApigeeApiProducts', ([], {}), '()\n', (503, 505), False, 'from api_test_utils.apigee_api_products import ApigeeApiProducts\n'), ((744, 768), 'api_test_utils.apigee_api_apps.ApigeeApiDeveloperApps', 'ApigeeApiDeveloperApps', ([], {}), '()\n', (766, 768), False, 'from api_test_utils.apigee_api_apps import ApigeeApiDeveloperApps\n'), ((1724, 1748), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (1746, 1748), False, 'import asyncio\n'), ((1902, 3790), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes, expected_filtered_scopes"""', "[(['urn:nhsd:apim:app:level3:personal-demographics'], [], [\n 'urn:nhsd:apim:app:level3:personal-demographics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:personal-demographics-service']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), ([\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:app:level3:example-1']), ([\n ' urn:nhsd:apim:app:level3:ambulance-analytics '], [], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'])]"], {}), "(\n 'product_1_scopes, product_2_scopes, expected_filtered_scopes', [([\n 'urn:nhsd:apim:app:level3:personal-demographics'], [], [\n 'urn:nhsd:apim:app:level3:personal-demographics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:personal-demographics-service']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:app:level3:personal-demographics',\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), ([\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics',\n 'urn:nhsd:apim:app:level3:example-1']), ([\n ' urn:nhsd:apim:app:level3:ambulance-analytics '], [], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'])])\n", (1925, 3790), False, 'import pytest\n'), ((6928, 7732), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes"""', "[([], []), (['urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service'\n ], []), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2']), (['ThisDoesNotExist'], []\n ), (['#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nshd:apim:app:level3personal-demographics-service'], [])]"], {}), "('product_1_scopes, product_2_scopes', [([], []), ([\n 'urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service'], []), (\n ['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2']), (['ThisDoesNotExist'], []\n ), (['#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nshd:apim:app:level3personal-demographics-service'], [])])\n", (6951, 7732), False, 'import pytest\n'), ((9970, 12079), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes, expected_filtered_scopes"""', "[(['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n ' urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'])]"], {}), "(\n 'product_1_scopes, product_2_scopes, expected_filtered_scopes', [([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n ' urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'])])\n", (9993, 12079), False, 'import pytest\n'), ((15875, 16630), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes"""', "[([], []), (['urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service'\n ], []), (['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user:aal3personal-demographics-service'], [])]"], {}), "('product_1_scopes, product_2_scopes', [([], []), ([\n 'urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service'], []), (\n ['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user:aal3personal-demographics-service'], [])])\n", (15898, 16630), False, 'import pytest\n'), ((17783, 17829), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""auth_method"""', '[None]'], {}), "('auth_method', [None])\n", (17806, 17829), False, 'import pytest\n'), ((19198, 19953), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes"""', "[([], []), (['urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service'\n ], []), (['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user:aal3personal-demographics-service'], [])]"], {}), "('product_1_scopes, product_2_scopes', [([], []), ([\n 'urn:nhsd:apim:user-nhs-id:aal2:personal-demographics-service'], []), (\n ['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user:aal3personal-demographics-service'], [])])\n", (19221, 19953), False, 'import pytest\n'), ((21902, 24011), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes, expected_filtered_scopes"""', "[(['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1']), ([\n ' urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'])]"], {}), "(\n 'product_1_scopes, product_2_scopes, expected_filtered_scopes', [([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-2']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-id:aal3:example-1']), ([\n ' urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-id:aal3:personal-demographics-service'])])\n", (21925, 24011), False, 'import pytest\n'), ((27027, 27235), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""external_scope"""', "['invavlid scope', '$£$12vdg@@fd', ' external scope', [\n 'urn:nhsd:apim:user:aal3personal-demographics-service',\n 'urn:nhsd:apim:app:level3:example-2']]"], {}), "('external_scope', ['invavlid scope', '$£$12vdg@@fd',\n ' external scope', [\n 'urn:nhsd:apim:user:aal3personal-demographics-service',\n 'urn:nhsd:apim:app:level3:example-2']])\n", (27050, 27235), False, 'import pytest\n'), ((28513, 28721), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""external_scope"""', "['invavlid scope', '$£$12vdg@@fd', ' external scope', [\n 'urn:nhsd:apim:user:aal3personal-demographics-service',\n 'urn:nhsd:apim:app:level3:example-2']]"], {}), "('external_scope', ['invavlid scope', '$£$12vdg@@fd',\n ' external scope', [\n 'urn:nhsd:apim:user:aal3personal-demographics-service',\n 'urn:nhsd:apim:app:level3:example-2']])\n", (28536, 28721), False, 'import pytest\n'), ((29745, 29953), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""external_scope"""', "['invavlid scope', '$£$12vdg@@fd', ' external scope', [\n 'urn:nhsd:apim:user:aal3personal-demographics-service',\n 'urn:nhsd:apim:app:level3:example-2']]"], {}), "('external_scope', ['invavlid scope', '$£$12vdg@@fd',\n ' external scope', [\n 'urn:nhsd:apim:user:aal3personal-demographics-service',\n 'urn:nhsd:apim:app:level3:example-2']])\n", (29768, 29953), False, 'import pytest\n'), ((31606, 33736), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes, expected_filtered_scopes"""', "[(['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service,urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'\n ]), (['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1']), ([\n ' urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'])]"], {}), "(\n 'product_1_scopes, product_2_scopes, expected_filtered_scopes', [([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service,urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'\n ]), (['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1']), ([\n ' urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'])])\n", (31629, 33736), False, 'import pytest\n'), ((36822, 37585), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes"""', "[([], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0:personal-demographics-service'], []),\n (['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0personal-demographics-service'], [])]"], {}), "('product_1_scopes, product_2_scopes', [([], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0:personal-demographics-service'], []),\n (['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0personal-demographics-service'], [])])\n", (36845, 37585), False, 'import pytest\n'), ((39529, 41659), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes, expected_filtered_scopes"""', "[(['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service,urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'\n ]), (['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1']), ([\n ' urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'])]"], {}), "(\n 'product_1_scopes, product_2_scopes, expected_filtered_scopes', [([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'], [\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service,urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'\n ]), (['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:ambulance-analytics',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:user-nhs-login:P9:example-2']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service']), ([\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:user-nhs-login:P9:example-1',\n 'urn:nhsd:apim:app:level3:example-2'], [\n 'urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service',\n 'urn:nhsd:apim:user-nhs-login:P9:example-1']), ([\n ' urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service '], [],\n ['urn:nhsd:apim:user-nhs-login:P9:personal-demographics-service'])])\n", (39552, 41659), False, 'import pytest\n'), ((43375, 43421), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""auth_method"""', "['P9']"], {}), "('auth_method', ['P9'])\n", (43398, 43421), False, 'import pytest\n'), ((44812, 45575), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""product_1_scopes, product_2_scopes"""', "[([], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0:personal-demographics-service'], []),\n (['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0personal-demographics-service'], [])]"], {}), "('product_1_scopes, product_2_scopes', [([], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0:personal-demographics-service'], []),\n (['urn:nhsd:apim:app:level3:personal-demographics-service'], [\n 'urn:nhsd:apim:app:level3:ambulance-analytics']), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], []), ([\n 'urn:nhsd:apim:app:level3:personal-demographics-service',\n 'urn:nhsd:apim:app:level3:ambulance-analytics'], [\n 'urn:nhsd:apim:app:level3:example-1',\n 'urn:nhsd:apim:app:level3:example-2']), (['ThisDoesNotExist'], []), ([\n '#£$?!&%*.;@~_-'], []), ([''], []), ([None], []), ([\n 'urn:nhsd:apim:user-nhs-login:P0personal-demographics-service'], [])])\n", (44835, 45575), False, 'import pytest\n'), ((46729, 46775), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""auth_method"""', "['P9']"], {}), "('auth_method', ['P9'])\n", (46752, 46775), False, 'import pytest\n'), ((5790, 5836), 'api_test_utils.apigee_api_trace.ApigeeApiTraceDebug', 'ApigeeApiTraceDebug', ([], {'proxy': 'config.SERVICE_NAME'}), '(proxy=config.SERVICE_NAME)\n', (5809, 5836), False, 'from api_test_utils.apigee_api_trace import ApigeeApiTraceDebug\n'), ((14208, 14254), 'api_test_utils.apigee_api_trace.ApigeeApiTraceDebug', 'ApigeeApiTraceDebug', ([], {'proxy': 'config.SERVICE_NAME'}), '(proxy=config.SERVICE_NAME)\n', (14227, 14254), False, 'from api_test_utils.apigee_api_trace import ApigeeApiTraceDebug\n'), ((14329, 14398), 'api_test_utils.oauth_helper.OauthHelper', 'OauthHelper', (['test_app.client_id', 'test_app.client_secret', 'callback_url'], {}), '(test_app.client_id, test_app.client_secret, callback_url)\n', (14340, 14398), False, 'from api_test_utils.oauth_helper import OauthHelper\n'), ((30607, 30676), 'api_test_utils.oauth_helper.OauthHelper', 'OauthHelper', (['test_app.client_id', 'test_app.client_secret', 'callback_url'], {}), '(test_app.client_id, test_app.client_secret, callback_url)\n', (30618, 30676), False, 'from api_test_utils.oauth_helper import OauthHelper\n'), ((43882, 43928), 'api_test_utils.apigee_api_trace.ApigeeApiTraceDebug', 'ApigeeApiTraceDebug', ([], {'proxy': 'config.SERVICE_NAME'}), '(proxy=config.SERVICE_NAME)\n', (43901, 43928), False, 'from api_test_utils.apigee_api_trace import ApigeeApiTraceDebug\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import Enum
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy.orm import relationship
from sqlalchemy import String
from nailgun.db.sqlalchemy.models.base import Base
from nailgun.db.sqlalchemy.models.fields import JSON
class IPAddr(Base):
__tablename__ = 'ip_addrs'
id = Column(Integer, primary_key=True)
network = Column(Integer, ForeignKey('network_groups.id',
ondelete="CASCADE"))
node = Column(Integer, ForeignKey('nodes.id', ondelete="CASCADE"))
ip_addr = Column(String(25), nullable=False)
network_data = relationship("NetworkGroup")
node_data = relationship("Node")
class IPAddrRange(Base):
__tablename__ = 'ip_addr_ranges'
id = Column(Integer, primary_key=True)
network_group_id = Column(Integer, ForeignKey('network_groups.id'))
first = Column(String(25), nullable=False)
last = Column(String(25), nullable=False)
class NetworkGroup(Base):
__tablename__ = 'network_groups'
NAMES = (
# Node networks
'fuelweb_admin',
'storage',
# internal in terms of fuel
'management',
'public',
# private in terms of fuel
'fixed',
'private'
)
id = Column(Integer, primary_key=True)
name = Column(Enum(*NAMES, name='network_group_name'), nullable=False)
# can be nullable only for fuelweb admin net
release = Column(Integer, ForeignKey('releases.id'))
# can be nullable only for fuelweb admin net
cluster_id = Column(Integer, ForeignKey('clusters.id'))
vlan_start = Column(Integer)
cidr = Column(String(25))
gateway = Column(String(25))
ip_ranges = relationship(
"IPAddrRange",
backref="network_group",
cascade="all, delete"
)
nodes = relationship(
"Node",
secondary=IPAddr.__table__,
backref="networks")
meta = Column(JSON, default={})
class NetworkNICAssignment(Base):
__tablename__ = 'net_nic_assignments'
id = Column(Integer, primary_key=True)
network_id = Column(
Integer,
ForeignKey('network_groups.id', ondelete="CASCADE"),
nullable=False
)
interface_id = Column(
Integer,
ForeignKey('node_nic_interfaces.id', ondelete="CASCADE"),
nullable=False
)
class NetworkBondAssignment(Base):
__tablename__ = 'net_bond_assignments'
id = Column(Integer, primary_key=True)
network_id = Column(
Integer,
ForeignKey('network_groups.id', ondelete="CASCADE"),
nullable=False
)
bond_id = Column(
Integer,
ForeignKey('node_bond_interfaces.id', ondelete="CASCADE"),
nullable=False
)
|
[
"sqlalchemy.Enum",
"sqlalchemy.ForeignKey",
"sqlalchemy.orm.relationship",
"sqlalchemy.Column",
"sqlalchemy.String"
] |
[((995, 1028), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1001, 1028), False, 'from sqlalchemy import Column\n'), ((1293, 1321), 'sqlalchemy.orm.relationship', 'relationship', (['"""NetworkGroup"""'], {}), "('NetworkGroup')\n", (1305, 1321), False, 'from sqlalchemy.orm import relationship\n'), ((1338, 1358), 'sqlalchemy.orm.relationship', 'relationship', (['"""Node"""'], {}), "('Node')\n", (1350, 1358), False, 'from sqlalchemy.orm import relationship\n'), ((1432, 1465), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1438, 1465), False, 'from sqlalchemy import Column\n'), ((1941, 1974), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1947, 1974), False, 'from sqlalchemy import Column\n'), ((2282, 2297), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (2288, 2297), False, 'from sqlalchemy import Column\n'), ((2377, 2452), 'sqlalchemy.orm.relationship', 'relationship', (['"""IPAddrRange"""'], {'backref': '"""network_group"""', 'cascade': '"""all, delete"""'}), "('IPAddrRange', backref='network_group', cascade='all, delete')\n", (2389, 2452), False, 'from sqlalchemy.orm import relationship\n'), ((2495, 2563), 'sqlalchemy.orm.relationship', 'relationship', (['"""Node"""'], {'secondary': 'IPAddr.__table__', 'backref': '"""networks"""'}), "('Node', secondary=IPAddr.__table__, backref='networks')\n", (2507, 2563), False, 'from sqlalchemy.orm import relationship\n'), ((2600, 2624), 'sqlalchemy.Column', 'Column', (['JSON'], {'default': '{}'}), '(JSON, default={})\n', (2606, 2624), False, 'from sqlalchemy import Column\n'), ((2712, 2745), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (2718, 2745), False, 'from sqlalchemy import Column\n'), ((3106, 3139), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (3112, 3139), False, 'from sqlalchemy import Column\n'), ((1059, 1110), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""network_groups.id"""'], {'ondelete': '"""CASCADE"""'}), "('network_groups.id', ondelete='CASCADE')\n", (1069, 1110), False, 'from sqlalchemy import ForeignKey\n'), ((1180, 1222), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""nodes.id"""'], {'ondelete': '"""CASCADE"""'}), "('nodes.id', ondelete='CASCADE')\n", (1190, 1222), False, 'from sqlalchemy import ForeignKey\n'), ((1245, 1255), 'sqlalchemy.String', 'String', (['(25)'], {}), '(25)\n', (1251, 1255), False, 'from sqlalchemy import String\n'), ((1505, 1536), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""network_groups.id"""'], {}), "('network_groups.id')\n", (1515, 1536), False, 'from sqlalchemy import ForeignKey\n'), ((1557, 1567), 'sqlalchemy.String', 'String', (['(25)'], {}), '(25)\n', (1563, 1567), False, 'from sqlalchemy import String\n'), ((1603, 1613), 'sqlalchemy.String', 'String', (['(25)'], {}), '(25)\n', (1609, 1613), False, 'from sqlalchemy import String\n'), ((1993, 2032), 'sqlalchemy.Enum', 'Enum', (['*NAMES'], {'name': '"""network_group_name"""'}), "(*NAMES, name='network_group_name')\n", (1997, 2032), False, 'from sqlalchemy import Enum\n'), ((2129, 2154), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""releases.id"""'], {}), "('releases.id')\n", (2139, 2154), False, 'from sqlalchemy import ForeignKey\n'), ((2238, 2263), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""clusters.id"""'], {}), "('clusters.id')\n", (2248, 2263), False, 'from sqlalchemy import ForeignKey\n'), ((2316, 2326), 'sqlalchemy.String', 'String', (['(25)'], {}), '(25)\n', (2322, 2326), False, 'from sqlalchemy import String\n'), ((2349, 2359), 'sqlalchemy.String', 'String', (['(25)'], {}), '(25)\n', (2355, 2359), False, 'from sqlalchemy import String\n'), ((2796, 2847), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""network_groups.id"""'], {'ondelete': '"""CASCADE"""'}), "('network_groups.id', ondelete='CASCADE')\n", (2806, 2847), False, 'from sqlalchemy import ForeignKey\n'), ((2930, 2986), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""node_nic_interfaces.id"""'], {'ondelete': '"""CASCADE"""'}), "('node_nic_interfaces.id', ondelete='CASCADE')\n", (2940, 2986), False, 'from sqlalchemy import ForeignKey\n'), ((3190, 3241), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""network_groups.id"""'], {'ondelete': '"""CASCADE"""'}), "('network_groups.id', ondelete='CASCADE')\n", (3200, 3241), False, 'from sqlalchemy import ForeignKey\n'), ((3319, 3376), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""node_bond_interfaces.id"""'], {'ondelete': '"""CASCADE"""'}), "('node_bond_interfaces.id', ondelete='CASCADE')\n", (3329, 3376), False, 'from sqlalchemy import ForeignKey\n')]
|
#!/usr/bin/env python
# bug_gc.py - test for refcounting/GC bug
#
# Copyright (C) 2010-2011 <NAME> <<EMAIL>>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import psycopg2
import psycopg2.extensions
import unittest
import gc
from .testutils import ConnectingTestCase, skip_if_no_uuid
class StolenReferenceTestCase(ConnectingTestCase):
@skip_if_no_uuid
def test_stolen_reference_bug(self):
def fish(val, cur):
gc.collect()
return 42
UUID = psycopg2.extensions.new_type((2950,), "UUID", fish)
psycopg2.extensions.register_type(UUID, self.conn)
curs = self.conn.cursor()
curs.execute("select 'b5219e01-19ab-4994-b71e-149225dc51e4'::uuid")
curs.fetchone()
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"psycopg2.extensions.register_type",
"gc.collect",
"psycopg2.extensions.new_type",
"unittest.TestLoader"
] |
[((1683, 1698), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1696, 1698), False, 'import unittest\n'), ((1326, 1377), 'psycopg2.extensions.new_type', 'psycopg2.extensions.new_type', (['(2950,)', '"""UUID"""', 'fish'], {}), "((2950,), 'UUID', fish)\n", (1354, 1377), False, 'import psycopg2\n'), ((1386, 1436), 'psycopg2.extensions.register_type', 'psycopg2.extensions.register_type', (['UUID', 'self.conn'], {}), '(UUID, self.conn)\n', (1419, 1436), False, 'import psycopg2\n'), ((1276, 1288), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1286, 1288), False, 'import gc\n'), ((1601, 1622), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (1620, 1622), False, 'import unittest\n')]
|
'''
This file implements the Binary Search Tree data structure.
The functions in this file are considerably harder than the
functions in the BinaryTree file.
'''
from containers.BinaryTree import BinaryTree, Node
class BST(BinaryTree):
'''
The BST is a superclass of BinaryTree.
That means that the BST class "inherits" all of the
methods from BinaryTree,
and we don't have to reimplement them.
'''
def __init__(self, xs=None):
'''
FIXME:
If xs is a list (i.e. xs is not None),
then each element of xs needs to be inserted into the BST.
'''
super().__init__()
self.root = None
if xs:
self.insert_list(xs)
def __repr__(self):
'''
Notice that in the BinaryTree class,
we defined a __str__ function,
but not a __repr__ function.
Recall that the __repr__ function should return a
string that can be used to recreate a valid instance of the class.
Thus, if you create a variable using the command BST([1,2,3])
it's __repr__ will return "BST([1,2,3])"
For the BST, type(self).__name__ will be the string "BST",
but for the AVLTree, this expression will be "AVLTree".
Using this expression ensures that all subclasses
of BST will have a correct implementation of __repr__,
and that they won't have to reimplement it.
'''
return type(self).__name__ + '(' + str(self.to_list('inorder')) + ')'
def __eq__(self, t2):
'''
This method checks to see if the contents of self and t2 are equal.
The expression `a == b` desugars to `a.__eq__(b)`.
NOTE:
We only care about "semantic" equality,
and not "syntactic" equality.
That is, we do not care about the tree structure itself,
and only care about the contents of what the tree contains.
HINT:
Convert the contents of both trees into a sorted list,
then compare those sorted lists for equality.
'''
return self.to_list('inorder') == t2.to_list('inorder')
def is_bst_satisfied(self):
'''
Whenever you implement a data structure,
the first thing to do is to implement a function that checks whether
the structure obeys all of its laws.
This makes it possible to automatically test whether
insert/delete functions
are actually working.
'''
if self.root:
return BST._is_bst_satisfied(self.root)
return True
@staticmethod
def _is_bst_satisfied(node):
'''
FIXME:
The current implementation has a bug:
it only checks if the children of the current node
are less than/greater than,
rather than ensuring that all nodes to the left/right
are less than/greater than.
HINT:
Use the _find_smallest and _find_largest functions to fix the bug.
You should use the _ prefixed methods because those
are static methods just like this one.
'''
ret = True
if node.left:
if node.value >= BST._find_largest(node.left):
ret &= BST._is_bst_satisfied(node.left)
else:
ret = False
if node.right:
if node.value <= BST._find_smallest(node.right):
ret &= BST._is_bst_satisfied(node.right)
else:
ret = False
return ret
def insert(self, value):
'''
Inserts value into the BST.
FIXME:
Implement this function.
HINT:
Create a staticmethod helper function following the
pattern of _is_bst_satisfied.
'''
if self.root is None:
self.root = Node(value)
else:
BST._insert(value, self.root)
@staticmethod
def _insert(value, node):
if value < node.value:
if node.left is None:
node.left = Node(value)
else:
BST._insert(value, node.left)
elif value > node.value:
if node.right is None:
node.right = Node(value)
else:
BST._insert(value, node.right)
def insert_list(self, xs):
'''
Given a list xs, insert each element of xs into self.
FIXME:
Implement this function.
HINT:
Repeatedly call the insert method.
You cannot get this method to work correctly until you
have gotten insert to work correctly.
'''
for n in xs:
self.insert(n)
def __contains__(self, value):
'''
Recall that `x in tree` desugars to `tree.__contains__(x)`.
'''
return self.find(value)
def find(self, value):
'''
Returns whether value is contained in the BST.
FIXME:
Implement this function.
'''
if self.root:
if BST._find(value, self.root):
return True
else:
return False
@staticmethod
def _find(value, node):
'''
FIXME:
Implement this function.
'''
if value > node.value and node.right:
return BST._find(value, node.right)
elif value < node.value and node.left:
return BST._find(value, node.left)
if value == node.value:
return True
def find_smallest(self):
'''
Returns the smallest value in the tree.
'''
if self.root is None:
raise ValueError('Nothing in tree')
else:
return BST._find_smallest(self.root)
@staticmethod
def _find_smallest(node):
'''
This is a helper function for find_smallest and not
intended to be called directly by the user.
'''
assert node is not None
if node.left is None:
return node.value
else:
return BST._find_smallest(node.left)
def find_largest(self):
'''
Returns the largest value in the tree.
FIXME:
Implement this function.
HINT:
Follow the pattern of the _find_smallest function.
'''
if self.root is None:
raise ValueError('Nothing in tree')
else:
return BST._find_largest(self.root)
@staticmethod
def _find_largest(node):
assert node is not None
if node.right is None:
return node.value
else:
return BST._find_largest(node.right)
def remove(self, value):
'''
Removes value from the BST.
If value is not in the BST, it does nothing.
FIXME:
Implement this function.
HINT:
You should have everything else working before you
implement this function.
HINT:
Use a recursive helper function.
'''
self.root = BST._remove(self.root, value)
@staticmethod
def _remove(node, value):
if not node:
return node
if node.value > value:
node.left = BST._remove(node.left, value)
elif node.value < value:
node.right = BST._remove(node.right, value)
else:
if not node.right:
return node.left
if not node.left:
return node.right
temp_code = node.right
while temp_code.left:
temp_code = temp_code.left
node.value = temp_code.value
node.right = BST._remove(node.right, node.value)
return node
def remove_list(self, xs):
'''
Given a list xs, remove each element of xs from self.
FIXME:
Implement this function.
HINT:
See the insert_list function.
'''
for n in xs:
self.remove(n)
|
[
"containers.BinaryTree.Node"
] |
[((3820, 3831), 'containers.BinaryTree.Node', 'Node', (['value'], {}), '(value)\n', (3824, 3831), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((4030, 4041), 'containers.BinaryTree.Node', 'Node', (['value'], {}), '(value)\n', (4034, 4041), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((4203, 4214), 'containers.BinaryTree.Node', 'Node', (['value'], {}), '(value)\n', (4207, 4214), False, 'from containers.BinaryTree import BinaryTree, Node\n')]
|
import numpy as np
import pandas as pd
class Dataset:
"""A helper class to load various datasets.
Properties created with the ``@property`` decorator should be documented
in the property's getter method.
Attributes:
train_data (:obj:`pandas.DataFrame`): The training data without
the labels.
train_target (:obj:`numpy.ndarray`): The output variable for the
training data.
test_data (:obj:`pandas.DataFrame`): The testing data without
the labels.
test_target (:obj:`numpy..ndarray`): The output variable for the
test data.
Args:
f_train (str): The path to the file containing the training
dataset.
f_test (str): The path to the file containing the testing
dataset.
label (str, optional): The column in which the target variable
is located in the files. Defaults to `Label`.
Raises:
FileNotFoundError: if the supplied files are inexistent.
IndexError: if the label provided does not match any column in
the file.
"""
def __init__(self, f_train, f_test, label='Label'):
# Load the datasets
self.train_data = pd.read_csv(f_train)
self.test_data = pd.read_csv(f_test)
# Retrieve the relevant parts
self.train_target = self.train_data[label].to_numpy()
self.train_data = self.train_data.drop(columns=[label]).to_numpy()
self.test_target = self.test_data[label].to_numpy()
self.test_data = self.test_data.drop(columns=[label]).to_numpy()
|
[
"pandas.read_csv"
] |
[((1231, 1251), 'pandas.read_csv', 'pd.read_csv', (['f_train'], {}), '(f_train)\n', (1242, 1251), True, 'import pandas as pd\n'), ((1277, 1296), 'pandas.read_csv', 'pd.read_csv', (['f_test'], {}), '(f_test)\n', (1288, 1296), True, 'import pandas as pd\n')]
|
"""Runs healthcheck and reaps instances that are unhealthy."""
import collections
import logging
import os
import subprocess
import time
import urllib.request
import urllib.parse
import urllib.error
import click
from treadmill import cli
from treadmill import context
from treadmill import restclient
from treadmill import utils
_LOGGER = logging.getLogger(__name__)
_DEFAULT_INTERVAL = '1m'
_CLOSE_FDS = os.name != 'nt'
def _health_check(pattern, proto, endpoint, command):
"""Invoke instance health check."""
stateapi = context.GLOBAL.state_api()
stateurl = '/endpoint/%s/%s/%s' % (urllib.parse.quote(pattern),
proto,
endpoint)
response = restclient.get(stateapi, stateurl)
lines = [
'%s %s' % (end['name'], '%s:%s' % (end['host'], end['port']))
for end in response.json()
]
cmd_input = '\n'.join(lines)
bad = []
try:
proc = subprocess.Popen(
command,
close_fds=_CLOSE_FDS, shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
)
(out, _err) = proc.communicate(cmd_input)
retcode = proc.returncode
if proc.returncode == 0:
for instance in out.splitlines():
_LOGGER.info('not ok: %s', instance)
bad.append(instance)
else:
_LOGGER.warn('Health check ignored. %r, rc: %s.',
command, retcode)
except Exception: # pylint: disable=W0703
_LOGGER.exception('Error invoking: %r', command)
return bad
def _reap(bad):
"""Delete instances that did not path health check."""
if not bad:
return []
cellapi = context.GLOBAL.cell_api()
try:
for instance in bad:
_LOGGER.info('Delete: %s', instance)
restclient.post(cellapi, '/instance/_bulk/delete',
payload=dict(instances=bad))
return bad
except Exception: # pylint: disable=W0703
_LOGGER.exception('Error reaping: %r', bad)
return []
def init():
"""Return top level command handler"""
@click.command()
@click.option('--cell', required=True,
envvar='TREADMILL_CELL',
callback=cli.handle_context_opt,
expose_value=False)
@click.option('--once', help='Run once.', is_flag=True, default=False)
@click.option('--interval', help='Wait interval between checks.',
default=_DEFAULT_INTERVAL)
@click.option('--threshold', help='Number of failed checks before reap.',
default=1)
@click.option('--proto', help='Endpoint protocol.', default='tcp',
type=click.Choice(['tcp', 'udp']))
@click.argument('pattern')
@click.argument('endpoint')
@click.argument('command', nargs=-1)
def reaper(once, interval, threshold, proto, pattern, endpoint, command):
"""Removes unhealthy instances of the app.
The health check script reads from STDIN and prints to STDOUT.
The input it list of instance host:port, similar to discovery.
Output - list of instances that did not pass health check.
For example, specifying awk '{print $1}' as COMMAND will remove all
instances.
"""
command = list(command)
failed = collections.Counter()
while True:
failed.update(_health_check(pattern, proto, endpoint, command))
for instance, count in failed.items():
_LOGGER.info('Failed: %s, count: %s', instance, count)
reaped = _reap([instance for instance, count in failed.items()
if count > threshold])
for instance in reaped:
del failed[instance]
if once:
break
time.sleep(utils.to_seconds(interval))
return reaper
|
[
"treadmill.utils.to_seconds",
"subprocess.Popen",
"click.argument",
"treadmill.context.GLOBAL.state_api",
"click.option",
"click.command",
"click.Choice",
"collections.Counter",
"treadmill.context.GLOBAL.cell_api",
"logging.getLogger",
"treadmill.restclient.get"
] |
[((346, 373), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (363, 373), False, 'import logging\n'), ((541, 567), 'treadmill.context.GLOBAL.state_api', 'context.GLOBAL.state_api', ([], {}), '()\n', (565, 567), False, 'from treadmill import context\n'), ((747, 781), 'treadmill.restclient.get', 'restclient.get', (['stateapi', 'stateurl'], {}), '(stateapi, stateurl)\n', (761, 781), False, 'from treadmill import restclient\n'), ((1763, 1788), 'treadmill.context.GLOBAL.cell_api', 'context.GLOBAL.cell_api', ([], {}), '()\n', (1786, 1788), False, 'from treadmill import context\n'), ((2188, 2203), 'click.command', 'click.command', ([], {}), '()\n', (2201, 2203), False, 'import click\n'), ((2209, 2329), 'click.option', 'click.option', (['"""--cell"""'], {'required': '(True)', 'envvar': '"""TREADMILL_CELL"""', 'callback': 'cli.handle_context_opt', 'expose_value': '(False)'}), "('--cell', required=True, envvar='TREADMILL_CELL', callback=cli\n .handle_context_opt, expose_value=False)\n", (2221, 2329), False, 'import click\n'), ((2384, 2453), 'click.option', 'click.option', (['"""--once"""'], {'help': '"""Run once."""', 'is_flag': '(True)', 'default': '(False)'}), "('--once', help='Run once.', is_flag=True, default=False)\n", (2396, 2453), False, 'import click\n'), ((2459, 2555), 'click.option', 'click.option', (['"""--interval"""'], {'help': '"""Wait interval between checks."""', 'default': '_DEFAULT_INTERVAL'}), "('--interval', help='Wait interval between checks.', default=\n _DEFAULT_INTERVAL)\n", (2471, 2555), False, 'import click\n'), ((2574, 2661), 'click.option', 'click.option', (['"""--threshold"""'], {'help': '"""Number of failed checks before reap."""', 'default': '(1)'}), "('--threshold', help='Number of failed checks before reap.',\n default=1)\n", (2586, 2661), False, 'import click\n'), ((2805, 2830), 'click.argument', 'click.argument', (['"""pattern"""'], {}), "('pattern')\n", (2819, 2830), False, 'import click\n'), ((2836, 2862), 'click.argument', 'click.argument', (['"""endpoint"""'], {}), "('endpoint')\n", (2850, 2862), False, 'import click\n'), ((2868, 2903), 'click.argument', 'click.argument', (['"""command"""'], {'nargs': '(-1)'}), "('command', nargs=-1)\n", (2882, 2903), False, 'import click\n'), ((977, 1089), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'close_fds': '_CLOSE_FDS', 'shell': '(False)', 'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE'}), '(command, close_fds=_CLOSE_FDS, shell=False, stdin=\n subprocess.PIPE, stdout=subprocess.PIPE)\n', (993, 1089), False, 'import subprocess\n'), ((3403, 3424), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (3422, 3424), False, 'import collections\n'), ((2770, 2798), 'click.Choice', 'click.Choice', (["['tcp', 'udp']"], {}), "(['tcp', 'udp'])\n", (2782, 2798), False, 'import click\n'), ((3912, 3938), 'treadmill.utils.to_seconds', 'utils.to_seconds', (['interval'], {}), '(interval)\n', (3928, 3938), False, 'from treadmill import utils\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'TriggerTriggerArgs',
]
@pulumi.input_type
class TriggerTriggerArgs:
def __init__(__self__, *,
destination_arn: pulumi.Input[str],
events: pulumi.Input[Sequence[pulumi.Input[str]]],
name: pulumi.Input[str],
branches: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
custom_data: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] destination_arn: The ARN of the resource that is the target for a trigger. For example, the ARN of a topic in Amazon Simple Notification Service (SNS).
:param pulumi.Input[Sequence[pulumi.Input[str]]] events: The repository events that will cause the trigger to run actions in another service, such as sending a notification through Amazon Simple Notification Service (SNS). If no events are specified, the trigger will run for all repository events. Event types include: `all`, `updateReference`, `createReference`, `deleteReference`.
:param pulumi.Input[str] name: The name of the trigger.
:param pulumi.Input[Sequence[pulumi.Input[str]]] branches: The branches that will be included in the trigger configuration. If no branches are specified, the trigger will apply to all branches.
:param pulumi.Input[str] custom_data: Any custom data associated with the trigger that will be included in the information sent to the target of the trigger.
"""
pulumi.set(__self__, "destination_arn", destination_arn)
pulumi.set(__self__, "events", events)
pulumi.set(__self__, "name", name)
if branches is not None:
pulumi.set(__self__, "branches", branches)
if custom_data is not None:
pulumi.set(__self__, "custom_data", custom_data)
@property
@pulumi.getter(name="destinationArn")
def destination_arn(self) -> pulumi.Input[str]:
"""
The ARN of the resource that is the target for a trigger. For example, the ARN of a topic in Amazon Simple Notification Service (SNS).
"""
return pulumi.get(self, "destination_arn")
@destination_arn.setter
def destination_arn(self, value: pulumi.Input[str]):
pulumi.set(self, "destination_arn", value)
@property
@pulumi.getter
def events(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
The repository events that will cause the trigger to run actions in another service, such as sending a notification through Amazon Simple Notification Service (SNS). If no events are specified, the trigger will run for all repository events. Event types include: `all`, `updateReference`, `createReference`, `deleteReference`.
"""
return pulumi.get(self, "events")
@events.setter
def events(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "events", value)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the trigger.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def branches(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The branches that will be included in the trigger configuration. If no branches are specified, the trigger will apply to all branches.
"""
return pulumi.get(self, "branches")
@branches.setter
def branches(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "branches", value)
@property
@pulumi.getter(name="customData")
def custom_data(self) -> Optional[pulumi.Input[str]]:
"""
Any custom data associated with the trigger that will be included in the information sent to the target of the trigger.
"""
return pulumi.get(self, "custom_data")
@custom_data.setter
def custom_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_data", value)
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set"
] |
[((2159, 2195), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""destinationArn"""'}), "(name='destinationArn')\n", (2172, 2195), False, 'import pulumi\n'), ((4009, 4041), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""customData"""'}), "(name='customData')\n", (4022, 4041), False, 'import pulumi\n'), ((1807, 1863), 'pulumi.set', 'pulumi.set', (['__self__', '"""destination_arn"""', 'destination_arn'], {}), "(__self__, 'destination_arn', destination_arn)\n", (1817, 1863), False, 'import pulumi\n'), ((1872, 1910), 'pulumi.set', 'pulumi.set', (['__self__', '"""events"""', 'events'], {}), "(__self__, 'events', events)\n", (1882, 1910), False, 'import pulumi\n'), ((1919, 1953), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (1929, 1953), False, 'import pulumi\n'), ((2430, 2465), 'pulumi.get', 'pulumi.get', (['self', '"""destination_arn"""'], {}), "(self, 'destination_arn')\n", (2440, 2465), False, 'import pulumi\n'), ((2560, 2602), 'pulumi.set', 'pulumi.set', (['self', '"""destination_arn"""', 'value'], {}), "(self, 'destination_arn', value)\n", (2570, 2602), False, 'import pulumi\n'), ((3078, 3104), 'pulumi.get', 'pulumi.get', (['self', '"""events"""'], {}), "(self, 'events')\n", (3088, 3104), False, 'import pulumi\n'), ((3205, 3238), 'pulumi.set', 'pulumi.set', (['self', '"""events"""', 'value'], {}), "(self, 'events', value)\n", (3215, 3238), False, 'import pulumi\n'), ((3386, 3410), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (3396, 3410), False, 'import pulumi\n'), ((3483, 3514), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (3493, 3514), False, 'import pulumi\n'), ((3810, 3838), 'pulumi.get', 'pulumi.get', (['self', '"""branches"""'], {}), "(self, 'branches')\n", (3820, 3838), False, 'import pulumi\n'), ((3953, 3988), 'pulumi.set', 'pulumi.set', (['self', '"""branches"""', 'value'], {}), "(self, 'branches', value)\n", (3963, 3988), False, 'import pulumi\n'), ((4267, 4298), 'pulumi.get', 'pulumi.get', (['self', '"""custom_data"""'], {}), "(self, 'custom_data')\n", (4277, 4298), False, 'import pulumi\n'), ((4395, 4433), 'pulumi.set', 'pulumi.set', (['self', '"""custom_data"""', 'value'], {}), "(self, 'custom_data', value)\n", (4405, 4433), False, 'import pulumi\n'), ((1999, 2041), 'pulumi.set', 'pulumi.set', (['__self__', '"""branches"""', 'branches'], {}), "(__self__, 'branches', branches)\n", (2009, 2041), False, 'import pulumi\n'), ((2090, 2138), 'pulumi.set', 'pulumi.set', (['__self__', '"""custom_data"""', 'custom_data'], {}), "(__self__, 'custom_data', custom_data)\n", (2100, 2138), False, 'import pulumi\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from todo import views
urlpatterns = patterns('',
url(r'^manage/$', views.ManageView, name='todo-manage'),
url(r'^manage/add/$', views.ManageAddView, name='todo-manage-add'),
url(r'^manage/done/(?P<todoid>\d+)$', views.ManageDoneView, name='todo-manage-done'),
url(r'^manage/delete/(?P<todoid>\d+)$', views.ManageDeleteView, name='todo-manage-delete'),
url(r'^manage/update/(?P<todoid>\d+)$', views.ManageUpdateView, name='todo-manage-update'),
)
|
[
"django.conf.urls.url"
] |
[((148, 202), 'django.conf.urls.url', 'url', (['"""^manage/$"""', 'views.ManageView'], {'name': '"""todo-manage"""'}), "('^manage/$', views.ManageView, name='todo-manage')\n", (151, 202), False, 'from django.conf.urls import patterns, url\n'), ((213, 278), 'django.conf.urls.url', 'url', (['"""^manage/add/$"""', 'views.ManageAddView'], {'name': '"""todo-manage-add"""'}), "('^manage/add/$', views.ManageAddView, name='todo-manage-add')\n", (216, 278), False, 'from django.conf.urls import patterns, url\n'), ((289, 378), 'django.conf.urls.url', 'url', (['"""^manage/done/(?P<todoid>\\\\d+)$"""', 'views.ManageDoneView'], {'name': '"""todo-manage-done"""'}), "('^manage/done/(?P<todoid>\\\\d+)$', views.ManageDoneView, name=\n 'todo-manage-done')\n", (292, 378), False, 'from django.conf.urls import patterns, url\n'), ((383, 478), 'django.conf.urls.url', 'url', (['"""^manage/delete/(?P<todoid>\\\\d+)$"""', 'views.ManageDeleteView'], {'name': '"""todo-manage-delete"""'}), "('^manage/delete/(?P<todoid>\\\\d+)$', views.ManageDeleteView, name=\n 'todo-manage-delete')\n", (386, 478), False, 'from django.conf.urls import patterns, url\n'), ((483, 578), 'django.conf.urls.url', 'url', (['"""^manage/update/(?P<todoid>\\\\d+)$"""', 'views.ManageUpdateView'], {'name': '"""todo-manage-update"""'}), "('^manage/update/(?P<todoid>\\\\d+)$', views.ManageUpdateView, name=\n 'todo-manage-update')\n", (486, 578), False, 'from django.conf.urls import patterns, url\n')]
|
from __future__ import absolute_import
import responses
from mock import patch
from exam import fixture
from django.test import RequestFactory
from sentry.integrations.github.integration import GitHubIntegration
from sentry.models import Integration, ExternalIssue
from sentry.testutils import TestCase
from sentry.utils import json
class GitHubIssueBasicTest(TestCase):
@fixture
def request(self):
return RequestFactory()
def setUp(self):
self.user = self.create_user()
self.organization = self.create_organization(owner=self.user)
self.model = Integration.objects.create(
provider='github',
external_id='github_external_id',
name='getsentry',
)
self.model.add_organization(self.organization.id)
self.integration = GitHubIntegration(self.model, self.organization.id)
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_get_allowed_assignees(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.GET,
'https://api.github.com/repos/getsentry/sentry/assignees',
json=[{'login': 'MeredithAnya'}]
)
repo = 'getsentry/sentry'
assert self.integration.get_allowed_assignees(repo) == (
('', 'Unassigned'),
('MeredithAnya', 'MeredithAnya')
)
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_create_issue(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.POST,
'https://api.github.com/repos/getsentry/sentry/issues',
json={'number': 321, 'title': 'hello', 'body': 'This is the description'}
)
form_data = {
'repo': 'getsentry/sentry',
'title': 'hello',
'description': 'This is the description',
}
assert self.integration.create_issue(form_data) == {
'key': 321,
'description': 'This is the description',
'title': 'hello',
'repo': 'getsentry/sentry',
}
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
payload = json.loads(request.body)
assert payload == {'body': 'This is the description', 'assignee': None, 'title': 'hello'}
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_get_repo_issues(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.GET,
'https://api.github.com/repos/getsentry/sentry/issues',
json=[{'number': 321, 'title': 'hello', 'body': 'This is the description'}]
)
repo = 'getsentry/sentry'
assert self.integration.get_repo_issues(repo) == ((321, '#321 hello'),)
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def test_link_issue(self, mock_get_jwt):
issue_id = 321
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.GET,
'https://api.github.com/repos/getsentry/sentry/issues/321',
json={'number': issue_id, 'title': 'hello', 'body': 'This is the description'}
)
data = {
'repo': 'getsentry/sentry',
'externalIssue': issue_id,
'comment': 'hello',
}
assert self.integration.get_issue(issue_id, data=data) == {
'key': issue_id,
'description': 'This is the description',
'title': 'hello',
'repo': 'getsentry/sentry',
}
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
@responses.activate
@patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
def after_link_issue(self, mock_get_jwt):
responses.add(
responses.POST,
'https://api.github.com/installations/github_external_id/access_tokens',
json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}
)
responses.add(
responses.POST,
'https://api.github.com/repos/getsentry/sentry/issues/321/comments',
json={'body': 'hello'}
)
data = {'comment': 'hello'}
external_issue = ExternalIssue.objects.create(
organization_id=self.organization.id,
integration_id=self.model.id,
key='hello#321',
)
self.integration.after_link_issue(external_issue, data=data)
request = responses.calls[0].request
assert request.headers['Authorization'] == 'Bearer jwt_token_1'
request = responses.calls[1].request
assert request.headers['Authorization'] == 'token token_1'
payload = json.loads(request.body)
assert payload == {'body': 'hello'}
|
[
"sentry.models.Integration.objects.create",
"django.test.RequestFactory",
"sentry.models.ExternalIssue.objects.create",
"responses.add",
"mock.patch",
"sentry.utils.json.loads",
"sentry.integrations.github.integration.GitHubIntegration"
] |
[((908, 986), 'mock.patch', 'patch', (['"""sentry.integrations.github.client.get_jwt"""'], {'return_value': '"""jwt_token_1"""'}), "('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')\n", (913, 986), False, 'from mock import patch\n'), ((1890, 1968), 'mock.patch', 'patch', (['"""sentry.integrations.github.client.get_jwt"""'], {'return_value': '"""jwt_token_1"""'}), "('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')\n", (1895, 1968), False, 'from mock import patch\n'), ((3232, 3310), 'mock.patch', 'patch', (['"""sentry.integrations.github.client.get_jwt"""'], {'return_value': '"""jwt_token_1"""'}), "('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')\n", (3237, 3310), False, 'from mock import patch\n'), ((4175, 4253), 'mock.patch', 'patch', (['"""sentry.integrations.github.client.get_jwt"""'], {'return_value': '"""jwt_token_1"""'}), "('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')\n", (4180, 4253), False, 'from mock import patch\n'), ((5399, 5477), 'mock.patch', 'patch', (['"""sentry.integrations.github.client.get_jwt"""'], {'return_value': '"""jwt_token_1"""'}), "('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')\n", (5404, 5477), False, 'from mock import patch\n'), ((427, 443), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (441, 443), False, 'from django.test import RequestFactory\n'), ((596, 698), 'sentry.models.Integration.objects.create', 'Integration.objects.create', ([], {'provider': '"""github"""', 'external_id': '"""github_external_id"""', 'name': '"""getsentry"""'}), "(provider='github', external_id=\n 'github_external_id', name='getsentry')\n", (622, 698), False, 'from sentry.models import Integration, ExternalIssue\n'), ((826, 877), 'sentry.integrations.github.integration.GitHubIntegration', 'GitHubIntegration', (['self.model', 'self.organization.id'], {}), '(self.model, self.organization.id)\n', (843, 877), False, 'from sentry.integrations.github.integration import GitHubIntegration\n'), ((1051, 1226), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/installations/github_external_id/access_tokens"""'], {'json': "{'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}"}), "(responses.POST,\n 'https://api.github.com/installations/github_external_id/access_tokens',\n json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'})\n", (1064, 1226), False, 'import responses\n'), ((1274, 1404), 'responses.add', 'responses.add', (['responses.GET', '"""https://api.github.com/repos/getsentry/sentry/assignees"""'], {'json': "[{'login': 'MeredithAnya'}]"}), "(responses.GET,\n 'https://api.github.com/repos/getsentry/sentry/assignees', json=[{\n 'login': 'MeredithAnya'}])\n", (1287, 1404), False, 'import responses\n'), ((2024, 2199), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/installations/github_external_id/access_tokens"""'], {'json': "{'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}"}), "(responses.POST,\n 'https://api.github.com/installations/github_external_id/access_tokens',\n json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'})\n", (2037, 2199), False, 'import responses\n'), ((2247, 2415), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/repos/getsentry/sentry/issues"""'], {'json': "{'number': 321, 'title': 'hello', 'body': 'This is the description'}"}), "(responses.POST,\n 'https://api.github.com/repos/getsentry/sentry/issues', json={'number':\n 321, 'title': 'hello', 'body': 'This is the description'})\n", (2260, 2415), False, 'import responses\n'), ((3079, 3103), 'sentry.utils.json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (3089, 3103), False, 'from sentry.utils import json\n'), ((3369, 3544), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/installations/github_external_id/access_tokens"""'], {'json': "{'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}"}), "(responses.POST,\n 'https://api.github.com/installations/github_external_id/access_tokens',\n json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'})\n", (3382, 3544), False, 'import responses\n'), ((3592, 3761), 'responses.add', 'responses.add', (['responses.GET', '"""https://api.github.com/repos/getsentry/sentry/issues"""'], {'json': "[{'number': 321, 'title': 'hello', 'body': 'This is the description'}]"}), "(responses.GET,\n 'https://api.github.com/repos/getsentry/sentry/issues', json=[{'number':\n 321, 'title': 'hello', 'body': 'This is the description'}])\n", (3605, 3761), False, 'import responses\n'), ((4330, 4505), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/installations/github_external_id/access_tokens"""'], {'json': "{'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}"}), "(responses.POST,\n 'https://api.github.com/installations/github_external_id/access_tokens',\n json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'})\n", (4343, 4505), False, 'import responses\n'), ((4553, 4730), 'responses.add', 'responses.add', (['responses.GET', '"""https://api.github.com/repos/getsentry/sentry/issues/321"""'], {'json': "{'number': issue_id, 'title': 'hello', 'body': 'This is the description'}"}), "(responses.GET,\n 'https://api.github.com/repos/getsentry/sentry/issues/321', json={\n 'number': issue_id, 'title': 'hello', 'body': 'This is the description'})\n", (4566, 4730), False, 'import responses\n'), ((5532, 5707), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/installations/github_external_id/access_tokens"""'], {'json': "{'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'}"}), "(responses.POST,\n 'https://api.github.com/installations/github_external_id/access_tokens',\n json={'token': 'token_1', 'expires_at': '2018-10-11T22:14:10Z'})\n", (5545, 5707), False, 'import responses\n'), ((5755, 5885), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.github.com/repos/getsentry/sentry/issues/321/comments"""'], {'json': "{'body': 'hello'}"}), "(responses.POST,\n 'https://api.github.com/repos/getsentry/sentry/issues/321/comments',\n json={'body': 'hello'})\n", (5768, 5885), False, 'import responses\n'), ((5986, 6103), 'sentry.models.ExternalIssue.objects.create', 'ExternalIssue.objects.create', ([], {'organization_id': 'self.organization.id', 'integration_id': 'self.model.id', 'key': '"""hello#321"""'}), "(organization_id=self.organization.id,\n integration_id=self.model.id, key='hello#321')\n", (6014, 6103), False, 'from sentry.models import Integration, ExternalIssue\n'), ((6466, 6490), 'sentry.utils.json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (6476, 6490), False, 'from sentry.utils import json\n')]
|
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import uuid
import grpc
import pytest
from squeak.params import SelectParams
from proto import squeak_admin_pb2
from proto import squeak_admin_pb2_grpc
from tests.util import bytes_to_base64_string
from tests.util import create_contact_profile
from tests.util import create_saved_peer
from tests.util import create_signing_profile
from tests.util import delete_profile
from tests.util import generate_signing_key
from tests.util import get_address
from tests.util import open_peer_connection
@pytest.fixture(autouse=True)
def select_mainnet_params():
# Set the network to simnet
SelectParams("simnet")
@pytest.fixture
def admin_stub():
with grpc.insecure_channel("squeaknode:8994") as admin_channel:
yield squeak_admin_pb2_grpc.SqueakAdminStub(admin_channel)
@pytest.fixture
def other_admin_stub():
with grpc.insecure_channel("squeaknode_other:8994") as admin_channel:
yield squeak_admin_pb2_grpc.SqueakAdminStub(admin_channel)
@pytest.fixture
def signing_key():
# Create a signing key
yield generate_signing_key()
@pytest.fixture
def squeak_address(signing_key):
yield get_address(signing_key)
# @pytest.fixture
# def profile_name():
# yield "fake_profile_{}".format(uuid.uuid1())
@pytest.fixture
def signing_profile_id(admin_stub, random_name):
# Create a new signing profile
profile_id = create_signing_profile(admin_stub, random_name)
yield profile_id
# Delete the profile
delete_profile(admin_stub, profile_id)
@pytest.fixture
def contact_profile_id(admin_stub, random_name, squeak_address):
# Create a new contact profile
contact_profile_id = create_contact_profile(
admin_stub, random_name, squeak_address)
yield contact_profile_id
# Delete the profile
admin_stub.DeleteSqueakProfile(
squeak_admin_pb2.DeleteSqueakProfileRequest(
profile_id=contact_profile_id,
)
)
@pytest.fixture
def saved_squeak_hash(admin_stub, signing_profile_id):
# Create a new squeak using the new profile
make_squeak_content = "Hello from the profile on the server!"
make_squeak_response = admin_stub.MakeSqueak(
squeak_admin_pb2.MakeSqueakRequest(
profile_id=signing_profile_id,
content=make_squeak_content,
)
)
squeak_hash = make_squeak_response.squeak_hash
yield squeak_hash
# Delete the squeak
admin_stub.DeleteSqueak(
squeak_admin_pb2.DeleteSqueakRequest(
squeak_hash=squeak_hash,
)
)
@pytest.fixture
def peer_id(admin_stub, random_name):
# Create a new peer
peer_id = create_saved_peer(
admin_stub,
random_name,
random_name,
1234,
)
yield peer_id
# Delete the peer
admin_stub.DeletePeer(
squeak_admin_pb2.DeletePeerRequest(
peer_id=peer_id,
)
)
@pytest.fixture
def random_name():
yield "random_name_{}".format(uuid.uuid1())
@pytest.fixture
def random_image():
yield os.urandom(567)
@pytest.fixture
def random_image_base64_string(random_image):
yield bytes_to_base64_string(random_image)
# @pytest.fixture
# def connected_peer_id(other_admin_stub):
# # Add the main node as a peer
# create_peer_response = other_admin_stub.CreatePeer(
# squeak_admin_pb2.CreatePeerRequest(
# peer_name="test_peer",
# host="squeaknode",
# port=8774,
# )
# )
# peer_id = create_peer_response.peer_id
# # Set the peer to be downloading
# other_admin_stub.SetPeerDownloading(
# squeak_admin_pb2.SetPeerDownloadingRequest(
# peer_id=peer_id,
# downloading=True,
# )
# )
# yield peer_id
# # Delete the peer
# other_admin_stub.DeletePeer(
# squeak_admin_pb2.DeletePeerRequest(
# peer_id=peer_id,
# )
# )
@pytest.fixture
def connected_tcp_peer_id(other_admin_stub):
with open_peer_connection(
other_admin_stub,
"test_peer",
"squeaknode",
18777,
):
yield
|
[
"tests.util.bytes_to_base64_string",
"tests.util.open_peer_connection",
"tests.util.delete_profile",
"squeak.params.SelectParams",
"pytest.fixture",
"grpc.insecure_channel",
"proto.squeak_admin_pb2.DeletePeerRequest",
"tests.util.create_contact_profile",
"tests.util.create_signing_profile",
"proto.squeak_admin_pb2.MakeSqueakRequest",
"tests.util.get_address",
"proto.squeak_admin_pb2.DeleteSqueakProfileRequest",
"proto.squeak_admin_pb2.DeleteSqueakRequest",
"tests.util.generate_signing_key",
"uuid.uuid1",
"proto.squeak_admin_pb2_grpc.SqueakAdminStub",
"tests.util.create_saved_peer",
"os.urandom"
] |
[((1608, 1636), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (1622, 1636), False, 'import pytest\n'), ((1702, 1724), 'squeak.params.SelectParams', 'SelectParams', (['"""simnet"""'], {}), "('simnet')\n", (1714, 1724), False, 'from squeak.params import SelectParams\n'), ((2474, 2521), 'tests.util.create_signing_profile', 'create_signing_profile', (['admin_stub', 'random_name'], {}), '(admin_stub, random_name)\n', (2496, 2521), False, 'from tests.util import create_signing_profile\n'), ((2572, 2610), 'tests.util.delete_profile', 'delete_profile', (['admin_stub', 'profile_id'], {}), '(admin_stub, profile_id)\n', (2586, 2610), False, 'from tests.util import delete_profile\n'), ((2754, 2817), 'tests.util.create_contact_profile', 'create_contact_profile', (['admin_stub', 'random_name', 'squeak_address'], {}), '(admin_stub, random_name, squeak_address)\n', (2776, 2817), False, 'from tests.util import create_contact_profile\n'), ((3729, 3790), 'tests.util.create_saved_peer', 'create_saved_peer', (['admin_stub', 'random_name', 'random_name', '(1234)'], {}), '(admin_stub, random_name, random_name, 1234)\n', (3746, 3790), False, 'from tests.util import create_saved_peer\n'), ((1770, 1810), 'grpc.insecure_channel', 'grpc.insecure_channel', (['"""squeaknode:8994"""'], {}), "('squeaknode:8994')\n", (1791, 1810), False, 'import grpc\n'), ((1947, 1993), 'grpc.insecure_channel', 'grpc.insecure_channel', (['"""squeaknode_other:8994"""'], {}), "('squeaknode_other:8994')\n", (1968, 1993), False, 'import grpc\n'), ((2153, 2175), 'tests.util.generate_signing_key', 'generate_signing_key', ([], {}), '()\n', (2173, 2175), False, 'from tests.util import generate_signing_key\n'), ((2237, 2261), 'tests.util.get_address', 'get_address', (['signing_key'], {}), '(signing_key)\n', (2248, 2261), False, 'from tests.util import get_address\n'), ((2925, 2999), 'proto.squeak_admin_pb2.DeleteSqueakProfileRequest', 'squeak_admin_pb2.DeleteSqueakProfileRequest', ([], {'profile_id': 'contact_profile_id'}), '(profile_id=contact_profile_id)\n', (2968, 2999), False, 'from proto import squeak_admin_pb2\n'), ((3274, 3373), 'proto.squeak_admin_pb2.MakeSqueakRequest', 'squeak_admin_pb2.MakeSqueakRequest', ([], {'profile_id': 'signing_profile_id', 'content': 'make_squeak_content'}), '(profile_id=signing_profile_id, content=\n make_squeak_content)\n', (3308, 3373), False, 'from proto import squeak_admin_pb2\n'), ((3544, 3605), 'proto.squeak_admin_pb2.DeleteSqueakRequest', 'squeak_admin_pb2.DeleteSqueakRequest', ([], {'squeak_hash': 'squeak_hash'}), '(squeak_hash=squeak_hash)\n', (3580, 3605), False, 'from proto import squeak_admin_pb2\n'), ((3905, 3956), 'proto.squeak_admin_pb2.DeletePeerRequest', 'squeak_admin_pb2.DeletePeerRequest', ([], {'peer_id': 'peer_id'}), '(peer_id=peer_id)\n', (3939, 3956), False, 'from proto import squeak_admin_pb2\n'), ((4119, 4134), 'os.urandom', 'os.urandom', (['(567)'], {}), '(567)\n', (4129, 4134), False, 'import os\n'), ((4209, 4245), 'tests.util.bytes_to_base64_string', 'bytes_to_base64_string', (['random_image'], {}), '(random_image)\n', (4231, 4245), False, 'from tests.util import bytes_to_base64_string\n'), ((5076, 5148), 'tests.util.open_peer_connection', 'open_peer_connection', (['other_admin_stub', '"""test_peer"""', '"""squeaknode"""', '(18777)'], {}), "(other_admin_stub, 'test_peer', 'squeaknode', 18777)\n", (5096, 5148), False, 'from tests.util import open_peer_connection\n'), ((1843, 1895), 'proto.squeak_admin_pb2_grpc.SqueakAdminStub', 'squeak_admin_pb2_grpc.SqueakAdminStub', (['admin_channel'], {}), '(admin_channel)\n', (1880, 1895), False, 'from proto import squeak_admin_pb2_grpc\n'), ((2026, 2078), 'proto.squeak_admin_pb2_grpc.SqueakAdminStub', 'squeak_admin_pb2_grpc.SqueakAdminStub', (['admin_channel'], {}), '(admin_channel)\n', (2063, 2078), False, 'from proto import squeak_admin_pb2_grpc\n'), ((4057, 4069), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (4067, 4069), False, 'import uuid\n')]
|