"""
Course Grading Settings page.
"""
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.pages.studio.utils import press_the_notification_button
from common.test.acceptance.pages.common.utils import click_css
from selenium.webdriver import ActionChains
from bok_choy.promise import BrokenPromise
class GradingPage(SettingsPage):
"""
Course Grading Settings page.
"""
url_path = "settings/grading"
grade_ranges = '.grades .grade-specific-bar'
assignments = '.field-group.course-grading-assignment-list-item'
def is_browser_on_page(self):
return self.q(css='body.grading').present
def letter_grade(self, selector):
"""
Returns: the letter of the first grade range on the grading page.
Example: with no manually added grades this returns 'Pass'; once a
grade has been added it returns 'A'.
"""
return self.q(css=selector)[0].text
@property
def total_number_of_grades(self):
"""
Gets the total number of grades present in the grades bar.
Returns:
int: the number of grade ranges
"""
self.wait_for_element_visibility(self.grade_ranges, 'Grades are visible')
return len(self.q(css=self.grade_ranges))
def add_new_grade(self):
"""
Add new grade
"""
self.q(css='.new-grade-button').click()
self.save_changes()
def remove_grade(self):
"""
Remove an added grade
"""
# The button is only displayed after hovering over it
btn_css = '.remove-button'
self.browser.execute_script("$('{}').focus().click()".format(btn_css))
self.wait_for_ajax()
self.save_changes()
def remove_grades(self, number_of_grades):
"""
Remove grade ranges from grades bar.
"""
for _ in range(number_of_grades):
self.browser.execute_script('document.getElementsByClassName("remove-button")[0].click()')
def remove_all_grades(self):
"""
Removes all grades
"""
while len(self.q(css='.remove-button')) > 0:
self.remove_grade()
def drag_and_drop_grade(self):
"""
Drag and drop grade range.
"""
self.wait_for_element_visibility(self.grade_ranges, "Grades ranges are visible")
# We use jQuery here to adjust the slider width to the desired
# range because drag and drop has behaved very inconsistently.
# This does not update the range text on the slider, so as a
# workaround we follow up with a drag_and_drop without any offset.
self.browser.execute_script('$(".ui-resizable").css("width","10")')
action = ActionChains(self.browser)
moveable_css = self.q(css='.ui-resizable-e').results[0]
action.drag_and_drop_by_offset(moveable_css, 0, 0).perform()
@property
def get_assignment_names(self):
"""
Get name of the all the assignment types.
Returns:
list: A list containing names of the assignment types.
"""
self.wait_for_element_visibility(
'#course-grading-assignment-name',
'Grade Names not visible.'
)
return self.q(css='#course-grading-assignment-name').attrs('value')
def change_assignment_name(self, old_name, new_name):
"""
Changes the assignment name.
Arguments:
old_name (str): The assignment type name which is to be changed.
new_name (str): New name of the assignment.
"""
self.wait_for_element_visibility('#course-grading-assignment-name', 'Assignment Name field visible')
self.q(css='#course-grading-assignment-name').filter(
lambda el: el.get_attribute('value') == old_name).fill(new_name)
@property
def grade_letters(self):
"""
Get names of grade ranges.
Returns:
list: A list containing names of the grade ranges.
"""
return self.q(css='.letter-grade').text
def click_add_grade(self):
"""
Clicks to add a grade
"""
click_css(self, '.new-grade-button', require_notification=False)
def is_grade_added(self, length):
"""
Checks whether a grade was added by comparing the number of grades
before and after the addition.
Returns:
bool: True if the grade was added, False otherwise
"""
try:
self.wait_for(
lambda: len(self.q(css=self.grade_ranges)) == length + 1,
description="Grades are added",
timeout=3
)
return True
except BrokenPromise:
return False
def add_new_assignment_type(self):
"""
Add New Assignment type
"""
self.q(css='.add-grading-data').click()
self.save_changes()
@property
def grades_range(self):
"""
Get ranges of all the grades.
Returns:
list: A list containing ranges of all the grades
"""
self.wait_for_element_visibility('.range', 'Ranges are visible')
return self.q(css='.range').text
def fill_assignment_type_fields(
self,
name,
abbreviation,
total_grade,
total_number,
drop
):
"""
Fills text to Assignment Type fields according to assignment box
number and text provided
Arguments:
name: Assignment Type Name
abbreviation: Abbreviation
total_grade: Weight of Total Grade
total_number: Total Number
drop: Number of Droppable
"""
self.q(css='#course-grading-assignment-name').fill(name)
self.q(css='#course-grading-assignment-shortname').fill(abbreviation)
self.q(css='#course-grading-assignment-gradeweight').fill(total_grade)
self.q(
css='#course-grading-assignment-totalassignments'
).fill(total_number)
self.q(css='#course-grading-assignment-droppable').fill(drop)
self.save_changes()
def assignment_name_field_value(self):
"""
Returns:
list: Assignment type field value
"""
return self.q(css='#course-grading-assignment-name').attrs('value')
def delete_assignment_type(self):
"""
Deletes Assignment type
"""
self.q(css='.remove-grading-data').first.click()
self.save_changes()
def delete_all_assignment_types(self):
"""
Deletes all assignment types
"""
while len(self.q(css='.remove-grading-data')) > 0:
self.delete_assignment_type()
def get_confirmation_message(self):
"""
Get confirmation message received after saving settings.
"""
self.wait_for_element_visibility('#alert-confirmation-title', 'Confirmation text present')
return self.q(css='#alert-confirmation-title').text[0]
def _get_type_index(self, name):
"""
Gets the index of assignment type.
Arguments:
name(str): name of the assignment
Returns:
int: index of the assignment type
"""
name_id = '#course-grading-assignment-name'
all_types = self.q(css=name_id).results
for index, element in enumerate(all_types):
if element.get_attribute('value') == name:
return index
return -1
def save(self):
"""
Click on save settings button.
"""
press_the_notification_button(self, "Save")
def cancel(self):
"""
Click on cancel settings button.
"""
press_the_notification_button(self, "Cancel")
def refresh_and_wait_for_load(self):
"""
Refresh the page and wait for all resources to load.
"""
self.browser.refresh()
self.wait_for_page()
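# --- Illustrative usage (a sketch, not part of the original page object) ---
# How this page might be driven from a bok-choy acceptance test; the CoursePage
# constructor arguments (org, number, run) are assumed from typical Studio pages.
def _example_add_grade(browser):
    page = GradingPage(browser, 'edX', 'DemoX', 'Demo_Course')
    page.visit()
    grades_before = page.total_number_of_grades
    page.click_add_grade()
    assert page.is_grade_added(grades_before)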
|
from enum import Enum
class InstanceLifecycle(Enum):
ON_DEMAND = "on-demand"
SPOT = "spot"
NA = "N/A"
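# Illustrative round-trip (not part of the original module): Enum members can be
# looked up by their string value, which is handy when parsing provider metadata.
assert InstanceLifecycle("spot") is InstanceLifecycle.SPOT
assert InstanceLifecycle.NA.value == "N/A"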
|
import email_extended
|
__doc__ = """ Concurrently upload the same large file by two sync clients. It may ne necessary to run mulitple times. In owncloud 5.0.10 this testcase triggers a race condition which is reported in the following way:
2013-11-13 15:54:23,039 - INFO - checker - shared w0d1 af27141daa272ef2285695fe8e709d9f
2013-11-13 15:54:23,039 - INFO - checker - shared w0v1 19987ddec02a36d6403a274565032045
2013-11-13 15:54:23,040 - INFO - checker - shared w0v2 af27141daa272ef2285695fe8e709d9f
2013-11-13 15:54:23,040 - INFO - checker - shared w1d1 19987ddec02a36d6403a274565032045
2013-11-13 15:54:23,040 - INFO - checker - shared w1d2 ffffc84eaed851baa0e61b554aa90daa
2013-11-13 15:54:23,041 - INFO - checker - shared w1v1 ffffc84eaed851baa0e61b554aa90daa
2013-11-13 15:54:23,041 - INFO - checker - shared w2d1 ffffc84eaed851baa0e61b554aa90daa
2013-11-13 15:54:24,337 - ERROR - checker - a version af27141daa272ef2285695fe8e709d9f (filename test.BIG.v1384354395) does not correspond to any previously generated file
ISSUE WITH 1.7.2 CERNBOX Client: in step 4 both worker0 and worker1 use the same transfer id for chunked upload
Possible reason: random number initialization sqrand() missing?
TO BE CHECKED WITH 2.x client and report to owncloud if needed.
The side effect is that with EOS 1.151 update the final chunk PUT always terminates with 412 response and the sync never finishes.
This is an effect of fixing checksum checks of chunked uploads:
git blame ./fst/http/HttpHandler.cc
a4594ec8 (Andreas Peters 2015-04-08 14:11:17 +0200 639) response->SetResponseCode(eos::common::HttpResponse::PRECONDITION_FAILED);
To be checked with interactive clients.
"""
import time
import tempfile
import glob
from smashbox.utilities import *
from smashbox.utilities import reflection
@add_worker
def worker0(step):
shared = reflection.getSharedObject()
reset_owncloud_account()
reset_rundir()
#versions = get_md5_versions_on_server('test.BIG')
step(1,'create initial content and sync')
d = make_workdir()
fn = '%s/test.BIG'%d
createfile(fn,'0',count=100000,bs=1000)
shared['w0v1'] = md5sum(fn)
logger.info(shared['w0v1'])
hexdump(fn)
run_ocsync(d)
step(3,'modify local content')
createfile(fn,'1',count=200,bs=1000000) # create large file -> it will take longer to sync
shared['w0v2'] = md5sum(fn)
logger.info(shared['w0v2'])
hexdump(fn)
step(4,'sync local content')
run_ocsync(d)
shared['w0d1'] = md5sum(fn)
logger.info(shared['w0d1'])
hexdump(fn)
if shared['w0d1'] == shared['w0v2']:
logger.info("Content NOT changed locally")
else:
logger.info("CONTENT CHANGED LOCALLY")
#step(4)
#run_ocsync(d)
#step(5)
logger.info('output %s',d)
@add_worker
def worker1(step):
shared = reflection.getSharedObject()
step(2,'sync initial state created by worker 0')
d = make_workdir()
run_ocsync(d)
fn = '%s/test.BIG'%d
shared['w1d1'] = md5sum(fn)
logger.info(shared['w1d1'])
error_check(shared['w1d1'] == shared['w0v1'],'downloaded file does not match the initially created file')
step(3,'modify local content')
createfile(fn,'2',count=200000,bs=1000) # create large file -> it will take longer to sync
shared['w1v1'] = md5sum(fn)
logger.info(shared['w1v1'])
hexdump(fn)
step(4,'sync modified file')
# add a bit of delay to make sure worker1 starts later than worker0
sleep(2.1)
run_ocsync(d)
shared['w1d2'] = md5sum(fn)
logger.info(shared['w1d2'])
hexdump(fn)
step(5)
logger.info('output %s',d)
@add_worker
def checker(step):
shared = reflection.getSharedObject()
step(6,'sync the final state of the repository into a fresh local folder')
#sleep(10)
d = make_workdir()
run_ocsync(d)
fn = '%s/test.BIG'%d
shared['w2d1'] = md5sum(fn)
logger.info(shared['w2d1'])
# print the status
logger.info('final output %s',d)
logger.info('content as reported by webdav')
#runcmd('curl -s -k -XPROPFIND %s | xmllint --format -'%oc_webdav_url()) #FIXME: no request body, unsupported by EOS
#DISABLED FOR NOW
#list_versions_on_server('test.BIG')
for x in sorted(shared.keys()):
logger.info('shared %s %s',x,shared[x])
# verify the status
error_check(shared['w2d1'] in [shared['w0v1'],shared['w0v2'],shared['w1v1']], "file downloaded by the checker does not correspond to any file created locally by the workers")
if False:
# DISABLED FOR NOW
# make sure that all versions stored on the server correspond to a version generated locally
versions = get_md5_versions_on_server('test.BIG')
for v5,name in versions:
error_check(not v5 in [shared['w0v1'],shared['w0v2'], shared['w1v1']],
'a version %s (filename %s) does not correspond to any previously generated file'%(v5,name))
### ASSERT
# make sure it is empty
#assert(glob.glob(d+'/*') == [])
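# Illustrative run (a sketch; the test file name and smashbox CLI form are assumed):
#   bin/smash -v lib/test_concurrentUploadBig.py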
|
import os
import sys
import sphinx
import sphinx.apidoc
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))
import segpy
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'cartouche',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'Segpy'
copyright = '2017, Sixty North'
version = segpy.__version__
release = segpy.__version__
exclude_patterns = []
pygments_style = 'sphinx'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
htmlhelp_basename = 'Segpydoc'
latex_elements = {
}
latex_documents = [
('index', 'Segpy.tex', 'Segpy Documentation',
'Sixty North', 'manual'),
]
man_pages = [
('index', 'segpy', 'Segpy Documentation',
['Sixty North'], 1)
]
texinfo_documents = [
('index', 'Segpy', 'Segpy Documentation',
'Sixty North', 'Segpy', 'One line description of project.',
'Miscellaneous'),
]
def run_apidoc(_):
module = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(module, '..', '..', 'segpy'))
out_dir = os.path.abspath(os.path.join(module, 'apidocs', 'segpy'))
print("src_dir =", src_dir)
sphinx.apidoc.main(
['', '--separate',
'-o', out_dir, src_dir])
def setup(app):
app.connect('builder-inited', run_apidoc)
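# Compatibility note (a hedged sketch, not in the original conf.py): on Sphinx >= 1.7
# the apidoc entry point moved to sphinx.ext.apidoc, and its main() no longer expects
# a dummy program name as argv[0]. An import shim would look like:
#
#     try:
#         from sphinx.ext import apidoc   # Sphinx >= 1.7
#         apidoc.main(['--separate', '-o', out_dir, src_dir])
#     except ImportError:
#         from sphinx import apidoc       # older Sphinx
#         apidoc.main(['', '--separate', '-o', out_dir, src_dir])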
|
from . import mrp_production
from . import product
|
import argparse
import glob
parser = argparse.ArgumentParser()
parser.add_argument(
'working_dir',
type=str,
metavar='PATH',
help='path to examples working directory',
)
args = parser.parse_args()
files_with_errors = []
for logfile in glob.glob(f"{args.working_dir}/*/output/log/activitysim.log"):
with open(logfile, 'rt') as f:
printing_traceback = False
found_traceback = False
for n, line in enumerate(f.readlines(), start=1):
if printing_traceback:
print(line.rstrip())
if not line.startswith(" "):
printing_traceback = False
else:
if "Traceback" in line:
print(f"======= TRACEBACK in {logfile} at line {n} =======")
print(line.rstrip())
printing_traceback = True
found_traceback = True
if not found_traceback:
print(f"OK: {logfile}")
else:
files_with_errors.append(logfile)
if files_with_errors:
print("=====================================================")
print(f"Found {len(files_with_errors)} examples with errors:")
for f in files_with_errors:
print(f"- {f}")
print("=====================================================")
|
"""
Tests for bookmark views.
"""
import ddt
import json
from nose.plugins.attrib import attr
from unittest import skipUnless
import urllib
from django.conf import settings
from django.core.urlresolvers import reverse
from mock import patch
from rest_framework.test import APIClient
from xmodule.modulestore import ModuleStoreEnum
from .test_models import BookmarksTestsBase
from .test_api import BookmarkApiEventTestMixin
class BookmarksViewsTestsBase(BookmarksTestsBase, BookmarkApiEventTestMixin):
"""
Base class for bookmarks views tests.
"""
def setUp(self):
super(BookmarksViewsTestsBase, self).setUp()
self.anonymous_client = APIClient()
self.client = self.login_client(user=self.user)
def login_client(self, user):
"""
Helper method for getting the client and user and logging in. Returns client.
"""
client = APIClient()
client.login(username=user.username, password=self.TEST_PASSWORD)
return client
def send_get(self, client, url, query_parameters=None, expected_status=200):
"""
Helper method for sending a GET to the server. Verifies the expected status and returns the response.
"""
url = url + '?' + query_parameters if query_parameters else url
response = client.get(url)
self.assertEqual(expected_status, response.status_code)
return response
def send_post(self, client, url, data, content_type='application/json', expected_status=201):
"""
Helper method for sending a POST to the server. Verifies the expected status and returns the response.
"""
response = client.post(url, data=json.dumps(data), content_type=content_type)
self.assertEqual(expected_status, response.status_code)
return response
def send_delete(self, client, url, expected_status=204):
"""
Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
"""
response = client.delete(url)
self.assertEqual(expected_status, response.status_code)
return response
@attr('shard_2')
@ddt.ddt
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Tests only valid in LMS')
class BookmarksListViewTests(BookmarksViewsTestsBase):
"""
This contains the tests for GET & POST methods of bookmark.views.BookmarksListView class
GET /api/bookmarks/v1/bookmarks/?course_id={course_id1}
POST /api/bookmarks/v1/bookmarks
"""
@ddt.data(
(1, False),
(10, False),
(25, False),
(1, True),
(10, True),
(25, True),
)
@ddt.unpack
@patch('eventtracking.tracker.emit')
def test_get_bookmarks_successfully(self, bookmarks_count, check_all_fields, mock_tracker):
"""
Test that requesting bookmarks for a course returns records successfully
in the expected order, both with and without the optional fields.
"""
course, __, bookmarks = self.create_course_with_bookmarks_count(
bookmarks_count, store_type=ModuleStoreEnum.Type.mongo
)
query_parameters = 'course_id={}&page_size={}'.format(urllib.quote(unicode(course.id)), 100)
if check_all_fields:
query_parameters += '&fields=path,display_name'
response = self.send_get(
client=self.client,
url=reverse('bookmarks'),
query_parameters=query_parameters,
)
bookmarks_data = response.data['results']
self.assertEqual(len(bookmarks_data), len(bookmarks))
self.assertEqual(response.data['count'], len(bookmarks))
self.assertEqual(response.data['num_pages'], 1)
# As bookmarks are sorted by -created so we will compare in that order.
self.assert_bookmark_data_is_valid(bookmarks[-1], bookmarks_data[0], check_optional_fields=check_all_fields)
self.assert_bookmark_data_is_valid(bookmarks[0], bookmarks_data[-1], check_optional_fields=check_all_fields)
self.assert_bookmark_event_emitted(
mock_tracker,
event_name='edx.bookmark.listed',
course_id=unicode(course.id),
list_type='per_course',
bookmarks_count=bookmarks_count,
page_size=100,
page_number=1
)
@ddt.data(
10, 25
)
@patch('eventtracking.tracker.emit')
def test_get_bookmarks_with_pagination(self, bookmarks_count, mock_tracker):
"""
Test that requesting bookmarks for a course returns paginated results with a 200 status code.
"""
course, __, bookmarks = self.create_course_with_bookmarks_count(
bookmarks_count, store_type=ModuleStoreEnum.Type.mongo
)
page_size = 5
query_parameters = 'course_id={}&page_size={}'.format(urllib.quote(unicode(course.id)), page_size)
response = self.send_get(
client=self.client,
url=reverse('bookmarks'),
query_parameters=query_parameters
)
bookmarks_data = response.data['results']
# Pagination assertions.
self.assertEqual(response.data['count'], bookmarks_count)
self.assertIn('page=2&page_size={}'.format(page_size), response.data['next'])
self.assertEqual(response.data['num_pages'], bookmarks_count / page_size)
self.assertEqual(len(bookmarks_data), min(bookmarks_count, page_size))
self.assert_bookmark_data_is_valid(bookmarks[-1], bookmarks_data[0])
self.assert_bookmark_event_emitted(
mock_tracker,
event_name='edx.bookmark.listed',
course_id=unicode(course.id),
list_type='per_course',
bookmarks_count=bookmarks_count,
page_size=page_size,
page_number=1
)
@patch('eventtracking.tracker.emit')
def test_get_bookmarks_with_invalid_data(self, mock_tracker):
"""
Test that requesting bookmarks with invalid data returns 0 records.
"""
# Invalid course id.
response = self.send_get(
client=self.client,
url=reverse('bookmarks'),
query_parameters='course_id=invalid'
)
bookmarks_data = response.data['results']
self.assertEqual(len(bookmarks_data), 0)
self.assertFalse(mock_tracker.emit.called) # pylint: disable=maybe-no-member
@patch('eventtracking.tracker.emit')
def test_get_all_bookmarks_when_course_id_not_given(self, mock_tracker):
"""
Test that requesting bookmarks returns all records for that user.
"""
# Without course id we would return all the bookmarks for that user.
response = self.send_get(
client=self.client,
url=reverse('bookmarks')
)
bookmarks_data = response.data['results']
self.assertEqual(len(bookmarks_data), 3)
self.assert_bookmark_data_is_valid(self.other_bookmark_1, bookmarks_data[0])
self.assert_bookmark_data_is_valid(self.bookmark_2, bookmarks_data[1])
self.assert_bookmark_data_is_valid(self.bookmark_1, bookmarks_data[2])
self.assert_bookmark_event_emitted(
mock_tracker,
event_name='edx.bookmark.listed',
list_type='all_courses',
bookmarks_count=3,
page_size=10,
page_number=1
)
def test_anonymous_access(self):
"""
Test that an anonymous client (not logged in) cannot call GET or POST.
"""
query_parameters = 'course_id={}'.format(self.course_id)
self.send_get(
client=self.anonymous_client,
url=reverse('bookmarks'),
query_parameters=query_parameters,
expected_status=401
)
self.send_post(
client=self.anonymous_client,
url=reverse('bookmarks'),
data={'usage_id': 'test'},
expected_status=401
)
def test_post_bookmark_successfully(self):
"""
Test that posting a bookmark successfully returns newly created data with 201 code.
"""
response = self.send_post(
client=self.client,
url=reverse('bookmarks'),
data={'usage_id': unicode(self.vertical_3.location)}
)
# Assert Newly created bookmark.
self.assertEqual(response.data['id'], '%s,%s' % (self.user.username, unicode(self.vertical_3.location)))
self.assertEqual(response.data['course_id'], self.course_id)
self.assertEqual(response.data['usage_id'], unicode(self.vertical_3.location))
self.assertIsNotNone(response.data['created'])
self.assertEqual(len(response.data['path']), 2)
self.assertEqual(response.data['display_name'], self.vertical_3.display_name)
def test_post_bookmark_with_invalid_data(self):
"""
Test that posting a bookmark for a block with invalid usage id returns a 400.
Scenarios:
1) Invalid usage id.
2) Without usage id.
3) With empty request.data
"""
# Send usage_id with invalid format.
response = self.send_post(
client=self.client,
url=reverse('bookmarks'),
data={'usage_id': 'invalid'},
expected_status=400
)
self.assertEqual(response.data['user_message'], u'An error has occurred. Please try again.')
# Send data without usage_id.
response = self.send_post(
client=self.client,
url=reverse('bookmarks'),
data={'course_id': 'invalid'},
expected_status=400
)
self.assertEqual(response.data['user_message'], u'An error has occurred. Please try again.')
self.assertEqual(response.data['developer_message'], u'Parameter usage_id not provided.')
# Send empty data dictionary.
with self.assertNumQueries(7): # No queries for bookmark table.
response = self.send_post(
client=self.client,
url=reverse('bookmarks'),
data={},
expected_status=400
)
self.assertEqual(response.data['user_message'], u'An error has occurred. Please try again.')
self.assertEqual(response.data['developer_message'], u'No data provided.')
def test_post_bookmark_for_non_existing_block(self):
"""
Test that posting a bookmark for a block that does not exist returns a 400.
"""
response = self.send_post(
client=self.client,
url=reverse('bookmarks'),
data={'usage_id': 'i4x://arbi/100/html/340ef1771a094090ad260ec940d04a21'},
expected_status=400
)
self.assertEqual(
response.data['user_message'],
u'An error has occurred. Please try again.'
)
self.assertEqual(
response.data['developer_message'],
u'Block with usage_id: i4x://arbi/100/html/340ef1771a094090ad260ec940d04a21 not found.'
)
@patch('django.conf.settings.MAX_BOOKMARKS_PER_COURSE', 5)
def test_post_bookmark_when_max_bookmarks_already_exist(self):
"""
Test that posting a bookmark when the maximum number of bookmarks already exists returns a 400.
"""
max_bookmarks = settings.MAX_BOOKMARKS_PER_COURSE
__, blocks, __ = self.create_course_with_bookmarks_count(max_bookmarks)
response = self.send_post(
client=self.client,
url=reverse('bookmarks'),
data={'usage_id': unicode(blocks[-1].location)},
expected_status=400
)
self.assertEqual(
response.data['user_message'],
u'You can create up to {0} bookmarks.'
u' You must remove some bookmarks before you can add new ones.'.format(max_bookmarks)
)
self.assertEqual(
response.data['developer_message'],
u'You can create up to {0} bookmarks.'
u' You must remove some bookmarks before you can add new ones.'.format(max_bookmarks)
)
def test_unsupported_methods(self):
"""
Test that DELETE and PUT are not supported.
"""
self.client.login(username=self.user.username, password=self.TEST_PASSWORD)
self.assertEqual(405, self.client.put(reverse('bookmarks')).status_code)
self.assertEqual(405, self.client.delete(reverse('bookmarks')).status_code)
@patch('eventtracking.tracker.emit')
@ddt.unpack
@ddt.data(
{'page_size': -1, 'expected_bookmarks_count': 2, 'expected_page_size': 10, 'expected_page_number': 1},
{'page_size': 0, 'expected_bookmarks_count': 2, 'expected_page_size': 10, 'expected_page_number': 1},
{'page_size': 999, 'expected_bookmarks_count': 2, 'expected_page_size': 100, 'expected_page_number': 1}
)
def test_listed_event_for_different_page_size_values(self, mock_tracker, page_size, expected_bookmarks_count,
expected_page_size, expected_page_number):
""" Test that edx.course.bookmark.listed event values are as expected for different page size values """
query_parameters = 'course_id={}&page_size={}'.format(urllib.quote(self.course_id), page_size)
self.send_get(client=self.client, url=reverse('bookmarks'), query_parameters=query_parameters)
self.assert_bookmark_event_emitted(
mock_tracker,
event_name='edx.bookmark.listed',
course_id=self.course_id,
list_type='per_course',
bookmarks_count=expected_bookmarks_count,
page_size=expected_page_size,
page_number=expected_page_number
)
@patch('openedx.core.djangoapps.bookmarks.views.eventtracking.tracker.emit')
def test_listed_event_for_page_number(self, mock_tracker):
""" Test that edx.course.bookmark.listed event values are as expected when we request a specific page number """
self.send_get(client=self.client, url=reverse('bookmarks'), query_parameters='page_size=2&page=2')
self.assert_bookmark_event_emitted(
mock_tracker,
event_name='edx.bookmark.listed',
list_type='all_courses',
bookmarks_count=3,
page_size=2,
page_number=2
)
@attr('shard_2')
@ddt.ddt
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Tests only valid in LMS')
class BookmarksDetailViewTests(BookmarksViewsTestsBase):
"""
This contains the tests for GET & DELETE methods of bookmark.views.BookmarksDetailView class
"""
@ddt.data(
('', False),
('fields=path,display_name', True)
)
@ddt.unpack
def test_get_bookmark_successfully(self, query_params, check_optional_fields):
"""
Test that requesting bookmark returns data with 200 code.
"""
response = self.send_get(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': self.user.username, 'usage_id': unicode(self.sequential_1.location)}
),
query_parameters=query_params
)
data = response.data
self.assertIsNotNone(data)
self.assert_bookmark_data_is_valid(self.bookmark_1, data, check_optional_fields=check_optional_fields)
def test_get_bookmark_that_belongs_to_other_user(self):
"""
Test that requesting a bookmark that belongs to another user returns a 404 status code.
"""
self.send_get(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': 'other', 'usage_id': unicode(self.vertical_1.location)}
),
expected_status=404
)
def test_get_bookmark_that_does_not_exist(self):
"""
Test that requesting a bookmark that does not exist returns a 404 status code.
"""
response = self.send_get(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': self.user.username, 'usage_id': 'i4x://arbi/100/html/340ef1771a0940'}
),
expected_status=404
)
self.assertEqual(
response.data['user_message'],
'Bookmark with usage_id: i4x://arbi/100/html/340ef1771a0940 does not exist.'
)
self.assertEqual(
response.data['developer_message'],
'Bookmark with usage_id: i4x://arbi/100/html/340ef1771a0940 does not exist.'
)
def test_get_bookmark_with_invalid_usage_id(self):
"""
Test that requesting a bookmark with an invalid usage id returns a 404.
"""
response = self.send_get(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': self.user.username, 'usage_id': 'i4x'}
),
expected_status=404
)
self.assertEqual(response.data['user_message'], u'Invalid usage_id: i4x.')
def test_anonymous_access(self):
"""
Test that an anonymous client (not logged in) cannot call GET or DELETE.
"""
url = reverse('bookmarks_detail', kwargs={'username': self.user.username, 'usage_id': 'i4x'})
self.send_get(
client=self.anonymous_client,
url=url,
expected_status=401
)
self.send_delete(
client=self.anonymous_client,
url=url,
expected_status=401
)
def test_delete_bookmark_successfully(self):
"""
Test that deleting a bookmark returns a 204 status code on success.
"""
query_parameters = 'course_id={}'.format(urllib.quote(self.course_id))
response = self.send_get(client=self.client, url=reverse('bookmarks'), query_parameters=query_parameters)
bookmarks_data = response.data['results']
self.assertEqual(len(bookmarks_data), 2)
self.send_delete(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': self.user.username, 'usage_id': unicode(self.sequential_1.location)}
)
)
response = self.send_get(client=self.client, url=reverse('bookmarks'), query_parameters=query_parameters)
bookmarks_data = response.data['results']
self.assertEqual(len(bookmarks_data), 1)
def test_delete_bookmark_that_belongs_to_other_user(self):
"""
Test that deleting a bookmark that belongs to another user returns a 404.
"""
self.send_delete(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': 'other', 'usage_id': unicode(self.vertical_1.location)}
),
expected_status=404
)
def test_delete_bookmark_that_does_not_exist(self):
"""
Test that deleting a bookmark that does not exist returns a 404.
"""
response = self.send_delete(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': self.user.username, 'usage_id': 'i4x://arbi/100/html/340ef1771a0940'}
),
expected_status=404
)
self.assertEqual(
response.data['user_message'],
u'Bookmark with usage_id: i4x://arbi/100/html/340ef1771a0940 does not exist.'
)
self.assertEqual(
response.data['developer_message'],
'Bookmark with usage_id: i4x://arbi/100/html/340ef1771a0940 does not exist.'
)
def test_delete_bookmark_with_invalid_usage_id(self):
"""
Test that deleting a bookmark with an invalid usage id returns a 404.
"""
response = self.send_delete(
client=self.client,
url=reverse(
'bookmarks_detail',
kwargs={'username': self.user.username, 'usage_id': 'i4x'}
),
expected_status=404
)
self.assertEqual(response.data['user_message'], u'Invalid usage_id: i4x.')
def test_unsupported_methods(self):
"""
Test that POST and PUT are not supported.
"""
url = reverse('bookmarks_detail', kwargs={'username': self.user.username, 'usage_id': 'i4x'})
self.client.login(username=self.user.username, password=self.TEST_PASSWORD)
self.assertEqual(405, self.client.put(url).status_code)
self.assertEqual(405, self.client.post(url).status_code)
|
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from .api import get_backend_class
from .backends.exceptions import NotificationDoesNotExist
from .serializers import NotificationSerializer
class InboxViewSet(viewsets.ViewSet):
"""
Provides `list` and `detail` actions.
And a POST endpoint to `read` inbox messages.
"""
def list(self, request):
BackendClass = get_backend_class()
backend = BackendClass()
notifications = backend.inbox_list(request.user)
serializer = NotificationSerializer(notifications, many=True)
return Response(serializer.data)
def retrieve(self, request, pk=None):
BackendClass = get_backend_class()
backend = BackendClass()
try:
notification = backend.inbox_get(request.user, pk)
except NotificationDoesNotExist as e:
return Response(str(e), status=404)
else:
serializer = NotificationSerializer(notification)
return Response(serializer.data)
@action(methods=['POST'], detail=True)
def read(self, request, pk=None):
"""
Mark the message as read.
"""
BackendClass = get_backend_class()
backend = BackendClass()
try:
backend.inbox_delete(request.user, pk)
except NotificationDoesNotExist as e:
return Response(str(e), status=404)
else:
return Response({
'status': 'Message marked as read successfully.'
})
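# --- Illustrative wiring (assumed, not from the source) ---
# A minimal sketch of exposing InboxViewSet through DRF's DefaultRouter; this
# would normally live in a urls.py, and the 'inbox' prefix is illustrative.
# basename must be given explicitly because a plain ViewSet has no queryset
# from which DRF could derive it (older DRF releases spell it base_name).
from rest_framework.routers import DefaultRouter

router = DefaultRouter()
router.register(r'inbox', InboxViewSet, basename='inbox')
urlpatterns = router.urls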
|
import res_partner
|
"""Superdesk Users"""
from superdesk.metadata.item import BYLINE, SIGN_OFF
from superdesk.resource import Resource
class UsersResource(Resource):
def __init__(self, endpoint_name, app, service, endpoint_schema=None):
self.readonly = bool(app.config.get('LDAP_SERVER', None))
self.additional_lookup = {
'url': r'regex("[\w]+")',
'field': 'username'
}
self.schema = {
'username': {
'type': 'string',
'unique': True,
'required': True,
'minlength': 1
},
'password': {
'type': 'string',
'minlength': 5
},
'password_changed_on': {
'type': 'datetime',
'nullable': True
},
'first_name': {
'type': 'string',
'readonly': self.readonly
},
'last_name': {
'type': 'string',
'readonly': self.readonly
},
'display_name': {
'type': 'string'
},
'email': {
'unique': True,
'type': 'email',
'required': True,
'coerce': lambda s: s.lower()
},
'phone': {
'type': 'string',
'nullable': True
},
'job_title': {
'type': 'string',
'required': False,
},
'biography': {
'type': 'string',
'required': False,
'nullable': True,
},
'facebook': {
'type': 'string',
'required': False,
'nullable': True,
},
'instagram': {
'type': 'string',
'required': False,
'nullable': True,
},
'twitter': {
'type': 'string',
'required': False,
'nullable': True,
'twitter': True,
},
'jid': {
'unique': True,
'type': 'string',
'required': False,
},
'language': {
'type': 'string',
'nullable': True
},
'user_info': {
'type': 'dict'
},
'picture_url': {
'type': 'string',
'nullable': True
},
'avatar': Resource.rel('upload', embeddable=True, nullable=True),
'avatar_renditions': {'type': 'dict'},
'role': Resource.rel('roles', True),
'privileges': {'type': 'dict'},
'workspace': {
'type': 'dict'
},
'user_type': {
'type': 'string',
'allowed': ['user', 'administrator'],
'default': 'user'
},
'is_support': {
'type': 'boolean',
'default': False
},
'is_author': {
'type': 'boolean',
'default': True
},
'is_active': {
'type': 'boolean',
'default': True
},
'is_enabled': {
'type': 'boolean',
'default': True
},
'needs_activation': {
'type': 'boolean',
'default': True
},
# Default desk of the user, which would be selected when logged-in.
'desk': Resource.rel('desks', nullable=True),
SIGN_OFF: { # Used for putting a sign-off on the content when it's created/updated except kill
'type': 'string',
'required': False,
'nullable': True,
'regex': '^[a-zA-Z0-9]+$'
},
BYLINE: {
'type': 'string',
'required': False,
'nullable': True
},
# list to hold invisible stages.
# This field is updated under following scenario:
# 1. stage visible flag is updated
# 2. desk membership is modified
# 3. new user is created
'invisible_stages': {
'type': 'list',
'required': False,
'nullable': True
},
# If Slack notifications are configured and enabled for the user
# the Slack username is stored here.
'slack_username': {
'type': 'string',
'required': False,
'nullable': True
},
# The Slack user id is stored here, to avoid repeatedly having to look it up
'slack_user_id': {
'type': 'string',
'required': False,
'nullable': True
}
}
self.extra_response_fields = [
'display_name',
'username',
'email',
'user_info',
'picture_url',
'avatar',
'is_active',
'is_enabled',
'needs_activation',
'desk'
]
self.etag_ignore_fields = ['session_preferences', '_etag', 'invisible_stages']
self.datasource = {
'projection': {'password': 0},
'default_sort': [('username', 1)],
}
self.mongo_indexes = {
'username_1': ([('username', 1)], {'unique': True}),
'first_name_1_last_name_-1': [('first_name', 1), ('last_name', -1)],
}
self.privileges = {'POST': 'users', 'DELETE': 'users', 'PATCH': 'users'}
super().__init__(endpoint_name, app=app, service=service, endpoint_schema=endpoint_schema)
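# Illustrative wiring (a sketch; UsersService and get_backend() are assumed from
# typical superdesk apps rather than taken from this file):
#   service = UsersService('users', backend=superdesk.get_backend())
#   UsersResource('users', app=app, service=service)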
|
from spack import *
class Rocthrust(CMakePackage):
"""Thrust is a parallel algorithm library. This library has been ported to
HIP/ROCm platform, which uses the rocPRIM library. The HIP ported
library works on HIP/ROCm platforms"""
homepage = "https://github.com/ROCmSoftwarePlatform/rocThrust"
git = "https://github.com/ROCmSoftwarePlatform/rocThrust.git"
url = "https://github.com/ROCmSoftwarePlatform/rocThrust/archive/rocm-4.5.0.tar.gz"
maintainers = ['srekolam', 'arjun-raj-kuppala']
version('4.5.0', sha256='86cf897b01a6f5df668d978ce42d44a6ae9df9f8adc92d0a1a49a7c3bbead259')
version('4.3.1', sha256='86fcd3bc275efe9a485aed48afdc6d3351804c076caee43e3fb8bd69752865e9')
version('4.3.0', sha256='a50eb6500687b4ea9e0b3affb1daff8bbc56199d39fbed3ee61d2d5bfc1a0271')
version('4.2.0', sha256='da2b6c831c26c26058218b0c5b7b2e43fd7f0dac3b2e3a8e39a839145592c727')
version('4.1.0', sha256='e3d06c0387a2a6880776c7423b1acf0808fb8833bc822be75793da8c2f521efd')
version('4.0.0', sha256='120c87316f44ce8e8975e57c9b9bf1246b1ffc00879d31d744289ba9438a976c')
version('3.10.0', sha256='31bea6cd19a0ffa15e4ab50ecde2402ea5aaa182149cfab98242357e41f1805b')
version('3.9.0', sha256='65f5e74d72c5aaee90459468d693b212af7d56e31098ee8237b18d1b4d620eb0')
version('3.8.0', sha256='39350aeb8bfbcd09e387717b2a05c7e3a19e0fa85ff4284b967bb8fae12f9013')
version('3.7.0', sha256='4cb923dde5eec150a566cb10d23ee5c7ce3aa892c4dea94886a89d95b90f3bdd')
version('3.5.0', sha256='0d1bac1129d17bb1259fd06f5c9cb4c1620d1790b5c295b866fb3442d18923cb')
variant('build_type', default='Release', values=("Release", "Debug", "RelWithDebInfo"),
description='CMake build type')
depends_on('cmake@3:', type='build')
depends_on('numactl', when='@3.7.0:')
for ver in ['3.5.0', '3.7.0', '3.8.0', '3.9.0', '3.10.0', '4.0.0', '4.1.0',
'4.2.0', '4.3.0', '4.3.1', '4.5.0']:
depends_on('hip@' + ver, when='@' + ver)
depends_on('rocprim@' + ver, when='@' + ver)
depends_on('rocm-cmake@' + ver, type='build', when='@' + ver)
def setup_build_environment(self, env):
env.set('CXX', self.spec['hip'].hipcc)
def cmake_args(self):
args = [
self.define(
'CMAKE_MODULE_PATH',
'{0}/cmake'.format(self.spec['hip'].prefix)
)
]
if self.spec.satisfies('^cmake@3.21.0:3.21.2'):
args.append(self.define('__skip_rocmclang', 'ON'))
return args
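# Illustrative install from the command line (the version pin is just an example):
#   spack install rocthrust@4.5.0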
|
from spack import *
class Fp16(Package):
"""FP16 is a header-only library for
conversion to/from half-precision floating point formats"""
homepage = "https://github.com/Maratyszcza/FP16/"
url = "https://github.com/Maratyszcza/FP16.git"
version('master', git='https://github.com/Maratyszcza/FP16.git')
def install(self, spec, prefix):
install_tree('include', prefix.include)
|
import SU2
import os, time, sys, shutil
from optparse import OptionParser
def main():
# Command Line Options
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="read config from FILE", metavar="FILE")
parser.add_option("-n", "--partitions", dest="partitions", default=0,
help="number of PARTITIONS", metavar="PARTITIONS")
parser.add_option("-c", "--cycle", dest="cycle", default=1,
help="number of CYCLE adaptations", metavar="CYCLE")
parser.add_option("-o", "--overwrite", dest="overwrite", default="False",
help="OVERWRITE_MESH the output mesh with the adapted one", metavar="OVERWRITE_MESH")
parser.add_option("-s", "--save_all", dest="save_all", default="False",
help="SAVE_ALL the flow/adjoint/meshes solutions at each adaptation cycle", metavar="SAVE_ALL")
(options, args)=parser.parse_args()
options.partitions = int( options.partitions )
options.cycle = int( options.cycle )
options.overwrite = options.overwrite == "True"
options.save_all = options.save_all == "True"
# Run Mesh Adaptation
mesh_adaptation ( options.filename ,
options.partitions ,
options.cycle ,
options.overwrite ,
options.save_all )
def mesh_adaptation( filename ,
partitions = 0 ,
cycles = 1 ,
overwrite = False ,
save_all = False ):
# Set the name of the configuration file
config_name = filename
# Read the specified configuration file
config = SU2.io.Config(config_name)
# Set the number of partitions for parallel computations
config.NUMBER_PART = partitions
# Call CFD to generate a solution
SU2.run.CFD(config)
# Rename the output restart to the input solution file
SU2.io.restart2solution(config)
# Call MSH
SU2.run.MSH(config)
if __name__ == '__main__':
main()
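# Illustrative invocation (the script and config file names are assumed):
#   python mesh_adaptation.py -f inv_NACA0012.cfg -n 4 -c 2 -o True -s False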
|
from qwt.scale_draw import QwtScaleDraw
from qwt.scale_engine import QwtLinearScaleEngine
from qwt.color_map import QwtLinearColorMap
from qwt.text import QwtText
from qwt.painter import QwtPainter
from qwt.interval import QwtInterval
from qwt.color_map import QwtColorMap
from qwt.qt.QtGui import (QWidget, QSizePolicy, QPainter, QStyleOption, QStyle,
QPalette)
from qwt.qt.QtCore import Qt, QRectF, QSize, Signal
import numpy as np
class ColorBar(object):
def __init__(self):
self.isEnabled = None
self.width = None
self.interval = QwtInterval()
self.colorMap = QwtColorMap()
class QwtScaleWidget_PrivateData(object):
def __init__(self):
self.scaleDraw = None
self.borderDist = [None] * 2
self.minBorderDist = [None] * 2
self.scaleLength = None
self.margin = None
self.titleOffset = None
self.spacing = None
self.title = QwtText()
self.layoutFlags = None
self.colorBar = ColorBar()
class QwtScaleWidget(QWidget):
SIG_SCALE_DIV_CHANGED = Signal()
# enum LayoutFlag
TitleInverted = 1
def __init__(self, *args):
self.__data = None
align = QwtScaleDraw.LeftScale
if len(args) == 0:
parent = None
elif len(args) == 1:
parent, = args
elif len(args) == 2:
align, parent = args
else:
raise TypeError("%s() takes 0, 1 or 2 argument(s) (%s given)"\
% (self.__class__.__name__, len(args)))
super(QwtScaleWidget, self).__init__(parent)
self.initScale(align)
def initScale(self, align):
self.__data = QwtScaleWidget_PrivateData()
self.__data.layoutFlags = 0
if align == QwtScaleDraw.RightScale:
self.__data.layoutFlags |= self.TitleInverted
self.__data.borderDist = [0, 0]
self.__data.minBorderDist = [0, 0]
self.__data.margin = 4
self.__data.titleOffset = 0
self.__data.spacing = 2
self.__data.scaleDraw = QwtScaleDraw()
self.__data.scaleDraw.setAlignment(align)
self.__data.scaleDraw.setLength(10)
self.__data.scaleDraw.setScaleDiv(
QwtLinearScaleEngine().divideScale(0.0, 100.0, 10, 5))
self.__data.colorBar.colorMap = QwtLinearColorMap()
self.__data.colorBar.isEnabled = False
self.__data.colorBar.width = 10
flags = Qt.AlignmentFlag(Qt.AlignHCenter|Qt.TextExpandTabs|Qt.TextWordWrap)
self.__data.title.setRenderFlags(flags)
self.__data.title.setFont(self.font())
policy = QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed)
if self.__data.scaleDraw.orientation() == Qt.Vertical:
policy.transpose()
self.setSizePolicy(policy)
self.setAttribute(Qt.WA_WState_OwnSizePolicy, False)
def setLayoutFlag(self, flag, on=True):
if (self.__data.layoutFlags & flag != 0) != on:
if on:
self.__data.layoutFlags |= flag
else:
self.__data.layoutFlags &= ~flag
def testLayoutFlag(self, flag):
return self.__data.layoutFlags & flag
def setTitle(self, title):
if isinstance(title, QwtText):
flags = title.renderFlags() & (~ int(Qt.AlignTop|Qt.AlignBottom))
title.setRenderFlags(flags)
if title != self.__data.title:
self.__data.title = title
self.layoutScale()
else:
if self.__data.title.text() != title:
self.__data.title.setText(title)
self.layoutScale()
def setAlignment(self, alignment):
if self.__data.scaleDraw:
self.__data.scaleDraw.setAlignment(alignment)
if not self.testAttribute(Qt.WA_WState_OwnSizePolicy):
policy = QSizePolicy(QSizePolicy.MinimumExpanding,
QSizePolicy.Fixed)
if self.__data.scaleDraw.orientation() == Qt.Vertical:
policy.transpose()
self.setSizePolicy(policy)
self.setAttribute(Qt.WA_WState_OwnSizePolicy, False)
self.layoutScale()
def alignment(self):
if not self.scaleDraw():
return QwtScaleDraw.LeftScale
return self.scaleDraw().alignment()
def setBorderDist(self, dist1, dist2):
if dist1 != self.__data.borderDist[0] or\
dist2 != self.__data.borderDist[1]:
self.__data.borderDist = [dist1, dist2]
self.layoutScale()
def setMargin(self, margin):
margin = max([0, margin])
if margin != self.__data.margin:
self.__data.margin = margin
self.layoutScale()
def setSpacing(self, spacing):
spacing = max([0, spacing])
if spacing != self.__data.spacing:
self.__data.spacing = spacing
self.layoutScale()
def setLabelAlignment(self, alignment):
self.__data.scaleDraw.setLabelAlignment(alignment)
self.layoutScale()
def setLabelRotation(self, rotation):
self.__data.scaleDraw.setLabelRotation(rotation)
self.layoutScale()
def setScaleDraw(self, scaleDraw):
if scaleDraw is None or scaleDraw == self.__data.scaleDraw:
return
sd = self.__data.scaleDraw
if sd is not None:
scaleDraw.setAlignment(sd.alignment())
scaleDraw.setScaleDiv(sd.scaleDiv())
transform = None
if sd.scaleMap().transformation():
transform = sd.scaleMap().transformation().copy()
scaleDraw.setTransformation(transform)
self.__data.scaleDraw = scaleDraw
self.layoutScale()
def scaleDraw(self):
return self.__data.scaleDraw
def title(self):
return self.__data.title
def startBorderDist(self):
return self.__data.borderDist[0]
def endBorderDist(self):
return self.__data.borderDist[1]
def margin(self):
return self.__data.margin
def spacing(self):
return self.__data.spacing
def paintEvent(self, event):
painter = QPainter(self)
painter.setClipRegion(event.region())
opt = QStyleOption()
opt.initFrom(self)
self.style().drawPrimitive(QStyle.PE_Widget, opt, painter, self)
self.draw(painter)
def draw(self, painter):
self.__data.scaleDraw.draw(painter, self.palette())
if self.__data.colorBar.isEnabled and\
self.__data.colorBar.width > 0 and\
self.__data.colorBar.interval.isValid():
self.drawColorBar(painter, self.colorBarRect(self.contentsRect()))
r = self.contentsRect()
if self.__data.scaleDraw.orientation() == Qt.Horizontal:
r.setLeft(r.left() + self.__data.borderDist[0])
r.setWidth(r.width() - self.__data.borderDist[1])
else:
r.setTop(r.top() + self.__data.borderDist[0])
r.setHeight(r.height() - self.__data.borderDist[1])
if not self.__data.title.isEmpty():
self.drawTitle(painter, self.__data.scaleDraw.alignment(), r)
def colorBarRect(self, rect):
cr = QRectF(rect)
if self.__data.scaleDraw.orientation() == Qt.Horizontal:
cr.setLeft(cr.left() + self.__data.borderDist[0])
cr.setWidth(cr.width() - self.__data.borderDist[1] + 1)
else:
cr.setTop(cr.top() + self.__data.borderDist[0])
cr.setHeight(cr.height() - self.__data.borderDist[1] + 1)
sda = self.__data.scaleDraw.alignment()
if sda == QwtScaleDraw.LeftScale:
cr.setLeft(cr.right()-self.__data.margin-self.__data.colorBar.width)
cr.setWidth(self.__data.colorBar.width)
elif sda == QwtScaleDraw.RightScale:
cr.setLeft(cr.left()+self.__data.margin)
cr.setWidth(self.__data.colorBar.width)
elif sda == QwtScaleDraw.BottomScale:
cr.setTop(cr.top()+self.__data.margin)
cr.setHeight(self.__data.colorBar.width)
elif sda == QwtScaleDraw.TopScale:
cr.setTop(cr.bottom()-self.__data.margin-self.__data.colorBar.width)
cr.setHeight(self.__data.colorBar.width)
return cr
def resizeEvent(self, event):
self.layoutScale(False)
def layoutScale(self, update_geometry=True):
bd0, bd1 = self.getBorderDistHint()
if self.__data.borderDist[0] > bd0:
bd0 = self.__data.borderDist[0]
if self.__data.borderDist[1] > bd1:
bd1 = self.__data.borderDist[1]
colorBarWidth = 0
if self.__data.colorBar.isEnabled and\
self.__data.colorBar.interval.isValid():
colorBarWidth = self.__data.colorBar.width + self.__data.spacing
r = self.contentsRect()
if self.__data.scaleDraw.orientation() == Qt.Vertical:
y = r.top() + bd0
length = r.height() - (bd0 + bd1)
if self.__data.scaleDraw.alignment() == QwtScaleDraw.LeftScale:
x = r.right() - 1. - self.__data.margin - colorBarWidth
else:
x = r.left() + self.__data.margin + colorBarWidth
else:
x = r.left() + bd0
length = r.width() - (bd0 + bd1)
if self.__data.scaleDraw.alignment() == QwtScaleDraw.BottomScale:
y = r.top() + self.__data.margin + colorBarWidth
else:
y = r.bottom() - 1. - self.__data.margin - colorBarWidth
self.__data.scaleDraw.move(x, y)
self.__data.scaleDraw.setLength(length)
extent = np.ceil(self.__data.scaleDraw.extent(self.font()))
self.__data.titleOffset = self.__data.margin + self.__data.spacing +\
colorBarWidth + extent
if update_geometry:
self.updateGeometry()
self.update()
def drawColorBar(self, painter, rect):
if not self.__data.colorBar.interval.isValid():
return
sd = self.__data.scaleDraw
QwtPainter.drawColorBar(painter, self.__data.colorBar.colorMap,
self.__data.colorBar.interval.normalized(),
sd.scaleMap(), sd.orientation(), rect)
def drawTitle(self, painter, align, rect):
r = rect
flags = self.__data.title.renderFlags()\
&(~ int(Qt.AlignTop|Qt.AlignBottom|Qt.AlignVCenter))
if align == QwtScaleDraw.LeftScale:
angle = -90.
flags |= Qt.AlignTop
r.setRect(r.left(), r.bottom(), r.height(),
r.width()-self.__data.titleOffset)
elif align == QwtScaleDraw.RightScale:
angle = -90.
flags |= Qt.AlignTop
r.setRect(r.left()+self.__data.titleOffset, r.bottom(), r.height(),
r.width()-self.__data.titleOffset)
elif align == QwtScaleDraw.BottomScale:
angle = 0.
flags |= Qt.AlignBottom
r.setTop(r.top()+self.__data.titleOffset)
else:
angle = 0.
flags |= Qt.AlignTop
r.setBottom(r.bottom()-self.__data.titleOffset)
if self.__data.layoutFlags & self.TitleInverted:
if align in (QwtScaleDraw.LeftScale, QwtScaleDraw.RightScale):
angle = -angle
r.setRect(r.x()+r.height(), r.y()-r.width(),
r.width(), r.height())
painter.save()
painter.setFont(self.font())
painter.setPen(self.palette().color(QPalette.Text))
painter.translate(r.x(), r.y())
if angle != 0.:
painter.rotate(angle)
title = self.__data.title
title.setRenderFlags(flags)
title.draw(painter, QRectF(0., 0., r.width(), r.height()))
painter.restore()
def scaleChange(self):
self.layoutScale()
def sizeHint(self):
return self.minimumSizeHint()
def minimumSizeHint(self):
o = self.__data.scaleDraw.orientation()
length = 0
mbd1, mbd2 = self.getBorderDistHint()
length += max([0, self.__data.borderDist[0]-mbd1])
length += max([0, self.__data.borderDist[1]-mbd2])
length += self.__data.scaleDraw.minLength(self.font())
dim = self.dimForLength(length, self.font())
if length < dim:
length = dim
dim = self.dimForLength(length, self.font())
size = QSize(length+2, dim)
if o == Qt.Vertical:
size.transpose()
left, right, top, bottom = self.getContentsMargins()
return size + QSize(left + right, top + bottom)
def titleHeightForWidth(self, width):
return np.ceil(self.__data.title.heightForWidth(width, self.font()))
def dimForLength(self, length, scaleFont):
extent = np.ceil(self.__data.scaleDraw.extent(scaleFont))
dim = self.__data.margin + extent + 1
if not self.__data.title.isEmpty():
dim += self.titleHeightForWidth(length)+self.__data.spacing
if self.__data.colorBar.isEnabled and self.__data.colorBar.interval.isValid():
dim += self.__data.colorBar.width+self.__data.spacing
return dim
def getBorderDistHint(self):
start, end = self.__data.scaleDraw.getBorderDistHint(self.font())
if start < self.__data.minBorderDist[0]:
start = self.__data.minBorderDist[0]
if end < self.__data.minBorderDist[1]:
end = self.__data.minBorderDist[1]
return start, end
def setMinBorderDist(self, start, end):
self.__data.minBorderDist = [start, end]
def getMinBorderDist(self):
return self.__data.minBorderDist
def setScaleDiv(self, scaleDiv):
sd = self.__data.scaleDraw
if sd.scaleDiv() != scaleDiv:
sd.setScaleDiv(scaleDiv)
self.layoutScale()
self.SIG_SCALE_DIV_CHANGED.emit()
def setTransformation(self, transformation):
self.__data.scaleDraw.setTransformation(transformation)
self.layoutScale()
def setColorBarEnabled(self, on):
if on != self.__data.colorBar.isEnabled:
self.__data.colorBar.isEnabled = on
self.layoutScale()
def isColorBarEnabled(self):
return self.__data.colorBar.isEnabled
def setColorBarWidth(self, width):
if width != self.__data.colorBar.width:
self.__data.colorBar.width = width
if self.isColorBarEnabled():
self.layoutScale()
def colorBarWidth(self):
return self.__data.colorBar.width
def colorBarInterval(self):
return self.__data.colorBar.interval
def setColorMap(self, interval, colorMap):
self.__data.colorBar.interval = interval
if colorMap != self.__data.colorBar.colorMap:
self.__data.colorBar.colorMap = colorMap
if self.isColorBarEnabled():
self.layoutScale()
def colorMap(self):
return self.__data.colorBar.colorMap
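# --- Illustrative usage (a sketch, not part of the original module) ---
# Builds a standalone left-aligned scale with a color bar; the two-color
# QwtLinearColorMap constructor and the Qt color constants are assumed here.
def _example_scale_widget(parent=None):
    widget = QwtScaleWidget(QwtScaleDraw.LeftScale, parent)
    widget.setTitle(QwtText("Amplitude"))
    widget.setColorBarEnabled(True)
    widget.setColorMap(QwtInterval(0.0, 100.0), QwtLinearColorMap(Qt.blue, Qt.red))
    return widget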
|
import sys
import os
import re
import io
import traceback
from pybtex.plugin import find_plugin
from pybtex.database import BibliographyData, parse_file
from markdown.preprocessors import Preprocessor
from markdown.util import etree
import logging
log = logging.getLogger(__name__)
class MooseBibtex(Preprocessor):
"""
Creates per-page bibliographies using latex syntax.
"""
RE_BIBLIOGRAPHY = r'(?<!`)\\bibliography\{(.*?)\}'
RE_STYLE = r'(?<!`)\\bibliographystyle\{(.*?)\}'
RE_CITE = r'(?<!`)\\(?P<cmd>cite|citet|citep)\{(?P<key>.*?)\}'
def __init__(self, root=None, **kwargs):
Preprocessor.__init__(self, **kwargs)
self._citations = []
self._bibtex = BibliographyData()
self._root = root
def run(self, lines):
"""
Create a bibliography from cite commands.
"""
# Join the content to enable regex searches throughout entire text
content = '\n'.join(lines)
# Build the database of bibtex data
bibfiles = []
match = re.search(self.RE_BIBLIOGRAPHY, content)
if match:
bib_string = match.group(0)
for bfile in match.group(1).split(','):
try:
bibfiles.append(os.path.join(self._root, bfile))
data = parse_file(bibfiles[-1])
except Exception as e:
log.error('Failed to parse bibtex file: {}'.format(bfile))
traceback.print_exc()
return lines
self._bibtex.add_entries(data.entries.iteritems())
else:
return lines
# Determine the style
match = re.search(self.RE_STYLE, content)
if match:
content = content.replace(match.group(0), '')
try:
style = find_plugin('pybtex.style.formatting', match.group(1))
except:
log.error('Unknown bibliography style "{}"'.format(match.group(1)))
return lines
else:
style = find_plugin('pybtex.style.formatting', 'plain')
# Replace citations with author date, as an anchor
content = re.sub(self.RE_CITE, self.authors, content)
# Create html bibliography
if self._citations:
# Generate formatted html using pybtex
formatted_bibliography = style().format_bibliography(self._bibtex, self._citations)
backend = find_plugin('pybtex.backends', 'html')
stream = io.StringIO()
backend().write_to_stream(formatted_bibliography, stream)
# Strip the bib items from the formatted html
html = re.findall(r'\<dd\>(.*?)\</dd\>', stream.getvalue(), flags=re.MULTILINE|re.DOTALL)
# Produces an ordered list with anchors to the citations
output = u'<ol class="moose-bibliography" data-moose-bibfiles="{}">\n'.format(str(bibfiles))
for i, item in enumerate(html):
output += u'<li name="{}">{}</li>\n'.format(self._citations[i], item)
output += u'</ol>\n'
content = re.sub(self.RE_BIBLIOGRAPHY, self.markdown.htmlStash.store(output, safe=True), content)
return content.split('\n')
def authors(self, match):
"""
Return the author(s) citation for text, linked to bibliography.
"""
cmd = match.group('cmd')
key = match.group('key')
tex = '\\%s{%s}' % (cmd, key)
if key in self._bibtex.entries:
self._citations.append(key)
entry = self._bibtex.entries[key]
a = entry.persons['author']
n = len(a)
if n > 2:
author = '{} et al.'.format(' '.join(a[0].last_names))
elif n == 2:
a0 = ' '.join(a[0].last_names)
a1 = ' '.join(a[1].last_names)
author = '{} and {}'.format(a0, a1)
else:
author = ' '.join(a[0].last_names)
if cmd == 'citep':
a = '<a href="#{}" data-moose-cite="{}">{}, {}</a>'.format(key, tex, author, entry.fields['year'])
return '({})'.format(self.markdown.htmlStash.store(a, safe=True))
else:
a = '<a href="#{}" data-moose-cite="{}">{} ({})</a>'.format(key, tex, author, entry.fields['year'])
return self.markdown.htmlStash.store(a, safe=True)
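# --- Illustrative input (a sketch, not part of the original module) ---
# The preprocessor rewrites LaTeX-style commands found in the markdown source:
#
#   \bibliographystyle{unsrt}
#   ... as shown by \citet{slaughter2015} ...
#   \bibliography{docs/bib/moose.bib}
#
# The key and path above are made up. \cite/\citet/\citep become author-year
# anchors, and the \bibliography line is replaced with an ordered-list bibliography.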
|
'''
Created on 14 Jan. 2015
@author: Remi Cattiau
'''
from nxdrive.engine.workers import EngineWorker, ThreadInterrupt, PairInterrupt
from nxdrive.logging_config import get_logger
from nxdrive.client.common import LOCALLY_EDITED_FOLDER_NAME, UNACCESSIBLE_HASH
from nxdrive.client.common import NotFound
from nxdrive.engine.activity import Action
from nxdrive.utils import current_milli_time
from PyQt4.QtCore import pyqtSignal
from threading import Lock
import os
log = get_logger(__name__)
WindowsError = None
try:
from exceptions import WindowsError
except ImportError:
pass # this will never be raised under unix
class Processor(EngineWorker):
'''
classdocs
'''
pairSync = pyqtSignal(object, object)
path_locks = dict()
path_locker = Lock()
soft_locks = dict()
readonly_locks = dict()
readonly_locker = Lock()
def __init__(self, engine, item_getter, name=None):
'''
Constructor
'''
super(Processor, self).__init__(engine, engine.get_dao(), name=name)
self._current_item = None
self._current_doc_pair = None
self._get_item = item_getter
self._engine = engine
def _unlock_soft_path(self, path):
log.trace("Soft unlocking: %s", path)
path = path.lower()
Processor.path_locker.acquire()
if self._engine.get_uid() not in Processor.soft_locks:
Processor.soft_locks[self._engine.get_uid()] = dict()
try:
del Processor.soft_locks[self._engine.get_uid()][path]
except Exception as e:
log.trace(e)
finally:
Processor.path_locker.release()
def _unlock_readonly(self, local_client, path):
# dict[path]=(count, lock)
Processor.readonly_locker.acquire()
if self._engine.get_uid() not in Processor.readonly_locks:
Processor.readonly_locks[self._engine.get_uid()] = dict()
try:
if path in Processor.readonly_locks[self._engine.get_uid()]:
log.trace("readonly unlock: increase count on %s", path)
Processor.readonly_locks[self._engine.get_uid()][path][0] = Processor.readonly_locks[self._engine.get_uid()][path][0] + 1
else:
lock = local_client.unlock_ref(path)
log.trace("readonly unlock: unlock on %s with %d", path, lock)
Processor.readonly_locks[self._engine.get_uid()][path] = [1, lock]
finally:
Processor.readonly_locker.release()
def _lock_readonly(self, local_client, path):
Processor.readonly_locker.acquire()
if self._engine.get_uid() not in Processor.readonly_locks:
Processor.readonly_locks[self._engine.get_uid()] = dict()
try:
if path not in Processor.readonly_locks[self._engine.get_uid()]:
log.debug("readonly lock: can't find reference on %s", path)
return
Processor.readonly_locks[self._engine.get_uid()][path][0] = Processor.readonly_locks[self._engine.get_uid()][path][0] - 1
log.trace("readonly lock: update lock count on %s to %d", path, Processor.readonly_locks[self._engine.get_uid()][path][0])
if Processor.readonly_locks[self._engine.get_uid()][path][0] <= 0:
local_client.lock_ref(path, Processor.readonly_locks[self._engine.get_uid()][path][1])
log.trace("readonly lock: relocked path: %s with %d", path, Processor.readonly_locks[self._engine.get_uid()][path][1])
del Processor.readonly_locks[self._engine.get_uid()][path]
finally:
Processor.readonly_locker.release()
def _lock_soft_path(self, path):
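        # Soft locks serialize processors working on the same path: if another
        # processor already holds it, raise PairInterrupt so the pair gets
        # requeued instead of being handled concurrently.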
log.trace("Soft locking: %s", path)
path = path.lower()
Processor.path_locker.acquire()
if self._engine.get_uid() not in Processor.soft_locks:
Processor.soft_locks[self._engine.get_uid()] = dict()
try:
if path in Processor.soft_locks[self._engine.get_uid()]:
raise PairInterrupt
else:
Processor.soft_locks[self._engine.get_uid()][path] = True
return path
finally:
Processor.path_locker.release()
def _lock_path(self, path):
log.trace("Get lock for '%s'", path)
lock = None
Processor.path_locker.acquire()
if self._engine.get_uid() not in Processor.path_locks:
Processor.path_locks[self._engine.get_uid()] = dict()
        try:
            if path in Processor.path_locks[self._engine.get_uid()]:
                lock = Processor.path_locks[self._engine.get_uid()][path]
            else:
                lock = Lock()
            # Register the lock while still holding the class locker so two
            # threads cannot hand out different Lock objects for one path
            Processor.path_locks[self._engine.get_uid()][path] = lock
        finally:
            Processor.path_locker.release()
        log.trace("Locking '%s'", path)
        lock.acquire()
def _unlock_path(self, path):
log.trace("Unlocking '%s'", path)
Processor.path_locker.acquire()
if self._engine.get_uid() not in Processor.path_locks:
Processor.path_locks[self._engine.get_uid()] = dict()
try:
if path in Processor.path_locks[self._engine.get_uid()]:
Processor.path_locks[self._engine.get_uid()][path].release()
del Processor.path_locks[self._engine.get_uid()][path]
finally:
Processor.path_locker.release()
def _clean(self, reason, e=None):
super(Processor, self)._clean(reason, e)
if reason == 'exception':
# Add it back to the queue ? Add the error delay
if self._current_doc_pair is not None:
self.increase_error(self._current_doc_pair, "EXCEPTION", exception=e)
def acquire_state(self, row_id):
if self._dao.acquire_processor(self._thread_id, row_id):
return self._dao.get_state_from_id(row_id)
return None
def release_state(self):
self._dao.release_processor(self._thread_id)
def _execute(self):
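        # Main loop: pull items from the queue manager and dispatch each doc
        # pair to its _synchronize_<pair_state> handler until the queue is
        # empty or the thread is interrupted.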
self._current_metrics = dict()
self._current_item = self._get_item()
soft_lock = None
        while self._current_item is not None:
# Take client every time as it is cached in engine
local_client = self._engine.get_local_client()
remote_client = self._engine.get_remote_client()
doc_pair = self.acquire_state(self._current_item.id)
log.debug('Executing processor on %r', doc_pair)
self._current_doc_pair = doc_pair
self._current_temp_file = None
try:
if (doc_pair is None or
doc_pair.pair_state == 'synchronized'
or doc_pair.pair_state == 'unsynchronized'
or doc_pair.pair_state is None
or doc_pair.pair_state.startswith('parent_')):
log.trace("Skip as pair is None or in non-processable state: %r", doc_pair)
self._current_item = self._get_item()
continue
                # TODO Update as the server doesn't take a hash to avoid conflicts yet
if (doc_pair.pair_state.startswith("locally")
and doc_pair.remote_ref is not None):
try:
remote_info = remote_client.get_info(doc_pair.remote_ref)
if remote_info.digest != doc_pair.remote_digest:
doc_pair.remote_state = 'modified'
self._refresh_remote(doc_pair, remote_client, remote_info)
# Can run into conflict
if doc_pair.pair_state == 'conflicted':
self._current_item = self._get_item()
continue
doc_pair = self._dao.get_state_from_id(doc_pair.id)
if doc_pair is None:
self._current_item = self._get_item()
continue
except NotFound:
doc_pair.remote_ref = None
parent_path = doc_pair.local_parent_path
                if parent_path == '':
                    parent_path = "/"
if not local_client.exists(parent_path):
if doc_pair.pair_state == "remotely_deleted":
self._dao.remove_state(doc_pair)
continue
self._handle_no_parent(doc_pair, local_client, remote_client)
self._current_item = self._get_item()
continue
self._current_metrics = dict()
handler_name = '_synchronize_' + doc_pair.pair_state
self._action = Action(handler_name)
sync_handler = getattr(self, handler_name, None)
if sync_handler is None:
log.debug("Unhandled pair_state: %r for %r",
doc_pair.pair_state, doc_pair)
self.increase_error(doc_pair, "ILLEGAL_STATE")
self._current_item = self._get_item()
continue
else:
self._current_metrics = dict()
self._current_metrics["handler"] = doc_pair.pair_state
self._current_metrics["start_time"] = current_milli_time()
log.trace("Calling %s on doc pair %r", sync_handler, doc_pair)
try:
soft_lock = self._lock_soft_path(doc_pair.local_path)
sync_handler(doc_pair, local_client, remote_client)
self._current_metrics["end_time"] = current_milli_time()
self.pairSync.emit(doc_pair, self._current_metrics)
# TO_REVIEW May have a call to reset_error
log.trace("Finish %s on doc pair %r", sync_handler, doc_pair)
except ThreadInterrupt:
raise
except PairInterrupt:
from time import sleep
                        # Wait one second to avoid retrying too quickly
self._current_doc_pair = None
log.debug("PairInterrupt wait 1s and requeue on %r", doc_pair)
sleep(1)
self._engine.get_queue_manager().push(doc_pair)
continue
except Exception as e:
log.exception(e)
self.increase_error(doc_pair, "SYNC HANDLER: %s" % handler_name, exception=e)
self._current_item = self._get_item()
continue
except ThreadInterrupt:
self._engine.get_queue_manager().push(doc_pair)
raise
except Exception as e:
log.exception(e)
self.increase_error(doc_pair, "EXCEPTION", exception=e)
raise e
finally:
if soft_lock is not None:
self._unlock_soft_path(soft_lock)
self.release_state()
self._interact()
self._current_item = self._get_item()
def _synchronize_conflicted(self, doc_pair, local_client, remote_client):
# Auto-resolve conflict
if not doc_pair.folderish:
if local_client.is_equal_digests(doc_pair.local_digest, doc_pair.remote_digest, doc_pair.local_path):
log.debug("Auto-resolve conflict has digest are the same")
self._dao.synchronize_state(doc_pair)
elif local_client.get_remote_id(doc_pair.local_path) == doc_pair.remote_ref:
log.debug("Auto-resolve conflict has folder has same remote_id")
self._dao.synchronize_state(doc_pair)
def _handle_no_parent(self, doc_pair, local_client, remote_client):
log.trace("Republish as parent doesn't exist : %r", doc_pair)
self.increase_error(doc_pair, error="NO_PARENT")
def _update_speed_metrics(self):
action = Action.get_last_file_action()
if action:
duration = action.end_time - action.start_time
# Too fast for clock resolution
if duration <= 0:
return
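            # action.size is in bytes and duration in milliseconds, so the
            # factor of 1000 yields bytes per second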
speed = (action.size / duration) * 1000
log.trace("Transfer speed %d ko/s", speed / 1024)
self._current_metrics["speed"] = speed
def _synchronize_locally_modified(self, doc_pair, local_client, remote_client):
if doc_pair.local_digest == UNACCESSIBLE_HASH:
# Try to update
info = local_client.get_info(doc_pair.local_path)
log.trace("Modification of postponed local file: %r", doc_pair)
self._dao.update_local_state(doc_pair, info, versionned=False, queue=False)
doc_pair.local_digest = info.get_digest()
if doc_pair.local_digest == UNACCESSIBLE_HASH:
self._postpone_pair(doc_pair)
return
if not local_client.is_equal_digests(doc_pair.local_digest, doc_pair.remote_digest, doc_pair.local_path):
if doc_pair.remote_can_update:
if doc_pair.local_digest == UNACCESSIBLE_HASH:
self._postpone_pair(doc_pair)
return
log.debug("Updating remote document '%s'.",
doc_pair.local_name)
fs_item_info = remote_client.stream_update(
doc_pair.remote_ref,
local_client._abspath(doc_pair.local_path),
parent_fs_item_id=doc_pair.remote_parent_ref,
filename=doc_pair.remote_name,# Use remote name to avoid rename in case of duplicate
)
self._dao.update_last_transfer(doc_pair.id, "upload")
self._update_speed_metrics()
self._dao.update_remote_state(doc_pair, fs_item_info, versionned=False)
# TODO refresh_client
else:
log.debug("Skip update of remote document '%s'"\
" as it is readonly.",
doc_pair.local_name)
if self._engine.local_rollback():
local_client.delete(doc_pair.local_path)
self._dao.mark_descendants_remotely_created(doc_pair)
else:
log.debug("Set pair unsynchronized: %r", doc_pair)
self._dao.synchronize_state(doc_pair, state='unsynchronized')
self._handle_unsynchronized(local_client, doc_pair)
return
self._dao.synchronize_state(doc_pair)
def _get_normal_state_from_remote_ref(self, ref):
# TODO Select the only states that is not a collection
return self._dao.get_normal_state_from_remote(ref)
def _postpone_pair(self, doc_pair):
# Wait 60s for it
log.trace("Postpone creation of local file: %r", doc_pair)
doc_pair.error_count = 1
self._engine.get_queue_manager().push_error(doc_pair, exception=None)
def _synchronize_locally_created(self, doc_pair, local_client, remote_client):
name = os.path.basename(doc_pair.local_path)
remote_ref = local_client.get_remote_id(doc_pair.local_path)
# Find the parent pair to find the ref of the remote folder to
# create the document
parent_pair = self._dao.get_state_from_local(doc_pair.local_parent_path)
if parent_pair is None:
# Try to get it from xattr
log.trace("Fallback to xattr")
if local_client.exists(doc_pair.local_parent_path):
parent_ref = local_client.get_remote_id(doc_pair.local_parent_path)
parent_pair = self._get_normal_state_from_remote_ref(parent_ref)
if parent_pair is None or parent_pair.remote_ref is None:
# Illegal state: report the error and let's wait for the
# parent folder issue to get resolved first
raise ValueError(
"Parent folder of %s is not bound to a remote folder"
% doc_pair.local_parent_path)
if remote_ref is not None and '#' in remote_ref:
# TODO Decide what to do
log.warn("This document %r has remote_ref %s", doc_pair, remote_ref)
# Get the remote doc
            # Verify it is not already synced elsewhere (a missed move?)
            # If the hash is the same, don't do anything and reconcile
remote_doc_client = self._engine.get_remote_doc_client()
uid = remote_ref.split('#')[-1]
info = remote_doc_client.get_info(uid, raise_if_missing=False, use_trash=False)
if info and info.state == 'deleted':
log.debug("Untrash from the client")
remote_parent_path = parent_pair.remote_parent_path + '/' + parent_pair.remote_ref
remote_doc_client.undelete(uid)
fs_item_info = remote_client.get_info(remote_ref)
if not fs_item_info.path.startswith(remote_parent_path):
fs_item_info = remote_client.move(fs_item_info.uid, parent_pair.remote_ref)
self._dao.update_remote_state(doc_pair, fs_item_info, remote_parent_path, versionned=False)
self._dao.synchronize_state(doc_pair)
return
parent_ref = parent_pair.remote_ref
if parent_pair.remote_can_create_child:
remote_parent_path = parent_pair.remote_parent_path + '/' + parent_pair.remote_ref
if doc_pair.folderish:
log.debug("Creating remote folder '%s' in folder '%s'",
name, parent_pair.remote_name)
fs_item_info = remote_client.make_folder(parent_ref, name)
remote_ref = fs_item_info.uid
else:
# TODO Check if the file is already on the server with the good digest
log.debug("Creating remote document '%s' in folder '%s'",
name, parent_pair.remote_name)
info = local_client.get_info(doc_pair.local_path)
if info.size != doc_pair.size:
                # Size has changed (a copy may still be running)
doc_pair.local_digest = UNACCESSIBLE_HASH
self._dao.update_local_state(doc_pair, info, versionned=False, queue=False)
self._postpone_pair(doc_pair)
return
if doc_pair.local_digest == UNACCESSIBLE_HASH:
doc_pair.local_digest = info.get_digest()
log.trace("Creation of postponed local file: %r", doc_pair)
self._dao.update_local_state(doc_pair, info, versionned=False, queue=False)
if doc_pair.local_digest == UNACCESSIBLE_HASH:
self._postpone_pair(doc_pair)
return
fs_item_info = remote_client.stream_file(
parent_ref, local_client._abspath(doc_pair.local_path), filename=name)
remote_ref = fs_item_info.uid
self._dao.update_last_transfer(doc_pair.id, "upload")
self._update_speed_metrics()
self._dao.update_remote_state(doc_pair, fs_item_info, remote_parent_path,
versionned=False)
log.trace("Put remote_ref in %s", remote_ref)
try:
local_client.set_remote_id(doc_pair.local_path, remote_ref)
except (NotFound, IOError):
new_pair = self._dao.get_state_from_id(doc_pair.id)
local_client.set_remote_id(new_pair.local_path, remote_ref)
# File has been moved during creation
self._synchronize_locally_moved(new_pair, local_client, remote_client, update=False)
return
self._dao.synchronize_state(doc_pair)
else:
child_type = 'folder' if doc_pair.folderish else 'file'
log.warning("Won't synchronize %s '%s' created in"
" local folder '%s' since it is readonly",
child_type, doc_pair.local_name, parent_pair.local_name)
if doc_pair.folderish:
doc_pair.remote_can_create_child = False
if self._engine.local_rollback():
local_client.delete(doc_pair.local_path)
self._dao.remove_state(doc_pair)
else:
log.debug("Set pair unsynchronized: %r", doc_pair)
self._dao.synchronize_state(doc_pair, state='unsynchronized')
self._handle_unsynchronized(local_client, doc_pair)
def _synchronize_locally_deleted(self, doc_pair, local_client, remote_client):
if doc_pair.remote_ref is not None:
if doc_pair.remote_can_delete:
log.debug("Deleting or unregistering remote document"
" '%s' (%s)",
doc_pair.remote_name, doc_pair.remote_ref)
if doc_pair.remote_state != 'deleted':
remote_client.delete(doc_pair.remote_ref,
parent_fs_item_id=doc_pair.remote_parent_ref)
self._dao.remove_state(doc_pair)
else:
log.debug("Marking %s as remotely created since remote"
" document '%s' (%s) can not be deleted: either"
" it is readonly or it is a virtual folder that"
" doesn't exist in the server hierarchy",
doc_pair, doc_pair.remote_name, doc_pair.remote_ref)
if doc_pair.remote_state != 'deleted':
self._dao.mark_descendants_remotely_created(doc_pair)
else:
self._dao.remove_state(doc_pair)
def _synchronize_locally_moved_remotely_modified(self, doc_pair, local_client, remote_client):
self._synchronize_locally_moved(doc_pair, local_client, remote_client, update=False)
self._synchronize_remotely_modified(doc_pair, local_client, remote_client)
def _synchronize_locally_moved_created(self, doc_pair, local_client, remote_client):
doc_pair.remote_ref = None
self._synchronize_locally_created(doc_pair, local_client, remote_client)
def _synchronize_locally_moved(self, doc_pair, local_client, remote_client, update=True):
        # A file has been moved locally; replicate the rename/move on the
        # server, handling any error raised by the remote operation
renamed = False
moved = False
if doc_pair.local_name != doc_pair.remote_name:
try:
if doc_pair.remote_can_rename:
                    log.debug('Renaming remote file according to local: %r',
                              doc_pair)
remote_info = remote_client.rename(doc_pair.remote_ref,
doc_pair.local_name)
renamed = True
self._refresh_remote(doc_pair, remote_client, remote_info=remote_info)
else:
self._handle_failed_remote_rename(doc_pair, doc_pair)
return
except Exception as e:
log.debug(e)
self._handle_failed_remote_rename(doc_pair, doc_pair)
return
parent_pair = None
parent_ref = local_client.get_remote_id(doc_pair.local_parent_path)
if parent_ref is None:
parent_pair = self._dao.get_state_from_local(doc_pair.local_parent_path)
else:
parent_pair = self._get_normal_state_from_remote_ref(parent_ref)
if parent_pair is None:
raise Exception("Should have a parent pair")
if parent_ref != doc_pair.remote_parent_ref:
if doc_pair.remote_can_delete:
                log.debug('Moving remote file according to local: %r', doc_pair)
# Bug if move in a parent with no rights / partial move
# if rename at the same time
parent_path = parent_pair.remote_parent_path + "/" + parent_pair.remote_ref
remote_info = remote_client.move(doc_pair.remote_ref,
parent_pair.remote_ref)
moved = True
self._dao.update_remote_state(doc_pair, remote_info, parent_path, versionned=False)
else:
# Move it back
self._handle_failed_remote_move(doc_pair, doc_pair)
# Handle modification at the same time if needed
if update:
self._synchronize_locally_modified(doc_pair, local_client, remote_client)
def _synchronize_deleted_unknown(self, doc_pair, local_client, remote_client):
# Somehow a pair can get to an inconsistent state:
# <local_state=u'deleted', remote_state=u'unknown',
# pair_state=u'unknown'>
# Even though we are not able to figure out how this can happen we
# need to handle this case to put the database back to a consistent
# state.
# This is tracked by https://jira.nuxeo.com/browse/NXP-14039
log.debug("Inconsistency should not happens anymore")
log.debug("Detected inconsistent doc pair %r, deleting it hoping the"
" synchronizer will fix this case at next iteration",
doc_pair)
self._dao.remove_state(doc_pair)
def _get_temporary_file(self, file_path):
from nxdrive.client.base_automation_client import DOWNLOAD_TMP_FILE_PREFIX
from nxdrive.client.base_automation_client import DOWNLOAD_TMP_FILE_SUFFIX
file_dir = os.path.dirname(file_path)
file_name = os.path.basename(file_path)
file_out = os.path.join(file_dir, DOWNLOAD_TMP_FILE_PREFIX + file_name
+ DOWNLOAD_TMP_FILE_SUFFIX)
return file_out
def _download_content(self, local_client, remote_client, doc_pair, file_path):
# Check if the file is already on the HD
pair = self._dao.get_valid_duplicate_file(doc_pair.remote_digest)
if pair:
import shutil
file_out = self._get_temporary_file(file_path)
shutil.copy(local_client._abspath(pair.local_path), file_out)
return file_out
tmp_file = remote_client.stream_content(
doc_pair.remote_ref, file_path,
parent_fs_item_id=doc_pair.remote_parent_ref)
self._update_speed_metrics()
return tmp_file
def _synchronize_remotely_modified(self, doc_pair, local_client, remote_client):
tmp_file = None
try:
is_renaming = doc_pair.remote_name != doc_pair.local_name
if (not local_client.is_equal_digests(doc_pair.local_digest, doc_pair.remote_digest, doc_pair.local_path)
and doc_pair.local_digest is not None):
os_path = local_client._abspath(doc_pair.local_path)
if is_renaming:
new_os_path = os.path.join(os.path.dirname(os_path),
doc_pair.remote_name)
log.debug("Replacing local file '%s' by '%s'.",
os_path, new_os_path)
else:
new_os_path = os_path
log.debug("Updating content of local file '%s'.",
os_path)
tmp_file = self._download_content(local_client, remote_client, doc_pair, new_os_path)
# Delete original file and rename tmp file
remote_id = local_client.get_remote_id(doc_pair.local_path)
local_client.delete_final(doc_pair.local_path)
updated_info = local_client.rename(
local_client.get_path(tmp_file),
doc_pair.remote_name)
if remote_id is not None:
local_client.set_remote_id(doc_pair.local_parent_path + '/' + doc_pair.remote_name,
doc_pair.remote_ref)
doc_pair.local_digest = updated_info.get_digest()
self._dao.update_last_transfer(doc_pair.id, "download")
self._refresh_local_state(doc_pair, updated_info)
else:
                # digests agree, so this might be a renaming and/or a move;
                # no need to transfer additional bytes over the network
is_move, new_parent_pair = self._is_remote_move(doc_pair)
if remote_client.is_filtered(doc_pair.remote_parent_path):
                    # A move to a filtered parent (treat it as a deletion)
self._synchronize_remotely_deleted(doc_pair, local_client, remote_client)
return
if not is_move and not is_renaming:
log.debug("No local impact of metadata update on"
" document '%s'.", doc_pair.remote_name)
else:
file_or_folder = 'folder' if doc_pair.folderish else 'file'
if (is_move or is_renaming) and doc_pair.folderish:
self._engine.set_local_folder_lock(doc_pair.local_path)
if is_move:
# move and potential rename
moved_name = doc_pair.remote_name if is_renaming else doc_pair.local_name
log.debug("Moving local %s '%s' to '%s'.",
file_or_folder, local_client._abspath(doc_pair.local_path),
local_client._abspath(new_parent_pair.local_path + '/' + moved_name))
# May need to add a lock for move
updated_info = local_client.move(doc_pair.local_path,
new_parent_pair.local_path, name=moved_name)
new_parent_path = new_parent_pair.remote_parent_path + "/" + new_parent_pair.remote_ref
self._dao.update_remote_parent_path(doc_pair, new_parent_path)
elif is_renaming:
# renaming
log.debug("Renaming local %s '%s' to '%s'.",
file_or_folder, local_client._abspath(doc_pair.local_path),
doc_pair.remote_name)
updated_info = local_client.rename(
doc_pair.local_path, doc_pair.remote_name)
if is_move or is_renaming:
# Should call a DAO method
new_path = os.path.dirname(updated_info.path)
self._dao.update_local_parent_path(doc_pair, os.path.basename(updated_info.path), new_path)
self._refresh_local_state(doc_pair, updated_info)
self._handle_readonly(local_client, doc_pair)
self._dao.synchronize_state(doc_pair)
except (IOError, WindowsError) as e:
log.warning(
"Delaying local update of remotely modified content %r due to"
" concurrent file access (probably opened by another"
" process).",
doc_pair)
raise e
finally:
if tmp_file is not None:
try:
os.remove(tmp_file)
except (IOError, WindowsError):
pass
if doc_pair.folderish:
# Release folder lock in any case
self._engine.release_folder_lock()
def _synchronize_remotely_created(self, doc_pair, local_client, remote_client):
name = doc_pair.remote_name
# Find the parent pair to find the path of the local folder to
# create the document into
parent_pair = self._get_normal_state_from_remote_ref(doc_pair.remote_parent_ref)
if parent_pair is None:
# Illegal state: report the error and let's wait for the
# parent folder issue to get resolved first
raise ValueError(
"Could not find parent folder of doc %r (%r)"
" folder" % (name, doc_pair.remote_ref))
if parent_pair.local_path is None:
# Illegal state: report the error and let's wait for the
# parent folder issue to get resolved first
raise ValueError(
"Parent folder of doc %r (%r) is not bound to a local"
" folder" % (name, doc_pair.remote_ref))
path = doc_pair.remote_parent_path + '/' + doc_pair.remote_ref
if remote_client.is_filtered(path):
# It is filtered so skip and remove from the LastKnownState
self._dao.remove_state(doc_pair)
return
if not local_client.exists(doc_pair.local_path):
path = self._create_remotely(local_client, remote_client, doc_pair, parent_pair, name)
else:
path = doc_pair.local_path
remote_ref = local_client.get_remote_id(doc_pair.local_path)
if remote_ref is not None and remote_ref == doc_pair.remote_ref:
log.debug('remote_ref (xattr) = %s, doc_pair.remote_ref = %s => setting conflicted state', remote_ref,
doc_pair.remote_ref)
# Set conflict state for now
# TO_REVIEW May need to overwrite
self._dao.set_conflict_state(doc_pair)
return
elif remote_ref is not None:
# Case of several documents with same name or case insensitive hard drive
# TODO dedup
path = self._create_remotely(local_client, remote_client, doc_pair, parent_pair, name)
local_client.set_remote_id(path, doc_pair.remote_ref)
self._handle_readonly(local_client, doc_pair)
self._refresh_local_state(doc_pair, local_client.get_info(path))
if not self._dao.synchronize_state(doc_pair):
log.debug("Pair is not in synchronized state (version issue): %r", doc_pair)
def _create_remotely(self, local_client, remote_client, doc_pair, parent_pair, name):
local_parent_path = parent_pair.local_path
# TODO Shared this locking system / Can have concurrent lock
self._unlock_readonly(local_client, local_parent_path)
tmp_file = None
try:
if doc_pair.folderish:
log.debug("Creating local folder '%s' in '%s'", name,
local_client._abspath(parent_pair.local_path))
path = local_client.make_folder(local_parent_path, name)
else:
path, os_path, name = local_client.get_new_file(local_parent_path,
name)
log.debug("Creating local file '%s' in '%s'", name,
local_client._abspath(parent_pair.local_path))
tmp_file = self._download_content(local_client, remote_client, doc_pair, os_path)
# Rename tmp file
local_client.rename(local_client.get_path(tmp_file), name)
self._dao.update_last_transfer(doc_pair.id, "download")
finally:
self._lock_readonly(local_client, local_parent_path)
# Clean .nxpart if needed
if tmp_file is not None and os.path.exists(tmp_file):
os.remove(tmp_file)
return path
def _synchronize_remotely_deleted(self, doc_pair, local_client, remote_client):
try:
if doc_pair.local_state != 'deleted':
log.debug("Deleting locally %s", local_client._abspath(doc_pair.local_path))
if doc_pair.folderish:
self._engine.set_local_folder_lock(doc_pair.local_path)
else:
# Check for nxpart to clean up
file_out = self._get_temporary_file(local_client._abspath(doc_pair.local_path))
if os.path.exists(file_out):
os.remove(file_out)
if self._engine.use_trash():
local_client.delete(doc_pair.local_path)
else:
local_client.delete_final(doc_pair.local_path)
self._dao.remove_state(doc_pair)
except (IOError, WindowsError) as e:
# Under Windows deletion can be impossible while another
# process is accessing the same file (e.g. word processor)
# TODO: be more specific as detecting this case:
# shall we restrict to the case e.errno == 13 ?
log.warning(
"Delaying local deletion of remotely deleted item %r due to"
" concurrent file access (probably opened by another"
" process).", doc_pair)
raise e
finally:
if doc_pair.folderish:
self._engine.release_folder_lock()
def _synchronize_unknown_deleted(self, doc_pair, local_client, remote_client):
# Somehow a pair can get to an inconsistent state:
# <local_state=u'unknown', remote_state=u'deleted',
# pair_state=u'unknown'>
# Even though we are not able to figure out how this can happen we
# need to handle this case to put the database back to a consistent
# state.
# This is tracked by https://jira.nuxeo.com/browse/NXP-13216
log.debug("Inconsistency should not happens anymore")
log.debug("Detected inconsistent doc pair %r, deleting it hoping the"
" synchronizer will fix this case at next iteration",
doc_pair)
self._dao.remove_state(doc_pair)
if doc_pair.local_path is not None:
log.debug("Since the local path is not None: %s, the synchronizer"
" will probably consider this as a local creation at"
" next iteration and create the file or folder remotely",
doc_pair.local_path)
else:
log.debug("Since the local path is None the synchronizer will"
" probably do nothing at next iteration")
def _refresh_remote(self, doc_pair, remote_client, remote_info=None):
        if remote_info is None:
            remote_info = remote_client.get_info(doc_pair.remote_ref)
self._dao.update_remote_state(doc_pair, remote_info, versionned=False)
def _refresh_local_state(self, doc_pair, local_info):
if doc_pair.local_digest is None and not doc_pair.folderish:
doc_pair.local_digest = local_info.get_digest()
self._dao.update_local_state(doc_pair, local_info, versionned=False)
doc_pair.local_path = local_info.path
doc_pair.local_name = os.path.basename(local_info.path)
doc_pair.last_local_updated = local_info.last_modification_time
def _is_remote_move(self, doc_pair):
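        # Returns an (is_move, remote_parent_pair) tuple; is_move is True when
        # the local and remote parents resolve to different known pairs.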
local_parent_pair = self._dao.get_state_from_local(doc_pair.local_parent_path)
remote_parent_pair = self._get_normal_state_from_remote_ref(doc_pair.remote_parent_ref)
return (local_parent_pair is not None
and remote_parent_pair is not None
and local_parent_pair.id != remote_parent_pair.id,
remote_parent_pair)
def _handle_failed_remote_move(self, source_pair, target_pair):
pass
def _handle_failed_remote_rename(self, source_pair, target_pair):
        # The remote rename failed; try to roll back the local rename
log.error("Renaming from %s to %s canceled",
target_pair.remote_name, target_pair.local_name)
if self._engine.local_rollback():
try:
local_client = self._engine.get_local_client()
info = local_client.rename(target_pair.local_path,
target_pair.remote_name)
self._dao.update_local_state(source_pair, info)
if source_pair != target_pair:
if target_pair.folderish:
# Remove "new" created tree
pairs = self._dao.get_states_from_partial_local(
target_pair.local_path).all()
for pair in pairs:
self._dao.remove_state(pair)
pairs = self._dao.get_states_from_partial_local(
source_pair.local_path).all()
for pair in pairs:
self._dao.synchronize_state(pair)
else:
self._dao.remove_state(target_pair)
# Mark all local as unknown
#self._mark_unknown_local_recursive(session, source_pair)
self._dao.synchronize_state(source_pair)
return True
            except Exception as e:
log.error("Can't rollback local modification")
log.debug(e)
return False
def _is_locally_edited_folder(self, doc_pair):
return doc_pair.local_path.endswith(LOCALLY_EDITED_FOLDER_NAME)
def _handle_unsynchronized(self, local_client, doc_pair):
# Used for overwrite
pass
def _handle_readonly(self, local_client, doc_pair):
# Don't use readonly on folder for win32 and on Locally Edited
if (doc_pair.folderish and os.sys.platform == 'win32'
or self._is_locally_edited_folder(doc_pair)):
return
if doc_pair.is_readonly():
local_client.set_readonly(doc_pair.local_path)
else:
local_client.unset_readonly(doc_pair.local_path)
|
"""Qarbon's main python package."""
from qarbon.release import version as __version__
__all__ = []
|
'''
Created on 2013-04-22 14:54
@summary:
@author: Martin predki
'''
from PySchedGUI.PySchedUI import FileUtils
import os
import readline
import atexit
class PathCompleter(object):
    '''
    @summary: Implements filesystem path auto-completion for all user input,
    persisting the readline input history across sessions
    '''
def __init__(self):
history_file = os.path.expanduser('~/.PySchedUI_history')
atexit.register(readline.write_history_file, history_file)
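        # NOTE: the history is only written at exit; reading a pre-existing
        # history file back in is left to the caller.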
def _listdir(self, root):
"List directory 'root' appending the path separator to subdirs."
res = []
for name in os.listdir(root):
path = os.path.join(root, name)
if os.path.isdir(path):
name += os.sep
res.append(name)
return res
    def complete(self, text, state):
        "Perform completion of filesystem path."
        line = readline.get_line_buffer().split()
        if not line:
            return None
        dirname, rest = os.path.split(line[0])
        dirname = FileUtils.expandPath(dirname)
        rest = FileUtils.expandPath(rest)
if dirname == '':
dirname = '/'
if rest == '':
name = self._listdir(dirname)
return name[state]
else:
tmp = self._listdir(dirname)
name = []
for t in tmp:
if t.startswith(rest):
name.append(t)
return name[state]
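# Example wiring (a minimal sketch; assumes a readline-capable shell):
#   completer = PathCompleter()
#   readline.set_completer(completer.complete)
#   readline.parse_and_bind('tab: complete')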
|
""" the slowest set of tests
tests that a schedule set to fire every minute
actually does, without speedup
"""
import time
import logging
from datetime import datetime, timedelta
from rapidsms.tests.scripted import TestScript
import scheduler.app as scheduler_app
from scheduler.models import EventSchedule, ALL
class TestSlow (TestScript):
    apps = [scheduler_app.App]
def setUp(self):
global callback_counter
callback_counter = 0
TestScript.setUp(self)
EventSchedule.objects.all().delete()
def test_one_shot(self):
""" Test scheduler in real time"""
global callback_counter
self.router.start()
        schedule = EventSchedule(callback="scheduler.tests.slow.callback_func",
                                 minutes=ALL, callback_args=[3])
schedule.save()
time.sleep(180.0)
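        # one firing per minute for 3 minutes, each adding 3: 3 * 3 == 9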
        self.assertEquals(callback_counter, 9)
self.router.stop()
def callback_func(router, arg):
global callback_counter
print "adding %s to global_var (%s)" % (arg, callback_counter)
logging.info("adding %s to global_var (%s)" % (arg, callback_counter))
callback_counter = callback_counter + arg
|
"""HTTP enabled process executor."""
import socket
from mirakuru.compat import HTTPConnection, HTTPException, OK
from mirakuru.compat import urlparse
from mirakuru.tcp import TCPExecutor
class HTTPExecutor(TCPExecutor):
"""Http enabled process executor."""
def __init__(self, command, url, **kwargs):
"""
Initialize HTTPExecutor executor.
:param (str, list) command: command to be run by the subprocess
:param str url: URL that executor checks to verify
if process has already started.
:param bool shell: same as the `subprocess.Popen` shell definition
:param int timeout: number of seconds to wait for the process to start
or stop. If None or False, wait indefinitely.
:param float sleep: how often to check for start/stop condition
:param int sig_stop: signal used to stop process run by the executor.
default is `signal.SIGTERM`
:param int sig_kill: signal used to kill process run by the executor.
default is `signal.SIGKILL`
"""
self.url = urlparse(url)
"""
An :func:`urlparse.urlparse` representation of an url.
It'll be used to check process status on."""
super(HTTPExecutor, self).__init__(
command, host=self.url.hostname, port=self.url.port, **kwargs
)
def after_start_check(self):
"""Check if defined url returns successful head."""
try:
conn = HTTPConnection(self.url.hostname, self.url.port)
conn.request('HEAD', self.url.path)
response = conn.getresponse()
            if response.status == OK:
conn.close()
return True
except (HTTPException, socket.timeout, socket.error):
return False
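# Example usage (a minimal sketch; the command and URL are illustrative):
#   executor = HTTPExecutor('./my_server --port 8000',
#                           url='http://127.0.0.1:8000/')
#   with executor:  # starts the process, waits for HEAD / to return 200 OK
#       pass        # talk to the server here; it is stopped on exit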
|
from flask import g, jsonify, request
from functools import wraps
from service import app
from service.errors import (BadRequestError, UnauthorizedError, ForbiddenError,
InternalServerError)
from service.utils import load_auth_client, get_token
def authenticated(fn):
"""Mark a route as requiring authentication."""
@wraps(fn)
def decorated_function(*args, **kwargs):
if 'Authorization' not in request.headers:
raise UnauthorizedError()
# Get the access token from the request
token = get_token(request.headers['Authorization'])
# Call token introspect
client = load_auth_client()
token_meta = client.oauth2_token_introspect(token)
if not token_meta.get('active'):
raise ForbiddenError()
# Verify that the "audience" for this token is our service
if 'GlobusWorld Resource Server' not in token_meta.get('aud', []):
raise ForbiddenError()
portal_client_id = app.config['PORTAL_CLIENT_ID']
# Verify that the identities_set from the token introspection
# includes the portal client identity id
if portal_client_id != token_meta.get('sub'):
raise ForbiddenError()
# Token has passed verification so we attach it to the
# request global object and proceed
g.req_token = token
return fn(*args, **kwargs)
return decorated_function
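# Example of a route guarded by the decorator (a sketch; the endpoint name
# is illustrative):
# @app.route('/api/data')
# @authenticated
# def api_data():
#     return jsonify(token=g.req_token)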
@app.errorhandler(BadRequestError)
def handle_badrequest_error(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.errorhandler(InternalServerError)
def handle_internalserver_error(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.errorhandler(UnauthorizedError)
def handle_unauthorized_error(error):
response = jsonify(error.to_dict())
response.headers['WWW-Authenticate'] = \
'Bearer realm="urn:globus:auth:scope:demo-resource-server:all"'
response.status_code = error.status_code
return response
@app.errorhandler(ForbiddenError)
def handle_forbidden_error(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
|
from game import Agent
from game import Actions
from game import Directions
import random
from util import manhattanDistance
import util
class GhostAgent( Agent ):
def __init__( self, index ):
self.index = index
def getAction( self, state ):
dist = self.getDistribution(state)
if len(dist) == 0:
return Directions.STOP
else:
return util.chooseFromDistribution( dist )
def getDistribution(self, state):
"Returns a Counter encoding a distribution over actions from the provided state."
util.raiseNotDefined()
class RandomGhost( GhostAgent ):
"A ghost that chooses a legal action uniformly at random."
def getDistribution( self, state ):
dist = util.Counter()
for a in state.getLegalActions( self.index ): dist[a] = 1.0
dist.normalize()
return dist
class DirectionalGhost( GhostAgent ):
"A ghost that prefers to rush Pacman, or flee when scared."
def __init__( self, index, prob_attack=0.8, prob_scaredFlee=0.8 ):
self.index = index
self.prob_attack = prob_attack
self.prob_scaredFlee = prob_scaredFlee
def getDistribution( self, state ):
# Read variables from state
ghostState = state.getGhostState( self.index )
legalActions = state.getLegalActions( self.index )
pos = state.getGhostPosition( self.index )
isScared = ghostState.scaredTimer > 0
speed = 1
if isScared: speed = 0.5
actionVectors = [Actions.directionToVector( a, speed ) for a in legalActions]
newPositions = [( pos[0]+a[0], pos[1]+a[1] ) for a in actionVectors]
pacmanPosition = state.getPacmanPosition()
# Select best actions given the state
distancesToPacman = [manhattanDistance( pos, pacmanPosition ) for pos in newPositions]
if isScared:
bestScore = max( distancesToPacman )
bestProb = self.prob_scaredFlee
else:
bestScore = min( distancesToPacman )
bestProb = self.prob_attack
bestActions = [action for action, distance in zip( legalActions, distancesToPacman ) if distance == bestScore]
# Construct distribution
dist = util.Counter()
for a in bestActions: dist[a] = bestProb / len(bestActions)
for a in legalActions: dist[a] += ( 1-bestProb ) / len(legalActions)
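        # e.g. prob_attack=0.8 with 2 best actions out of 4 legal ones gives
        # each best action 0.8/2 + 0.2/4 = 0.45 and each other action 0.05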
dist.normalize()
return dist
|
"""
docs-mail-merge.py (Python 2.x or 3.x)
Google Docs (REST) API mail-merge sample app
"""
from __future__ import print_function
import time
from googleapiclient import discovery
from httplib2 import Http
from oauth2client import client, file, tools
DOCS_FILE_ID = 'YOUR_TMPL_DOC_FILE_ID'
SHEETS_FILE_ID = 'YOUR_SHEET_DATA_FILE_ID'
CLIENT_ID_FILE = 'credentials.json'
TOKEN_STORE_FILE = 'token.json'
SCOPES = ( # iterable or space-delimited string
'https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/documents',
'https://www.googleapis.com/auth/spreadsheets.readonly',
)
SOURCES = ('text', 'sheets')
SOURCE = 'text' # Choose one of the data SOURCES
COLUMNS = ['to_name', 'to_title', 'to_company', 'to_address']
TEXT_SOURCE_DATA = (
('Ms. Lara Brown', 'Googler', 'Google NYC', '111 8th Ave\n'
'New York, NY 10011-5201'),
('Mr. Jeff Erson', 'Googler', 'Google NYC', '76 9th Ave\n'
'New York, NY 10011-4962'),
)
def get_http_client():
"""Uses project credentials in CLIENT_ID_FILE along with requested OAuth2
scopes for authorization, and caches API tokens in TOKEN_STORE_FILE.
"""
store = file.Storage(TOKEN_STORE_FILE)
creds = store.get()
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets(CLIENT_ID_FILE, SCOPES)
creds = tools.run_flow(flow, store)
return creds.authorize(Http())
HTTP = get_http_client()
DRIVE = discovery.build('drive', 'v3', http=HTTP)
DOCS = discovery.build('docs', 'v1', http=HTTP)
SHEETS = discovery.build('sheets', 'v4', http=HTTP)
def get_data(source):
"""Gets mail merge data from chosen data source.
"""
if source not in {'sheets', 'text'}:
raise ValueError('ERROR: unsupported source %r; choose from %r' % (
source, SOURCES))
return SAFE_DISPATCH[source]()
def _get_text_data():
"""(private) Returns plain text data; can alter to read from CSV file.
"""
return TEXT_SOURCE_DATA
def _get_sheets_data(service=SHEETS):
"""(private) Returns data from Google Sheets source. It gets all rows of
'Sheet1' (the default Sheet in a new spreadsheet), but drops the first
(header) row. Use any desired data range (in standard A1 notation).
"""
return service.spreadsheets().values().get(spreadsheetId=SHEETS_FILE_ID,
range='Sheet1').execute().get('values')[1:] # skip header row
SAFE_DISPATCH = {k: globals().get('_get_%s_data' % k) for k in SOURCES}
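# Maps each supported source name to its private getter,
# e.g. 'text' -> _get_text_data, 'sheets' -> _get_sheets_data.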
def _copy_template(tmpl_id, source, service):
"""(private) Copies letter template document using Drive API then
returns file ID of (new) copy.
"""
body = {'name': 'Merged form letter (%s)' % source}
return service.files().copy(body=body, fileId=tmpl_id,
fields='id').execute().get('id')
def merge_template(tmpl_id, source, service):
"""Copies template document and merges data into newly-minted copy then
returns its file ID.
"""
# copy template and set context data struct for merging template values
copy_id = _copy_template(tmpl_id, source, service)
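    # 'merge' is the module-level dict populated under __main__ below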
context = merge.iteritems() if hasattr({}, 'iteritems') else merge.items()
# "search & replace" API requests for mail merge substitutions
reqs = [{'replaceAllText': {
'containsText': {
'text': '{{%s}}' % key.upper(), # {{VARS}} are uppercase
'matchCase': True,
},
'replaceText': value,
}} for key, value in context]
# send requests to Docs API to do actual merge
DOCS.documents().batchUpdate(body={'requests': reqs},
documentId=copy_id, fields='').execute()
return copy_id
if __name__ == '__main__':
# fill-in your data to merge into document template variables
merge = {
# sender data
'my_name': 'Ayme A. Coder',
'my_address': '1600 Amphitheatre Pkwy\n'
'Mountain View, CA 94043-1351',
'my_email': 'http://google.com',
'my_phone': '+1-650-253-0000',
# - - - - - - - - - - - - - - - - - - - - - - - - - -
# recipient data (supplied by 'text' or 'sheets' data source)
'to_name': None,
'to_title': None,
'to_company': None,
'to_address': None,
# - - - - - - - - - - - - - - - - - - - - - - - - - -
'date': time.strftime('%Y %B %d'),
# - - - - - - - - - - - - - - - - - - - - - - - - - -
'body': 'Google, headquartered in Mountain View, unveiled the new '
'Android phone at the Consumer Electronics Show. CEO Sundar '
'Pichai said in his keynote that users love their new phones.'
}
# get row data, then loop through & process each form letter
data = get_data(SOURCE) # get data from data source
for i, row in enumerate(data):
merge.update(dict(zip(COLUMNS, row)))
print('Merged letter %d: docs.google.com/document/d/%s/edit' % (
i + 1, merge_template(DOCS_FILE_ID, SOURCE, DRIVE)))
|
"""
OpenFlow 1.3 flow table for OFAgent
* requirements
** plain OpenFlow 1.3. no vendor extensions.
* legends
xxx: network id (agent internal use)
yyy: segment id (vlan id, gre key, ...)
a,b,c: tunnel port (tun_ofports, map[net_id].tun_ofports)
i,j,k: vm port (map[net_id].vif_ports[vif_id].ofport)
x,y,z: physical port (int_ofports)
N: tunnel type (0 for TYPE_GRE, 1 for TYPE_xxx, ...)
iii: unknown ip address
uuu: unicast l2 address
* tables (in order)
CHECK_IN_PORT
TUNNEL_IN+N
PHYS_IN
LOCAL_IN
ARP_PASSTHROUGH
ARP_RESPONDER
TUNNEL_OUT
LOCAL_OUT
PHYS_OUT
TUNNEL_FLOOD+N
PHYS_FLOOD
LOCAL_FLOOD
* CHECK_IN_PORT
    for each vm port:
// check_in_port_add_local_port, check_in_port_delete_port
in_port=i, write_metadata(LOCAL|xxx),goto(LOCAL_IN)
TYPE_GRE
        for each tunnel port:
// check_in_port_add_tunnel_port, check_in_port_delete_port
in_port=a, goto(TUNNEL_IN+N)
TYPE_VLAN
        for each network's ports:
// provision_tenant_physnet, reclaim_tenant_physnet
in_port=x,vlan_vid=present|yyy, write_metadata(xxx),goto(PHYS_IN)
TYPE_FLAT
// provision_tenant_physnet, reclaim_tenant_physnet
in_port=x, write_metadata(xxx),goto(PHYS_IN)
default drop
* TUNNEL_IN+N (per tunnel types) tunnel -> network
    for each network:
// provision_tenant_tunnel, reclaim_tenant_tunnel
tun_id=yyy, write_metadata(xxx),goto(TUNNEL_OUT)
default drop
* PHYS_IN
default goto(TUNNEL_OUT)
* LOCAL_IN
default goto(next_table)
* ARP_PASSTHROUGH
for each unknown tpa:
// arp_passthrough
arp,arp_op=request,metadata=xxx,tpa=iii, idle_timeout=5, goto(TUNNEL_OUT)
default goto(next_table)
* ARP_RESPONDER
arp,arp_op=request, output:controller
default goto(next_table)
* TUNNEL_OUT
TYPE_GRE
// !FLOODING_ENTRY
// install_tunnel_output, delete_tunnel_output
metadata=LOCAL|xxx,eth_dst=uuu set_tunnel(yyy),output:a
default goto(next table)
* LOCAL_OUT
for each known destinations:
// local_out_add_port, local_out_delete_port
metadata=xxx,eth_dst=uuu output:i
default goto(next table)
* PHYS_OUT
NOTE(yamamoto): currently this table is always empty.
default goto(next table)
* TUNNEL_FLOOD+N. (per tunnel types)
network -> tunnel/vlan
output to tunnel/physical ports
"next table" might be LOCAL_OUT
TYPE_GRE
        for each network:
// FLOODING_ENTRY
// install_tunnel_output, delete_tunnel_output
metadata=LOCAL|xxx, set_tunnel(yyy),output:a,b,c,goto(next table)
default goto(next table)
* PHYS_FLOOD
TYPE_VLAN
        for each network:
// provision_tenant_physnet, reclaim_tenant_physnet
metadata=LOCAL|xxx, push_vlan:0x8100,set_field:present|yyy->vlan_vid,
output:x,pop_vlan,goto(next table)
TYPE_FLAT
        for each network:
// provision_tenant_physnet, reclaim_tenant_physnet
metadata=LOCAL|xxx, output:x,goto(next table)
default goto(next table)
* LOCAL_FLOOD
    for each network:
// local_flood_update, local_flood_delete
metadata=xxx, output:i,j,k
or
metadata=xxx,eth_dst=broadcast, output:i,j,k
default drop
* references
** OVS agent https://wiki.openstack.org/wiki/Ovs-flow-logic
*** we use metadata instead of "internal" VLANs
*** we don't want to use NX learn action
"""
from ryu.lib.packet import arp
from ryu.ofproto import ether
from neutron.plugins.common import constants as p_const
import networking_ofagent.plugins.ofagent.agent.metadata as meta
from networking_ofagent.plugins.ofagent.agent import ofswitch
from networking_ofagent.plugins.ofagent.agent import tables
class OFAgentIntegrationBridge(ofswitch.OpenFlowSwitch):
"""ofagent br-int specific logic."""
def setup_default_table(self):
self.delete_flows()
self.install_default_drop(tables.CHECK_IN_PORT)
for t in tables.TUNNEL_IN.values():
self.install_default_drop(t)
self.install_default_goto(tables.PHYS_IN, tables.TUNNEL_OUT)
self.install_default_goto_next(tables.LOCAL_IN)
self.install_default_goto_next(tables.ARP_PASSTHROUGH)
self.install_arp_responder(tables.ARP_RESPONDER)
self.install_default_goto_next(tables.TUNNEL_OUT)
self.install_default_goto_next(tables.LOCAL_OUT)
self.install_default_goto_next(tables.PHYS_OUT)
for t in tables.TUNNEL_FLOOD.values():
self.install_default_goto_next(t)
self.install_default_goto_next(tables.PHYS_FLOOD)
self.install_default_drop(tables.LOCAL_FLOOD)
def install_arp_responder(self, table_id):
(dp, ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(eth_type=ether.ETH_TYPE_ARP,
arp_op=arp.ARP_REQUEST)
actions = [ofpp.OFPActionOutput(ofp.OFPP_CONTROLLER)]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions)]
msg = ofpp.OFPFlowMod(dp,
table_id=table_id,
priority=1,
match=match,
instructions=instructions)
self._send_msg(msg)
self.install_default_goto_next(table_id)
def install_tunnel_output(self, table_id,
network, segmentation_id,
ports, goto_next, **additional_matches):
(dp, ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(metadata=meta.mk_metadata(network, meta.LOCAL),
**additional_matches)
actions = [ofpp.OFPActionSetField(tunnel_id=segmentation_id)]
actions += [ofpp.OFPActionOutput(port=p) for p in ports]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
]
if goto_next:
instructions += [
ofpp.OFPInstructionGotoTable(table_id=table_id + 1),
]
msg = ofpp.OFPFlowMod(dp,
table_id=table_id,
priority=1,
match=match,
instructions=instructions)
self._send_msg(msg)
def delete_tunnel_output(self, table_id,
network, **additional_matches):
(dp, _ofp, ofpp) = self._get_dp()
self.delete_flows(table_id=table_id,
metadata=meta.mk_metadata(network, meta.LOCAL),
**additional_matches)
def provision_tenant_tunnel(self, network_type, network, segmentation_id):
(dp, _ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(tunnel_id=segmentation_id)
metadata = meta.mk_metadata(network)
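        # mk_metadata yields a (value, mask) pair consumed by write-metadata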
instructions = [
ofpp.OFPInstructionWriteMetadata(metadata=metadata[0],
metadata_mask=metadata[1]),
ofpp.OFPInstructionGotoTable(table_id=tables.TUNNEL_OUT),
]
msg = ofpp.OFPFlowMod(dp,
table_id=tables.TUNNEL_IN[network_type],
priority=1,
match=match,
instructions=instructions)
self._send_msg(msg)
def reclaim_tenant_tunnel(self, network_type, network, segmentation_id):
table_id = tables.TUNNEL_IN[network_type]
self.delete_flows(table_id=table_id, tunnel_id=segmentation_id)
def provision_tenant_physnet(self, network_type, network,
segmentation_id, phys_port):
"""for vlan and flat."""
        assert network_type in (p_const.TYPE_VLAN, p_const.TYPE_FLAT)
(dp, ofp, ofpp) = self._get_dp()
# inbound
metadata = meta.mk_metadata(network)
instructions = [
ofpp.OFPInstructionWriteMetadata(metadata=metadata[0],
metadata_mask=metadata[1])
]
if network_type == p_const.TYPE_VLAN:
vlan_vid = segmentation_id | ofp.OFPVID_PRESENT
match = ofpp.OFPMatch(in_port=phys_port, vlan_vid=vlan_vid)
actions = [ofpp.OFPActionPopVlan()]
instructions += [ofpp.OFPInstructionActions(
ofp.OFPIT_APPLY_ACTIONS, actions)]
else:
match = ofpp.OFPMatch(in_port=phys_port)
instructions += [ofpp.OFPInstructionGotoTable(table_id=tables.PHYS_IN)]
msg = ofpp.OFPFlowMod(dp,
priority=1,
table_id=tables.CHECK_IN_PORT,
match=match,
instructions=instructions)
self._send_msg(msg)
# outbound
match = ofpp.OFPMatch(metadata=meta.mk_metadata(network, meta.LOCAL))
if network_type == p_const.TYPE_VLAN:
actions = [
ofpp.OFPActionPushVlan(),
ofpp.OFPActionSetField(vlan_vid=vlan_vid),
]
else:
actions = []
actions += [ofpp.OFPActionOutput(port=phys_port)]
if network_type == p_const.TYPE_VLAN:
actions += [ofpp.OFPActionPopVlan()]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
ofpp.OFPInstructionGotoTable(table_id=tables.PHYS_FLOOD + 1),
]
msg = ofpp.OFPFlowMod(dp,
priority=1,
table_id=tables.PHYS_FLOOD,
match=match,
instructions=instructions)
self._send_msg(msg)
    def reclaim_tenant_physnet(self, network_type, network,
                               segmentation_id, phys_port):
        (_dp, ofp, _ofpp) = self._get_dp()
        if network_type == p_const.TYPE_VLAN:
            # compute vlan_vid only for vlan networks; flat networks carry
            # segmentation_id None
            vlan_vid = segmentation_id | ofp.OFPVID_PRESENT
            self.delete_flows(table_id=tables.CHECK_IN_PORT,
                              in_port=phys_port, vlan_vid=vlan_vid)
        else:
            self.delete_flows(table_id=tables.CHECK_IN_PORT,
                              in_port=phys_port)
        self.delete_flows(table_id=tables.PHYS_FLOOD,
                          metadata=meta.mk_metadata(network))
def check_in_port_add_tunnel_port(self, network_type, port):
(dp, _ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(in_port=port)
instructions = [
ofpp.OFPInstructionGotoTable(
table_id=tables.TUNNEL_IN[network_type])
]
msg = ofpp.OFPFlowMod(dp,
table_id=tables.CHECK_IN_PORT,
priority=1,
match=match,
instructions=instructions)
self._send_msg(msg)
def check_in_port_add_local_port(self, network, port):
(dp, ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(in_port=port)
metadata = meta.mk_metadata(network, meta.LOCAL)
instructions = [
ofpp.OFPInstructionWriteMetadata(metadata=metadata[0],
metadata_mask=metadata[1]),
ofpp.OFPInstructionGotoTable(table_id=tables.LOCAL_IN),
]
msg = ofpp.OFPFlowMod(dp,
table_id=tables.CHECK_IN_PORT,
priority=1,
match=match,
instructions=instructions)
self._send_msg(msg)
def check_in_port_delete_port(self, port):
self.delete_flows(table_id=tables.CHECK_IN_PORT, in_port=port)
def local_flood_update(self, network, ports, flood_unicast):
(dp, ofp, ofpp) = self._get_dp()
match_all = ofpp.OFPMatch(metadata=meta.mk_metadata(network))
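        # the (value, mask) pair below matches any eth_dst with the
        # multicast/group bit set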
match_multicast = ofpp.OFPMatch(metadata=meta.mk_metadata(network),
eth_dst=('01:00:00:00:00:00',
'01:00:00:00:00:00'))
if flood_unicast:
match_add = match_all
match_del = match_multicast
else:
match_add = match_multicast
match_del = match_all
actions = [ofpp.OFPActionOutput(port=p) for p in ports]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
]
msg = ofpp.OFPFlowMod(dp,
table_id=tables.LOCAL_FLOOD,
priority=1,
match=match_add,
instructions=instructions)
self._send_msg(msg)
self.delete_flows(table_id=tables.LOCAL_FLOOD, strict=True,
priority=1, match=match_del)
def local_flood_delete(self, network):
self.delete_flows(table_id=tables.LOCAL_FLOOD,
metadata=meta.mk_metadata(network))
def local_out_add_port(self, network, port, mac):
(dp, ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(metadata=meta.mk_metadata(network), eth_dst=mac)
actions = [ofpp.OFPActionOutput(port=port)]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
]
msg = ofpp.OFPFlowMod(dp,
table_id=tables.LOCAL_OUT,
priority=1,
match=match,
instructions=instructions)
self._send_msg(msg)
def local_out_delete_port(self, network, mac):
self.delete_flows(table_id=tables.LOCAL_OUT,
metadata=meta.mk_metadata(network), eth_dst=mac)
def arp_passthrough(self, network, tpa):
(dp, ofp, ofpp) = self._get_dp()
match = ofpp.OFPMatch(metadata=meta.mk_metadata(network),
eth_type=ether.ETH_TYPE_ARP,
arp_op=arp.ARP_REQUEST,
arp_tpa=tpa)
instructions = [
ofpp.OFPInstructionGotoTable(table_id=tables.TUNNEL_OUT)]
msg = ofpp.OFPFlowMod(dp,
table_id=tables.ARP_PASSTHROUGH,
priority=1,
idle_timeout=5,
match=match,
instructions=instructions)
self._send_msg(msg)
|
import socket
def check_remote_port_whether_open(remote_addr, remote_port):
    """ Check whether the remote TCP port is open
    :param remote_addr: Remote host's ip address
    :param remote_port: Remote host's tcp port
    :type remote_addr: string
    :type remote_port: int
    :return: True if the port is open, False otherwise
    :rtype: boolean
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        ret = s.connect_ex((remote_addr, remote_port))
    finally:
        s.close()
    return ret == 0
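# Example (illustrative address):
#   if check_remote_port_whether_open('192.0.2.10', 22):
#       print('port 22 is open')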
|
"""
REST API Documentation for the NRS TFRS Credit Trading Application
The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation.
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import unittest
from django.test import TestCase
from django.test import Client
import django
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework import status
from . import fakedata
from .models.Audit import Audit
from .serializers import AuditSerializer
from .models.CreditTrade import CreditTrade
from .serializers import CreditTradeSerializer
from .models.CreditTradeHistory import CreditTradeHistory
from .serializers import CreditTradeHistorySerializer
from .models.CreditTradeStatus import CreditTradeStatus
from .serializers import CreditTradeStatusSerializer
from .models.CreditTradeType import CreditTradeType
from .serializers import CreditTradeTypeSerializer
from .models.CurrentUserViewModel import CurrentUserViewModel
from .serializers import CurrentUserViewModelSerializer
from .models.FuelSupplier import FuelSupplier
from .serializers import FuelSupplierSerializer
from .models.FuelSupplierActionsType import FuelSupplierActionsType
from .serializers import FuelSupplierActionsTypeSerializer
from .models.FuelSupplierAttachment import FuelSupplierAttachment
from .serializers import FuelSupplierAttachmentSerializer
from .models.FuelSupplierAttachmentTag import FuelSupplierAttachmentTag
from .serializers import FuelSupplierAttachmentTagSerializer
from .models.FuelSupplierBalance import FuelSupplierBalance
from .serializers import FuelSupplierBalanceSerializer
from .models.FuelSupplierCCData import FuelSupplierCCData
from .serializers import FuelSupplierCCDataSerializer
from .models.FuelSupplierContact import FuelSupplierContact
from .serializers import FuelSupplierContactSerializer
from .models.FuelSupplierContactRole import FuelSupplierContactRole
from .serializers import FuelSupplierContactRoleSerializer
from .models.FuelSupplierHistory import FuelSupplierHistory
from .serializers import FuelSupplierHistorySerializer
from .models.FuelSupplierStatus import FuelSupplierStatus
from .serializers import FuelSupplierStatusSerializer
from .models.FuelSupplierType import FuelSupplierType
from .serializers import FuelSupplierTypeSerializer
from .models.Notification import Notification
from .serializers import NotificationSerializer
from .models.NotificationEvent import NotificationEvent
from .serializers import NotificationEventSerializer
from .models.NotificationType import NotificationType
from .serializers import NotificationTypeSerializer
from .models.NotificationViewModel import NotificationViewModel
from .serializers import NotificationViewModelSerializer
from .models.Opportunity import Opportunity
from .serializers import OpportunitySerializer
from .models.OpportunityHistory import OpportunityHistory
from .serializers import OpportunityHistorySerializer
from .models.OpportunityStatus import OpportunityStatus
from .serializers import OpportunityStatusSerializer
from .models.Permission import Permission
from .serializers import PermissionSerializer
from .models.PermissionViewModel import PermissionViewModel
from .serializers import PermissionViewModelSerializer
from .models.Role import Role
from .serializers import RoleSerializer
from .models.RolePermission import RolePermission
from .serializers import RolePermissionSerializer
from .models.RolePermissionViewModel import RolePermissionViewModel
from .serializers import RolePermissionViewModelSerializer
from .models.RoleViewModel import RoleViewModel
from .serializers import RoleViewModelSerializer
from .models.User import User
from .serializers import UserSerializer
from .models.UserDetailsViewModel import UserDetailsViewModel
from .serializers import UserDetailsViewModelSerializer
from .models.UserFavourite import UserFavourite
from .serializers import UserFavouriteSerializer
from .models.UserFavouriteViewModel import UserFavouriteViewModel
from .serializers import UserFavouriteViewModelSerializer
from .models.UserRole import UserRole
from .serializers import UserRoleSerializer
from .models.UserRoleViewModel import UserRoleViewModel
from .serializers import UserRoleViewModelSerializer
from .models.UserViewModel import UserViewModel
from .serializers import UserViewModelSerializer
class Test_Api_Custom(TestCase):
def setUp(self):
# Every test needs a client.
self.client = Client()
        # needed to set up Django
django.setup()
def test_credittradesSearchGet(self):
self.fail("Not implemented")
def test_usersCurrentFavouritesIdDeletePost(self):
self.fail("Not implemented")
def test_usersCurrentFavouritesPut(self):
self.fail("Not implemented")
def test_usersCurrentFavouritesSearchGet(self):
self.fail("Not implemented")
def test_usersCurrentGet(self):
self.fail("Not implemented")
def test_fuelsuppliersIdAttachmentsGet(self):
self.fail("Not implemented")
def test_fuelsuppliersIdHistoryGet(self):
self.fail("Not implemented")
def test_fuelsuppliersSearchGet(self):
self.fail("Not implemented")
def test_rolesIdPermissionsGet(self):
self.fail("Not implemented")
def test_rolesIdUsersGet(self):
self.fail("Not implemented")
def test_usersIdFavouritesGet(self):
self.fail("Not implemented")
def test_usersIdNotificationsGet(self):
self.fail("Not implemented")
def test_usersIdPermissionsGet(self):
self.fail("Not implemented")
def test_usersIdRolesGet(self):
self.fail("Not implemented")
def test_usersSearchGet(self):
self.fail("Not implemented")
if __name__ == '__main__':
unittest.main()
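# A minimal sketch of how one of these stubs could be filled in. The URL below
# is an assumption for illustration only, not taken from this project's routing:
#
#     def test_usersCurrentGet(self):
#         response = self.client.get('/api/users/current')
#         self.assertEqual(status.HTTP_200_OK, response.status_code)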
|
from django.conf.urls import patterns, url
from desktop.lib.django_util import get_username_re_rule, get_groupname_re_rule
username_re = get_username_re_rule()
groupname_re = get_groupname_re_rule()
urlpatterns = patterns('useradmin.views',
url(r'^$', 'list_users'),
url(r'^users/?$', 'list_users'),
url(r'^groups/?$', 'list_groups'),
url(r'^permissions/?$', 'list_permissions'),
url(r'^configurations/?$', 'list_configurations'),
url(r'^users/edit/(?P<username>%s)$' % (username_re,), 'edit_user'),
url(r'^users/add_ldap_users$', 'add_ldap_users'),
url(r'^users/add_ldap_groups$', 'add_ldap_groups'),
url(r'^users/sync_ldap_users_groups$', 'sync_ldap_users_groups'),
url(r'^groups/edit/(?P<name>%s)$' % (groupname_re,), 'edit_group'),
url(r'^permissions/edit/(?P<app>.+?)/(?P<priv>.+?)/?$', 'edit_permission'),
url(r'^users/new$', 'edit_user', name="useradmin.new"),
url(r'^groups/new$', 'edit_group', name="useradmin.new_group"),
url(r'^users/delete', 'delete_user'),
url(r'^groups/delete$', 'delete_group'),
)
urlpatterns += patterns('useradmin.api',
url(r'^api/get_users/?', 'get_users', name='api_get_users'),
)
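# Sketch: the named patterns above can be reversed elsewhere in the app, e.g.
# reverse('useradmin.new') or reverse('api_get_users'), assuming this urlconf
# is mounted at the application root.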
|
from distconfig.backends.base import BaseBackend
from distconfig.backends.execution_context import ThreadingExecutionContext
class ConsulBackend(BaseBackend):
"""Consul backend implementation.
:param client: Instance of :class:`consul.Consul`.
:param execution_context: Instance of :class:`distconfig.backends.execution_context.ExecutionContext`
"""
def __init__(self, client, execution_context=ThreadingExecutionContext(), **kwargs):
super(ConsulBackend, self).__init__(**kwargs)
self._client = client
self._execution_context = execution_context
self._watching = set()
def get_raw(self, key):
result = self._get_backend_data(key)
self._add_watcher(key)
return result
def _get_backend_data(self, key):
_, result = self._client.kv.get(key)
if result:
result = result['Value']
return result
def _add_watcher(self, key):
if key not in self._watching:
self._watching.add(key)
self._execution_context.run(self._watch_for_changes, key)
def _watch_for_changes(self, key):
index = None
        while True:
try:
index, data = self._client.kv.get(key, index=index)
except Exception as ex:
self._logger.error('exception raised while listening on consul changes (re-launching watcher): %s', ex)
else:
if data:
data = data['Value']
self._notify_listeners(data)
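# A minimal usage sketch (assumes the python-consul package; any client object
# exposing kv.get(key, index=...) would work, and the key below is hypothetical):
#
#     import consul
#     backend = ConsulBackend(consul.Consul(host='127.0.0.1', port=8500))
#     raw = backend.get_raw('services/myapp/config')  # value bytes, or None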
|
from case import Case
class Case7_3_5(Case):
DESCRIPTION = """Send a close frame with close code and close reason of maximum length (123)"""
EXPECTATION = """Clean close with normal code."""
def init(self):
self.suppressClose = True
def onConnectionLost(self, failedByMe):
Case.onConnectionLost(self, failedByMe)
if self.behaviorClose == Case.WRONG_CODE:
self.behavior = Case.FAILED
self.passed = False
self.result = self.resultClose
def onOpen(self):
self.payload = u"*" * 123
self.expected[Case.OK] = []
self.expectedClose = {"closedByMe":True,"closeCode":[self.p.CLOSE_STATUS_CODE_NORMAL],"requireClean":True}
self.p.sendClose(self.p.CLOSE_STATUS_CODE_NORMAL,self.payload)
self.p.killAfter(1)
|
import os
from ..constants import FORGE_HOME, RELEASES_URL
FORGE_ROOT = os.environ.get('FORGE_ROOT', FORGE_HOME)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(FORGE_ROOT, 'forge.db'),
}
}
SITE_ID = 1
TIME_ZONE = 'America/Los_Angeles'
MEDIA_ROOT = os.path.join(FORGE_ROOT, 'releases')
MEDIA_URL = RELEASES_URL
STATIC_URL = '/static/'
STATIC_ROOT = ''
ROOT_URLCONF = 'forge.urls'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
TEMPLATE_DIRS = (
os.path.join(FORGE_HOME, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'forge',
)
ALLOWED_HOSTS = ['*']
FORGE_LOGS = os.environ.get('FORGE_LOGS', FORGE_ROOT)
FORGE_LOG = os.path.join(FORGE_LOGS, 'forge.log')
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'default': {
'format': '%(asctime)s %(levelname)s %(name)s %(message)s',
},
},
'handlers': {
'logfile': {
'class': 'logging.handlers.WatchedFileHandler',
'filename': FORGE_LOG,
'formatter': 'default',
},
},
'loggers': {
'django': {
'handlers': ['logfile'],
'level': 'INFO',
'propagate': False,
},
'django.request': {
'handlers': ['logfile'],
'level': 'ERROR',
'propagate': True,
},
'forge': {
'handlers': ['logfile'],
'level': 'INFO',
'propagate': True,
}
}
}
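# Example: both the data and log locations can be overridden from the
# environment (read via os.environ.get above) without editing this file, e.g.:
#     FORGE_ROOT=/srv/forge FORGE_LOGS=/var/log/forge ./manage.py runserver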
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'WishListItem'
db.create_table('market_buy_wishlistitem', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('marketplace', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketPlace'])),
('posted_on', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('posted_by', self.gf('django.db.models.fields.CharField')(max_length=100)),
('ideal_price', self.gf('django.db.models.fields.DecimalField')(max_digits=11, decimal_places=2)),
('description', self.gf('django.db.models.fields.TextField')()),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'])),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'])),
))
db.send_create_signal('market_buy', ['WishListItem'])
# Adding model 'Show'
db.create_table('market_buy_show', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('marketplace', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketPlace'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
('date_from', self.gf('django.db.models.fields.DateField')()),
('date_to', self.gf('django.db.models.fields.DateField')()),
('time_from', self.gf('django.db.models.fields.TimeField')()),
('time_to', self.gf('django.db.models.fields.TimeField')()),
('address', self.gf('django.db.models.fields.CharField')(max_length=128)),
('city', self.gf('django.db.models.fields.CharField')(max_length=128)),
('state', self.gf('django.db.models.fields.CharField')(max_length=128)),
('zip', self.gf('django.db.models.fields.CharField')(max_length=10)),
('contact_info', self.gf('django.db.models.fields.CharField')(max_length=128)),
('admission', self.gf('django.db.models.fields.DecimalField')(default='0.0', max_digits=11, decimal_places=2)),
))
db.send_create_signal('market_buy', ['Show'])
# Adding model 'EditorPick'
db.create_table('market_buy_editorpick', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('marketplace', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketPlace'])),
('order', self.gf('django.db.models.fields.IntegerField')(default=5)),
('product', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['inventory.Product'])),
))
db.send_create_signal('market_buy', ['EditorPick'])
# Adding model 'MarketPlacePick'
db.create_table('market_buy_marketplacepick', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('marketplace', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketPlace'])),
('order', self.gf('django.db.models.fields.IntegerField')(default=5)),
('product', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['inventory.Product'])),
))
db.send_create_signal('market_buy', ['MarketPlacePick'])
# Adding model 'DealerPick'
db.create_table('market_buy_dealerpick', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('marketplace', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketPlace'])),
('order', self.gf('django.db.models.fields.IntegerField')(default=5)),
('shop', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shops.Shop'])),
))
db.send_create_signal('market_buy', ['DealerPick'])
# Adding model 'BestSeller'
db.create_table('market_buy_bestseller', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('shop', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shops.Shop'])),
('from_date', self.gf('django.db.models.fields.DateTimeField')()),
('to_date', self.gf('django.db.models.fields.DateTimeField')()),
('revenue', self.gf('django.db.models.fields.DecimalField')(max_digits=11, decimal_places=2)),
))
db.send_create_signal('market_buy', ['BestSeller'])
def backwards(self, orm):
# Deleting model 'WishListItem'
db.delete_table('market_buy_wishlistitem')
# Deleting model 'Show'
db.delete_table('market_buy_show')
# Deleting model 'EditorPick'
db.delete_table('market_buy_editorpick')
# Deleting model 'MarketPlacePick'
db.delete_table('market_buy_marketplacepick')
# Deleting model 'DealerPick'
db.delete_table('market_buy_dealerpick')
# Deleting model 'BestSeller'
db.delete_table('market_buy_bestseller')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'inventory.product': {
'Meta': {'object_name': 'Product'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']"}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.ProductType']", 'null': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '11', 'decimal_places': '2'})
},
'inventory.producttype': {
'Meta': {'object_name': 'ProductType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'market.marketcategory': {
'Meta': {'object_name': 'MarketCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
},
'market.marketplace': {
'Meta': {'object_name': 'MarketPlace'},
'base_domain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '92'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'template_prefix': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '92'})
},
'market.marketsubcategory': {
'Meta': {'unique_together': "(('parent', 'slug'),)", 'object_name': 'MarketSubCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subcategories'", 'null': 'True', 'to': "orm['market.MarketCategory']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '60', 'db_index': 'True'})
},
'market_buy.bestseller': {
'Meta': {'object_name': 'BestSeller'},
'from_date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revenue': ('django.db.models.fields.DecimalField', [], {'max_digits': '11', 'decimal_places': '2'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"}),
'to_date': ('django.db.models.fields.DateTimeField', [], {})
},
'market_buy.dealerpick': {
'Meta': {'object_name': 'DealerPick'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"})
},
'market_buy.editorpick': {
'Meta': {'object_name': 'EditorPick'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.Product']"})
},
'market_buy.marketplacepick': {
'Meta': {'object_name': 'MarketPlacePick'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.Product']"})
},
'market_buy.show': {
'Meta': {'object_name': 'Show'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'admission': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '11', 'decimal_places': '2'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'contact_info': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'date_from': ('django.db.models.fields.DateField', [], {}),
'date_to': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'time_from': ('django.db.models.fields.TimeField', [], {}),
'time_to': ('django.db.models.fields.TimeField', [], {}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'market_buy.wishlistitem': {
'Meta': {'object_name': 'WishListItem'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ideal_price': ('django.db.models.fields.DecimalField', [], {'max_digits': '11', 'decimal_places': '2'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'posted_by': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'posted_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']"})
},
'shops.shop': {
'Meta': {'object_name': 'Shop'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'bids': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'39.29038,-76.61219'", 'max_length': '255'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['market_buy']
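# Applied with South's migrate command, e.g.:
#     ./manage.py migrate market_buy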
|
from __future__ import print_function
import argparse
import random
import mxnet as mx
import numpy as np
from mxnet import gluon, init
from mxnet import autograd as ag
from mxnet.gluon import nn
from mxnet.gluon.data.vision import transforms
from gluoncv.model_zoo import get_model
from gluoncv.data import transforms as gcv_transforms
parser = argparse.ArgumentParser(description="CIFAR-10 Example")
parser.add_argument(
    "--model",
    type=str,
    default="resnet50_v1b",
    help="name of the pretrained model from gluoncv model zoo"
    " (default: resnet50_v1b).")
parser.add_argument(
"--batch_size",
type=int,
default=64,
metavar="N",
help="input batch size for training (default: 64)")
parser.add_argument(
"--epochs",
type=int,
default=1,
metavar="N",
help="number of epochs to train (default: 1)")
parser.add_argument(
"--num_gpus",
default=0,
type=int,
help="number of gpus to use, 0 indicates cpu only (default: 0)")
parser.add_argument(
"--num_workers",
default=4,
type=int,
help="number of preprocessing workers (default: 4)")
parser.add_argument(
"--classes",
type=int,
default=10,
metavar="N",
help="number of outputs (default: 10)")
parser.add_argument(
"--lr",
default=0.001,
type=float,
help="initial learning rate (default: 0.001)")
parser.add_argument(
"--momentum",
default=0.9,
type=float,
help="initial momentum (default: 0.9)")
parser.add_argument(
"--wd", default=1e-4, type=float, help="weight decay (default: 1e-4)")
parser.add_argument(
"--expname", type=str, default="cifar10exp", help="experiments location")
parser.add_argument(
"--num_samples",
type=int,
default=20,
metavar="N",
help="number of samples (default: 20)")
parser.add_argument(
"--scheduler",
type=str,
default="fifo",
help="FIFO or AsyncHyperBandScheduler.")
parser.add_argument(
"--seed",
type=int,
default=1,
metavar="S",
help="random seed (default: 1)")
parser.add_argument(
"--smoke_test", action="store_true", help="Finish quickly for testing")
def train_cifar10(args, config, reporter):
vars(args).update(config)
np.random.seed(args.seed)
random.seed(args.seed)
mx.random.seed(args.seed)
# Set Hyper-params
batch_size = args.batch_size * max(args.num_gpus, 1)
ctx = [mx.gpu(i)
for i in range(args.num_gpus)] if args.num_gpus > 0 else [mx.cpu()]
# Define DataLoader
transform_train = transforms.Compose([
gcv_transforms.RandomCrop(32, pad=4),
transforms.RandomFlipLeftRight(),
transforms.ToTensor(),
transforms.Normalize([0.4914, 0.4822, 0.4465],
[0.2023, 0.1994, 0.2010])
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize([0.4914, 0.4822, 0.4465],
[0.2023, 0.1994, 0.2010])
])
train_data = gluon.data.DataLoader(
gluon.data.vision.CIFAR10(train=True).transform_first(transform_train),
batch_size=batch_size,
shuffle=True,
last_batch="discard",
num_workers=args.num_workers)
test_data = gluon.data.DataLoader(
gluon.data.vision.CIFAR10(train=False).transform_first(transform_test),
batch_size=batch_size,
shuffle=False,
num_workers=args.num_workers)
# Load model architecture and Initialize the net with pretrained model
finetune_net = get_model(args.model, pretrained=True)
with finetune_net.name_scope():
finetune_net.fc = nn.Dense(args.classes)
finetune_net.fc.initialize(init.Xavier(), ctx=ctx)
finetune_net.collect_params().reset_ctx(ctx)
finetune_net.hybridize()
# Define trainer
trainer = gluon.Trainer(finetune_net.collect_params(), "sgd", {
"learning_rate": args.lr,
"momentum": args.momentum,
"wd": args.wd
})
L = gluon.loss.SoftmaxCrossEntropyLoss()
metric = mx.metric.Accuracy()
def train(epoch):
for i, batch in enumerate(train_data):
data = gluon.utils.split_and_load(
batch[0], ctx_list=ctx, batch_axis=0, even_split=False)
label = gluon.utils.split_and_load(
batch[1], ctx_list=ctx, batch_axis=0, even_split=False)
with ag.record():
outputs = [finetune_net(X) for X in data]
loss = [L(yhat, y) for yhat, y in zip(outputs, label)]
for l in loss:
l.backward()
trainer.step(batch_size)
mx.nd.waitall()
    def test():
        test_loss = 0
        metric.reset()  # reset accumulated state so accuracy reflects only this pass
        for i, batch in enumerate(test_data):
data = gluon.utils.split_and_load(
batch[0], ctx_list=ctx, batch_axis=0, even_split=False)
label = gluon.utils.split_and_load(
batch[1], ctx_list=ctx, batch_axis=0, even_split=False)
outputs = [finetune_net(X) for X in data]
loss = [L(yhat, y) for yhat, y in zip(outputs, label)]
test_loss += sum(l.mean().asscalar() for l in loss) / len(loss)
metric.update(label, outputs)
_, test_acc = metric.get()
test_loss /= len(test_data)
reporter(mean_loss=test_loss, mean_accuracy=test_acc)
for epoch in range(1, args.epochs + 1):
train(epoch)
test()
if __name__ == "__main__":
args = parser.parse_args()
import ray
from ray import tune
from ray.tune.schedulers import AsyncHyperBandScheduler, FIFOScheduler
ray.init()
if args.scheduler == "fifo":
sched = FIFOScheduler()
elif args.scheduler == "asynchyperband":
sched = AsyncHyperBandScheduler(
time_attr="training_iteration",
metric="mean_loss",
mode="min",
max_t=400,
grace_period=60)
else:
raise NotImplementedError
tune.register_trainable(
"TRAIN_FN",
lambda config, reporter: train_cifar10(args, config, reporter))
tune.run(
"TRAIN_FN",
name=args.expname,
verbose=2,
scheduler=sched,
**{
"stop": {
"mean_accuracy": 0.98,
"training_iteration": 1 if args.smoke_test else args.epochs
},
"resources_per_trial": {
"cpu": int(args.num_workers),
"gpu": int(args.num_gpus)
},
"num_samples": 1 if args.smoke_test else args.num_samples,
"config": {
"lr": tune.sample_from(
lambda spec: np.power(10.0, np.random.uniform(-4, -1))),
"momentum": tune.sample_from(
lambda spec: np.random.uniform(0.85, 0.95)),
}
})
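# Example invocations (the file name is hypothetical; all flags are defined in
# the parser above):
#     python train_cifar10.py --model resnet50_v1b --smoke_test
#     python train_cifar10.py --model resnet50_v1b --num_gpus 1 --scheduler asynchyperband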
|
"""
A module that contains the content-based recommender LDARecommender that uses
LDA.
"""
from lib.content_based import ContentBased
from overrides import overrides
from sklearn.decomposition import LatentDirichletAllocation
class LDARecommender(ContentBased):
"""
LDA Recommender, a content based recommender that uses LDA.
"""
def __init__(self, initializer, evaluator, hyperparameters, options,
verbose=False, load_matrices=True, dump_matrices=True):
"""
        Constructor of the Latent Dirichlet Allocation recommender.
:param ModelInitializer initializer: A model initializer.
:param Evaluator evaluator: An evaluator of recommender and holder of input.
:param dict hyperparameters: A dictionary of the hyperparameters.
:param dict options: A dictionary of the run options.
:param boolean verbose: A flag for printing while computing.
        :param boolean load_matrices: A flag for loading the matrices from file.
:param boolean dump_matrices: A flag for saving the matrices.
"""
super(LDARecommender, self).__init__(initializer, evaluator, hyperparameters, options,
verbose, load_matrices, dump_matrices)
@overrides
def train_one_fold(self, return_report=True):
"""
Train one fold for n_iter iterations from scratch.
:param bool return_report: A flag to decide if we should return the evaluation report.
"""
# Try to read from file.
matrix_found = False
if self._load_matrices is True:
matrix_shape = (self.n_items, self.n_factors)
matrix_found, matrix = self.initializer.load_matrix(self.hyperparameters, 'document_distribution_lda',
matrix_shape)
self.document_distribution = matrix
if self._verbose and matrix_found:
print("Document distribution was set from file, will not train.")
if matrix_found is False:
if self._verbose and self._load_matrices:
print("Document distribution file was not found, will train LDA.")
self._train()
if self._dump_matrices:
self.initializer.save_matrix(self.document_distribution, 'document_distribution_lda')
if return_report:
return self.get_evaluation_report()
def _train(self):
"""
Train LDA Recommender, and store the document_distribution.
"""
term_freq = self.abstracts_preprocessor.get_term_frequency_sparse_matrix()
lda = LatentDirichletAllocation(n_topics=self.n_factors, max_iter=self.n_iter,
learning_method='online',
learning_offset=50., random_state=0,
verbose=0)
        if self._verbose:
            print("Initialized LDA model, training...")
        self.document_distribution = lda.fit_transform(term_freq)
        if self._verbose:
            print("LDA trained.")
|
from datetime import date
from django.core.management.base import BaseCommand
import logging
from actions.models import Action
from applications.models import Referral
logger = logging.getLogger('statdev')
class Command(BaseCommand):
help = 'Iterates over current referrals to resolve those past their expiry date'
def handle(self, *args, **options):
referrals = Referral.objects.filter(
status=Referral.REFERRAL_STATUS_CHOICES.referred, expire_date__lt=date.today(),
response_date__isnull=True)
for r in referrals:
self.stdout.write('Setting referral status to "expired": {}'.format(r))
logger.info('Setting referral status to "expired": {}'.format(r))
r.status = Referral.REFERRAL_STATUS_CHOICES.expired
r.save()
            # Check the referral's application: if its status is 'with_referee'
            # but it has no referrals with status 'referred', then set the
            # application status to 'with_admin'.
app = r.application
if not Referral.objects.filter(
application=app, status=Referral.REFERRAL_STATUS_CHOICES.referred).exists():
self.stdout.write('Setting application status to "with admin": {}'.format(app))
logger.info('Setting application status to "with admin": {}'.format(app))
app.state = app.APP_STATE_CHOICES.with_admin
app.save()
# Record an action.
action = Action(
content_object=app,
action='[SYSTEM] No outstanding referrals, application status set to {}'.format(app.get_state_display()))
action.save()
return
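# Typically run on a schedule (e.g. daily via cron). The command name below is
# hypothetical and matches whatever this file is saved as under
# management/commands:
#     ./manage.py expire_referrals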
|
"""
Module holds base stuff regarding JMX format
Copyright 2015 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import os
import traceback
from cssselect import GenericTranslator
from lxml import etree
from urllib import parse
from bzt import TaurusInternalException, TaurusConfigError
from bzt.engine import Scenario
from bzt.utils import BetterDict, iteritems, numeric_types
from bzt.requests_model import has_variable_pattern
LOG = logging.getLogger("")
def try_convert(val, func=int, default=None):
if val is None:
res = val
    elif has_variable_pattern(val):  # it's a JMeter property placeholder
if default is not None:
val = get_prop_default(val) or default
res = func(val)
else:
res = val
else:
res = func(val)
return res
def get_prop_default(val):
comma_ind = val.find(",")
comma_found = comma_ind > -1
is_property = val.startswith("${__property(") or val.startswith("${__P(")
if has_variable_pattern(val) and is_property and comma_found:
return val[comma_ind + 1: -2]
else:
return None
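# Example: get_prop_default("${__P(delay,5)}") returns "5" (the text between
# the comma and the closing ")}"), while a plain value or a property without a
# default returns None.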
def cond_int(val):
if isinstance(val, float):
return int(val)
return val
def cond_float(val, rounding=None):
if isinstance(val, numeric_types):
return round(float(val), rounding) if rounding is not None else float(val)
return val
class JMX(object):
"""
A class to manipulate and generate JMX test plans for JMeter
    :param original: path to an existing JMX file to load. If None, an empty
    test plan is created
"""
TEST_PLAN_SEL = "jmeterTestPlan>hashTree>hashTree"
THR_GROUP_SEL = TEST_PLAN_SEL + ">hashTree[type=tg]"
THR_TIMER = "kg.apc.jmeter.timers.VariableThroughputTimer"
SET_VAR_ACTION = "kg.apc.jmeter.control.sampler.SetVariablesAction"
def __init__(self, original=None, test_plan_name="BZT Generated Test Plan"):
self.log = logging.getLogger(self.__class__.__name__)
if original:
self.load(original)
else:
root = etree.Element("jmeterTestPlan")
self.tree = etree.ElementTree(root)
test_plan = etree.Element("TestPlan", guiclass="TestPlanGui",
testname=test_plan_name,
testclass="TestPlan", enabled="true")
htree = etree.Element("hashTree")
htree.append(test_plan)
htree.append(etree.Element("hashTree"))
self.append("jmeterTestPlan", htree)
element_prop = self._get_arguments_panel("TestPlan.user_defined_variables")
self.append("jmeterTestPlan>hashTree>TestPlan", element_prop)
def load(self, original):
"""
Load existing JMX file
:param original: JMX file path
:raise TaurusInternalException: in case of XML parsing error
"""
try:
self.tree = etree.ElementTree()
self.tree.parse(original)
except BaseException as exc:
msg = "XML parsing failed for file %s: %s"
raise TaurusInternalException(msg % (original, exc))
def get(self, selector):
"""
Returns tree elements by CSS selector
:type selector: str
:return:
"""
expression = GenericTranslator().css_to_xpath(selector)
nodes = self.tree.xpath(expression)
return nodes
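    # Example: jmx.get(JMX.TEST_PLAN_SEL) returns the test plan's inner
    # hashTree nodes; selectors are CSS expressions translated to XPath by
    # cssselect's GenericTranslator.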
def append(self, selector, node):
"""
        Add node to the container specified by selector. If multiple nodes
        match the selector, the first one is used as the container.
:param selector: CSS selector for container
:param node: Element instance to add
:raise TaurusInternalException: if container was not found
"""
container = self.get(selector)
if not len(container):
msg = "Failed to find TestPlan node in file: %s"
raise TaurusInternalException(msg % selector)
container[0].append(node)
def save(self, filename):
"""
Save JMX into file
:param filename:
"""
self.log.debug("Saving JMX to: %s", filename)
with open(filename, "wb") as fhd:
self.tree.write(fhd, pretty_print=True, encoding="UTF-8", xml_declaration=True)
@staticmethod
def _flag(flag_name, bool_value):
"""
Generates element for JMX flag node
:param flag_name:
:param bool_value:
:return:
"""
elm = etree.Element(flag_name)
elm.text = "true" if bool_value else "false"
return elm
@staticmethod
def __jtl_writer(filename, label, flags):
"""
Generates JTL writer
:param filename:
:return:
"""
jtl = etree.Element("stringProp", {"name": "filename"})
jtl.text = filename
name = etree.Element("name")
name.text = "saveConfig"
value = etree.Element("value")
value.set("class", "SampleSaveConfiguration")
for key, val in iteritems(flags):
value.append(JMX._flag(key, val))
obj_prop = etree.Element("objProp")
obj_prop.append(name)
obj_prop.append(value)
listener = etree.Element("ResultCollector",
testname=label,
testclass="ResultCollector",
guiclass="SimpleDataWriter")
listener.append(jtl)
listener.append(obj_prop)
return listener
@staticmethod
def new_kpi_listener(filename, flag_overrides=None):
"""
Generates listener for writing basic KPI data in CSV format
:param filename:
:return:
"""
defaults = {
"xml": False,
"fieldNames": True,
"time": True,
"timestamp": True,
"latency": True,
"connectTime": True,
"success": True,
"label": True,
"code": True,
"message": True,
"threadName": True,
"dataType": False,
"encoding": False,
"assertions": False,
"subresults": False,
"responseData": False,
"samplerData": False,
"responseHeaders": False,
"requestHeaders": False,
"responseDataOnError": False,
"saveAssertionResultsFailureMessage": False,
"bytes": True,
"hostname": True,
"threadCounts": True,
"url": False
}
flags = BetterDict.from_dict(defaults)
if flag_overrides:
flags.merge(flag_overrides)
return JMX.__jtl_writer(filename, "KPI Writer", flags)
@staticmethod
def new_xml_listener(filename, is_full, user_flags):
"""
:param is_full: bool
:param filename: str
:param user_flags: BetterDict
:return:
"""
default_flags = {
"xml": True,
"fieldNames": True,
"time": True,
"timestamp": True,
"latency": True,
"success": True,
"label": True,
"code": True,
"message": True,
"threadName": True,
"dataType": True,
"encoding": True,
"assertions": True,
"subresults": True,
"responseData": False,
"samplerData": False,
"responseHeaders": True,
"requestHeaders": True,
"responseDataOnError": True,
"saveAssertionResultsFailureMessage": True,
"bytes": True,
"threadCounts": True,
"url": True
}
flags = BetterDict.from_dict(default_flags)
flags.merge(user_flags)
if is_full:
writer = JMX.__jtl_writer(filename, "Trace Writer", flags)
else:
writer = JMX.__jtl_writer(filename, "Errors Writer", flags)
writer.append(JMX._bool_prop("ResultCollector.error_logging", True))
return writer
@staticmethod
def _get_arguments_panel(name):
"""
Generates ArgumentsPanel node
:param name:
:return:
"""
return etree.Element("elementProp", name=name, elementType="Arguments",
guiclass="ArgumentsPanel", testclass="Arguments")
@staticmethod
def get_auth_manager(authorizations, clear_flag):
mgr = etree.Element("AuthManager", guiclass="AuthPanel", testclass="AuthManager",
testname="HTTP Authorization Manager")
if clear_flag:
mgr.append(JMX._bool_prop("AuthManager.clearEachIteration", True))
auth_coll = JMX._collection_prop("AuthManager.auth_list")
mgr.append(auth_coll)
for authorization in authorizations:
auth_element = JMX._element_prop(name="", element_type="Authorization")
conf_url = authorization.get("url", "")
conf_name = authorization.get("name", "")
conf_pass = authorization.get("password", "")
conf_domain = authorization.get("domain", "")
conf_realm = authorization.get("realm", "")
conf_mech = authorization.get("mechanism", "").upper()
if not (conf_name and conf_pass and (conf_url or conf_domain)):
LOG.warning("Wrong authorization: %s" % authorization)
continue
auth_element.append(JMX._string_prop("Authorization.url", conf_url))
auth_element.append(JMX._string_prop("Authorization.username", conf_name))
auth_element.append(JMX._string_prop("Authorization.password", conf_pass))
auth_element.append(JMX._string_prop("Authorization.domain", conf_domain))
auth_element.append(JMX._string_prop("Authorization.realm", conf_realm))
if conf_mech == "KERBEROS": # optional prop
auth_element.append(JMX._string_prop("Authorization.mechanism", "KERBEROS"))
auth_coll.append(auth_element)
return mgr
@staticmethod
def _get_http_request(url, label, method, timeout, body, keepalive, files=(), encoding=None, follow_redirects=True,
use_random_host_ip=False, host_ips=()):
"""
Generates HTTP request
:type method: str
:type label: str
:type url: str
:rtype: lxml.etree.Element
"""
proxy = etree.Element("HTTPSamplerProxy", guiclass="HttpTestSampleGui", testclass="HTTPSamplerProxy")
proxy.set("testname", label)
args = JMX._get_arguments_panel("HTTPsampler.Arguments")
if isinstance(body, str):
JMX.__add_body_from_string(args, body, proxy)
elif isinstance(body, dict):
JMX.__add_body_from_script(args, body, proxy)
elif body:
msg = "Cannot handle 'body' option of type %s: %s"
raise TaurusInternalException(msg % (type(body), body))
parsed_url = parse.urlparse(url)
JMX.__add_hostnameport_2sampler(parsed_url, proxy, url)
path = parsed_url.path
if parsed_url.params:
path += ";" + parsed_url.params
if parsed_url.query:
path += "?" + parsed_url.query
proxy.append(JMX._string_prop("HTTPSampler.path", path))
proxy.append(JMX._string_prop("HTTPSampler.method", method))
proxy.append(JMX._bool_prop("HTTPSampler.use_keepalive", keepalive))
proxy.append(JMX._bool_prop("HTTPSampler.follow_redirects", follow_redirects))
proxy.append(JMX._bool_prop("HTTPSampler.auto_redirects", False))
if timeout:
proxy.append(JMX._string_prop("HTTPSampler.connect_timeout", timeout))
proxy.append(JMX._string_prop("HTTPSampler.response_timeout", timeout))
if encoding is not None:
proxy.append(JMX._string_prop("HTTPSampler.contentEncoding", encoding))
proxy.extend(JMX.get_files_elements(files))
if use_random_host_ip and host_ips:
if len(host_ips) > 1:
expr = "${__chooseRandom(%s,randomAddr)}" % ",".join(host_ips)
else:
expr = host_ips[0]
proxy.append(JMX._string_prop("HTTPSampler.ipSource", expr))
return proxy
@staticmethod
def get_files_elements(files):
elements = []
if files:
files_prop = JMX._element_prop("HTTPsampler.Files", "HTTPFileArgs")
elements.append(files_prop)
files_coll = JMX._collection_prop("HTTPFileArgs.files")
for file_dict in files:
file_elem = JMX._element_prop(file_dict.get("path", ""), "HTTPFileArg")
file_elem.append(JMX._string_prop("File.path", file_dict.get("path", "")))
file_elem.append(JMX._string_prop("File.paramname", file_dict.get("param", "")))
file_elem.append(JMX._string_prop("File.mimetype", file_dict.get("mime-type", "")))
files_coll.append(file_elem)
files_prop.append(files_coll)
return elements
@staticmethod
def get_keystore_config_elements(variable_name, start_index, end_index, preload):
elements = []
if variable_name:
elements = etree.Element("KeystoreConfig", guiclass="TestBeanGUI", testclass="KeystoreConfig",
testname="Taurus-Keystore-Configuration")
elements.append(JMX._string_prop("clientCertAliasVarName", variable_name))
elements.append(JMX._string_prop("startIndex", start_index))
elements.append(JMX._string_prop("endIndex", end_index))
elements.append(JMX._string_prop("preload", preload))
return elements
@staticmethod
def __add_body_from_string(args, body, proxy):
proxy.append(JMX._bool_prop("HTTPSampler.postBodyRaw", True))
coll_prop = JMX._collection_prop("Arguments.arguments")
header = JMX._element_prop("elementProp", "HTTPArgument")
try:
header.append(JMX._string_prop("Argument.value", body))
except ValueError:
LOG.warning("Failed to set body: %s", traceback.format_exc())
header.append(JMX._string_prop("Argument.value", "BINARY-STUB"))
coll_prop.append(header)
args.append(coll_prop)
proxy.append(args)
@staticmethod
def __add_body_from_script(args, body, proxy):
http_args_coll_prop = JMX._collection_prop("Arguments.arguments")
for arg_name, arg_value in body.items():
try:
http_element_prop = JMX._element_prop(arg_name, "HTTPArgument")
except ValueError:
LOG.warning("Failed to get element property: %s", traceback.format_exc())
http_element_prop = JMX._element_prop('BINARY-STUB', "HTTPArgument")
try:
http_element_prop.append(JMX._string_prop("Argument.name", arg_name))
except ValueError:
LOG.warning("Failed to set arg name: %s", traceback.format_exc())
http_element_prop.append(JMX._string_prop("Argument.name", "BINARY-STUB"))
try:
http_element_prop.append(
JMX._string_prop("Argument.value", arg_value if arg_value is not None else ''))
except ValueError:
LOG.warning("Failed to set arg name: %s", traceback.format_exc())
http_element_prop.append(JMX._string_prop("Argument.value", "BINARY-STUB"))
http_element_prop.append(JMX._bool_prop("HTTPArgument.always_encode", True))
use_equals = arg_value is not None
http_element_prop.append(JMX._bool_prop("HTTPArgument.use_equals", arg_value is not None))
http_element_prop.append(JMX._string_prop("Argument.metadata", '=' if use_equals else ''))
http_args_coll_prop.append(http_element_prop)
args.append(http_args_coll_prop)
proxy.append(args)
@staticmethod
def __add_hostnameport_2sampler(parsed_url, proxy, url):
if parsed_url.scheme:
proxy.append(JMX._string_prop("HTTPSampler.protocol", parsed_url.scheme))
if parsed_url.netloc:
netloc_parts = parsed_url.netloc.split(':')
if netloc_parts[0]:
proxy.append(JMX._string_prop("HTTPSampler.domain", netloc_parts[0]))
if len(netloc_parts) > 1 and netloc_parts[1]:
proxy.append(JMX._string_prop("HTTPSampler.port", netloc_parts[1]))
else:
try:
if parsed_url.port:
proxy.append(JMX._string_prop("HTTPSampler.port", parsed_url.port))
else:
proxy.append(JMX._string_prop("HTTPSampler.port", ""))
except ValueError:
LOG.debug("Non-parsable port: %s", url)
proxy.append(JMX._string_prop("HTTPSampler.port", ""))
@staticmethod
def _element_prop(name, element_type):
"""
Generates element property node
:param name:
:param element_type:
:return:
"""
res = etree.Element("elementProp", name=name, elementType=element_type)
return res
@staticmethod
def _collection_prop(name):
"""
Adds Collection prop
:param name:
:return:
"""
res = etree.Element("collectionProp", name=name)
return res
@staticmethod
def _string_prop(name, value):
"""
Generates string property node
:param name:
:param value:
:return:
"""
res = etree.Element("stringProp", name=name)
res.text = str(value)
return res
@staticmethod
def _long_prop(name, value):
"""
Generates long property node
:param name:
:param value:
:return:
"""
res = etree.Element("longProp", name=name)
res.text = str(value)
return res
@staticmethod
def _bool_prop(name, value):
"""
Generates boolean property
:param name:
:param value:
:return:
"""
res = etree.Element("boolProp", name=name)
res.text = 'true' if value else 'false'
return res
@staticmethod
def int_prop(name, value):
"""
JMX int property
:param name:
:param value:
:return:
"""
res = etree.Element("intProp", name=name)
res.text = str(value)
return res
@staticmethod
def get_thread_group(concurrency=None, rampup=0, hold=0, iterations=None,
testname="ThreadGroup", on_error="continue", thread_delay=False, scheduler_delay=None):
"""
Generates ThreadGroup
Expected values (by JMeter):
ThreadGroup.num_threads (concurrency): int
ThreadGroup.ramp_time (rampup): int
ThreadGroup.scheduler (need to hold): boolean
ThreadGroup.duration (rampup + hold): int
LoopController.loops (iterations): int
ThreadGroup.delayedStart: boolean
:return: etree element, ThreadGroup
"""
rampup = cond_int(rampup or 0)
hold = cond_int(hold or 0)
if concurrency is None:
concurrency = 1
if isinstance(concurrency, numeric_types) and concurrency <= 0:
enabled = "false"
else:
enabled = "true"
if not hold:
duration = rampup
elif not rampup:
duration = hold
elif isinstance(rampup, numeric_types) and isinstance(hold, numeric_types):
duration = hold + rampup
else:
duration = "${__intSum(%s,%s)}" % (rampup, hold)
trg = etree.Element("ThreadGroup", guiclass="ThreadGroupGui",
testclass="ThreadGroup", testname=testname, enabled=enabled)
if not iterations:
if duration:
iterations = -1
else:
iterations = 1
scheduler = False
if hold or (rampup and (iterations == -1)):
scheduler = True
if on_error is not None:
trg.append(JMX._string_prop("ThreadGroup.on_sample_error", on_error))
loop = etree.Element("elementProp",
name="ThreadGroup.main_controller",
elementType="LoopController",
guiclass="LoopControlPanel",
testclass="LoopController")
# 'true' causes endless execution of TG in non-gui mode
loop.append(JMX._bool_prop("LoopController.continue_forever", False))
loop.append(JMX._string_prop("LoopController.loops", iterations))
trg.append(loop)
trg.append(JMX._string_prop("ThreadGroup.num_threads", concurrency))
trg.append(JMX._string_prop("ThreadGroup.ramp_time", rampup))
trg.append(JMX._string_prop("ThreadGroup.start_time", ""))
trg.append(JMX._string_prop("ThreadGroup.end_time", ""))
trg.append(JMX._bool_prop("ThreadGroup.scheduler", scheduler))
trg.append(JMX._string_prop("ThreadGroup.duration", duration))
if scheduler_delay:
trg.append(JMX._string_prop("ThreadGroup.delay", scheduler_delay))
if thread_delay:
trg.append(JMX._bool_prop("ThreadGroup.delayedStart", thread_delay))
return trg
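    # Example (sketch): a group of 10 threads ramping up over 60s and holding
    # for 120s:
    #     tg = JMX.get_thread_group(concurrency=10, rampup=60, hold=120)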
def get_rps_shaper(self):
"""
:return: etree.Element
"""
throughput_timer_element = etree.Element(self.THR_TIMER,
guiclass=self.THR_TIMER + "Gui",
testclass=self.THR_TIMER,
testname="Throughput_Limiter",
enabled="true")
shaper_load_prof = self._collection_prop("load_profile")
throughput_timer_element.append(shaper_load_prof)
return throughput_timer_element
def add_rps_shaper_schedule(self, shaper_etree, start_rps, end_rps, duration):
"""
Adds schedule to rps shaper
Expected values (by JMeter):
<first> ('start_rps'): float
<second> ('end_rps'): float
<third> ('duration'): int
"""
shaper_collection = shaper_etree.find(".//collectionProp[@name='load_profile']")
coll_prop = self._collection_prop("")
start_rps_prop = self._string_prop("", cond_float(start_rps, 3))
end_rps_prop = self._string_prop("", cond_float(end_rps, 3))
duration_prop = self._string_prop("", cond_int(duration))
coll_prop.append(start_rps_prop)
coll_prop.append(end_rps_prop)
coll_prop.append(duration_prop)
shaper_collection.append(coll_prop)
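    # Example (sketch): ramp from 1 to 10 requests/sec over 60 seconds:
    #     shaper = jmx.get_rps_shaper()
    #     jmx.add_rps_shaper_schedule(shaper, start_rps=1, end_rps=10, duration=60)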
@staticmethod
def get_set_var_action(udv_dict, testname="Variables from Taurus"):
"""
:type testname: str
:type udv_dict: dict[str,str]
:rtype: etree.Element
"""
udv_element = etree.Element(JMX.SET_VAR_ACTION, guiclass=JMX.SET_VAR_ACTION + "Gui",
testclass=JMX.SET_VAR_ACTION, testname=testname)
arg_element = etree.Element("elementProp", name="SetVariablesAction", guiclass="ArgumentsPanel",
testclass="Arguments", testname="User Defined Variables", elementType="Arguments")
udv_element.append(arg_element)
udv_collection_prop = JMX._collection_prop("Arguments.arguments")
arg_element.append(udv_collection_prop)
for var_name in sorted(udv_dict.keys(), key=str):
udv_element_prop = JMX._element_prop(name=str(var_name), element_type="Argument")
udv_collection_prop.append(udv_element_prop)
udv_arg_name_prop = JMX._string_prop("Argument.name", var_name)
udv_arg_value_prop = JMX._string_prop("Argument.value", udv_dict[var_name])
udv_arg_meta_prop = JMX._string_prop("Argument.metadata", "=")
udv_element_prop.append(udv_arg_name_prop)
udv_element_prop.append(udv_arg_value_prop)
udv_element_prop.append(udv_arg_meta_prop)
return udv_element
@staticmethod
def add_user_def_vars_elements(udv_dict, testname="Variables from Taurus"):
"""
:type testname: str
:type udv_dict: dict[str,str]
:rtype: etree.Element
"""
udv_element = etree.Element("Arguments", guiclass="ArgumentsPanel", testclass="Arguments",
testname=testname)
udv_collection_prop = JMX._collection_prop("Arguments.arguments")
for var_name in sorted(udv_dict.keys(), key=str):
udv_element_prop = JMX._element_prop(str(var_name), "Argument")
udv_arg_name_prop = JMX._string_prop("Argument.name", var_name)
udv_arg_value_prop = JMX._string_prop("Argument.value", udv_dict[var_name])
udv_arg_desc_prop = JMX._string_prop("Argument.desc", "")
udv_arg_meta_prop = JMX._string_prop("Argument.metadata", "=")
udv_element_prop.append(udv_arg_name_prop)
udv_element_prop.append(udv_arg_value_prop)
udv_element_prop.append(udv_arg_desc_prop)
udv_element_prop.append(udv_arg_meta_prop)
udv_collection_prop.append(udv_element_prop)
udv_element.append(udv_collection_prop)
return udv_element
@staticmethod
def get_concurrency_thread_group(concurrency=None, rampup=0, hold=0, steps=None, on_error="continue",
testname="ConcurrencyThreadGroup", iterations=""):
"""
Generates ConcurrencyThreadGroup
Expected values (by JMeter):
Targetlevel (concurrency): int
RampUp (rampup): float
Steps (steps): boolean
Hold (hold): float
:return: etree element, Concurrency Thread Group
"""
if not rampup:
rampup = 0
if concurrency is None:
concurrency = 1
if isinstance(concurrency, numeric_types) and concurrency <= 0:
enabled = "false"
else:
enabled = "true"
if steps is None: # zero means infinity of steps
steps = 0
name = 'com.blazemeter.jmeter.threads.concurrency.ConcurrencyThreadGroup'
concurrency_thread_group = etree.Element(
name, guiclass=name + "Gui", testclass=name, testname=testname, enabled=enabled)
virtual_user_controller = etree.Element(
"elementProp",
name="ThreadGroup.main_controller",
elementType="com.blazemeter.jmeter.control.VirtualUserController")
concurrency_thread_group.append(virtual_user_controller)
concurrency_thread_group.append(JMX._string_prop("ThreadGroup.on_sample_error", on_error))
concurrency_thread_group.append(JMX._string_prop("TargetLevel", str(concurrency)))
concurrency_thread_group.append(JMX._string_prop("RampUp", str(cond_int(rampup))))
concurrency_thread_group.append(JMX._string_prop("Steps", steps))
concurrency_thread_group.append(JMX._string_prop("Hold", str(cond_int(hold))))
concurrency_thread_group.append(JMX._string_prop("LogFilename", ""))
concurrency_thread_group.append(JMX._string_prop("Iterations", iterations or ""))
concurrency_thread_group.append(JMX._string_prop("Unit", "S"))
return concurrency_thread_group
@staticmethod
def get_dns_cache_mgr():
"""
        Generates a DNS cache element with default parameters
:return:
"""
dns_element = etree.Element("DNSCacheManager", guiclass="DNSCachePanel", testclass="DNSCacheManager",
testname="DNS Cache Manager")
dns_element.append(JMX._collection_prop("DNSCacheManager.servers"))
dns_element.append(JMX._bool_prop("DNSCacheManager.clearEachIteration", False))
dns_element.append(JMX._bool_prop("DNSCacheManager.isCustomResolver", False))
return dns_element
@staticmethod
def _get_header_mgr(hdict):
"""
:type hdict: dict[str,str]
:rtype: lxml.etree.Element
"""
mgr = etree.Element("HeaderManager", guiclass="HeaderPanel", testclass="HeaderManager", testname="Headers")
coll_prop = etree.Element("collectionProp", name="HeaderManager.headers")
for hname, hval in iteritems(hdict):
header = etree.Element("elementProp", name="", elementType="Header")
header.append(JMX._string_prop("Header.name", hname))
header.append(JMX._string_prop("Header.value", hval))
coll_prop.append(header)
mgr.append(coll_prop)
return mgr
@staticmethod
def _get_cache_mgr():
"""
:rtype: lxml.etree.Element
"""
mgr = etree.Element("CacheManager", guiclass="CacheManagerGui", testclass="CacheManager", testname="Cache")
mgr.append(JMX._bool_prop("clearEachIteration", True))
mgr.append(JMX._bool_prop("useExpires", True))
return mgr
@staticmethod
def _get_cookie_mgr(scenario=None):
"""
:rtype: lxml.etree.Element
"""
mgr = etree.Element("CookieManager", guiclass="CookiePanel", testclass="CookieManager", testname="Cookies")
mgr.append(JMX._bool_prop("CookieManager.clearEachIteration", False))
mgr.append(JMX._string_prop("CookieManager.implementation",
"org.apache.jmeter.protocol.http.control.HC4CookieHandler"))
if scenario:
cookies = scenario.get(Scenario.COOKIES)
if cookies:
cookies_coll = JMX._collection_prop("CookieManager.cookies")
mgr.append(cookies_coll)
for cookie in cookies:
if not isinstance(cookie, dict):
raise TaurusConfigError("Cookie must be dictionary: %s" % cookie)
c_name = cookie.get("name", TaurusConfigError("Name of cookie isn't found: %s" % cookie))
c_value = cookie.get("value", TaurusConfigError("Value of cookie isn't found: %s" % cookie))
c_domain = cookie.get("domain", TaurusConfigError("Domain of cookie isn't found: %s" % cookie))
c_path = cookie.get("path", "")
c_secure = cookie.get("secure", False)
# follow params are hardcoded in JMeter
c_expires = 0
c_path_specified = True
c_domain_specified = True
c_elem = etree.Element("elementProp", name=c_name, elementType="Cookie", testname=c_name)
c_elem.append(JMX._string_prop("Cookie.value", c_value))
c_elem.append(JMX._string_prop("Cookie.domain", c_domain))
c_elem.append(JMX._string_prop("Cookie.path", c_path))
c_elem.append(JMX._bool_prop("Cookie.secure", c_secure))
c_elem.append(JMX._long_prop("Cookie.expires", c_expires))
c_elem.append(JMX._bool_prop("Cookie.path_specified", c_path_specified))
c_elem.append(JMX._bool_prop("Cookie.domain_specified", c_domain_specified))
cookies_coll.append(c_elem)
return mgr
@staticmethod
def _get_http_defaults(default_address=None, timeout=None, retrieve_resources=None, concurrent_pool_size=4,
content_encoding=None, resources_regex=None):
"""
:rtype: lxml.etree.Element
"""
cfg = etree.Element("ConfigTestElement", guiclass="HttpDefaultsGui",
testclass="ConfigTestElement", testname="Defaults")
if retrieve_resources:
cfg.append(JMX._bool_prop("HTTPSampler.image_parser", True))
cfg.append(JMX._bool_prop("HTTPSampler.concurrentDwn", True))
if concurrent_pool_size:
cfg.append(JMX._string_prop("HTTPSampler.concurrentPool", concurrent_pool_size))
params = etree.Element("elementProp",
name="HTTPsampler.Arguments",
elementType="Arguments",
guiclass="HTTPArgumentsPanel",
testclass="Arguments", testname="user_defined")
cfg.append(params)
if default_address:
parsed_url = parse.urlsplit(default_address)
if parsed_url.scheme:
cfg.append(JMX._string_prop("HTTPSampler.protocol", parsed_url.scheme))
if parsed_url.netloc:
netloc = parsed_url.netloc
if ':' in netloc:
index = netloc.rfind(':')
cfg.append(JMX._string_prop("HTTPSampler.port", netloc[index + 1:]))
netloc = netloc[:index]
cfg.append(JMX._string_prop("HTTPSampler.domain", netloc))
if timeout:
cfg.append(JMX._string_prop("HTTPSampler.connect_timeout", timeout))
cfg.append(JMX._string_prop("HTTPSampler.response_timeout", timeout))
if content_encoding:
cfg.append(JMX._string_prop("HTTPSampler.contentEncoding", content_encoding))
if resources_regex:
cfg.append(JMX._string_prop("HTTPSampler.embedded_url_re", resources_regex))
return cfg
@staticmethod
def get_constant_timer(delay):
timer_type = "ConstantTimer"
element = etree.Element(timer_type, guiclass="%sGui" % timer_type, testclass=timer_type, testname="Think-Time")
element.append(JMX._string_prop("%s.delay" % timer_type, delay))
return [element, etree.Element("hashTree")]
@staticmethod
def get_uniform_timer(maximum, offset):
timer_type = "UniformRandomTimer"
element = etree.Element(timer_type, guiclass="%sGui" % timer_type, testclass=timer_type, testname="Think-Time")
element.append(JMX._string_prop("ConstantTimer.delay", offset))
element.append(JMX._string_prop("RandomTimer.range", maximum))
return [element, etree.Element("hashTree")]
@staticmethod
def get_gaussian_timer(dev, offset):
timer_type = "GaussianRandomTimer"
element = etree.Element(timer_type, guiclass="%sGui" % timer_type, testclass=timer_type, testname="Think-Time")
element.append(JMX._string_prop("ConstantTimer.delay", offset))
element.append(JMX._string_prop("RandomTimer.range", dev))
return [element, etree.Element("hashTree")]
@staticmethod
def get_poisson_timer(lam, delay):
timer_type = "PoissonRandomTimer"
element = etree.Element(timer_type, guiclass="%sGui" % timer_type, testclass=timer_type, testname="Think-Time")
element.append(JMX._string_prop("ConstantTimer.delay", delay))
element.append(JMX._string_prop("RandomTimer.range", lam))
return [element, etree.Element("hashTree")]
@staticmethod
def _get_extractor(varname, headers, regexp, template, match_no, default='NOT_FOUND', scope='', from_var=''):
"""
:type varname: str
:type headers: str
:type regexp: str
:type template: str|int
:type match_no: int
:type default: str
:type scope: str
:type from_var: str
:rtype: lxml.etree.Element
"""
if isinstance(template, int):
template = '$%s$' % template
if headers.lower() == 'headers':
headers = 'true'
elif headers.lower() == 'http-code':
headers = 'code'
elif headers.lower() == 'url':
headers = 'URL'
else:
headers = 'body'
element = etree.Element("RegexExtractor", guiclass="RegexExtractorGui",
testclass="RegexExtractor", testname="Get %s" % varname, enabled="true")
element.append(JMX._string_prop("RegexExtractor.useHeaders", headers))
element.append(JMX._string_prop("RegexExtractor.refname", varname))
element.append(JMX._string_prop("RegexExtractor.regex", regexp))
element.append(JMX._string_prop("RegexExtractor.template", template))
element.append(JMX._string_prop("RegexExtractor.default", default))
element.append(JMX._string_prop("RegexExtractor.match_number", match_no))
element.extend(JMX.get_scope_props(scope, from_var))
return element
@staticmethod
def _get_boundary_extractor(varname, subject, left, right, match_no, defvalue='NOT_FOUND', scope='', from_var=''):
"""
:type varname: str
:type subject: str
:type left: str
:type right: str
:type match_no: int
:type defvalue: str
:type scope: str
:type from_var: str
:rtype: lxml.etree.Element
"""
subjects = {
'body': 'false',
'body-unescaped': 'unescaped',
'body-as-document': 'as_document',
'response-headers': 'true',
'request-headers': 'request_headers',
'url': 'URL',
'code': 'code',
'message': 'message',
}
subject = subjects.get(subject, 'false')  # unknown subjects fall back to response body
element = etree.Element("BoundaryExtractor", guiclass="BoundaryExtractorGui",
testclass="BoundaryExtractor", testname="Get %s" % varname, enabled="true")
element.append(JMX._string_prop("BoundaryExtractor.useHeaders", subject))
element.append(JMX._string_prop("BoundaryExtractor.refname", varname))
element.append(JMX._string_prop("BoundaryExtractor.lboundary", left))
element.append(JMX._string_prop("BoundaryExtractor.rboundary", right))
element.append(JMX._string_prop("RegexExtractor.default", defvalue))
element.append(JMX._string_prop("RegexExtractor.match_number", match_no))
element.extend(JMX.get_scope_props(scope, from_var))
return element
@staticmethod
def _get_jquerycss_extractor(varname, selector, attribute, match_no, default="NOT_FOUND", scope='', from_var=''):
"""
:type varname: str
:type selector: str
:type attribute: str
:type match_no: int
:type default: str
:type scope: str
:type from_var: str
:rtype: lxml.etree.Element
"""
element = etree.Element("HtmlExtractor", guiclass="HtmlExtractorGui", testclass="HtmlExtractor",
testname="Get %s" % varname)
element.append(JMX._string_prop("HtmlExtractor.refname", varname))
element.append(JMX._string_prop("HtmlExtractor.expr", selector))
element.append(JMX._string_prop("HtmlExtractor.attribute", attribute))
element.append(JMX._string_prop("HtmlExtractor.match_number", match_no))
element.append(JMX._string_prop("HtmlExtractor.default", default))
element.extend(JMX.get_scope_props(scope, from_var))
return element
@staticmethod
def _get_json_extractor(varname, jsonpath, default='NOT_FOUND', from_variable=None):
"""
:type varname: str
:type default: str
:rtype: lxml.etree.Element
"""
package = "com.atlantbh.jmeter.plugins.jsonutils.jsonpathextractor"
element = etree.Element("%s.JSONPathExtractor" % package,
guiclass="%s.gui.JSONPathExtractorGui" % package,
testclass="%s.JSONPathExtractor" % package,
testname="Get %s" % varname)
element.append(JMX._string_prop("VAR", varname))
element.append(JMX._string_prop("JSONPATH", jsonpath))
element.append(JMX._string_prop("DEFAULT", default))
if from_variable:
element.append(JMX._string_prop("VARIABLE", from_variable))
element.append(JMX._string_prop("SUBJECT", "VAR"))
return element
@staticmethod
def get_scope_props(scope, from_variable):
props = []
if scope:
props.append(JMX._string_prop("Sample.scope", scope))
if scope == "variable":
props.append(JMX._string_prop("Scope.variable", from_variable))
return props
@staticmethod
def _get_internal_json_extractor(varname, jsonpath, default, scope, from_variable, match_no, concat):
"""
:type varname: str
:type default: str
:rtype: lxml.etree.Element
"""
package = "JSONPostProcessor"
element = etree.Element(package,
guiclass="%sGui" % package,
testclass="%s" % package,
testname="Get %s" % varname)
element.append(JMX._string_prop("JSONPostProcessor.referenceNames", varname))
element.append(JMX._string_prop("JSONPostProcessor.jsonPathExprs", jsonpath))
element.append(JMX._string_prop("JSONPostProcessor.match_numbers", match_no))
if default:
element.append(JMX._string_prop("JSONPostProcessor.defaultValues", default))
element.extend(JMX.get_scope_props(scope, from_variable))
if concat:
element.append(JMX._bool_prop("JSONPostProcessor.compute_concat", True))
return element
@staticmethod
def _get_json_path_assertion(jsonpath, expected_value, json_validation, expect_null, invert, regexp=True):
"""
:type jsonpath: str
:type expected_value: str
:type json_validation: bool
:type expect_null: bool
:type invert: bool
:type regexp: bool
:rtype: lxml.etree.Element
"""
package = "com.atlantbh.jmeter.plugins.jsonutils.jsonpathassertion"
element = etree.Element("%s.JSONPathAssertion" % package,
guiclass="%s.gui.JSONPathAssertionGui" % package,
testclass="%s.JSONPathAssertion" % package,
testname="JSon path assertion")
element.append(JMX._string_prop("JSON_PATH", jsonpath))
element.append(JMX._string_prop("EXPECTED_VALUE", expected_value))
element.append(JMX._bool_prop("JSONVALIDATION", json_validation))
element.append(JMX._bool_prop("EXPECT_NULL", expect_null))
element.append(JMX._bool_prop("INVERT", invert))
element.append(JMX._bool_prop("ISREGEX", regexp))
return element
@staticmethod
def _get_xpath_extractor(varname, xpath, default, validate_xml, ignore_whitespace, match_no, use_namespaces,
use_tolerant_parser, scope, from_var):
"""
:type varname: str
:type xpath: str
:type default: str
:type validate_xml: bool
:type ignore_whitespace: bool
:type use_tolerant_parser: bool
:type scope: str
:type from_var: str
:rtype: lxml.etree.Element
"""
element = etree.Element("XPathExtractor",
guiclass="XPathExtractorGui",
testclass="XPathExtractor",
testname="Get %s" % varname)
element.append(JMX._string_prop("XPathExtractor.refname", varname))
element.append(JMX._string_prop("XPathExtractor.xpathQuery", xpath))
element.append(JMX._string_prop("XPathExtractor.default", default))
element.append(JMX._bool_prop("XPathExtractor.validate", validate_xml))
element.append(JMX._bool_prop("XPathExtractor.whitespace", ignore_whitespace))
element.append(JMX._string_prop("XPathExtractor.matchNumber", match_no))
element.append(JMX._bool_prop("XPathExtractor.namespace", use_namespaces))
element.append(JMX._bool_prop("XPathExtractor.tolerant", use_tolerant_parser))
element.extend(JMX.get_scope_props(scope, from_var))
return element
@staticmethod
def _get_xpath_assertion(xpath, validate_xml, ignore_whitespace, use_tolerant_parser, invert):
"""
:type xpath: str
:type validate_xml: bool
:type ignore_whitespace: bool
:type use_tolerant_parser: bool
:type invert: bool
:rtype: lxml.etree.Element
"""
element = etree.Element("XPathAssertion",
guiclass="XPathAssertionGui",
testclass="XPathAssertion",
testname="XPath Assertion")
element.append(JMX._string_prop("XPath.xpath", xpath))
element.append(JMX._bool_prop("XPath.validate", validate_xml))
element.append(JMX._bool_prop("XPath.whitespace", ignore_whitespace))
element.append(JMX._bool_prop("XPath.tolerant", use_tolerant_parser))
element.append(JMX._bool_prop("XPath.negate", invert))
return element
@staticmethod
def _get_resp_assertion(field, contains, is_regexp, is_invert, assume_success=False):
"""
:type field: str
:type contains: list[str]
:type is_regexp: bool
:type is_invert: bool
:rtype: lxml.etree.Element
"""
tname = "Assert %s %s" % ("hasn't" if is_invert else "has",
"[" + ", ".join('"' + str(x) + '"' for x in contains) + "]")
element = etree.Element("ResponseAssertion", guiclass="AssertionGui",
testclass="ResponseAssertion", testname=tname)
if field == Scenario.FIELD_HEADERS:
fld = "Assertion.response_headers"
elif field == Scenario.FIELD_RESP_CODE:
fld = "Assertion.response_code"
else:
fld = "Assertion.response_data"
if is_regexp:
if is_invert:
mtype = 6 # not contains
else:
mtype = 2 # contains
else:
if is_invert:
mtype = 20 # not substring
else:
mtype = 16 # substring
element.append(JMX._string_prop("Assertion.test_field", fld))
element.append(JMX._string_prop("Assertion.test_type", mtype))
element.append(JMX._bool_prop("Assertion.assume_success", assume_success))
coll_prop = etree.Element("collectionProp", name="Asserion.test_strings")
for string in contains:
coll_prop.append(JMX._string_prop("", string))
element.append(coll_prop)
return element
@staticmethod
def _get_jsr223_element(language, script_file, parameters, execute, script_text=None, cache_key='true'):
if execute == "before":
proc = "JSR223PreProcessor"
else:
proc = "JSR223PostProcessor"
element = etree.Element(proc, guiclass="TestBeanGUI", testclass=proc, testname=proc)
element.append(JMX._string_prop("filename", script_file if script_file else ''))
element.append(JMX._string_prop("script", script_text if script_text else ''))
element.append(JMX._string_prop("parameters", parameters))
element.append(JMX._string_prop("scriptLanguage", language))
element.append(JMX._string_prop("cacheKey", cache_key))
return element
@staticmethod
def _get_csv_config(path, delimiter, loop, variable_names, is_quoted):
"""
:type path: str
:type delimiter: str
:type is_quoted: bool
:type loop: bool
:type variable_names: str
:rtype: lxml.etree.Element
"""
element = etree.Element("CSVDataSet", guiclass="TestBeanGUI",
testclass="CSVDataSet", testname="CSV %s" % os.path.basename(path))
element.append(JMX._string_prop("filename", path))
element.append(JMX._string_prop("delimiter", delimiter))
element.append(JMX._bool_prop("quotedData", is_quoted))
element.append(JMX._bool_prop("recycle", loop))
element.append(JMX._bool_prop("stopThread", not loop))
element.append(JMX._string_prop("variableNames", variable_names))
return element
@staticmethod
def _get_csv_config_random(path, delimiter, loop, variable_names):
"""
:type path: str
:type delimiter: str
:type loop: bool
:type variable_names: str
:rtype: lxml.etree.Element
"""
element = etree.Element("com.blazemeter.jmeter.RandomCSVDataSetConfig",
guiclass="com.blazemeter.jmeter.RandomCSVDataSetConfigGui",
testclass="com.blazemeter.jmeter.RandomCSVDataSetConfig",
testname="bzm - Random CSV Data Set Config")
element.append(JMX._string_prop("filename", path))
element.append(JMX._string_prop("fileEncoding", "UTF-8"))
element.append(JMX._string_prop("delimiter", delimiter))
element.append(JMX._string_prop("variableNames", variable_names))
element.append(JMX._bool_prop("randomOrder", True))
element.append(JMX._bool_prop("ignoreFirstLine", False if variable_names else True))
element.append(JMX._bool_prop("rewindOnTheEndOfList", loop))
element.append(JMX._bool_prop("independentListPerThread", False))
return element
def set_enabled(self, sel, state):
"""
Toggle items by selector
:type sel: str
:type state: bool
"""
items = self.get(sel)
self.log.debug("Enable %s elements %s: %s", state, sel, items)
for item in items:
item.set("enabled", 'true' if state else 'false')
def set_text(self, sel, text):
"""
Set text value
:type sel: str
:type text: str
"""
items = self.get(sel)
res = 0
for item in items:
item.text = str(text)
res += 1
return res
@staticmethod
def _get_simple_controller(name):
return etree.Element("GenericController", guiclass="LogicControllerGui", testclass="GenericController",
testname=name)
def _add_results_tree(self):
dbg_tree = etree.Element("ResultCollector",
testname="View Results Tree",
testclass="ResultCollector",
guiclass="ViewResultsFullVisualizer")
self.append(self.TEST_PLAN_SEL, dbg_tree)
self.append(self.TEST_PLAN_SEL, etree.Element("hashTree"))
@staticmethod
def _get_results_tree():
dbg_tree = etree.Element("ResultCollector",
testname="View Results Tree",
testclass="ResultCollector",
guiclass="ViewResultsFullVisualizer")
return dbg_tree
@staticmethod
def _get_if_controller(condition):
controller = etree.Element("IfController", guiclass="IfControllerPanel", testclass="IfController",
testname="If Controller")
controller.append(JMX._string_prop("IfController.condition", condition))
return controller
@staticmethod
def _get_once_controller():
"""
Generates Once Only Controller
:return: etree element, OnceOnlyController
"""
controller = etree.Element("OnceOnlyController", guiclass="OnceOnlyControllerGui",
testclass="OnceOnlyController", testname="Once Only Controller")
return controller
@staticmethod
def _get_loop_controller(loops):
"""
Generates Loop Controller
Expected values (by JMeter):
LoopController.loops (iterations): int
LoopController.continue_forever: boolean
:return: etree element, LoopController
"""
if loops == 'forever':
iterations = -1
else:
iterations = loops
controller = etree.Element("LoopController", guiclass="LoopControlPanel", testclass="LoopController",
testname="Loop Controller")
# continue_forever must be 'true' for a nested Loop Controller; 'false'
# would let the parent invoke it only once, ignoring the loop count below
controller.append(JMX._bool_prop("LoopController.continue_forever", True))
controller.append(JMX._string_prop("LoopController.loops", str(iterations)))
return controller
@staticmethod
def _get_foreach_controller(input_var, loop_var):
controller = etree.Element("ForeachController", guiclass="ForeachControlPanel", testclass="ForeachController",
testname="ForEach Controller")
controller.append(JMX._string_prop("ForeachController.inputVal", input_var))
controller.append(JMX._string_prop("ForeachController.returnVal", loop_var))
controller.append(JMX._bool_prop("ForeachController.useSeparator", True))
return controller
@staticmethod
def _get_while_controller(condition):
controller = etree.Element("WhileController", guiclass="WhileControllerGui", testclass="WhileController",
testname="While Controller")
controller.append(JMX._string_prop("WhileController.condition", condition))
return controller
@staticmethod
def _get_transaction_controller(transaction_name, force_parent_sample=False, include_timers=False):
controller = etree.Element("TransactionController", guiclass="TransactionControllerGui",
testclass="TransactionController", testname=transaction_name)
controller.append(JMX._bool_prop("TransactionController.parent", force_parent_sample))
controller.append(JMX._bool_prop("TransactionController.includeTimers", include_timers))
return controller
@staticmethod
def _get_functional_mode_prop(enabled):
return JMX._bool_prop("TestPlan.functional_mode", enabled)
@staticmethod
def _get_action_block(action_index, target_index, duration_ms):
action = etree.Element("TestAction", guiclass="TestActionGui", testclass="TestAction", testname="Test Action")
action.append(JMX.int_prop("ActionProcessor.action", action_index))
action.append(JMX.int_prop("ActionProcessor.target", target_index))
action.append(JMX._string_prop("ActionProcessor.duration", str(duration_ms)))
return action
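# Illustrative sketch (not part of the original module): build a constant
# think-time timer pair and print the JMX fragment. Assumes this module's
# JMX class and its lxml "etree" import; runs only when executed directly.
if __name__ == "__main__":
    timer_element, timer_tree = JMX.get_constant_timer("300")
    print(etree.tostring(timer_element, pretty_print=True).decode())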
|
"""Tests for Philips Hue config flow."""
import asyncio
from aiohttp import client_exceptions
import aiohue
from aiohue.discovery import URL_NUPNP
import pytest
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.components.hue import config_flow, const
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import MockConfigEntry
@pytest.fixture(name="hue_setup", autouse=True)
def hue_setup_fixture():
"""Mock hue entry setup."""
with patch("homeassistant.components.hue.async_setup_entry", return_value=True):
yield
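# The autouse fixture above patches entry setup for every test in this
# module, so no flow accidentally runs the real integration setup.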
def get_mock_bridge(
bridge_id="aabbccddeeff", host="1.2.3.4", mock_create_user=None, username=None
):
"""Return a mock bridge."""
mock_bridge = Mock()
mock_bridge.host = host
mock_bridge.username = username
mock_bridge.config.name = "Mock Bridge"
mock_bridge.id = bridge_id
if not mock_create_user:
async def create_user(username):
mock_bridge.username = username
mock_create_user = create_user
mock_bridge.create_user = mock_create_user
mock_bridge.initialize = AsyncMock()
return mock_bridge
async def test_flow_works(hass):
"""Test config flow ."""
mock_bridge = get_mock_bridge()
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
flow = next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert flow["context"]["unique_id"] == "aabbccddeeff"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "create_entry"
assert result["title"] == "Mock Bridge"
assert result["data"] == {
"host": "1.2.3.4",
"username": "home-assistant#test-home",
}
assert len(mock_bridge.initialize.mock_calls) == 1
async def test_manual_flow_works(hass, aioclient_mock):
"""Test config flow discovers only already configured bridges."""
mock_bridge = get_mock_bridge()
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": "manual"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
bridge = get_mock_bridge(
bridge_id="id-1234", host="2.2.2.2", username="username-abc"
)
with patch(
"aiohue.Bridge", return_value=bridge,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "2.2.2.2"}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
with patch("homeassistant.components.hue.config_flow.authenticate_bridge"), patch(
"homeassistant.components.hue.async_unload_entry", return_value=True
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == "create_entry"
assert result["title"] == "Mock Bridge"
assert result["data"] == {
"host": "2.2.2.2",
"username": "username-abc",
}
entries = hass.config_entries.async_entries("hue")
assert len(entries) == 1
entry = entries[-1]
assert entry.unique_id == "id-1234"
async def test_manual_flow_bridge_exist(hass, aioclient_mock):
"""Test config flow discovers only already configured bridges."""
MockConfigEntry(
domain="hue", unique_id="id-1234", data={"host": "2.2.2.2"}
).add_to_hass(hass)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp", return_value=[],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
bridge = get_mock_bridge(
bridge_id="id-1234", host="2.2.2.2", username="username-abc"
)
with patch(
"aiohue.Bridge", return_value=bridge,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "2.2.2.2"}
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_manual_flow_no_discovered_bridges(hass, aioclient_mock):
"""Test config flow discovers no bridges."""
aioclient_mock.get(URL_NUPNP, json=[])
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
async def test_flow_all_discovered_bridges_exist(hass, aioclient_mock):
"""Test config flow discovers only already configured bridges."""
aioclient_mock.get(URL_NUPNP, json=[{"internalipaddress": "1.2.3.4", "id": "bla"}])
MockConfigEntry(
domain="hue", unique_id="bla", data={"host": "1.2.3.4"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
async def test_flow_bridges_discovered(hass, aioclient_mock):
"""Test config flow discovers two bridges."""
# Add ignored config entry. Should still show up as option.
MockConfigEntry(
domain="hue", source=config_entries.SOURCE_IGNORE, unique_id="bla"
).add_to_hass(hass)
aioclient_mock.get(
URL_NUPNP,
json=[
{"internalipaddress": "1.2.3.4", "id": "bla"},
{"internalipaddress": "5.6.7.8", "id": "beer"},
],
)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
with pytest.raises(vol.Invalid):
assert result["data_schema"]({"id": "not-discovered"})
result["data_schema"]({"id": "bla"})
result["data_schema"]({"id": "beer"})
result["data_schema"]({"id": "manual"})
async def test_flow_two_bridges_discovered_one_new(hass, aioclient_mock):
"""Test config flow discovers two bridges."""
aioclient_mock.get(
URL_NUPNP,
json=[
{"internalipaddress": "1.2.3.4", "id": "bla"},
{"internalipaddress": "5.6.7.8", "id": "beer"},
],
)
MockConfigEntry(
domain="hue", unique_id="bla", data={"host": "1.2.3.4"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
assert result["data_schema"]({"id": "beer"})
assert result["data_schema"]({"id": "manual"})
with pytest.raises(vol.error.MultipleInvalid):
assert not result["data_schema"]({"id": "bla"})
async def test_flow_timeout_discovery(hass):
"""Test config flow ."""
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
side_effect=asyncio.TimeoutError,
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "abort"
assert result["reason"] == "discover_timeout"
async def test_flow_link_timeout(hass):
"""Test config flow."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=asyncio.TimeoutError),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_flow_link_unknown_error(hass):
"""Test if a unknown error happened during the linking processes."""
mock_bridge = get_mock_bridge(mock_create_user=AsyncMock(side_effect=OSError),)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {"base": "linking"}
async def test_flow_link_button_not_pressed(hass):
"""Test config flow ."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=aiohue.LinkButtonNotPressed),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {"base": "register_failed"}
async def test_flow_link_unknown_host(hass):
"""Test config flow ."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=client_exceptions.ClientOSError),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_bridge_ssdp(hass):
"""Test a bridge being discovered."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
async def test_bridge_ssdp_discover_other_bridge(hass):
"""Test that discovery ignores other bridges."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={ssdp.ATTR_UPNP_MANUFACTURER_URL: "http://www.notphilips.com"},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_emulated_hue(hass):
"""Test if discovery info is from an emulated hue instance."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "Home Assistant Bridge",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_missing_location(hass):
"""Test if discovery info is missing a location attribute."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_missing_serial(hass):
"""Test if discovery info is a serial attribute."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_espalexa(hass):
"""Test if discovery info is from an Espalexa based device."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "Espalexa (0.0.0.0)",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_already_configured(hass):
"""Test if a discovered bridge has already been configured."""
MockConfigEntry(
domain="hue", unique_id="1234", data={"host": "0.0.0.0"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_import_with_no_config(hass):
"""Test importing a host without an existing config file."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "import"}, data={"host": "0.0.0.0"},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
async def test_creating_entry_removes_entries_for_same_host_or_bridge(hass):
"""Test that we clean up entries for same host and bridge.
An IP can only hold a single bridge and a single bridge can only be
accessible via a single IP. So when we create a new entry, we'll remove
all existing entries that either have same IP or same bridge_id.
"""
orig_entry = MockConfigEntry(
domain="hue", data={"host": "0.0.0.0", "username": "aaaa"}, unique_id="id-1234",
)
orig_entry.add_to_hass(hass)
MockConfigEntry(
domain="hue", data={"host": "1.2.3.4", "username": "bbbb"}, unique_id="id-5678",
).add_to_hass(hass)
assert len(hass.config_entries.async_entries("hue")) == 2
bridge = get_mock_bridge(
bridge_id="id-1234", host="2.2.2.2", username="username-abc"
)
with patch(
"aiohue.Bridge", return_value=bridge,
):
result = await hass.config_entries.flow.async_init(
"hue", data={"host": "2.2.2.2"}, context={"source": "import"}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
with patch("homeassistant.components.hue.config_flow.authenticate_bridge"), patch(
"homeassistant.components.hue.async_unload_entry", return_value=True
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == "create_entry"
assert result["title"] == "Mock Bridge"
assert result["data"] == {
"host": "2.2.2.2",
"username": "username-abc",
}
entries = hass.config_entries.async_entries("hue")
assert len(entries) == 2
new_entry = entries[-1]
assert orig_entry.entry_id != new_entry.entry_id
assert new_entry.unique_id == "id-1234"
async def test_bridge_homekit(hass):
"""Test a bridge being discovered via HomeKit."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "homekit"},
data={
"host": "0.0.0.0",
"serial": "1234",
"manufacturerURL": config_flow.HUE_MANUFACTURERURL,
"properties": {"id": "aa:bb:cc:dd:ee:ff"},
},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
async def test_bridge_import_already_configured(hass):
"""Test if a import flow aborts if host is already configured."""
MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "import"},
data={"host": "0.0.0.0", "properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_bridge_homekit_already_configured(hass):
"""Test if a HomeKit discovered bridge has already been configured."""
MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "homekit"},
data={"host": "0.0.0.0", "properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_ssdp_discovery_update_configuration(hass):
"""Test if a discovered bridge is configured and updated with new host."""
entry = MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://1.1.1.1/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "aabbccddeeff",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert entry.data["host"] == "1.1.1.1"
async def test_homekit_discovery_update_configuration(hass):
"""Test if a discovered bridge is configured and updated with new host."""
entry = MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "homekit"},
data={"host": "1.1.1.1", "properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert entry.data["host"] == "1.1.1.1"
async def test_options_flow(hass):
"""Test options config flow."""
entry = MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"},
)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
const.CONF_ALLOW_HUE_GROUPS: True,
const.CONF_ALLOW_UNREACHABLE: True,
},
)
assert result["type"] == "create_entry"
assert result["data"] == {
const.CONF_ALLOW_HUE_GROUPS: True,
const.CONF_ALLOW_UNREACHABLE: True,
}
|
from a10sdk.common.A10BaseClass import A10BaseClass
class FilterCfg(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param dport1: {"description": "Forward Destination Port", "format": "number", "maximum": 65535, "minimum": 1, "not": "dport2", "type": "number"}
:param source_addr: {"type": "string", "description": "Forward Source IP (Source IP address)", "format": "ipv4-address"}
:param dport2: {"description": "Forward Destination Port (Dest Port)", "format": "number", "maximum": 65535, "minimum": 1, "not": "dport1", "type": "number"}
:param dest_mask: {"type": "string", "description": "Forward Destination IP Subnet (Destination Netmask)", "format": "ipv4-netmask"}
:param session_type: {"enum": ["ipv6", "sip"], "type": "string", "description": "'ipv6': Display ipv6 sessions only; 'sip': SIP sessions; ", "format": "enum"}
:param source_port: {"description": "Forward Source Port", "minimum": 1, "type": "number", "maximum": 65535, "format": "number"}
:param dest_addr: {"type": "string", "description": "Forward Destination IP (Destination IP address)", "format": "ipv4-address"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "filter-cfg"
self.DeviceProxy = ""
self.dport1 = ""
self.source_addr = ""
self.dport2 = ""
self.dest_mask = ""
self.session_type = ""
self.source_port = ""
self.dest_addr = ""
for key, value in kwargs.items():
setattr(self, key, value)
class SessionFilter(A10BaseClass):
"""Class Description::
Create a convenience Filter to display/clear sessions.
Class session-filter supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.
:param set: {"default": 0, "optional": true, "type": "number", "description": "Set filter criteria", "format": "flag"}
:param name: {"description": "Session filter name", "format": "string", "minLength": 1, "optional": false, "maxLength": 31, "type": "string"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/session-filter/{name}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "name"]
self.b_key = "session-filter"
self.a10_url="/axapi/v3/session-filter/{name}"
self.DeviceProxy = ""
self.filter_cfg = {}
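# "set" would shadow the Python builtin, so the SDK exposes it as A10WW_set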
self.A10WW_set = ""
self.name = ""
self.uuid = ""
for key, value in kwargs.items():
setattr(self, key, value)
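# Illustrative usage sketch (hypothetical values; assumes an authenticated
# DeviceProxy from common/device_proxy.py):
#
#     session_filter = SessionFilter(name="web-sessions", DeviceProxy=proxy)
#     session_filter.filter_cfg = {"dest_addr": "10.0.0.1", "dport1": 443}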
|
__author__ = 'Administrator'
import json
from heat.engine.resources.hwcloud.hws_service import HWSService
class EVSService(HWSService):
def __init__(self, ak, sk, region, protocol, host, port):
super(EVSService, self).__init__(ak, sk, 'EVS', region, protocol, host, port)
def list(self, project_id, opts=None):
uri = '/v2/%s/cloudvolumes' % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def create_volume(self, project_id, availability_zone, size, volume_type,
backup_id=None, description=None, name=None, imageRef=None, count=None):
"""
{
"volume": {
"backup_id": null,
"count": 1,
"availability_zone": "az1.dc1",
"description": "test_volume_1",
"size": 120,
"name": "test_volume_1",
"imageRef": null,
"volume_type": "SSD"
}
}
:param project_id:
:param availability_zone:
:param size:
:param volume_type:
:param backup_id:
:param description:
:param name:
:param imageRef:
:param count:
:return: dict
{
"job_id": "70a599e0-31e7-49b7-b260-868f441e862b",
}
or
{
"error": {
"message": "XXXX",
"code": "XXX"
}
}
Get job detail result:
{
u'body': {
u'status': u'RUNNING',
u'fail_reason': None,
u'job_id': u'8aace0c651b0a02301521ae1f96c6138',
u'job_type': u'createVolume',
u'entities': {
u'volume_id': u'9bd6fa88-0e60-48e5-ae61-7e028dbdf045'
},
u'end_time': u'',
u'begin_time': u'2016-01-07T06: 59: 23.115Z',
u'error_code': None
},
u'status': 200
}
{
u'body': {
u'status': u'SUCCESS',
u'fail_reason': None,
u'job_id': u'8aace0c651b0a02301521ae1f96c6138',
u'job_type': u'createVolume',
u'entities': {
u'volume_id': u'9bd6fa88-0e60-48e5-ae61-7e028dbdf045'
},
u'end_time': u'2016-01-07T06: 59: 48.279Z',
u'begin_time': u'2016-01-07T06: 59: 23.115Z',
u'error_code': None
},
u'status': 200
}
Failed job result:
{
u'body': {
u'status': u'FAIL',
u'fail_reason': u"EbsCreateVolumeTask-fail:badRequest: Invalid input received: Availability zone 'cn-north-1' is invalid",
u'job_id': u'8aace0c651b0a02301521ab7e58660ca',
u'job_type': u'createVolume',
u'entities': {
},
u'end_time': u'2016-01-07T06: 13: 25.809Z',
u'begin_time': u'2016-01-07T06: 13: 25.509Z',
u'error_code': u'EVS.5400'
},
u'status': 200
}
"""
uri = '/v2/%s/cloudvolumes' % project_id
request_body_dict = {}
volume = {}
volume['availability_zone'] = availability_zone
volume['size'] = size
volume['volume_type'] = volume_type
if backup_id:
volume['backup_id'] = backup_id
if description:
volume['description'] = description
if name:
volume['name'] = name
if imageRef:
volume['imageRef'] = imageRef
if count:
volume['count'] = count
request_body_dict['volume'] = volume
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
def delete_volume(self, project_id, volume_id):
"""
DELETE /v2/{tenant_id}/cloudvolumes/{volume_id}
:return: dict response of the DELETE request
"""
uri = '/v2/%s/cloudvolumes/%s' % (project_id, volume_id)
response = self.delete(uri)
return response
def get_volume_detail(self, project_id, volume_id):
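# Note: detail reads use the "/volumes" path, unlike the "/cloudvolumes"
# path used by list/create/delete above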
uri = "/v2/%s/volumes/%s" % (project_id, volume_id)
response = self.get(uri)
return response
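# Illustrative usage sketch (hypothetical credentials and endpoint):
#
#     evs = EVSService(ak, sk, 'cn-north-1', 'https', 'evs.example.com', 443)
#     resp = evs.create_volume(project_id, 'az1.dc1', 120, 'SSD', name='vol-1')
#     # then poll the returned job_id until its status is SUCCESS or FAIL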
|
"""Provide some handy classes for user to implement a simple computation module
in Python easily.
"""
import logging
from .base_module import BaseModule
from ..initializer import Uniform
from .. import ndarray as nd
class PythonModule(BaseModule):
"""A convenient module class that implements many of the module APIs as
empty functions.
Parameters
----------
data_names : list of str
Names of the data expected by the module.
label_names : list of str
Names of the labels expected by the module. Could be ``None`` if the
module does not need labels.
output_names : list of str
Names of the outputs.
"""
def __init__(self, data_names, label_names, output_names, logger=logging):
super(PythonModule, self).__init__(logger=logger)
if isinstance(data_names, tuple):
data_names = list(data_names)
if isinstance(label_names, tuple):
label_names = list(label_names)
self._data_names = data_names
self._label_names = label_names
self._output_names = output_names
self._data_shapes = None
self._label_shapes = None
self._output_shapes = None
################################################################################
# Symbol information
################################################################################
@property
def data_names(self):
"""A list of names for data required by this module."""
return self._data_names
@property
def output_names(self):
"""A list of names for the outputs of this module."""
return self._output_names
################################################################################
# Input/Output information
################################################################################
@property
def data_shapes(self):
"""A list of (name, shape) pairs specifying the data inputs to this module."""
return self._data_shapes
@property
def label_shapes(self):
"""A list of (name, shape) pairs specifying the label inputs to this module.
If this module does not accept labels -- either it is a module without loss
function, or it is not bound for training, then this should return an empty
list ``[]``.
"""
return self._label_shapes
@property
def output_shapes(self):
"""A list of (name, shape) pairs specifying the outputs of this module."""
return self._output_shapes
################################################################################
# Parameters of a module
################################################################################
def get_params(self):
"""Get parameters, those are potentially copies of the the actual parameters used
to do computation on the device.
Returns
-------
``({}, {})``, a pair of empty dicts. Subclass should override this method if it
contains parameters.
"""
return (dict(), dict())
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
allow_missing=False, force_init=False):
"""Initialize the parameters and auxiliary states. By default this function
does nothing. Subclass should override this method if it contains parameters.
Parameters
----------
initializer : Initializer
Called to initialize parameters if needed.
arg_params : dict
If not ``None``, should be a dictionary of existing `arg_params`. Initialization
will be copied from that.
aux_params : dict
If not ``None``, should be a dictionary of existing `aux_params`. Initialization
will be copied from that.
allow_missing : bool
If ``True``, params could contain missing values, and the initializer will be
called to fill those missing params.
force_init : bool
If ``True``, will force re-initialize even if already initialized.
"""
pass
def update(self):
"""Update parameters according to the installed optimizer and the gradients computed
in the previous forward-backward batch. Currently we do nothing here. Subclass should
override this method if it contains parameters.
"""
pass
def update_metric(self, eval_metric, labels):
"""Evaluate and accumulate evaluation metric on outputs of the last forward computation.
Subclass should override this method if needed.
Parameters
----------
eval_metric : EvalMetric
labels : list of NDArray
Typically ``data_batch.label``.
"""
if self._label_shapes is None:
# since we do not need labels, we are probably not a module with a loss
# function or predictions, so just ignore this call
return
# by default we expect our outputs are some scores that could be evaluated
eval_metric.update(labels, self.get_outputs())
################################################################################
# module setup
################################################################################
def bind(self, data_shapes, label_shapes=None, for_training=True,
inputs_need_grad=False, force_rebind=False, shared_module=None,
grad_req='write'):
"""Bind the symbols to construct executors. This is necessary before one
can perform computation with the module.
Parameters
----------
data_shapes : list of (str, tuple)
Typically is ``data_iter.provide_data``.
label_shapes : list of (str, tuple)
Typically is ``data_iter.provide_label``.
for_training : bool
Default is ``True``. Whether the executors should be bound for training.
inputs_need_grad : bool
Default is ``False``. Whether the gradients to the input data need to be computed.
Typically this is not needed. But this might be needed when implementing composition
of modules.
force_rebind : bool
Default is ``False``. This function does nothing if the executors are already
bound. But with this ``True``, the executors will be forced to rebind.
shared_module : Module
Default is ``None``. This is used in bucketing. When not ``None``, the shared module
essentially corresponds to a different bucket -- a module with different symbol
but with the same sets of parameters (e.g. unrolled RNNs with different lengths).
grad_req : str, list of str, dict of str to str
Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
(defaults to 'write').
Can be specified globally (str) or for each argument (list, dict).
"""
if self.binded and not force_rebind:
self.logger.warning('Already bound, ignoring bind()')
return
assert grad_req == 'write', "Python module only supports 'write' gradient"
self.for_training = for_training
self.inputs_need_grad = inputs_need_grad
assert len(data_shapes) == len(self._data_names)
assert [x[0] for x in data_shapes] == self._data_names
self._data_shapes = data_shapes
self._label_shapes = label_shapes
if label_shapes is not None:
assert self._label_names is not None
assert len(self._label_names) == len(label_shapes)
assert [x[0] for x in label_shapes] == self._label_names
self._output_shapes = self._compute_output_shapes()
def _compute_output_shapes(self):
"""The subclass should implement this method to compute the shape of
outputs. This method can assume that the ``data_shapes`` and ``label_shapes``
are already initialized.
"""
raise NotImplementedError()
def init_optimizer(self, kvstore='local', optimizer='sgd',
optimizer_params=(('learning_rate', 0.01),), force_init=False):
"""Install and initialize optimizers. By default we do nothing. Subclass
should override this method if needed.
Parameters
----------
kvstore : str or KVStore
Default `'local'`.
optimizer : str or Optimizer
Default `'sgd'`
optimizer_params : dict
Default `(('learning_rate', 0.01),)`. The default value is not a dictionary,
just to avoid pylint warning of dangerous default values.
force_init : bool
Default `False`, indicating whether we should force re-initializing the
optimizer in the case an optimizer is already installed.
"""
pass
class PythonLossModule(PythonModule):
"""A convenient module class that implements many of the module APIs as
empty functions.
Parameters
----------
name : str
Names of the module. The outputs will be named `[name + '_output']`.
data_names : list of str
Defaults to ``['data']``. Names of the data expected by this module.
Should be a list of only one name.
label_names : list of str
Default ``['softmax_label']``. Names of the labels expected by the module.
Should be a list of only one name.
grad_func : function
Optional. If not ``None``, should be a function that takes `scores`
and `labels`, both of type `NDArray`, and return the gradients with
respect to the scores according to this loss function. The return
value could be a numpy array or an `NDArray`.
"""
def __init__(self, name='pyloss', data_names=('data',), label_names=('softmax_label',),
logger=logging, grad_func=None):
super(PythonLossModule, self).__init__(data_names, label_names,
[name + '_output'], logger=logger)
self._name = name
assert len(data_names) == 1
assert len(label_names) == 1
self._scores = None
self._labels = None
self._scores_grad = None
if grad_func is not None:
assert callable(grad_func)
self._grad_func = grad_func
def _compute_output_shapes(self):
"""Compute the shapes of outputs. As a loss module with outputs, we simply
output whatever we receive as inputs (i.e. the scores).
"""
return [(self._name + '_output', self._data_shapes[0][1])]
def forward(self, data_batch, is_train=None):
"""Forward computation. Here we do nothing but to keep a reference to
the scores and the labels so that we can do backward computation.
Parameters
----------
data_batch : DataBatch
Could be anything with similar API implemented.
is_train : bool
Default is ``None``, which means `is_train` takes the value of ``self.for_training``.
"""
self._scores = data_batch.data[0]
if is_train is None:
is_train = self.for_training
if is_train:
self._labels = data_batch.label[0]
def get_outputs(self, merge_multi_context=True):
"""Get outputs of the previous forward computation. As a output loss module,
we treat the inputs to this module as scores, and simply return them.
Parameters
----------
merge_multi_context : bool
Should always be ``True``, because we do not use multiple contexts for computing.
"""
assert merge_multi_context is True
return [self._scores]
def backward(self, out_grads=None):
"""Backward computation.
Parameters
----------
out_grads : NDArray or list of NDArray, optional
Gradient on the outputs to be propagated back.
This parameter is only needed when bind is called
on outputs that are not a loss function.
"""
assert out_grads is None, 'For a loss module, out_grads should be None'
assert self.for_training
self._backward_impl()
def _backward_impl(self):
"""Actual implementation of the backward computation. The computation
should take ``self._scores`` and ``self._labels`` and then compute the
gradients with respect to the scores, store it as an `NDArray` in
``self._scores_grad``.
Instead of defining a subclass and overriding this function,
a more convenient way is to pass in a `grad_func` when constructing
the module object. Then it will be called to compute the gradients.
"""
if self._grad_func is not None:
grad = self._grad_func(self._scores, self._labels)
if not isinstance(grad, nd.NDArray):
grad = nd.array(grad)
self._scores_grad = grad
else:
raise NotImplementedError()
def get_input_grads(self, merge_multi_context=True):
"""Get the gradients to the inputs, computed in the previous backward computation.
Parameters
----------
merge_multi_context : bool
Should always be ``True`` because we do not use multiple context for computation.
"""
assert merge_multi_context is True
return [self._scores_grad]
def install_monitor(self, mon):
"""Install monitor on all executors."""
raise NotImplementedError()
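# Illustrative sketch (not part of the original module): a squared-error loss
# wired through grad_func; the gradient formula is an assumption chosen for
# demonstration purposes only.
#
#     def mse_grad(scores, labels):
#         return 2 * (scores - labels.reshape(scores.shape))
#
#     loss_mod = PythonLossModule(name='mse', grad_func=mse_grad)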
|
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.data import kb
from lib.request.connect import Connect as Request
def getPageTemplate(payload, place):
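# Build and cache one (page, parser-status) template per (payload, place)
# pair; without a payload/place, fall back to the original page.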
retVal = (kb.originalPage, kb.errorIsNone)
if payload and place:
if (payload, place) not in kb.pageTemplates:
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
kb.pageTemplates[(payload, place)] = (page, kb.lastParserStatus is None)
retVal = kb.pageTemplates[(payload, place)]
return retVal
|
import argparse
import sys
from typing import Any
from django.core import validators
from django.core.exceptions import ValidationError
from django.core.management.base import CommandError
from django.db.utils import IntegrityError
from zerver.lib.actions import do_create_user
from zerver.lib.initial_password import initial_password
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Create the specified user with a default initial password.
Set tos_version=None, so that the user needs to do a ToS flow on login.
Omit both <email> and <full name> for interactive user creation.
"""
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument('--this-user-has-accepted-the-tos',
dest='tos',
action="store_true",
help='Acknowledgement that the user has already accepted the ToS.')
parser.add_argument('--password',
help='password of new user. For development only. '
'Note that we recommend against setting '
'passwords this way, since they can be snooped by any user account '
'on the server via `ps -ef` or by any superuser with '
'read access to the user\'s bash history.')
parser.add_argument('--password-file',
help='The file containing the password of the new user.')
parser.add_argument('email', metavar='<email>', nargs='?', default=argparse.SUPPRESS,
help='email address of new user')
parser.add_argument('full_name', metavar='<full name>', nargs='?',
default=argparse.SUPPRESS,
help='full name of new user')
self.add_realm_args(parser, True, "The name of the existing realm to which to add the user.")
def handle(self, *args: Any, **options: Any) -> None:
if not options["tos"]:
raise CommandError("""You must confirm that this user has accepted the
Terms of Service by passing --this-user-has-accepted-the-tos.""")
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
try:
email = options['email']
full_name = options['full_name']
try:
validators.validate_email(email)
except ValidationError:
raise CommandError("Invalid email address.")
except KeyError:
if 'email' in options or 'full_name' in options:
raise CommandError("""Either specify an email and full name as two
parameters, or specify no parameters for interactive user creation.""")
else:
while True:
email = input("Email: ")
try:
validators.validate_email(email)
break
except ValidationError:
print("Invalid email address.", file=sys.stderr)
full_name = input("Full name: ")
try:
if options['password_file'] is not None:
with open(options['password_file']) as f:
pw = f.read().strip()
elif options['password'] is not None:
pw = options['password']
else:
user_initial_password = initial_password(email)
if user_initial_password is None:
raise CommandError("Password is unusable.")
pw = user_initial_password
do_create_user(
email,
pw,
realm,
full_name,
acting_user=None,
)
except IntegrityError:
raise CommandError("User already exists.")
|
"""
A driver for XenServer or Xen Cloud Platform.
**Variable Naming Scheme**
- suffix "_ref" for opaque references
- suffix "_uuid" for UUIDs
- suffix "_rec" for record objects
"""
import math
from os_xenapi.client import session
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import units
from oslo_utils import versionutils
import six.moves.urllib.parse as urlparse
import nova.conf
from nova import exception
from nova.i18n import _
from nova.virt import driver
from nova.virt.xenapi import host
from nova.virt.xenapi import pool
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops
from nova.virt.xenapi import volumeops
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
OVERHEAD_BASE = 3
OVERHEAD_PER_MB = 0.00781
OVERHEAD_PER_VCPU = 1.5
def invalid_option(option_name, recommended_value):
LOG.exception(_('Current value of '
'CONF.xenserver.%(option)s option incompatible with '
'CONF.xenserver.independent_compute=True. '
'Consider using "%(recommended)s"'),
{'option': option_name,
'recommended': recommended_value})
raise exception.NotSupportedWithOption(
operation=option_name,
option='CONF.xenserver.independent_compute')
class XenAPIDriver(driver.ComputeDriver):
"""A connection to XenServer or Xen Cloud Platform."""
capabilities = {
"has_imagecache": False,
"supports_recreate": False,
"supports_migrate_to_same_host": False,
"supports_attach_interface": True,
"supports_device_tagging": True,
}
def __init__(self, virtapi, read_only=False):
super(XenAPIDriver, self).__init__(virtapi)
url = CONF.xenserver.connection_url
username = CONF.xenserver.connection_username
password = CONF.xenserver.connection_password
if not url or password is None:
raise Exception(_('Must specify connection_url, '
'connection_username (optionally), and '
'connection_password to use '
'compute_driver=xenapi.XenAPIDriver'))
self._session = session.XenAPISession(url, username, password,
originator="nova")
self._volumeops = volumeops.VolumeOps(self._session)
self._host_state = None
self._host = host.Host(self._session, self.virtapi)
self._vmops = vmops.VMOps(self._session, self.virtapi)
self._initiator = None
self._hypervisor_hostname = None
self._pool = pool.ResourcePool(self._session, self.virtapi)
@property
def host_state(self):
if not self._host_state:
self._host_state = host.HostState(self._session)
return self._host_state
def init_host(self, host):
if CONF.xenserver.independent_compute:
# Check various options are in the correct state:
if CONF.xenserver.check_host:
invalid_option('CONF.xenserver.check_host', False)
if CONF.flat_injected:
invalid_option('CONF.flat_injected', False)
if CONF.default_ephemeral_format and \
CONF.default_ephemeral_format != 'ext3':
invalid_option('CONF.default_ephemeral_format', 'ext3')
if CONF.xenserver.check_host:
vm_utils.ensure_correct_host(self._session)
if not CONF.xenserver.independent_compute:
try:
vm_utils.cleanup_attached_vdis(self._session)
except Exception:
LOG.exception(_('Failure while cleaning up attached VDIs'))
def instance_exists(self, instance):
"""Checks existence of an instance on the host.
:param instance: The instance to lookup
Returns True if supplied instance exists on the host, False otherwise.
NOTE(belliott): This is an override of the base method for
efficiency.
"""
return self._vmops.instance_exists(instance.name)
def estimate_instance_overhead(self, instance_info):
"""Get virtualization overhead required to build an instance of the
given flavor.
:param instance_info: Instance/flavor to calculate overhead for.
:returns: Overhead memory in MB.
"""
# XenServer memory overhead is proportional to the size of the
# VM. Larger flavor VMs become more efficient with respect to
# overhead.
        # Interpolated formula to predict overhead required per VM,
        # based on data from:
        # https://wiki.openstack.org/wiki/XenServer/Overhead
        # Some padding is added to each value to fit all available VM data.
memory_mb = instance_info['memory_mb']
vcpus = instance_info.get('vcpus', 1)
overhead = ((memory_mb * OVERHEAD_PER_MB) + (vcpus * OVERHEAD_PER_VCPU)
+ OVERHEAD_BASE)
overhead = math.ceil(overhead)
return {'memory_mb': overhead}
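        # A worked example (hedged; flavor values are illustrative): for a
        # flavor with memory_mb=2048 and vcpus=2, the overhead is
        #   ceil(2048 * 0.00781 + 2 * 1.5 + 3) = ceil(21.99...) = 22 MB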
def list_instances(self):
"""List VM instances."""
return self._vmops.list_instances()
def list_instance_uuids(self):
"""Get the list of nova instance uuids for VMs found on the
hypervisor.
"""
return self._vmops.list_instance_uuids()
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
"""Create VM instance."""
self._vmops.spawn(context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info)
def confirm_migration(self, context, migration, instance, network_info):
"""Confirms a resize, destroying the source VM."""
self._vmops.confirm_migration(migration, instance, network_info)
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
"""Finish reverting a resize."""
# NOTE(vish): Xen currently does not use network info.
self._vmops.finish_revert_migration(context, instance,
block_device_info,
power_on)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
"""Completes a resize, turning on the migrated instance."""
self._vmops.finish_migration(context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info, power_on)
def snapshot(self, context, instance, image_id, update_task_state):
"""Create snapshot from a running VM instance."""
self._vmops.snapshot(context, instance, image_id, update_task_state)
def post_interrupted_snapshot_cleanup(self, context, instance):
"""Cleans up any resources left after a failed snapshot."""
self._vmops.post_interrupted_snapshot_cleanup(context, instance)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot VM instance."""
self._vmops.reboot(instance, reboot_type,
bad_volumes_callback=bad_volumes_callback)
def set_admin_password(self, instance, new_pass):
"""Set the root/admin password on the VM instance."""
self._vmops.set_admin_password(instance, new_pass)
def inject_file(self, instance, b64_path, b64_contents):
"""Create a file on the VM instance. The file path and contents
should be base64-encoded.
"""
self._vmops.inject_file(instance, b64_path, b64_contents)
def change_instance_metadata(self, context, instance, diff):
"""Apply a diff to the instance metadata."""
self._vmops.change_instance_metadata(instance, diff)
def destroy(self, context, instance, network_info, block_device_info=None,
destroy_disks=True):
"""Destroy VM instance."""
self._vmops.destroy(instance, network_info, block_device_info,
destroy_disks)
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup after instance being destroyed by Hypervisor."""
pass
def pause(self, instance):
"""Pause VM instance."""
self._vmops.pause(instance)
def unpause(self, instance):
"""Unpause paused VM instance."""
self._vmops.unpause(instance)
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
"""Transfers the VHD of a running instance to another host, then shuts
off the instance copies over the COW disk
"""
# NOTE(vish): Xen currently does not use network info.
# TODO(PhilDay): Add support for timeout (clean shutdown)
return self._vmops.migrate_disk_and_power_off(context, instance,
dest, flavor, block_device_info)
def suspend(self, context, instance):
"""suspend the specified instance."""
self._vmops.suspend(instance)
def resume(self, context, instance, network_info, block_device_info=None):
"""resume the specified instance."""
self._vmops.resume(instance)
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance."""
self._vmops.rescue(context, instance, network_info, image_meta,
rescue_password)
def set_bootable(self, instance, is_bootable):
"""Set the ability to power on/off an instance."""
self._vmops.set_bootable(instance, is_bootable)
def unrescue(self, instance, network_info):
"""Unrescue the specified instance."""
self._vmops.unrescue(instance)
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance."""
# TODO(PhilDay): Add support for timeout (clean shutdown)
self._vmops.power_off(instance)
def power_on(self, context, instance, network_info,
block_device_info=None):
"""Power on the specified instance."""
self._vmops.power_on(instance)
def soft_delete(self, instance):
"""Soft delete the specified instance."""
self._vmops.soft_delete(instance)
def restore(self, instance):
"""Restore the specified instance."""
self._vmops.restore(instance)
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances."""
self._vmops.poll_rebooting_instances(timeout, instances)
def reset_network(self, instance):
"""reset networking for specified instance."""
self._vmops.reset_network(instance)
def inject_network_info(self, instance, nw_info):
"""inject network info for specified instance."""
self._vmops.inject_network_info(instance, nw_info)
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks."""
self._vmops.plug_vifs(instance, network_info)
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks."""
self._vmops.unplug_vifs(instance, network_info)
def get_info(self, instance):
"""Return data about VM instance."""
return self._vmops.get_info(instance)
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
return self._vmops.get_diagnostics(instance)
def get_instance_diagnostics(self, instance):
"""Return data about VM diagnostics."""
return self._vmops.get_instance_diagnostics(instance)
def get_all_bw_counters(self, instances):
"""Return bandwidth usage counters for each interface on each
running VM.
"""
# we only care about VMs that correspond to a nova-managed
# instance:
imap = {inst['name']: inst['uuid'] for inst in instances}
bwcounters = []
        # get a dictionary keyed by instance name; values are dictionaries
        # of mac addresses mapping to the bw counters, e.g.
        # {'instance-001': {'12:34:56:78:90:12': {'bw_in': 0, ...}}}
all_counters = self._vmops.get_all_bw_counters()
for instance_name, counters in all_counters.items():
if instance_name in imap:
# yes these are stats for a nova-managed vm
# correlate the stats with the nova instance uuid:
for vif_counter in counters.values():
vif_counter['uuid'] = imap[instance_name]
bwcounters.append(vif_counter)
return bwcounters
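        # Each appended entry is one VIF's counters tagged with the nova
        # instance uuid (hedged; keys other than 'uuid' and 'bw_in' depend on
        # what vmops reports), e.g.:
        #   {'uuid': '<instance uuid>', 'bw_in': 0, 'bw_out': 0}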
def get_console_output(self, context, instance):
"""Return snapshot of console."""
return self._vmops.get_console_output(instance)
def get_vnc_console(self, context, instance):
"""Return link to instance's VNC console."""
return self._vmops.get_vnc_console(instance)
def get_volume_connector(self, instance):
"""Return volume connector information."""
if not self._initiator or not self._hypervisor_hostname:
stats = self.host_state.get_host_stats(refresh=True)
try:
self._initiator = stats['host_other-config']['iscsi_iqn']
self._hypervisor_hostname = stats['host_hostname']
except (TypeError, KeyError) as err:
LOG.warning('Could not determine key: %s', err,
instance=instance)
self._initiator = None
return {
'ip': self._get_block_storage_ip(),
'initiator': self._initiator,
'host': self._hypervisor_hostname
}
def _get_block_storage_ip(self):
# If CONF.my_block_storage_ip is set, use it.
if CONF.my_block_storage_ip != CONF.my_ip:
return CONF.my_block_storage_ip
return self.get_host_ip_addr()
def get_host_ip_addr(self):
xs_url = urlparse.urlparse(CONF.xenserver.connection_url)
return xs_url.netloc
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
"""Attach volume storage to VM instance."""
self._volumeops.attach_volume(connection_info,
instance['name'],
mountpoint)
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
"""Detach volume storage from VM instance."""
self._volumeops.detach_volume(connection_info,
instance['name'],
mountpoint)
def get_console_pool_info(self, console_type):
xs_url = urlparse.urlparse(CONF.xenserver.connection_url)
return {'address': xs_url.netloc,
'username': CONF.xenserver.connection_username,
'password': CONF.xenserver.connection_password}
def get_available_resource(self, nodename):
"""Retrieve resource information.
This method is called when nova-compute launches, and
as part of a periodic task that records the results in the DB.
:param nodename: ignored in this driver
:returns: dictionary describing resources
"""
host_stats = self.host_state.get_host_stats(refresh=True)
# Updating host information
total_ram_mb = host_stats['host_memory_total'] / units.Mi
# NOTE(belliott) memory-free-computed is a value provided by XenServer
# for gauging free memory more conservatively than memory-free.
free_ram_mb = host_stats['host_memory_free_computed'] / units.Mi
total_disk_gb = host_stats['disk_total'] / units.Gi
used_disk_gb = host_stats['disk_used'] / units.Gi
allocated_disk_gb = host_stats['disk_allocated'] / units.Gi
hyper_ver = versionutils.convert_version_to_int(
self._session.product_version)
dic = {'vcpus': host_stats['host_cpu_info']['cpu_count'],
'memory_mb': total_ram_mb,
'local_gb': total_disk_gb,
'vcpus_used': host_stats['vcpus_used'],
'memory_mb_used': total_ram_mb - free_ram_mb,
'local_gb_used': used_disk_gb,
'hypervisor_type': 'XenServer',
'hypervisor_version': hyper_ver,
'hypervisor_hostname': host_stats['host_hostname'],
'cpu_info': jsonutils.dumps(host_stats['cpu_model']),
'disk_available_least': total_disk_gb - allocated_disk_gb,
'supported_instances': host_stats['supported_instances'],
'pci_passthrough_devices': jsonutils.dumps(
host_stats['pci_passthrough_devices']),
'numa_topology': None}
return dic
def ensure_filtering_rules_for_instance(self, instance, network_info):
        # NOTE(salvatore-orlando): this enforces security groups on
        # host initialization and live migration.
        # In XenAPI we do not assume instances are running upon host
        # initialization.
return
def check_can_live_migrate_destination(self, context, instance,
src_compute_info, dst_compute_info,
block_migration=False, disk_over_commit=False):
"""Check if it is possible to execute live migration.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance object
:param block_migration: if true, prepare for block migration
:param disk_over_commit: if true, allow disk over commit
:returns: a XenapiLiveMigrateData object
"""
return self._vmops.check_can_live_migrate_destination(context,
instance,
block_migration,
disk_over_commit)
def cleanup_live_migration_destination_check(self, context,
dest_check_data):
"""Do required cleanup on dest host after check_can_live_migrate calls
:param context: security context
:param dest_check_data: result of check_can_live_migrate_destination
"""
pass
def check_can_live_migrate_source(self, context, instance,
dest_check_data, block_device_info=None):
"""Check if it is possible to execute live migration.
This checks if the live migration can succeed, based on the
results from check_can_live_migrate_destination.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param dest_check_data: result of check_can_live_migrate_destination
includes the block_migration flag
:param block_device_info: result of _get_instance_block_device_info
:returns: a XenapiLiveMigrateData object
"""
return self._vmops.check_can_live_migrate_source(context, instance,
dest_check_data)
def get_instance_disk_info(self, instance,
block_device_info=None):
"""Used by libvirt for live migration. We rely on xenapi
checks to do this for us.
"""
pass
def live_migration(self, context, instance, dest,
post_method, recover_method, block_migration=False,
migrate_data=None):
"""Performs the live migration of the specified instance.
:param context: security context
:param instance:
nova.db.sqlalchemy.models.Instance object
instance object that is migrated.
:param dest: destination host
:param post_method:
post operation method.
expected nova.compute.manager._post_live_migration.
:param recover_method:
recovery method when any exception occurs.
expected nova.compute.manager._rollback_live_migration.
:param block_migration: if true, migrate VM disk.
:param migrate_data: a XenapiLiveMigrateData object
"""
self._vmops.live_migrate(context, instance, dest, post_method,
recover_method, block_migration, migrate_data)
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
"""Performs a live migration rollback.
:param context: security context
:param instance: instance object that was being migrated
:param network_info: instance network information
:param block_device_info: instance block device information
:param destroy_disks:
if true, destroy disks at destination during cleanup
:param migrate_data: A XenapiLiveMigrateData object
"""
# NOTE(johngarbutt) Destroying the VM is not appropriate here
# and in the cases where it might make sense,
# XenServer has already done it.
# NOTE(sulo): The only cleanup we do explicitly is to forget
# any volume that was attached to the destination during
# live migration. XAPI should take care of all other cleanup.
self._vmops.rollback_live_migration_at_destination(instance,
network_info,
block_device_info)
def pre_live_migration(self, context, instance, block_device_info,
network_info, disk_info, migrate_data):
"""Preparation live migration.
:param block_device_info:
It must be the result of _get_instance_volume_bdms()
at compute manager.
:returns: a XenapiLiveMigrateData object
"""
return self._vmops.pre_live_migration(context, instance,
block_device_info, network_info, disk_info, migrate_data)
def post_live_migration(self, context, instance, block_device_info,
migrate_data=None):
"""Post operation of live migration at source host.
:param context: security context
:instance: instance object that was migrated
:block_device_info: instance block device information
:param migrate_data: a XenapiLiveMigrateData object
"""
self._vmops.post_live_migration(context, instance, migrate_data)
def post_live_migration_at_source(self, context, instance, network_info):
"""Unplug VIFs from networks at source.
:param context: security context
:param instance: instance object reference
:param network_info: instance network information
"""
self._vmops.post_live_migration_at_source(context, instance,
network_info)
def post_live_migration_at_destination(self, context, instance,
network_info,
block_migration=False,
block_device_info=None):
"""Post operation of live migration at destination host.
:param context: security context
:param instance:
nova.db.sqlalchemy.models.Instance object
instance object that is migrated.
:param network_info: instance network information
:param block_migration: if true, post operation of block_migration.
"""
        self._vmops.post_live_migration_at_destination(context, instance,
                network_info, block_migration, block_device_info)
def unfilter_instance(self, instance, network_info):
"""Removes security groups configured for an instance."""
return self._vmops.unfilter_instance(instance, network_info)
def refresh_security_group_rules(self, security_group_id):
"""Updates security group rules for all instances associated with a
given security group.
Invoked when security group rules are updated.
"""
return self._vmops.refresh_security_group_rules(security_group_id)
def refresh_instance_security_rules(self, instance):
"""Updates security group rules for specified instance.
Invoked when instances are added/removed to a security group
or when a rule is added/removed to a security group.
"""
return self._vmops.refresh_instance_security_rules(instance)
def get_available_nodes(self, refresh=False):
stats = self.host_state.get_host_stats(refresh=refresh)
return [stats["hypervisor_hostname"]]
def host_power_action(self, action):
"""The only valid values for 'action' on XenServer are 'reboot' or
'shutdown', even though the API also accepts 'startup'. As this is
not technically possible on XenServer, since the host is the same
physical machine as the hypervisor, if this is requested, we need to
raise an exception.
"""
if action in ("reboot", "shutdown"):
return self._host.host_power_action(action)
else:
msg = _("Host startup on XenServer is not supported.")
raise NotImplementedError(msg)
def set_host_enabled(self, enabled):
"""Sets the compute host's ability to accept new instances."""
return self._host.set_host_enabled(enabled)
def get_host_uptime(self):
"""Returns the result of calling "uptime" on the target host."""
return self._host.get_host_uptime()
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window. On start, it triggers
guest VMs evacuation.
"""
return self._host.host_maintenance_mode(host, mode)
def add_to_aggregate(self, context, aggregate, host, **kwargs):
"""Add a compute host to an aggregate."""
return self._pool.add_to_aggregate(context, aggregate, host, **kwargs)
def remove_from_aggregate(self, context, aggregate, host, **kwargs):
"""Remove a compute host from an aggregate."""
return self._pool.remove_from_aggregate(context,
aggregate, host, **kwargs)
def undo_aggregate_operation(self, context, op, aggregate,
host, set_error=True):
"""Undo aggregate operation when pool error raised."""
return self._pool.undo_aggregate_operation(context, op,
aggregate, host, set_error)
def resume_state_on_host_boot(self, context, instance, network_info,
block_device_info=None):
"""resume guest state when a host is booted."""
self._vmops.power_on(instance)
def get_per_instance_usage(self):
"""Get information about instance resource usage.
:returns: dict of nova uuid => dict of usage info
"""
return self._vmops.get_per_instance_usage()
def attach_interface(self, context, instance, image_meta, vif):
"""Use hotplug to add a network interface to a running instance.
The counter action to this is :func:`detach_interface`.
:param context: The request context.
:param nova.objects.instance.Instance instance:
The instance which will get an additional network interface.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param nova.network.model.VIF vif:
The object which has the information about the interface to attach.
:raise nova.exception.NovaException: If the attach fails.
:return: None
"""
self._vmops.attach_interface(instance, vif)
def detach_interface(self, context, instance, vif):
"""Use hotunplug to remove a network interface from a running instance.
The counter action to this is :func:`attach_interface`.
:param context: The request context.
:param nova.objects.instance.Instance instance:
The instance which gets a network interface removed.
:param nova.network.model.VIF vif:
The object which has the information about the interface to detach.
:raise nova.exception.NovaException: If the detach fails.
:return: None
"""
self._vmops.detach_interface(instance, vif)
|
from datetime import datetime, timedelta
import json
import logging
import os
from botocore.exceptions import ClientError
import boto3
def bucket_info(c, bucket):
result = {'Bucket': bucket}
response = c.get_metric_statistics(
Namespace='AWS/S3',
MetricName='NumberOfObjects',
Dimensions=[
{'Name': 'BucketName',
'Value': bucket},
{'Name': 'StorageType',
'Value': 'AllStorageTypes'}
],
StartTime=datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
EndTime=datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0),
        Period=60 * 60 * 24,  # one day, in seconds
Statistics=['Average'])
if not response['Datapoints']:
result['ObjectCount'] = 0
else:
result['ObjectCount'] = response['Datapoints'][0]['Average']
response = c.get_metric_statistics(
Namespace='AWS/S3',
MetricName='BucketSizeBytes',
Dimensions=[
{'Name': 'BucketName',
'Value': bucket},
{'Name': 'StorageType',
'Value': 'StandardStorage'},
],
StartTime=datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0) - timedelta(10),
EndTime=datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0),
        Period=60 * 60 * 24,  # one day, in seconds
Statistics=['Average'])
if not response['Datapoints']:
result['Size'] = 0
result['SizeGB'] = 0
else:
result['Size'] = response['Datapoints'][0]['Average']
result['SizeGB'] = result['Size'] / (1024.0 * 1024 * 1024)
return result
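# A minimal usage sketch (hedged; the bucket name is illustrative and the
# CloudWatch client must belong to the bucket's region):
#
#   cw = boto3.client('cloudwatch', region_name='us-east-1')
#   info = bucket_info(cw, 'my-example-bucket')
#   print(info['ObjectCount'], info['SizeGB'])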
def main():
logging.basicConfig(level=logging.INFO)
bucket = os.environ.get('BUCKET')
results = {'buckets':[]}
size_count = obj_count = 0.0
s = boto3.Session()
s3 = s.client('s3')
buckets = s3.list_buckets()['Buckets']
cw_cache = {}
index = 0
for b in buckets:
index += 1
        try:
            bucket_region = s3.get_bucket_location(
                Bucket=b['Name'])['LocationConstraint']
            # get_bucket_location returns None for buckets in us-east-1
            if bucket_region is None:
                bucket_region = "us-east-1"
        except ClientError:
            # We don't have permission to the bucket; fall back to us-east-1
            bucket_region = "us-east-1"
# get the cloudwatch session for the region the bucket is in
if bucket_region in cw_cache:
cw = cw_cache[bucket_region]
else:
cw = s.client('cloudwatch', region_name=bucket_region)
cw_cache[bucket_region] = cw
i = bucket_info(cw, b['Name'])
results['buckets'].append(i)
obj_count += i['ObjectCount']
size_count += i['SizeGB']
results['TotalObjects'] = obj_count
results['TotalSizeGB'] = size_count
print(json.dumps(results, indent=2))
if __name__ == '__main__':
main()
|
import unittest
class TestClient(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.translate import Client
return Client
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_constructor(self):
from google.cloud.translate_v2._http import Connection
from google.cloud.translate_v2.client import ENGLISH_ISO_639
http = object()
client = self._make_one(_http=http)
self.assertIsInstance(client._connection, Connection)
self.assertIsNone(client._connection.credentials)
self.assertIs(client._connection.http, http)
self.assertEqual(client.target_language, ENGLISH_ISO_639)
def test_constructor_non_default(self):
from google.cloud.translate_v2._http import Connection
http = object()
target = "es"
client = self._make_one(target_language=target, _http=http)
self.assertIsInstance(client._connection, Connection)
self.assertIsNone(client._connection.credentials)
self.assertIs(client._connection.http, http)
self.assertEqual(client.target_language, target)
def test_get_languages(self):
from google.cloud.translate_v2.client import ENGLISH_ISO_639
client = self._make_one(_http=object())
supported = [
{"language": "en", "name": "English"},
{"language": "af", "name": "Afrikaans"},
{"language": "am", "name": "Amharic"},
]
data = {"data": {"languages": supported}}
conn = client._connection = _Connection(data)
result = client.get_languages()
self.assertEqual(result, supported)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "GET")
self.assertEqual(req["path"], "/languages")
self.assertEqual(req["query_params"], {"target": ENGLISH_ISO_639})
def test_get_languages_no_target(self):
client = self._make_one(target_language=None, _http=object())
supported = [{"language": "en"}, {"language": "af"}, {"language": "am"}]
data = {"data": {"languages": supported}}
conn = client._connection = _Connection(data)
result = client.get_languages()
self.assertEqual(result, supported)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(len(req), 3)
self.assertEqual(req["method"], "GET")
self.assertEqual(req["path"], "/languages")
self.assertEqual(req["query_params"], {})
def test_get_languages_explicit_target(self):
client = self._make_one(_http=object())
target_language = "en"
supported = [
{"language": "en", "name": "Spanish"},
{"language": "af", "name": "Afrikaans"},
{"language": "am", "name": "Amharic"},
]
data = {"data": {"languages": supported}}
conn = client._connection = _Connection(data)
result = client.get_languages(target_language)
self.assertEqual(result, supported)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "GET")
self.assertEqual(req["path"], "/languages")
self.assertEqual(req["query_params"], {"target": target_language})
def test_detect_language_bad_result(self):
client = self._make_one(_http=object())
value = "takoy"
conn = client._connection = _Connection({})
with self.assertRaises(ValueError):
client.detect_language(value)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "/detect")
expected_data = {"q": [value]}
self.assertEqual(req["data"], expected_data)
def test_detect_language_single_value(self):
client = self._make_one(_http=object())
value = "takoy"
detection = {
"confidence": 1.0,
"input": value,
"language": "ru",
"isReliable": False,
}
data = {"data": {"detections": [[detection]]}}
conn = client._connection = _Connection(data)
result = client.detect_language(value)
self.assertEqual(result, detection)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "/detect")
expected_data = {"q": [value]}
self.assertEqual(req["data"], expected_data)
def test_detect_language_multiple_values(self):
client = self._make_one(_http=object())
value1 = u"fa\xe7ade" # facade (with a cedilla)
detection1 = {
"confidence": 0.6166008,
"input": value1,
"isReliable": False,
"language": "en",
}
value2 = "s'il vous plait"
detection2 = {
"confidence": 0.29728225,
"input": value2,
"isReliable": False,
"language": "fr",
}
data = {"data": {"detections": [[detection1], [detection2]]}}
conn = client._connection = _Connection(data)
result = client.detect_language([value1, value2])
self.assertEqual(result, [detection1, detection2])
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "/detect")
expected_data = {"q": [value1, value2]}
self.assertEqual(req["data"], expected_data)
def test_detect_language_multiple_results(self):
client = self._make_one(_http=object())
value = "soy"
detection1 = {
"confidence": 0.81496066,
"input": value,
"language": "es",
"isReliable": False,
}
detection2 = {
"confidence": 0.222,
"input": value,
"language": "en",
"isReliable": False,
}
data = {"data": {"detections": [[detection1, detection2]]}}
client._connection = _Connection(data)
with self.assertRaises(ValueError):
client.detect_language(value)
def test_translate_bad_result(self):
client = self._make_one(_http=object())
value = "hvala ti"
conn = client._connection = _Connection({})
with self.assertRaises(ValueError):
client.translate(value)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "")
expected_data = {
"target": "en",
"q": [value],
"cid": (),
"source": None,
"model": None,
"format": None,
}
self.assertEqual(req["data"], expected_data)
def test_translate_defaults(self):
client = self._make_one(_http=object())
value = "hvala ti"
translation = {
"detectedSourceLanguage": "hr",
"translatedText": "thank you",
"input": value,
}
data = {"data": {"translations": [translation]}}
conn = client._connection = _Connection(data)
result = client.translate(value)
self.assertEqual(result, translation)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "")
expected_data = {
"target": "en",
"q": [value],
"cid": (),
"source": None,
"model": None,
"format": None,
}
self.assertEqual(req["data"], expected_data)
def test_translate_multiple(self):
client = self._make_one(_http=object())
value1 = "hvala ti"
translation1 = {
"detectedSourceLanguage": "hr",
"translatedText": "thank you",
"input": value1,
}
value2 = "Dankon"
translation2 = {
"detectedSourceLanguage": "eo",
"translatedText": "thank you",
"input": value2,
}
data = {"data": {"translations": [translation1, translation2]}}
conn = client._connection = _Connection(data)
result = client.translate([value1, value2])
self.assertEqual(result, [translation1, translation2])
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "")
expected_data = {
"target": "en",
"q": [value1, value2],
"cid": (),
"source": None,
"model": None,
"format": None,
}
self.assertEqual(req["data"], expected_data)
def test_translate_explicit(self):
client = self._make_one(_http=object())
value = "thank you"
target_language = "eo"
source_language = "en"
translation = {"translatedText": "Dankon", "input": value}
data = {"data": {"translations": [translation]}}
conn = client._connection = _Connection(data)
cid = "123"
format_ = "text"
model = "nmt"
result = client.translate(
value,
target_language=target_language,
source_language=source_language,
format_=format_,
customization_ids=cid,
model=model,
)
self.assertEqual(result, translation)
# Verify requested.
self.assertEqual(len(conn._requested), 1)
req = conn._requested[0]
self.assertEqual(req["method"], "POST")
self.assertEqual(req["path"], "")
expected_data = {
"target": target_language,
"q": [value],
"cid": [cid],
"source": source_language,
"model": model,
"format": format_,
}
self.assertEqual(req["data"], expected_data)
class _Connection(object):
def __init__(self, *responses):
self._responses = responses
self._requested = []
def api_request(self, **kw):
self._requested.append(kw)
response, self._responses = self._responses[0], self._responses[1:]
return response
|
'''
Created on May 29, 2013
@author: vieglais
Copied directly from:
http://asimpleweblog.wordpress.com/2010/06/20/julian-date-calculator/
'''
import math
import datetime
import pytz
MJD0 = 2400000.5 # 1858 November 17, 00:00:00 hours
def base60_to_decimal(xyz,delimiter=None):
"""Decimal value from numbers in sexagesimal system.
The input value can be either a floating point number or a string
such as "hh mm ss.ss" or "dd mm ss.ss". Delimiters other than " "
can be specified using the keyword ``delimiter``.
"""
divisors = [1,60.0,3600.0]
xyzlist = str(xyz).split(delimiter)
sign = -1 if xyzlist[0].find("-") != -1 else 1
xyzlist = [abs(float(x)) for x in xyzlist]
decimal_value = 0
for i,j in zip(xyzlist,divisors): # if xyzlist has <3 values then
# divisors gets clipped.
decimal_value += i/j
decimal_value = -decimal_value if sign == -1 else decimal_value
return decimal_value
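# Examples (hedged): base60_to_decimal("12 30 00") == 12.5, and
# base60_to_decimal("-0 30 0") == -0.5 (the sign is read from the first part).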
def decimal_to_base60(deci,precision=1e-8):
"""Converts decimal number into sexagesimal number parts.
``deci`` is the decimal number to be converted. ``precision`` is how
close the multiple of 60 and 3600, for example minutes and seconds,
are to 60.0 before they are rounded to the higher quantity, for
example hours and minutes.
"""
sign = "+" # simple putting sign back at end gives errors for small
# deg. This is because -00 is 00 and hence ``format``,
# that constructs the delimited string will not add '-'
# sign. So, carry it as a character.
if deci < 0:
deci = abs(deci)
sign = "-"
frac1, num = math.modf(deci)
num = int(num) # hours/degrees is integer valued but type is float
frac2, frac1 = math.modf(frac1*60.0)
frac1 = int(frac1) # minutes is integer valued but type is float
frac2 *= 60.0 # number of seconds between 0 and 60
# Keep seconds and minutes in [0 - 60.0000)
if abs(frac2 - 60.0) < precision:
frac2 = 0.0
frac1 += 1
if abs(frac1 - 60.0) < precision:
frac1 = 0.0
num += 1
return (sign,num,frac1,frac2)
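# Example (hedged): decimal_to_base60(12.5) == ('+', 12, 30, 0.0), i.e.
# 12 hours (or degrees), 30 minutes, 0 seconds.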
def julian_date(year,month,day,hour,minute,second):
"""Given year, month, day, hour, minute and second return JD.
``year``, ``month``, ``day``, ``hour`` and ``minute`` are integers,
truncates fractional part; ``second`` is a floating point number.
For BC year: use -(year-1). Example: 1 BC = 0, 1000 BC = -999.
"""
MJD0 = 2400000.5 # 1858 November 17, 00:00:00 hours
year, month, day, hour, minute =\
int(year),int(month),int(day),int(hour),int(minute)
if month <= 2:
month +=12
year -= 1
modf = math.modf
# Julian calendar on or before 1582 October 4 and Gregorian calendar
# afterwards.
    # Integer (floor) division preserves the original Python 2 semantics.
    if 10000 * year + 100 * month + day <= 15821004:
        b = -2 + (year + 4716) // 4 - 1179
    else:
        b = year // 400 - year // 100 + year // 4
    mjdmidnight = 365 * year - 679004 + b + int(30.6001 * (month + 1)) + day
fracofday = base60_to_decimal(\
" ".join([str(hour),str(minute),str(second)])) / 24.0
return MJD0 + mjdmidnight + fracofday
def caldate(mjd):
"""Given mjd return calendar date.
    Returns a tuple (year,month,day,hour,minute,second). The last is a
floating point number and others are integers. The precision in
seconds is about 1e-4.
To convert jd to mjd use jd - 2400000.5. In this module 2400000.5 is
stored in MJD0.
"""
MJD0 = 2400000.5 # 1858 November 17, 00:00:00 hours
modf = math.modf
    a = int(mjd + MJD0 + 0.5)
# Julian calendar on or before 1582 October 4 and Gregorian calendar
# afterwards.
if a < 2299161:
b = 0
c = a + 1524
else:
        b = int((a - 1867216.25) / 36524.25)
        c = a + b - b // 4 + 1525
    d = int((c - 122.1) / 365.25)
    e = 365 * d + d // 4
    f = int((c - e) / 30.6001)
day = c - e - int(30.6001*f)
month = f - 1 - 12*int(modf(f/14)[1])
year = d - 4715 - int(modf((7+month)/10)[1])
fracofday = mjd - math.floor(mjd)
hours = fracofday * 24.0
sign,hour,minute,second = decimal_to_base60(hours)
return (year,month,day,int(sign+str(hour)),minute,second)
def dateTime2MJD(dt):
    sec = dt.second + dt.microsecond / 1e6  # microseconds to seconds
jnow = julian_date(dt.year, dt.month, dt.day,
dt.hour, dt.minute, sec)
return jnow - MJD0
def MJD2dateTime(mjd):
dt = caldate(mjd)
return datetime.datetime(int(dt[0]), int(dt[1]), int(dt[2]), int(dt[3]), int(dt[4]), int(dt[5]), 0, pytz.utc)
def now():
'''Returns MJD for right now, UTC
'''
dnow = datetime.datetime.utcnow()
    sec = dnow.second + dnow.microsecond / 1e6  # microseconds to seconds
jnow = julian_date(dnow.year, dnow.month, dnow.day,
dnow.hour, dnow.minute, sec)
return jnow - MJD0
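# A round-trip sketch (hedged; precision is limited to roughly 1e-4 s):
#
#   dt = datetime.datetime(2010, 1, 1, 13, 20, 12, 0, pytz.utc)
#   mjd = dateTime2MJD(dt)
#   MJD2dateTime(mjd)   # ~ datetime(2010, 1, 1, 13, 20, 12, tzinfo=UTC)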
if __name__ == '__main__':
    print("Julian date for 2010/1/1 13:20:12.3456 : ", end="")
    j = julian_date(2010, 1, 1, 13, 20, 12.3456)
    print(j)
    print("Calendar date for MJD " + str(j - MJD0) + " (jd = " + str(j) + " )")
    print("Year: {0}, Month: {1}, Day: {2}, Hour: {3}, Minute: {4}, "
          "Second: {5:8.5f}".format(*caldate(j - MJD0)))
    print("MJD for now = %.5f" % now())
|
import subprocess
import re
import shutil
import os
from os.path import exists, join, split, splitext, normpath, abspath
def ensuredir(path):
if not exists(path):
os.makedirs(path)
def _cmd(*args):
    print(' '.join(args))
    # check_output returns bytes on Python 3; decode so callers can split text
    return subprocess.check_output(args).decode('utf-8')
def extract_dependent_dylibs(dylib_fpath, filter_regex=None):
'Extracts the dependent libraries of the input dylib'
out = _cmd('otool', '-L', dylib_fpath)
out = [line.strip() for line in out.split('\n')]
    if filter_regex is not None:
        out = [line for line in out if re.search(filter_regex, line)]
    dylib_list = [line.split(' ')[0] for line in out]
return dylib_list
def append_suffix(fpath, suffix):
    'appends suffix like /some/filename<suffix>.ext'
root, fname = split(fpath)
name, ext = splitext(fname)
new_fname = name + suffix + ext
new_fpath = join(root, new_fname)
return new_fpath
def get_localize_name_cmd(dylib_fpath, fpath_src):
fname = split(fpath_src)[1]
loader_dst = join('@loader_path', fname)
instname_cmd = ['install_name_tool', '-change', fpath_src, loader_dst, dylib_fpath]
return instname_cmd
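# Example (hedged; paths are illustrative): with dylib_fpath='libfoo.dylib'
# and fpath_src='/opt/local/lib/libbar.dylib' this builds the command:
#   install_name_tool -change /opt/local/lib/libbar.dylib \
#       @loader_path/libbar.dylib libfoo.dylib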
def inspect_dylib(dylib_fpath):
print(_cmd('otool', '-L', dylib_fpath))
def make_distributable_dylib(dylib_fpath, filter_regex='/opt/local/lib/'):
'removes absolute paths from dylibs on mac using otool'
print('[otool] making distributable: %r' % dylib_fpath)
assert exists(dylib_fpath), 'does not exist dylib_fpath=%r' % dylib_fpath
loader_path = split(dylib_fpath)[0]
depends_list = extract_dependent_dylibs(dylib_fpath, filter_regex=filter_regex)
dependency_moved = False
# Build task list
copy_list = []
instname_list = []
for fpath_src in depends_list:
        # Skip dependencies which are relative paths;
        # they have probably already been fixed
if not exists(fpath_src):
continue
fpath_dst = join(loader_path, split(fpath_src)[1])
        # Only copy if the file doesn't already exist
if not exists(fpath_dst):
if re.search(filter_regex, fpath_src):
dependency_moved = True
copy_list.append((fpath_src, fpath_dst))
instname_list.append(get_localize_name_cmd(dylib_fpath, fpath_src))
# Change input name as well
instname_list.append(get_localize_name_cmd(dylib_fpath, dylib_fpath))
# Copy the dependencies to the dylib location
for (fpath_src, fpath_dst) in copy_list:
shutil.copy(fpath_src, fpath_dst)
# Change the dependencies in the dylib
for instname_cmd in instname_list:
_cmd(*instname_cmd)
return dependency_moved
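# A usage sketch (hedged; the path is illustrative):
#
#   moved = make_distributable_dylib('/path/to/libhesaff.dylib')
#   check_depends_dylib('/path/to/libhesaff.dylib', filter_regex='')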
def check_depends_dylib(dylib_fpath, filter_regex='/opt/local/lib/'):
print('[otool] checking dependencies: %r' % dylib_fpath)
assert exists(dylib_fpath), 'does not exist dylib_fpath=%r' % dylib_fpath
depends_list = extract_dependent_dylibs(dylib_fpath, filter_regex=filter_regex)
loader_path = split(dylib_fpath)[0]
exists_list = []
missing_list = []
missing_abs_list = []
for fpath in depends_list:
fixed_fpath = normpath(fpath.replace('@loader_path', loader_path))
absfpath = abspath(fixed_fpath)
if exists(absfpath):
exists_list.append(fpath)
else:
missing_list.append(fpath)
missing_abs_list.append(absfpath)
if len(exists_list) > 0:
print('Verified Dependencies: ')
print('\n'.join(exists_list))
print('----')
else:
print('Nothing exists')
if len(missing_list) > 0:
print('Missing Dependencies: ')
print('\n'.join(missing_list))
print('----')
print('Missing Dependencies: (absolute path)')
print('\n'.join(missing_abs_list))
print('----')
else:
print('Nothing missing')
if __name__ == '__main__':
#from os.path import expanduser
#dylib_fpath = expanduser('~/code/hotspotter/hstpl/extern_feat/libhesaff.dylib')
import sys
if len(sys.argv) == 3:
dylib_fpath = sys.argv[2]
if sys.argv[1] == 'make_distributable':
make_distributable_dylib(dylib_fpath, filter_regex='/opt/local/lib/')
elif sys.argv[1] == 'check_depends':
check_depends_dylib(dylib_fpath, filter_regex='')
else:
print('[otool] unknown command')
else:
print('[otool] not enough arguments')
print(sys.argv)
|
try:
import json
except ImportError:
import simplejson as json
from cm_api.endpoints.types import *
__docformat__ = "epytext"
EXTERNAL_ACCOUNT_PATH = "/externalAccounts/%s"
EXTERNAL_ACCOUNT_FETCH_PATH = "/externalAccounts/%s/%s"
EXTERNAL_ACCOUNT_CONFIG_FETCH_PATH = "/externalAccounts/account/%s"
def get_supported_categories(resource_root):
"""
Lookup all supported categories.
@param resource_root: The root Resource object.
  @return: An ApiExternalAccountCategory list
"""
  return call(resource_root.get,
              EXTERNAL_ACCOUNT_PATH % ("supportedCategories",),
              ApiExternalAccountCategory, True)
def get_supported_types(resource_root, category_name):
"""
Lookup all supported types in a category.
@param resource_root: The root Resource object.
@param category_name: The category name
  @return: An ApiExternalAccountType list
"""
return call(resource_root.get,
EXTERNAL_ACCOUNT_FETCH_PATH % ("supportedTypes", category_name,),
ApiExternalAccountType, True)
def create_external_account(resource_root, name, display_name, type_name,
account_configs=None):
"""
Create an external account
@param resource_root: The root Resource object.
@param name: Immutable external account name
@param display_name: Display name
@param type_name: Account type
@param account_configs: Optional account configuration (ApiList of ApiConfig objects)
@return: An ApiExternalAccount object matching the newly created account
"""
account = ApiExternalAccount(resource_root,
name=name,
displayName=display_name,
typeName=type_name,
accountConfigs=account_configs)
return call(resource_root.post,
EXTERNAL_ACCOUNT_PATH % ("create",),
ApiExternalAccount, False, data=account)
def get_external_account(resource_root, name, view=None):
"""
Lookup an external account by name
@param resource_root: The root Resource object.
@param name: Account name
@param view: View
@return: An ApiExternalAccount object
"""
return call(resource_root.get,
EXTERNAL_ACCOUNT_FETCH_PATH % ("account", name,),
ApiExternalAccount, False, params=view and dict(view=view) or None)
def get_external_account_by_display_name(resource_root,
display_name, view=None):
"""
Lookup an external account by display name
@param resource_root: The root Resource object.
@param display_name: Account display name
@param view: View
@return: An ApiExternalAccount object
"""
return call(resource_root.get,
EXTERNAL_ACCOUNT_FETCH_PATH % ("accountByDisplayName", display_name,),
ApiExternalAccount, False, params=view and dict(view=view) or None)
def get_all_external_accounts(resource_root, type_name, view=None):
"""
Lookup all external accounts of a particular type, by type name.
@param resource_root: The root Resource object.
@param type_name: Type name
@param view: View
@return: An ApiList of ApiExternalAccount objects matching the specified type
"""
return call(resource_root.get,
EXTERNAL_ACCOUNT_FETCH_PATH % ("type", type_name,),
ApiExternalAccount, True, params=view and dict(view=view) or None)
def update_external_account(resource_root, account):
"""
Update an external account
@param resource_root: The root Resource object.
@param account: Account to update, account name must be specified.
@return: An ApiExternalAccount object, representing the updated external account
"""
return call(resource_root.put,
EXTERNAL_ACCOUNT_PATH % ("update",),
ApiExternalAccount, False, data=account)
def delete_external_account(resource_root, name):
"""
Delete an external account by name
@param resource_root: The root Resource object.
@param name: Account name
@return: The deleted ApiExternalAccount object
"""
return call(resource_root.delete,
EXTERNAL_ACCOUNT_FETCH_PATH % ("delete", name,),
ApiExternalAccount, False)
class ApiExternalAccountCategory(BaseApiObject):
_ATTRIBUTES = {
'name' : None,
'displayName' : None,
'description' : None
}
def __str__(self):
return "<ApiExternalAccountCategory>: %s" % (
self.name)
class ApiExternalAccountType(BaseApiObject):
_ATTRIBUTES = {
'name' : None,
'displayName' : None,
'type' : None,
'categoryName' : None,
'description' : None,
'allowedAccountConfigs' : Attr(ApiConfig)
}
def __str__(self):
return "<ApiExternalAccountType>: %s (categoryName: %s)" % (
self.name, self.typeName)
class ApiExternalAccount(BaseApiResource):
_ATTRIBUTES = {
'name' : None,
'displayName' : None,
'typeName' : None,
'createdTime' : ROAttr(),
'lastModifiedTime' : ROAttr(),
'accountConfigs' : Attr(ApiConfig)
}
def __init__(self, resource_root, name=None, displayName=None,
typeName=None, accountConfigs=None):
BaseApiResource.init(self, resource_root, locals())
def __str__(self):
return "<ApiExternalAccount>: %s (typeName: %s)" % (
self.name, self.typeName)
def _path(self):
return EXTERNAL_ACCOUNT_CONFIG_FETCH_PATH % self.name
def get_config(self, view=None):
"""
Retrieve the external account's configuration.
The 'summary' view contains strings as the dictionary values. The full
view contains ApiConfig instances as the values.
@param view: View to materialize ('full' or 'summary')
@return: Dictionary with configuration data.
"""
return self._get_config("config", view)
def update_config(self, config):
"""
Update the external account's configuration.
@param config: Dictionary with configuration to update.
@return: Dictionary with updated configuration.
"""
return self._update_config("config", config)
def external_account_cmd_by_name(self, command_name):
"""
Executes a command on the external account specified
by name.
@param command_name: The name of the command.
@return: Reference to the submitted command.
@since: API v16
"""
return self._cmd(command_name, data=self.name, api_version=16)
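# A usage sketch (hedged; the account name, type and config key are
# illustrative):
#
#   account = create_external_account(resource_root, 'aws1', 'AWS Account',
#                                     'AWS_ACCESS_KEY_AUTH')
#   account = get_external_account(resource_root, 'aws1')
#   account.update_config({'aws_access_key': '...'})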
|
from zion.handlers import BaseHandler
from zion.handlers.base import NotFunctionRequest
from swift.common.utils import public
import ast
import time
class ComputeHandler(BaseHandler):
def __init__(self, request, conf, app, logger, redis):
super(ComputeHandler, self).__init__(
request, conf, app, logger, redis)
def _parse_vaco(self):
return self.req.split_path(3, 4, rest_with_last=True)
    def _get_functions(self):
        # Parse the serialized structure with ast.literal_eval rather than
        # eval(), which would execute arbitrary code supplied in a request
        # header (assumes the header carries a Python literal, e.g. a dict)
        return ast.literal_eval(self.req.headers.pop('functions_data'))
def is_valid_request(self):
return 'functions_data' in self.req.headers
def handle_request(self):
if hasattr(self, self.method) and self.is_valid_request():
try:
handler = getattr(self, self.method)
getattr(handler, 'publicly_accessible')
except AttributeError:
raise NotFunctionRequest()
return handler()
else:
raise NotFunctionRequest()
@public
def GET(self):
"""
GET handler on Compute node
"""
functions_data = self._get_functions()
self.response = self.req.get_response(self.app)
# self.response = Response(body="Test", headers=self.req.headers)
t0 = time.time()
self.apply_function_onget(functions_data)
        self.logger.info('------> TOTAL ZION TIME: %0.6fs' % (time.time() - t0))
return self.response
@public
def PUT(self):
"""
PUT handler on Compute node
"""
functions_data = self._get_functions()
return self.apply_function_onput(functions_data)
|
"""Constants for the AVM FRITZ!SmartHome integration."""
from __future__ import annotations
import logging
from typing import Final
ATTR_STATE_BATTERY_LOW: Final = "battery_low"
ATTR_STATE_DEVICE_LOCKED: Final = "device_locked"
ATTR_STATE_HOLIDAY_MODE: Final = "holiday_mode"
ATTR_STATE_LOCKED: Final = "locked"
ATTR_STATE_SUMMER_MODE: Final = "summer_mode"
ATTR_STATE_WINDOW_OPEN: Final = "window_open"
CONF_CONNECTIONS: Final = "connections"
CONF_COORDINATOR: Final = "coordinator"
DEFAULT_HOST: Final = "fritz.box"
DEFAULT_USERNAME: Final = "admin"
DOMAIN: Final = "fritzbox"
LOGGER: Final[logging.Logger] = logging.getLogger(__package__)
PLATFORMS: Final[list[str]] = ["binary_sensor", "climate", "switch", "sensor"]
|
from eventlet import greenthread
import mock
from oslo_concurrency import processutils
from six.moves.urllib import error as url_error # pylint: disable=E0611
from six.moves.urllib import request as url_request # pylint: disable=E0611
from manila import exception
from manila.share import configuration as conf
from manila.share.drivers.emc.plugins.vnx import connector
from manila import test
from manila.tests.share.drivers.emc.plugins.vnx import fakes
from manila.tests.share.drivers.emc.plugins.vnx import utils as emc_utils
from manila import utils
class XMLAPIConnectorTestData(object):
FAKE_BODY = '<fakebody></fakebody>'
FAKE_RESP = '<Response></Response>'
FAKE_METHOD = 'fake_method'
FAKE_KEY = 'key'
FAKE_VALUE = 'value'
@staticmethod
def req_auth_url():
return 'https://' + fakes.FakeData.emc_nas_server + '/Login'
@staticmethod
def req_credential():
return (
'user=' + fakes.FakeData.emc_nas_login
+ '&password=' + fakes.FakeData.emc_nas_password
+ '&Login=Login'
)
@staticmethod
def req_url_encode():
return {'Content-Type': 'application/x-www-form-urlencoded'}
@staticmethod
def req_url():
return (
'https://'
+ fakes.FakeData.emc_nas_server
+ '/servlets/CelerraManagementServices'
)
XML_CONN_TD = XMLAPIConnectorTestData
class XMLAPIConnectorTest(test.TestCase):
@mock.patch.object(url_request, 'Request', mock.Mock())
def setUp(self):
super(XMLAPIConnectorTest, self).setUp()
emc_share_driver = fakes.FakeEMCShareDriver()
self.configuration = emc_share_driver.configuration
xml_socket = mock.Mock()
xml_socket.read = mock.Mock(return_value=XML_CONN_TD.FAKE_RESP)
opener = mock.Mock()
opener.open = mock.Mock(return_value=xml_socket)
with mock.patch.object(url_request, 'build_opener',
mock.Mock(return_value=opener)):
self.XmlConnector = connector.XMLAPIConnector(
configuration=self.configuration, debug=False)
expected_calls = [
mock.call(XML_CONN_TD.req_auth_url(),
XML_CONN_TD.req_credential(),
XML_CONN_TD.req_url_encode()),
]
url_request.Request.assert_has_calls(expected_calls)
def test_request_with_debug(self):
self.XmlConnector.debug = True
request = mock.Mock()
request.headers = {XML_CONN_TD.FAKE_KEY: XML_CONN_TD.FAKE_VALUE}
request.get_full_url = mock.Mock(
return_value=XML_CONN_TD.FAKE_VALUE)
with mock.patch.object(url_request, 'Request',
mock.Mock(return_value=request)):
rsp = self.XmlConnector.request(XML_CONN_TD.FAKE_BODY,
XML_CONN_TD.FAKE_METHOD)
self.assertEqual(XML_CONN_TD.FAKE_RESP, rsp)
def test_request_with_no_authorized_exception(self):
xml_socket = mock.Mock()
xml_socket.read = mock.Mock(return_value=XML_CONN_TD.FAKE_RESP)
hook = emc_utils.RequestSideEffect()
hook.append(ex=url_error.HTTPError(XML_CONN_TD.req_url(),
'403', 'fake_message', None, None))
hook.append(xml_socket)
hook.append(xml_socket)
self.XmlConnector.url_opener.open = mock.Mock(side_effect=hook)
self.XmlConnector.request(XML_CONN_TD.FAKE_BODY)
def test_request_with_general_exception(self):
hook = emc_utils.RequestSideEffect()
hook.append(ex=url_error.HTTPError(XML_CONN_TD.req_url(),
'error_code', 'fake_message',
None, None))
self.XmlConnector.url_opener.open = mock.Mock(side_effect=hook)
self.assertRaises(exception.ManilaException,
self.XmlConnector.request,
XML_CONN_TD.FAKE_BODY)
class MockSSH(object):
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
pass
class MockSSHPool(object):
def __init__(self):
self.ssh = MockSSH()
def item(self):
try:
return self.ssh
finally:
pass
class CmdConnectorTest(test.TestCase):
def setUp(self):
super(CmdConnectorTest, self).setUp()
self.configuration = conf.Configuration(None)
self.configuration.append_config_values = mock.Mock(return_value=0)
self.configuration.emc_nas_login = fakes.FakeData.emc_nas_login
self.configuration.emc_nas_password = fakes.FakeData.emc_nas_password
self.configuration.emc_nas_server = fakes.FakeData.emc_nas_server
self.sshpool = MockSSHPool()
with mock.patch.object(utils, "SSHPool",
mock.Mock(return_value=self.sshpool)):
self.CmdHelper = connector.SSHConnector(
configuration=self.configuration, debug=False)
utils.SSHPool.assert_called_once_with(
ip=fakes.FakeData.emc_nas_server,
port=22,
conn_timeout=None,
login=fakes.FakeData.emc_nas_login,
password=fakes.FakeData.emc_nas_password)
def test_run_ssh(self):
with mock.patch.object(processutils, "ssh_execute",
mock.Mock(return_value=('fake_output', ''))):
cmd_list = ['fake', 'cmd']
self.CmdHelper.run_ssh(cmd_list)
processutils.ssh_execute.assert_called_once_with(
self.sshpool.item(), 'fake cmd', check_exit_code=False)
def test_run_ssh_with_debug(self):
self.CmdHelper.debug = True
with mock.patch.object(processutils, "ssh_execute",
mock.Mock(return_value=('fake_output', ''))):
cmd_list = ['fake', 'cmd']
self.CmdHelper.run_ssh(cmd_list)
processutils.ssh_execute.assert_called_once_with(
self.sshpool.item(), 'fake cmd', check_exit_code=False)
@mock.patch.object(
processutils, "ssh_execute",
mock.Mock(side_effect=processutils.ProcessExecutionError))
def test_run_ssh_exception(self):
cmd_list = ['fake', 'cmd']
self.mock_object(greenthread, 'sleep', mock.Mock())
sshpool = MockSSHPool()
with mock.patch.object(utils, "SSHPool",
mock.Mock(return_value=sshpool)):
self.CmdHelper = connector.SSHConnector(self.configuration)
self.assertRaises(processutils.ProcessExecutionError,
self.CmdHelper.run_ssh,
cmd_list,
True)
utils.SSHPool.assert_called_once_with(
ip=fakes.FakeData.emc_nas_server,
port=22,
conn_timeout=None,
login=fakes.FakeData.emc_nas_login,
password=fakes.FakeData.emc_nas_password)
processutils.ssh_execute.assert_called_once_with(
sshpool.item(), 'fake cmd', check_exit_code=True)
|
"""Provides methods to bootstrap a home assistant instance."""
import asyncio
import logging
import logging.handlers
import os
import sys
from time import time
from collections import OrderedDict
from typing import Any, Optional, Dict
import voluptuous as vol
import homeassistant.components as core_components
from homeassistant.components import persistent_notification
import homeassistant.config as conf_util
import homeassistant.core as core
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
from homeassistant.setup import async_setup_component
import homeassistant.loader as loader
from homeassistant.util.logging import AsyncHandler
from homeassistant.util.yaml import clear_secret_cache
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import event_decorators, service
from homeassistant.helpers.signal import async_register_signal_handling
_LOGGER = logging.getLogger(__name__)
ERROR_LOG_FILENAME = 'home-assistant.log'
FIRST_INIT_COMPONENT = set((
'recorder', 'mqtt', 'mqtt_eventstream', 'logger', 'introduction'))
def from_config_dict(config: Dict[str, Any],
hass: Optional[core.HomeAssistant]=None,
config_dir: Optional[str]=None,
enable_log: bool=True,
verbose: bool=False,
skip_pip: bool=False,
log_rotate_days: Any=None) \
-> Optional[core.HomeAssistant]:
"""Try to configure Home Assistant from a config dict.
    Dynamically loads required components and their dependencies.
"""
if hass is None:
hass = core.HomeAssistant()
if config_dir is not None:
config_dir = os.path.abspath(config_dir)
hass.config.config_dir = config_dir
mount_local_lib_path(config_dir)
# run task
hass = hass.loop.run_until_complete(
async_from_config_dict(
config, hass, config_dir, enable_log, verbose, skip_pip,
log_rotate_days)
)
return hass
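# A minimal usage sketch (hedged; the config keys are illustrative):
#
#   hass = from_config_dict({'homeassistant': {'name': 'Home'}})
#   if hass is not None:
#       hass.start()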
@asyncio.coroutine
def async_from_config_dict(config: Dict[str, Any],
hass: core.HomeAssistant,
config_dir: Optional[str]=None,
enable_log: bool=True,
verbose: bool=False,
skip_pip: bool=False,
log_rotate_days: Any=None) \
-> Optional[core.HomeAssistant]:
"""Try to configure Home Assistant from a config dict.
    Dynamically loads required components and their dependencies.
This method is a coroutine.
"""
start = time()
hass.async_track_tasks()
core_config = config.get(core.DOMAIN, {})
try:
yield from conf_util.async_process_ha_core_config(hass, core_config)
except vol.Invalid as ex:
conf_util.async_log_exception(ex, 'homeassistant', core_config, hass)
return None
yield from hass.loop.run_in_executor(
None, conf_util.process_ha_config_upgrade, hass)
if enable_log:
async_enable_logging(hass, verbose, log_rotate_days)
hass.config.skip_pip = skip_pip
if skip_pip:
_LOGGER.warning('Skipping pip installation of required modules. '
'This may cause issues.')
if not loader.PREPARED:
yield from hass.loop.run_in_executor(None, loader.prepare, hass)
# Merge packages
conf_util.merge_packages_config(
config, core_config.get(conf_util.CONF_PACKAGES, {}))
# Make a copy because we are mutating it.
# Use OrderedDict in case original one was one.
# Convert values to dictionaries if they are None
new_config = OrderedDict()
for key, value in config.items():
new_config[key] = value or {}
config = new_config
# Filter out the repeating and common config section [homeassistant]
components = set(key.split(' ')[0] for key in config.keys()
if key != core.DOMAIN)
# setup components
# pylint: disable=not-an-iterable
res = yield from core_components.async_setup(hass, config)
if not res:
_LOGGER.error('Home Assistant core failed to initialize. '
'Further initialization aborted.')
return hass
yield from persistent_notification.async_setup(hass, config)
_LOGGER.info('Home Assistant core initialized')
# Give event decorators access to HASS
event_decorators.HASS = hass
service.HASS = hass
# stage 1
for component in components:
if component not in FIRST_INIT_COMPONENT:
continue
hass.async_add_job(async_setup_component(hass, component, config))
yield from hass.async_block_till_done()
# stage 2
for component in components:
if component in FIRST_INIT_COMPONENT:
continue
hass.async_add_job(async_setup_component(hass, component, config))
yield from hass.async_stop_track_tasks()
stop = time()
_LOGGER.info('Home Assistant initialized in %ss', round(stop-start, 2))
async_register_signal_handling(hass)
return hass
def from_config_file(config_path: str,
hass: Optional[core.HomeAssistant]=None,
verbose: bool=False,
skip_pip: bool=True,
log_rotate_days: Any=None):
"""Read the configuration file and try to start all the functionality.
    Adds functionality to the 'hass' parameter if given; otherwise
    instantiates a new Home Assistant object.
"""
if hass is None:
hass = core.HomeAssistant()
# run task
hass = hass.loop.run_until_complete(
async_from_config_file(
config_path, hass, verbose, skip_pip, log_rotate_days)
)
return hass
@asyncio.coroutine
def async_from_config_file(config_path: str,
hass: core.HomeAssistant,
verbose: bool=False,
skip_pip: bool=True,
log_rotate_days: Any=None):
"""Read the configuration file and try to start all the functionality.
    Adds functionality to the 'hass' parameter.
This method is a coroutine.
"""
# Set config dir to directory holding config file
config_dir = os.path.abspath(os.path.dirname(config_path))
hass.config.config_dir = config_dir
yield from hass.loop.run_in_executor(
None, mount_local_lib_path, config_dir)
async_enable_logging(hass, verbose, log_rotate_days)
try:
config_dict = yield from hass.loop.run_in_executor(
None, conf_util.load_yaml_config_file, config_path)
except HomeAssistantError as err:
_LOGGER.error('Error loading %s: %s', config_path, err)
return None
finally:
clear_secret_cache()
hass = yield from async_from_config_dict(
config_dict, hass, enable_log=False, skip_pip=skip_pip)
return hass
@core.callback
def async_enable_logging(hass: core.HomeAssistant, verbose: bool=False,
log_rotate_days=None) -> None:
"""Setup the logging.
This method must be run in the event loop.
"""
logging.basicConfig(level=logging.INFO)
fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
"[%(name)s] %(message)s")
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
datefmt = '%y-%m-%d %H:%M:%S'
# suppress overly verbose logs from libraries that aren't helpful
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
try:
from colorlog import ColoredFormatter
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
))
except ImportError:
pass
# Log errors to a file if we have write access to file or config dir
err_log_path = hass.config.path(ERROR_LOG_FILENAME)
err_path_exists = os.path.isfile(err_log_path)
# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
(not err_path_exists and os.access(hass.config.config_dir, os.W_OK)):
if log_rotate_days:
err_handler = logging.handlers.TimedRotatingFileHandler(
err_log_path, when='midnight', backupCount=log_rotate_days)
else:
err_handler = logging.FileHandler(
err_log_path, mode='w', delay=True)
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
async_handler = AsyncHandler(hass.loop, err_handler)
@asyncio.coroutine
def async_stop_async_handler(event):
"""Cleanup async handler."""
logging.getLogger('').removeHandler(async_handler)
yield from async_handler.async_close(blocking=True)
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)
logger = logging.getLogger('')
logger.addHandler(async_handler)
logger.setLevel(logging.INFO)
else:
_LOGGER.error(
            'Unable to set up error log %s (access denied)', err_log_path)
def mount_local_lib_path(config_dir: str) -> str:
"""Add local library to Python Path.
Async friendly.
"""
deps_dir = os.path.join(config_dir, 'deps')
if deps_dir not in sys.path:
        sys.path.insert(0, deps_dir)
return deps_dir
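# Minimal usage sketch (the config path is hypothetical); this mirrors what a
# command-line entry point would do with the helpers above:
#
#   hass = from_config_file('/home/user/.homeassistant/configuration.yaml',
#                           verbose=True)
#   if hass is not None:
#       hass.start()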
|
from oslo.config import cfg
from nova.compute import power_state
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import utils as compute_utils
from nova import db
from nova import exception
from nova.i18n import _
from nova import image
from nova import objects
from nova.openstack.common import log as logging
from nova.scheduler import client as scheduler_client
from nova.scheduler import utils as scheduler_utils
from nova import servicegroup
LOG = logging.getLogger(__name__)
migrate_opt = cfg.IntOpt('migrate_max_retries',
default=-1,
help='Number of times to retry live-migration before failing. '
'If == -1, try until out of hosts. '
'If == 0, only try once, no retries.')
CONF = cfg.CONF
CONF.register_opt(migrate_opt)
class LiveMigrationTask(object):
    """Run the checks and RPC call needed to live-migrate an instance."""
def __init__(self, context, instance, destination,
block_migration, disk_over_commit):
self.context = context
self.instance = instance
self.destination = destination
self.block_migration = block_migration
self.disk_over_commit = disk_over_commit
self.source = instance.host
self.migrate_data = None
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
self.servicegroup_api = servicegroup.API()
self.scheduler_client = scheduler_client.SchedulerClient()
self.image_api = image.API()
def execute(self):
self._check_instance_is_running()
self._check_host_is_up(self.source)
if not self.destination:
self.destination = self._find_destination()
else:
self._check_requested_destination()
# TODO(johngarbutt) need to move complexity out of compute manager
# TODO(johngarbutt) disk_over_commit?
return self.compute_rpcapi.live_migration(self.context,
host=self.source,
instance=self.instance,
dest=self.destination,
block_migration=self.block_migration,
migrate_data=self.migrate_data)
def rollback(self):
# TODO(johngarbutt) need to implement the clean up operation
# but this will make sense only once we pull in the compute
# calls, since this class currently makes no state changes,
# except to call the compute method, that has no matching
# rollback call right now.
raise NotImplementedError()
def _check_instance_is_running(self):
if self.instance.power_state != power_state.RUNNING:
raise exception.InstanceNotRunning(
instance_id=self.instance.uuid)
def _check_host_is_up(self, host):
try:
service = db.service_get_by_compute_host(self.context, host)
except exception.NotFound:
raise exception.ComputeServiceUnavailable(host=host)
if not self.servicegroup_api.service_is_up(service):
raise exception.ComputeServiceUnavailable(host=host)
def _check_requested_destination(self):
self._check_destination_is_not_source()
self._check_host_is_up(self.destination)
self._check_destination_has_enough_memory()
self._check_compatible_with_source_hypervisor(self.destination)
self._call_livem_checks_on_host(self.destination)
def _check_destination_is_not_source(self):
if self.destination == self.source:
raise exception.UnableToMigrateToSelf(
instance_id=self.instance.uuid, host=self.destination)
def _check_destination_has_enough_memory(self):
avail = self._get_compute_info(self.destination)['free_ram_mb']
mem_inst = self.instance.memory_mb
if not mem_inst or avail <= mem_inst:
instance_uuid = self.instance.uuid
dest = self.destination
reason = _("Unable to migrate %(instance_uuid)s to %(dest)s: "
"Lack of memory(host:%(avail)s <= "
"instance:%(mem_inst)s)")
raise exception.MigrationPreCheckError(reason=reason % dict(
instance_uuid=instance_uuid, dest=dest, avail=avail,
mem_inst=mem_inst))
def _get_compute_info(self, host):
return objects.ComputeNode.get_first_node_by_host_for_old_compat(
self.context, host)
def _check_compatible_with_source_hypervisor(self, destination):
source_info = self._get_compute_info(self.source)
destination_info = self._get_compute_info(destination)
source_type = source_info['hypervisor_type']
destination_type = destination_info['hypervisor_type']
if source_type != destination_type:
raise exception.InvalidHypervisorType()
source_version = source_info['hypervisor_version']
destination_version = destination_info['hypervisor_version']
if source_version > destination_version:
raise exception.DestinationHypervisorTooOld()
def _call_livem_checks_on_host(self, destination):
self.migrate_data = self.compute_rpcapi.\
check_can_live_migrate_destination(self.context, self.instance,
destination, self.block_migration, self.disk_over_commit)
def _find_destination(self):
# TODO(johngarbutt) this retry loop should be shared
attempted_hosts = [self.source]
image = None
if self.instance.image_ref:
image = compute_utils.get_image_metadata(self.context,
self.image_api,
self.instance.image_ref,
self.instance)
request_spec = scheduler_utils.build_request_spec(self.context, image,
[self.instance])
host = None
while host is None:
self._check_not_over_max_retries(attempted_hosts)
filter_properties = {'ignore_hosts': attempted_hosts}
scheduler_utils.setup_instance_group(self.context, request_spec,
filter_properties)
host = self.scheduler_client.select_destinations(self.context,
request_spec, filter_properties)[0]['host']
try:
self._check_compatible_with_source_hypervisor(host)
self._call_livem_checks_on_host(host)
except exception.Invalid as e:
LOG.debug("Skipping host: %(host)s because: %(e)s",
{"host": host, "e": e})
attempted_hosts.append(host)
host = None
return host
def _check_not_over_max_retries(self, attempted_hosts):
if CONF.migrate_max_retries == -1:
return
retries = len(attempted_hosts) - 1
if retries > CONF.migrate_max_retries:
msg = (_('Exceeded max scheduling retries %(max_retries)d for '
'instance %(instance_uuid)s during live migration')
% {'max_retries': retries,
'instance_uuid': self.instance.uuid})
raise exception.NoValidHost(reason=msg)
def execute(context, instance, destination,
block_migration, disk_over_commit):
task = LiveMigrationTask(context, instance,
destination,
block_migration,
disk_over_commit)
# TODO(johngarbutt) create a superclass that contains a safe_execute call
return task.execute()
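# Illustrative call of the module-level entry point above; 'ctxt' and
# 'instance' are assumed to be a RequestContext and an Instance obtained
# elsewhere (e.g. by the conductor):
#
#   execute(ctxt, instance, destination='compute-2',
#           block_migration=False, disk_over_commit=False)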
|
import unittest
import random, sys, time, re
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_browse as h2b, h2o_import as h2i, h2o_glm, h2o_util, h2o_rf, h2o_pca, h2o_jobs as h2j
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(node_count=1)
else:
h2o_hosts.build_cloud_with_hosts(node_count=1)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_PCA_UCIwine(self):
csvFilename = "wine.data"
timeoutSecs=300
trialStart = time.time()
#parse
trainKey = "wine.hex"
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvFilename, schema='local',
hex_key=trainKey, timeoutSecs=timeoutSecs)
elapsed = time.time() - start
print "parse end on ", csvFilename, 'took', elapsed, 'seconds',\
"%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
print "parse result:", parseResult['destination_key']
#PCA params
params = {
'destination_key': "python_PCA_key",
'tolerance':0.0,
'standardize':1
}
kwargs = params.copy()
h2o.beta_features = True
#TODO(spencer): Hack around no polling FVEC
PCAResult = {'python_elapsed': 0, 'python_%timeout': 0}
start = time.time()
h2o_cmd.runPCA(parseResult=parseResult, timeoutSecs=timeoutSecs, noPoll=True, returnFast=False, **kwargs)
h2j.pollWaitJobs(timeoutSecs=timeoutSecs, pollTimeoutSecs=120, retryDelaySecs=2)
#time.sleep(100)
elapsed = time.time() - start
PCAResult['python_elapsed'] = elapsed
PCAResult['python_%timeout'] = 1.0*elapsed / timeoutSecs
print "PCA completed in", PCAResult['python_elapsed'], "seconds.", \
"%f pct. of timeout" % (PCAResult['python_%timeout'])
#check PCA results
pcaView = h2o_cmd.runPCAView(modelKey = "python_PCA_key")
h2o_pca.simpleCheckPCA(self,pcaView)
h2o_pca.resultsCheckPCA(self,pcaView)
if __name__ == '__main__':
h2o.unit_main()
|
from django.conf.urls import patterns, url
urlpatterns = patterns('moocng.externalapps.views',
#url(r'^$', 'externalapps_index', name='externalapps_index'),
    url(r'^add/$',
'externalapps_add_or_edit',
name='externalapps_add'),
url(r'^(?P<external_app_id>\d+)/edit/$',
'externalapps_add_or_edit',
name='externalapps_edit'),
url(r'^(?P<external_app_id>\d+)/delete/$',
'externalapps_delete',
name='externalapps_delete'),
url(r'^$',
'externalapps_list',
name='externalapps_list'),
)
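# Illustrative reverse() lookups for the named patterns above (the path
# prefix depends on where this URLconf is included):
#
#   reverse('externalapps_edit', kwargs={'external_app_id': 3})    # -> '.../3/edit/'
#   reverse('externalapps_delete', kwargs={'external_app_id': 3})  # -> '.../3/delete/'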
|
"""Test the Broadlink config flow."""
import errno
import socket
import broadlink.exceptions as blke
import pytest
from homeassistant import config_entries
from homeassistant.components.broadlink.const import DOMAIN
from . import get_device
from tests.async_mock import call, patch
DEVICE_DISCOVERY = "homeassistant.components.broadlink.config_flow.blk.discover"
DEVICE_FACTORY = "homeassistant.components.broadlink.config_flow.blk.gendevice"
@pytest.fixture(autouse=True)
def broadlink_setup_fixture():
"""Mock broadlink entry setup."""
with patch(
"homeassistant.components.broadlink.async_setup", return_value=True
), patch("homeassistant.components.broadlink.async_setup_entry", return_value=True):
yield
async def test_flow_user_works(hass):
"""Test a config flow initiated by the user.
Best case scenario with no errors or locks.
"""
device = get_device("Living Room")
mock_api = device.get_mock_api()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "form"
assert result["step_id"] == "finish"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"name": device.name},
)
assert result["type"] == "create_entry"
assert result["title"] == device.name
assert result["data"] == device.get_entry_data()
assert mock_discover.call_count == 1
assert mock_api.auth.call_count == 1
async def test_flow_user_already_in_progress(hass):
"""Test we do not accept more than one config flow per device."""
device = get_device("Living Room")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress"
async def test_flow_user_mac_already_configured(hass):
"""Test we do not accept more than one config entry per device.
We need to abort the flow and update the existing entry.
"""
device = get_device("Living Room")
mock_entry = device.get_mock_entry()
mock_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
device.host = "192.168.1.64"
device.timeout = 20
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert dict(mock_entry.data) == device.get_entry_data()
assert mock_api.auth.call_count == 0
async def test_flow_user_invalid_ip_address(hass):
"""Test we handle an invalid IP address in the user step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.EINVAL, None)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "0.0.0.1"},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "invalid_host"}
async def test_flow_user_invalid_hostname(hass):
"""Test we handle an invalid hostname in the user step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, side_effect=OSError(socket.EAI_NONAME, None)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "pancakemaster.local"},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "invalid_host"}
async def test_flow_user_device_not_found(hass):
"""Test we handle a device not found in the user step."""
device = get_device("Living Room")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_user_device_not_supported(hass):
"""Test we handle a device not supported in the user step."""
device = get_device("Kitchen")
mock_api = device.get_mock_api()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
async def test_flow_user_network_unreachable(hass):
"""Test we handle a network unreachable in the user step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.ENETUNREACH, None)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "192.168.1.32"},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_user_os_error(hass):
"""Test we handle an OS error in the user step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, side_effect=OSError()):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "192.168.1.32"},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "unknown"}
async def test_flow_auth_authentication_error(hass):
"""Test we handle an authentication error in the auth step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.AuthenticationError()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "form"
assert result["step_id"] == "reset"
assert result["errors"] == {"base": "invalid_auth"}
async def test_flow_auth_network_timeout(hass):
"""Test we handle a network timeout in the auth step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.NetworkTimeoutError()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)
assert result["type"] == "form"
assert result["step_id"] == "auth"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_auth_firmware_error(hass):
"""Test we handle a firmware error in the auth step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.BroadlinkException()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)
assert result["type"] == "form"
assert result["step_id"] == "auth"
assert result["errors"] == {"base": "unknown"}
async def test_flow_auth_network_unreachable(hass):
"""Test we handle a network unreachable in the auth step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.auth.side_effect = OSError(errno.ENETUNREACH, None)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)
assert result["type"] == "form"
assert result["step_id"] == "auth"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_auth_os_error(hass):
"""Test we handle an OS error in the auth step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.auth.side_effect = OSError()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host},
)
assert result["type"] == "form"
assert result["step_id"] == "auth"
assert result["errors"] == {"base": "unknown"}
async def test_flow_reset_works(hass):
"""Test we finish a config flow after a manual unlock."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.AuthenticationError()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"name": device.name},
)
assert result["type"] == "create_entry"
assert result["title"] == device.name
assert result["data"] == device.get_entry_data()
async def test_flow_unlock_works(hass):
"""Test we finish a config flow with an unlock request."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.is_locked = True
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "form"
assert result["step_id"] == "unlock"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"unlock": True},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"name": device.name},
)
assert result["type"] == "create_entry"
assert result["title"] == device.name
assert result["data"] == device.get_entry_data()
assert mock_api.set_lock.call_args == call(False)
assert mock_api.set_lock.call_count == 1
async def test_flow_unlock_network_timeout(hass):
"""Test we handle a network timeout in the unlock step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.is_locked = True
mock_api.set_lock.side_effect = blke.NetworkTimeoutError()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"unlock": True},
)
assert result["type"] == "form"
assert result["step_id"] == "unlock"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_unlock_firmware_error(hass):
"""Test we handle a firmware error in the unlock step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.is_locked = True
mock_api.set_lock.side_effect = blke.BroadlinkException
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"unlock": True},
)
assert result["type"] == "form"
assert result["step_id"] == "unlock"
assert result["errors"] == {"base": "unknown"}
async def test_flow_unlock_network_unreachable(hass):
"""Test we handle a network unreachable in the unlock step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.is_locked = True
mock_api.set_lock.side_effect = OSError(errno.ENETUNREACH, None)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"unlock": True},
)
assert result["type"] == "form"
assert result["step_id"] == "unlock"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_unlock_os_error(hass):
"""Test we handle an OS error in the unlock step."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.is_locked = True
mock_api.set_lock.side_effect = OSError()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"unlock": True},
)
assert result["type"] == "form"
assert result["step_id"] == "unlock"
assert result["errors"] == {"base": "unknown"}
async def test_flow_do_not_unlock(hass):
"""Test we do not unlock the device if the user does not want to."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
mock_api.is_locked = True
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"unlock": False},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"name": device.name},
)
assert result["type"] == "create_entry"
assert result["title"] == device.name
assert result["data"] == device.get_entry_data()
assert mock_api.set_lock.call_count == 0
async def test_flow_import_works(hass):
"""Test an import flow."""
device = get_device("Living Room")
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": device.host},
)
assert result["type"] == "form"
assert result["step_id"] == "finish"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"name": device.name},
)
assert result["type"] == "create_entry"
assert result["title"] == device.name
assert result["data"]["host"] == device.host
assert result["data"]["mac"] == device.mac
assert result["data"]["type"] == device.devtype
assert mock_api.auth.call_count == 1
assert mock_discover.call_count == 1
async def test_flow_import_already_in_progress(hass):
"""Test we do not import more than one flow per device."""
device = get_device("Living Room")
data = {"host": device.host}
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data
)
with patch(DEVICE_DISCOVERY, return_value=[device.get_mock_api()]):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress"
async def test_flow_import_host_already_configured(hass):
"""Test we do not import a host that is already configured."""
device = get_device("Living Room")
mock_entry = device.get_mock_entry()
mock_entry.add_to_hass(hass)
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": device.host},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_flow_import_mac_already_configured(hass):
"""Test we do not import more than one config entry per device.
We need to abort the flow and update the existing entry.
"""
device = get_device("Living Room")
mock_entry = device.get_mock_entry()
mock_entry.add_to_hass(hass)
device.host = "192.168.1.16"
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": device.host},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert mock_entry.data["host"] == device.host
assert mock_entry.data["mac"] == device.mac
assert mock_entry.data["type"] == device.devtype
assert mock_api.auth.call_count == 0
async def test_flow_import_device_not_found(hass):
"""Test we handle a device not found in the import step."""
with patch(DEVICE_DISCOVERY, return_value=[]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "192.168.1.32"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_flow_import_device_not_supported(hass):
"""Test we handle a device not supported in the import step."""
device = get_device("Kitchen")
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": device.host},
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
async def test_flow_import_invalid_ip_address(hass):
"""Test we handle an invalid IP address in the import step."""
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.EINVAL, None)):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "0.0.0.1"},
)
assert result["type"] == "abort"
assert result["reason"] == "invalid_host"
async def test_flow_import_invalid_hostname(hass):
"""Test we handle an invalid hostname in the import step."""
with patch(DEVICE_DISCOVERY, side_effect=OSError(socket.EAI_NONAME, None)):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "hotdog.local"},
)
assert result["type"] == "abort"
assert result["reason"] == "invalid_host"
async def test_flow_import_network_unreachable(hass):
"""Test we handle a network unreachable in the import step."""
with patch(DEVICE_DISCOVERY, side_effect=OSError(errno.ENETUNREACH, None)):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "192.168.1.64"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_flow_import_os_error(hass):
"""Test we handle an OS error in the import step."""
with patch(DEVICE_DISCOVERY, side_effect=OSError()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "192.168.1.64"},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_flow_reauth_works(hass):
"""Test a reauthentication flow."""
device = get_device("Living Room")
mock_entry = device.get_mock_entry()
mock_entry.add_to_hass(hass)
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.AuthenticationError()
data = {"name": device.name, **device.get_entry_data()}
with patch(DEVICE_FACTORY, return_value=mock_api):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=data
)
assert result["type"] == "form"
assert result["step_id"] == "reset"
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert dict(mock_entry.data) == device.get_entry_data()
assert mock_api.auth.call_count == 1
assert mock_discover.call_count == 1
async def test_flow_reauth_invalid_host(hass):
"""Test we do not accept an invalid host for reauthentication.
The MAC address cannot change.
"""
device = get_device("Living Room")
mock_entry = device.get_mock_entry()
mock_entry.add_to_hass(hass)
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.AuthenticationError()
data = {"name": device.name, **device.get_entry_data()}
with patch(DEVICE_FACTORY, return_value=mock_api):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=data
)
device.mac = get_device("Office").mac
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "invalid_host"}
assert mock_discover.call_count == 1
assert mock_api.auth.call_count == 0
async def test_flow_reauth_valid_host(hass):
"""Test we accept a valid host for reauthentication.
The hostname/IP address may change. We need to update the entry.
"""
device = get_device("Living Room")
mock_entry = device.get_mock_entry()
mock_entry.add_to_hass(hass)
mock_api = device.get_mock_api()
mock_api.auth.side_effect = blke.AuthenticationError()
data = {"name": device.name, **device.get_entry_data()}
with patch(DEVICE_FACTORY, return_value=mock_api):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=data
)
device.host = "192.168.1.128"
mock_api = device.get_mock_api()
with patch(DEVICE_DISCOVERY, return_value=[mock_api]) as mock_discover:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": device.host, "timeout": device.timeout},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert mock_entry.data["host"] == device.host
assert mock_discover.call_count == 1
assert mock_api.auth.call_count == 1
|
from django.core.urlresolvers import reverse_lazy
from django.http import JsonResponse
from django.shortcuts import redirect, render
from django.views.generic import CreateView, ListView, DetailView, TemplateView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from . import constants
from .models import Voto, Charla
from .forms import RegistrarCharlaForm
from django.db.models import Q
class LoginRequired(object):
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(LoginRequired, self).dispatch(*args, **kwargs)
class ListarEstadoView(ListView):
context_object_name = 'charlas'
queryset = Charla.objects.all().order_by("estado")
template_name = 'charla/index.html'
def get_queryset(self, *args, **kwargs):
queryset = super(ListarEstadoView, self).get_queryset(*args, **kwargs)
queryset = queryset.filter(
Q(estado=constants.ESTADO_POSIBLE)|
Q(estado=constants.ESTADO_AGENDADO)
)
for charla in queryset:
if self.request.user.is_authenticated():
votos = Voto.objects.filter(
Q(charla=charla) &
Q(usuario=self.request.user)
).count()
if votos > 0:
charla.estado_estrella = True
else:
charla.estado_estrella = False
return queryset
class ListarAgendadoView(ListarEstadoView):
queryset = Charla.agendadas.all()
class ListarFinalizadoView(ListView):
context_object_name = 'charlas'
queryset = Charla.finalizadas.all()
template_name = 'charla/index.html'
class ListarFaqView(TemplateView):
template_name = 'faqs/index.html'
class RegistrarCharlaView(LoginRequired, CreateView):
form_class = RegistrarCharlaForm
model = Charla
success_url = reverse_lazy('index')
template_name = 'charla/registrar.html'
def get_form_kwargs(self):
if self.request.method in ('POST', 'PUT'):
self.object = self.model()
self.object.usuario = self.request.user
kwargs = super(RegistrarCharlaView, self).get_form_kwargs()
return kwargs
def login(request):
if not request.user.is_authenticated():
return render(request, 'login.html')
else:
return redirect('/', name='index')
class DetalleCharlaView(DetailView):
    """Display a single charla, flagging whether the current user voted for it."""
    context_object_name = 'charla'
    model = Charla
    template_name = 'charla/detalle.html'
def get(self, request, *args, **kwargs):
self.object = self.get_object()
try:
charla = Charla.objects.get(id=self.object.id)
except Charla.DoesNotExist:
return JsonResponse({"html": "Esta Charla no existe"})
try:
            Voto.objects.get(usuario=request.user.id, charla=charla)
self.object.estado_estrella = True
except Voto.DoesNotExist:
self.object.estado_estrella = False
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
class VotoView(LoginRequired, TemplateView):
template_name = "charla/voto.html"
http_method_names = ["post"]
def post(self, request, *args, **kwargs):
id = self.kwargs.get("charla", None)
try:
            charla = Charla.objects.get(id=charla_id)
charla.estado_estrella = True
except Charla.DoesNotExist:
return JsonResponse({"html": "Esta Charla no existe" })
if charla.estado == constants.ESTADO_POSIBLE:
voto, created = Voto.objects.get_or_create(charla=charla,
usuario=request.user)
i = 1
if not created:
i *= -1
voto.delete()
charla.estado_estrella = False
charla.votos += i
charla.save()
        response = self.render_to_response({"charla": charla})
        return JsonResponse({"html": response.rendered_content})
|
"""Some common utilities for tools to use."""
import codecs
import contextlib
import glob
import logging
import os
import os.path as path
import re
import shutil
import subprocess
import sys
import time
import zipfile
from nototools import notoconfig
@contextlib.contextmanager
def temp_chdir(path):
"""Usage: with temp_chdir(path):
do_something
"""
saved_dir = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(saved_dir)
noto_re = re.compile(
r"\[(tools|fonts|fonts_alpha|emoji|cjk|source|adobe|mti|afdko)\](.*)"
)
def resolve_path(somepath):
"""Resolve a path that might start with noto path shorthand. If
the path is empty, is '-', or the shorthand is not defined,
returns None. Example: '[fonts]/hinted'."""
if not somepath or somepath == "-":
return None
m = noto_re.match(somepath)
if m:
base, rest = m.groups()
if base == "adobe":
key = "adobe_data"
elif base == "mti":
key = "monotype_data"
elif base == "afdko":
key = "afdko"
else:
key = "noto_" + base
base = notoconfig.get(key)
while rest.startswith(path.sep):
rest = rest[len(path.sep) :]
somepath = path.join(base, rest)
return path.realpath(path.abspath(path.expanduser(somepath)))
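# Illustrative expansion (the notoconfig entry below is hypothetical): with
# noto_fonts=/home/user/noto-fonts configured,
#
#   resolve_path('[fonts]/hinted')  # -> '/home/user/noto-fonts/hinted'
#   resolve_path('-')               # -> None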
def commonpathprefix(paths):
"""Return the common path prefix and a tuple of the relative subpaths for the
provided paths. Uses resolve_path to convert to absolute paths and returns
the common absolute path. Some subpaths might be the empty string and joining
these will produce paths ending in '/', use normpath if you don't want this.
Python 2.7 only has path.commonprefix, which returns a common string prefix,
not a common path prefix.
"""
norm_paths = [resolve_path(p) + path.sep for p in paths]
prefix = path.dirname(path.commonprefix(norm_paths))
prefix_len = len(prefix)
if prefix_len > 1:
prefix_len += 1 # not '/' so does not end in '/', strip from subpaths
subpaths = tuple(p[prefix_len:-1] for p in norm_paths)
return prefix, subpaths
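# Worked example (assumes no symlinks along these paths, since resolve_path
# canonicalizes through realpath):
#
#   commonpathprefix(['/a/b/c', '/a/b/d/e'])  # -> ('/a/b', ('c', 'd/e'))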
def _name_to_key(keyname):
if keyname == "adobe_data":
return "adobe"
if keyname == "monotype_data":
return ("mti",)
if keyname.startswith("noto_"):
return keyname[5:]
return keyname
def short_path(somepath, basedir=None):
    """Return a short version of somepath, either relative to one of the noto path
    shorthands or to the provided base directory (defaults to the current working
    directory at call time). For logging/debugging output of file paths."""
    if basedir is None:
        basedir = os.getcwd()
    shortest = somepath
if basedir and somepath.startswith(basedir):
shortest = "." + somepath[len(basedir) :]
for k, v in notoconfig.values.items():
if somepath.startswith(v):
test = ("[%s]" % _name_to_key(k)) + somepath[len(v) :]
if len(test) < len(shortest):
shortest = test
return shortest
def check_dir_exists(dirpath):
if not os.path.isdir(dirpath):
raise ValueError("%s does not exist or is not a directory" % dirpath)
def check_file_exists(filepath):
if not os.path.isfile(filepath):
raise ValueError("%s does not exist or is not a file" % filepath)
def ensure_dir_exists(path, clean=False):
path = os.path.realpath(path)
if not os.path.isdir(path):
if os.path.exists(path):
raise ValueError("%s exists and is not a directory" % path)
print("making '%s'" % path)
os.makedirs(path)
elif clean:
shutil.rmtree(path)
os.makedirs(path)
return path
def generate_zip_with_7za(root_dir, file_paths, archive_path):
"""file_paths is a list of files relative to root_dir, these will be the names
in the archive at archive_path."""
arg_list = ["7za", "a", archive_path, "-tzip", "-mx=7", "-bd", "--"]
arg_list.extend(file_paths)
with temp_chdir(root_dir):
# capture but discard output
subprocess.check_output(arg_list)
def generate_zip_with_7za_from_filepairs(pairs, archive_path):
"""Pairs are source/destination path pairs. The source will be put into the
zip with name destination."""
staging_dir = "/tmp/stage_7za"
if os.path.exists(staging_dir):
shutil.rmtree(staging_dir)
os.makedirs(staging_dir)
pair_map = {}
for source, dest in pairs:
if not source.endswith(dest):
staging_source = os.path.join(staging_dir, dest)
shutil.copyfile(source, staging_source)
source_root = staging_dir
else:
source_root = source[: -len(dest)]
if source_root not in pair_map:
pair_map[source_root] = set()
pair_map[source_root].add(dest)
for source_root, dest_set in pair_map.items():
generate_zip_with_7za(source_root, sorted(dest_set), archive_path)
def dos2unix(root_dir, glob_list):
"""Convert dos line endings to unix ones in place."""
with temp_chdir(root_dir):
for g in glob_list:
file_list = glob.glob(g)
if file_list:
subprocess.check_call(["dos2unix", "-k", "-q", "-o"] + file_list)
def zip_extract_with_timestamp(zippath, dstdir):
zip = zipfile.ZipFile(zippath)
with temp_chdir(dstdir):
for info in zip.infolist():
zip.extract(info.filename)
# of course, time zones mess this up, so don't expect precision
date_time = time.mktime(info.date_time + (0, 0, -1))
os.utime(info.filename, (date_time, date_time))
def git_checkout(repo, branch_or_tag, verbose=False):
"""checkout the branch or tag"""
with temp_chdir(repo):
result = subprocess.check_output(
["git", "checkout", branch_or_tag], stderr=subprocess.STDOUT
)
if verbose:
print("%s:\n%s\n-----" % (repo, result))
def git_mv(repo, old, new):
"""Rename old to new in repo"""
with temp_chdir(repo):
return subprocess.check_output(["git", "mv", old, new])
def git_file_lastlog(repo, filepath):
"""Return a string containing the short hash, date, author email, and title
of most recent commit of filepath, separated by tab."""
with temp_chdir(repo):
return subprocess.check_output(
[
"git",
"log",
"-n",
"1",
"--format=%h\t%ad\t%ae\t%s",
"--date=short",
"--",
filepath,
]
)
def git_tags(repo):
"""Return a list of info about annotated tags. The list consists of tuples
of the commit hash (as a string), the tag name, and the time, sorted in
    reverse chronological order (most recent first). The hash is for the
referenced commit and not the tag itself, but the date is the tag date."""
with temp_chdir(repo):
text = subprocess.check_output(
[
"git",
"tag",
"-l",
"--format=%(*objectname)|%(refname:strip=2)|"
"%(taggerdate:format:%Y-%m-%d %T %Z)",
"--sort=-taggerdate",
]
).decode("utf-8")
return [
tuple(line.split("|"))
for line in text.splitlines()
if not line.strip().startswith("|")
]
def git_tag_info(repo, tag_name):
"""Return the annotation for this tag in the repo. It is limited to no more
than 50 lines."""
# Unfortunately, I can't get the other formatted tag info and also limit
# to the tag annotation without more serious munging of tag or show output.
with temp_chdir(repo):
text = subprocess.check_output(["git", "tag", "-l", "-n50", tag_name])
lines = text[len(tag_name) :].strip().splitlines()
return "\n".join(l.strip() for l in lines)
def get_tool_generated(repo, subdir, commit_title_prefix="Updated by tool"):
"""
Return a list of the names of tool-generated files in the provided directory.
The idea is that when we check in files that are generated by a tool, the
    commit will start with the given prefix. If a file's most recent log entry
matches this, it means that we've not applied patches or fixes to the file
since it was generated, so we can overwrite it with new tool-generated data.
    The motivation for this is maintaining the sample texts. The original source
for most of these is UDHR data, but subsequently we have fixed things in
some of the samples. We generally do not want to blindly overwrite these
fixes, but do want to be able to regenerate the samples if we get new source
data.
"""
files_not_under_version_control = []
protected_files = []
tool_generated_files = []
for f in sorted(os.listdir(path.join(repo, subdir))):
relpath = path.join(subdir, f)
lastlog_str = git_file_lastlog(repo, relpath)
if not lastlog_str:
files_not_under_version_control.append(f)
continue
commit, date, author, title = lastlog_str.split("\t")
if title.startswith(commit_title_prefix):
tool_generated_files.append(f)
else:
protected_files.append(f)
if files_not_under_version_control:
sys.stderr.write(
"%d files were not under version control:\n %s\n"
% (
len(files_not_under_version_control),
", ".join(files_not_under_version_control),
)
)
if protected_files:
sys.stderr.write(
"%d files protected:\n %s\n"
% (len(protected_files), ", ".join(protected_files))
)
return tool_generated_files
def git_get_branch(repo):
try:
with temp_chdir(repo):
            with open(os.devnull, "w") as trash:
return subprocess.check_output(
["git", "symbolic-ref", "--short", "HEAD"], stderr=trash
).strip()
    except (OSError, subprocess.CalledProcessError):
return "<not on any branch>"
def git_is_clean(repo, print_errors=False):
"""Ensure there are no unstaged or uncommitted changes in the repo."""
def capture_and_show_errors(cmd):
def dumplines(msg, text, limit):
if text:
lines = text.splitlines()
print(
"%s (%d lines):\n %s"
% (msg, len(lines), "\n ".join(lines[:limit]))
)
if len(lines) > limit:
print(" ...")
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
dumplines("out", out, 20)
dumplines("err", err, 20)
result = True
with temp_chdir(repo):
subprocess.check_call(
["git", "update-index", "-q", "--ignore-submodules", "--refresh"]
)
if subprocess.call(
["git", "diff-files", "--quiet", "--ignore-submodules", "--"]
):
if print_errors:
print("There are unstaged changes:")
capture_and_show_errors(
[
"git",
"diff-files",
"--name-status",
"-r",
"--ignore-submodules",
"--",
]
)
result = False
if subprocess.call(
[
"git",
"diff-index",
"--cached",
"--quiet",
"HEAD",
"--ignore-submodules",
"--",
]
):
if print_errors:
print("There are uncommitted changes:")
capture_and_show_errors(
[
"git",
"diff-index",
"--cached",
"--name-status",
"-r",
"HEAD",
"--ignore-submodules",
"--",
]
)
result = False
return result
def git_head_commit(repo):
"""Return the commit hash at head, the date and time of the commit as
YYYY-mm-dd HH:MM:SS, and the subject line, as a tuple of three strings."""
with temp_chdir(repo):
text = subprocess.check_output(
[
"git",
"show",
"-s",
"--date=format:%Y-%m-%d %H:%M:%S",
"--no-expand-tabs",
"--pretty=format:%H\t%cd\t%s",
"HEAD",
]
)
return tuple(text.strip().split(b"\t", 2))
def git_check_remote_commit(repo, commit, remote="upstream", branch="master"):
"""Return true if the commit exists in the remote repo at the given branch
(or any branch if branch is None). This has the side effect of calling
'git fetch {remote}'."""
with temp_chdir(repo):
subprocess.check_call(["git", "fetch", remote])
# the following will throw an exception if commit is unrecognized
text = subprocess.check_output(["git", "branch", "-r", "--contains", commit])
lines = [line.strip() for line in text.splitlines()]
if branch:
if branch == "HEAD":
# assume a link, it will be reported as one
expected = remote + "/HEAD ->"
return any(line.startswith(expected) for line in lines)
expected = remote + "/" + branch
return expected in lines
else:
expected = remote + "/"
            return any(line.startswith(expected) for line in lines)
def git_add_all(repo_subdir):
"""Add all changed, deleted, and new files in subdir to the staging area."""
# git can now add everything, even removed files
with temp_chdir(repo_subdir):
subprocess.check_call(["git", "add", "--", "."])
def svn_get_version(repo):
with temp_chdir(repo):
version_string = subprocess.check_output(["svnversion", "-c"]).strip()
colon_index = version_string.find(":".encode("utf-8"))
if colon_index >= 0:
version_string = version_string[colon_index + 1 :]
return version_string
def svn_update(repo):
with temp_chdir(repo):
subprocess.check_call(["svn", "up"], stderr=subprocess.STDOUT)
def parse_int_ranges(
range_string,
is_hex=True,
sep=None,
allow_duplicates=False,
return_set=True,
allow_compressed=False,
):
"""Returns a set/list of ints from a string of numbers or ranges separated by
sep. A range is two values separated by hyphen with no intervening separator;
ranges are inclusive. If allow_compressed is true, '/' is also allowed
as a separator, and ranges following it or hyphen are interpreted as suffixes
that replace the same number of characters at the end of the previous value.
'-' generates the range of intervening characters as before, while '/' does
not. Returns a set or a list depending on return_set.
For example, with compressed ranges the following:
1ee42/7/9/b/d-f 1ee51-2/4/7/9/b/d/f
expands to:
1ee42 1ee47 1ee49 1ee4b 1ee4d-1ee4f 1ee51-1ee52 1ee54 1ee57 1ee59 1ee5b
1ee5d 1ee5f
"""
base = 16 if is_hex else 10
vals = []
def _add_segment(prev_str, suffix, is_range):
slen = len(suffix)
if prev_str is None:
next_str = suffix
next_val = int(next_str, base)
else:
if slen > len(prev_str):
raise ValueError(
"suffix '%s' is longer than previous '%s'" % (suffix, prev_str)
)
next_str = prev_str[:-slen] + suffix
next_val = int(next_str, base)
if next_val <= vals[-1]:
raise ValueError(
"next value '%s' is not greater than previous '%s'"
% (next_str, prev_str)
)
if is_range:
start_val = vals[-1] + 1
vals.extend(range(start_val, next_val))
vals.append(next_val)
return next_str
def _add_range(r):
stops = "-/"
if len(r) == 0:
raise ValueError("empty range")
if r[0] in stops:
raise ValueError('range "%s" has leading separator' % r)
if r[-1] in stops:
raise ValueError('range "%s" has trailing separator' % r)
r = r + "/"
prev_str = None
is_range = False
start = 0
len_limit = -1
for i in range(len(r)):
cp = r[i]
if cp not in stops:
continue
if i == start:
raise ValueError('range "%s" has two separators together' % r)
if is_range and cp == "-":
raise ValueError("range \"%s\" has two '-' in sequence" % r)
if not allow_compressed:
if start == 0:
len_limit = i
elif i - start > len_limit:
raise ValueError(
"segment '%s' longer than previous segment" % r[start:i]
)
else:
len_limit = i - start
prev_str = _add_segment(prev_str, r[start:i], is_range)
is_range = cp == "-"
start = i + 1
# main
# handle comments and multiline input
if "\n" in range_string or "#" in range_string:
# strip comments and turn into single line
def strip_comment(line):
x = line.find("#")
if x >= 0:
line = line[:x]
return line.strip()
join_char = " " if sep is None else sep
range_string = join_char.join(
filter(None, (strip_comment(line) for line in range_string.splitlines()))
)
if not allow_compressed and "/" != sep and range_string.find("/") != -1:
raise ValueError("'/' only allowed in compressed range format")
# collect ordered list of values
for r in range_string.split(sep):
_add_range(r)
# check for duplicates and/or convert to set
if return_set or not allow_duplicates:
range_set = set(vals)
if not allow_duplicates and len(range_set) != len(vals):
fail = set()
seen = set()
for v in vals:
if v in seen:
fail.add(v)
else:
seen.add(v)
raise ValueError(
'range "%s" has %d duplicates: %s'
% (range_string, len(fail), write_int_ranges(fail))
)
return range_set if return_set else vals
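# Doctest-style usage sketch of the default parse (hex values, separated by
# whitespace):
#
#   >>> sorted(parse_int_ranges('1e00-1e02 1e05'))
#   [7680, 7681, 7682, 7685]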
def write_int_ranges(int_values, in_hex=True, sep=" "):
"""From a set or list of ints, generate a string representation that can be
parsed by parse_int_ranges to return the original values (not
order_preserving)."""
if not int_values:
return ""
num_list = []
int_values = iter(sorted(int_values))
start = prev = next(int_values)
single_fmt = "%04x" if in_hex else "%d"
pair_fmt = single_fmt + "-" + single_fmt
def emit():
if prev == start:
num_list.append(single_fmt % prev)
else:
num_list.append(pair_fmt % (start, prev))
for v in int_values:
if v == prev + 1:
prev += 1
continue
else:
emit()
start = prev = v
emit()
return sep.join(num_list)
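# Round-trip sketch with parse_int_ranges above:
#
#   >>> write_int_ranges({0x1e00, 0x1e01, 0x1e02, 0x1e05})
#   '1e00-1e02 1e05'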
def setup_logging(loglevel, quiet_ttx=True):
"""Set up logging to stream to stdout.
The loglevel is a logging level name or a level value (int or string).
ttx/fontTools uses 'info' to report when it is reading/writing tables,
but when we want 'info' in our own tools we usually don't want this detail.
When quiet_ttx is true, set up logging to treat 'info' logs from
fontTools misc.xmlReader and ttLib as though they were at level 19."""
try:
loglevel = int(loglevel)
    except (TypeError, ValueError):
loglevel = getattr(logging, loglevel.upper(), loglevel)
if not isinstance(loglevel, int):
print(
"Could not set log level, should be one of debug, info, warning, "
"error, critical, or a numeric value"
)
return
logging.basicConfig(level=loglevel)
if quiet_ttx and loglevel == logging.INFO:
for logger_name in ["fontTools.misc.xmlReader", "fontTools.ttLib"]:
logger = logging.getLogger(logger_name)
logger.setLevel(loglevel + 1)
def write_lines(lines, outfile):
"""Write lines as utf-8 to outfile, separated by and ending with newline"""
ensure_dir_exists(path.dirname(outfile))
with codecs.open(outfile, "w", "utf-8") as f:
f.write("\n".join(lines + [""]))
def read_lines(infile, ignore_comments=True, strip=True, skip_empty=True):
"""Read lines from infile and return as a list, optionally stripping comments,
whitespace, and/or skipping blank lines."""
lines = []
with codecs.open(infile, "r", "utf-8") as f:
for line in f:
if ignore_comments:
ix = line.find("#")
if ix >= 0:
line = line[:ix]
if strip:
line = line.strip()
if not line and skip_empty:
continue
lines.append(line)
return lines
def _read_filename_list(filenames):
with open(filenames, "r") as f:
        return [resolve_path(n.strip()) for n in f if n.strip()]
NOTO_FONT_PATHS = ["[fonts]/hinted", "[fonts]/unhinted", "[emoji]/fonts", "[cjk]"]
def collect_paths(dirs, files):
"""Return a collection of all files in any of the listed dirs, and
the listed files. Can use noto short paths. A file name starting
with '@' is interpreted as the name of a file containing a list
of filenames one per line. The short name '[noto]' refers to
the noto (phase 2) font paths."""
paths = []
    if dirs:
        for i in range(len(dirs)):
            # special case '[noto]' to include all noto font dirs
            if dirs[i] == "[noto]":
                dirs[i] = None
                dirs.extend(NOTO_FONT_PATHS)
                dirs = [d for d in dirs if d]
                break
for d in dirs:
d = resolve_path(d)
            paths.extend(glob.glob(path.join(d, "*")))
if files:
for fname in files:
if fname[0] == "@":
paths.extend(_read_filename_list(fname[1:]))
else:
paths.append(resolve_path(fname))
return paths
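# Usage sketch (illustrative; the file names below are hypothetical): collect
# all phase-2 noto font paths plus the files listed in an '@'-prefixed list.
#
#   paths = collect_paths(["[noto]"], ["@extra_fonts.txt", "MyFont.ttf"])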
|
from __future__ import print_function
import logging
import sys
from commands import *
import time
import argparse
from crawler import Crawler
from cleaner import Cleaner
from clients import RBTJIRAClient
from commit import Committer
from post_review import ReviewPoster
from test_patch import PatchTester
possible_options = ['post-review', 'clean', 'submit-patch', 'commit', 'count-comments', 'test-patch']
def option(s):
s = s.lower()
if s not in possible_options:
raise argparse.ArgumentTypeError(
"you provided " + s + " which is not in possible options: " + str(possible_options))
return s
def parsed_time(s):
return time.strptime(s, '%Y-%m-%dT%H:%M:%SZ')
def main():
popt = argparse.ArgumentParser(description='apache dev tool. Command line helper for frequent actions.')
    popt.add_argument('action', nargs='?', action="store", type=option,
                      help="action of the command. One of: " + ", ".join(possible_options))
    popt.add_argument('-j', '--jira', action='store', dest='jira', required=False,
                      help='JIRAs. provide as -j JIRA1 JIRA2... Mostly only one will be used; the commit command '
                           'can accept multiple jira ids and commit all of them together.',
                      default=[getoutput("git rev-parse --abbrev-ref HEAD")], nargs="*")
popt.add_argument('-ju', '--jira-username', action='store', dest='jira_username', required=False,
help='JIRA Username. If not provided, it will prompt and ask the user.')
popt.add_argument('-jp', '--jira-password', action='store', dest='jira_password', required=False,
help='JIRA Password. If not provided, it will prompt and ask the user.')
popt.add_argument('-repo', '--repository', action='store', dest='repositories', required=False,
help='Reviewboard Repository names', nargs='*', default=[])
    popt.add_argument('-ru', '--reviewboard-username', action='store', dest='reviewboard_username', required=False,
                      help='Review Board Username. If not provided, it will prompt and ask the user.', nargs="*")
popt.add_argument('-rp', '--reviewboard-password', action='store', dest='reviewboard_password', required=False,
help='Review Board Password. If not provided, it will prompt and ask the user.')
popt.add_argument('-b', '--branch', action='store', dest='branch', required=False,
help='Tracking branch to create diff against. Picks default from .reviewboardrc file')
popt.add_argument('-s', '--summary', action='store', dest='summary', required=False,
help='Summary for the reviewboard. If not provided, jira summary will be picked. ')
popt.add_argument('-d', '--description', action='store', dest='description', required=False,
help='Description for reviewboard. Defaults to description on jira. ')
popt.add_argument('-r', '--rb', action='store', dest='reviewboard', required=False,
help='Review board that needs to be updated. Only needed if you haven\'t created rb entry using '
'this tool.')
popt.add_argument('-t', '--testing-done', action='store', dest='testing_done', required=False,
help='Text for the Testing Done section of the reviewboard. Defaults to empty string.')
popt.add_argument('-ta', '--testing-done-append', action='store', dest='testing_done_append', required=False,
help='Text to append to Testing Done section of the reviewboard. Used to provide '
'new testing done in addition to old one already mentioned on rb', nargs="*")
popt.add_argument('-ch', '--choose-patch', action='store_true', dest='choose_patch', required=False,
help='Whether Ask for which patch to commit. By default the latest uploaded '
'patch is picked for committing.', default=False)
popt.add_argument('-p', '--publish', action='store_true', dest='publish', required=False,
help='Whether to make the review request public', default=False)
popt.add_argument('-o', '--open', action='store_true', dest='open', required=False,
help='Whether to open the review request in browser', default=False)
    popt.add_argument('-tpc', '--test-patch-command', action='store', dest='test_patch_command', required=False,
                      help='Command used to build and test the patch.', default="mvn clean install -fae")
    popt.add_argument('-rs', '--require-ship-it', action='store_true', dest='require_ship_it', required=False,
                      help='Whether to require Ship It! review before posting patch from rb to jira. False by default.',
                      default=False)
popt.add_argument('-from', '--from', action='store', dest='from_time', required=False, help='Time range start',
type=parsed_time)
popt.add_argument('-to', '--to', action='store', dest='to_time', required=False, help='Time range end',
type=parsed_time)
popt.add_argument('-v', '--verbose', action='store_true', dest='verbose', required=False, help='Verbose',
default=False)
popt.add_argument('--debug', action='store_true', dest='debug', required=False, help='Debug',
default=False)
opt = popt.parse_args()
logging.basicConfig(format='%(asctime)s %(name)-6s %(levelname)-6s %(message)s',
level=logging.DEBUG if opt.debug else logging.INFO if opt.verbose else logging.WARN)
with RBTJIRAClient(opt) as client:
def validate_jiras():
opt.issues = [client.valid_jira(jira) for jira in opt.jira]
def validate_single_jira_provided():
if len(opt.jira) != 1:
raise Exception("Only single JIRA expected for this action")
if opt.action in ['post-review', 'submit-patch', 'test-patch', 'commit']:
validate_jiras()
if opt.action in ['post-review', 'submit-patch']:
validate_single_jira_provided()
review_poster = ReviewPoster(client, opt)
if opt.action == 'post-review':
review_poster.post_review()
elif opt.action == 'submit-patch':
review_poster.submit_patch()
elif opt.action == 'test-patch':
return PatchTester(client, opt).test_patch()
elif opt.action == 'commit':
return Committer(client, opt).commit()
elif opt.action == "count-comments":
return Crawler(client, opt).count_comments()
elif opt.action == "clean":
return Cleaner(client).clean()
else:
print("Provided action not supported, you provided: ", opt.action)
print("Provide --help option to understand usage")
return 1
if __name__ == '__main__':
sys.exit(main())
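# Example invocations (illustrative; assumes this script is saved as
# devtool.py, and the JIRA ids are hypothetical):
#
#   python devtool.py post-review -j HIVE-1234 --publish
#   python devtool.py commit -j HIVE-1234 HIVE-1235
#   python devtool.py test-patch -j HIVE-1234 -tpc "mvn clean install -fae"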
|
"""
Scripts to help load the movielens dataset into Python classes
"""
import re
class User:
def __init__(self, id, age, sex, occupation, zip):
self.id = int(id)
self.age = int(age)
self.sex = sex
self.occupation = occupation
self.zip = zip
self.avg_r = 0.0
class Item:
    def __init__(self, id, title, release_date, video_release_date, imdb_url,
                 unknown, action, adventure, animation, childrens, comedy, crime, documentary,
                 drama, fantasy, film_noir, horror, musical, mystery, romance, sci_fi, thriller, war, western):
self.id = int(id)
self.title = title
self.release_date = release_date
self.video_release_date = video_release_date
self.imdb_url = imdb_url
self.unknown = int(unknown)
self.action = int(action)
self.adventure = int(adventure)
self.animation = int(animation)
self.childrens = int(childrens)
self.comedy = int(comedy)
self.crime = int(crime)
self.documentary = int(documentary)
self.drama = int(drama)
self.fantasy = int(fantasy)
self.film_noir = int(film_noir)
self.horror = int(horror)
self.musical = int(musical)
self.mystery = int(mystery)
self.romance = int(romance)
self.sci_fi = int(sci_fi)
self.thriller = int(thriller)
self.war = int(war)
self.western = int(western)
class Rating:
def __init__(self, user_id, item_id, rating, time):
self.user_id = int(user_id)
self.item_id = int(item_id)
self.rating = int(rating)
self.time = time
class Dataset:
def load_users(self, file, u):
f = open(file, "r")
text = f.read()
entries = re.split("\n+", text)
for entry in entries:
e = entry.split('|', 5)
if len(e) == 5:
u.append(User(e[0], e[1], e[2], e[3], e[4]))
f.close()
def load_items(self, file, i):
f = open(file, "r")
text = f.read()
entries = re.split("\n+", text)
for entry in entries:
e = entry.split('|', 24)
if len(e) == 24:
                i.append(Item(e[0], e[1], e[2], e[3], e[4], e[5], e[6], e[7], e[8], e[9], e[10],
                              e[11], e[12], e[13], e[14], e[15], e[16], e[17], e[18], e[19], e[20], e[21],
                              e[22], e[23]))
f.close()
def load_ratings(self, file, r):
f = open(file, "r")
text = f.read()
entries = re.split("\n+", text)
for entry in entries:
e = entry.split('\t', 4)
if len(e) == 4:
r.append(Rating(e[0], e[1], e[2], e[3]))
f.close()
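# A minimal usage sketch (illustrative; assumes the standard MovieLens 100k
# file names u.user, u.item and u.data in the working directory):
#
#   d = Dataset()
#   users, items, ratings = [], [], []
#   d.load_users("u.user", users)
#   d.load_items("u.item", items)
#   d.load_ratings("u.data", ratings)
#   len(users), len(items), len(ratings)  # -> 943, 1682, 100000 for the 100k set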
|
import logging
import re
from streamlink.exceptions import PluginError
from streamlink.plugin import Plugin
from streamlink.plugin.api import validate
from streamlink.plugin.api.utils import parse_json
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
class StreamMe(Plugin):
_url_re = re.compile(r'^https?://(?:www\.)?stream\.me/(\w+).*$')
_streams_schema = validate.Schema(validate.all(
validate.transform(parse_json),
validate.any(
validate.all({
'_embedded': {
'streams': [{
'manifest': validate.url(),
'active': bool,
validate.optional('title'): validate.text,
validate.optional('privateLocked'): bool,
validate.optional('userSlug'): validate.text,
}]
}},
validate.get('_embedded'),
validate.get('streams'),
validate.get(0),
),
{
'reasons': [
{
'message': validate.text,
validate.optional('code'): validate.text,
}
]
}
)
))
_formats_schema = validate.Schema(validate.all(
validate.transform(parse_json),
{
'formats': {
'mp4-hls': {
'encodings': [{
'videoHeight': int,
'location': validate.url()
}],
validate.optional('origin'): validate.any(None, {
validate.optional('location'): validate.url(),
})
}
}
},
validate.get('formats'),
validate.get('mp4-hls'),
))
API_CHANNEL = 'https://www.stream.me/api-user/v1/{0}/channel'
STREAM_WEIGHTS = {
'source': 65535,
}
def __init__(self, url):
super().__init__(url)
self.author = None
self.title = None
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
@classmethod
def stream_weight(cls, stream):
if stream in cls.STREAM_WEIGHTS:
return cls.STREAM_WEIGHTS[stream], 'streamme'
return Plugin.stream_weight(stream)
def get_author(self):
if self.author is None:
self._get_channel_data()
return self.author
def get_title(self):
if self.title is None:
self._get_channel_data()
return self.title
def _get_channel_data(self):
username = self._url_re.match(self.url).group(1)
data = self.session.http.get(self.API_CHANNEL.format(username),
acceptable_status=(200, 403, 404),
schema=self._streams_schema)
if data.get('reasons'):
raise PluginError(data['reasons'][0]['message'])
self.author = data.get('userSlug')
self.title = data.get('title')
return data
def _get_streams(self):
data = self._get_channel_data()
log.trace('{0!r}'.format(data))
if not data['active']:
log.error('Stream is not active')
return
hls_streams = self.session.http.get(data['manifest'],
schema=self._formats_schema)
for s in hls_streams['encodings']:
log.trace('{0!r}'.format(s))
yield ('{0}p'.format(s['videoHeight']),
HLSStream(self.session, s['location']))
source = hls_streams.get('origin')
if source:
log.trace('{0!r}'.format(source))
yield 'source', HLSStream(self.session, source['location'])
__plugin__ = StreamMe
|
import nose
from angr import SimState, SIM_PROCEDURES
FAKE_ADDR = 0x100000
def test_calling_conventions():
#
# SimProcedures
#
from angr.calling_conventions import SimCCCdecl
args = [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 1000, 100000, 1000000, 2000000, 14, 15, 16 ]
arches = [
('X86', SimCCCdecl),
('AMD64', None),
('ARMEL', None),
('MIPS32', None),
('PPC32', None),
('PPC64', None),
]
    # run the manyargs SimProcedure under each arch's calling convention
for arch, cc in arches:
s = SimState(arch=arch)
for reg, val, _, _ in s.arch.default_register_values:
s.registers.store(reg, val)
if cc is not None:
manyargs = SIM_PROCEDURES['testing']['manyargs'](cc=cc(s.arch)).execute(s)
else:
manyargs = SIM_PROCEDURES['testing']['manyargs']().execute(s)
# Simulate a call
if s.arch.call_pushes_ret:
s.regs.sp = s.regs.sp + s.arch.stack_change
manyargs.set_args(args)
for index, arg in enumerate(args):
nose.tools.assert_true(s.solver.is_true(manyargs.arg(index) == arg))
if __name__ == '__main__':
test_calling_conventions()
|
import os
import codecs
from django.core.urlresolvers import reverse
from onadata.apps.main.tests.test_base import TestBase
from onadata.apps.logger.models import Attachment
from onadata.apps.logger.models import Instance
from onadata.apps.logger.models import XForm
from onadata.apps.logger.views import submission
class TestEncryptedForms(TestBase):
def setUp(self):
        super(TestEncryptedForms, self).setUp()
self._create_user_and_login()
self._submission_url = reverse(
submission, kwargs={'username': self.user.username})
def test_encrypted_submissions(self):
self._publish_xls_file(os.path.join(
self.this_directory, 'fixtures', 'transportation',
'transportation_encrypted.xls'
))
xform = XForm.objects.get(id_string='transportation_encrypted')
self.assertTrue(xform.encrypted)
uuid = "c15252fe-b6f3-4853-8f04-bf89dc73985a"
with self.assertRaises(Instance.DoesNotExist):
Instance.objects.get(uuid=uuid)
message = u"Successful submission."
files = {}
for filename in ['submission.xml', 'submission.xml.enc']:
files[filename] = os.path.join(
self.this_directory, 'fixtures', 'transportation',
'instances_encrypted', filename)
count = Instance.objects.count()
acount = Attachment.objects.count()
with open(files['submission.xml.enc']) as ef:
with codecs.open(files['submission.xml']) as f:
post_data = {
'xml_submission_file': f,
'submission.xml.enc': ef}
response = self.client.post(self._submission_url, post_data)
self.assertContains(response, message, status_code=201)
self.assertEqual(Instance.objects.count(), count + 1)
self.assertEqual(Attachment.objects.count(), acount + 1)
self.assertTrue(Instance.objects.get(uuid=uuid))
|
import numpy as np
import pycuda.gpuarray as gpuarray
import pycuda.driver as drv
import scipy.linalg
import tps
from lfd.tpsopt.culinalg_exts import gemm, get_gpu_ptrs, dot_batch_nocheck
class NoGPUTPSSolver(object):
"""
class to fit a thin plate spline to data using precomputed
matrix products
"""
def __init__(self, bend_coefs, N, QN, NON, NR, x_nd, K_nn, rot_coef):
for b in bend_coefs:
assert b in NON, 'no solver found for bending coefficient {}'.format(b)
self.rot_coef = rot_coef
self.n, self.d = x_nd.shape
self.bend_coefs = bend_coefs
self.N = N
self.QN = QN
self.NON = NON
self.NR = NR
self.x_nd = x_nd
self.K_nn = K_nn
self.valid = True
# @profile
def solve(self, wt_n, y_nd, bend_coef, rot_coef,f_res):
assert y_nd.shape == (self.n, self.d)
assert bend_coef in self.bend_coefs
assert np.allclose(rot_coef, self.rot_coef)
assert self.valid
WQN = wt_n[:, None] * self.QN
        lhs = self.NON[bend_coef] + self.QN.T.dot(WQN)
wy_nd = wt_n[:, None] * y_nd
rhs = self.NR + self.QN.T.dot(wy_nd)
z = scipy.linalg.solve(lhs, rhs)
theta = self.N.dot(z)
set_ThinPlateSpline(f_res, self.x_nd, theta)
@staticmethod
def get_solvers(h5file):
solvers = {}
for seg_name, seg_info in h5file.iteritems():
solver_info = seg_info['solver']
N = solver_info['N'][:]
QN = solver_info['QN'][:]
NR = solver_info['NR'][:]
x_nd = solver_info['x_nd'][:]
K_nn = solver_info['K_nn'][:]
bend_coefs = [float(x) for x in solver_info['NON'].keys()]
NON = {}
for b in bend_coefs:
NON[b] = solver_info['NON'][str(b)][:]
solvers[seg_name] = NoGPUTPSSolver(bend_coefs, N, QN, NON, NR, x_nd, K_nn)
return solvers
class NoGPUEmptySolver(object):
"""
computes solution params and returns a NoGPUTPSSolver
"""
def __init__(self, max_N, bend_coefs):
d = 3
self.max_N = max_N
self.bend_coefs = bend_coefs
self.cur_solver = None
# @profile
def get_solver(self, x_na, K_nn, bend_coefs, rot_coef):
n,d = x_na.shape
assert len(bend_coefs) <= len(self.bend_coefs)
assert n <= self.max_N
        if self.cur_solver is not None:
self.cur_solver.valid = False
Q = np.c_[np.ones((n, 1)), x_na, K_nn]
A = np.r_[np.zeros((d+1, d+1)), np.c_[np.ones((n, 1)), x_na]].T
R = np.zeros((n+d+1, d))
R[1:d+1, :d] = np.diag(rot_coef)
n_cnts = A.shape[0]
_u,_s,_vh = np.linalg.svd(A.T)
N = _u[:,n_cnts:]
QN = Q.dot(N)
NR = N.T.dot(R)
NON = {}
for i, b in enumerate(bend_coefs):
O_b = np.zeros((n+d+1, n+d+1), np.float64)
O_b[d+1:, d+1:] += b * K_nn
O_b[1:d+1, 1:d+1] += np.diag(rot_coef)
NON[b] = N.T.dot(O_b.dot(N))
self.cur_solver = NoGPUTPSSolver(bend_coefs, N, QN, NON, NR, x_na, K_nn, rot_coef)
return self.cur_solver
class TPSSolver(object):
"""
class to fit a thin plate spline to data using precomputed
matrix products
"""
def __init__(self, bend_coefs, N, QN, NON, NR, x_nd, K_nn, rot_coef,
QN_gpu = None, WQN_gpu = None, NON_gpu = None, NHN_gpu = None):
for b in bend_coefs:
assert b in NON, 'no solver found for bending coefficient {}'.format(b)
self.rot_coef = rot_coef
self.n, self.d = x_nd.shape
self.bend_coefs = bend_coefs
self.N = N
self.QN = QN
self.NON = NON
self.NR = NR
self.x_nd = x_nd
self.K_nn = K_nn
## set up GPU memory
if QN_gpu is None:
self.QN_gpu = gpuarray.to_gpu(self.QN)
else:
self.QN_gpu = QN_gpu
if WQN_gpu is None:
self.WQN_gpu = gpuarray.zeros_like(self.QN_gpu)
else:
self.WQN_gpu = WQN_gpu
if NON_gpu is None:
self.NON_gpu = {}
for b in bend_coefs:
self.NON_gpu[b] = gpuarray.to_gpu(self.NON[b])
else:
self.NON_gpu = NON_gpu
if NHN_gpu is None:
self.NHN_gpu = gpuarray.zeros_like(self.NON_gpu[bend_coefs[0]])
else:
self.NHN_gpu = NHN_gpu
self.valid = True
# @profile
def initialize_solver(self, b, wt_n):
drv.memcpy_dtod_async(self.NHN_gpu.gpudata, self.NON_gpu[b].gpudata,
self.NHN_gpu.nbytes)
self.WQN_gpu.set_async(wt_n[:, None] * self.QN)
# @profile
def solve(self, wt_n, y_nd, bend_coef, rot_coef,f_res):
assert y_nd.shape == (self.n, self.d)
assert bend_coef in self.bend_coefs
assert np.allclose(rot_coef, self.rot_coef)
assert self.valid
self.initialize_solver(bend_coef, wt_n)
gemm(self.QN_gpu, self.WQN_gpu, self.NHN_gpu,
transa='T', alpha=1, beta=1)
lhs = self.NHN_gpu.get()
wy_nd = wt_n[:, None] * y_nd
rhs = self.NR + self.QN.T.dot(wy_nd)
z = scipy.linalg.solve(lhs, rhs)
theta = self.N.dot(z)
set_ThinPlateSpline(f_res, self.x_nd, theta)
@staticmethod
def get_solvers(h5file):
solvers = {}
for seg_name, seg_info in h5file.iteritems():
solver_info = seg_info['solver']
N = solver_info['N'][:]
QN = solver_info['QN'][:]
NR = solver_info['NR'][:]
x_nd = solver_info['x_nd'][:]
K_nn = solver_info['K_nn'][:]
bend_coefs = [float(x) for x in solver_info['NON'].keys()]
NON = {}
for b in bend_coefs:
NON[b] = solver_info['NON'][str(b)][:]
solvers[seg_name] = TPSSolver(bend_coefs, N, QN, NON, NR, x_nd, K_nn)
return solvers
class EmptySolver(object):
"""
pre-allocates the GPU space needed to get a new solver
efficiently computes solution params and returns a TPSSolver
"""
def __init__(self, max_N, bend_coefs):
d = 3
self.max_N = max_N
self.bend_coefs = bend_coefs
self.cur_solver = None
self.NON_gpu = gpuarray.empty(max_N * max_N * len(bend_coefs), np.float64)
self.NHN_gpu = gpuarray.empty(max_N * max_N , np.float64)
self.QN_gpu = gpuarray.empty(max_N * max_N, np.float64)
self.WQN_gpu = gpuarray.empty(max_N * max_N, np.float64)
# temporary space to compute NON
self.ON_gpu = gpuarray.empty(max_N * (max_N + d + 1)* len(bend_coefs), np.float64)
self.O_gpu = gpuarray.empty((max_N +d+1)*(max_N+d+1)* len(bend_coefs), np.float64)
self.N_gpu = gpuarray.empty((max_N +d+1)*(max_N) *len(bend_coefs), np.float64)
# @profile
def get_solver(self, x_na, K_nn, bend_coefs, rot_coef):
n,d = x_na.shape
assert len(bend_coefs) <= len(self.bend_coefs)
assert n <= self.max_N
        if self.cur_solver is not None:
self.cur_solver.valid = False
Q = np.c_[np.ones((n, 1)), x_na, K_nn]
A = np.r_[np.zeros((d+1, d+1)), np.c_[np.ones((n, 1)), x_na]].T
R = np.zeros((n+d+1, d))
R[1:d+1, :d] = np.diag(rot_coef)
n_cnts = A.shape[0]
_u,_s,_vh = np.linalg.svd(A.T)
N = _u[:,n_cnts:].copy()
N_gpu = self.N_gpu[:(n+d+1)*n].reshape(n+d+1, n)
N_gpu.set_async(N)
QN = Q.dot(N)
QN_gpu = self.QN_gpu[:n*n].reshape(n, n)
QN_gpu.set_async(QN)
WQN_gpu = self.WQN_gpu[:n*n].reshape(n, n)
NHN_gpu = self.NHN_gpu[:n*n].reshape(n, n)
NR = N.T.dot(R)
N_arr_gpu = []
O_gpu = []
ON_gpu = []
NON_gpu = []
for i, b in enumerate(bend_coefs):
O_b = np.zeros((n+d+1, n+d+1), np.float64)
O_b[d+1:, d+1:] += b * K_nn
O_b[1:d+1, 1:d+1] += np.diag(rot_coef)
offset = i * (n+d+1)*(n+d+1)
O_gpu.append(self.O_gpu[offset:offset + (n+d+1)*(n+d+1)].reshape(n+d+1, n+d+1))
O_gpu[-1].set(O_b)
offset = i * (n)*(n+d+1)
ON_gpu.append(self.ON_gpu[offset:offset + n*(n+d+1)].reshape(n+d+1, n))
offset = i * n * n
NON_gpu.append(self.NON_gpu[offset:offset + n*n].reshape(n, n))
N_arr_gpu.append(N_gpu)
O_ptrs = get_gpu_ptrs(O_gpu)
ON_ptrs = get_gpu_ptrs(ON_gpu)
NON_ptrs = get_gpu_ptrs(NON_gpu)
N_ptrs = get_gpu_ptrs(N_arr_gpu)
dot_batch_nocheck(O_gpu, N_arr_gpu, ON_gpu,
O_ptrs, N_ptrs, ON_ptrs,
b = 0)
dot_batch_nocheck(N_arr_gpu, ON_gpu, NON_gpu,
N_ptrs, ON_ptrs, NON_ptrs,
transa='T', b = 0)
NON_gpu = dict(zip(bend_coefs, NON_gpu))
NON = dict([(b, non.get_async()) for b, non in NON_gpu.iteritems()])
self.cur_solver = TPSSolver(bend_coefs, N, QN, NON, NR, x_na, K_nn, rot_coef,
QN_gpu, WQN_gpu, NON_gpu, NHN_gpu)
return self.cur_solver
class Transformation(object):
"""
Object oriented interface for transformations R^d -> R^d
"""
def transform_points(self, x_ma):
raise NotImplementedError
def compute_jacobian(self, x_ma):
raise NotImplementedError
def transform_bases(self, x_ma, rot_mad, orthogonalize=True, orth_method = "cross"):
"""
orthogonalize: none, svd, qr
"""
grad_mga = self.compute_jacobian(x_ma)
newrot_mgd = np.array([grad_ga.dot(rot_ad) for (grad_ga, rot_ad) in zip(grad_mga, rot_mad)])
if orthogonalize:
if orth_method == "qr":
newrot_mgd = orthogonalize3_qr(newrot_mgd)
elif orth_method == "svd":
newrot_mgd = orthogonalize3_svd(newrot_mgd)
elif orth_method == "cross":
newrot_mgd = orthogonalize3_cross(newrot_mgd)
            else: raise Exception("unknown orthogonalization method %s" % orth_method)
return newrot_mgd
def transform_hmats(self, hmat_mAD):
"""
        Transform (D+1) x (D+1) homogeneous matrices
"""
hmat_mGD = np.empty_like(hmat_mAD)
hmat_mGD[:,:3,3] = self.transform_points(hmat_mAD[:,:3,3])
hmat_mGD[:,:3,:3] = self.transform_bases(hmat_mAD[:,:3,3], hmat_mAD[:,:3,:3])
hmat_mGD[:,3,:] = np.array([0,0,0,1])
return hmat_mGD
def compute_numerical_jacobian(self, x_d, epsilon=0.0001):
"numerical jacobian"
x0 = np.asfarray(x_d)
f0 = self.transform_points(x0)
        jac = np.zeros((len(x0), len(f0)))
dx = np.zeros(len(x0))
for i in range(len(x0)):
dx[i] = epsilon
jac[i] = (self.transform_points(x0+dx) - f0) / epsilon
dx[i] = 0.
return jac.transpose()
class ThinPlateSpline(Transformation):
"""
members:
x_na: centers of basis functions
w_ng:
lin_ag: transpose of linear part, so you take x_na.dot(lin_ag)
trans_g: translation part
"""
def __init__(self, d=3):
"initialize as identity"
self.x_na = np.zeros((0,d))
self.lin_ag = np.eye(d)
self.trans_g = np.zeros(d)
self.w_ng = np.zeros((0,d))
def transform_points(self, x_ma):
y_ng = tps.tps_eval(x_ma, self.lin_ag, self.trans_g, self.w_ng, self.x_na)
return y_ng
def compute_jacobian(self, x_ma):
grad_mga = tps.tps_grad(x_ma, self.lin_ag, self.trans_g, self.w_ng, self.x_na)
return grad_mga
class Affine(Transformation):
def __init__(self, lin_ag, trans_g):
self.lin_ag = lin_ag
self.trans_g = trans_g
def transform_points(self, x_ma):
return x_ma.dot(self.lin_ag) + self.trans_g[None,:]
def compute_jacobian(self, x_ma):
return np.repeat(self.lin_ag.T[None,:,:],len(x_ma), axis=0)
class Composition(Transformation):
def __init__(self, fs):
"applied from first to last (left to right)"
self.fs = fs
def transform_points(self, x_ma):
for f in self.fs: x_ma = f.transform_points(x_ma)
return x_ma
def compute_jacobian(self, x_ma):
grads = []
for f in self.fs:
grad_mga = f.compute_jacobian(x_ma)
grads.append(grad_mga)
x_ma = f.transform_points(x_ma)
totalgrad = grads[0]
for grad in grads[1:]:
totalgrad = (grad[:,:,:,None] * totalgrad[:,None,:,:]).sum(axis=-2)
return totalgrad
def fit_ThinPlateSpline(x_na, y_ng, bend_coef=.1, rot_coef=1e-5, wt_n=None):
    """
    x_na: source cloud
    y_ng: target cloud
    bend_coef: penalize the non-affine part
    rot_coef: penalize rotation
    wt_n: weight the points
    """
f = ThinPlateSpline()
f.lin_ag, f.trans_g, f.w_ng = tps.tps_fit3(x_na, y_ng, bend_coef, rot_coef, wt_n)
f.x_na = x_na
return f
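# A small usage sketch (illustrative, not from the original file): fit a TPS
# that maps a random cloud onto a translated copy of itself.
#
#   x = np.random.rand(100, 3)
#   f = fit_ThinPlateSpline(x, x + np.array([0.1, 0.0, 0.0]))
#   y_pred = f.transform_points(x)   # approximately x + [0.1, 0, 0]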
def set_ThinPlateSpline(f, x_na, theta):
f.x_na = x_na
d = x_na.shape[1]
f.trans_g = theta[0]
f.lin_ag = theta[1:d+1]
f.w_ng = theta[d+1:]
|
import os, sys, cPickle
import wx # ensure this import works before starting the application
import matplotlib # ensure this import works before starting the application
if 'pyeq2-master' in sys.path[0]:
    raise Exception('Please rename git checkout directory from "pyeq2-master" to "pyeq2"')
exampleFileDirectory = sys.path[0][:sys.path[0].rfind(os.sep)]
pyeq2ImportDirectory = os.path.join(os.path.join(exampleFileDirectory, '..'), '..')
if pyeq2ImportDirectory not in sys.path:
    sys.path.append(pyeq2ImportDirectory)
import pyeq2
import guifiles.icon as icon
import guifiles.DataForControls as dfc
import guifiles.CustomDialogs as CustomDialogs
import guifiles.CustomEvents as CustomEvents
import guifiles.CustomThreads as CustomThreads
class ApplicationFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, None, title="Example wxPython Curve And Surface Fitter",
size=(800,600))
# wx converted an icon file to a Python file for embedding here, see icon.py file
self.SetIcon(icon.icon.GetIcon())
p = wx.Panel(self) # something to put the controls on
# create the controls
# no need to use "self." as these are not referenced by other methods
label1 = wx.StaticText(p, -1, "--- 2D Data Text Editor ---")
label2 = wx.StaticText(p, -1, "--- 3D Data Text Editor ---")
# use "self" because of references in other methods
self.text_2D = wx.TextCtrl(p, -1, dfc.exampleText_2D,
style=wx.TE_MULTILINE|wx.HSCROLL)
self.text_3D = wx.TextCtrl(p, -1, dfc.exampleText_3D,
style=wx.TE_MULTILINE|wx.HSCROLL)
# use "self" because of references in other methods
self.rbFittingTargetChoice_2D = wx.RadioBox(
p, -1, "Fitting Target 2D", wx.DefaultPosition, wx.DefaultSize,
dfc.fittingTargetList, 1, wx.RA_SPECIFY_COLS
)
self.rbFittingTargetChoice_3D = wx.RadioBox(
p, -1, "Fitting Target 3D", wx.DefaultPosition, wx.DefaultSize,
dfc.fittingTargetList, 1, wx.RA_SPECIFY_COLS
)
# use "self" because of references in other methods
self.rbEqChoice_2D = wx.RadioBox(
p, -1, "Example 2D Equations", wx.DefaultPosition, wx.DefaultSize,
dfc.exampleEquationList_2D, 1, wx.RA_SPECIFY_COLS
)
self.rbEqChoice_3D = wx.RadioBox(
p, -1, "Example 3D Equations", wx.DefaultPosition, wx.DefaultSize,
dfc.exampleEquationList_3D, 1, wx.RA_SPECIFY_COLS
)
# use "self" because of references in other methods
self.btnFit2D = wx.Button(p, -1, "Fit 2D Text Data")
self.btnFit3D = wx.Button(p, -1, "Fit 3D Text Data")
# setup the layout with grid sizer
fgs = wx.FlexGridSizer(5, 2, 10, 20)
fgs.AddGrowableRow(1)
fgs.AddGrowableCol(0)
fgs.AddGrowableCol(1)
fgs.Add(label1, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(label2, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(self.text_2D, 0, wx.EXPAND)
fgs.Add(self.text_3D, 0, wx.EXPAND)
fgs.Add(self.rbEqChoice_2D, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(self.rbEqChoice_3D, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(self.rbFittingTargetChoice_2D, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(self.rbFittingTargetChoice_3D, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(self.btnFit2D, 0, wx.ALIGN_CENTER_HORIZONTAL)
fgs.Add(self.btnFit3D, 0, wx.ALIGN_CENTER_HORIZONTAL)
border = wx.BoxSizer()
border.Add(fgs, 1, wx.EXPAND|wx.ALL, 10)
p.SetSizer(border)
# all controls on the main panel have been added with sizers,
# now center the application window on the user's display
self.Center()
# this dialog will not be displayed unless fitting is in progress
# use "self" because of references in other methods
self.statusBox = CustomDialogs.StatusDialog(self, '', "Status")
# Bind the button events to their application methods
self.Bind(wx.EVT_BUTTON, self.OnFit2D, self.btnFit2D)
self.Bind(wx.EVT_BUTTON, self.OnFit3D, self.btnFit3D)
# Set up event handler for any worker thread results
CustomEvents.EVT_THREADSTATUS(self, self.OnThreadStatus)
self.fittingWorkerThread = None
def OnThreadStatus(self, event):
        if isinstance(event.data, str): # strings are status updates
self.statusBox.text.AppendText(event.data + "\n")
else: # not string data type, the worker thread completed
self.fittingWorkerThread = None
# event.data will be the fitted equation
pickledEquationFile = open("pickledEquationFile", "wb")
cPickle.dump(event.data, pickledEquationFile)
pickledEquationFile.close()
self.btnFit2D.Enable()
self.btnFit3D.Enable()
self.statusBox.Hide()
currentDirectory = os.path.dirname(os.path.abspath(__file__))
dialogDirectory = os.path.join(currentDirectory, 'guifiles')
commandString = os.path.join(dialogDirectory, 'CustomDialogs.py')
os.popen(sys.executable + ' ' + commandString)
def OnFit2D(self, evt):
textData = str(self.text_2D.GetValue())
equationSelection = self.rbEqChoice_2D.GetStringSelection()
fittingTargetSelection = self.rbFittingTargetChoice_2D.GetStringSelection()
# the GUI's fitting target string contains what we need - extract it
fittingTarget = fittingTargetSelection.split('(')[1].split(')')[0]
if equationSelection == 'Linear Polynomial':
self.equation = pyeq2.Models_2D.Polynomial.Linear(fittingTarget)
if equationSelection == 'Quadratic Polynomial':
self.equation = pyeq2.Models_2D.Polynomial.Quadratic(fittingTarget)
if equationSelection == 'Cubic Polynomial':
self.equation = pyeq2.Models_2D.Polynomial.Cubic(fittingTarget)
if equationSelection == 'Witch Of Maria Agnesi A':
self.equation = pyeq2.Models_2D.Miscellaneous.WitchOfAgnesiA(fittingTarget)
if equationSelection == 'VanDeemter Chromatography':
self.equation = pyeq2.Models_2D.Engineering.VanDeemterChromatography(fittingTarget)
if equationSelection == 'Gamma Ray Angular Distribution (degrees) B':
self.equation = pyeq2.Models_2D.LegendrePolynomial.GammaRayAngularDistributionDegreesB(fittingTarget)
if equationSelection == 'Exponential With Offset':
self.equation = pyeq2.Models_2D.Exponential.Exponential(fittingTarget, 'Offset')
# convert text to numeric data checking for log of negative numbers, etc.
try:
pyeq2.dataConvertorService().ConvertAndSortColumnarASCII(textData, self.equation, False)
except:
wx.MessageBox(self.equation.reasonWhyDataRejected, "Error")
return
# check for number of coefficients > number of data points to be fitted
coeffCount = len(self.equation.GetCoefficientDesignators())
dataCount = len(self.equation.dataCache.allDataCacheDictionary['DependentData'])
if coeffCount > dataCount:
wx.MessageBox("This equation requires a minimum of " + str(coeffCount) + " data points, you have supplied " + repr(dataCount) + ".", "Error")
return
# Now the status dialog is used. Disable fitting buttons until thread completes
self.btnFit2D.Disable()
self.btnFit3D.Disable()
self.statusBox.text.SetValue('')
self.statusBox.Show() # hidden by OnThreadStatus() when thread completes
        # thread will automatically start to run
self.fittingWorkerThread = CustomThreads.FittingThread(self, self.equation)
def OnFit3D(self, evt):
textData = str(self.text_3D.GetValue())
equationSelection = self.rbEqChoice_3D.GetStringSelection()
fittingTargetSelection = self.rbFittingTargetChoice_3D.GetStringSelection()
# the GUI's fitting target string contains what we need - extract it
fittingTarget = fittingTargetSelection.split('(')[1].split(')')[0]
if equationSelection == 'Linear Polynomial':
self.equation = pyeq2.Models_3D.Polynomial.Linear(fittingTarget)
if equationSelection == 'Full Quadratic Polynomial':
self.equation = pyeq2.Models_3D.Polynomial.FullQuadratic(fittingTarget)
if equationSelection == 'Full Cubic Polynomial':
self.equation = pyeq2.Models_3D.Polynomial.FullCubic(fittingTarget)
if equationSelection == 'Monkey Saddle A':
self.equation = pyeq2.Models_3D.Miscellaneous.MonkeySaddleA(fittingTarget)
if equationSelection == 'Gaussian Curvature Of Whitneys Umbrella A':
self.equation = pyeq2.Models_3D.Miscellaneous.GaussianCurvatureOfWhitneysUmbrellaA(fittingTarget)
if equationSelection == 'NIST Nelson Autolog':
self.equation = pyeq2.Models_3D.NIST.NIST_NelsonAutolog(fittingTarget)
if equationSelection == 'Custom Polynomial One':
self.equation = pyeq2.Models_3D.Polynomial.UserSelectablePolynomial(fittingTarget, "Default", 3, 1)
# convert text to numeric data checking for log of negative numbers, etc.
try:
pyeq2.dataConvertorService().ConvertAndSortColumnarASCII(textData, self.equation, False)
except:
wx.MessageBox(self.equation.reasonWhyDataRejected, "Error")
return
# check for number of coefficients > number of data points to be fitted
coeffCount = len(self.equation.GetCoefficientDesignators())
dataCount = len(self.equation.dataCache.allDataCacheDictionary['DependentData'])
if coeffCount > dataCount:
wx.MessageBox("This equation requires a minimum of " + str(coeffCount) + " data points, you have supplied " + repr(dataCount) + ".", "Error")
return
# Now the status dialog is used. Disable fitting buttons until thread completes
self.btnFit2D.Disable()
self.btnFit3D.Disable()
self.statusBox.text.SetValue('')
self.statusBox.Show() # hidden by OnThreadStatus() when thread completes
# thread will automatically start to run
self.fittingWorkerThread = CustomThreads.FittingThread(self, self.equation)
if __name__ == "__main__":
app = wx.App()
frm = ApplicationFrame()
frm.Show()
app.MainLoop()
|
"""
This package collects functions useful for studying chemical kinetics problems.
"""
from .rates import MassAction, EyringHS
from .eyring import EyringParam
from .arrhenius import ArrheniusParam
|
from __future__ import print_function
import unittest2
from lldbsuite.test.decorators import *
from lldbsuite.test.concurrent_base import ConcurrentEventsBase
from lldbsuite.test.lldbtest import TestBase
@skipIfWindows
class ConcurrentWatchBreakDelay(ConcurrentEventsBase):
mydir = ConcurrentEventsBase.compute_mydir(__file__)
@skipIfFreeBSD # timing out on buildbot
# Atomic sequences are not supported yet for MIPS in LLDB.
@skipIf(triple='^mips')
@add_test_categories(["watchpoint"])
def test(self):
"""Test watchpoint and a (1 second delay) breakpoint in multiple threads."""
self.build(dictionary=self.getBuildFlags())
self.do_thread_actions(
num_delay_breakpoint_threads=1,
num_watchpoint_threads=1)
|
"""Utilities to build feature vectors from text documents"""
import re
import unicodedata
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..preprocessing import normalize
from ..utils.fixes import Counter
ENGLISH_STOP_WORDS = set([
"a", "about", "above", "across", "after", "afterwards", "again", "against",
"all", "almost", "alone", "along", "already", "also", "although", "always",
"am", "among", "amongst", "amoungst", "amount", "an", "and", "another",
"any", "anyhow", "anyone", "anything", "anyway", "anywhere", "are",
"around", "as", "at", "back", "be", "became", "because", "become",
"becomes", "becoming", "been", "before", "beforehand", "behind", "being",
"below", "beside", "besides", "between", "beyond", "bill", "both",
"bottom", "but", "by", "call", "can", "cannot", "cant", "co", "con",
"could", "couldnt", "cry", "de", "describe", "detail", "do", "done",
"down", "due", "during", "each", "eg", "eight", "either", "eleven", "else",
"elsewhere", "empty", "enough", "etc", "even", "ever", "every", "everyone",
"everything", "everywhere", "except", "few", "fifteen", "fify", "fill",
"find", "fire", "first", "five", "for", "former", "formerly", "forty",
"found", "four", "from", "front", "full", "further", "get", "give", "go",
"had", "has", "hasnt", "have", "he", "hence", "her", "here", "hereafter",
"hereby", "herein", "hereupon", "hers", "herself", "him", "himself", "his",
"how", "however", "hundred", "i", "ie", "if", "in", "inc", "indeed",
"interest", "into", "is", "it", "its", "itself", "keep", "last", "latter",
"latterly", "least", "less", "ltd", "made", "many", "may", "me",
"meanwhile", "might", "mill", "mine", "more", "moreover", "most", "mostly",
"move", "much", "must", "my", "myself", "name", "namely", "neither",
"never", "nevertheless", "next", "nine", "no", "nobody", "none", "noone",
"nor", "not", "nothing", "now", "nowhere", "of", "off", "often", "on",
"once", "one", "only", "onto", "or", "other", "others", "otherwise", "our",
"ours", "ourselves", "out", "over", "own", "part", "per", "perhaps",
"please", "put", "rather", "re", "same", "see", "seem", "seemed",
"seeming", "seems", "serious", "several", "she", "should", "show", "side",
"since", "sincere", "six", "sixty", "so", "some", "somehow", "someone",
"something", "sometime", "sometimes", "somewhere", "still", "such",
"system", "take", "ten", "than", "that", "the", "their", "them",
"themselves", "then", "thence", "there", "thereafter", "thereby",
"therefore", "therein", "thereupon", "these", "they", "thick", "thin",
"third", "this", "those", "though", "three", "through", "throughout",
"thru", "thus", "to", "together", "too", "top", "toward", "towards",
"twelve", "twenty", "two", "un", "under", "until", "up", "upon", "us",
"very", "via", "was", "we", "well", "were", "what", "whatever", "when",
"whence", "whenever", "where", "whereafter", "whereas", "whereby",
"wherein", "whereupon", "wherever", "whether", "which", "while", "whither",
"who", "whoever", "whole", "whom", "whose", "why", "will", "with",
"within", "without", "would", "yet", "you", "your", "yours", "yourself",
"yourselves"])
def strip_accents(s):
"""Transform accentuated unicode symbols into their simple counterpart
Warning: the python-level loop and join operations make this implementation
20 times slower than the to_ascii basic normalization.
"""
return u''.join([c for c in unicodedata.normalize('NFKD', s)
if not unicodedata.combining(c)])
def to_ascii(s):
"""Transform accentuated unicode symbols into ascii or nothing
Warning: this solution is only suited for languages that have a direct
transliteration to ASCII symbols.
A better solution would be to use transliteration based on a precomputed
unidecode map to be used by translate as explained here:
http://stackoverflow.com/questions/2854230/
"""
nkfd_form = unicodedata.normalize('NFKD', s)
only_ascii = nkfd_form.encode('ASCII', 'ignore')
return only_ascii
def strip_tags(s):
return re.compile(r"<([^>]+)>", flags=re.UNICODE).sub("", s)
class RomanPreprocessor(object):
"""Fast preprocessor suitable for Latin alphabet text"""
def preprocess(self, unicode_text):
"""Preprocess strings"""
return to_ascii(strip_tags(unicode_text.lower()))
def __repr__(self):
return "RomanPreprocessor()"
DEFAULT_PREPROCESSOR = RomanPreprocessor()
DEFAULT_TOKEN_PATTERN = r"\b\w\w+\b"
class WordNGramAnalyzer(BaseEstimator):
"""Simple analyzer: transform text document into a sequence of word tokens
This simple implementation does:
- lower case conversion
- unicode accents removal
    - token extraction using unicode regexp word boundaries for tokens of
      minimum size of 2 symbols (by default)
- output token n-grams (unigram only by default)
"""
def __init__(self, charset='utf-8', min_n=1, max_n=1,
preprocessor=DEFAULT_PREPROCESSOR,
stop_words=ENGLISH_STOP_WORDS,
token_pattern=DEFAULT_TOKEN_PATTERN):
self.charset = charset
self.stop_words = stop_words
self.min_n = min_n
self.max_n = max_n
self.preprocessor = preprocessor
self.token_pattern = token_pattern
def analyze(self, text_document):
"""From documents to token"""
if hasattr(text_document, 'read'):
# ducktype for file-like objects
text_document = text_document.read()
if isinstance(text_document, str):
text_document = text_document.decode(self.charset, 'ignore')
text_document = self.preprocessor.preprocess(text_document)
# word boundaries tokenizer (cannot compile it in the __init__ because
# we want support for pickling and runtime parameter fitting)
compiled = re.compile(self.token_pattern, re.UNICODE)
tokens = compiled.findall(text_document)
# handle token n-grams
if self.min_n != 1 or self.max_n != 1:
original_tokens = tokens
tokens = []
n_original_tokens = len(original_tokens)
for n in xrange(self.min_n, min(self.max_n + 1, n_original_tokens + 1)):
for i in xrange(n_original_tokens - n + 1):
tokens.append(u" ".join(original_tokens[i: i + n]))
# handle stop words
if self.stop_words is not None:
tokens = [w for w in tokens if w not in self.stop_words]
return tokens
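# A doctest-style sketch (illustrative): unigrams plus bigrams with stop word
# filtering. 'are' is dropped as a unigram but survives inside the bigrams,
# since n-grams are matched against the stop word list as whole strings.
#
#   WordNGramAnalyzer(min_n=1, max_n=2).analyze(u"Bi-grams are cool!")
#   -> ['bi', 'grams', 'cool', 'bi grams', 'grams are', 'are cool']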
class CharNGramAnalyzer(BaseEstimator):
"""Compute character n-grams features of a text document
    This analyzer is interesting since it is language agnostic and will work
    well even for languages where word segmentation is not as trivial as in
    English, such as Chinese or German.
Because of this, it can be considered a basic morphological analyzer.
"""
white_spaces = re.compile(r"\s\s+")
def __init__(self, charset='utf-8', preprocessor=DEFAULT_PREPROCESSOR,
min_n=3, max_n=6):
self.charset = charset
self.min_n = min_n
self.max_n = max_n
self.preprocessor = preprocessor
def analyze(self, text_document):
"""From documents to token"""
if hasattr(text_document, 'read'):
# ducktype for file-like objects
text_document = text_document.read()
if isinstance(text_document, str):
text_document = text_document.decode(self.charset, 'ignore')
text_document = self.preprocessor.preprocess(text_document)
# normalize white spaces
text_document = self.white_spaces.sub(" ", text_document)
text_len = len(text_document)
ngrams = []
for n in xrange(self.min_n, min(self.max_n + 1, text_len + 1)):
for i in xrange(text_len - n + 1):
ngrams.append(text_document[i: i + n])
return ngrams
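# A doctest-style sketch (illustrative): character trigrams of a short string
# after preprocessing and whitespace normalization.
#
#   CharNGramAnalyzer(min_n=3, max_n=3).analyze(u"abcd")
#   -> ['abc', 'bcd']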
DEFAULT_ANALYZER = WordNGramAnalyzer(min_n=1, max_n=1)
class CountVectorizer(BaseEstimator):
"""Convert a collection of raw documents to a matrix of token counts
This implementation produces a sparse representation of the counts using
scipy.sparse.coo_matrix.
If you do not provide an a-priori dictionary and you do not use an analyzer
that does some kind of feature selection then the number of features will
be equal to the vocabulary size found by analysing the data. The default
analyzer does simple stop word filtering for English.
Parameters
----------
analyzer: WordNGramAnalyzer or CharNGramAnalyzer, optional
vocabulary: dict or iterable, optional
Either a dictionary where keys are tokens and values are indices in
the matrix, or an iterable over terms (in which case the indices are
determined by the iteration order as per enumerate).
This is useful in order to fix the vocabulary in advance.
max_df : float in range [0.0, 1.0], optional, 1.0 by default
When building the vocabulary ignore terms that have a term frequency
strictly higher than the given threshold (corpus specific stop words).
This parameter is ignored if vocabulary is not None.
max_features : optional, None by default
If not None, build a vocabulary that only consider the top
max_features ordered by term frequency across the corpus.
This parameter is ignored if vocabulary is not None.
dtype: type, optional
Type of the matrix returned by fit_transform() or transform().
"""
def __init__(self, analyzer=DEFAULT_ANALYZER, vocabulary=None, max_df=1.0,
max_features=None, dtype=long):
self.analyzer = analyzer
self.fit_vocabulary = vocabulary is None
if vocabulary is not None and not isinstance(vocabulary, dict):
vocabulary = dict((t, i) for i, t in enumerate(vocabulary))
self.vocabulary = vocabulary
self.dtype = dtype
self.max_df = max_df
self.max_features = max_features
def _term_count_dicts_to_matrix(self, term_count_dicts):
i_indices = []
j_indices = []
values = []
vocabulary = self.vocabulary
for i, term_count_dict in enumerate(term_count_dicts):
for term, count in term_count_dict.iteritems():
j = vocabulary.get(term)
if j is not None:
i_indices.append(i)
j_indices.append(j)
values.append(count)
# free memory as we go
term_count_dict.clear()
shape = (len(term_count_dicts), max(vocabulary.itervalues()) + 1)
return sp.coo_matrix((values, (i_indices, j_indices)),
shape=shape, dtype=self.dtype)
def fit(self, raw_documents, y=None):
"""Learn a vocabulary dictionary of all tokens in the raw documents
Parameters
----------
raw_documents: iterable
an iterable which yields either str, unicode or file objects
Returns
-------
self
"""
self.fit_transform(raw_documents)
return self
def fit_transform(self, raw_documents, y=None):
"""Learn the vocabulary dictionary and return the count vectors
This is more efficient than calling fit followed by transform.
Parameters
----------
raw_documents: iterable
an iterable which yields either str, unicode or file objects
Returns
-------
vectors: array, [n_samples, n_features]
"""
if not self.fit_vocabulary:
return self.transform(raw_documents)
# result of document conversion to term count dicts
term_counts_per_doc = []
term_counts = Counter()
# term counts across entire corpus (count each term maximum once per
# document)
document_counts = Counter()
max_df = self.max_df
max_features = self.max_features
# TODO: parallelize the following loop with joblib?
# (see XXX up ahead)
for doc in raw_documents:
term_count_current = Counter()
for term in self.analyzer.analyze(doc):
term_count_current[term] += 1
term_counts[term] += 1
if max_df is not None:
for term in term_count_current:
document_counts[term] += 1
term_counts_per_doc.append(term_count_current)
n_doc = len(term_counts_per_doc)
        # filter out stop words: terms that occur in almost all documents
        stop_words = set()
        if max_df is not None:
            max_document_count = max_df * n_doc
            stop_words = set(t for t, dc in document_counts.iteritems()
                             if dc > max_document_count)
# list the terms that should be part of the vocabulary
if max_features is None:
terms = [t for t in term_counts if t not in stop_words]
else:
# extract the most frequent terms for the vocabulary
terms = set()
for t, tc in term_counts.most_common():
if t not in stop_words:
terms.add(t)
if len(terms) >= max_features:
break
# convert to a document-token matrix
self.vocabulary = dict(((t, i) for i, t in enumerate(terms)))
        # the term_counts and document_counts might be useful statistics, are
        # we really sure we want to drop them? They take some memory but
        # can be useful for corpus introspection
return self._term_count_dicts_to_matrix(term_counts_per_doc)
def transform(self, raw_documents):
"""Extract token counts out of raw text documents using the vocabulary
fitted with fit or the one provided in the constructor.
Parameters
----------
raw_documents: iterable
an iterable which yields either str, unicode or file objects
Returns
-------
vectors: sparse matrix, [n_samples, n_features]
"""
if not self.vocabulary:
raise ValueError("Vocabulary wasn't fitted or is empty!")
# raw_documents is an iterable so we don't know its size in advance
# result of document conversion to term_count_dict
term_counts_per_doc = []
# XXX @larsmans tried to parallelize the following loop with joblib.
# The result was some 20% slower than the serial version.
for doc in raw_documents:
term_count_current = Counter()
for term in self.analyzer.analyze(doc):
term_count_current[term] += 1
term_counts_per_doc.append(term_count_current)
# now that we know the document we can allocate the vectors matrix at
# once and fill it with the term counts collected as a temporary list
# of dict
return self._term_count_dicts_to_matrix(term_counts_per_doc)
def inverse_transform(self, X):
"""Return terms per document with nonzero entries in X.
Parameters
----------
X : {array, sparse matrix}, shape = [n_samples, n_features]
Returns
-------
X_inv : list of arrays, len = n_samples
List of arrays of terms.
"""
if type(X) is sp.coo_matrix: # COO matrix is not indexable
X = X.tocsr()
terms = np.array(self.vocabulary.keys())
indices = np.array(self.vocabulary.values())
inverse_vocabulary = terms[np.argsort(indices)]
return [inverse_vocabulary[X[i, :].nonzero()[1]]
for i in xrange(X.shape[0])]
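# An end-to-end sketch (illustrative; the two documents are hypothetical): the
# default analyzer drops the stop word "the", leaving a 3-term vocabulary.
#
#   vec = CountVectorizer()
#   counts = vec.fit_transform([u"the cat sat", u"the cat ran"])
#   counts.shape            -> (2, 3)
#   sorted(vec.vocabulary)  -> ['cat', 'ran', 'sat']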
class TfidfTransformer(BaseEstimator, TransformerMixin):
"""Transform a count matrix to a normalized tf or tf–idf representation
Tf means term-frequency while tf–idf means term-frequency times inverse
document-frequency. This is a common term weighting scheme in information
retrieval, that has also found good use in document classification.
The goal of using tf–idf instead of the raw frequencies of occurrence of a
token in a given document is to scale down the impact of tokens that occur
very frequently in a given corpus and that are hence empirically less
informative than features that occur in a small fraction of the training
corpus.
In the SMART notation used in IR, this class implements several tf–idf
variants. Tf is always "n" (natural), idf is "t" iff use_idf is given,
"n" otherwise, and normalization is "c" iff norm='l2', "n" iff norm=None.
Parameters
----------
norm : 'l1', 'l2' or None, optional
Norm used to normalize term vectors. None for no normalization.
use_idf : boolean, optional
Enable inverse-document-frequency reweighting.
smooth_idf : boolean, optional
Smooth idf weights by adding one to document frequencies, as if an
extra document was seen containing every term in the collection
exactly once. Prevents zero divisions.
References
----------
R. Baeza-Yates and B. Ribeiro-Neto (2011). Modern Information Retrieval.
Addison Wesley, pp. 68–74.
C.D. Manning, H. Schütze and P. Raghavan (2008). Introduction to
Information Retrieval. Cambridge University Press, pp. 121–125.
"""
def __init__(self, norm='l2', use_idf=True, smooth_idf=True):
self.norm = norm
self.use_idf = use_idf
self.smooth_idf = smooth_idf
self.idf_ = None
def fit(self, X, y=None):
"""Learn the idf vector (global term weights)
Parameters
----------
X: sparse matrix, [n_samples, n_features]
a matrix of term/token counts
"""
if self.use_idf:
n_samples, n_features = X.shape
df = np.bincount(X.nonzero()[1])
if df.shape[0] < n_features:
# bincount might return fewer bins than there are features
df = np.concatenate([df, np.zeros(n_features - df.shape[0])])
df += int(self.smooth_idf)
self.idf_ = np.log(float(n_samples) / df)
return self
def transform(self, X, copy=True):
"""Transform a count matrix to a tf or tf–idf representation
Parameters
----------
X: sparse matrix, [n_samples, n_features]
a matrix of term/token counts
Returns
-------
vectors: sparse matrix, [n_samples, n_features]
"""
X = sp.csr_matrix(X, dtype=np.float64, copy=copy)
n_samples, n_features = X.shape
if self.use_idf:
d = sp.lil_matrix((len(self.idf_), len(self.idf_)))
d.setdiag(self.idf_)
# *= doesn't work
X = X * d
if self.norm:
X = normalize(X, norm=self.norm, copy=False)
return X
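# A worked example of the idf weighting above (illustrative numbers): with
# n_samples=4 documents and a term appearing in df=2 of them, smooth_idf bumps
# df to 3, so idf = log(4 / 3) ~= 0.288; without smoothing, idf = log(4 / 2)
# ~= 0.693. Each count column is scaled by its idf before the optional l1/l2
# row normalization.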
class Vectorizer(BaseEstimator):
"""Convert a collection of raw documents to a matrix
Equivalent to CountVectorizer followed by TfidfTransformer.
"""
def __init__(self, analyzer=DEFAULT_ANALYZER, max_df=1.0,
max_features=None, norm='l2', use_idf=True, smooth_idf=True):
self.tc = CountVectorizer(analyzer, max_df=max_df,
max_features=max_features,
dtype=np.float64)
self.tfidf = TfidfTransformer(norm=norm, use_idf=use_idf,
smooth_idf=smooth_idf)
def fit(self, raw_documents):
"""Learn a conversion law from documents to array data"""
X = self.tc.fit_transform(raw_documents)
self.tfidf.fit(X)
return self
def fit_transform(self, raw_documents):
"""
Learn the representation and return the vectors.
Parameters
----------
raw_documents: iterable
an iterable which yields either str, unicode or file objects
Returns
-------
vectors: array, [n_samples, n_features]
"""
X = self.tc.fit_transform(raw_documents)
# X is already a transformed view of raw_documents so
# we set copy to False
return self.tfidf.fit(X).transform(X, copy=False)
def transform(self, raw_documents, copy=True):
"""Transform raw text documents to tf–idf vectors
Parameters
----------
raw_documents: iterable
an iterable which yields either str, unicode or file objects
Returns
-------
vectors: sparse matrix, [n_samples, n_features]
"""
X = self.tc.transform(raw_documents)
return self.tfidf.transform(X, copy)
def inverse_transform(self, X):
"""Return terms per document with nonzero entries in X.
Parameters
----------
X : {array, sparse matrix}, shape = [n_samples, n_features]
Returns
-------
X_inv : list of arrays, len = n_samples
List of arrays of terms.
"""
return self.tc.inverse_transform(X)
vocabulary = property(lambda self: self.tc.vocabulary)
analyzer = property(lambda self: self.tc.analyzer)
|
__author__ = 'robert'
from pypet.tests.testutils.ioutils import make_temp_dir
import tables as pt
import tables.parameters
import os
import time
def create_children_dfs(hdf5_file, group_node, current_children):
if len(current_children) == 0:
return 1
nchildren = current_children[0]
child_count = 0
for irun in range(nchildren):
name = 'child%d' % irun
hdf5_file.create_group(where=group_node, name=name)
child = group_node._f_get_child(name)
child_count += create_children_dfs(hdf5_file, child, current_children[1:])
return child_count
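# For example, children_structure=(500, 500, 1) builds 500 groups at the top
# level, 500 under each of those, and 1 under each of those, so the returned
# leaf count is 500 * 500 * 1 = 250000.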
def main():
start = time.time()
filename = os.path.join(make_temp_dir('tmp'), 'children.hdf5')
dirs = os.path.dirname(filename)
if not os.path.isdir(dirs):
os.makedirs(dirs)
if os.path.isfile(filename):
os.remove(filename)
#children_structure=(250000,1,1)
children_structure=(500,500,1)
myfile = pt.open_file(filename, mode='w')
cc = create_children_dfs(myfile, myfile.root, children_structure)
end = time.time()
runtime = end-start
print('\nCreated %d children %s in %f seconds' % (cc, str(children_structure), runtime))
if __name__ == '__main__':
main()
|
"""
ABIDE2BIDS download tool.
"""
import errno
import json
import os
import os.path as op
import shutil
import subprocess as sp
import tempfile
from argparse import ArgumentParser, RawTextHelpFormatter
from multiprocessing import Pool
from typing import Tuple
from xml.etree import ElementTree as et
import numpy as np
from mriqc.bin import messages
def main():
"""Entry point."""
parser = ArgumentParser(
description="ABIDE2BIDS downloader.",
formatter_class=RawTextHelpFormatter,
)
g_input = parser.add_argument_group("Inputs")
g_input.add_argument("-i", "--input-abide-catalog", action="store", required=True)
g_input.add_argument(
"-n", "--dataset-name", action="store", default="ABIDE Dataset"
)
g_input.add_argument(
"-u", "--nitrc-user", action="store", default=os.getenv("NITRC_USER")
)
g_input.add_argument(
"-p",
"--nitrc-password",
action="store",
default=os.getenv("NITRC_PASSWORD"),
)
g_outputs = parser.add_argument_group("Outputs")
g_outputs.add_argument("-o", "--output-dir", action="store", default="ABIDE-BIDS")
opts = parser.parse_args()
if opts.nitrc_user is None or opts.nitrc_password is None:
raise RuntimeError("NITRC user and password are required")
dataset_desc = {
"BIDSVersion": "1.0.0rc3",
"License": "CC Attribution-NonCommercial-ShareAlike 3.0 Unported",
"Name": opts.dataset_name,
}
out_dir = op.abspath(opts.output_dir)
try:
os.makedirs(out_dir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise exc
with open(op.join(out_dir, "dataset_description.json"), "w") as dfile:
json.dump(dataset_desc, dfile)
catalog = et.parse(opts.input_abide_catalog).getroot()
urls = [el.get("URI") for el in catalog.iter() if el.get("URI") is not None]
pool = Pool()
args_list = [(url, opts.nitrc_user, opts.nitrc_password, out_dir) for url in urls]
res = pool.map(fetch, args_list)
tsv_data = np.array([("subject_id", "site_name")] + res)
np.savetxt(
op.join(out_dir, "participants.tsv"),
tsv_data,
fmt="%s",
delimiter="\t",
)
def fetch(args: Tuple[str, str, str, str]) -> Tuple[str, str]:
"""
Downloads a subject and formats it into BIDS.
Parameters
----------
args : Tuple[str, str, str, str]
URL, NITRC user, NITRC password, destination
Returns
-------
Tuple[str, str]
Subject ID, Site name
"""
out_dir = None
if len(args) == 3:
url, user, password = args
else:
url, user, password, out_dir = args
tmpdir = tempfile.mkdtemp()
if out_dir is None:
out_dir = os.getcwd()
else:
out_dir = op.abspath(out_dir)
pkg_id = [u[9:] for u in url.split("/") if u.startswith("NITRC_IR_")][0]
sub_file = op.join(tmpdir, "%s.zip" % pkg_id)
cmd = ["curl", "-s", "-u", "%s:%s" % (user, password), "-o", sub_file, url]
sp.check_call(cmd)
sp.check_call(["unzip", "-qq", "-d", tmpdir, "-u", sub_file])
abide_root = op.join(tmpdir, "ABIDE")
files = []
    for root, dirnames, filenames in os.walk(abide_root):
        if filenames and (filenames[0].endswith("nii") or filenames[0].endswith("nii.gz")):
            if dirnames:
                root = op.join(root, dirnames[0])
            files.append(op.join(root, filenames[0]))
index = len(abide_root) + 1
site_name, sub_str = files[0][index:].split("/")[0].split("_")
subject_id = "sub-" + sub_str
for i in files:
ext = ".nii.gz"
if i.endswith(".nii"):
ext = ".nii"
if "mprage" in i:
bids_dir = op.join(out_dir, subject_id, "anat")
try:
os.makedirs(bids_dir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise exc
shutil.copy(i, op.join(bids_dir, subject_id + "_T1w" + ext))
if "rest" in i:
bids_dir = op.join(out_dir, subject_id, "func")
try:
os.makedirs(bids_dir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise exc
shutil.copy(i, op.join(bids_dir, subject_id + "_rest_bold" + ext))
    shutil.rmtree(tmpdir, onerror=_myerror)
success_message = messages.ABIDE_SUBJECT_FETCHED.format(
subject_id=subject_id[4:], site_name=site_name
)
print(success_message)
return subject_id[4:], site_name
def _myerror(function, path, excinfo):
    """
    Print a warning when an exception is raised while removing temporary files.
    Parameters
    ----------
    function : callable
        The `shutil.rmtree()` internal function that failed
    path : str
        The path that could not be removed
    excinfo : tuple
        Exception information, as returned by `sys.exc_info()`
    """
    warning = messages.ABIDE_TEMPORAL_WARNING.format(message=excinfo[1])
    print(warning)
if __name__ == "__main__":
main()
|
from imageviewer import ImageViewerCell, ImageFileToSpreadsheet
def widgetName():
""" widgetName() -> str
Return the name of this widget plugin
"""
return 'Image Viewer'
def registerWidget(reg, basicModules, basicWidgets):
""" registerWidget(reg: module_registry,
basicModules: python package,
basicWidgets: python package) -> None
Register all widgets in this package to VisTrails module_registry
"""
reg.add_module(ImageViewerCell)
reg.add_input_port(ImageViewerCell, "Location", basicWidgets.CellLocation)
reg.add_input_port(ImageViewerCell, "File", basicModules.File)
# FIXME we need an ImageOutput module defined for this, but this
# probably requires an Image type as well...
#
# ImageOutput.register_output_mode(ImageFileToSpreadsheet)
|
import json
import os
import tempfile
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils.encoding import smart_unicode
import mock
from nose import SkipTest
from nose.tools import eq_, ok_
from PIL import Image
from pyquery import PyQuery as pq
from tower import strip_whitespace
from waffle.models import Switch
import amo
import amo.tests
from access.models import Group, GroupUser
from addons.models import (Addon, AddonCategory, AddonDeviceType, AddonUser,
Category)
from amo.helpers import absolutify
from amo.tests import assert_required, formset, initial
from amo.tests.test_helpers import get_image_path
from amo.urlresolvers import reverse
from constants.applications import DEVICE_TYPES
from devhub.models import ActivityLog
from editors.models import RereviewQueue
from lib.video.tests import files as video_files
from translations.models import Translation
from users.models import UserProfile
from versions.models import Version
import mkt
from mkt.comm.models import CommunicationNote
from mkt.constants import regions
from mkt.constants.ratingsbodies import RATINGS_BODIES
from mkt.site.fixtures import fixture
from mkt.webapps.models import AddonExcludedRegion as AER, ContentRating
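# Canned manifest response used to stub out `urllib2.urlopen` in these tests.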
response_mock = mock.Mock()
response_mock.read.return_value = '''
{
"name": "Something Ballin!",
"description": "Goin' hard in the paint.",
"launch_path": "/ballin/4.eva",
"developer": {
"name": "Pro Balliner",
"url": "http://www.ballin4eva.xxx"
},
"icons": {
"128": "/ballin/icon.png"
},
"installs_allowed_from": [ "https://marketplace.firefox.com" ]
}
'''
response_mock.headers = {'Content-Type':
'application/x-web-app-manifest+json'}
def get_section_url(addon, section, edit=False):
args = [addon.app_slug, section]
if edit:
args.append('edit')
return reverse('mkt.developers.apps.section', args=args)
class TestEdit(amo.tests.TestCase):
fixtures = fixture('group_admin', 'user_999', 'user_admin',
'user_admin_group', 'webapp_337141')
def setUp(self):
self.webapp = self.get_webapp()
self.url = self.webapp.get_dev_url()
self.user = UserProfile.objects.get(username='31337')
assert self.client.login(username=self.user.email, password='password')
def get_webapp(self):
return Addon.objects.no_cache().get(id=337141)
def get_url(self, section, edit=False):
return get_section_url(self.webapp, section, edit)
def get_dict(self, **kw):
fs = formset(self.cat_initial, initial_count=1)
result = {'name': 'new name', 'slug': 'test_slug',
'description': 'new description'}
result.update(**kw)
result.update(fs)
return result
def compare(self, data):
"""Compare an app against a `dict` of expected values."""
mapping = {
'regions': 'get_region_ids'
}
webapp = self.get_webapp()
for k, v in data.iteritems():
k = mapping.get(k, k)
val = getattr(webapp, k, '')
if callable(val):
val = val()
if val is None:
val = ''
eq_(unicode(val), unicode(v))
def compare_features(self, data, version=None):
"""
Compare an app's set of required features against a `dict` of expected
values.
"""
if not version:
version = self.get_webapp().current_version
features = version.features
for k, v in data.iteritems():
val = getattr(features, k)
if callable(val):
val = val()
eq_(unicode(val), unicode(v))
def check_form_url(self, section):
# Check form destinations and "Edit" button.
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
eq_(doc('form').attr('action'), self.edit_url)
eq_(doc('h2 .button').attr('data-editurl'), self.edit_url)
# Check "Cancel" button.
r = self.client.get(self.edit_url)
eq_(pq(r.content)('form .addon-edit-cancel').attr('href'), self.url)
class TestEditListingWebapp(TestEdit):
fixtures = fixture('webapp_337141')
@mock.patch.object(settings, 'APP_PREVIEW', False)
def test_apps_context(self):
r = self.client.get(self.url)
eq_(r.context['webapp'], True)
eq_(pq(r.content)('title').text(),
'Edit Listing | %s | Firefox Marketplace' % self.webapp.name)
def test_redirect(self):
r = self.client.get(self.url.replace('edit', ''))
self.assert3xx(r, self.url)
def test_nav_links(self):
r = self.client.get(self.url)
doc = pq(r.content)('.edit-addon-nav')
eq_(doc.length, 2)
eq_(doc('.view-stats').length, 0)
def test_edit_with_no_current_version(self):
# Disable file for latest version, and then update app.current_version.
app = self.get_webapp()
app.versions.latest().all_files[0].update(status=amo.STATUS_DISABLED)
app.update_version()
# Now try to display edit page.
r = self.client.get(self.url)
eq_(r.status_code, 200)
@mock.patch.object(settings, 'TASK_USER_ID', 999)
class TestEditBasic(TestEdit):
fixtures = TestEdit.fixtures
def setUp(self):
super(TestEditBasic, self).setUp()
self.cat = Category.objects.create(name='Games', type=amo.ADDON_WEBAPP)
self.dtype = DEVICE_TYPES.keys()[0]
AddonCategory.objects.create(addon=self.webapp, category=self.cat)
AddonDeviceType.objects.create(addon=self.webapp,
device_type=self.dtype)
self.url = self.get_url('basic')
self.edit_url = self.get_url('basic', edit=True)
def get_webapp(self):
return Addon.objects.get(id=337141)
def get_dict(self, **kw):
result = {'device_types': self.dtype, 'slug': 'NeW_SluG',
'description': 'New description with <em>html</em>!',
'manifest_url': self.webapp.manifest_url,
'categories': [self.cat.id]}
result.update(**kw)
return result
def test_form_url(self):
self.check_form_url('basic')
def test_apps_context(self):
eq_(self.client.get(self.url).context['webapp'], True)
def test_appslug_visible(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(pq(r.content)('#slug_edit').remove('a, em').text(),
absolutify(u'/\u2026/%s' % self.webapp.app_slug))
def test_edit_slug_success(self):
data = self.get_dict()
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
eq_(r.status_code, 200)
webapp = self.get_webapp()
eq_(webapp.app_slug, data['slug'].lower())
# Make sure only the app_slug changed.
eq_(webapp.slug, self.webapp.slug)
def test_edit_slug_max_length(self):
r = self.client.post(self.edit_url, self.get_dict(slug='x' * 31))
self.assertFormError(r, 'form', 'slug',
'Ensure this value has at most 30 characters (it has 31).')
def test_edit_slug_dupe(self):
Addon.objects.create(type=amo.ADDON_WEBAPP, app_slug='dupe')
r = self.client.post(self.edit_url, self.get_dict(slug='dupe'))
self.assertFormError(r, 'form', 'slug',
'This slug is already in use. Please choose another.')
webapp = self.get_webapp()
# Nothing changed.
eq_(webapp.slug, self.webapp.slug)
eq_(webapp.app_slug, self.webapp.app_slug)
def test_edit_xss(self):
self.webapp.description = ("This\n<b>IS</b>"
"<script>alert('awesome')</script>")
self.webapp.save()
r = self.client.get(self.url)
        eq_(pq(r.content)('#addon-description span[lang]').html(),
            "This<br/><b>IS</b>&lt;script&gt;alert('awesome')"
            '&lt;/script&gt;')
@mock.patch('devhub.tasks.urllib2.urlopen')
@mock.patch('devhub.tasks.validator')
    def test_view_manifest_url_default(self, validator, mock_urlopen):
mock_urlopen.return_value = response_mock
validator.return_value = '{}'
# Should be able to see manifest URL listed.
r = self.client.get(self.url)
eq_(pq(r.content)('#manifest-url a').attr('href'),
self.webapp.manifest_url)
# There should be a readonly text field.
r = self.client.get(self.edit_url)
row = pq(r.content)('#manifest-url')
eq_(row.find('input[name=manifest_url][readonly]').length, 1)
# POST with the new manifest URL.
url = 'https://ballin.com/ballin4eva'
r = self.client.post(self.edit_url, self.get_dict(manifest_url=url))
self.assertNoFormErrors(r)
# The manifest should remain unchanged since this is disabled for
# non-admins.
eq_(self.get_webapp().manifest_url, self.webapp.manifest_url)
def test_view_edit_manifest_url_empty(self):
# Empty manifest should throw an error.
r = self.client.post(self.edit_url, self.get_dict(manifest_url=''))
form = r.context['form']
assert 'manifest_url' in form.errors
assert 'This field is required' in form.errors['manifest_url'][0]
@mock.patch('devhub.tasks.urllib2.urlopen')
@mock.patch('devhub.tasks.validator')
    def test_view_admin_edit_manifest_url(self, validator, mock_urlopen):
mock_urlopen.return_value = response_mock
validator.return_value = '{}'
self.client.login(username='admin@mozilla.com', password='password')
# Should be able to see manifest URL listed.
r = self.client.get(self.url)
eq_(pq(r.content)('#manifest-url a').attr('href'),
self.webapp.manifest_url)
# Admins can edit the manifest URL and should see a text field.
r = self.client.get(self.edit_url)
row = pq(r.content)('#manifest-url')
eq_(row.find('input[name=manifest_url]').length, 1)
eq_(row.find('input[name=manifest_url][readonly]').length, 0)
# POST with the new manifest URL.
url = 'https://ballin.com/ballin4eva.webapp'
r = self.client.post(self.edit_url, self.get_dict(manifest_url=url))
self.assertNoFormErrors(r)
self.webapp = self.get_webapp()
eq_(self.webapp.manifest_url, url)
eq_(self.webapp.app_domain, 'https://ballin.com')
eq_(self.webapp.current_version.version, '1.0')
eq_(self.webapp.versions.count(), 1)
@mock.patch('devhub.tasks.urllib2.urlopen')
def test_view_manifest_changed_dupe_app_domain(self, mock_urlopen):
mock_urlopen.return_value = response_mock
Switch.objects.create(name='webapps-unique-by-domain', active=True)
amo.tests.app_factory(name='Super Duper',
app_domain='https://ballin.com')
self.client.login(username='admin@mozilla.com', password='password')
# POST with the new manifest URL.
url = 'https://ballin.com/ballin4eva.webapp'
r = self.client.post(self.edit_url, self.get_dict(manifest_url=url))
form = r.context['form']
assert 'manifest_url' in form.errors
assert 'one app per domain' in form.errors['manifest_url'][0]
eq_(self.get_webapp().manifest_url, self.webapp.manifest_url,
'Manifest URL should not have been changed!')
@mock.patch('devhub.tasks.urllib2.urlopen')
@mock.patch('devhub.tasks.validator')
    def test_view_manifest_changed_same_domain_diff_path(self, validator,
                                                         mock_urlopen):
mock_urlopen.return_value = response_mock
validator.return_value = ''
Switch.objects.create(name='webapps-unique-by-domain', active=True)
self.client.login(username='admin@mozilla.com', password='password')
# POST with the new manifest URL for same domain but w/ different path.
data = self.get_dict(manifest_url=self.webapp.manifest_url + 'xxx')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
eq_(self.get_webapp().manifest_url, self.webapp.manifest_url + 'xxx',
'Manifest URL should have changed!')
def test_view_manifest_url_changed(self):
new_url = 'http://omg.org/yes'
self.webapp.manifest_url = new_url
self.webapp.save()
# If we change the `manifest_url` manually, the URL here should change.
r = self.client.get(self.url)
eq_(pq(r.content)('#manifest-url a').attr('href'), new_url)
def test_categories_listed(self):
r = self.client.get(self.url)
eq_(pq(r.content)('#addon-categories-edit').text(),
unicode(self.cat.name))
r = self.client.post(self.url)
eq_(pq(r.content)('#addon-categories-edit').text(),
unicode(self.cat.name))
def test_edit_categories_add(self):
new = Category.objects.create(name='Books', type=amo.ADDON_WEBAPP)
cats = [self.cat.id, new.id]
self.client.post(self.edit_url, self.get_dict(categories=cats))
app_cats = self.get_webapp().categories.values_list('id', flat=True)
eq_(sorted(app_cats), cats)
def test_edit_categories_addandremove(self):
new = Category.objects.create(name='Books', type=amo.ADDON_WEBAPP)
cats = [new.id]
self.client.post(self.edit_url, self.get_dict(categories=cats))
app_cats = self.get_webapp().categories.values_list('id', flat=True)
eq_(sorted(app_cats), cats)
@mock.patch('mkt.webapps.models.Webapp.save')
def test_edit_categories_required(self, save):
r = self.client.post(self.edit_url, self.get_dict(categories=[]))
assert_required(r.context['cat_form'].errors['categories'][0])
assert not save.called
def test_edit_categories_xss(self):
new = Category.objects.create(name='<script>alert("xss");</script>',
type=amo.ADDON_WEBAPP)
cats = [self.cat.id, new.id]
r = self.client.post(self.edit_url, self.get_dict(categories=cats))
        assert '<script>alert' not in r.content
        assert '&lt;script&gt;alert' in r.content
def test_edit_categories_nonexistent(self):
r = self.client.post(self.edit_url, self.get_dict(categories=[100]))
eq_(r.context['cat_form'].errors['categories'],
['Select a valid choice. 100 is not one of the available '
'choices.'])
def test_edit_categories_max(self):
new1 = Category.objects.create(name='Books', type=amo.ADDON_WEBAPP)
new2 = Category.objects.create(name='Lifestyle', type=amo.ADDON_WEBAPP)
cats = [self.cat.id, new1.id, new2.id]
r = self.client.post(self.edit_url, self.get_dict(categories=cats))
eq_(r.context['cat_form'].errors['categories'],
['You can have only 2 categories.'])
def test_edit_check_description(self):
# Make sure bug 629779 doesn't return.
r = self.client.post(self.edit_url, self.get_dict())
eq_(r.status_code, 200)
eq_(self.get_webapp().description, self.get_dict()['description'])
def test_edit_slug_valid(self):
old_edit = self.edit_url
data = self.get_dict(slug='valid')
r = self.client.post(self.edit_url, data)
doc = pq(r.content)
assert doc('form').attr('action') != old_edit
def test_edit_as_developer(self):
self.client.login(username='regular@mozilla.com', password='password')
data = self.get_dict()
r = self.client.post(self.edit_url, data)
# Make sure we get errors when they are just regular users.
eq_(r.status_code, 403)
AddonUser.objects.create(addon=self.webapp, user_id=999,
role=amo.AUTHOR_ROLE_DEV)
r = self.client.post(self.edit_url, data)
eq_(r.status_code, 200)
webapp = self.get_webapp()
eq_(unicode(webapp.app_slug), data['slug'].lower())
eq_(unicode(webapp.description), data['description'])
def test_l10n(self):
self.webapp.update(default_locale='en-US')
url = self.webapp.get_dev_url('edit')
r = self.client.get(url)
eq_(pq(r.content)('#l10n-menu').attr('data-default'), 'en-us',
'l10n menu not visible for %s' % url)
def test_l10n_not_us(self):
self.webapp.update(default_locale='fr')
url = self.webapp.get_dev_url('edit')
r = self.client.get(url)
eq_(pq(r.content)('#l10n-menu').attr('data-default'), 'fr',
'l10n menu not visible for %s' % url)
def test_edit_l10n(self):
data = {
'slug': self.webapp.app_slug,
'manifest_url': self.webapp.manifest_url,
'categories': [self.cat.id],
'description_en-us': u'Nêw english description',
'description_fr': u'Nëw french description',
'releasenotes_en-us': u'Nëw english release notes',
'releasenotes_fr': u'Nêw french release notes'
}
res = self.client.post(self.edit_url, data)
eq_(res.status_code, 200)
self.webapp = self.get_webapp()
version = self.webapp.current_version.reload()
desc_id = self.webapp.description_id
notes_id = version.releasenotes_id
eq_(self.webapp.description, data['description_en-us'])
eq_(version.releasenotes, data['releasenotes_en-us'])
eq_(unicode(Translation.objects.get(id=desc_id, locale='fr')),
data['description_fr'])
eq_(unicode(Translation.objects.get(id=desc_id, locale='en-us')),
data['description_en-us'])
eq_(unicode(Translation.objects.get(id=notes_id, locale='fr')),
data['releasenotes_fr'])
eq_(unicode(Translation.objects.get(id=notes_id, locale='en-us')),
data['releasenotes_en-us'])
@mock.patch('mkt.developers.views._update_manifest')
def test_refresh(self, fetch):
self.client.login(username='steamcube@mozilla.com',
password='password')
url = reverse('mkt.developers.apps.refresh_manifest',
args=[self.webapp.app_slug])
r = self.client.post(url)
eq_(r.status_code, 204)
fetch.assert_called_once_with(self.webapp.pk, True, {})
@mock.patch('mkt.developers.views._update_manifest')
def test_refresh_dev_only(self, fetch):
self.client.login(username='regular@mozilla.com',
password='password')
url = reverse('mkt.developers.apps.refresh_manifest',
args=[self.webapp.app_slug])
r = self.client.post(url)
eq_(r.status_code, 403)
eq_(fetch.called, 0)
def test_view_developer_name(self):
r = self.client.get(self.url)
developer_name = self.webapp.current_version.developer_name
content = smart_unicode(r.content)
eq_(pq(content)('#developer-name td').html().strip(), developer_name)
def test_view_developer_name_xss(self):
version = self.webapp.current_version
version._developer_name = '<script>alert("xss-devname")</script>'
version.save()
r = self.client.get(self.url)
        assert '<script>alert' not in r.content
        assert '&lt;script&gt;alert' in r.content
def test_edit_packaged(self):
self.get_webapp().update(is_packaged=True)
data = self.get_dict()
data.pop('manifest_url')
r = self.client.post(self.edit_url, data)
eq_(r.status_code, 200)
eq_(r.context['editable'], False)
eq_(self.get_webapp().description, self.get_dict()['description'])
def test_edit_basic_not_public(self):
# Disable file for latest version, and then update app.current_version.
app = self.get_webapp()
app.versions.latest().all_files[0].update(status=amo.STATUS_DISABLED)
app.update_version()
# Now try to display edit page.
r = self.client.get(self.url)
eq_(r.status_code, 200)
def test_view_release_notes(self):
version = self.webapp.current_version
version.releasenotes = u'Chëese !'
version.save()
res = self.client.get(self.url)
eq_(res.status_code, 200)
content = smart_unicode(res.content)
eq_(pq(content)('#releasenotes td span[lang]').html().strip(),
version.releasenotes)
self.webapp.update(is_packaged=True)
res = self.client.get(self.url)
eq_(res.status_code, 200)
content = smart_unicode(res.content)
eq_(pq(content)('#releasenotes').length, 0)
def test_edit_release_notes(self):
self.webapp.previews.create()
self.webapp.support_email = 'test@example.com'
self.webapp.save()
data = self.get_dict(releasenotes=u'I can hâz release notes')
res = self.client.post(self.edit_url, data)
releasenotes = self.webapp.current_version.reload().releasenotes
eq_(res.status_code, 200)
eq_(releasenotes, data['releasenotes'])
def test_edit_release_notes_packaged(self):
# You are not supposed to edit release notes from the basic edit
        # page if your app is packaged. Instead, this is done from the version
# edit page.
self.webapp.update(is_packaged=True)
data = self.get_dict(releasenotes=u'I can not hâz release notes')
res = self.client.post(self.edit_url, data)
releasenotes = self.webapp.current_version.reload().releasenotes
eq_(res.status_code, 200)
eq_(releasenotes, None)
def test_view_releasenotes_xss(self):
version = self.webapp.current_version
version.releasenotes = '<script>alert("xss-devname")</script>'
version.save()
r = self.client.get(self.url)
        assert '<script>alert' not in r.content
        assert '&lt;script&gt;alert' in r.content
class TestEditCountryLanguage(TestEdit):
def get_webapp(self):
return Addon.objects.get(id=337141)
def test_data_visible(self):
clean_countries = []
self.get_webapp().current_version.update(supported_locales='de,es')
res = self.client.get(self.url)
eq_(res.status_code, 200)
countries = (pq(pq(res.content)('#edit-app-language tr').eq(0))
.find('td').remove('small').text())
langs = (pq(pq(res.content)('#edit-app-language tr').eq(1)).find('td')
.remove('small').text())
for c in countries.split(', '):
clean_countries.append(strip_whitespace(c))
        eq_(langs, u'English (US) (default), Deutsch, Espa\xf1ol')
self.assertSetEqual(
sorted(clean_countries),
sorted([r.name.decode() for r in regions.ALL_REGIONS]))
class TestEditMedia(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditMedia, self).setUp()
self.url = self.get_url('media')
self.edit_url = self.get_url('media', True)
self.icon_upload = self.webapp.get_dev_url('upload_icon')
self.preview_upload = self.webapp.get_dev_url('upload_preview')
patches = {
'ADDON_ICONS_PATH': tempfile.mkdtemp(),
'PREVIEW_THUMBNAIL_PATH': tempfile.mkstemp()[1] + '%s/%d.png',
}
for k, v in patches.iteritems():
patcher = mock.patch.object(settings, k, v)
patcher.start()
self.addCleanup(patcher.stop)
def formset_new_form(self, *args, **kw):
ctx = self.client.get(self.edit_url).context
blank = initial(ctx['preview_form'].forms[-1])
blank.update(**kw)
return blank
def formset_media(self, prev_blank=None, *args, **kw):
prev_blank = prev_blank or {}
kw.setdefault('initial_count', 0)
kw.setdefault('prefix', 'files')
# Preview formset.
fs = formset(*list(args) + [self.formset_new_form(**prev_blank)], **kw)
return dict((k, '' if v is None else v) for k, v in fs.items())
def new_preview_hash(self):
# At least one screenshot is required.
src_image = open(get_image_path('preview.jpg'), 'rb')
r = self.client.post(self.preview_upload,
dict(upload_image=src_image))
return {'upload_hash': json.loads(r.content)['upload_hash']}
def test_form_url(self):
self.check_form_url('media')
def test_edit_defaulticon(self):
data = dict(icon_type='')
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
assert webapp.get_icon_url(128).endswith('icons/default-128.png')
assert webapp.get_icon_url(64).endswith('icons/default-64.png')
for k in data:
eq_(unicode(getattr(webapp, k)), data[k])
def test_edit_preuploadedicon(self):
data = dict(icon_type='icon/appearance')
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
assert webapp.get_icon_url(64).endswith('icons/appearance-64.png')
assert webapp.get_icon_url(128).endswith('icons/appearance-128.png')
for k in data:
eq_(unicode(getattr(webapp, k)), data[k])
def test_edit_uploadedicon(self):
img = get_image_path('mozilla-sq.png')
src_image = open(img, 'rb')
response = self.client.post(self.icon_upload,
dict(upload_image=src_image))
response_json = json.loads(response.content)
webapp = self.get_webapp()
# Now, save the form so it gets moved properly.
data = dict(icon_type='image/png',
icon_upload_hash=response_json['upload_hash'])
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
# Unfortunate hardcoding of URL.
url = webapp.get_icon_url(64)
assert ('addon_icons/%s/%s' % (webapp.id / 1000, webapp.id)) in url, (
'Unexpected path: %r' % url)
eq_(data['icon_type'], 'image/png')
# Check that it was actually uploaded.
dirname = os.path.join(settings.ADDON_ICONS_PATH,
'%s' % (webapp.id / 1000))
dest = os.path.join(dirname, '%s-32.png' % webapp.id)
eq_(storage.exists(dest), True)
eq_(Image.open(storage.open(dest)).size, (32, 32))
def test_edit_icon_log(self):
self.test_edit_uploadedicon()
log = ActivityLog.objects.all()
eq_(log.count(), 1)
eq_(log[0].action, amo.LOG.CHANGE_ICON.id)
def test_edit_uploadedicon_noresize(self):
img = '%s/img/mkt/logos/128.png' % settings.MEDIA_ROOT
src_image = open(img, 'rb')
data = dict(upload_image=src_image)
response = self.client.post(self.icon_upload, data)
response_json = json.loads(response.content)
webapp = self.get_webapp()
# Now, save the form so it gets moved properly.
data = dict(icon_type='image/png',
icon_upload_hash=response_json['upload_hash'])
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
# Unfortunate hardcoding of URL.
addon_url = webapp.get_icon_url(64).split('?')[0]
end = 'addon_icons/%s/%s-64.png' % (webapp.id / 1000, webapp.id)
assert addon_url.endswith(end), 'Unexpected path: %r' % addon_url
eq_(data['icon_type'], 'image/png')
# Check that it was actually uploaded.
dirname = os.path.join(settings.ADDON_ICONS_PATH,
'%s' % (webapp.id / 1000))
dest = os.path.join(dirname, '%s-64.png' % webapp.id)
assert storage.exists(dest), dest
eq_(Image.open(storage.open(dest)).size, (64, 64))
def test_media_types(self):
res = self.client.get(self.get_url('media', edit=True))
doc = pq(res.content)
eq_(doc('#id_icon_upload').attr('data-allowed-types'),
'image/jpeg|image/png')
eq_(doc('.screenshot_upload').attr('data-allowed-types'),
'image/jpeg|image/png|video/webm')
def check_image_type(self, url, msg):
img = '%s/js/zamboni/devhub.js' % settings.MEDIA_ROOT
self.check_image_type_path(img, url, msg)
def check_image_type_path(self, img, url, msg):
src_image = open(img, 'rb')
res = self.client.post(url, {'upload_image': src_image})
response_json = json.loads(res.content)
assert any(e == msg for e in response_json['errors']), (
response_json['errors'])
# The check_image_type method uploads js, so let's try sending that
# to ffmpeg to see what it thinks.
@mock.patch.object(amo, 'VIDEO_TYPES', ['application/javascript'])
def test_edit_video_wrong_type(self):
raise SkipTest
self.check_image_type(self.preview_upload, 'Videos must be in WebM.')
def test_edit_icon_wrong_type(self):
self.check_image_type(self.icon_upload,
'Icons must be either PNG or JPG.')
def test_edit_screenshot_wrong_type(self):
self.check_image_type(self.preview_upload,
'Images must be either PNG or JPG.')
def setup_image_status(self):
self.icon_dest = os.path.join(self.webapp.get_icon_dir(),
'%s-64.png' % self.webapp.id)
os.makedirs(os.path.dirname(self.icon_dest))
open(self.icon_dest, 'w')
self.preview = self.webapp.previews.create()
self.preview.save()
os.makedirs(os.path.dirname(self.preview.thumbnail_path))
open(self.preview.thumbnail_path, 'w')
self.url = self.webapp.get_dev_url('ajax.image.status')
def test_icon_square(self):
img = get_image_path('mozilla.png')
self.check_image_type_path(img, self.icon_upload,
'Icons must be square.')
def test_icon_status_no_choice(self):
self.webapp.update(icon_type='')
url = self.webapp.get_dev_url('ajax.image.status')
result = json.loads(self.client.get(url).content)
assert result['icons']
def test_icon_status_works(self):
self.setup_image_status()
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_icon_status_fails(self):
self.setup_image_status()
os.remove(self.icon_dest)
result = json.loads(self.client.get(self.url).content)
assert not result['icons']
def test_preview_status_works(self):
self.setup_image_status()
result = json.loads(self.client.get(self.url).content)
assert result['previews']
# No previews means that all the images are done.
self.webapp.previews.all().delete()
result = json.loads(self.client.get(self.url).content)
assert result['previews']
def test_preview_status_fails(self):
self.setup_image_status()
os.remove(self.preview.thumbnail_path)
result = json.loads(self.client.get(self.url).content)
assert not result['previews']
def test_image_status_persona(self):
self.setup_image_status()
os.remove(self.icon_dest)
self.webapp.update(type=amo.ADDON_PERSONA)
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_image_status_default(self):
self.setup_image_status()
os.remove(self.icon_dest)
self.webapp.update(icon_type='icon/photos')
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_icon_size_req(self):
filehandle = open(get_image_path('sunbird-small.png'), 'rb')
res = self.client.post(self.icon_upload, {'upload_image': filehandle})
response_json = json.loads(res.content)
assert any(e == 'Icons must be at least 128px by 128px.' for e in
response_json['errors'])
def check_image_animated(self, url, msg):
filehandle = open(get_image_path('animated.png'), 'rb')
res = self.client.post(url, {'upload_image': filehandle})
response_json = json.loads(res.content)
assert any(e == msg for e in response_json['errors'])
def test_icon_animated(self):
self.check_image_animated(self.icon_upload,
'Icons cannot be animated.')
def test_screenshot_animated(self):
self.check_image_animated(self.preview_upload,
'Images cannot be animated.')
@mock.patch('lib.video.ffmpeg.Video')
@mock.patch('mkt.developers.utils.video_library')
    def add(self, handle, video_library, Video, num=1):
data_formset = self.formset_media(upload_image=handle)
r = self.client.post(self.preview_upload, data_formset)
self.assertNoFormErrors(r)
upload_hash = json.loads(r.content)['upload_hash']
# Create and post with the formset.
fields = []
for i in xrange(num):
fields.append(self.formset_new_form(upload_hash=upload_hash,
position=i))
data_formset = self.formset_media(*fields)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
def preview_add(self, num=1):
self.add(open(get_image_path('preview.jpg'), 'rb'), num=num)
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def preview_video_add(self, num=1):
self.add(open(video_files['good'], 'rb'), num=num)
@mock.patch('lib.video.ffmpeg.Video')
@mock.patch('mkt.developers.utils.video_library')
    def add_json(self, handle, video_library, Video):
data_formset = self.formset_media(upload_image=handle)
result = self.client.post(self.preview_upload, data_formset)
return json.loads(result.content)
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def test_edit_preview_video_add_hash(self):
res = self.add_json(open(video_files['good'], 'rb'))
assert not res['errors'], res['errors']
assert res['upload_hash'].endswith('.video-webm'), res['upload_hash']
def test_edit_preview_add_hash(self):
res = self.add_json(open(get_image_path('preview.jpg'), 'rb'))
assert res['upload_hash'].endswith('.image-jpeg'), res['upload_hash']
def test_edit_preview_add_hash_size(self):
res = self.add_json(open(get_image_path('mozilla.png'), 'rb'))
assert any(e.startswith('App previews ') for e in res['errors']), (
'Small screenshot not flagged for size.')
@mock.patch.object(settings, 'MAX_VIDEO_UPLOAD_SIZE', 1)
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def test_edit_preview_video_size(self):
res = self.add_json(open(video_files['good'], 'rb'))
assert any(e.startswith('Please use files smaller than')
for e in res['errors']), (res['errors'])
@mock.patch('lib.video.tasks.resize_video')
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def test_edit_preview_video_add(self, resize_video):
eq_(self.get_webapp().previews.count(), 0)
self.preview_video_add()
eq_(self.get_webapp().previews.count(), 1)
def test_edit_preview_add(self):
eq_(self.get_webapp().previews.count(), 0)
self.preview_add()
eq_(self.get_webapp().previews.count(), 1)
def test_edit_preview_edit(self):
self.preview_add()
preview = self.get_webapp().previews.all()[0]
edited = {'upload_hash': 'xxx',
'id': preview.id,
'position': preview.position,
'file_upload': None}
data_formset = self.formset_media(edited, initial_count=1)
self.client.post(self.edit_url, data_formset)
eq_(self.get_webapp().previews.count(), 1)
def test_edit_preview_reorder(self):
self.preview_add(3)
previews = list(self.get_webapp().previews.all())
base = dict(upload_hash='xxx', file_upload=None)
# Three preview forms were generated; mix them up here.
a = dict(position=1, id=previews[2].id)
b = dict(position=2, id=previews[0].id)
c = dict(position=3, id=previews[1].id)
a.update(base)
b.update(base)
c.update(base)
        # Add them in reverse order ("third", "second", "first").
data_formset = self.formset_media({}, *(c, b, a), initial_count=3)
eq_(data_formset['files-0-id'], previews[1].id)
eq_(data_formset['files-1-id'], previews[0].id)
eq_(data_formset['files-2-id'], previews[2].id)
self.client.post(self.edit_url, data_formset)
# They should come out "first", "second", "third".
eq_(self.get_webapp().previews.all()[0].id, previews[2].id)
eq_(self.get_webapp().previews.all()[1].id, previews[0].id)
eq_(self.get_webapp().previews.all()[2].id, previews[1].id)
def test_edit_preview_delete(self):
self.preview_add()
self.preview_add()
orig_previews = self.get_webapp().previews.all()
# Delete second preview. Keep the first.
edited = {'DELETE': 'checked',
'upload_hash': 'xxx',
'id': orig_previews[1].id,
'position': 0,
'file_upload': None}
ctx = self.client.get(self.edit_url).context
first = initial(ctx['preview_form'].forms[0])
first['upload_hash'] = 'xxx'
data_formset = self.formset_media(edited, *(first,), initial_count=2)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
# First one should still be there.
eq_(list(self.get_webapp().previews.all()), [orig_previews[0]])
def test_edit_preview_add_another(self):
self.preview_add()
self.preview_add()
eq_(self.get_webapp().previews.count(), 2)
def test_edit_preview_add_two(self):
self.preview_add(2)
eq_(self.get_webapp().previews.count(), 2)
def test_screenshot_video_required(self):
r = self.client.post(self.edit_url, self.formset_media())
eq_(r.context['preview_form'].non_form_errors(),
['You must upload at least one screenshot or video.'])
def test_screenshot_with_icon(self):
self.preview_add()
preview = self.get_webapp().previews.all()[0]
edited = {'upload_hash': '', 'id': preview.id}
data_formset = self.formset_media(edited, initial_count=1)
data_formset.update(icon_type='image/png', icon_upload_hash='')
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
class TestEditDetails(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditDetails, self).setUp()
self.url = self.get_url('details')
self.edit_url = self.get_url('details', edit=True)
def get_dict(self, **kw):
data = dict(default_locale='en-US',
homepage='http://twitter.com/fligtarsmom',
privacy_policy="fligtar's mom does <em>not</em> share "
"your data with third parties.")
data.update(kw)
return data
def test_form_url(self):
self.check_form_url('details')
def test_edit(self):
data = self.get_dict()
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
self.compare(data)
def test_privacy_policy_xss(self):
self.webapp.privacy_policy = ("We\n<b>own</b>your"
"<script>alert('soul')</script>")
self.webapp.save()
r = self.client.get(self.url)
        eq_(pq(r.content)('#addon-privacy-policy span[lang]').html(),
            "We<br/><b>own</b>your&lt;script&gt;"
            "alert('soul')&lt;/script&gt;")
def test_edit_exclude_optional_fields(self):
data = self.get_dict()
data.update(default_locale='en-US', homepage='',
privacy_policy='we sell your data to everyone')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
self.compare(data)
def test_edit_default_locale_required_trans(self):
# name and description are required in the new locale.
data = self.get_dict()
data.update(description='bullocks',
homepage='http://omg.org/yes',
privacy_policy='your data is delicious')
fields = ['name', 'description']
error = ('Before changing your default locale you must have a name '
'and description in that locale. You are missing %s.')
missing = lambda f: error % ', '.join(map(repr, f))
data.update(default_locale='pt-BR')
r = self.client.post(self.edit_url, data)
self.assertFormError(r, 'form', None, missing(fields))
# Now we have a name.
self.webapp.name = {'pt-BR': 'pt-BR name'}
self.webapp.save()
fields.remove('name')
r = self.client.post(self.edit_url, data)
self.assertFormError(r, 'form', None, missing(fields))
def test_edit_default_locale_frontend_error(self):
data = self.get_dict()
data.update(description='xx', homepage='http://google.com',
default_locale='pt-BR', privacy_policy='pp')
rp = self.client.post(self.edit_url, data)
self.assertContains(rp,
'Before changing your default locale you must')
def test_edit_locale(self):
self.webapp.update(default_locale='en-US')
r = self.client.get(self.url)
eq_(pq(r.content)('.addon_edit_locale').eq(0).text(),
'English (US)')
def test_homepage_url_optional(self):
r = self.client.post(self.edit_url, self.get_dict(homepage=''))
self.assertNoFormErrors(r)
def test_homepage_url_invalid(self):
r = self.client.post(self.edit_url,
self.get_dict(homepage='xxx'))
self.assertFormError(r, 'form', 'homepage', 'Enter a valid URL.')
def test_games_already_excluded_in_brazil(self):
AER.objects.create(addon=self.webapp, region=mkt.regions.BR.id)
games = Category.objects.create(type=amo.ADDON_WEBAPP, slug='games')
r = self.client.post(
self.edit_url, self.get_dict(categories=[games.id]))
self.assertNoFormErrors(r)
eq_(list(AER.objects.filter(addon=self.webapp)
.values_list('region', flat=True)),
[mkt.regions.BR.id])
class TestEditSupport(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditSupport, self).setUp()
self.url = self.get_url('support')
self.edit_url = self.get_url('support', edit=True)
def test_form_url(self):
self.check_form_url('support')
def test_edit_support(self):
data = dict(support_email='sjobs@apple.com',
support_url='http://apple.com/')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
self.compare(data)
def test_edit_support_free_required(self):
r = self.client.post(self.edit_url, dict(support_url=''))
self.assertFormError(r, 'form', 'support_email',
'This field is required.')
def test_edit_support_premium_required(self):
self.get_webapp().update(premium_type=amo.ADDON_PREMIUM)
r = self.client.post(self.edit_url, dict(support_url=''))
self.assertFormError(r, 'form', 'support_email',
'This field is required.')
def test_edit_support_premium(self):
self.get_webapp().update(premium_type=amo.ADDON_PREMIUM)
data = dict(support_email='sjobs@apple.com',
support_url='')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
eq_(self.get_webapp().support_email, data['support_email'])
def test_edit_support_url_optional(self):
data = dict(support_email='sjobs@apple.com', support_url='')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
self.compare(data)
class TestEditTechnical(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditTechnical, self).setUp()
self.url = self.get_url('technical')
self.edit_url = self.get_url('technical', edit=True)
def test_form_url(self):
self.check_form_url('technical')
def test_toggles(self):
# Turn everything on.
r = self.client.post(self.edit_url, formset(**{'flash': 'on'}))
self.assertNoFormErrors(r)
self.compare({'uses_flash': True})
# And off.
r = self.client.post(self.edit_url, formset(**{'flash': ''}))
self.compare({'uses_flash': False})
def test_public_stats(self):
o = ActivityLog.objects
eq_(o.count(), 0)
eq_(self.webapp.public_stats, False)
assert not self.webapp.public_stats, (
'Unexpectedly found public stats for app. Says Basta.')
r = self.client.post(self.edit_url, formset(public_stats=True))
self.assertNoFormErrors(r)
self.compare({'public_stats': True})
eq_(o.filter(action=amo.LOG.EDIT_PROPERTIES.id).count(), 1)
def test_features_hosted(self):
data_on = {'has_contacts': True}
data_off = {'has_contacts': False}
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
# Turn contacts on.
r = self.client.post(self.edit_url, formset(**data_on))
self.assertNoFormErrors(r)
self.compare_features(data_on)
# And turn it back off.
r = self.client.post(self.edit_url, formset(**data_off))
self.assertNoFormErrors(r)
self.compare_features(data_off)
# Changing features must trigger re-review.
assert RereviewQueue.objects.filter(addon=self.webapp).exists()
def test_features_hosted_app_disabled(self):
# Reject the app.
app = self.get_webapp()
app.update(status=amo.STATUS_REJECTED)
app.versions.latest().all_files[0].update(status=amo.STATUS_DISABLED)
app.update_version()
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
data_on = {'has_contacts': True}
data_off = {'has_contacts': False}
        # Display the technical edit page.
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
# Turn contacts on.
r = self.client.post(self.edit_url, formset(**data_on))
app = self.get_webapp()
self.assertNoFormErrors(r)
self.compare_features(data_on, version=app.latest_version)
        # Display the technical edit page again; is the feature on?
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
ok_(pq(r.content)('#id_has_contacts:checked'))
# And turn it back off.
r = self.client.post(self.edit_url, formset(**data_off))
app = self.get_webapp()
self.assertNoFormErrors(r)
self.compare_features(data_off, version=app.latest_version)
# Changing features on a rejected app must NOT trigger re-review.
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
class TestAdmin(TestEdit):
fixtures = TestEdit.fixtures
def setUp(self):
super(TestAdmin, self).setUp()
self.url = self.get_url('admin')
self.edit_url = self.get_url('admin', edit=True)
self.webapp = self.get_webapp()
assert self.client.login(username='admin@mozilla.com',
password='password')
def log_in_user(self):
assert self.client.login(username=self.user.email, password='password')
def log_in_with(self, rules):
user = UserProfile.objects.get(email='regular@mozilla.com')
group = Group.objects.create(name='Whatever', rules=rules)
GroupUser.objects.create(group=group, user=user)
assert self.client.login(username=user.email, password='password')
class TestAdminSettings(TestAdmin):
fixtures = TestEdit.fixtures
def test_form_url(self):
self.check_form_url('admin')
def test_overview_visible_as_admin(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(pq(r.content)('form').length, 1)
assert not r.context.get('form'), (
'Admin Settings form should not be in context')
def test_overview_forbidden_for_nonadmin(self):
self.log_in_user()
eq_(self.client.head(self.url).status_code, 403)
def test_edit_get_as_admin(self):
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
eq_(pq(r.content)('form').length, 1)
assert r.context.get('form'), 'Admin Settings form expected in context'
def test_edit_post_as_admin(self):
# There are errors, but I don't care. I just want to see if I can POST.
eq_(self.client.post(self.edit_url).status_code, 200)
def test_edit_no_get_as_nonadmin(self):
self.log_in_user()
eq_(self.client.get(self.edit_url).status_code, 403)
def test_edit_no_post_as_nonadmin(self):
self.log_in_user()
eq_(self.client.post(self.edit_url).status_code, 403)
def post_contact(self, **kw):
data = {'position': '1',
'upload_hash': 'abcdef',
'mozilla_contact': 'a@mozilla.com'}
data.update(kw)
return self.client.post(self.edit_url, data)
def test_mozilla_contact(self):
self.post_contact()
webapp = self.get_webapp()
eq_(webapp.mozilla_contact, 'a@mozilla.com')
def test_mozilla_contact_invalid(self):
r = self.post_contact(
mozilla_contact='<script>alert("xss")</script>@mozilla.com')
webapp = self.get_webapp()
self.assertFormError(r, 'form', 'mozilla_contact',
'Enter a valid e-mail address.')
eq_(webapp.mozilla_contact, '')
def test_staff(self):
# Staff and Support Staff should have Apps:Configure.
self.log_in_with('Apps:Configure')
# Test GET.
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
eq_(pq(r.content)('form').length, 1)
assert r.context.get('form'), 'Admin Settings form expected in context'
# Test POST. Ignore errors.
eq_(self.client.post(self.edit_url).status_code, 200)
def test_developer(self):
# Developers have read-only on admin section.
self.log_in_with('Apps:ViewConfiguration')
# Test GET.
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
eq_(pq(r.content)('form').length, 1)
assert r.context.get('form'), 'Admin Settings form expected in context'
# Test POST. Ignore errors.
eq_(self.client.post(self.edit_url).status_code, 403)
def test_ratings_edit_add(self):
# TODO: Test AdminSettingsForm in test_forms.py.
self.log_in_with('Apps:Configure')
data = {'position': '1',
'upload_hash': 'abcdef',
'app_ratings': '2'
}
r = self.client.post(self.edit_url, data)
eq_(r.status_code, 200)
webapp = self.get_webapp()
eq_(list(webapp.content_ratings.values_list('ratings_body', 'rating')),
[(0, 2)])
def test_ratings_edit_add_dupe(self):
self.log_in_with('Apps:Configure')
data = {'position': '1',
'upload_hash': 'abcdef',
'app_ratings': ('1', '2')
}
r = self.client.post(self.edit_url, data)
self.assertFormError(r, 'form', 'app_ratings',
'Only one rating from each ratings body '
'may be selected.')
def test_ratings_edit_update(self):
self.log_in_with('Apps:Configure')
webapp = self.get_webapp()
ContentRating.objects.create(addon=webapp, ratings_body=0, rating=2)
data = {'position': '1',
'upload_hash': 'abcdef',
'app_ratings': '3',
}
r = self.client.post(self.edit_url, data)
eq_(r.status_code, 200)
eq_(list(webapp.content_ratings.all().values_list('ratings_body',
'rating')),
[(0, 3)])
        # A second update doesn't duplicate existing ratings.
r = self.client.post(self.edit_url, data)
eq_(list(webapp.content_ratings.all().values_list('ratings_body',
'rating')),
[(0, 3)])
del data['app_ratings']
r = self.client.post(self.edit_url, data)
assert not webapp.content_ratings.exists()
def test_ratings_view(self):
self.log_in_with('Apps:ViewConfiguration')
webapp = self.get_webapp()
ContentRating.objects.create(addon=webapp, ratings_body=0, rating=2)
r = self.client.get(self.url)
txt = pq(r.content)[0].xpath(
"//label[@for='app_ratings']/../../td/div/text()")[0]
eq_(txt,
'%s - %s' % (RATINGS_BODIES[0].name,
RATINGS_BODIES[0].ratings[2].name))
def test_banner_region_view(self):
self.log_in_with('Apps:ViewConfiguration')
geodata = self.get_webapp().geodata
geodata.banner_message = u'Exclusive message ! Only for AR/BR !'
geodata.banner_regions = [mkt.regions.BR.id, mkt.regions.AR.id]
geodata.save()
res = self.client.get(self.url)
eq_(pq(res.content)('#id_banner_message').text(),
unicode(geodata.banner_message))
eq_(pq(res.content)('#id_banner_regions').text(), u'Argentina, Brazil')
def test_banner_region_edit(self):
self.log_in_with('Apps:ViewConfiguration')
geodata = self.webapp.geodata
geodata.banner_message = u'Exclusive message ! Only for AR/BR !'
geodata.banner_regions = [mkt.regions.BR.id, mkt.regions.AR.id]
geodata.save()
AER.objects.create(addon=self.webapp, region=mkt.regions.US.id)
res = self.client.get(self.edit_url)
eq_(res.status_code, 200)
doc = pq(res.content)
inputs = doc.find('input[type=checkbox][name=banner_regions]')
eq_(inputs.length, len(mkt.regions.REGIONS_CHOICES_ID))
checked = doc.find('#id_banner_regions input[type=checkbox]:checked')
eq_(checked.length, 2)
eq_(checked[0].name, 'banner_regions')
eq_(checked[0].value, unicode(mkt.regions.AR.id))
eq_(pq(checked[0]).parents('li').attr('data-region'),
unicode(mkt.regions.AR.id))
eq_(checked[1].name, 'banner_regions')
eq_(checked[1].value, unicode(mkt.regions.BR.id))
eq_(pq(checked[1]).parents('li').attr('data-region'),
unicode(mkt.regions.BR.id))
disabled = doc.find('#id_banner_regions input[type=checkbox]:disabled')
eq_(disabled.length, 1)
eq_(disabled[0].value, None)
eq_(disabled.parents('li').attr('data-region'),
unicode(mkt.regions.US.id))
def test_banner_region_edit_post(self):
data = {
'position': 1, # Required, useless in this test.
'banner_regions': [unicode(mkt.regions.BR.id),
unicode(mkt.regions.SPAIN.id)],
'banner_message_en-us': u'Oh Hai.',
}
res = self.client.post(self.edit_url, data)
eq_(res.status_code, 200)
geodata = self.webapp.geodata.reload()
eq_(geodata.banner_message, data['banner_message_en-us'])
eq_(geodata.banner_regions, [mkt.regions.BR.id, mkt.regions.SPAIN.id])
class TestPromoUpload(TestAdmin):
fixtures = TestEdit.fixtures
def post(self, **kw):
data = {'position': '1',
'upload_hash': 'abcdef'}
data.update(kw)
self.client.post(self.edit_url, data)
def test_add(self):
self.post()
webapp = self.get_webapp()
eq_(webapp.previews.count(), 1)
eq_(list(webapp.get_previews()), [])
promo = webapp.get_promo()
eq_(promo.position, -1)
def test_delete(self):
self.post()
assert self.get_webapp().get_promo()
self.post(DELETE=True)
assert not self.get_webapp().get_promo()
class TestEditVersion(TestEdit):
fixtures = fixture('group_admin', 'user_999', 'user_admin',
'user_admin_group', 'webapp_337141')
def setUp(self):
self.webapp = self.get_webapp()
self.webapp.update(is_packaged=True)
self.version_pk = self.webapp.latest_version.pk
self.url = reverse('mkt.developers.apps.versions.edit', kwargs={
'version_id': self.version_pk,
'app_slug': self.webapp.app_slug
})
self.user = UserProfile.objects.get(username='31337')
self.login(self.user)
def test_post(self, **kwargs):
data = {'releasenotes_init': '',
'releasenotes_en-us': 'Hot new version',
'approvalnotes': 'The release notes are true.',
'has_audio': False,
'has_apps': False}
data.update(kwargs)
req = self.client.post(self.url, data)
eq_(req.status_code, 302)
version = Version.objects.no_cache().get(pk=self.version_pk)
eq_(version.releasenotes, data['releasenotes_en-us'])
eq_(version.approvalnotes, data['approvalnotes'])
return version
def test_comm_thread(self):
self.create_switch('comm-dashboard')
self.test_post(approvalnotes='abc')
notes = CommunicationNote.objects.all()
eq_(notes.count(), 1)
eq_(notes[0].body, 'abc')
def test_existing_features_initial_form_data(self):
features = self.webapp.current_version.features
features.update(has_audio=True, has_apps=True)
r = self.client.get(self.url)
eq_(r.context['appfeatures_form'].initial,
dict(id=features.id, **features.to_dict()))
def test_new_features(self):
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
# Turn a feature on.
version = self.test_post(has_audio=True)
ok_(version.features.has_audio)
ok_(not version.features.has_apps)
# Then turn the feature off.
version = self.test_post(has_audio=False)
ok_(not version.features.has_audio)
ok_(not version.features.has_apps)
# Changing features must trigger re-review.
assert RereviewQueue.objects.filter(addon=self.webapp).exists()
def test_correct_version_features(self):
new_version = self.webapp.latest_version.update(id=self.version_pk + 1)
self.webapp.update(_latest_version=new_version)
self.test_new_features()
def test_publish_checkbox_presence(self):
res = self.client.get(self.url)
ok_(not pq(res.content)('#id_publish_immediately'))
self.webapp.latest_version.files.update(status=amo.STATUS_PENDING)
res = self.client.get(self.url)
ok_(pq(res.content)('#id_publish_immediately'))
|
import os
def get_package_data():
paths_test = [os.path.join('data', '*.txt')]
return {'astroquery.ogle.tests': paths_test}
|
import rospy
import random
import genmsg.msgs
import genpy.dynamic
def get_sub_defs(msg_fqn, msg_txt):
def_dict = {}
defs = msg_txt.split("\n" + "="*80 + "\n")
def_dict[msg_fqn] = defs[0]
for d in defs[1:]:
lines = d.splitlines()
if not lines[0].startswith("MSG: "):
raise Exception("Invalid sub definition!")
        msg_type = lines[0][5:].strip()
        def_txt = "\n".join(lines[1:])
        def_dict[msg_type] = def_txt
return def_dict
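# Illustrative example (hypothetical message names): for a concatenated
# definition such as
#     txt = "pkg/Point p" + "\n" + "="*80 + "\n" + "MSG: pkg/Point\nfloat64 x"
# get_sub_defs("pkg/Top", txt) returns
#     {"pkg/Top": "pkg/Point p", "pkg/Point": "float64 x"}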
class RandomMsgGen(object):
def randstr(self, length=0):
if length == 0:
length = self.rand.randint(3,10)
return ''.join([chr(self.rand.randint(ord('a'), ord('z'))) for i in range(length)])
def __init__(self, seed, num_topics, duration):
self.message_defs = {}
self.message_dict = {}
self.topic_dict = {}
self.duration = duration
self.output = []
self.rand = random.Random(seed)
for i in range(num_topics):
msg_pkg = self.randstr()
msg_name = self.randstr()
msg_fqn = "%s/%s"%(msg_pkg,msg_name)
msg_fields = []
msg_def = ""
msg_sub_defs = {}
for j in range(self.rand.randint(3,5)):
field_name = self.randstr()
field_type = self.rand.choice(genmsg.msgs.BUILTIN_TYPES + list(self.message_defs.keys()))
field_array = self.rand.choice(5*[""]+["[]","[%d]"%self.rand.randint(1,10)])
if (field_type not in genmsg.msgs.BUILTIN_TYPES):
tmp = get_sub_defs(field_type, self.message_defs[field_type])
for (sm_type, sm_def) in tmp.items():
msg_sub_defs[sm_type] = sm_def
msg_def = msg_def + "%s%s %s\n"%(field_type, field_array, field_name)
for (t,d) in msg_sub_defs.items():
msg_def = msg_def + "\n" + "="*80 + "\n"
msg_def = msg_def + "MSG: %s\n"%(t)
msg_def = msg_def + d
self.message_defs[msg_fqn] = msg_def
topic_name = self.randstr()
self.message_dict[msg_fqn] = genpy.dynamic.generate_dynamic(msg_fqn, msg_def)[msg_fqn]
self.topic_dict[topic_name] = self.message_dict[msg_fqn]
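        # Build a randomized publication schedule of (topic, message, time)
        # tuples; inter-arrival times are drawn uniformly from [0, 10) ms.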
time = 0.0
while time < duration:
topic = self.rand.choice(list(self.topic_dict.keys()))
msg_inst = self.rand_value(self.topic_dict[topic]._type)
self.output.append((topic, msg_inst, time))
time = time + self.rand.random()*.01
def topics(self):
return self.topic_dict.items()
def messages(self):
for m in self.output:
yield m
def message_count(self):
return len(self.output)
def rand_value(self, field_type):
if field_type == 'bool':
return self.rand.randint(0,1)
elif field_type == 'byte':
return self.rand.randint(0,2**7-1)
elif field_type == 'int8':
return self.rand.randint(-2**7,2**7-1)
elif field_type == 'int16':
return self.rand.randint(-2**15,2**15-1)
elif field_type == 'int32':
return self.rand.randint(-2**31,2**31-1)
elif field_type == 'int64':
return self.rand.randint(-2**63,2**63-1)
elif field_type == 'char':
return self.rand.randint(0,2**8-1)
elif field_type == 'uint8':
return self.rand.randint(0,2**8-1)
elif field_type == 'uint16':
return self.rand.randint(0,2**16-1)
elif field_type == 'uint32':
return self.rand.randint(0,2**32-1)
elif field_type == 'uint64':
return self.rand.randint(0,2**64-1)
elif field_type == 'float32':
return self.rand.random()
elif field_type == 'float64':
return self.rand.random()
elif field_type == 'string':
return self.randstr(100)
elif field_type == 'duration':
return rospy.Duration.from_sec(self.rand.random())
elif field_type == 'time':
return rospy.Time.from_sec(self.rand.random()*1000)
elif field_type.endswith(']'): # array type
base_type, is_array, array_len = genmsg.msgs.parse_type(field_type)
if array_len is None:
array_len = self.rand.randint(1,100)
            # Handle built-in element types inline rather than recursing once
            # per element. Compare against base_type, since field_type still
            # carries the array suffix (e.g. "int32[]") and would never match.
            if base_type == 'bool':
                return [ self.rand.randint(0,1) for i in range(0,array_len) ]
            elif base_type == 'byte':
                return [ self.rand.randint(-2**7,2**7-1) for i in range(0,array_len) ]
            elif base_type == 'int8':
                return [ self.rand.randint(-2**7,2**7-1) for i in range(0,array_len) ]
            elif base_type == 'int16':
                return [ self.rand.randint(-2**15,2**15-1) for i in range(0,array_len) ]
            elif base_type == 'int32':
                return [ self.rand.randint(-2**31,2**31-1) for i in range(0,array_len) ]
            elif base_type == 'int64':
                return [ self.rand.randint(-2**63,2**63-1) for i in range(0,array_len) ]
            elif base_type == 'char':
                return [ self.rand.randint(0,2**8-1) for i in range(0,array_len) ]
            elif base_type == 'uint8':
                return [ self.rand.randint(0,2**8-1) for i in range(0,array_len) ]
            elif base_type == 'uint16':
                return [ self.rand.randint(0,2**16-1) for i in range(0,array_len) ]
            elif base_type == 'uint32':
                return [ self.rand.randint(0,2**32-1) for i in range(0,array_len) ]
            elif base_type == 'uint64':
                return [ self.rand.randint(0,2**64-1) for i in range(0,array_len) ]
            elif base_type == 'float32':
                return [ self.rand.random() for i in range(0,array_len) ]
            elif base_type == 'float64':
                return [ self.rand.random() for i in range(0,array_len) ]
            elif base_type == 'string':
                return [ self.randstr(100) for i in range(0,array_len) ]
            elif base_type == 'duration':
                return [ rospy.Duration.from_sec(self.rand.random()) for i in range(0,array_len) ]
            elif base_type == 'time':
                return [ rospy.Time.from_sec(self.rand.random()*1000) for i in range(0,array_len) ]
            else:
                return [ self.rand_value(base_type) for i in range(0,array_len) ]
else:
msg_class = self.message_dict[field_type]
msg_inst = msg_class()
for s in msg_inst.__slots__:
ind = msg_inst.__slots__.index(s)
msg_inst.__setattr__(s,self.rand_value(msg_inst._slot_types[ind]))
return msg_inst
|
from rest_framework import serializers
from commons.serializers import (TimeStampedSerializerMixin, MetaDataSerializer,
IdSerializerMixin, TitleSerializerMixin,
IdTitleSerializerMixin)
from .fields import NoteResourceUriField
from commons.fields import UnixEpochDateField
class BaseNoteSerializer(serializers.Serializer):
description = serializers.CharField(max_length=255)
class NoteReadSerializer(IdSerializerMixin, BaseNoteSerializer,
TimeStampedSerializerMixin):
resource_uri = NoteResourceUriField(view_name='note-detail',
read_only=True)
class NoteWriteSerializer(BaseNoteSerializer):
pass
class NoteUpdateSerializer(BaseNoteSerializer):
pass
class TaskReadSerializer(IdTitleSerializerMixin, TimeStampedSerializerMixin):
is_archived = serializers.BooleanField(default=False)
is_completed = serializers.BooleanField(default=False)
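    # Note: '%s' is the (platform-dependent) strftime code for seconds since
    # the Unix epoch, so the fields below serialize as epoch timestamps.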
due_date = serializers.DateTimeField(format='%s')
reminder = serializers.DateTimeField(format='%s')
notes = NoteReadSerializer(many=True)
created_by = serializers.IntegerField()
class TaskWriteSerializer(TitleSerializerMixin):
is_archived = serializers.BooleanField(default=False)
is_completed = serializers.BooleanField(default=False)
due_date = serializers.DateTimeField(format='%s', required=False)
reminder = serializers.DateTimeField(format='%s', required=False)
class TaskUpdateSerializer(TitleSerializerMixin):
is_archived = serializers.BooleanField(default=False)
is_completed = serializers.BooleanField(default=False)
due_date = UnixEpochDateField(required=False, allow_null=True)
reminder = UnixEpochDateField(required=False, allow_null=True)
class TaskReadListSerializer(IdTitleSerializerMixin,
TimeStampedSerializerMixin):
is_archived = serializers.BooleanField(default=False)
is_completed = serializers.BooleanField(default=False)
due_date = serializers.DateTimeField(format='%s')
reminder = serializers.DateTimeField(format='%s')
created_by = serializers.IntegerField()
class TaskListSerializer(serializers.Serializer):
meta = MetaDataSerializer()
objects = TaskReadListSerializer(many=True)
|
'''
To run a Bokeh application on a Bokeh server from a single Python script,
pass the script name to ``bokeh serve`` on the command line:
.. code-block:: sh
bokeh serve app_script.py
By default, the Bokeh application will be served by the Bokeh server on a
default port ({DEFAULT_PORT}) at localhost, under the path ``/app_script``,
i.e.,
.. code-block:: none
http://localhost:{DEFAULT_PORT}/app_script
It is also possible to run the same command with Jupyter notebooks:
.. code-block:: sh
bokeh serve app_notebook.ipynb
This will generate the same results as described above for a Python script,
and the application will be served on the default port ({DEFAULT_PORT})
at localhost, under the path ``/app_notebook``.
Applications can also be created from directories. The directory should
contain a ``main.py`` (and any other helper modules that are required) as
well as any additional assets (e.g., theme files). Pass the directory name
to ``bokeh serve`` to run the application:
.. code-block:: sh
bokeh serve app_dir
It is possible to run multiple applications at once:
.. code-block:: sh
bokeh serve app_script.py app_dir
If you would like to automatically open a browser to display the HTML
page(s), you can pass the ``--show`` option on the command line:
.. code-block:: sh
bokeh serve app_script.py app_dir --show
This will open two pages, for ``/app_script`` and ``/app_dir``,
respectively.
If you would like to pass command line arguments to Bokeh applications,
you can pass the ``--args`` option as the LAST option on the command
line:
.. code-block:: sh
bokeh serve app_script.py myapp.py --args foo bar --baz
Everything that follows ``--args`` will be included in ``sys.argv`` when
the application runs. In this case, when ``myapp.py`` executes, the
contents of ``sys.argv`` will be ``['myapp.py', 'foo', 'bar', '--baz']``,
consistent with standard Python expectations for ``sys.argv``.
Note that if multiple scripts or directories are provided, they
all receive the same set of command line arguments (if any) given by
``--args``.
Network Configuration
~~~~~~~~~~~~~~~~~~~~~
To control the port that the Bokeh server listens on, use the ``--port``
argument:
.. code-block:: sh
bokeh serve app_script.py --port=8080
Similarly, a specific network address can be specified with the
``--address`` argument. For example:
.. code-block:: sh
bokeh serve app_script.py --address=0.0.0.0
will have the Bokeh server listen on all available network addresses.
Additionally, it is possible to configure a host whitelist that must be
matched by the ``Host`` header in new requests. You can specify multiple
acceptable host values with the ``--host`` option:
.. code-block:: sh
bokeh serve app_script.py --host foo.com:8081 --host bar.com
If no port is specified in a host value, then port 80 will be used. In
the example above Bokeh server will accept requests from ``foo.com:8081``
and ``bar.com:80``.
If no host values are specified, then by default the Bokeh server will
accept requests from ``localhost:<port>`` where ``<port>`` is the port
that the server is configured to listen on (by default: {DEFAULT_PORT}).
If an asterisk ``*`` is used in the host value then it will be treated as a
wildcard:
.. code-block:: sh
bokeh serve app_script.py --address=0.0.0.0 --host='*'
Using the wildcard can be helpful when testing applications that are deployed
with cloud orchestration tools and when the public endpoint is not known ahead
of time: for instance if the public IP is dynamically allocated during the
deployment process and no public DNS has been configured for the testing
environment.
As a warning, using permissive host values like ``*`` may be insecure and open
your application to HTTP host header attacks. Production deployments should
always set the ``--host`` flag to use the DNS name of the public endpoint such
as a TLS-enabled load balancer or reverse proxy that serves the application to
the end users.
Also note that the host whitelist applies to all request handlers,
including any extra ones added to extend the Bokeh server.
By default, cross site connections to the Bokeh server websocket are not
allowed. You can enable websocket connections originating from additional
hosts by specifying them with the ``--allow-websocket-origin`` option:
.. code-block:: sh
bokeh serve app_script.py --allow-websocket-origin foo.com:8081
It is possible to specify multiple allowed websocket origins by adding
the ``--allow-websocket-origin`` option multiple times.
The Bokeh server can also add an optional prefix to all URL paths.
This can often be useful in conjunction with "reverse proxy" setups.
.. code-block:: sh
bokeh serve app_script.py --prefix=foobar
Then the application will be served under the following URL:
.. code-block:: none
http://localhost:{DEFAULT_PORT}/foobar/app_script
If needed, Bokeh server can send keep-alive pings at a fixed interval.
To configure this feature, set the ``--keep-alive`` option:
.. code-block:: sh
bokeh serve app_script.py --keep-alive 10000
The value is specified in milliseconds. The default keep-alive interval
is 37 seconds. Give a value of 0 to disable keep-alive pings.
To control how often statistics are logged, set the
``--stats-log-frequency`` option:
.. code-block:: sh
bokeh serve app_script.py --stats-log-frequency 30000
The value is specified in milliseconds. The default interval for
logging stats is 15 seconds. Only positive integer values are accepted.
To have the Bokeh server override the remote IP and URI scheme/protocol for
all requests with ``X-Real-Ip``, ``X-Forwarded-For``, ``X-Scheme``,
``X-Forwarded-Proto`` headers (if they are provided), set the
``--use-xheaders`` option:
.. code-block:: sh
bokeh serve app_script.py --use-xheaders
This is typically needed when running a Bokeh server behind a reverse proxy
that is SSL-terminated.
.. warning::
It is not advised to set this option on a Bokeh server directly facing
the Internet.
Session ID Options
~~~~~~~~~~~~~~~~~~
Typically, each browser tab connected to a Bokeh server will have
its own session ID. When the server generates an ID, it will make
it cryptographically unguessable. This keeps users from accessing
one another's sessions.
To control who can use a Bokeh application, the server can sign
sessions with a secret key and reject "made up" session
names. There are three modes, controlled by the ``--session-ids``
argument:
.. code-block:: sh
bokeh serve app_script.py --session-ids=signed
The available modes are: {SESSION_ID_MODES}
In ``unsigned`` mode, the server will accept any session ID
provided to it in the URL. For example,
``http://localhost/app_script?bokeh-session-id=foo`` will create a
session ``foo``. In ``unsigned`` mode, if the session ID isn't
provided with ``?bokeh-session-id=`` in the URL, the server will
still generate a cryptographically-unguessable ID. However, the
server allows clients to create guessable or deliberately-shared
sessions if they want to.
``unsigned`` mode is most useful when the server is running
locally for development, for example you can have multiple
processes access a fixed session name such as
``default``. ``unsigned`` mode is also convenient because there's
no need to generate or configure a secret key.
In ``signed`` mode, the session ID must be in a special format and
signed with a secret key. Attempts to use the application with an
invalid session ID will fail, but if no ``?bokeh-session-id=``
parameter is provided, the server will generate a fresh, signed
session ID. The result of ``signed`` mode is that only secure
session IDs are allowed but anyone can connect to the server.
In ``external-signed`` mode, the session ID must be signed but the
server itself won't generate a session ID; the
``?bokeh-session-id=`` parameter will be required. To use this
mode, you would need some sort of external process (such as
another web app) which would use the
``bokeh.util.session_id.generate_session_id()`` function to create
valid session IDs. The external process and the Bokeh server must
share the same ``BOKEH_SECRET_KEY`` environment variable.
``external-signed`` mode is useful if you want another process to
authenticate access to the Bokeh server; if someone is permitted
to use the Bokeh application, you would generate a session ID for
them, then redirect them to the Bokeh server with that valid
session ID. If you don't generate a session ID for someone, then
they can't load the app from the Bokeh server.
In both ``signed`` and ``external-signed`` mode, the secret key
must be kept secret; anyone with the key can generate a valid
session ID.
The secret key should be set in a ``BOKEH_SECRET_KEY`` environment
variable and should be a cryptographically random string with at
least 256 bits (32 bytes) of entropy. You can generate a new
secret key with the ``bokeh secret`` command.
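For illustration, a minimal sketch of the ``external-signed`` flow (the
exact wiring of the external process will vary by deployment): both
processes share one key, and the external process mints the session IDs:
.. code-block:: sh
    export BOKEH_SECRET_KEY=`bokeh secret`
    bokeh serve app_script.py --session-ids=external-signed
    # meanwhile, in the external process (same key in its environment):
    python -c "from bokeh.util.session_id import generate_session_id; print(generate_session_id())"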
Session Expiration Options
~~~~~~~~~~~~~~~~~~~~~~~~~~
To configure how often to check for unused sessions, set the
``--check-unused-sessions`` option:
.. code-block:: sh
bokeh serve app_script.py --check-unused-sessions 10000
The value is specified in milliseconds. The default interval for
checking for unused sessions is 17 seconds. Only positive integer
values are accepted.
To configure how long unused sessions last, set the
``--unused-session-lifetime`` option:
.. code-block:: sh
bokeh serve app_script.py --unused-session-lifetime 60000
The value is specified in milliseconds. The default lifetime interval
for unused sessions is 15 seconds. Only positive integer values are
accepted.
Logging Options
~~~~~~~~~~~~~~~
The logging level can be controlled by the ``--log-level`` argument:
.. code-block:: sh
bokeh serve app_script.py --log-level=debug
The available log levels are: {LOGLEVELS}
The log format can be controlled by the ``--log-format`` argument:
.. code-block:: sh
bokeh serve app_script.py --log-format="%(levelname)s: %(message)s"
The default log format is ``"{DEFAULT_LOG_FORMAT}"``
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
import argparse
from bokeh.application import Application
from bokeh.resources import DEFAULT_SERVER_PORT
from bokeh.server.server import Server
from bokeh.util.string import nice_join
from bokeh.settings import settings
from os import getpid
from ..subcommand import Subcommand
from ..util import build_single_handler_applications, die
LOGLEVELS = ('debug', 'info', 'warning', 'error', 'critical')
SESSION_ID_MODES = ('unsigned', 'signed', 'external-signed')
DEFAULT_LOG_FORMAT = "%(asctime)s %(message)s"
__doc__ = __doc__.format(
DEFAULT_PORT=DEFAULT_SERVER_PORT,
LOGLEVELS=nice_join(LOGLEVELS),
SESSION_ID_MODES=nice_join(SESSION_ID_MODES),
DEFAULT_LOG_FORMAT=DEFAULT_LOG_FORMAT
)
base_serve_args = (
('--port', dict(
metavar = 'PORT',
type = int,
help = "Port to listen on",
default = None
)),
('--address', dict(
metavar = 'ADDRESS',
type = str,
help = "Address to listen on",
default = None,
)),
('--log-level', dict(
metavar = 'LOG-LEVEL',
action = 'store',
default = 'info',
choices = LOGLEVELS,
help = "One of: %s" % nice_join(LOGLEVELS),
)),
('--log-format', dict(
metavar ='LOG-FORMAT',
action = 'store',
default = DEFAULT_LOG_FORMAT,
help = "A standard Python logging format string (default: %r)" % DEFAULT_LOG_FORMAT.replace("%", "%%"),
)),
)
class Serve(Subcommand):
''' Subcommand to launch the Bokeh server.
'''
name = "serve"
help = "Run a Bokeh server hosting one or more applications"
args = base_serve_args + (
('files', dict(
metavar='DIRECTORY-OR-SCRIPT',
nargs='*',
help="The app directories or scripts to serve (serve empty document if not specified)",
default=None,
)),
('--args', dict(
metavar='COMMAND-LINE-ARGS',
nargs=argparse.REMAINDER,
help="Any command line arguments remaining are passed on to the application handler",
)),
('--develop', dict(
action='store_true',
help="Enable develop-time features that should not be used in production",
)),
('--show', dict(
action='store_true',
help="Open server app(s) in a browser",
)),
('--allow-websocket-origin', dict(
metavar='HOST[:PORT]',
action='append',
type=str,
help="Public hostnames which may connect to the Bokeh websocket",
)),
('--host', dict(
metavar='HOST[:PORT]',
action='append',
type=str,
help="Public hostnames to allow in requests",
)),
('--prefix', dict(
metavar='PREFIX',
type=str,
help="URL prefix for Bokeh server URLs",
default=None,
)),
('--keep-alive', dict(
metavar='MILLISECONDS',
type=int,
help="How often to send a keep-alive ping to clients, 0 to disable.",
default=None,
)),
('--check-unused-sessions', dict(
metavar='MILLISECONDS',
type=int,
help="How often to check for unused sessions",
default=None,
)),
('--unused-session-lifetime', dict(
metavar='MILLISECONDS',
type=int,
help="How long unused sessions last",
default=None,
)),
('--stats-log-frequency', dict(
metavar='MILLISECONDS',
type=int,
help="How often to log stats",
default=None,
)),
('--use-xheaders', dict(
action='store_true',
help="Prefer X-headers for IP/protocol information",
)),
('--session-ids', dict(
metavar='MODE',
action = 'store',
default = None,
choices = SESSION_ID_MODES,
help = "One of: %s" % nice_join(SESSION_ID_MODES),
)),
)
def invoke(self, args):
argvs = { f : args.args for f in args.files}
applications = build_single_handler_applications(args.files, argvs)
log_level = getattr(logging, args.log_level.upper())
logging.basicConfig(level=log_level, format=args.log_format)
if len(applications) == 0:
# create an empty application by default, typically used with output_server
applications['/'] = Application()
if args.keep_alive is not None:
if args.keep_alive == 0:
log.info("Keep-alive ping disabled")
else:
log.info("Keep-alive ping configured every %d milliseconds", args.keep_alive)
# rename to be compatible with Server
args.keep_alive_milliseconds = args.keep_alive
if args.check_unused_sessions is not None:
log.info("Check for unused sessions every %d milliseconds", args.check_unused_sessions)
# rename to be compatible with Server
args.check_unused_sessions_milliseconds = args.check_unused_sessions
if args.unused_session_lifetime is not None:
log.info("Unused sessions last for %d milliseconds", args.unused_session_lifetime)
# rename to be compatible with Server
args.unused_session_lifetime_milliseconds = args.unused_session_lifetime
if args.stats_log_frequency is not None:
log.info("Log statistics every %d milliseconds", args.stats_log_frequency)
# rename to be compatible with Server
args.stats_log_frequency_milliseconds = args.stats_log_frequency
server_kwargs = { key: getattr(args, key) for key in ['port',
'address',
'allow_websocket_origin',
'host',
'prefix',
'develop',
'keep_alive_milliseconds',
'check_unused_sessions_milliseconds',
'unused_session_lifetime_milliseconds',
'stats_log_frequency_milliseconds',
'use_xheaders',
]
if getattr(args, key, None) is not None }
server_kwargs['sign_sessions'] = settings.sign_sessions()
server_kwargs['secret_key'] = settings.secret_key_bytes()
server_kwargs['generate_session_ids'] = True
if args.session_ids is None:
# no --session-ids means use the env vars
pass
elif args.session_ids == 'unsigned':
server_kwargs['sign_sessions'] = False
elif args.session_ids == 'signed':
server_kwargs['sign_sessions'] = True
elif args.session_ids == 'external-signed':
server_kwargs['sign_sessions'] = True
server_kwargs['generate_session_ids'] = False
else:
raise RuntimeError("argparse should have filtered out --session-ids mode " +
args.session_ids)
if server_kwargs['sign_sessions'] and not server_kwargs['secret_key']:
die("To sign sessions, the BOKEH_SECRET_KEY environment variable must be set; " +
"the `bokeh secret` command can be used to generate a new key.")
server = Server(applications, **server_kwargs)
if args.show:
# we have to defer opening in browser until we start up the server
def show_callback():
for route in applications.keys():
server.show(route)
server.io_loop.add_callback(show_callback)
if args.develop:
log.info("Using develop mode (do not enable --develop in production)")
address_string = ''
if server.address is not None and server.address != '':
address_string = ' address ' + server.address
log.info("Starting Bokeh server on port %d%s with applications at paths %r",
server.port,
address_string,
sorted(applications.keys()))
log.info("Starting Bokeh server with process id: %d" % getpid())
server.start()
|
from django.contrib import admin
from connect.moderation.models import ModerationLogMsg
admin.site.register(ModerationLogMsg)
|
"""
from https://gist.github.com/bwhite/3726239
Information Retrieval metrics
Useful Resources:
http://www.cs.utexas.edu/~mooney/ir-course/slides/Evaluation.ppt
http://www.nii.ac.jp/TechReports/05-014E.pdf
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
http://hal.archives-ouvertes.fr/docs/00/72/67/60/PDF/07-busa-fekete.pdf
Learning to Rank for Information Retrieval (Tie-Yan Liu)
"""
import numpy as np
import scipy as sp
import scipy.stats
def mean_reciprocal_rank(rs):
"""Score is reciprocal of the rank of the first relevant item
First element is 'rank 1'. Relevance is binary (nonzero is relevant).
Example from http://en.wikipedia.org/wiki/Mean_reciprocal_rank
>>> rs = [[0, 0, 1], [0, 1, 0], [1, 0, 0]]
>>> mean_reciprocal_rank(rs)
0.61111111111111105
>>> rs = np.array([[0, 0, 0], [0, 1, 0], [1, 0, 0]])
>>> mean_reciprocal_rank(rs)
0.5
>>> rs = [[0, 0, 0, 1], [1, 0, 0], [1, 0, 0]]
>>> mean_reciprocal_rank(rs)
0.75
Args:
rs: Iterator of relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Mean reciprocal rank
"""
rs = (np.asarray(r).nonzero()[0] for r in rs)
return np.mean([1. / (r[0] + 1) if r.size else 0. for r in rs])
def r_precision(r):
"""Score is precision after all relevant documents have been retrieved
Relevance is binary (nonzero is relevant).
>>> r = [0, 0, 1]
>>> r_precision(r)
0.33333333333333331
>>> r = [0, 1, 0]
>>> r_precision(r)
0.5
>>> r = [1, 0, 0]
>>> r_precision(r)
1.0
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
R Precision
"""
r = np.asarray(r) != 0
z = r.nonzero()[0]
if not z.size:
return 0.
return np.mean(r[:z[-1] + 1])
def precision_at_k(r, k):
"""Score is precision @ k
Relevance is binary (nonzero is relevant).
>>> r = [0, 0, 1]
>>> precision_at_k(r, 1)
0.0
>>> precision_at_k(r, 2)
0.0
>>> precision_at_k(r, 3)
0.33333333333333331
>>> precision_at_k(r, 4)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
ValueError: Relevance score length < k
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Precision @ k
Raises:
ValueError: len(r) must be >= k
"""
assert k >= 1
r = np.asarray(r)[:k] != 0
if r.size != k:
raise ValueError('Relevance score length < k')
return np.mean(r)
def average_precision(r):
"""Score is average precision (area under PR curve)
Relevance is binary (nonzero is relevant).
>>> r = [1, 1, 0, 1, 0, 1, 0, 0, 0, 1]
>>> delta_r = 1. / sum(r)
>>> sum([sum(r[:x + 1]) / (x + 1.) * delta_r for x, y in enumerate(r) if y])
0.7833333333333333
>>> average_precision(r)
0.78333333333333333
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Average precision
"""
r = np.asarray(r) != 0
out = [precision_at_k(r, k + 1) for k in range(r.size) if r[k]]
if not out:
return 0.
return np.mean(out)
def mean_average_precision(rs):
"""Score is mean average precision
Relevance is binary (nonzero is relevant).
>>> rs = [[1, 1, 0, 1, 0, 1, 0, 0, 0, 1]]
>>> mean_average_precision(rs)
0.78333333333333333
>>> rs = [[1, 1, 0, 1, 0, 1, 0, 0, 0, 1], [0]]
>>> mean_average_precision(rs)
0.39166666666666666
Args:
rs: Iterator of relevance scores (list or numpy) in rank order
(first element is the first item)
Returns:
Mean average precision
"""
return np.mean([average_precision(r) for r in rs])
def dcg_at_k(r, k, method=0):
"""Score is discounted cumulative gain (dcg)
Relevance is positive real values. Can use binary
as the previous methods.
Example from
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
>>> r = [3, 2, 3, 0, 0, 1, 2, 2, 3, 0]
>>> dcg_at_k(r, 1)
3.0
>>> dcg_at_k(r, 1, method=1)
3.0
>>> dcg_at_k(r, 2)
5.0
>>> dcg_at_k(r, 2, method=1)
4.2618595071429155
>>> dcg_at_k(r, 10)
9.6051177391888114
>>> dcg_at_k(r, 11)
9.6051177391888114
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
method: If 0 then weights are [1.0, 1.0, 0.6309, 0.5, 0.4307, ...]
If 1 then weights are [1.0, 0.6309, 0.5, 0.4307, ...]
Returns:
Discounted cumulative gain
"""
r = np.asfarray(r)[:k]
if r.size:
if method == 0:
return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1)))
elif method == 1:
return np.sum(r / np.log2(np.arange(2, r.size + 2)))
else:
raise ValueError('method must be 0 or 1.')
return 0.
def ndcg_at_k(r, k, method=0):
"""Score is normalized discounted cumulative gain (ndcg)
Relevance is positive real values. Can use binary
as the previous methods.
Example from
http://www.stanford.edu/class/cs276/handouts/EvaluationNew-handout-6-per.pdf
>>> r = [3, 2, 3, 0, 0, 1, 2, 2, 3, 0]
>>> ndcg_at_k(r, 1)
1.0
>>> r = [2, 1, 2, 0]
>>> ndcg_at_k(r, 4)
0.9203032077642922
>>> ndcg_at_k(r, 4, method=1)
0.96519546960144276
>>> ndcg_at_k([0], 1)
0.0
>>> ndcg_at_k([1], 2)
1.0
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
method: If 0 then weights are [1.0, 1.0, 0.6309, 0.5, 0.4307, ...]
If 1 then weights are [1.0, 0.6309, 0.5, 0.4307, ...]
Returns:
Normalized discounted cumulative gain
"""
dcg_max = dcg_at_k(sorted(r, reverse=True), k, method)
if not dcg_max:
return 0.
return dcg_at_k(r, k, method) / dcg_max
def ndcg_at_k_ties(labels, predictions, k, method=0):
'''
    See McSherry et al. (2008) on how to efficiently compute NDCG with ties.
    labels are the ground truth.
'''
labels = labels.copy()
# NDCG requires the ground truth labels to be > 0 (the predictions can be negative)
# if np.any(labels < 0):
# labels += np.abs(labels.min() + 1e-5)
# this is the one with ties:
dcg = dcg_at_k_ties(labels, predictions, k, method=method)
# this one is the vanilla computation that ignores ties (and should match dcg_at_k_ties when no ties are present):
# highest-to-lowest of the true labels (i.e. best first)
dcg_max = dcg_at_k(sorted(labels, reverse=True), k, method)
# NOTE: I have checked that dcg_at_k_ties and dcg_at_k match when there are no ties, or ties in the labels
    if not dcg_max:
        return 0.
    ndcg = dcg / dcg_max
#assert ndcg >= (0.0 - 1e-8) and ndcg <= (1.0 + 1e-8), "ndcg should be between 0 and 1"
return ndcg
def dcg_at_k_ties(labels, predictions, k, method=0):
'''
    See McSherry et al. (2008) on how to efficiently compute NDCG
    (method=0 here) with ties.
    labels are what the "ground truth" judges assign;
    predictions are the algorithm predictions corresponding to each label.
    Also see http://en.wikipedia.org/wiki/Discounted_cumulative_gain for
    basic definitions.
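    For instance (method=0): with labels [3, 2, 1] and tied predictions
    [1, 1, 0], the two tied items contribute their average gain (3+2)/2 = 2.5
    against the summed discounts 1.0 + 1.0, and the untied item adds
    1*1/log2(3), so dcg = 2.5*2.0 + 0.6309... ~= 5.631.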
'''
assert len(labels) == len(predictions), "labels and predictions should be of same length"
assert k <= len(labels), "k should be <= len(labels)"
# order both labels and preds so that they are in order of decreasing predictive score
sorted_ind = np.argsort(predictions)[::-1]
predictions = predictions[sorted_ind]
labels = labels[sorted_ind]
def gain(label, method=0):
if method==0:
return label
elif method==1:
return 2**label-1.0
if method==0:
discount_factors = get_discount_factors(labels)
elif method==1:
raise Exception("need to implement: log_2(i+1)")
assert len(discount_factors) == len(labels), "discount factors has wrong length"
#step through, in current order (of decreasing predictions), accumulating tied gains (which may be singletons)
ii = 0
dcg = 0.0
while (ii < k):
current_pred = predictions[ii]
current_gain = gain(labels[ii])
        # initializing the tied cumulative variables
cum_tied_gain = current_gain
cum_tied_disc = discount_factors[ii]
num_ties = 1
ii += 1
# count number of ties
while (ii<len(predictions) and predictions[ii]==current_pred): #while tied
num_ties += 1.0
cum_tied_gain += gain(labels[ii])
if ii < k: cum_tied_disc += discount_factors[ii]
ii += 1
avg_gain = cum_tied_gain/num_ties
dcg += avg_gain*cum_tied_disc
    assert not np.isnan(dcg), "found nan dcg"
return dcg
def get_discount_factors(labels):
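    # Method-0 discount factors: [1.0, 1.0, 1/log2(3), 1/log2(4), ...];
    # the first two ranks are undiscounted, matching dcg_at_k's method=0 weights.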
ii_range = np.arange(len(labels)) + 1
discount_factors = np.concatenate((np.array([1.0]), 1.0/np.log2(ii_range[1:])))
return discount_factors
def rank_data(r, rground):
    # we checked this heavily, and it is correct; e.g. rground will go from largest rank to smallest
r = sp.stats.mstats.rankdata(r)
rground = sp.stats.mstats.rankdata(rground)
assert np.sum(r)==np.sum(rground), "ranks should add up to the same"
return r, rground
def dcg_alt(relevances, rank=20):
relevances = np.asarray(relevances)[:rank]
n_relevances = len(relevances)
if n_relevances == 0:
return 0.
discounts = np.log2(np.arange(n_relevances) + 2)
return np.sum(relevances / discounts)
def ndcg_alt(relevances, rank=20):
best_dcg = dcg_alt(sorted(relevances, reverse=True), rank)
if best_dcg == 0:
return 0.
return dcg_alt(relevances, rank) / best_dcg
if __name__ == "__main__":
# # e.g. where all predictions are the same
# labels = np.arange(30)
# predictions = np.ones(30)
# discount_factors = get_discount_factors(labels)
# avg_label = np.mean(labels)
# avg_label_vec = avg_label*np.ones((len(labels),1))
# for k in range(10):
# # one way
# dcg1 = np.dot(discount_factors[0:k,None].T, avg_label_vec[0:k])[0][0]
# # another way
# dcg2 = np.sum(discount_factors[0:k])*avg_label
# # using our function
# dcg3 = dcg_at_k_ties(labels,predictions,k)
# print "%f, %f, %f" % (dcg1, dcg2, dcg3)
# assert(np.abs(dcg1 - dcg2) < 1e-8)
# assert(np.abs(dcg2 - dcg3) < 1e-8)
# print "check out ok for case with all ties in predictions"
truth = np.array([3, 4, 2, 1, 0, 0, 0])
pred1 = np.array([3, 4, 2, 1, 0, 0, 0])
pred2 = np.array([2, 1, 3, 4, 5, 6, 7])
print ndcg_alt(truth[np.argsort(pred2)[::-1]], 5)
print ndcg_at_k(truth[np.argsort(pred2)[::-1]], 5, method=1)
print ndcg_at_k(truth[np.argsort(pred2)[::-1]], 5, method=0)
print ndcg_at_k_ties(truth, pred2, 5, method=1)
print ndcg_at_k_ties(truth, pred2, 5, method=0)
|
import unittest
from mongolite import Connection, Document, OperationFailure, BadIndexError, INDEX_GEO2D, INDEX_ASCENDING, INDEX_DESCENDING
class IndexTestCase(unittest.TestCase):
def setUp(self):
self.connection = Connection(safe=True)
self.col = self.connection['test']['mongolite']
def tearDown(self):
self.connection['test'].drop_collection('mongolite')
self.connection = None
def test_index_basic(self):
@self.connection.register
class Movie(Document):
skeleton = {
'other':{
'deep':unicode,
},
'notindexed':unicode,
}
optional = {
'standard':unicode,
}
indexes = [
{
'fields':[('standard',1),('other.deep',1)],
'unique':True,
},
]
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie['other']['deep'] = u'testdeep'
movie['notindexed'] = u'notthere'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name': 'standard_1_other.deep_1', 'unique':True})
assert item is not None, 'No Index Found'
movie = self.col.Movie()
movie['standard'] = u'test'
movie['other']['deep'] = u'testdeep'
self.assertRaises(OperationFailure, movie.save)
def test_index_basic_dynamic_collection(self):
@self.connection.register
class Movie(Document):
skeleton = {
'other':{
'deep':unicode,
},
'notindexed':unicode,
}
optional = {
'standard':unicode,
}
indexes = [
{
'fields':[('standard',1),('other.deep',1)],
'unique':True,
},
]
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie['other']['deep'] = u'testdeep'
movie['notindexed'] = u'notthere'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name': 'standard_1_other.deep_1', 'unique':True})
assert item is not None, 'No Index Found'
movie = self.col.Movie()
movie['standard'] = u'test'
movie['other']['deep'] = u'testdeep'
self.assertRaises(OperationFailure, movie.save)
self.connection.test.othercol.Movie.generate_indexes()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.othercol', 'name': 'standard_1_other.deep_1', 'unique':True})
assert item is not None, 'No Index Found'
def test_index_single_without_generation(self):
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
},
]
self.connection.register([Movie])
movie = self.col.Movie()
movie['standard'] = u'test'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
assert item is None, 'Index is found'
def test_index_single(self):
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
},
]
self.connection.register([Movie])
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
assert item is not None, 'No Index Found'
def test_index_multi(self):
class Movie(Document):
skeleton = {
'standard':unicode,
'other':{
'deep':unicode,
},
'notindexed':unicode,
'alsoindexed':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
},
{
'fields':[('alsoindexed', 1), ('other.deep', -1)],
'unique':True,
},
]
self.connection.register([Movie])
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
index2 = db['system.indexes'].find_one({'ns':'test.mongolite', 'name': 'alsoindexed_1_other.deep_-1', 'unique':True})
assert item is not None, 'No Index Found'
assert index2 is not None, 'Index not found'
movie = self.col.Movie()
movie['standard'] = u'test'
self.assertRaises(OperationFailure, movie.save)
def test_index_multi2(self):
class Movie(Document):
skeleton = {
'standard':unicode,
'other':{
'deep':unicode,
},
'notindexed':unicode,
'alsoindexed':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
},
{
'fields':'other.deep',
'unique':True,
},
]
self.connection.register([Movie])
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie['other']['deep'] = u'foo'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
index2 = db['system.indexes'].find_one({'ns':'test.mongolite', 'name': 'other.deep_1', 'unique':True})
assert item is not None, 'No Index Found'
assert index2 is not None, 'Index not found'
movie = self.col.Movie()
movie['standard'] = u'test'
self.assertRaises(OperationFailure, movie.save)
movie = self.col.Movie()
movie['other']['deep'] = u'foo'
self.assertRaises(OperationFailure, movie.save)
def test_index_direction(self):
class Movie(Document):
skeleton = {
'standard':unicode,
'other':{
'deep':unicode,
},
'notindexed':unicode,
'alsoindexed':unicode,
}
indexes = [
{
'fields':[('standard',INDEX_DESCENDING)],
'unique':True,
},
{
'fields':[('alsoindexed',INDEX_ASCENDING), ('other.deep',INDEX_DESCENDING)],
'unique':True,
},
]
self.connection.register([Movie])
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie.save()
db = self.connection.test
index1 = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_-1', 'unique':True})
index2 = db['system.indexes'].find_one({'ns':'test.mongolite', 'name': 'alsoindexed_1_other.deep_-1', 'unique':True})
assert index1 is not None, 'No Index Found'
assert index2 is not None, 'Index not found'
def test_index_direction_GEO2D(self):
class Movie(Document):
skeleton = {
'standard':unicode,
'other':{
'deep':unicode,
},
'notindexed':unicode,
'alsoindexed':unicode,
}
indexes = [
{
'fields':[('standard',INDEX_GEO2D)],
'unique':True,
},
{
'fields':[('alsoindexed',INDEX_GEO2D), ('other.deep',INDEX_DESCENDING)],
'unique':True,
},
]
self.connection.register([Movie])
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie.save()
db = self.connection.test
index1 = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_2d', 'unique':True})
index2 = db['system.indexes'].find_one({'ns':'test.mongolite', 'name': 'alsoindexed_2d_other.deep_-1', 'unique':True})
assert index1 is not None, 'No Index Found'
assert index2 is not None, 'Index not found'
def test_bad_index_descriptor(self):
failed = False
try:
class Movie(Document):
skeleton = {'standard':unicode}
indexes = [{'unique':True}]
except BadIndexError, e:
self.assertEqual(str(e), "`fields` key must be specify in indexes")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'std',
},
]
except ValueError, e:
self.assertEqual(str(e), "Error in indexes: can't find std in skeleton or optional")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':{'standard':1},
},
]
except BadIndexError, e:
self.assertEqual(str(e), "fields must be a string or a list of tuples (got <type 'dict'> instead)")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':('standard',1, "blah"),
},
]
except BadIndexError, e:
self.assertEqual(str(e), "fields must be a string or a list of tuples (got <type 'tuple'> instead)")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':[('standard',"2")],
},
]
except BadIndexError, e:
self.assertEqual(str(e), "index direction must be INDEX_DESCENDING, INDEX_ASCENDING, INDEX_OFF, INDEX_ALL or INDEX_GEO2D. Got 2")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':(3,1),
},
]
except BadIndexError, e:
self.assertEqual(str(e), "fields must be a string or a list of tuples (got <type 'tuple'> instead)")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':[("blah",1)],
},
]
except ValueError, e:
self.assertEqual(str(e), "Error in indexes: can't find blah in skeleton or optional")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':[('standard',1), ('bla',1)],
},
]
except ValueError, e:
self.assertEqual(str(e), "Error in indexes: can't find bla in skeleton or optional")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':[('standard',3)],
},
]
except BadIndexError, e:
self.assertEqual(str(e), "index direction must be INDEX_DESCENDING, INDEX_ASCENDING, INDEX_OFF, INDEX_ALL or INDEX_GEO2D. Got 3")
failed = True
self.assertEqual(failed, True)
failed = False
try:
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'std',
},
]
except ValueError, e:
self.assertEqual(str(e), "Error in indexes: can't find std in skeleton or optional")
failed = True
self.assertEqual(failed, True)
def test_index_ttl(self):
class Movie(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
'ttl': 86400
},
# If indexes are still broken validation will choke on the ttl
]
self.connection.register([Movie])
self.col.Movie.generate_indexes()
movie = self.col.Movie()
movie['standard'] = u'test'
movie.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
assert item is not None, 'No Index Found'
def test_index_simple_inheritance(self):
class DocA(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
},
]
class DocB(DocA):
skeleton = {
'docb':unicode,
}
self.connection.register([DocA, DocB])
self.assertEqual(self.col.DocB.indexes, [{'fields': 'standard', 'unique':True}])
self.col.DocB.generate_indexes()
docb = self.col.DocB()
docb['standard'] = u'test'
docb['docb'] = u'foo'
docb.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
assert item is not None, 'No Index Found'
def test_index_inheritance(self):
class DocA(Document):
skeleton = {
'standard':unicode,
}
indexes = [
{
'fields':'standard',
'unique':True,
},
]
class DocB(DocA):
skeleton = {
'docb':unicode,
}
indexes = [
{
'fields':'docb',
'unique':True,
},
]
self.connection.register([DocA, DocB])
self.assertEqual(self.col.DocB.indexes, [{'fields': 'docb', 'unique': True}, {'fields': 'standard', 'unique': True}])
self.col.DocB.generate_indexes()
docb = self.col.DocB()
docb['standard'] = u'test'
docb['docb'] = u'foo'
docb.save()
db = self.connection.test
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'standard_1', 'unique':True, 'key':{'standard':1}})
item = db['system.indexes'].find_one({'ns':'test.mongolite', 'name':'docb_1', 'unique':True, 'key':{'docb':1}})
assert item is not None, 'No Index Found'
def test_index_real_world(self):
import datetime
class MyDoc(Document):
skeleton = {
"mydoc":{
"creation_date":datetime.datetime,
}
}
indexes = [{'fields':[('mydoc.creation_date',-1), ('_id',1)]}]
self.connection.register([MyDoc])
date = datetime.datetime.utcnow()
mydoc = self.col.MyDoc()
mydoc['mydoc']['creation_date'] = date
mydoc['_id'] = u'aaa'
mydoc.save()
mydoc3 = self.col.MyDoc()
mydoc3['mydoc']['creation_date'] = date
mydoc3['_id'] = u'bbb'
mydoc3.save()
import time
time.sleep(1)
date2 = datetime.datetime.utcnow()
mydoc2 = self.col.MyDoc()
mydoc2['mydoc']['creation_date'] = date2
mydoc2['_id'] = u'aa'
mydoc2.save()
time.sleep(1)
date3 = datetime.datetime.utcnow()
mydoc4 = self.col.MyDoc()
mydoc4['mydoc']['creation_date'] = date3
mydoc4['_id'] = u'ccc'
mydoc4.save()
#self.col.ensure_index([('mydoc.creation_date',-1), ('_id',1)])
self.col.MyDoc.generate_indexes()
results = [i['_id'] for i in self.col.MyDoc.find().sort([('mydoc.creation_date',-1),('_id',1)])]
self.assertEqual(results, ['ccc', 'aa', 'aaa', 'bbb'])
self.col.MyDoc.generate_indexes()
def test_index_pymongo(self):
import datetime
date = datetime.datetime.utcnow()
import pymongo
collection = pymongo.Connection()['test']['test_index']
mydoc = {'mydoc':{'creation_date':date}, '_id':u'aaa'}
collection.insert(mydoc)
mydoc2 = {'mydoc':{'creation_date':date}, '_id':u'bbb'}
collection.insert(mydoc2)
import time
time.sleep(1)
date2 = datetime.datetime.utcnow()
mydoc3 = {'mydoc':{'creation_date':date2}, '_id':u'aa'}
collection.insert(mydoc3)
time.sleep(1)
date3 = datetime.datetime.utcnow()
mydoc4 = {'mydoc':{'creation_date':date3}, '_id':u'ccc'}
collection.insert(mydoc4)
collection.ensure_index([('mydoc.creation_date',-1), ('_id',1)])
#print list(collection.database.system.indexes.find())
results = [i['_id'] for i in collection.find().sort([('mydoc.creation_date',-1),('_id',1)])]
print results
assert results == [u'ccc', u'aa', u'aaa', u'bbb'], results
def test_index_inheritance2(self):
class A(Document):
skeleton = {
'a':{
'title':unicode,
}
}
indexes = [{'fields':'a.title'}]
class B(A):
skeleton = {
'b':{
'title':unicode,
}
}
indexes = [{'fields':'b.title'}]
class C(Document):
skeleton = {
'c':{
'title':unicode,
}
}
indexes = [{'fields':'c.title'}]
class D(B, C):
skeleton = {
'd':{
'title':unicode,
}
}
self.connection.register([D])
doc = self.col.D()
self.assertEqual(doc.indexes, [{'fields': 'b.title'}, {'fields': 'a.title'}, {'fields': 'c.title'}])
def test_index_with_default_direction(self):
class MyDoc(Document):
skeleton = {
'foo': unicode,
'bar': int
}
indexes = [
{'fields': [('foo', 1), ('bar', -1)]},
]
self.connection.register([MyDoc])
self.col.MyDoc.generate_indexes()
for i in range(10):
doc = self.col.MyDoc()
doc['foo'] = unicode(i)
doc['bar'] = i
doc.save()
assert self.col.database.system.indexes.find_one({'name': 'foo_1_bar_-1'})
def test_index_with_check(self):
@self.connection.register
class MyDoc(Document):
skeleton = {
'foo': dict,
'bar': int
}
indexes = [
{'fields': 'foo.title', 'check':False},
]
self.col.MyDoc.generate_indexes()
for i in range(10):
doc = self.col.MyDoc()
doc['foo']['title'] = unicode(i)
doc['bar'] = i
doc.save()
assert self.col.database.system.indexes.find_one({'name': 'foo.title_1'})
def test_index_with_check_is_true(self):
@self.connection.register
class MyDoc(Document):
skeleton = {
'foo': unicode,
'bar': int
}
indexes = [
{'fields': [('foo', 1)], 'check':True},
]
self.col.MyDoc.generate_indexes()
for i in range(10):
doc = self.col.MyDoc()
doc['foo'] = unicode(i)
doc['bar'] = i
doc.save()
assert self.col.database.system.indexes.find_one({'name': 'foo_1'})
|
from pythran.tests import TestEnv
from pythran.typing import *
class TestCopperhead(TestEnv):
def test_saxpy(self):
self.run_test(
"def saxpy(a, x, y): return list(map(lambda xi, yi: a * xi + yi, x, y))",
1.5, [1,2,3], [0.,2.,4.],
saxpy=[float, List[int], List[float]])
def test_saxpy2(self):
self.run_test(
"def saxpy2(a, x, y): return [a*xi+yi for xi,yi in zip(x,y)]",
1.5, [1,2,3], [0.,2.,4.],
saxpy2=[float,List[int], List[float]])
def test_saxpy3(self):
code="""
def saxpy3(a, x, y):
def triad(xi, yi): return a * xi + yi
return list(map(triad, x, y))
"""
self.run_test(
code,
1.5, [1,2,3], [0.,2.,4.],
saxpy3=[float,List[int], List[float]])
def test_saxpy4(self):
code="""
def saxpy4(a, x, y):
return manual(y,x,a)
def manual(y,x,a):
__list=list()
for __tuple in zip(y,x):
__list.append(__tuple[0]*a+__tuple[1])
return __list
"""
self.run_test(
code,
1.5, [1,2,3], [0.,2.,4.],
saxpy4=[float,List[int], List[float]])
def test_sxpy(self):
code="""
def sxpy(x, y):
def duad(xi, yi): return xi + yi
return list(map(duad, x, y))
"""
self.run_test(
code,
[1,2,3], [0.,2.,4.],
sxpy=[List[int], List[float]])
def test_incr(self):
self.run_test(
"def incr(x): return list(map(lambda xi: xi + 1, x))",
[0., 0., 0.],
incr=[List[float]])
def test_as_ones(self):
self.run_test(
"def as_ones(x): return list(map(lambda xi: 1, x))",
[0., 0., 0.],
as_ones=[List[float]])
def test_idm(self):
self.run_test(
"def idm(x): return list(map(lambda b: b, x))",
[1, 2, 3],
idm=[List[int]])
def test_incr_list(self):
self.run_test(
"def incr_list(x): return [xi + 1 for xi in x]",
[1., 2., 3.],
incr_list=[List[float]])
def test_idx(self):
code="""
def idx(x):
def id(xi): return xi
return list(map(id, x))"""
self.run_test(code, [1,2,3], idx=[List[int]])
def test_rbf(self):
code="""
from math import exp
def norm2_diff(x, y):
def el(xi, yi):
diff = xi - yi
return diff * diff
return sum(map(el, x, y))
def rbf(ngamma, x, y):
return exp(ngamma * norm2_diff(x,y))"""
self.run_test(
code,
2.3, [1,2,3], [1.1,1.2,1.3],
rbf=[float, List[int], List[float]])
def test_indices(self):
self.run_test(
"def indices(A):return list(range(len(A)))",
[1,2],
indices=[List[int]])
def test_gather(self):
self.run_test(
"def gather(x, indices): return [x[i] for i in indices]",
[1,2,3,4,5], [0,2,4],
gather=[List[int], List[int]])
def test_scatter(self):
code="""
def indices(x): return list(range(len(x)))
def scatter(src, indices_, dst):
assert len(src)==len(indices_)
result = list(dst)
for i in range(len(src)):
result[indices_[i]] = src[i]
return result
"""
self.run_test(
code,
[0.0,1.0,2.,3.,4.,5.,6.,7.,8.,9.],[5,6,7,8,9,0,1,2,3,4],[0,0,0,0,0,0,0,0,0,0,18],
scatter=[List[float], List[int], List[int]])
def test_scan(self):
code="""
def prefix(A): return scan(lambda x,y:x+y, A)
def scan(f, A):
B = list(A)
for i in range(1, len(B)):
B[i] = f(B[i-1], B[i])
return B
"""
self.run_test(code, [1.,2.,3.], prefix=[List[float]])
def test_spvv_csr(self):
code="""
def spvv_csr(x, cols, y):
def gather(x, indices): return [x[i] for i in indices]
z = gather(y, cols)
return sum(map(lambda a, b: a * b, x, z))
"""
self.run_test(code, [1,2,3],[0,1,2],[5.5,6.6,7.7], spvv_csr=[List[int], List[int], List[float]])
def test_spmv_csr(self):
code="""
def spvv_csr(x, cols, y):
def gather(x, indices): return [x[i] for i in indices]
z = gather(y, cols)
return sum(map(lambda a, b: a * b, x, z))
def spmv_csr(Ax, Aj, x):
return list(map(lambda y, cols: spvv_csr(y, cols, x), Ax, Aj))
"""
self.run_test(code, [[0,1,2],[0,1,2],[0,1,2]],[[0,1,2],[0,1,2],[0,1,2]],[0,1,2], spmv_csr=[List[List[int]], List[List[int]], List[int]])
def test_spmv_ell(self):
code="""
def indices(x): return range(len(x))
def spmv_ell(data, idx, x):
def kernel(i):
return sum(map(lambda Aj, J: Aj[i] * x[J[i]], data, idx))
return list(map(kernel, indices(x)))
"""
self.run_test(code, [[0,1,2],[0,1,2],[0,1,2]],[[0,1,2],[0,1,2],[0,1,2]],[0,1,2], spmv_ell=[List[List[int]], List[List[int]], List[int]])
def test_vadd(self):
self.run_test("def vadd(x, y): return list(map(lambda a, b: a + b, x, y))", [0.,1.,2.],[5.,6.,7.], vadd=[List[float], List[float]])
def test_vmul(self):
self.run_test("def vmul(x, y): return list(map(lambda a, b: a * b, x, y))", [0.,1.,2.],[5.,6.,7.], vmul=[List[float], List[float]])
def test_form_preconditioner(self):
code="""
def vadd(x, y): return list(map(lambda a, b: a + b, x, y))
def vmul(x, y): return list(map(lambda a, b: a * b, x, y))
def form_preconditioner(a, b, c):
def det_inverse(ai, bi, ci):
return 1.0/(ai * ci - bi * bi)
indets = list(map(det_inverse, a, b, c))
p_a = vmul(indets, c)
p_b = list(map(lambda a, b: -a * b, indets, b))
p_c = vmul(indets, a)
return p_a, p_b, p_c
"""
self.run_test(code, [1,2,3],[0,1,2],[5.5,6.6,7.7],form_preconditioner=[List[int], List[int], List[float]])
def test_precondition(self):
code="""
def precondition(u, v, p_a, p_b, p_c):
def vadd(x, y): return map(lambda a, b: a + b, x, y)
def vmul(x, y): return map(lambda a, b: a * b, x, y)
e = vadd(vmul(p_a, u), vmul(p_b, v))
f = vadd(vmul(p_b, u), vmul(p_c, v))
return list(e), list(f)
"""
self.run_test(code, [1,2,3], [5.5,6.6,7.7],[1,2,3], [5.5,6.6,7.7],[8.8,9.9,10.10], precondition=[List[int], List[float], List[int], List[float], List[float]])
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
from clone_stmt import CloneStmt
from clone_function import CloneFunction
from dead_code_elim import DCE
from inline import Inliner
from phase import Phase
from simplify import Simplify
from subst import subst_expr, subst_expr_list, subst_expr_tuple, subst_stmt_list
from transform import Transform
|
"""Snapshot Build Bisect Tool
This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""
CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
'/chromium-browser-snapshots')
WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
'/chromium-webkit-snapshots')
ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
'/chromium-browser-asan')
GS_BUCKET_NAME = 'chrome-unsigned/desktop-W15K3Y'
GOOGLE_APIS_URL = 'commondatastorage.googleapis.com'
OFFICIAL_BASE_URL = 'http://%s/%s' % (GOOGLE_APIS_URL, GS_BUCKET_NAME)
CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/src/+log/%s..%s')
ANDROID_TOT_BUCKET_NAME = ('chrome-android-tot/bisect')
ANDROID_BUCKET_NAME = 'chrome-unsigned/android-C4MPAR1'
ANDROID_OFFICIAL_BASE_URL = 'http://%s/%s' % (GOOGLE_APIS_URL, ANDROID_BUCKET_NAME)
CRREV_URL = ('https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/')
OFFICIAL_CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/'
'src/+log/%s..%s?pretty=full')
DEPS_FILE_OLD = ('http://src.chromium.org/viewvc/chrome/trunk/src/'
'DEPS?revision=%d')
DEPS_FILE_NEW = ('https://chromium.googlesource.com/chromium/src/+/%s/DEPS')
BLINK_CHANGELOG_URL = ('http://build.chromium.org'
'/f/chromium/perf/dashboard/ui/changelog_blink.html'
'?url=/trunk&range=%d%%3A%d')
DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
'known good), but no later than %s (first known bad).')
DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
'known bad), but no later than %s (first known good).')
CHROMIUM_GITHASH_TO_SVN_URL = (
'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
BLINK_GITHASH_TO_SVN_URL = (
'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
GITHASH_TO_SVN_URL = {
'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
'blink': BLINK_GITHASH_TO_SVN_URL,
}
CHROMIUM_SEARCH_PATTERN_OLD = (
r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
CHROMIUM_SEARCH_PATTERN = (
r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')
BLINK_SEARCH_PATTERN = (
r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
SEARCH_PATTERN = {
'chromium': CHROMIUM_SEARCH_PATTERN,
'blink': BLINK_SEARCH_PATTERN,
}
CREDENTIAL_ERROR_MESSAGE = ('You are attempting to access protected data with '
'no configured credentials')
ANDROID_CHROME_PACKAGE_NAME = {
'Chrome.apk': 'com.google.android.apps.chrome',
'ChromeBeta.apk': 'com.chrome.beta',
'ChromeCanary.apk': 'com.chrome.canary',
'ChromeDev.apk': 'com.google.android.apps.chrome_dev',
'ChromeStable.apk': 'com.android.chrome',
'ChromeWork.apk': 'com.chrome.work',
}
import httplib
import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile
class PathContext(object):
"""A PathContext is used to carry the information used to construct URLs and
paths when dealing with the storage server and archives."""
def __init__(self, base_url, platform, good_revision, bad_revision,
is_official, is_asan, use_local_cache, flash_path = None,
pdf_path = None, android_apk = None):
super(PathContext, self).__init__()
# Store off the input parameters.
self.base_url = base_url
self.platform = platform # What's passed in to the '-a/--archive' option.
self.good_revision = good_revision
self.bad_revision = bad_revision
self.is_official = is_official
self.is_asan = is_asan
self.build_type = 'release'
self.flash_path = flash_path
    # Dictionary which stores svn revision number as key and its
# corresponding git hash as value. This data is populated in
# _FetchAndParse and used later in GetDownloadURL while downloading
# the build.
self.githash_svn_dict = {}
self.pdf_path = pdf_path
# The name of the ZIP file in a revision directory on the server.
self.archive_name = None
# Whether to cache and use the list of known revisions in a local file to
# speed up the initialization of the script at the next run.
self.use_local_cache = use_local_cache
# Locate the local checkout to speed up the script by using locally stored
# metadata.
abs_file_path = os.path.abspath(os.path.realpath(__file__))
local_src_path = os.path.join(os.path.dirname(abs_file_path), '..')
if abs_file_path.endswith(os.path.join('tools', 'bisect-builds.py')) and\
os.path.exists(os.path.join(local_src_path, '.git')):
self.local_src_path = os.path.normpath(local_src_path)
else:
self.local_src_path = None
# Whether the build should be downloaded using gsutil.
self.download_with_gsutil = False
# If the script is being used for android builds.
self.is_android = self.platform.startswith('android')
# android_apk defaults to Chrome.apk
if self.is_android:
self.android_apk = android_apk if android_apk else 'Chrome.apk'
# Set some internal members:
# _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
# _archive_extract_dir = Uncompressed directory in the archive_name file.
# _binary_name = The name of the executable to run.
if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
self._binary_name = 'chrome'
elif self.platform in ('mac', 'mac64'):
self.archive_name = 'chrome-mac.zip'
self._archive_extract_dir = 'chrome-mac'
elif self.platform in ('win', 'win64'):
self.archive_name = 'chrome-win32.zip'
self._archive_extract_dir = 'chrome-win32'
self._binary_name = 'chrome.exe'
elif self.is_android:
pass
else:
raise Exception('Invalid platform: %s' % self.platform)
if is_official:
if self.platform == 'linux':
self._listing_platform_dir = 'precise32/'
self.archive_name = 'chrome-precise32.zip'
self._archive_extract_dir = 'chrome-precise32'
elif self.platform == 'linux64':
self._listing_platform_dir = 'precise64/'
self.archive_name = 'chrome-precise64.zip'
self._archive_extract_dir = 'chrome-precise64'
elif self.platform == 'mac':
self._listing_platform_dir = 'mac/'
self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
elif self.platform == 'mac64':
self._listing_platform_dir = 'mac64/'
self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
elif self.platform == 'win':
self._listing_platform_dir = 'win/'
self.archive_name = 'chrome-win.zip'
self._archive_extract_dir = 'chrome-win'
elif self.platform == 'win64':
self._listing_platform_dir = 'win64/'
self.archive_name = 'chrome-win64.zip'
self._archive_extract_dir = 'chrome-win64'
elif self.platform == 'android-arm':
self._listing_platform_dir = 'arm/'
self.archive_name = self.android_apk
elif self.platform == 'android-arm-64':
self._listing_platform_dir = 'arm_64/'
self.archive_name = self.android_apk
elif self.platform == 'android-x86':
self._listing_platform_dir = 'x86/'
self.archive_name = self.android_apk
      elif self.platform == 'android-x86-64':
self._listing_platform_dir = 'x86_64/'
self.archive_name = self.android_apk
else:
if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
self.archive_name = 'chrome-linux.zip'
self._archive_extract_dir = 'chrome-linux'
if self.platform == 'linux':
self._listing_platform_dir = 'Linux/'
elif self.platform == 'linux64':
self._listing_platform_dir = 'Linux_x64/'
elif self.platform == 'linux-arm':
self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
elif self.platform == 'chromeos':
self._listing_platform_dir = 'Linux_ChromiumOS_Full/'
elif self.platform == 'mac':
self._listing_platform_dir = 'Mac/'
self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
elif self.platform == 'win':
self._listing_platform_dir = 'Win/'
elif self.platform == 'android-arm':
self.archive_name = 'bisect_android.zip'
# Need to download builds using gsutil instead of visiting url for
# authentication reasons.
self.download_with_gsutil = True
def GetASANPlatformDir(self):
"""ASAN builds are in directories like "linux-release", or have filenames
like "asan-win32-release-277079.zip". This aligns to our platform names
except in the case of Windows where they use "win32" instead of "win"."""
if self.platform == 'win':
return 'win32'
else:
return self.platform
def GetListingURL(self, marker=None):
"""Returns the URL for a directory listing, with an optional marker."""
marker_param = ''
if marker:
marker_param = '&marker=' + str(marker)
if self.is_asan:
prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
else:
return (self.base_url + '/?delimiter=/&prefix=' +
self._listing_platform_dir + marker_param)
def GetDownloadURL(self, revision):
"""Gets the download URL for a build archive of a specific revision."""
if self.is_asan:
return '%s/%s-%s/%s-%d.zip' % (
ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
self.GetASANBaseName(), revision)
if self.is_official:
if self.is_android:
official_base_url = ANDROID_OFFICIAL_BASE_URL
else:
official_base_url = OFFICIAL_BASE_URL
return '%s/%s/%s%s' % (
official_base_url, revision, self._listing_platform_dir,
self.archive_name)
else:
if self.is_android:
# These files need to be downloaded through gsutil.
return ('gs://%s/%s/%s' % (ANDROID_TOT_BUCKET_NAME, revision,
self.archive_name))
else:
if str(revision) in self.githash_svn_dict:
revision = self.githash_svn_dict[str(revision)]
return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
revision, self.archive_name)
def GetLastChangeURL(self):
"""Returns a URL to the LAST_CHANGE file."""
return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'
def GetASANBaseName(self):
"""Returns the base name of the ASAN zip file."""
if 'linux' in self.platform:
return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
self.build_type)
else:
return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)
def GetLaunchPath(self, revision):
"""Returns a relative path (presumably from the archive extraction location)
that is used to run the executable."""
if self.is_asan:
extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
else:
extract_dir = self._archive_extract_dir
return os.path.join(extract_dir, self._binary_name)
def ParseDirectoryIndex(self, last_known_rev):
"""Parses the Google Storage directory listing into a list of revision
numbers."""
def _GetMarkerForRev(revision):
if self.is_asan:
return '%s-%s/%s-%d.zip' % (
self.GetASANPlatformDir(), self.build_type,
self.GetASANBaseName(), revision)
return '%s%d' % (self._listing_platform_dir, revision)
def _FetchAndParse(url):
"""Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
next-marker is not None, then the listing is a partial listing and another
fetch should be performed with next-marker being the marker= GET
parameter."""
handle = urllib.urlopen(url)
document = ElementTree.parse(handle)
# All nodes in the tree are namespaced. Get the root's tag name to extract
# the namespace. Etree does namespaces as |{namespace}tag|.
root_tag = document.getroot().tag
end_ns_pos = root_tag.find('}')
if end_ns_pos == -1:
raise Exception('Could not locate end namespace for directory index')
namespace = root_tag[:end_ns_pos + 1]
# Find the prefix (_listing_platform_dir) and whether or not the list is
# truncated.
prefix_len = len(document.find(namespace + 'Prefix').text)
next_marker = None
is_truncated = document.find(namespace + 'IsTruncated')
if is_truncated is not None and is_truncated.text.lower() == 'true':
next_marker = document.find(namespace + 'NextMarker').text
# Get a list of all the revisions.
revisions = []
githash_svn_dict = {}
if self.is_asan:
asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
        # Non-ASAN builds are in a <revision> directory; ASAN builds are flat.
all_prefixes = document.findall(namespace + 'Contents/' +
namespace + 'Key')
for prefix in all_prefixes:
m = asan_regex.match(prefix.text)
if m:
try:
revisions.append(int(m.group(1)))
except ValueError:
pass
else:
all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
namespace + 'Prefix')
# The <Prefix> nodes have content of the form of
# |_listing_platform_dir/revision/|. Strip off the platform dir and the
# trailing slash to just have a number.
for prefix in all_prefixes:
revnum = prefix.text[prefix_len:-1]
try:
if not revnum.isdigit():
# During the svn-git migration, some items were stored by hash.
# These items may appear anywhere in the list of items.
# If |last_known_rev| is set, assume that the full list has been
# retrieved before (including the hashes), so we can safely skip
# all git hashes and focus on the numeric revision numbers.
if last_known_rev:
revnum = None
else:
git_hash = revnum
revnum = self.GetSVNRevisionFromGitHash(git_hash)
githash_svn_dict[revnum] = git_hash
if revnum is not None:
revnum = int(revnum)
revisions.append(revnum)
except ValueError:
pass
return (revisions, next_marker, githash_svn_dict)
# Fetch the first list of revisions.
if last_known_rev:
revisions = []
# Optimization: Start paging at the last known revision (local cache).
next_marker = _GetMarkerForRev(last_known_rev)
# Optimization: Stop paging at the last known revision (remote).
last_change_rev = GetChromiumRevision(self, self.GetLastChangeURL())
if last_known_rev == last_change_rev:
return []
else:
(revisions, next_marker, new_dict) = _FetchAndParse(self.GetListingURL())
self.githash_svn_dict.update(new_dict)
last_change_rev = None
# If the result list was truncated, refetch with the next marker. Do this
# until an entire directory listing is done.
while next_marker:
sys.stdout.write('\rFetching revisions at marker %s' % next_marker)
sys.stdout.flush()
next_url = self.GetListingURL(next_marker)
(new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
revisions.extend(new_revisions)
self.githash_svn_dict.update(new_dict)
if last_change_rev and last_change_rev in new_revisions:
break
sys.stdout.write('\r')
sys.stdout.flush()
return revisions
def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
response = urllib.urlopen(json_url)
if response.getcode() == 200:
try:
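        # The googlesource JSON endpoint prefixes responses with the
        # anti-XSSI marker )]}', so skip the first 4 bytes before parsing.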
data = json.loads(response.read()[4:])
except ValueError:
print 'ValueError for JSON URL: %s' % json_url
raise ValueError
else:
raise ValueError
if 'message' in data:
message = data['message'].split('\n')
message = [line for line in message if line.strip()]
search_pattern = re.compile(SEARCH_PATTERN[depot])
result = search_pattern.search(message[len(message)-1])
if result:
return result.group(1)
else:
if depot == 'chromium':
result = re.search(CHROMIUM_SEARCH_PATTERN_OLD,
message[len(message)-1])
if result:
return result.group(1)
print 'Failed to get svn revision number for %s' % git_sha1
raise ValueError
def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
def _RunGit(command, path):
command = ['git'] + command
shell = sys.platform.startswith('win')
proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, cwd=path)
(output, _) = proc.communicate()
return (output, proc.returncode)
path = self.local_src_path
if depot == 'blink':
path = os.path.join(self.local_src_path, 'third_party', 'WebKit')
revision = None
try:
command = ['svn', 'find-rev', git_sha1]
(git_output, return_code) = _RunGit(command, path)
if not return_code:
revision = git_output.strip('\n')
except ValueError:
pass
if not revision:
command = ['log', '-n1', '--format=%s', git_sha1]
(git_output, return_code) = _RunGit(command, path)
if not return_code:
revision = re.match('SVN changes up to revision ([0-9]+)', git_output)
revision = revision.group(1) if revision else None
if revision:
return revision
raise ValueError
def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
if not self.local_src_path:
return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
else:
return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)
def GetRevList(self):
"""Gets the list of revision numbers between self.good_revision and
self.bad_revision."""
cache = {}
# The cache is stored in the same directory as bisect-builds.py
cache_filename = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'.bisect-builds-cache.json')
cache_dict_key = self.GetListingURL()
def _LoadBucketFromCache():
if self.use_local_cache:
try:
with open(cache_filename) as cache_file:
cache = json.load(cache_file)
revisions = cache.get(cache_dict_key, [])
githash_svn_dict = cache.get('githash_svn_dict', {})
if revisions:
print 'Loaded revisions %d-%d from %s' % (revisions[0],
revisions[-1], cache_filename)
return (revisions, githash_svn_dict)
except (EnvironmentError, ValueError):
pass
return ([], {})
def _SaveBucketToCache():
"""Save the list of revisions and the git-svn mappings to a file.
The list of revisions is assumed to be sorted."""
if self.use_local_cache:
cache[cache_dict_key] = revlist_all
cache['githash_svn_dict'] = self.githash_svn_dict
try:
with open(cache_filename, 'w') as cache_file:
json.dump(cache, cache_file)
print 'Saved revisions %d-%d to %s' % (
revlist_all[0], revlist_all[-1], cache_filename)
except EnvironmentError:
pass
# Download the revlist and filter for just the range between good and bad.
minrev = min(self.good_revision, self.bad_revision)
maxrev = max(self.good_revision, self.bad_revision)
(revlist_all, self.githash_svn_dict) = _LoadBucketFromCache()
last_known_rev = revlist_all[-1] if revlist_all else 0
if last_known_rev < maxrev:
revlist_all.extend(map(int, self.ParseDirectoryIndex(last_known_rev)))
revlist_all = list(set(revlist_all))
revlist_all.sort()
_SaveBucketToCache()
revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
# Set good and bad revisions to be legit revisions.
if revlist:
if self.good_revision < self.bad_revision:
self.good_revision = revlist[0]
self.bad_revision = revlist[-1]
else:
self.bad_revision = revlist[0]
self.good_revision = revlist[-1]
# Fix chromium rev so that the deps blink revision matches REVISIONS file.
if self.base_url == WEBKIT_BASE_URL:
revlist_all.sort()
self.good_revision = FixChromiumRevForBlink(revlist,
revlist_all,
self,
self.good_revision)
self.bad_revision = FixChromiumRevForBlink(revlist,
revlist_all,
self,
self.bad_revision)
return revlist
def _GetHashToNumberDict(self):
"""Gets the mapping of git hashes to git numbers from Google Storage."""
gs_file = 'gs://%s/gitnumbers_dict.json' % ANDROID_TOT_BUCKET_NAME
local_file = 'gitnumbers_dict.json'
GsutilDownload(gs_file, local_file)
json_data = open(local_file).read()
os.remove(local_file)
return json.loads(json_data)
def GetAndroidToTRevisions(self):
"""Gets the ordered list of revisions between self.good_revision and
self.bad_revision from the Android tip of tree GS bucket.
"""
# Dictionary that maps git hashes to git numbers. The git numbers
# let us order the revisions.
hash_to_num = self._GetHashToNumberDict()
try:
good_rev_num = hash_to_num[self.good_revision]
bad_rev_num = hash_to_num[self.bad_revision]
except KeyError:
exit('Error. Make sure the good and bad revisions are valid git hashes.')
# List of all builds by their git hashes in the storage bucket.
hash_list = GsutilList(ANDROID_TOT_BUCKET_NAME)
# Get list of builds that we want to bisect over.
final_list = []
minnum = min(good_rev_num, bad_rev_num)
maxnum = max(good_rev_num, bad_rev_num)
for githash in hash_list:
if len(githash) != 40:
continue
gitnumber = hash_to_num[githash]
if minnum < gitnumber < maxnum:
final_list.append(githash)
return sorted(final_list, key=lambda h: hash_to_num[h])
def GetOfficialBuildsList(self):
"""Gets the list of official build numbers between self.good_revision and
self.bad_revision."""
# Download the revlist and filter for just the range between good and bad.
minrev = min(self.good_revision, self.bad_revision)
maxrev = max(self.good_revision, self.bad_revision)
if self.is_android:
gs_bucket_name = ANDROID_BUCKET_NAME
else:
gs_bucket_name = GS_BUCKET_NAME
build_numbers = GsutilList(gs_bucket_name)
revision_re = re.compile(r'(\d\d\.\d\.\d{4}\.\d+)')
build_numbers = filter(lambda b: revision_re.search(b), build_numbers)
final_list = []
parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
connection = httplib.HTTPConnection(GOOGLE_APIS_URL)
for build_number in sorted(parsed_build_numbers):
if build_number > maxrev:
break
if build_number < minrev:
continue
path = ('/' + gs_bucket_name + '/' + str(build_number) + '/' +
self._listing_platform_dir + self.archive_name)
connection.request('HEAD', path)
response = connection.getresponse()
if response.status == 200:
final_list.append(str(build_number))
response.read()
connection.close()
return final_list
def CheckDepotToolsInPath():
delimiter = ';' if sys.platform.startswith('win') else ':'
path_list = os.environ['PATH'].split(delimiter)
for path in path_list:
if path.rstrip(os.path.sep).endswith('depot_tools'):
return path
return None
def RunGsutilCommand(args):
gsutil_path = CheckDepotToolsInPath()
if gsutil_path is None:
print ('Follow the instructions in this document '
'http://dev.chromium.org/developers/how-tos/install-depot-tools'
' to install depot_tools and then try again.')
sys.exit(1)
gsutil_path = os.path.join(gsutil_path, 'third_party', 'gsutil', 'gsutil')
gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=None)
stdout, stderr = gsutil.communicate()
if gsutil.returncode:
    if (re.findall(r'status[ =]40[13]', stderr) or
stderr.startswith(CREDENTIAL_ERROR_MESSAGE)):
print ('Follow these steps to configure your credentials and try'
             ' running bisect-builds.py again:\n'
' 1. Run "python %s config" and follow its instructions.\n'
' 2. If you have a @google.com account, use that account.\n'
' 3. For the project-id, just enter 0.' % gsutil_path)
sys.exit(1)
else:
raise Exception('Error running the gsutil command: %s' % stderr)
return stdout
def GsutilList(bucket):
query = 'gs://%s/' % bucket
stdout = RunGsutilCommand(['ls', query])
return [url[len(query):].strip('/') for url in stdout.splitlines()]
def GsutilDownload(gs_download_url, filename):
RunGsutilCommand(['cp', gs_download_url, filename])
def UnzipFilenameToDir(filename, directory):
"""Unzip |filename| to |directory|."""
cwd = os.getcwd()
if not os.path.isabs(filename):
filename = os.path.join(cwd, filename)
zf = zipfile.ZipFile(filename)
# Make base.
if not os.path.isdir(directory):
os.mkdir(directory)
os.chdir(directory)
# Extract files.
for info in zf.infolist():
name = info.filename
if name.endswith('/'): # dir
if not os.path.isdir(name):
os.makedirs(name)
else: # file
directory = os.path.dirname(name)
if directory and not os.path.isdir(directory):
os.makedirs(directory)
out = open(name, 'wb')
out.write(zf.read(name))
out.close()
# Set permissions. Permission info in external_attr is shifted 16 bits.
os.chmod(name, info.external_attr >> 16L)
os.chdir(cwd)
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
"""Downloads and unzips revision |rev|.
@param context A PathContext instance.
@param rev The Chromium revision number/tag to download.
@param filename The destination for the downloaded file.
@param quit_event A threading.Event which will be set by the master thread to
indicate that the download should be aborted.
@param progress_event A threading.Event which will be set by the master thread
to indicate that the progress of the download should be
displayed.
"""
def ReportHook(blocknum, blocksize, totalsize):
if quit_event and quit_event.isSet():
raise RuntimeError('Aborting download of revision %s' % str(rev))
if progress_event and progress_event.isSet():
size = blocknum * blocksize
if totalsize == -1: # Total size not known.
progress = 'Received %d bytes' % size
else:
size = min(totalsize, size)
progress = 'Received %d of %d bytes, %.2f%%' % (
size, totalsize, 100.0 * size / totalsize)
# Send a \r to let all progress messages use just one line of output.
sys.stdout.write('\r' + progress)
sys.stdout.flush()
download_url = context.GetDownloadURL(rev)
try:
if context.download_with_gsutil:
GsutilDownload(download_url, filename)
else:
urllib.urlretrieve(download_url, filename, ReportHook)
if progress_event and progress_event.isSet():
print
except RuntimeError:
pass
def RunADBCommand(args):
  cmd = ['adb'] + args
  adb = subprocess.Popen(cmd,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=None)
stdout, stderr = adb.communicate()
return stdout
def IsADBInstalled():
"""Checks if ADB is in the environment path."""
try:
adb_output = RunADBCommand(['version'])
return ('Android Debug Bridge' in adb_output)
except OSError:
return False
def GetAndroidDeviceList():
"""Returns the list of Android devices attached to the host machine."""
lines = RunADBCommand(['devices']).split('\n')[1:]
devices = []
for line in lines:
m = re.match('^(.*?)\s+device$', line)
if not m:
continue
devices.append(m.group(1))
return devices
def RunAndroidRevision(context, revision, zip_file):
"""Given a Chrome apk, install it on a local device, and launch Chrome."""
devices = GetAndroidDeviceList()
  if len(devices) != 1:
sys.exit('Please have 1 Android device plugged in. %d devices found'
% len(devices))
if context.is_official:
# Downloaded file is just the .apk in this case.
apk_file = zip_file
else:
cwd = os.getcwd()
tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
UnzipFilenameToDir(zip_file, tempdir)
os.chdir(tempdir)
apk_file = context.android_apk
package_name = ANDROID_CHROME_PACKAGE_NAME[context.android_apk]
print 'Installing...'
RunADBCommand(['install', '-r', '-d', apk_file])
print 'Launching Chrome...\n'
RunADBCommand(['shell', 'am', 'start', '-a',
'android.intent.action.VIEW', '-n', package_name +
'/com.google.android.apps.chrome.Main'])
def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
"""Given a zipped revision, unzip it and run the test."""
print 'Trying revision %s...' % str(revision)
if context.is_android:
RunAndroidRevision(context, revision, zip_file)
# TODO(mikecase): Support running command to auto-bisect Android.
return (None, None, None)
# Create a temp directory and unzip the revision into it.
cwd = os.getcwd()
tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
UnzipFilenameToDir(zip_file, tempdir)
# Hack: Chrome OS archives are missing icudtl.dat; try to copy it from
# the local directory.
if context.platform == 'chromeos':
icudtl_path = 'third_party/icu/source/data/in/icudtl.dat'
if not os.access(icudtl_path, os.F_OK):
print 'Couldn\'t find: ' + icudtl_path
sys.exit()
os.system('cp %s %s/chrome-linux/' % (icudtl_path, tempdir))
os.chdir(tempdir)
# Run the build as many times as specified.
testargs = ['--user-data-dir=%s' % profile] + args
# The sandbox must be run as root on Official Chrome, so bypass it.
if ((context.is_official or context.flash_path or context.pdf_path) and
context.platform.startswith('linux')):
testargs.append('--no-sandbox')
if context.flash_path:
testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a sufficiently large Flash version; it currently does
    # not need to be correct. Instead of requiring the user of the script to
    # figure out and pass the correct version, we just spoof it.
testargs.append('--ppapi-flash-version=99.9.999.999')
# TODO(vitalybuka): Remove in the future. See crbug.com/395687.
if context.pdf_path:
shutil.copy(context.pdf_path,
os.path.dirname(context.GetLaunchPath(revision)))
testargs.append('--enable-print-preview')
runcommand = []
for token in shlex.split(command):
if token == '%a':
runcommand.extend(testargs)
else:
runcommand.append(
token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
replace('%s', ' '.join(testargs)))
results = []
for _ in range(num_runs):
subproc = subprocess.Popen(runcommand,
bufsize=-1,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = subproc.communicate()
results.append((subproc.returncode, stdout, stderr))
os.chdir(cwd)
try:
shutil.rmtree(tempdir, True)
except Exception:
pass
for (returncode, stdout, stderr) in results:
if returncode:
return (returncode, stdout, stderr)
return results[0]
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
"""Asks the user whether build |rev| is good or bad."""
# Loop until we get a response that we can parse.
while True:
response = raw_input('Revision %s is '
'[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
str(rev))
if response and response in ('g', 'b', 'r', 'u'):
return response
if response and response == 'q':
raise SystemExit()
def IsGoodASANBuild(rev, official_builds, status, stdout, stderr):
"""Determine if an ASAN build |rev| is good or bad
Will examine stderr looking for the error message emitted by ASAN. If not
found then will fallback to asking the user."""
if stderr:
bad_count = 0
for line in stderr.splitlines():
print line
if line.find('ERROR: AddressSanitizer:') != -1:
bad_count += 1
if bad_count > 0:
print 'Revision %d determined to be bad.' % rev
return 'b'
return AskIsGoodBuild(rev, official_builds, status, stdout, stderr)
class DownloadJob(object):
"""DownloadJob represents a task to download a given Chromium revision."""
def __init__(self, context, name, rev, zip_file):
super(DownloadJob, self).__init__()
# Store off the input parameters.
self.context = context
self.name = name
self.rev = rev
self.zip_file = zip_file
self.quit_event = threading.Event()
self.progress_event = threading.Event()
self.thread = None
def Start(self):
"""Starts the download."""
fetchargs = (self.context,
self.rev,
self.zip_file,
self.quit_event,
self.progress_event)
self.thread = threading.Thread(target=FetchRevision,
name=self.name,
args=fetchargs)
self.thread.start()
def Stop(self):
"""Stops the download which must have been started previously."""
assert self.thread, 'DownloadJob must be started before Stop is called.'
self.quit_event.set()
self.thread.join()
os.unlink(self.zip_file)
def WaitFor(self):
"""Prints a message and waits for the download to complete. The download
must have been started previously."""
assert self.thread, 'DownloadJob must be started before WaitFor is called.'
print 'Downloading revision %s...' % str(self.rev)
self.progress_event.set() # Display progress of download.
self.thread.join()
def Bisect(context,
num_runs=1,
command='%p %a',
try_args=(),
profile=None,
interactive=True,
evaluate=AskIsGoodBuild):
"""Given known good and known bad revisions, run a binary search on all
archived revisions to determine the last known good revision.
@param context PathContext object initialized with user provided parameters.
  @param num_runs Number of times to run each build for asking good/bad.
  @param command The command template to run for each build; %p and %a expand
                 to the build's executable path and the extra arguments.
@param try_args A tuple of arguments to pass to the test application.
@param profile The name of the user profile to run with.
@param interactive If it is false, use command exit code for good or bad
judgment of the argument build.
@param evaluate A function which returns 'g' if the argument build is good,
'b' if it's bad or 'u' if unknown.
Threading is used to fetch Chromium revisions in the background, speeding up
the user's experience. For example, suppose the bounds of the search are
good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
whether revision 50 is good or bad, the next revision to check will be either
25 or 75. So, while revision 50 is being checked, the script will download
revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
known:
- If rev 50 is good, the download of rev 25 is cancelled, and the next test
is run on rev 75.
- If rev 50 is bad, the download of rev 75 is cancelled, and the next test
is run on rev 25.
"""
if not profile:
profile = 'profile'
good_rev = context.good_revision
bad_rev = context.bad_revision
cwd = os.getcwd()
print 'Downloading list of known revisions...',
if not context.use_local_cache and not context.is_official:
print '(use --use-local-cache to cache and re-use the list of revisions)'
else:
print
_GetDownloadPath = lambda rev: os.path.join(cwd,
'%s-%s' % (str(rev), context.archive_name))
if context.is_official:
revlist = context.GetOfficialBuildsList()
elif context.is_android: # Android non-official
revlist = context.GetAndroidToTRevisions()
else:
revlist = context.GetRevList()
# Get a list of revisions to bisect across.
if len(revlist) < 2: # Don't have enough builds to bisect.
msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
raise RuntimeError(msg)
# Figure out our bookends and first pivot point; fetch the pivot revision.
minrev = 0
maxrev = len(revlist) - 1
pivot = maxrev / 2
rev = revlist[pivot]
zip_file = _GetDownloadPath(rev)
fetch = DownloadJob(context, 'initial_fetch', rev, zip_file)
fetch.Start()
fetch.WaitFor()
# Binary search time!
while fetch and fetch.zip_file and maxrev - minrev > 1:
if bad_rev < good_rev:
min_str, max_str = 'bad', 'good'
else:
min_str, max_str = 'good', 'bad'
print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
revlist[maxrev], max_str)
# Pre-fetch next two possible pivots
# - down_pivot is the next revision to check if the current revision turns
# out to be bad.
# - up_pivot is the next revision to check if the current revision turns
# out to be good.
down_pivot = int((pivot - minrev) / 2) + minrev
down_fetch = None
if down_pivot != pivot and down_pivot != minrev:
down_rev = revlist[down_pivot]
down_fetch = DownloadJob(context, 'down_fetch', down_rev,
_GetDownloadPath(down_rev))
down_fetch.Start()
up_pivot = int((maxrev - pivot) / 2) + pivot
up_fetch = None
if up_pivot != pivot and up_pivot != maxrev:
up_rev = revlist[up_pivot]
up_fetch = DownloadJob(context, 'up_fetch', up_rev,
_GetDownloadPath(up_rev))
up_fetch.Start()
# Run test on the pivot revision.
status = None
stdout = None
stderr = None
try:
(status, stdout, stderr) = RunRevision(context,
rev,
fetch.zip_file,
profile,
num_runs,
command,
try_args)
except Exception, e:
print >> sys.stderr, e
# Call the evaluate function to see if the current revision is good or bad.
# On that basis, kill one of the background downloads and complete the
# other, as described in the comments above.
try:
if not interactive:
if status:
answer = 'b'
print 'Bad revision: %s' % rev
else:
answer = 'g'
print 'Good revision: %s' % rev
else:
answer = evaluate(rev, context.is_official, status, stdout, stderr)
if ((answer == 'g' and good_rev < bad_rev)
or (answer == 'b' and bad_rev < good_rev)):
fetch.Stop()
minrev = pivot
if down_fetch:
down_fetch.Stop() # Kill the download of the older revision.
fetch = None
if up_fetch:
up_fetch.WaitFor()
pivot = up_pivot
fetch = up_fetch
elif ((answer == 'b' and good_rev < bad_rev)
or (answer == 'g' and bad_rev < good_rev)):
fetch.Stop()
maxrev = pivot
if up_fetch:
up_fetch.Stop() # Kill the download of the newer revision.
fetch = None
if down_fetch:
down_fetch.WaitFor()
pivot = down_pivot
fetch = down_fetch
elif answer == 'r':
pass # Retry requires no changes.
elif answer == 'u':
# Nuke the revision from the revlist and choose a new pivot.
fetch.Stop()
revlist.pop(pivot)
maxrev -= 1 # Assumes maxrev >= pivot.
if maxrev - minrev > 1:
# Alternate between using down_pivot or up_pivot for the new pivot
# point, without affecting the range. Do this instead of setting the
# pivot to the midpoint of the new range because adjacent revisions
# are likely affected by the same issue that caused the (u)nknown
# response.
if up_fetch and down_fetch:
fetch = [up_fetch, down_fetch][len(revlist) % 2]
elif up_fetch:
fetch = up_fetch
else:
fetch = down_fetch
fetch.WaitFor()
if fetch == up_fetch:
pivot = up_pivot - 1 # Subtracts 1 because revlist was resized.
else:
pivot = down_pivot
zip_file = fetch.zip_file
if down_fetch and fetch != down_fetch:
down_fetch.Stop()
if up_fetch and fetch != up_fetch:
up_fetch.Stop()
else:
assert False, 'Unexpected return value from evaluate(): ' + answer
except SystemExit:
print 'Cleaning up...'
for f in [_GetDownloadPath(revlist[down_pivot]),
_GetDownloadPath(revlist[up_pivot])]:
try:
os.unlink(f)
except OSError:
pass
sys.exit(0)
rev = revlist[pivot]
return (revlist[minrev], revlist[maxrev], context)
def GetBlinkDEPSRevisionForChromiumRevision(self, rev):
"""Returns the blink revision that was in REVISIONS file at
chromium revision |rev|."""
def _GetBlinkRev(url, blink_re):
m = blink_re.search(url.read())
url.close()
if m:
return m.group(1)
url = urllib.urlopen(DEPS_FILE_OLD % rev)
if url.getcode() == 200:
# . doesn't match newlines without re.DOTALL, so this is safe.
blink_re = re.compile(r'webkit_revision\D*(\d+)')
return int(_GetBlinkRev(url, blink_re))
else:
url = urllib.urlopen(DEPS_FILE_NEW % GetGitHashFromSVNRevision(rev))
if url.getcode() == 200:
blink_re = re.compile(r'webkit_revision\D*\d+;\D*\d+;(\w+)')
blink_git_sha = _GetBlinkRev(url, blink_re)
return self.GetSVNRevisionFromGitHash(blink_git_sha, 'blink')
raise Exception('Could not get Blink revision for Chromium rev %d' % rev)
def GetBlinkRevisionForChromiumRevision(context, rev):
"""Returns the blink revision that was in REVISIONS file at
chromium revision |rev|."""
def _IsRevisionNumber(revision):
if isinstance(revision, int):
return True
else:
return revision.isdigit()
if str(rev) in context.githash_svn_dict:
rev = context.githash_svn_dict[str(rev)]
file_url = '%s/%s%s/REVISIONS' % (context.base_url,
context._listing_platform_dir, rev)
url = urllib.urlopen(file_url)
if url.getcode() == 200:
try:
data = json.loads(url.read())
except ValueError:
print 'ValueError for JSON URL: %s' % file_url
raise ValueError
else:
raise ValueError
url.close()
if 'webkit_revision' in data:
blink_rev = data['webkit_revision']
if not _IsRevisionNumber(blink_rev):
blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink'))
return blink_rev
else:
raise Exception('Could not get blink revision for cr rev %d' % rev)
def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
"""Returns the chromium revision that has the correct blink revision
for blink bisect, DEPS and REVISIONS file might not match since
blink snapshots point to tip of tree blink.
Note: The revisions_final variable might get modified to include
additional revisions."""
blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(self, rev)
while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
idx = revisions.index(rev)
if idx > 0:
rev = revisions[idx-1]
if rev not in revisions_final:
revisions_final.insert(0, rev)
revisions_final.sort()
return rev
def GetChromiumRevision(context, url):
"""Returns the chromium revision read from given URL."""
try:
# Location of the latest build revision number
latest_revision = urllib.urlopen(url).read()
if latest_revision.isdigit():
return int(latest_revision)
return context.GetSVNRevisionFromGitHash(latest_revision)
except Exception:
print 'Could not determine latest revision. This could be bad...'
return 999999999
def GetGitHashFromSVNRevision(svn_revision):
crrev_url = CRREV_URL + str(svn_revision)
url = urllib.urlopen(crrev_url)
if url.getcode() == 200:
data = json.loads(url.read())
if 'git_sha' in data:
return data['git_sha']
def PrintChangeLog(min_chromium_rev, max_chromium_rev):
"""Prints the changelog URL."""
print (' ' + CHANGELOG_URL % (GetGitHashFromSVNRevision(min_chromium_rev),
GetGitHashFromSVNRevision(max_chromium_rev)))
def main():
usage = ('%prog [options] [-- chromium-options]\n'
'Perform binary search on the snapshot builds to find a minimal\n'
'range of revisions where a behavior change happened. The\n'
'behaviors are described as "good" and "bad".\n'
'It is NOT assumed that the behavior of the later revision is\n'
'the bad one.\n'
'\n'
'Revision numbers should use\n'
' Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
' SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
' Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
' for earlier revs.\n'
           ' Chrome\'s about: build number and omahaproxy branch_revision\n'
           ' are incorrect; they are from branches.\n'
'\n'
'Tip: add "-- --no-first-run" to bypass the first run prompts.')
parser = optparse.OptionParser(usage=usage)
# Strangely, the default help output doesn't include the choice list.
choices = ['mac', 'mac64', 'win', 'win64', 'linux', 'linux64', 'linux-arm',
'android-arm', 'android-arm-64', 'android-x86', 'android-x86-64',
'chromeos']
apk_choices = ['Chrome.apk', 'ChromeBeta.apk', 'ChromeCanary.apk',
'ChromeDev.apk', 'ChromeStable.apk']
parser.add_option('-a', '--archive',
choices=choices,
help='The buildbot archive to bisect [%s].' %
'|'.join(choices))
parser.add_option('-o',
action='store_true',
dest='official_builds',
help='Bisect across official Chrome builds (internal '
'only) instead of Chromium archives.')
parser.add_option('-b', '--bad',
type='str',
help='A bad revision to start bisection. '
'May be earlier or later than the good revision. '
'Default is HEAD.')
parser.add_option('-f', '--flash_path',
type='str',
help='Absolute path to a recent Adobe Pepper Flash '
'binary to be used in this bisection (e.g. '
'on Windows C:\...\pepflashplayer.dll and on Linux '
'/opt/google/chrome/PepperFlash/'
'libpepflashplayer.so).')
parser.add_option('-d', '--pdf_path',
type='str',
help='Absolute path to a recent PDF plugin '
'binary to be used in this bisection (e.g. '
'on Windows C:\...\pdf.dll and on Linux '
'/opt/google/chrome/libpdf.so). Option also enables '
'print preview.')
parser.add_option('-g', '--good',
type='str',
help='A good revision to start bisection. ' +
'May be earlier or later than the bad revision. ' +
'Default is 0.')
parser.add_option('-p', '--profile', '--user-data-dir',
type='str',
default='profile',
help='Profile to use; this will not reset every run. '
'Defaults to a clean profile.')
parser.add_option('-t', '--times',
type='int',
default=1,
help='Number of times to run each build before asking '
'if it\'s good or bad. Temporary profiles are reused.')
parser.add_option('-c', '--command',
type='str',
default='%p %a',
help='Command to execute. %p and %a refer to Chrome '
'executable and specified extra arguments '
'respectively. Use %s to specify all extra arguments '
'as one string. Defaults to "%p %a". Note that any '
'extra paths specified should be absolute.')
parser.add_option('-l', '--blink',
action='store_true',
help='Use Blink bisect instead of Chromium. ')
parser.add_option('', '--not-interactive',
action='store_true',
default=False,
help='Use command exit code to tell good/bad revision.')
parser.add_option('--asan',
dest='asan',
action='store_true',
default=False,
help='Allow the script to bisect ASAN builds')
parser.add_option('--use-local-cache',
dest='use_local_cache',
action='store_true',
default=False,
help='Use a local file in the current directory to cache '
'a list of known revisions to speed up the '
'initialization of this script.')
parser.add_option('--adb-path',
dest='adb_path',
help='Absolute path to adb. If you do not have adb in your '
                         'environment PATH and want to bisect Android then '
'you need to specify the path here.')
parser.add_option('--apk',
dest='apk',
choices=apk_choices,
help='Name of apk you want to bisect. [%s]' %
'|'.join(apk_choices))
(opts, args) = parser.parse_args()
if opts.archive is None:
print 'Error: missing required parameter: --archive'
print
parser.print_help()
return 1
if opts.asan:
supported_platforms = ['linux', 'mac', 'win']
if opts.archive not in supported_platforms:
print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
'|'.join(supported_platforms))
return 1
if opts.official_builds:
print 'Error: Do not yet support bisecting official ASAN builds.'
return 1
if opts.asan:
base_url = ASAN_BASE_URL
elif opts.blink:
base_url = WEBKIT_BASE_URL
else:
base_url = CHROMIUM_BASE_URL
  # Create the context. Good and bad revisions may be None here; they are
  # normalized below.
context = PathContext(base_url, opts.archive, opts.good, opts.bad,
opts.official_builds, opts.asan, opts.use_local_cache,
opts.flash_path, opts.pdf_path, opts.apk)
if context.is_android and not opts.official_builds:
if (context.platform != 'android-arm' or
context.android_apk != 'Chrome.apk'):
sys.exit('For non-official builds, can only bisect'
' Chrome.apk arm builds.')
# If bisecting Android, we make sure we have ADB setup.
if context.is_android:
if opts.adb_path:
os.environ['PATH'] = '%s:%s' % (os.path.dirname(opts.adb_path),
os.environ['PATH'])
if not IsADBInstalled():
sys.exit('Please have "adb" in PATH or use adb_path command line option'
'to bisect Android builds.')
# Pick a starting point, try to get HEAD for this.
if not opts.bad:
context.bad_revision = '999.0.0.0'
context.bad_revision = GetChromiumRevision(
context, context.GetLastChangeURL())
# Find out when we were good.
if not opts.good:
context.good_revision = '0.0.0.0' if opts.official_builds else 0
if opts.flash_path:
msg = 'Could not find Flash binary at %s' % opts.flash_path
assert os.path.exists(opts.flash_path), msg
if opts.pdf_path:
msg = 'Could not find PDF binary at %s' % opts.pdf_path
assert os.path.exists(opts.pdf_path), msg
if opts.official_builds:
context.good_revision = LooseVersion(context.good_revision)
context.bad_revision = LooseVersion(context.bad_revision)
elif context.is_android:
# Revisions are git hashes and should be left as strings.
pass
else:
context.good_revision = int(context.good_revision)
context.bad_revision = int(context.bad_revision)
if opts.times < 1:
print('Number of times to run (%d) must be greater than or equal to 1.' %
opts.times)
parser.print_help()
return 1
if opts.asan:
evaluator = IsGoodASANBuild
else:
evaluator = AskIsGoodBuild
# Save these revision numbers to compare when showing the changelog URL
# after the bisect.
good_rev = context.good_revision
bad_rev = context.bad_revision
(min_chromium_rev, max_chromium_rev, context) = Bisect(
context, opts.times, opts.command, args, opts.profile,
not opts.not_interactive, evaluator)
# Get corresponding blink revisions.
try:
min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
min_chromium_rev)
max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
max_chromium_rev)
except Exception:
# Silently ignore the failure.
min_blink_rev, max_blink_rev = 0, 0
if opts.blink:
# We're done. Let the user know the results in an official manner.
if good_rev > bad_rev:
print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
else:
print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))
print 'BLINK CHANGELOG URL:'
print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)
else:
# We're done. Let the user know the results in an official manner.
if good_rev > bad_rev:
print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
str(max_chromium_rev))
else:
print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
str(max_chromium_rev))
if min_blink_rev != max_blink_rev:
print ('NOTE: There is a Blink roll in the range, '
'you might also want to do a Blink bisect.')
print 'CHANGELOG URL:'
if opts.official_builds:
print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
else:
PrintChangeLog(min_chromium_rev, max_chromium_rev)
if __name__ == '__main__':
sys.exit(main())
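# A hypothetical example invocation (the revision numbers are made up):
#   python bisect-builds.py -a linux64 -g 300000 -b 310000 -- --no-first-run
# This bisects Chromium snapshot builds between r300000 (good) and r310000
# (bad), passing --no-first-run through to each launched build.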
|
import os
import tempfile
import base64
import traceback
from OpenSSL import crypto
import M2Crypto
from M2Crypto import X509
from tempfile import mkstemp
from sfa.util.sfalogging import logger
from sfa.util.namespace import urn_to_hrn
from sfa.util.faults import *
def convert_public_key(key):
keyconvert_path = "/usr/bin/keyconvert.py"
if not os.path.isfile(keyconvert_path):
raise IOError, "Could not find keyconvert in %s" % keyconvert_path
# we can only convert rsa keys
if "ssh-dss" in key:
return None
(ssh_f, ssh_fn) = tempfile.mkstemp()
ssl_fn = tempfile.mktemp()
os.write(ssh_f, key)
os.close(ssh_f)
cmd = keyconvert_path + " " + ssh_fn + " " + ssl_fn
os.system(cmd)
    # this check leaves the temporary file containing the public key so
    # that it can be inspected to see why it failed.
    # TODO: for production, clean up the temporary files
if not os.path.exists(ssl_fn):
return None
k = Keypair()
try:
k.load_pubkey_from_file(ssl_fn)
except:
traceback.print_exc()
k = None
# remove the temporary files
os.remove(ssh_fn)
os.remove(ssl_fn)
return k
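# A minimal usage sketch for convert_public_key (the key path below is
# hypothetical): pass in an OpenSSH RSA public key line and get back a
# Keypair wrapping the converted key, or None for DSA/unconvertible keys.
#   pub_line = open('/root/.ssh/id_rsa.pub').read()
#   keypair = convert_public_key(pub_line)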
class Keypair:
key = None # public/private keypair
m2key = None # public key (m2crypto format)
##
# Creates a Keypair object
# @param create If create==True, creates a new public/private key and
# stores it in the object
# @param string If string!=None, load the keypair from the string (PEM)
# @param filename If filename!=None, load the keypair from the file
def __init__(self, create=False, string=None, filename=None):
if create:
self.create()
if string:
self.load_from_string(string)
if filename:
self.load_from_file(filename)
##
    # Create an RSA public/private key pair and store it inside the keypair object
def create(self):
self.key = crypto.PKey()
self.key.generate_key(crypto.TYPE_RSA, 1024)
##
# Save the private key to a file
# @param filename name of file to store the keypair in
def save_to_file(self, filename):
open(filename, 'w').write(self.as_pem())
##
    # Load the private key from a file. Implicitly the private key includes the public key.
def load_from_file(self, filename):
buffer = open(filename, 'r').read()
self.load_from_string(buffer)
##
# Load the private key from a string. Implicitly the private key includes the public key.
def load_from_string(self, string):
self.key = crypto.load_privatekey(crypto.FILETYPE_PEM, string)
self.m2key = M2Crypto.EVP.load_key_string(string)
##
# Load the public key from a string. No private key is loaded.
def load_pubkey_from_file(self, filename):
# load the m2 public key
m2rsakey = M2Crypto.RSA.load_pub_key(filename)
self.m2key = M2Crypto.EVP.PKey()
self.m2key.assign_rsa(m2rsakey)
# create an m2 x509 cert
m2name = M2Crypto.X509.X509_Name()
m2name.add_entry_by_txt(field="CN", type=0x1001, entry="junk", len=-1, loc=-1, set=0)
m2x509 = M2Crypto.X509.X509()
m2x509.set_pubkey(self.m2key)
m2x509.set_serial_number(0)
m2x509.set_issuer_name(m2name)
m2x509.set_subject_name(m2name)
ASN1 = M2Crypto.ASN1.ASN1_UTCTIME()
ASN1.set_time(500)
m2x509.set_not_before(ASN1)
m2x509.set_not_after(ASN1)
junk_key = Keypair(create=True)
m2x509.sign(pkey=junk_key.get_m2_pkey(), md="sha1")
# convert the m2 x509 cert to a pyopenssl x509
m2pem = m2x509.as_pem()
pyx509 = crypto.load_certificate(crypto.FILETYPE_PEM, m2pem)
# get the pyopenssl pkey from the pyopenssl x509
self.key = pyx509.get_pubkey()
##
# Load the public key from a string. No private key is loaded.
def load_pubkey_from_string(self, string):
(f, fn) = tempfile.mkstemp()
os.write(f, string)
os.close(f)
self.load_pubkey_from_file(fn)
os.remove(fn)
##
# Return the private key in PEM format.
def as_pem(self):
return crypto.dump_privatekey(crypto.FILETYPE_PEM, self.key)
##
# Return an M2Crypto key object
def get_m2_pkey(self):
if not self.m2key:
self.m2key = M2Crypto.EVP.load_key_string(self.as_pem())
return self.m2key
##
# Returns a string containing the public key represented by this object.
def get_pubkey_string(self):
m2pkey = self.get_m2_pkey()
return base64.b64encode(m2pkey.as_der())
##
# Return an OpenSSL pkey object
def get_openssl_pkey(self):
return self.key
##
# Given another Keypair object, return TRUE if the two keys are the same.
def is_same(self, pkey):
return self.as_pem() == pkey.as_pem()
def sign_string(self, data):
k = self.get_m2_pkey()
k.sign_init()
k.sign_update(data)
return base64.b64encode(k.sign_final())
def verify_string(self, data, sig):
k = self.get_m2_pkey()
k.verify_init()
k.verify_update(data)
return M2Crypto.m2.verify_final(k.ctx, base64.b64decode(sig), k.pkey)
def compute_hash(self, value):
return self.sign_string(str(value))
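# A minimal sketch (not part of the original API) of the intended Keypair
# flow: create a key, sign a string, verify the signature, and compare two
# objects wrapping the same PEM.
def _example_keypair_roundtrip():
    k = Keypair(create=True)
    signature = k.sign_string("hello world")
    assert k.verify_string("hello world", signature)
    # Loading the private PEM back produces an equivalent keypair.
    k2 = Keypair(string=k.as_pem())
    assert k.is_same(k2)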
class Certificate:
digest = "md5"
cert = None
issuerKey = None
issuerSubject = None
parent = None
separator="-----parent-----"
##
# Create a certificate object.
#
# @param create If create==True, then also create a blank X509 certificate.
# @param subject If subject!=None, then create a blank certificate and set
# it's subject name.
    # @param string If string!=None, load the certificate from the string.
    # @param filename If filename!=None, load the certificate from the file.
def __init__(self, create=False, subject=None, string=None, filename=None, intermediate=None):
self.data = {}
if create or subject:
self.create()
if subject:
self.set_subject(subject)
if string:
self.load_from_string(string)
if filename:
self.load_from_file(filename)
if intermediate:
self.set_intermediate_ca(intermediate)
##
# Create a blank X509 certificate and store it in this object.
def create(self):
self.cert = crypto.X509()
self.cert.set_serial_number(3)
self.cert.gmtime_adj_notBefore(0)
self.cert.gmtime_adj_notAfter(60*60*24*365*5) # five years
##
# Given a pyOpenSSL X509 object, store that object inside of this
# certificate object.
def load_from_pyopenssl_x509(self, x509):
self.cert = x509
##
# Load the certificate from a string
def load_from_string(self, string):
# if it is a chain of multiple certs, then split off the first one and
# load it (support for the ---parent--- tag as well as normal chained certs)
string = string.strip()
# if the string has no BEGIN C... then wrap in begin/end
        # old behavior was to wrap if it didn't _start_ with BEGIN
# if the string does not start with BEGIN
# then ignore everything before the begin
if string.count('-----BEGIN CERTIFICATE') == 0:
string = '-----BEGIN CERTIFICATE-----\n%s\n-----END CERTIFICATE-----' % string
logger.debug("Wrapping string for cert in BEGIN/END")
beg = string.find('-----BEGIN CERTIFICATE')
if beg > 0:
# skipping over non cert beginning
logger.debug("Skipping non PEM start of cert from string ('%s ...\n... %s'). Skipping to char #%d", string[:25], string[beg-15:beg], beg)
string = string[beg:]
parts = []
if string.count('-----BEGIN CERTIFICATE-----') > 1 and \
string.count(Certificate.separator) == 0:
parts = string.split('-----END CERTIFICATE-----',1)
parts[0] += '-----END CERTIFICATE-----'
else:
parts = string.split(Certificate.separator, 1)
self.cert = crypto.load_certificate(crypto.FILETYPE_PEM, parts[0])
# if there are more certs, then create a parent and let the parent load
# itself from the remainder of the string
if len(parts) > 1 and parts[1] != '':
self.parent = self.__class__()
self.parent.load_from_string(parts[1])
##
# Load the certificate from a file
def load_from_file(self, filename):
file = open(filename)
string = file.read()
self.load_from_string(string)
##
# Save the certificate to a string.
#
# @param save_parents If save_parents==True, then also save the parent certificates.
def save_to_string(self, save_parents=True):
string = crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert)
if save_parents and self.parent:
string = string + self.parent.save_to_string(save_parents)
return string
##
# Save the certificate to a file.
# @param save_parents If save_parents==True, then also save the parent certificates.
def save_to_file(self, filename, save_parents=True, filep=None):
string = self.save_to_string(save_parents=save_parents)
if filep:
f = filep
else:
f = open(filename, 'w')
f.write(string)
f.close()
##
# Save the certificate to a random file in /tmp/
# @param save_parents If save_parents==True, then also save the parent certificates.
def save_to_random_tmp_file(self, save_parents=True):
fp, filename = mkstemp(suffix='cert', text=True)
fp = os.fdopen(fp, "w")
self.save_to_file(filename, save_parents=True, filep=fp)
return filename
##
# Sets the issuer private key and name
# @param key Keypair object containing the private key of the issuer
# @param subject String containing the name of the issuer
# @param cert (optional) Certificate object containing the name of the issuer
def set_issuer(self, key, subject=None, cert=None):
self.issuerKey = key
if subject:
# it's a mistake to use subject and cert params at the same time
assert(not cert)
if isinstance(subject, dict) or isinstance(subject, str):
req = crypto.X509Req()
reqSubject = req.get_subject()
if (isinstance(subject, dict)):
for key in reqSubject.keys():
setattr(reqSubject, key, subject[key])
else:
setattr(reqSubject, "CN", subject)
subject = reqSubject
# subject is not valid once req is out of scope, so save req
self.issuerReq = req
if cert:
# if a cert was supplied, then get the subject from the cert
subject = cert.cert.get_subject()
assert(subject)
self.issuerSubject = subject
##
# Get the issuer name
def get_issuer(self, which="CN"):
x = self.cert.get_issuer()
return getattr(x, which)
##
# Set the subject name of the certificate
def set_subject(self, name):
req = crypto.X509Req()
subj = req.get_subject()
if (isinstance(name, dict)):
for key in name.keys():
setattr(subj, key, name[key])
else:
setattr(subj, "CN", name)
self.cert.set_subject(subj)
##
# Get the subject name of the certificate
def get_subject(self, which="CN"):
x = self.cert.get_subject()
return getattr(x, which)
##
    # Set the public key of the certificate.
#
# @param key Keypair object containing the public key
def set_pubkey(self, key):
assert(isinstance(key, Keypair))
self.cert.set_pubkey(key.get_openssl_pkey())
##
# Get the public key of the certificate.
# It is returned in the form of a Keypair object.
def get_pubkey(self):
m2x509 = X509.load_cert_string(self.save_to_string())
pkey = Keypair()
pkey.key = self.cert.get_pubkey()
pkey.m2key = m2x509.get_pubkey()
return pkey
def set_intermediate_ca(self, val):
self.intermediate = val
if val:
self.add_extension('basicConstraints', 1, 'CA:TRUE')
##
# Add an X509 extension to the certificate. Add_extension can only be called
# once for a particular extension name, due to limitations in the underlying
# library.
#
# @param name string containing name of extension
# @param value string containing value of the extension
def add_extension(self, name, critical, value):
ext = crypto.X509Extension (name, critical, value)
self.cert.add_extensions([ext])
##
# Get an X509 extension from the certificate
def get_extension(self, name):
# pyOpenSSL does not have a way to get extensions
m2x509 = X509.load_cert_string(self.save_to_string())
value = m2x509.get_ext(name).get_value()
return value
##
    # Set_data is a wrapper around add_extension. It stores the parameter
    # string in the X509 subject_alt_name extension. Set_data can only be
    # called once, due to limitations in the underlying library.
    def set_data(self, string, field='subjectAltName'):
        # pyOpenSSL only allows us to add extensions, so if we try to set the
        # same extension more than once, it will not work
        if field in self.data:
            raise Exception("Cannot set %s more than once" % field)
        self.data[field] = string
        self.add_extension(field, 0, string)
##
# Return the data string that was previously set with set_data
def get_data(self, field='subjectAltName'):
        if field in self.data:
return self.data[field]
try:
uri = self.get_extension(field)
self.data[field] = uri
except LookupError:
return None
return self.data[field]
##
    # Sign the certificate using the issuer private key and issuer subject previously set with set_issuer().
def sign(self):
assert self.cert != None
assert self.issuerSubject != None
assert self.issuerKey != None
self.cert.set_issuer(self.issuerSubject)
self.cert.sign(self.issuerKey.get_openssl_pkey(), self.digest)
##
# Verify the authenticity of a certificate.
    # @param pkey is a Keypair object representing a public key. If pkey
    # did not sign the certificate, then an exception will be thrown.
def verify(self, pkey):
# pyOpenSSL does not have a way to verify signatures
m2x509 = X509.load_cert_string(self.save_to_string())
m2pkey = pkey.get_m2_pkey()
# verify it
return m2x509.verify(m2pkey)
# XXX alternatively, if openssl has been patched, do the much simpler:
# try:
# self.cert.verify(pkey.get_openssl_key())
# return 1
# except:
# return 0
##
# Return True if pkey is identical to the public key that is contained in the certificate.
# @param pkey Keypair object
def is_pubkey(self, pkey):
return self.get_pubkey().is_same(pkey)
##
# Given a certificate cert, verify that this certificate was signed by the
# public key contained in cert. Throw an exception otherwise.
#
# @param cert certificate object
def is_signed_by_cert(self, cert):
k = cert.get_pubkey()
result = self.verify(k)
return result
##
    # Set the parent certificate.
#
# @param p certificate object.
def set_parent(self, p):
self.parent = p
##
# Return the certificate object of the parent of this certificate.
def get_parent(self):
return self.parent
##
# Verification examines a chain of certificates to ensure that each parent
# signs the child, and that some certificate in the chain is signed by a
# trusted certificate.
#
# Verification is a basic recursion: <pre>
# if this_certificate was signed by trusted_certs:
# return
# else
# return verify_chain(parent, trusted_certs)
# </pre>
#
# At each recursion, the parent is tested to ensure that it did sign the
# child. If a parent did not sign a child, then an exception is thrown. If
# the bottom of the recursion is reached and the certificate does not match
# a trusted root, then an exception is thrown.
#
# @param Trusted_certs is a list of certificates that are trusted.
#
def verify_chain(self, trusted_certs = None):
        # Verify a chain of certificates. Each certificate must be signed by
        # the public key contained in its parent. The chain is walked
        # recursively until a certificate is found that is signed by a
        # trusted root.
# verify expiration time
if self.cert.has_expired():
raise CertExpired(self.get_subject(), "client cert")
# if this cert is signed by a trusted_cert, then we are set
for trusted_cert in trusted_certs:
if self.is_signed_by_cert(trusted_cert):
logger.debug("Cert %s signed by trusted cert %s", self.get_subject(), trusted_cert.get_subject())
# verify expiration of trusted_cert ?
if not trusted_cert.cert.has_expired():
return trusted_cert
else:
logger.debug("Trusted cert %s is expired", trusted_cert.get_subject())
# if there is no parent, then no way to verify the chain
if not self.parent:
#print self.get_subject(), "has no parent"
raise CertMissingParent(self.get_subject())
# if it wasn't signed by the parent...
if not self.is_signed_by_cert(self.parent):
#print self.get_subject(), "is not signed by parent"
            raise CertNotSignedByParent(self.get_subject())
# if the parent isn't verified...
self.parent.verify_chain(trusted_certs)
return
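# A minimal sketch (not part of the original API) of the recursion documented
# above: a self-signed root issues a child certificate; the child names the
# root as its parent, and verify_chain() succeeds once it reaches the trusted
# root (it raises CertMissingParent/CertNotSignedByParent on failure).
def _example_verify_chain():
    root_key = Keypair(create=True)
    root = Certificate(subject="root")
    root.set_pubkey(root_key)
    root.set_issuer(root_key, "root")   # self-signed
    root.sign()
    child_key = Keypair(create=True)
    child = Certificate(subject="child")
    child.set_pubkey(child_key)
    child.set_issuer(root_key, "root")  # signed by the root's key
    child.sign()
    child.set_parent(root)
    child.verify_chain([root])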
|
from unittest import TestCase, expectedFailure
class TuringTests(TestCase):
def setUp(self):
from chatterbot import ChatBot
self.chatbot = ChatBot('Agent Jr.')
@expectedFailure
def test_ask_name(self):
response = self.chatbot.get_response(
'What is your name?'
)
self.assertIn('Agent', response.text)
@expectedFailure
def test_repeat_information(self):
"""
Test if we can detect any repeat responses from the agent.
"""
self.fail('Condition not met.')
@expectedFailure
def test_repeat_input(self):
"""
Test what the responses are like if we keep giving the same input.
"""
self.fail('Condition not met.')
@expectedFailure
def test_contradicting_responses(self):
"""
Test if we can get the agent to contradict themselves.
"""
self.fail('Condition not met.')
@expectedFailure
def test_mathematical_ability(self):
"""
The math questions inherently suggest that the agent
should get some math problems wrong in order to seem
more human. My view on this is that it is more useful
to have a bot that is good at math, which could just
as easily be a human.
"""
self.fail('Condition not met.')
@expectedFailure
def test_response_time(self):
"""
Does the agent respond in a realistic amount of time?
"""
self.fail('Condition not met.')
|
import sys
from SimpleXMLRPCServer import SimpleXMLRPCServer
class EngineRPCServer(SimpleXMLRPCServer):
def __init__(self, addr, interpreter, namespace):
SimpleXMLRPCServer.__init__(self, addr)
self.user_namespace = namespace
self._interpreter = interpreter
self.interpreter = self._interpreter(self.user_namespace)
def _dispatch(self, method, params):
try:
func = getattr(self, 'xmlrpc_' + method)
except AttributeError:
raise Exception('method %s is not supported' % method)
else:
return func(*params)
def serve_forever(self):
sys.stdout.flush()
while True:
try:
self.handle_request()
except KeyboardInterrupt:
#sys.stderr.flush()
#sys.stdout.flush()
continue
def xmlrpc_hello(self):
return 'hi'
def xmlrpc_interpreter_go(self):
self.interpreter = self._interpreter(self.user_namespace)
return 'ON'
def xmlrpc_evaluate(self, to_evaluate):
"""Evaluate code in the python interpreter.
return a dict containing:
- stdout
- stderr
- original input (source)
- number of commands source contained
"""
try:
result = self.interpreter.evaluate(to_evaluate)
except AttributeError:
            result = 'Interpreter Error: Interpreter is probably starting up.'
return result
def xmlrpc_complete(self, to_complete):
"""Search for possible completion matches of source in the
usernamespace.
return a list of matches
"""
try:
result = self.interpreter.complete(to_complete)
except AttributeError:
            result = 'Interpreter Error: Interpreter is probably starting up.'
return result
def xmlrpc_complete_name(self, to_complete):
"""Search for possible completion matches of source in the
usernamespace.
return a list of matches
"""
try:
result = self.interpreter.complete_name(to_complete)
except AttributeError:
            result = 'Interpreter Error: Interpreter is probably starting up.'
return result
def xmlrpc_complete_attr(self, to_complete):
"""Get the attributes/methods of an object (source).
return them in a list
"""
try:
result = self.interpreter.complete_attr(to_complete)
except AttributeError:
            result = 'Interpreter Error: Interpreter is probably starting up.'
return result
def xmlrpc_introspect(self, to_introspect):
"""Introspect on an object
"""
try:
result = self.interpreter.introspect(to_introspect)
except AttributeError:
            result = 'Interpreter Error: Interpreter is probably starting up.'
return result
def xmlrpc_cancel_interrupt(self):
"""Reset interpreters interrupt state
"""
result = self.interpreter.cancel_interrupt()
return result
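# Hedged client sketch (the address and port are assumptions; the server is
# constructed elsewhere with an interpreter factory and a namespace dict):
#   import xmlrpclib
#   proxy = xmlrpclib.ServerProxy('http://localhost:8000')
#   print proxy.hello()            # -> 'hi'
#   print proxy.evaluate('1 + 1')  # stdout/stderr/source/command-count dict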
|
'''
Goal: test functions in utils.py
'''
import os
import time
from app import utils
from .base_test import BaseTestCase
class UtilsTests(BaseTestCase):
def test_create_salt(self):
'''
        Verify remote address and user agent reading, and salt creation
https://realpython.com/blog/python/python-web-applications-with-flask-part-iii/
'''
# add testing request context
# http://flask.pocoo.org/docs/0.10/api/#flask.ctx.RequestContext
wsgi_env = {
'REMOTE_ADDR': os.environ.get('REMOTE_ADDR', '1.2.3.4'),
'HTTP_USER_AGENT': os.environ.get('HTTP_USER_AGENT', 'cURL')}
# from werkzeug.datastructures import Headers
# headers = Headers([('Referer', '/example/url')])
# with app.test_request_context(environ_base=wsgi_env, headers=headers)
with self.app.test_request_context(environ_base=wsgi_env):
actual_ip = utils._get_remote_addr()
actual_agent = utils._get_user_agent()
actual_hash = utils._create_salt()
self.assertEquals('1.2.3.4', actual_ip)
self.assertEquals('cURL', actual_agent)
self.assertEquals(16, len(actual_hash))
def test_generate_sha512_hmac(self):
expected = '8vhMgmofeNDCISwvPc9yB7XQiNSPZHwDVz6kuYuA7aPA43j8RQVy+xwI2+87u3Pkpvq/qiuRuDreUoSxblqGzA=='
actual = utils._generate_sha512_hmac('pepper', 'salt', 'password')
self.assertEquals(actual, expected)
def test_generate_auth(self):
wsgi_env = {
'REMOTE_ADDR': os.environ.get('REMOTE_ADDR', '1.2.3.4'),
'HTTP_USER_AGENT': os.environ.get('HTTP_USER_AGENT', 'cURL')}
with self.app.test_request_context(environ_base=wsgi_env):
salt, actual_pass = utils.generate_auth('pepper', 'password')
self.assertIsNotNone(actual_pass)
self.assertEquals(88, len(actual_pass))
def test_clean_int(self):
"""
Verify common cases
"""
cases = [
{"x": None, "exp": None},
{"x": "", "exp": None},
{"x": " ", "exp": None},
{"x": " 0x0", "exp": None},
{"x": "-1", "exp": None},
{"x": "-0.3", "exp": None},
{"x": "0.0", "exp": None},
{"x": "0", "exp": 0},
{"x": "0.3", "exp": None},
{"x": "01", "exp": 1},
{"x": "2", "exp": 2},
{"x": 3, "exp": 3},
{"x": 1.2, "exp": None},
{"x": 123, "exp": 123},
]
for case in cases:
actual = utils.clean_int(case['x'])
expected = case['exp']
self.assertEquals(actual, expected)
def test_pack(self):
self.assertEquals('{"message":"msg","status":"error"}',
utils.pack_error("msg")
.replace(' ', '').replace('\n', ''))
def test_compute_text_md5(self):
""" verify md5 generator """
text = 'text'
self.assertEquals('1cb251ec0d568de6a929b520c4aed8d1',
utils.compute_text_md5(text))
def test_get_email_token(self):
email = 'a@a.com'
salt = 'salt'
secret = 'secret'
token = utils.get_email_token(email, salt, secret)
self.assertEquals('ImFAYS5jb20i', token[0:12])
decoded = utils.get_email_from_token(token, salt, secret)
self.assertEquals(email, decoded)
time.sleep(2)
with self.assertRaises(Exception) as context:
decoded = utils.get_email_from_token(token, salt,
secret, max_age=1)
            self.assertTrue('Signature age 2 > 1 seconds' in str(context.exception))
def test_localize_datetime_none_value(self):
self.assertEquals('', utils.localize_datetime(None))
def test_localize_est_datetime_none_value(self):
self.assertEquals('', utils.localize_est_datetime(None))
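# For reference, a minimal clean_int sketch that satisfies the cases above
# (illustrative only; the real implementation lives in app.utils):
#   def clean_int(x):
#       if isinstance(x, int):
#           return x
#       if isinstance(x, basestring) and x.strip().isdigit():
#           return int(x.strip())
#       return None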
|
from datetime import datetime
from django.db import models
from django_dms.models import DocumentBase
class Document(DocumentBase):
""" Basic document entry, with a selected metadata.
"""
title = models.CharField(max_length=150, default="", blank=True)
slug = models.SlugField(unique=True)
summary = models.TextField(default="", blank=True)
date_created = models.DateTimeField("created", null=True, blank=True)
def __unicode__(self):
return self.title or self.slug
@property
def friendly_filename(self):
" A friendly filename (ie not the UUID) for the user to see when they download. "
return '%s.%s' % (self.slug, self.file_extension)
@property
def url_pattern_args(self):
" Arguments for URL patterns using this object. "
return [self.slug]
try:
import extractor
LIB_EXTRACTOR_AVAILABLE = True
except ImportError:
LIB_EXTRACTOR_AVAILABLE = False
if LIB_EXTRACTOR_AVAILABLE:
from django_dms import metadata
class DocumentMetadata(object):
model = Document
def process_title(self, value):
            return value.title() if value.isupper() else value
def process_date_created(self, value):
# TODO: This should be in the metadata engine
for pattern in ('%Y-%m-%dT%H:%M:%SZ', '%Y%m%d%H%M%S'):
try:
                # The string is trimmed to the length of the pattern, assuming
                # that the pattern is the same length as the string it matches
                # (coincidentally, it often is!).
return datetime.strptime(value[:len(pattern)], pattern)
except ValueError:
continue
return value
metadata.register(Document, title='title', file_mimetype='mimetype', date_created='creation date')
|
"""Provide a TestCase base class for PageTest subclasses' unittests."""
import unittest
from telemetry import benchmark
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
from telemetry.page import page_test
from telemetry.page import test_expectations
from telemetry.results import results_options
from telemetry.unittest_util import options_for_unittests
from telemetry.user_story import user_story_runner
class BasicTestPage(page_module.Page):
def __init__(self, url, page_set, base_dir):
super(BasicTestPage, self).__init__(url, page_set, base_dir)
def RunPageInteractions(self, action_runner):
interaction = action_runner.BeginGestureInteraction('ScrollAction')
action_runner.ScrollPage()
interaction.End()
class EmptyMetadataForTest(benchmark.BenchmarkMetadata):
def __init__(self):
super(EmptyMetadataForTest, self).__init__('')
class PageTestTestCase(unittest.TestCase):
"""A base class to simplify writing unit tests for PageTest subclasses."""
def CreatePageSetFromFileInUnittestDataDir(self, test_filename):
ps = self.CreateEmptyPageSet()
page = BasicTestPage('file://' + test_filename, ps, base_dir=ps.base_dir)
ps.AddUserStory(page)
return ps
def CreateEmptyPageSet(self):
base_dir = util.GetUnittestDataDir()
ps = page_set_module.PageSet(file_path=base_dir)
return ps
def RunMeasurement(self, measurement, ps,
expectations=test_expectations.TestExpectations(),
options=None):
"""Runs a measurement against a pageset, returning the rows its outputs."""
if options is None:
options = options_for_unittests.GetCopy()
assert options
temp_parser = options.CreateParser()
user_story_runner.AddCommandLineArgs(temp_parser)
defaults = temp_parser.get_default_values()
for k, v in defaults.__dict__.items():
if hasattr(options, k):
continue
setattr(options, k, v)
measurement.CustomizeBrowserOptions(options.browser_options)
options.output_file = None
options.output_formats = ['none']
options.suppress_gtest_report = True
options.output_trace_tag = None
user_story_runner.ProcessCommandLineArgs(temp_parser, options)
results = results_options.CreateResults(EmptyMetadataForTest(), options)
user_story_runner.Run(measurement, ps, expectations, options, results)
return results
def TestTracingCleanedUp(self, measurement_class, options=None):
ps = self.CreatePageSetFromFileInUnittestDataDir('blank.html')
start_tracing_called = [False]
stop_tracing_called = [False]
class BuggyMeasurement(measurement_class):
def __init__(self, *args, **kwargs):
measurement_class.__init__(self, *args, **kwargs)
      # Inject fake tracing methods into the tracing_controller
def TabForPage(self, page, browser):
ActualStartTracing = browser.platform.tracing_controller.Start
def FakeStartTracing(*args, **kwargs):
ActualStartTracing(*args, **kwargs)
start_tracing_called[0] = True
raise exceptions.IntentionalException
        browser.platform.tracing_controller.Start = FakeStartTracing
ActualStopTracing = browser.platform.tracing_controller.Stop
def FakeStopTracing(*args, **kwargs):
result = ActualStopTracing(*args, **kwargs)
stop_tracing_called[0] = True
return result
browser.platform.tracing_controller.Stop = FakeStopTracing
return measurement_class.TabForPage(self, page, browser)
measurement = BuggyMeasurement()
try:
self.RunMeasurement(measurement, ps, options=options)
except page_test.TestNotSupportedOnPlatformError:
pass
if start_tracing_called[0]:
self.assertTrue(stop_tracing_called[0])
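# Hedged usage sketch (MyMeasurement is an assumed PageTest subclass; the
# results attributes follow the telemetry results object of this era):
#   class MyMeasurementTest(PageTestTestCase):
#     def testBlankPage(self):
#       ps = self.CreatePageSetFromFileInUnittestDataDir('blank.html')
#       results = self.RunMeasurement(MyMeasurement(), ps)
#       self.assertFalse(results.failures)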
|
from PyQt4.QtCore import pyqtSignal, Qt, QTimer
from PyQt4.QtGui import QProgressDialog
from . import job, performer
class Progress(QProgressDialog, performer.ThreadedJobPerformer):
finished = pyqtSignal(['QString'])
def __init__(self, parent):
flags = Qt.CustomizeWindowHint | Qt.WindowTitleHint | Qt.WindowSystemMenuHint
QProgressDialog.__init__(self, '', "Cancel", 0, 100, parent, flags)
self.setModal(True)
self.setAutoReset(False)
self.setAutoClose(False)
self._timer = QTimer()
self._jobid = ''
self._timer.timeout.connect(self.updateProgress)
def updateProgress(self):
# the values might change before setValue happens
last_progress = self.last_progress
last_desc = self.last_desc
if not self._job_running or last_progress is None:
self._timer.stop()
self.close()
if not self.job_cancelled:
self.finished.emit(self._jobid)
return
if self.wasCanceled():
self.job_cancelled = True
return
if last_desc:
self.setLabelText(last_desc)
self.setValue(last_progress)
def run(self, jobid, title, target, args=()):
self._jobid = jobid
self.reset()
self.setLabelText('')
self.run_threaded(target, args)
self.setWindowTitle(title)
self.show()
self._timer.start(500)
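# Hedged usage sketch (the worker callable and its argument convention follow
# ThreadedJobPerformer.run_threaded; every name here is an assumption):
#   progress = Progress(main_window)
#   progress.finished.connect(on_job_finished)
#   progress.run('scan', "Scanning...", do_scan, args=(path,))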
|
from collections import defaultdict
from django.test import SimpleTestCase as TestCase
from corehq.apps.app_manager.commcare_settings import (
    parse_condition_string, check_condition, circular_dependencies,
    SETTINGS, SETTINGS_LOOKUP)
from corehq.apps.app_manager.models import Application
class CommCareSettingsTest(TestCase):
def set_up_apps(self, how_many):
apps = []
for _ in range(how_many):
app = Application()
app.profile["features"], app.profile["properties"] = {}, {}
apps.append(app)
return tuple(apps)
def test_parse_condition(self):
str1 = "{features.sense}='true'"
cond1 = parse_condition_string(str1)
self.assertEqual(cond1, {"type": "features", "id": "sense", "equals": "true"})
str2 = "{features.sense}=true"
cond2 = parse_condition_string(str2)
self.assertEqual(cond2, {"type": "features", "id": "sense", "equals": True})
str3 = "{properties.server-tether}='sync'"
cond3 = parse_condition_string(str3)
self.assertEqual(cond3, {"type": "properties", "id": "server-tether", "equals": "sync"})
def test_check_condition(self):
sense_condition = "{features.sense}='true'"
server_tether_condition = "{properties.server-tether}='sync'"
test_app1, test_app2, test_app3, test_app4 = self.set_up_apps(4)
test_app1.profile["features"]["sense"] = True
test_app2.profile["features"]["sense"] = False
test_app3.profile["features"]["sense"] = 'true'
test_app4.profile["properties"]["server-tether"] = 'sync'
self.assertTrue(check_condition(test_app1, sense_condition))
self.assertFalse(check_condition(test_app2, sense_condition))
self.assertTrue(check_condition(test_app3, sense_condition))
self.assertTrue(check_condition(test_app4, server_tether_condition))
def test_circular_dependencies(self):
def generate_lookup(settings):
lookup = defaultdict(lambda: defaultdict(dict))
for setting in settings:
lookup[setting['type']][setting['id']] = setting
return lookup
settings1 = [
{
'contingent_default': [
{'condition': "{features.notsense}='true'", 'value': 'Yes'},
{'condition': "{features.sense}='true'", 'value': 'Yes'},
],
'id': 'cc-login-images',
'type': 'properties',
},
{
'contingent_default': [{'condition': "{properties.server-tether}='true'", 'value': 'Yes'}],
'id': 'sense',
'type': 'features',
},
{
'contingent_default': [{'condition': "{properties.cc-login-images}='true'", 'value': 'Yes'}],
'id': 'server-tether',
'type': 'properties',
},
]
self.assertTrue(circular_dependencies(settings1, generate_lookup(settings1)))
self.assertFalse(circular_dependencies(SETTINGS, SETTINGS_LOOKUP))
def test_get_profile_setting(self):
test_app1, test_app2, test_app3 = self.set_up_apps(3)
test_app1.profile["properties"]["unsent-time-limit"] = "30"
test_app2.profile["properties"]["server-tether"] = "push-only"
test_app3.profile["properties"]["server-tether"] = "sync"
self.assertEqual(test_app1.get_profile_setting("properties", "unsent-time-limit"), "30")
self.assertEqual(test_app2.get_profile_setting("properties", "unsent-time-limit"), "-1")
self.assertEqual(test_app3.get_profile_setting("properties", "unsent-time-limit"), "5")
|
import sys
import re
if sys.argv[1] == "-":
file = sys.stdin
else:
file = open( sys.argv[1], 'r' )
int_re = re.compile("^[0-9]")
var_map = {}
party_mode = 0
could_be_local = {}
local_lines = { 1 : "", 2: "" }
neutral = {}
linenum = 0
for line in file.xreadlines():
args = line.split()
linenum += 1
if len( args ) == 0:
continue
if args[0] == ".input":
var_map[ args[1] ] = int( args[2] )
elif args[0] == ".remove":
del var_map[ args[1] ]
elif args[0][0] == ".":
continue
else:
        party_arg = []
        for arg in args[2:]:
            arg_party = 0
            if re.match(int_re, arg) != None:
                arg_party = 0
            elif not arg in var_map:
                raise Exception("cannot identify " + arg + " on line " + str(linenum))
            elif var_map[arg] == 0:
                neutral[arg] = line
            else:
                # non-neutral variables carry their owner's party number
                arg_party = var_map[arg]
            party_arg.append(arg_party)
        # A line is treated as local to a party only when all of its
        # non-constant inputs agree on that party (assumed intent; the
        # original compared the whole argument list against an int).
        nonzero = [p for p in party_arg if p != 0]
        line_party = nonzero[0] if nonzero and all(p == nonzero[0] for p in nonzero) else 0
        if line_party > 0 and line_party != party_mode:
            local_lines[line_party] += line
            could_be_local[args[0]] = line_party
        var_map[args[0]] = line_party
in_party = False
printed_locals = False
file.seek(0)
for line in file.xreadlines():
    args = line.split()
    if len(args) == 0:
        continue
    if args[0] == ".startparty":
in_party = True
elif args[0] == ".endparty":
in_party = False
elif args[0] == ".remove" and args[1] in could_be_local:
continue
elif not printed_locals and not in_party and args[0][0] != ".":
print ".startparty 1"
print local_lines[1],
print ".endparty 1"
print ".startparty 2"
print local_lines[2],
print ".endparty 2"
printed_locals = True
if not args[0] in could_be_local:
print line.strip()
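# Example input sketch (directive semantics inferred from the parser above;
# a computation line is: <output> <op> <inputs...>):
#   .input a 1        # 'a' is supplied by party 1
#   .input b 2        # 'b' is supplied by party 2
#   t1 and a b        # mixed parties, so 't1' stays global
#   t2 xor a a        # party-1-only inputs, so 't2' can be made local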
|
from __future__ import unicode_literals, division, absolute_import
import logging
import re
from datetime import datetime, timedelta
from dateutil.parser import parse as parsedate
from flexget.utils.titles.parser import TitleParser, ParseWarning
from flexget.utils import qualities
from flexget.utils.tools import ReList
log = logging.getLogger('seriesparser')
log.setLevel(logging.INFO)
ID_TYPES = ['ep', 'date', 'sequence', 'id']
class SeriesParser(TitleParser):
"""
Parse series.
:name: series name
:data: data to parse
:expect_ep: expect series to be in season, ep format (ep_regexps)
:expect_id: expect series to be in id format (id_regexps)
"""
separators = '[/ -]'
roman_numeral_re = 'X{0,3}(?:IX|XI{0,4}|VI{0,4}|IV|V|I{1,4})'
english_numbers = ['one', 'two', 'three', 'four', 'five', 'six', 'seven',
'eight', 'nine', 'ten']
# Make sure none of these are found embedded within a word or other numbers
ep_regexps = ReList([TitleParser.re_not_in_word(regexp) for regexp in [
'(?:series|season|s)\s?(\d{1,4})(?:\s(?:.*\s)?)?(?:episode|ep|e|part|pt)\s?(\d{1,3}|%s)(?:\s?e?(\d{1,2}))?' %
roman_numeral_re,
'(?:series|season)\s?(\d{1,4})\s(\d{1,3})\s?of\s?(?:\d{1,3})',
'(\d{1,2})\s?x\s?(\d+)(?:\s(\d{1,2}))?',
'(\d{1,3})\s?of\s?(?:\d{1,3})',
'(?:episode|ep|part|pt)\s?(\d{1,3}|%s)' % roman_numeral_re,
'part\s(%s)' % '|'.join(map(str, english_numbers))]])
unwanted_regexps = ReList([
'(\d{1,3})\s?x\s?(0+)[^1-9]', # 5x0
'S(\d{1,3})D(\d{1,3})', # S3D1
'(\d{1,3})\s?x\s?(all)', # 1xAll
r'(?:season(?:s)|s|series|\b)\s?\d\s?(?:&\s?\d)?[\s-]*(?:complete|full)',
'seasons\s(\d\s){2,}',
'disc\s\d'])
# Make sure none of these are found embedded within a word or other numbers
date_regexps = ReList([TitleParser.re_not_in_word(regexp) for regexp in [
'(\d{2,4})%s(\d{1,2})%s(\d{1,2})' % (separators, separators),
'(\d{1,2})%s(\d{1,2})%s(\d{2,4})' % (separators, separators),
'(\d{4})x(\d{1,2})%s(\d{1,2})' % separators]])
sequence_regexps = ReList([TitleParser.re_not_in_word(regexp) for regexp in [
'(\d{1,3})(?:v(?P<version>\d))?',
'(?:pt|part)\s?(\d+|%s)' % roman_numeral_re]])
unwanted_sequence_regexps = ReList(['seasons?\s?\d{1,2}'])
id_regexps = ReList([])
clean_regexps = ReList(['\[.*?\]', '\(.*?\)'])
# ignore prefix regexps must be passive groups with 0 or 1 occurrences eg. (?:prefix)?
ignore_prefixes = [
'(?:\[[^\[\]]*\])', # ignores group names before the name, eg [foobar] name
'(?:HD.720p?:)',
'(?:HD.1080p?:)']
def __init__(self, name='', alternate_names=None, identified_by='auto', name_regexps=None, ep_regexps=None,
date_regexps=None, sequence_regexps=None, id_regexps=None, strict_name=False, allow_groups=None,
allow_seasonless=True, date_dayfirst=None, date_yearfirst=None, special_ids=None,
prefer_specials=False, assume_special=False):
"""
Init SeriesParser.
:param string name: Name of the series parser is going to try to parse.
:param list alternate_names: Other names for this series that should be allowed.
:param string identified_by: What kind of episode numbering scheme is expected,
valid values are ep, date, sequence, id and auto (default).
:param list name_regexps: Regexps for name matching or None (default),
by default regexp is generated from name.
:param list ep_regexps: Regexps detecting episode,season format.
Given list is prioritized over built-in regexps.
:param list date_regexps: Regexps detecting date format.
Given list is prioritized over built-in regexps.
:param list sequence_regexps: Regexps detecting sequence format.
Given list is prioritized over built-in regexps.
:param list id_regexps: Custom regexps detecting id format.
Given list is prioritized over built in regexps.
        :param boolean strict_name: If True, the name must be immediately followed by the episode identifier.
        :param list allow_groups: Optionally specify list of release group names that are allowed.
            This will also populate attribute `group`.
        :param date_dayfirst: Prefer day first notation of dates when there are multiple possible interpretations.
        :param date_yearfirst: Prefer year first notation of dates when there are multiple possible interpretations.
        :param special_ids: Identifiers which will cause entry to be flagged as a special.
        :param boolean prefer_specials: If True, label entry which matches both a series identifier and a special
            identifier as a special.
        :param boolean assume_special: If True, treat an entry with no identifier found as a special.
        """
self.name = name
self.alternate_names = alternate_names or []
self.data = ''
self.identified_by = identified_by
# Stores the type of identifier found, 'ep', 'date', 'sequence' or 'special'
self.id_type = None
self.name_regexps = ReList(name_regexps or [])
self.re_from_name = False
# If custom identifier regexps were provided, prepend them to the appropriate type of built in regexps
for mode in ID_TYPES:
listname = mode + '_regexps'
if locals()[listname]:
setattr(self, listname, ReList(locals()[listname] + getattr(SeriesParser, listname)))
self.specials = self.specials + [i.lower() for i in (special_ids or [])]
self.prefer_specials = prefer_specials
self.assume_special = assume_special
self.strict_name = strict_name
self.allow_groups = allow_groups or []
self.allow_seasonless = allow_seasonless
self.date_dayfirst = date_dayfirst
self.date_yearfirst = date_yearfirst
self.field = None
self._reset()
def _reset(self):
# parse produces these
self.season = None
self.episode = None
self.episodes = 1
self.id = None
self.id_type = None
self.id_groups = None
self.quality = None
self.proper_count = 0
self.special = False
# TODO: group is only produced with allow_groups
self.group = None
# false if item does not match series
self.valid = False
def __setattr__(self, name, value):
"""
Some conversions when setting attributes.
`self.name` and `self.data` are converted to unicode.
"""
if name == 'name' or name == 'data':
if isinstance(value, str):
value = unicode(value)
elif not isinstance(value, unicode):
raise Exception('%s cannot be %s' % (name, repr(value)))
object.__setattr__(self, name, value)
def remove_dirt(self, data):
"""Replaces some characters with spaces"""
return re.sub(r'[_.,\[\]\(\): ]+', ' ', data).strip().lower()
def name_to_re(self, name):
"""Convert 'foo bar' to '^[^...]*foo[^...]*bar[^...]+"""
parenthetical = None
if name.endswith(')'):
p_start = name.rfind('(')
if p_start != -1:
parenthetical = re.escape(name[p_start + 1:-1])
name = name[:p_start - 1]
# Blanks are any non word characters except & and _
blank = r'(?:[^\w&]|_)'
ignore = '(?:' + '|'.join(self.ignore_prefixes) + ')?'
res = re.sub(re.compile(blank + '+', re.UNICODE), ' ', name)
res = res.strip()
# accept either '&' or 'and'
        # re.sub's fourth positional argument is `count`, so flags must be
        # passed by keyword
        res = re.sub(' (&|and) ', ' (?:and|&) ', res, flags=re.UNICODE)
        res = re.sub(' +', blank + '*', res, flags=re.UNICODE)
if parenthetical:
res += '(?:' + blank + '+' + parenthetical + ')?'
# Turn on exact mode for series ending with a parenthetical,
# so that 'Show (US)' is not accepted as 'Show (UK)'
self.strict_name = True
res = '^' + ignore + blank + '*' + '(' + res + ')(?:\\b|_)' + blank + '*'
return res
def parse(self, data=None, field=None, quality=None):
# Clear the output variables before parsing
self._reset()
self.field = field
if quality:
self.quality = quality
if data:
self.data = data
if not self.name or not self.data:
raise Exception('SeriesParser initialization error, name: %s data: %s' %
(repr(self.name), repr(self.data)))
# check if data appears to be unwanted (abort)
if self.parse_unwanted(self.remove_dirt(self.data)):
raise ParseWarning('`{data}` appears to be an episode pack'.format(data=self.data))
name = self.remove_dirt(self.name)
log.debug('name: %s data: %s', name, self.data)
# name end position
name_start = 0
name_end = 0
# regexp name matching
if not self.name_regexps:
# if we don't have name_regexps, generate one from the name
self.name_regexps = ReList(self.name_to_re(name) for name in [self.name] + self.alternate_names)
# With auto regex generation, the first regex group captures the name
self.re_from_name = True
# try all specified regexps on this data
for name_re in self.name_regexps:
match = re.search(name_re, self.data)
if match:
match_start, match_end = match.span(1 if self.re_from_name else 0)
# Always pick the longest matching regex
if match_end > name_end:
name_start, name_end = match_start, match_end
log.debug('NAME SUCCESS: %s matched to %s', name_re.pattern, self.data)
if not name_end:
# leave this invalid
log.debug('FAIL: name regexps %s do not match %s',
[regexp.pattern for regexp in self.name_regexps], self.data)
return
# remove series name from raw data, move any prefix to end of string
data_stripped = self.data[name_end:] + ' ' + self.data[:name_start]
data_stripped = data_stripped.lower()
log.debug('data stripped: %s', data_stripped)
# allow group(s)
if self.allow_groups:
for group in self.allow_groups:
group = group.lower()
for fmt in ['[%s]', '-%s']:
if fmt % group in data_stripped:
log.debug('%s is from group %s', self.data, group)
self.group = group
data_stripped = data_stripped.replace(fmt % group, '')
break
if self.group:
break
else:
log.debug('%s is not from groups %s', self.data, self.allow_groups)
return # leave invalid
# Find quality and clean from data
log.debug('parsing quality ->')
quality = qualities.Quality(data_stripped)
if quality:
# Remove quality string from data
log.debug('quality detected, using remaining data `%s`', quality.clean_text)
data_stripped = quality.clean_text
# Don't override passed in quality
if not self.quality:
self.quality = quality
# Remove unwanted words from data for ep / id parsing
data_stripped = self.remove_words(data_stripped, self.remove, not_in_word=True)
data_parts = re.split('[\W_]+', data_stripped)
for part in data_parts[:]:
if part in self.propers:
self.proper_count += 1
data_parts.remove(part)
elif part == 'fastsub':
# Subtract 5 to leave room for fastsub propers before the normal release
self.proper_count -= 5
data_parts.remove(part)
elif part in self.specials:
self.special = True
data_parts.remove(part)
data_stripped = ' '.join(data_parts).strip()
log.debug("data for date/ep/id parsing '%s'", data_stripped)
# Try date mode before ep mode
if self.identified_by in ['date', 'auto']:
date_match = self.parse_date(data_stripped)
if date_match:
if self.strict_name:
if date_match['match'].start() > 1:
return
self.id = date_match['date']
self.id_groups = date_match['match'].groups()
self.id_type = 'date'
self.valid = True
if not (self.special and self.prefer_specials):
return
else:
log.debug('-> no luck with date_regexps')
if self.identified_by in ['ep', 'auto'] and not self.valid:
ep_match = self.parse_episode(data_stripped)
if ep_match:
# strict_name
if self.strict_name:
if ep_match['match'].start() > 1:
return
if ep_match['end_episode'] > ep_match['episode'] + 2:
# This is a pack of too many episodes, ignore it.
log.debug('Series pack contains too many episodes (%d). Rejecting',
ep_match['end_episode'] - ep_match['episode'])
return
self.season = ep_match['season']
self.episode = ep_match['episode']
if ep_match['end_episode']:
self.episodes = (ep_match['end_episode'] - ep_match['episode']) + 1
else:
self.episodes = 1
self.id_type = 'ep'
self.valid = True
if not (self.special and self.prefer_specials):
return
else:
log.debug('-> no luck with ep_regexps')
if self.identified_by == 'ep':
# we should be getting season, ep !
# try to look up idiotic numbering scheme 101,102,103,201,202
# ressu: Added matching for 0101, 0102... It will fail on
# season 11 though
log.debug('expect_ep enabled')
match = re.search(self.re_not_in_word(r'(0?\d)(\d\d)'), data_stripped, re.IGNORECASE | re.UNICODE)
if match:
# strict_name
if self.strict_name:
if match.start() > 1:
return
self.season = int(match.group(1))
self.episode = int(match.group(2))
log.debug(self)
self.id_type = 'ep'
self.valid = True
return
else:
log.debug('-> no luck with the expect_ep')
# Check id regexps
if self.identified_by in ['id', 'auto'] and not self.valid:
for id_re in self.id_regexps:
match = re.search(id_re, data_stripped)
if match:
# strict_name
if self.strict_name:
if match.start() > 1:
return
found_id = '-'.join(g for g in match.groups() if g)
if not found_id:
# If match groups were all blank, don't accept this match
continue
self.id = found_id
self.id_type = 'id'
self.valid = True
log.debug('found id \'%s\' with regexp \'%s\'', self.id, id_re.pattern)
if not (self.special and self.prefer_specials):
return
else:
break
else:
log.debug('-> no luck with id_regexps')
# Other modes are done, check for unwanted sequence ids
if self.parse_unwanted_sequence(data_stripped):
return
# Check sequences last as they contain the broadest matches
if self.identified_by in ['sequence', 'auto'] and not self.valid:
for sequence_re in self.sequence_regexps:
match = re.search(sequence_re, data_stripped)
if match:
# strict_name
if self.strict_name:
if match.start() > 1:
return
# First matching group is the sequence number
try:
self.id = int(match.group(1))
except ValueError:
self.id = self.roman_to_int(match.group(1))
self.season = 0
self.episode = self.id
# If anime style version was found, overwrite the proper count with it
if 'version' in match.groupdict():
if match.group('version'):
self.proper_count = int(match.group('version')) - 1
self.id_type = 'sequence'
self.valid = True
log.debug('found id \'%s\' with regexp \'%s\'', self.id, sequence_re.pattern)
if not (self.special and self.prefer_specials):
return
else:
break
else:
log.debug('-> no luck with sequence_regexps')
# No id found, check if this is a special
if self.special or self.assume_special:
# Attempt to set id as the title of the special
self.id = data_stripped or 'special'
self.id_type = 'special'
self.valid = True
log.debug('found special, setting id to \'%s\'', self.id)
return
if self.valid:
return
msg = 'Title `%s` looks like series `%s` but cannot find ' % (self.data, self.name)
if self.identified_by == 'auto':
msg += 'any series numbering.'
else:
msg += 'a(n) `%s` style identifier.' % self.identified_by
raise ParseWarning(msg)
def parse_unwanted(self, data):
"""Parses data for an unwanted hits. Return True if the data contains unwanted hits."""
for unwanted_re in self.unwanted_regexps:
match = re.search(unwanted_re, data)
if match:
log.debug('unwanted regexp %s matched %s', unwanted_re.pattern, match.groups())
return True
def parse_unwanted_sequence(self, data):
"""Parses data for an unwanted id hits. Return True if the data contains unwanted hits."""
for seq_unwanted_re in self.unwanted_sequence_regexps:
match = re.search(seq_unwanted_re, data)
if match:
log.debug('unwanted id regexp %s matched %s', seq_unwanted_re, match.groups())
return True
def parse_date(self, data):
"""
Parses :data: for a date identifier.
If found, returns the date and regexp match object
If no date is found returns False
"""
for date_re in self.date_regexps:
match = re.search(date_re, data)
if match:
# Check if this is a valid date
possdates = []
try:
# By default dayfirst and yearfirst will be tried as both True and False
# if either have been defined manually, restrict that option
dayfirst_opts = [True, False]
if self.date_dayfirst is not None:
dayfirst_opts = [self.date_dayfirst]
yearfirst_opts = [True, False]
if self.date_yearfirst is not None:
yearfirst_opts = [self.date_yearfirst]
kwargs_list = ({'dayfirst': d, 'yearfirst': y} for d in dayfirst_opts for y in yearfirst_opts)
for kwargs in kwargs_list:
possdate = parsedate(' '.join(match.groups()), **kwargs)
# Don't accept dates farther than a day in the future
if possdate > datetime.now() + timedelta(days=1):
continue
# Don't accept dates that are too old
if possdate < datetime(1970, 1, 1):
continue
if possdate not in possdates:
possdates.append(possdate)
except ValueError:
log.debug('%s is not a valid date, skipping', match.group(0))
continue
if not possdates:
                log.debug('All possible dates for %s were out of the accepted range', match.group(0))
continue
possdates.sort()
# Pick the most recent date if there are ambiguities
bestdate = possdates[-1]
return {'date': bestdate, 'match': match}
return False
def parse_episode(self, data):
"""
Parses :data: for an episode identifier.
If found, returns a dict with keys for season, episode, end_episode and the regexp match object
If no episode id is found returns False
"""
# search for season and episode number
for ep_re in self.ep_regexps:
match = re.search(ep_re, data)
if match:
log.debug('found episode number with regexp %s (%s)', ep_re.pattern, match.groups())
matches = match.groups()
if len(matches) >= 2:
season = matches[0]
episode = matches[1]
elif self.allow_seasonless:
# assume season 1 if the season was not specified
season = 1
episode = matches[0]
else:
# Return False if we are not allowing seasonless matches and one is found
return False
# Convert season and episode to integers
try:
season = int(season)
if not episode.isdigit():
try:
idx = self.english_numbers.index(str(episode).lower())
episode = 1 + idx
except ValueError:
episode = self.roman_to_int(episode)
else:
episode = int(episode)
except ValueError:
log.critical('Invalid episode number match %s returned with regexp `%s` for %s',
match.groups(), ep_re.pattern, self.data)
raise
end_episode = None
if len(matches) == 3 and matches[2]:
end_episode = int(matches[2])
if end_episode <= episode or end_episode > episode + 12:
                        # the end episode cannot be before the start episode, and
                        # very large ranges are assumed not to be episode packs
                        # (ticket #1271) TODO: is this the best way?
end_episode = None
# Successfully found an identifier, return the results
return {'season': season,
'episode': episode,
'end_episode': end_episode,
'match': match}
return False
def roman_to_int(self, roman):
"""Converts roman numerals up to 39 to integers"""
roman_map = [('X', 10), ('IX', 9), ('V', 5), ('IV', 4), ('I', 1)]
roman = roman.upper()
# Return False if this is not a roman numeral we can translate
for char in roman:
if char not in 'XVI':
raise ValueError('`%s` is not a valid roman numeral' % roman)
# Add up the parts of the numeral
i = result = 0
for numeral, integer in roman_map:
while roman[i:i + len(numeral)] == numeral:
result += integer
i += len(numeral)
return result
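    # e.g. roman_to_int('IV') -> 4, roman_to_int('XIX') -> 19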
@property
def identifiers(self):
"""Return all identifiers this parser represents. (for packs)"""
# Currently 'ep' is the only id type that supports packs
if not self.valid:
raise Exception('Series flagged invalid')
if self.id_type == 'ep':
return ['S%02dE%02d' % (self.season, self.episode + x) for x in xrange(self.episodes)]
elif self.id_type == 'date':
return [self.id.strftime('%Y-%m-%d')]
if self.id is None:
raise Exception('Series is missing identifier')
else:
return [self.id]
@property
def identifier(self):
"""Return String identifier for parsed episode, eg. S01E02
(will be the first identifier if this is a pack)
"""
return self.identifiers[0]
@property
def pack_identifier(self):
"""Return a combined identifier for the whole pack if this has more than one episode."""
# Currently only supports ep mode
if self.id_type == 'ep' and self.episodes > 1:
return 'S%02dE%02d-E%02d' % (self.season, self.episode, self.episode + self.episodes - 1)
else:
return self.identifier
@property
def proper(self):
return self.proper_count > 0
def __str__(self):
        # for some reason it's impossible to print self.field here; if someone
        # figures out why, please tell me!
valid = 'INVALID'
if self.valid:
valid = 'OK'
return '<SeriesParser(data=%s,name=%s,id=%s,season=%s,episode=%s,quality=%s,proper=%s,status=%s)>' % \
(self.data, self.name, str(self.id), self.season, self.episode,
self.quality, self.proper_count, valid)
def __cmp__(self, other):
"""Compares quality of parsers, if quality is equal, compares proper_count."""
return cmp((self.quality, self.episodes, self.proper_count),
(other.quality, other.episodes, other.proper_count))
def __eq__(self, other):
return self is other
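# Hedged usage sketch (the release title and expected identifier are
# assumptions; parse() raises ParseWarning when the data only resembles
# the series):
#   parser = SeriesParser(name='Example Show')
#   parser.parse('Example.Show.S02E05.720p-GROUP')
#   if parser.valid:
#       print parser.identifier  # -> 'S02E05'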
|
import sys, re, os, time, traceback, subprocess
import pickle
from ..Qt import QtCore, QtGui, QT_LIB
from ..python2_3 import basestring
from .. import exceptionHandling as exceptionHandling
from .. import getConfigOption
if QT_LIB == 'PySide':
from . import template_pyside as template
elif QT_LIB == 'PySide2':
from . import template_pyside2 as template
elif QT_LIB == 'PyQt5':
from . import template_pyqt5 as template
else:
from . import template_pyqt as template
class ConsoleWidget(QtGui.QWidget):
"""
Widget displaying console output and accepting command input.
Implements:
- eval python expressions / exec python statements
- storable history of commands
- exception handling allowing commands to be interpreted in the context of any level in the exception stack frame
    Why not just use python in an interactive shell (or ipython)? There are a few reasons:
- pyside does not yet allow Qt event processing and interactive shell at the same time
- on some systems, typing in the console _blocks_ the qt event loop until the user presses enter. This can
be baffling and frustrating to users since it would appear the program has frozen.
- some terminals (eg windows cmd.exe) have notoriously unfriendly interfaces
- ability to add extra features like exception stack introspection
- ability to have multiple interactive prompts, including for spawned sub-processes
"""
def __init__(self, parent=None, namespace=None, historyFile=None, text=None, editor=None):
"""
============== ============================================================================
**Arguments:**
namespace dictionary containing the initial variables present in the default namespace
historyFile optional file for storing command history
text initial text to display in the console window
editor optional string for invoking code editor (called when stack trace entries are
double-clicked). May contain {fileName} and {lineNum} format keys. Example::
editorCommand --loadfile {fileName} --gotoline {lineNum}
============== =============================================================================
"""
QtGui.QWidget.__init__(self, parent)
if namespace is None:
namespace = {}
namespace['__console__'] = self
self.localNamespace = namespace
self.editor = editor
self.multiline = None
self.inCmd = False
self.frames = [] # stack frames to access when an item in the stack list is selected
self.ui = template.Ui_Form()
self.ui.setupUi(self)
self.output = self.ui.output
self.input = self.ui.input
self.input.setFocus()
if text is not None:
self.output.setPlainText(text)
self.historyFile = historyFile
history = self.loadHistory()
if history is not None:
self.input.history = [""] + history
self.ui.historyList.addItems(history[::-1])
self.ui.historyList.hide()
self.ui.exceptionGroup.hide()
self.input.sigExecuteCmd.connect(self.runCmd)
self.ui.historyBtn.toggled.connect(self.ui.historyList.setVisible)
self.ui.historyList.itemClicked.connect(self.cmdSelected)
self.ui.historyList.itemDoubleClicked.connect(self.cmdDblClicked)
self.ui.exceptionBtn.toggled.connect(self.ui.exceptionGroup.setVisible)
self.ui.catchAllExceptionsBtn.toggled.connect(self.catchAllExceptions)
self.ui.catchNextExceptionBtn.toggled.connect(self.catchNextException)
self.ui.clearExceptionBtn.clicked.connect(self.clearExceptionClicked)
self.ui.exceptionStackList.itemClicked.connect(self.stackItemClicked)
self.ui.exceptionStackList.itemDoubleClicked.connect(self.stackItemDblClicked)
self.ui.onlyUncaughtCheck.toggled.connect(self.updateSysTrace)
self.currentTraceback = None
def loadHistory(self):
"""Return the list of previously-invoked command strings (or None)."""
        if self.historyFile is not None and os.path.exists(self.historyFile):
            return pickle.load(open(self.historyFile, 'rb'))
def saveHistory(self, history):
"""Store the list of previously-invoked command strings."""
if self.historyFile is not None:
            pickle.dump(history, open(self.historyFile, 'wb'))
def runCmd(self, cmd):
#cmd = str(self.input.lastCmd)
self.stdout = sys.stdout
self.stderr = sys.stderr
        # escape the command for HTML display
        encCmd = re.sub(r'>', '&gt;', re.sub(r'<', '&lt;', cmd))
        encCmd = re.sub(r' ', '&nbsp;', encCmd)
self.ui.historyList.addItem(cmd)
self.saveHistory(self.input.history[1:100])
try:
sys.stdout = self
sys.stderr = self
if self.multiline is not None:
self.write("<br><b>%s</b>\n"%encCmd, html=True)
self.execMulti(cmd)
else:
self.write("<br><div style='background-color: #CCF; color: black'><b>%s</b>\n"%encCmd, html=True)
self.inCmd = True
self.execSingle(cmd)
if not self.inCmd:
self.write("</div>\n", html=True)
finally:
sys.stdout = self.stdout
sys.stderr = self.stderr
sb = self.output.verticalScrollBar()
sb.setValue(sb.maximum())
sb = self.ui.historyList.verticalScrollBar()
sb.setValue(sb.maximum())
def globals(self):
frame = self.currentFrame()
if frame is not None and self.ui.runSelectedFrameCheck.isChecked():
return self.currentFrame().f_globals
else:
return self.localNamespace
def locals(self):
frame = self.currentFrame()
if frame is not None and self.ui.runSelectedFrameCheck.isChecked():
return self.currentFrame().f_locals
else:
return self.localNamespace
def currentFrame(self):
## Return the currently selected exception stack frame (or None if there is no exception)
index = self.ui.exceptionStackList.currentRow()
if index >= 0 and index < len(self.frames):
return self.frames[index]
else:
return None
def execSingle(self, cmd):
try:
output = eval(cmd, self.globals(), self.locals())
self.write(repr(output) + '\n')
except SyntaxError:
try:
exec(cmd, self.globals(), self.locals())
except SyntaxError as exc:
if 'unexpected EOF' in exc.msg:
self.multiline = cmd
else:
self.displayException()
except:
self.displayException()
except:
self.displayException()
def execMulti(self, nextLine):
#self.stdout.write(nextLine+"\n")
if nextLine.strip() != '':
self.multiline += "\n" + nextLine
return
else:
cmd = self.multiline
try:
output = eval(cmd, self.globals(), self.locals())
self.write(str(output) + '\n')
self.multiline = None
except SyntaxError:
try:
exec(cmd, self.globals(), self.locals())
self.multiline = None
except SyntaxError as exc:
if 'unexpected EOF' in exc.msg:
self.multiline = cmd
else:
self.displayException()
self.multiline = None
except:
self.displayException()
self.multiline = None
except:
self.displayException()
self.multiline = None
def write(self, strn, html=False):
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if not isGuiThread:
self.stdout.write(strn)
return
self.output.moveCursor(QtGui.QTextCursor.End)
if html:
self.output.textCursor().insertHtml(strn)
else:
if self.inCmd:
self.inCmd = False
self.output.textCursor().insertHtml("</div><br><div style='font-weight: normal; background-color: #FFF; color: black'>")
#self.stdout.write("</div><br><div style='font-weight: normal; background-color: #FFF;'>")
self.output.insertPlainText(strn)
#self.stdout.write(strn)
def displayException(self):
"""
Display the current exception and stack.
"""
tb = traceback.format_exc()
lines = []
indent = 4
prefix = ''
for l in tb.split('\n'):
lines.append(" "*indent + prefix + l)
self.write('\n'.join(lines))
self.exceptionHandler(*sys.exc_info())
def cmdSelected(self, item):
index = -(self.ui.historyList.row(item)+1)
self.input.setHistory(index)
self.input.setFocus()
def cmdDblClicked(self, item):
index = -(self.ui.historyList.row(item)+1)
self.input.setHistory(index)
self.input.execCmd()
def flush(self):
pass
def catchAllExceptions(self, catch=True):
"""
If True, the console will catch all unhandled exceptions and display the stack
trace. Each exception caught clears the last.
"""
self.ui.catchAllExceptionsBtn.setChecked(catch)
if catch:
self.ui.catchNextExceptionBtn.setChecked(False)
self.enableExceptionHandling()
self.ui.exceptionBtn.setChecked(True)
else:
self.disableExceptionHandling()
def catchNextException(self, catch=True):
"""
If True, the console will catch the next unhandled exception and display the stack
trace.
"""
self.ui.catchNextExceptionBtn.setChecked(catch)
if catch:
self.ui.catchAllExceptionsBtn.setChecked(False)
self.enableExceptionHandling()
self.ui.exceptionBtn.setChecked(True)
else:
self.disableExceptionHandling()
def enableExceptionHandling(self):
exceptionHandling.register(self.exceptionHandler)
self.updateSysTrace()
def disableExceptionHandling(self):
exceptionHandling.unregister(self.exceptionHandler)
self.updateSysTrace()
def clearExceptionClicked(self):
self.currentTraceback = None
self.frames = []
self.ui.exceptionInfoLabel.setText("[No current exception]")
self.ui.exceptionStackList.clear()
self.ui.clearExceptionBtn.setEnabled(False)
def stackItemClicked(self, item):
pass
def stackItemDblClicked(self, item):
editor = self.editor
if editor is None:
editor = getConfigOption('editorCommand')
if editor is None:
return
        frame = self.currentFrame()
        if frame is None:
            return
        lineNum = frame.f_lineno
        fileName = frame.f_code.co_filename
        subprocess.Popen(editor.format(fileName=fileName, lineNum=lineNum), shell=True)
def updateSysTrace(self):
## Install or uninstall sys.settrace handler
if not self.ui.catchNextExceptionBtn.isChecked() and not self.ui.catchAllExceptionsBtn.isChecked():
if sys.gettrace() == self.systrace:
sys.settrace(None)
return
if self.ui.onlyUncaughtCheck.isChecked():
if sys.gettrace() == self.systrace:
sys.settrace(None)
else:
if sys.gettrace() is not None and sys.gettrace() != self.systrace:
self.ui.onlyUncaughtCheck.setChecked(False)
raise Exception("sys.settrace is in use; cannot monitor for caught exceptions.")
else:
sys.settrace(self.systrace)
def exceptionHandler(self, excType, exc, tb, systrace=False):
if self.ui.catchNextExceptionBtn.isChecked():
self.ui.catchNextExceptionBtn.setChecked(False)
elif not self.ui.catchAllExceptionsBtn.isChecked():
return
self.currentTraceback = tb
excMessage = ''.join(traceback.format_exception_only(excType, exc))
self.ui.exceptionInfoLabel.setText(excMessage)
if systrace:
# exceptions caught using systrace don't need the usual
# call stack + traceback handling
self.setStack(sys._getframe().f_back.f_back)
else:
self.setStack(frame=sys._getframe().f_back, tb=tb)
def setStack(self, frame=None, tb=None):
"""Display a call stack and exception traceback.
This allows the user to probe the contents of any frame in the given stack.
*frame* may either be a Frame instance or None, in which case the current
frame is retrieved from ``sys._getframe()``.
If *tb* is provided then the frames in the traceback will be appended to
the end of the stack list. If *tb* is None, then sys.exc_info() will
be checked instead.
"""
self.ui.clearExceptionBtn.setEnabled(True)
if frame is None:
frame = sys._getframe().f_back
if tb is None:
tb = sys.exc_info()[2]
self.ui.exceptionStackList.clear()
self.frames = []
# Build stack up to this point
for index, line in enumerate(traceback.extract_stack(frame)):
# extract_stack return value changed in python 3.5
if 'FrameSummary' in str(type(line)):
line = (line.filename, line.lineno, line.name, line._line)
self.ui.exceptionStackList.addItem('File "%s", line %s, in %s()\n %s' % line)
while frame is not None:
self.frames.insert(0, frame)
frame = frame.f_back
if tb is None:
return
self.ui.exceptionStackList.addItem('-- exception caught here: --')
item = self.ui.exceptionStackList.item(self.ui.exceptionStackList.count()-1)
item.setBackground(QtGui.QBrush(QtGui.QColor(200, 200, 200)))
item.setForeground(QtGui.QBrush(QtGui.QColor(50, 50, 50)))
self.frames.append(None)
# And finish the rest of the stack up to the exception
for index, line in enumerate(traceback.extract_tb(tb)):
# extract_stack return value changed in python 3.5
if 'FrameSummary' in str(type(line)):
line = (line.filename, line.lineno, line.name, line._line)
self.ui.exceptionStackList.addItem('File "%s", line %s, in %s()\n %s' % line)
while tb is not None:
self.frames.append(tb.tb_frame)
tb = tb.tb_next
def systrace(self, frame, event, arg):
if event == 'exception' and self.checkException(*arg):
self.exceptionHandler(*arg, systrace=True)
return self.systrace
def checkException(self, excType, exc, tb):
## Return True if the exception is interesting; False if it should be ignored.
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
filterStr = str(self.ui.filterText.text())
if filterStr != '':
if isinstance(exc, Exception):
msg = exc.message
elif isinstance(exc, basestring):
msg = exc
else:
msg = repr(exc)
match = re.search(filterStr, "%s:%s:%s" % (filename, function, msg))
return match is not None
## Go through a list of common exception points we like to ignore:
if excType is GeneratorExit or excType is StopIteration:
return False
if excType is KeyError:
if filename.endswith('python2.7/weakref.py') and function in ('__contains__', 'get'):
return False
if filename.endswith('python2.7/copy.py') and function == '_keep_alive':
return False
if excType is AttributeError:
if filename.endswith('python2.7/collections.py') and function == '__init__':
return False
if filename.endswith('numpy/core/fromnumeric.py') and function in ('all', '_wrapit', 'transpose', 'sum'):
return False
            if filename.endswith('numpy/core/arrayprint.py') and function in ('_array2string',):
return False
if filename.endswith('MetaArray.py') and function == '__getattr__':
for name in ('__array_interface__', '__array_struct__', '__array__'): ## numpy looks for these when converting objects to array
                if name in str(exc):
return False
if filename.endswith('flowchart/eq.py'):
return False
if filename.endswith('pyqtgraph/functions.py') and function == 'makeQImage':
return False
if excType is TypeError:
if filename.endswith('numpy/lib/function_base.py') and function == 'iterable':
return False
if excType is ZeroDivisionError:
if filename.endswith('python2.7/traceback.py'):
return False
return True
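# Hedged usage sketch (assumes a running Qt application; the namespace
# contents are illustrative):
#   app = QtGui.QApplication([])
#   console = ConsoleWidget(namespace={'app': app}, text="Welcome!\n")
#   console.show()
#   app.exec_()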
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from bootcamp.activities.models import Notification
from bootcamp.decorators import ajax_required
@login_required
def notifications(request):
user = request.user
notifications = Notification.objects.filter(to_user=user)
unread = Notification.objects.filter(to_user=user, is_read=False)
for notification in unread:
notification.is_read = True
notification.save()
return render(request, 'activities/notifications.html',
{'notifications': notifications})
@login_required
@ajax_required
def last_notifications(request):
user = request.user
notifications = Notification.objects.filter(to_user=user,
is_read=False)[:5]
for notification in notifications:
notification.is_read = True
notification.save()
return render(request,
'activities/last_notifications.html',
{'notifications': notifications})
@login_required
@ajax_required
def check_notifications(request):
user = request.user
notifications = Notification.objects.filter(to_user=user,
is_read=False)[:5]
return HttpResponse(len(notifications))
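# Hedged wiring sketch (URL patterns and names are assumptions):
#   from django.conf.urls import url
#   from bootcamp.activities import views
#   urlpatterns = [
#       url(r'^notifications/$', views.notifications, name='notifications'),
#       url(r'^notifications/last/$', views.last_notifications, name='last_notifications'),
#       url(r'^notifications/check/$', views.check_notifications, name='check_notifications'),
#   ]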
|
"""
Django-environ allows you to utilize 12factor inspired environment
variables to configure your Django application.
"""
from __future__ import unicode_literals
import os
import sys
import re
import json
import warnings
import logging
logger = logging.getLogger(__file__)
try:
from django.core.exceptions import ImproperlyConfigured
except ImportError:
class ImproperlyConfigured(Exception):
pass
try:
import urllib.parse as urlparse
except ImportError:
    # Python 2 fallback
import urlparse
if sys.version < '3':
text_type = unicode
else:
text_type = str
basestring = str
__author__ = 'joke2k'
__version__ = (0, 3, 1)
_cast_int = lambda v: int(v) if isinstance(v, basestring) and v.isdigit() else v
_cast_str = lambda v: str(v) if isinstance(v, basestring) else v
class NoValue(object):
def __repr__(self):
return '<{0}>'.format(self.__class__.__name__)
class Env(object):
"""Provide schema-based lookups of environment variables so that each
caller doesn't have to pass in `cast` and `default` parameters.
    Usage::
env = Env(MAIL_ENABLED=bool, SMTP_LOGIN=(str, 'DEFAULT'))
if env('MAIL_ENABLED'):
...
"""
NOTSET = NoValue()
BOOLEAN_TRUE_STRINGS = ('true', 'on', 'ok', 'y', 'yes', '1')
URL_CLASS = urlparse.ParseResult
DEFAULT_DATABASE_ENV = 'DATABASE_URL'
DB_SCHEMES = {
'postgres': 'django.db.backends.postgresql_psycopg2',
'postgresql': 'django.db.backends.postgresql_psycopg2',
'psql': 'django.db.backends.postgresql_psycopg2',
'pgsql': 'django.db.backends.postgresql_psycopg2',
'postgis': 'django.contrib.gis.db.backends.postgis',
'mysql': 'django.db.backends.mysql',
'mysql2': 'django.db.backends.mysql',
'mysqlgis': 'django.contrib.gis.db.backends.mysql',
'spatialite': 'django.contrib.gis.db.backends.spatialite',
'sqlite': 'django.db.backends.sqlite3',
'ldap': 'ldapdb.backends.ldap',
}
_DB_BASE_OPTIONS = ['CONN_MAX_AGE', 'ATOMIC_REQUESTS', 'AUTOCOMMIT']
DEFAULT_CACHE_ENV = 'CACHE_URL'
CACHE_SCHEMES = {
'dbcache': 'django.core.cache.backends.db.DatabaseCache',
'dummycache': 'django.core.cache.backends.dummy.DummyCache',
'filecache': 'django.core.cache.backends.filebased.FileBasedCache',
'locmemcache': 'django.core.cache.backends.locmem.LocMemCache',
'memcache': 'django.core.cache.backends.memcached.MemcachedCache',
'pymemcache': 'django.core.cache.backends.memcached.PyLibMCCache',
'rediscache': 'redis_cache.cache.RedisCache',
'redis': 'redis_cache.cache.RedisCache',
}
_CACHE_BASE_OPTIONS = ['TIMEOUT', 'KEY_PREFIX', 'VERSION', 'KEY_FUNCTION']
DEFAULT_EMAIL_ENV = 'EMAIL_URL'
EMAIL_SCHEMES = {
'smtp': 'django.core.mail.backends.smtp.EmailBackend',
'smtps': 'django.core.mail.backends.smtp.EmailBackend',
'consolemail': 'django.core.mail.backends.console.EmailBackend',
'filemail': 'django.core.mail.backends.filebased.EmailBackend',
'memorymail': 'django.core.mail.backends.locmem.EmailBackend',
'dummymail': 'django.core.mail.backends.dummy.EmailBackend'
}
_EMAIL_BASE_OPTIONS = ['EMAIL_USE_TLS', ]
DEFAULT_SEARCH_ENV = 'SEARCH_URL'
SEARCH_SCHEMES = {
"elasticsearch": "haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine",
"solr": "haystack.backends.solr_backend.SolrEngine",
"whoosh": "haystack.backends.whoosh_backend.WhooshEngine",
"simple": "haystack.backends.simple_backend.SimpleEngine",
}
def __init__(self, **schema):
self.schema = schema
def __call__(self, var, cast=None, default=NOTSET, parse_default=False):
return self.get_value(var, cast=cast, default=default, parse_default=parse_default)
# Shortcuts
def str(self, var, default=NOTSET):
"""
:rtype: str
"""
return self.get_value(var, default=default)
def unicode(self, var, default=NOTSET):
"""Helper for python2
:rtype: unicode
"""
return self.get_value(var, cast=text_type, default=default)
def bool(self, var, default=NOTSET):
"""
:rtype: bool
"""
return self.get_value(var, cast=bool, default=default)
def int(self, var, default=NOTSET):
"""
:rtype: int
"""
return self.get_value(var, cast=int, default=default)
def float(self, var, default=NOTSET):
"""
:rtype: float
"""
return self.get_value(var, cast=float, default=default)
def json(self, var, default=NOTSET):
"""
        :returns: parsed JSON
"""
return self.get_value(var, cast=json.loads, default=default)
def list(self, var, cast=None, default=NOTSET):
"""
:rtype: list
"""
return self.get_value(var, cast=list if not cast else [cast], default=default)
def dict(self, var, cast=dict, default=NOTSET):
"""
:rtype: dict
"""
return self.get_value(var, cast=cast, default=default)
def url(self, var, default=NOTSET):
"""
:rtype: urlparse.ParseResult
"""
return self.get_value(var, cast=urlparse.urlparse, default=default, parse_default=True)
def db_url(self, var=DEFAULT_DATABASE_ENV, default=NOTSET, engine=None):
"""Returns a config dictionary, defaulting to DATABASE_URL.
:rtype: dict
"""
return self.db_url_config(self.get_value(var, default=default), engine=engine)
db=db_url
def cache_url(self, var=DEFAULT_CACHE_ENV, default=NOTSET, backend=None):
"""Returns a config dictionary, defaulting to CACHE_URL.
:rtype: dict
"""
return self.cache_url_config(self.url(var, default=default), backend=backend)
cache=cache_url
def email_url(self, var=DEFAULT_EMAIL_ENV, default=NOTSET, backend=None):
"""Returns a config dictionary, defaulting to EMAIL_URL.
:rtype: dict
"""
return self.email_url_config(self.url(var, default=default), backend=backend)
email=email_url
def search_url(self, var=DEFAULT_SEARCH_ENV, default=NOTSET, engine=None):
"""Returns a config dictionary, defaulting to SEARCH_URL.
:rtype: dict
"""
return self.search_url_config(self.url(var, default=default), engine=engine)
def path(self, var, default=NOTSET, **kwargs):
"""
:rtype: Path
"""
return Path(self.get_value(var, default=default), **kwargs)
def get_value(self, var, cast=None, default=NOTSET, parse_default=False):
"""Return value for given environment variable.
:param var: Name of variable.
:param cast: Type to cast return value as.
:param default: If var not present in environ, return this instead.
        :param parse_default: Force parsing of the default value.
:returns: Value from environment or default (if set)
"""
logger.debug("get '{0}' casted as '{1}' with default '{2}'".format(var, cast, default))
if var in self.schema:
var_info = self.schema[var]
try:
has_default = len(var_info) == 2
except TypeError:
has_default = False
if has_default:
if not cast:
cast = var_info[0]
if default is self.NOTSET:
try:
default = var_info[1]
except IndexError:
pass
else:
if not cast:
cast = var_info
try:
value = os.environ[var]
except KeyError:
if default is self.NOTSET:
error_msg = "Set the {0} environment variable".format(var)
raise ImproperlyConfigured(error_msg)
value = default
# Resolve any proxied values
if hasattr(value, 'startswith') and value.startswith('$'):
value = value.lstrip('$')
value = self.get_value(value, cast=cast, default=default)
if value != default or parse_default:
value = self.parse_value(value, cast)
return value
# Class and static methods
@classmethod
def parse_value(cls, value, cast):
"""Parse and cast provided value
:param value: Stringed value.
:param cast: Type to cast return value as.
:returns: Casted value
"""
if cast is None:
return value
elif cast is bool:
try:
value = int(value) != 0
except ValueError:
value = value.lower() in cls.BOOLEAN_TRUE_STRINGS
elif isinstance(cast, list):
value = list(map(cast[0], [x for x in value.split(',') if x]))
elif isinstance(cast, dict):
key_cast = cast.get('key', str)
value_cast = cast.get('value', text_type)
value_cast_by_key = cast.get('cast', dict())
value = dict(map(
lambda kv: (key_cast(kv[0]), cls.parse_value(kv[1], value_cast_by_key.get(kv[0], value_cast))),
[val.split('=') for val in value.split(';') if val]
))
elif cast is dict:
value = dict([val.split('=') for val in value.split(',') if val])
elif cast is list:
value = [x for x in value.split(',') if x]
elif cast is float:
# clean string
float_str = re.sub(r'[^\d,\.]', '', value)
# split to avoid thousands separators and locale-specific comma/dot symbols
parts = re.split(r'[,\.]', float_str)
if len(parts) == 1:
float_str = parts[0]
else:
float_str = "{0}.{1}".format(''.join(parts[0:-1]), parts[-1])
value = float(float_str)
else:
value = cast(value)
return value
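# Hedged examples of the cast grammar handled above (values illustrative):
#   >>> Env.parse_value('1,2,3', [int])
#   [1, 2, 3]
#   >>> Env.parse_value('a=1;b=2', {'value': int})
#   {'a': 1, 'b': 2}
#   >>> Env.parse_value('1,000.50', float)
#   1000.5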
@classmethod
def db_url_config(cls, url, engine=None):
"""Pulled from DJ-Database-URL, parse an arbitrary Database URL.
Support currently exists for PostgreSQL, PostGIS, MySQL and SQLite.
SQLite connects to file based databases. The same URL format is used, omitting the hostname,
and using the "file" portion as the filename of the database.
This has the effect of four slashes being present for an absolute file path:
>>> from environ import Env
>>> Env.db_url_config('sqlite:////full/path/to/your/file.sqlite')
{'ENGINE': 'django.db.backends.sqlite3', 'HOST': None, 'NAME': '/full/path/to/your/file.sqlite', 'PASSWORD': None, 'PORT': None, 'USER': None}
>>> Env.db_url_config('postgres://uf07k1i6d8ia0v:wegauwhgeuioweg@ec2-107-21-253-135.compute-1.amazonaws.com:5431/d8r82722r2kuvn')
{'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': 'ec2-107-21-253-135.compute-1.amazonaws.com', 'NAME': 'd8r82722r2kuvn', 'PASSWORD': 'wegauwhgeuioweg', 'PORT': 5431, 'USER': 'uf07k1i6d8ia0v'}
"""
if not isinstance(url, cls.URL_CLASS):
if url == 'sqlite://:memory:':
# this is a special case, because if we pass this URL into
# urlparse, urlparse will choke trying to interpret "memory"
# as a port number
return {
'ENGINE': cls.DB_SCHEMES['sqlite'],
'NAME': ':memory:'
}
# note: no other settings are required for sqlite
url = urlparse.urlparse(url)
config = {}
# Remove query strings.
path = url.path[1:]
path = path.split('?', 2)[0]
# if we are using sqlite and we have no path, then assume we
# want an in-memory database (this is the behaviour of sqlalchemy)
if url.scheme == 'sqlite' and path == '':
path = ':memory:'
if url.scheme == 'ldap':
path = '{scheme}://{hostname}'.format(scheme=_cast_str(url.scheme), hostname=_cast_str(url.hostname))
if url.port:
path += ':{port}'.format(port=_cast_str(url.port))
# Update with environment configuration.
config.update({
'NAME': path,
'USER': _cast_str(url.username),
'PASSWORD': _cast_str(url.password),
'HOST': _cast_str(url.hostname),
'PORT': _cast_int(url.port),
})
if url.query:
config_options = {}
for k, v in urlparse.parse_qs(url.query).items():
if k.upper() in cls._DB_BASE_OPTIONS:
config.update({k.upper(): _cast_int(v[0])})
else:
config_options.update({k: _cast_int(v[0])})
config['OPTIONS'] = config_options
if engine:
config['ENGINE'] = engine
if url.scheme in Env.DB_SCHEMES:
config['ENGINE'] = Env.DB_SCHEMES[url.scheme]
if not config.get('ENGINE', False):
warnings.warn("Engine not recognized from url: {0}".format(config))
return {}
return config
@classmethod
def cache_url_config(cls, url, backend=None):
"""Pulled from DJ-Cache-URL, parse an arbitrary Cache URL.
:param url:
:param overrides:
:return:
"""
url = urlparse.urlparse(url) if not isinstance(url, cls.URL_CLASS) else url
location = url.netloc.split(',')
if len(location) == 1:
location = location[0]
config = {
'BACKEND': cls.CACHE_SCHEMES[url.scheme],
'LOCATION': location,
}
if url.scheme == 'filecache':
config.update({
'LOCATION': _cast_str(url.netloc + url.path),
})
if url.path and url.scheme in ['memcache', 'pymemcache', 'rediscache']:
config.update({
'LOCATION': 'unix:' + url.path,
})
if url.query:
config_options = {}
for k, v in urlparse.parse_qs(url.query).items():
opt = {k.upper(): _cast_int(v[0])}
if k.upper() in cls._CACHE_BASE_OPTIONS:
config.update(opt)
else:
config_options.update(opt)
config['OPTIONS'] = config_options
if backend:
config['BACKEND'] = backend
return config
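# Hedged sketch of the mapping above (assumes cls.CACHE_SCHEMES, defined
# earlier in this module, maps 'dbcache' to Django's database cache backend):
#   >>> Env.cache_url_config('dbcache://my_cache_table')['LOCATION']
#   'my_cache_table'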
@classmethod
def email_url_config(cls, url, backend=None):
"""Parses an email URL."""
config = {}
url = urlparse.urlparse(url) if not isinstance(url, cls.URL_CLASS) else url
# Remove query strings
path = url.path[1:]
path = path.split('?', 2)[0]
# Update with environment configuration
config.update({
'EMAIL_FILE_PATH': path,
'EMAIL_HOST_USER': _cast_str(url.username),
'EMAIL_HOST_PASSWORD': _cast_str(url.password),
'EMAIL_HOST': _cast_str(url.hostname),
'EMAIL_PORT': _cast_int(url.port),
})
if backend:
config['EMAIL_BACKEND'] = backend
elif url.scheme in cls.EMAIL_SCHEMES:
config['EMAIL_BACKEND'] = cls.EMAIL_SCHEMES[url.scheme]
if url.scheme == 'smtps':
config['EMAIL_USE_TLS'] = True
else:
config['EMAIL_USE_TLS'] = False
if url.query:
config_options = {}
for k, v in urlparse.parse_qs(url.query).items():
opt = {k.upper(): _cast_int(v[0])}
if k.upper() in cls._EMAIL_BASE_OPTIONS:
config.update(opt)
else:
config_options.update(opt)
config['OPTIONS'] = config_options
return config
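# Hedged sketch: an 'smtps' URL yields TLS-enabled SMTP settings
# (host/port/user values below are illustrative):
#   >>> cfg = Env.email_url_config('smtps://user:secret@mail.example.com:587')
#   >>> cfg['EMAIL_HOST'], cfg['EMAIL_PORT'], cfg['EMAIL_USE_TLS']
#   ('mail.example.com', 587, True)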
@classmethod
def search_url_config(cls, url, engine=None):
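"""Parse an arbitrary search URL (Haystack-style).
The last path component is split off as the index name and the
transport is normalized to http.
:rtype: dict
"""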
config = {}
url = urlparse.urlparse(url) if not isinstance(url, cls.URL_CLASS) else url
# Remove query strings.
path = url.path[1:]
path = path.split('?', 2)[0]
if url.scheme in cls.SEARCH_SCHEMES:
config["ENGINE"] = cls.SEARCH_SCHEMES[url.scheme]
if path.endswith("/"):
path = path[:-1]
split = path.rsplit("/", 1)
if len(split) > 1:
path = split[:-1]
index = split[-1]
else:
path = ""
index = split[0]
config.update({
"URL": urlparse.urlunparse(("http",) + url[1:2] + (path,) + url[3:]),
"INDEX_NAME": index,
})
if path:
config.update({
"PATH": path,
})
if engine:
config['ENGINE'] = engine
return config
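# Hedged sketch (assumes cls.SEARCH_SCHEMES knows 'elasticsearch'; the last
# path component becomes the index name and the transport defaults to http):
#   >>> cfg = Env.search_url_config('elasticsearch://127.0.0.1:9200/my-index')
#   >>> cfg['URL'], cfg['INDEX_NAME']
#   ('http://127.0.0.1:9200', 'my-index')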
@staticmethod
def read_env(env_file=None, **overrides):
"""Read a .env file into os.environ.
If not given a path to a dotenv file, does filthy magic stack backtracking
to find manage.py and then find the dotenv.
http://www.wellfireinteractive.com/blog/easier-12-factor-django/
https://gist.github.com/bennylope/2999704
"""
if env_file is None:
frame = sys._getframe()
env_file = os.path.join(os.path.dirname(frame.f_back.f_code.co_filename), '.env')
if not os.path.exists(env_file):
warnings.warn("not reading %s - it doesn't exist." % env_file)
return
try:
with open(env_file) if isinstance(env_file, basestring) else env_file as f:
content = f.read()
except IOError:
warnings.warn("not reading %s - it doesn't exist." % env_file)
return
logger.debug('Read environment variables from: {0}'.format(env_file))
for line in content.splitlines():
m1 = re.match(r'\A([A-Za-z_0-9]+)=(.*)\Z', line)
if m1:
key, val = m1.group(1), m1.group(2)
m2 = re.match(r"\A'(.*)'\Z", val)
if m2:
val = m2.group(1)
m3 = re.match(r'\A"(.*)"\Z', val)
if m3:
val = re.sub(r'\\(.)', r'\1', m3.group(1))
os.environ.setdefault(key, text_type(val))
# set defaults
for key, value in overrides.items():
os.environ.setdefault(key, value)
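# A hedged sketch of the .env syntax accepted above -- one KEY=value per line,
# with optional single or double quotes (backslash escapes are undone inside
# double quotes only):
#
#   DEBUG=true
#   SECRET_KEY='not-so-secret'
#   DATABASE_URL="postgres://user:pass@127.0.0.1:5432/app"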
class Path(object):
"""Inspired to Django Two-scoops, handling File Paths in Settings.
>>> from environ import Path
>>> root = Path('/home')
>>> root, root(), root('dev')
(<Path:/home>, '/home', '/home/dev')
>>> root == Path('/home')
True
>>> root in Path('/'), root not in Path('/other/path')
(True, True)
>>> root('dev', 'not_existing_dir', required=True)
Traceback (most recent call last):
environ.environ.ImproperlyConfigured: Create required path: /home/not_existing_dir
>>> public = root.path('public')
>>> public, public.root, public('styles')
(<Path:/home/public>, '/home/public', '/home/public/styles')
>>> assets, scripts = public.path('assets'), public.path('assets', 'scripts')
>>> assets.root, scripts.root
('/home/public/assets', '/home/public/assets/scripts')
>>> assets + 'styles', str(assets + 'styles'), ~assets
(<Path:/home/public/assets/styles>, '/home/public/assets/styles', <Path:/home/public>)
"""
def path(self, *paths, **kwargs):
"""Create new Path based on self.root and provided paths.
:param paths: List of sub paths
:param kwargs: required=False
:rtype: Path
"""
return self.__class__(self.__root__, *paths, **kwargs)
def file(self, name, *args, **kwargs):
"""Open a file.
:param name: Filename appended to self.root
:param args: passed to open()
:param kwargs: passed to open()
:rtype: file
"""
return open(self(name), *args, **kwargs)
@property
def root(self):
"""Current directory for this Path"""
return self.__root__
def __init__(self, start='', *paths, **kwargs):
super(Path, self).__init__()
if kwargs.get('is_file', False):
start = os.path.dirname(start)
self.__root__ = self._absolute_join(start, *paths, **kwargs)
def __call__(self, *paths, **kwargs):
"""Retrieve the absolute path, with appended paths
:param paths: List of sub path of self.root
:param kwargs: required=False
"""
return self._absolute_join(self.__root__, *paths, **kwargs)
def __eq__(self, other):
return self.__root__ == other.__root__
def __ne__(self, other):
return not self.__eq__(other)
def __add__(self, other):
return Path(self.__root__, other if not isinstance(other, Path) else other.__root__)
def __sub__(self, other):
if isinstance(other, int):
return self.path('../' * other)
elif isinstance(other, (str, text_type)):
# str.rstrip would strip a trailing character set, not a suffix
if self.__root__.endswith(other):
return Path(self.__root__[:-len(other)])
return Path(self.__root__)
raise TypeError("unsupported operand type(s) for -: '{0}' and '{1}'".format(self, type(other)))
def __invert__(self):
return self.path('..')
def __contains__(self, item):
base_path = self.__root__
if len(base_path) > 1:
base_path = os.path.join(base_path, '')
return item.__root__.startswith(base_path)
def __repr__(self):
return "<Path:{0}>".format(self.__root__)
def __str__(self):
return self.__root__
def __unicode__(self):
return self.__str__()
@staticmethod
def _absolute_join(base, *paths, **kwargs):
absolute_path = os.path.abspath(os.path.join(base, *paths))
if kwargs.get('required', False) and not os.path.exists(absolute_path):
raise ImproperlyConfigured("Create required path: {0}".format(absolute_path))
return absolute_path
def register_scheme(scheme):
for method in filter(lambda s: s.startswith('uses_'), dir(urlparse)):
getattr(urlparse, method).append(scheme)
for schema in list(Env.DB_SCHEMES.keys()) + list(Env.CACHE_SCHEMES.keys()) + list(Env.SEARCH_SCHEMES.keys()) + list(Env.EMAIL_SCHEMES.keys()):
register_scheme(schema)
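# Hedged note: on Python 2, urlparse only splits out a netloc for schemes listed
# in its uses_* tables, so without the registration loop above a URL such as
# 'postgres://user@host/db' would not expose .hostname or .port.
#   >>> urlparse.urlparse('postgres://user@host:5432/db').hostname
#   'host'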
|
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from gettext import gettext as _
from plugins.rfid.rfidutils import strhex2bin, strbin2dec, find_device
from plugins.plugin import Plugin
from TurtleArt.tapalette import make_palette
from TurtleArt.tautils import debug_output
from TurtleArt.taprimitive import Primitive
from TurtleArt.tatype import TYPE_STRING
import logging
_logger = logging.getLogger('turtleart-activity RFID plugin')
HAL_SERVICE = 'org.freedesktop.Hal'
HAL_MGR_PATH = '/org/freedesktop/Hal/Manager'
HAL_MGR_IFACE = 'org.freedesktop.Hal.Manager'
HAL_DEV_IFACE = 'org.freedesktop.Hal.Device'
REGEXP_SERUSB = '/org/freedesktop/Hal/devices/usb_device[' \
'a-z,A-Z,0-9,_]*serial_usb_[0-9]'
class Rfid(Plugin):
def __init__(self, parent):
Plugin.__init__(self)
self._parent = parent
self._status = False
"""
The following code will initialize a USB RFID reader. Please note that
in order to make this initialization function work, it is necessary to
set the permission for the ttyUSB device to 0666. You can do this by
adding a rule to /etc/udev/rules.d
As root (using sudo or su), copy the following text into a new file in
/etc/udev/rules.d/94-ttyUSB-rules
KERNEL=="ttyUSB[0-9]",MODE="0666"
You only have to do this once.
"""
self.rfid_connected = False
self.rfid_device = find_device()
self.rfid_idn = ''
if self.rfid_device is not None:
_logger.info("RFID device found")
self.rfid_connected = self.rfid_device.do_connect()
if self.rfid_connected:
self.rfid_device.connect("tag-read", self._tag_read_cb)
self.rfid_device.connect("disconnected", self._disconnected_cb)
loop = DBusGMainLoop()
bus = dbus.SystemBus(mainloop=loop)
hmgr_iface = dbus.Interface(
bus.get_object(
HAL_SERVICE,
HAL_MGR_PATH),
HAL_MGR_IFACE)
hmgr_iface.connect_to_signal('DeviceAdded', self._device_added_cb)
self._status = True
def setup(self):
# set up RFID-specific blocks
palette = make_palette('sensor',
colors=["#FF6060", "#A06060"],
help_string=_('Palette of sensor blocks'),
position=6)
if self._status:
palette.add_block('rfid',
style='box-style',
label=_('RFID'),
help_string=_('read value from RFID device'),
value_block=True,
prim_name='rfid')
else:
palette.add_block('rfid',
hidden=True,
style='box-style',
label=_('RFID'),
help_string=_('read value from RFID device'),
value_block=True,
prim_name='rfid')
self._parent.lc.def_prim(
'rfid', 0,
Primitive(self.prim_read_rfid,
return_type=TYPE_STRING,
call_afterwards=self.after_rfid))
def _status_report(self):
debug_output('Reporting RFID status: %s' % (str(self._status)))
return self._status
def _device_added_cb(self, path):
"""
Called from hal connection when a new device is plugged.
"""
if not self.rfid_connected:
self.rfid_device = find_device()
_logger.debug("DEVICE_ADDED: %s" % self.rfid_device)
if self.rfid_device is not None:
_logger.debug("DEVICE_ADDED: RFID device is not None!")
self.rfid_connected = self.rfid_device.do_connect()
if self.rfid_connected:
_logger.debug("DEVICE_ADDED: Connected!")
self.rfid_device.connect("tag-read", self._tag_read_cb)
self.rfid_device.connect("disconnected", self._disconnected_cb)
def _disconnected_cb(self, device, text):
"""
Called when the device is disconnected.
"""
self.rfid_connected = False
self.rfid_device = None
def _tag_read_cb(self, device, tagid):
"""
Callback for "tag-read" signal. Receives the read tag id.
"""
idbin = strhex2bin(tagid)
self.rfid_idn = strbin2dec(idbin[26:64])
while len(self.rfid_idn) < 9:
self.rfid_idn = '0' + self.rfid_idn
print(tagid, idbin, self.rfid_idn)
self._parent.lc.update_label_value('rfid', self.rfid_idn)
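# Hedged reading of the conversion above: strhex2bin expands the hex tag id
# to a bit string, bits 26..63 are taken as the decimal id (strbin2dec), and
# the result is left-padded with zeros to 9 digits before display.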
# Block primitives used in talogo
def prim_read_rfid(self):
if self._status:
return self.rfid_idn
else:
return '0'
def after_rfid(self):
if self._parent.lc.update_values:
self._parent.lc.update_label_value('rfid', self.rfid_idn)
|
""" Sahana Eden Automated Test - HRM001 Create Staff
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from gluon import current
import unittest
from tests.web2unittest import SeleniumUnitTest
from selenium.common.exceptions import NoSuchElementException
from s3 import s3_debug
from tests import *
import time
class CreateStaff(SeleniumUnitTest):
def test_hrm001_create_staff(self):
"""
@case: HRM001
@description: Create a Staff
@TestDoc: https://docs.google.com/spreadsheet/ccc?key=0AmB3hMcgB-3idG1XNGhhRG9QWF81dUlKLXpJaFlCMFE
@Test Wiki: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Testing
"""
print "\n"
self.login(account="admin", nexturl="hrm/staff/create")
self.create("hrm_human_resource",
[( "organisation_id",
"International Federation of Red Cross and Red Crescent Societies (IFRC)",
"option"),
( "first_name",
"Robert",
"pr_person"),
( "middle_name",
"James",
"pr_person"),
( "last_name",
"Lemon",
"pr_person"),
( "email",
"rjltestdonotusetest99@romanian.com",
"pr_person"),
( "job_title_id",
"Warehouse Manager",
"option"),
( "site_id",
"AP Zone (Office)",
"option",
3),
]
)
|
"""Test that the wallet resends transactions periodically."""
from collections import defaultdict
import time
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import ToHex
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, wait_until
class P2PStoreTxInvs(P2PInterface):
def __init__(self):
super().__init__()
self.tx_invs_received = defaultdict(int)
def on_inv(self, message):
# Store how many times invs have been received for each tx.
for i in message.inv:
if i.type == 1:
# save txid
self.tx_invs_received[i.hash] += 1
class ResendWalletTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
node = self.nodes[0] # alias
node.add_p2p_connection(P2PStoreTxInvs())
self.log.info("Create a new transaction and wait until it's broadcast")
txid = int(node.sendtoaddress(node.getnewaddress(), 1), 16)
# Wallet rebroadcast is first scheduled 1 sec after startup (see
# nNextResend in ResendWalletTransactions()). Sleep for just over a
# second to be certain that it has been called before the first
# setmocktime call below.
time.sleep(1.1)
# Can take a few seconds due to transaction trickling
wait_until(lambda: node.p2p.tx_invs_received[txid] >= 1, lock=mininode_lock)
# Add a second peer since txs aren't rebroadcast to the same peer (see filterInventoryKnown)
node.add_p2p_connection(P2PStoreTxInvs())
self.log.info("Create a block")
# Create and submit a block without the transaction.
# Transactions are only rebroadcast if there has been a block at least five minutes
# after the last time we tried to broadcast. Use mocktime and give an extra minute to be sure.
block_time = int(time.time()) + 6 * 60
node.setmocktime(block_time)
block = create_block(int(node.getbestblockhash(), 16), create_coinbase(node.getblockcount() + 1), block_time)
block.rehash()
block.solve()
node.submitblock(ToHex(block))
# Transaction should not be rebroadcast
node.syncwithvalidationinterfacequeue()
node.p2ps[1].sync_with_ping()
assert_equal(node.p2ps[1].tx_invs_received[txid], 0)
self.log.info("Transaction should be rebroadcast after 30 minutes")
# Use mocktime and give an extra 5 minutes to be sure.
rebroadcast_time = int(time.time()) + 41 * 60
node.setmocktime(rebroadcast_time)
wait_until(lambda: node.p2ps[1].tx_invs_received[txid] >= 1, lock=mininode_lock)
if __name__ == '__main__':
ResendWalletTransactionsTest().main()
|
import socket, string, re, select, errno
from structure import Structure
from random import randint
from struct import *
import time
CVS_REVISION = '$Revision: 526 $'
INADDR_ANY = '0.0.0.0'
BROADCAST_ADDR = '<broadcast>'
NETBIOS_NS_PORT = 137
NETBIOS_SESSION_PORT = 139
SMB_SESSION_PORT = 445
NODE_B = 0x0000
NODE_P = 0x2000
NODE_M = 0x4000
NODE_RESERVED = 0x6000
NODE_GROUP = 0x8000
NODE_UNIQUE = 0x0
TYPE_UNKNOWN = 0x01
TYPE_WORKSTATION = 0x00
TYPE_CLIENT = 0x03
TYPE_SERVER = 0x20
TYPE_DOMAIN_MASTER = 0x1B
TYPE_DOMAIN_CONTROLLER = 0x1C
TYPE_MASTER_BROWSER = 0x1D
TYPE_BROWSER = 0x1E
TYPE_NETDDE = 0x1F
TYPE_STATUS = 0x21
OPCODE_QUERY = 0
OPCODE_REGISTRATION = 0x5
OPCODE_RELEASE = 0x6
OPCODE_WACK = 0x7
OPCODE_REFRESH = 0x8
OPCODE_REQUEST = 0
OPCODE_RESPONSE = 0x10
NM_FLAGS_BROADCAST = 0x1
NM_FLAGS_UNICAST = 0
NM_FLAGS_RA = 0x8
NM_FLAGS_RD = 0x10
NM_FLAGS_TC = 0x20
NM_FLAGS_AA = 0x40
QUESTION_TYPE_NB = 0x20 # NetBIOS general Name Service Resource Record
QUESTION_TYPE_NBSTAT = 0x21 # NetBIOS NODE STATUS Resource Record
QUESTION_CLASS_IN = 0x1 # Internet class
RR_TYPE_A = 0x1 # IP address Resource Record
RR_TYPE_NS = 0x2 # Name Server Resource Record
RR_TYPE_NULL = 0xA # NULL Resource Record
RR_TYPE_NB = 0x20 # NetBIOS general Name Service Resource Record
RR_TYPE_NBSTAT = 0x21 # NetBIOS NODE STATUS Resource Record
RR_CLASS_IN = 1 # Internet class
RCODE_FMT_ERR = 0x1 # Format Error. Request was invalidly formatted.
RCODE_SRV_ERR = 0x2 # Server failure. Problem with NBNS, cannot process name.
RCODE_IMP_ERR = 0x4 # Unsupported request error. Allowable only for challenging NBNS when gets an Update type
# registration request.
RCODE_RFS_ERR = 0x5 # Refused error. For policy reasons server will not register this name from this host.
RCODE_ACT_ERR = 0x6 # Active error. Name is owned by another node.
RCODE_CFT_ERR = 0x7 # Name in conflict error. A UNIQUE name is owned by more than one node.
NAME_FLAGS_PRM = 0x0200 # Permanent Name Flag. If one (1) then entry is for the permanent node name. Flag is zero
# (0) for all other names.
NAME_FLAGS_ACT = 0x0400 # Active Name Flag. All entries have this flag set to one (1).
NAME_FLAG_CNF = 0x0800 # Conflict Flag. If one (1) then name on this node is in conflict.
NAME_FLAG_DRG = 0x1000 # Deregister Flag. If one (1) then this name is in the process of being deleted.
NAME_TYPES = { TYPE_UNKNOWN: 'Unknown', TYPE_WORKSTATION: 'Workstation', TYPE_CLIENT: 'Client',
TYPE_SERVER: 'Server', TYPE_MASTER_BROWSER: 'Master Browser', TYPE_BROWSER: 'Browser Server',
TYPE_DOMAIN_MASTER: 'Domain Master' , TYPE_NETDDE: 'NetDDE Server'}
NETBIOS_SESSION_MESSAGE = 0x0
NETBIOS_SESSION_REQUEST = 0x81
NETBIOS_SESSION_POSITIVE_RESPONSE = 0x82
NETBIOS_SESSION_NEGATIVE_RESPONSE = 0x83
NETBIOS_SESSION_RETARGET_RESPONSE = 0x84
NETBIOS_SESSION_KEEP_ALIVE = 0x85
def strerror(errclass, errcode):
if errclass == ERRCLASS_OS:
return 'OS Error', str(errcode)
elif errclass == ERRCLASS_QUERY:
return 'Query Error', QUERY_ERRORS.get(errcode, 'Unknown error')
elif errclass == ERRCLASS_SESSION:
return 'Session Error', SESSION_ERRORS.get(errcode, 'Unknown error')
else:
return 'Unknown Error Class', 'Unknown Error'
class NetBIOSError(Exception): pass
class NetBIOSTimeout(Exception):
def __init__(self, message = 'The NETBIOS connection with the remote host timed out.'):
Exception.__init__(self, message)
class NBResourceRecord:
def __init__(self, data = 0):
self._data = data
try:
if self._data:
self.rr_name = (re.split('\x00',data))[0]
offset = len(self.rr_name)+1
self.rr_type = unpack('>H', self._data[offset:offset+2])[0]
self.rr_class = unpack('>H', self._data[offset+2: offset+4])[0]
self.ttl = unpack('>L',self._data[offset+4:offset+8])[0]
self.rdlength = unpack('>H', self._data[offset+8:offset+10])[0]
self.rdata = data[offset+10:offset+10+self.rdlength]
offset = self.rdlength - 2
self.unit_id = data[offset:offset+6]
else:
self.rr_name = ''
self.rr_type = 0
self.rr_class = 0
self.ttl = 0
self.rdlength = 0
self.rdata = ''
self.unit_id = ''
except Exception,e:
raise NetBIOSError( 'Wrong packet format ' )
def set_rr_name(self, name):
self.rr_name = name
def set_rr_type(self, name):
self.rr_type = name
def set_rr_class(self,cl):
self.rr_class = cl
def set_ttl(self,ttl):
self.ttl = ttl
def set_rdata(self,rdata):
self.rdata = rdata
self.rdlength = len(rdata)
def get_unit_id(self):
return self.unit_id
def get_rr_name(self):
return self.rr_name
def get_rr_class(self):
return self.rr_class
def get_ttl(self):
return self.ttl
def get_rdlength(self):
return self.rdlength
def get_rdata(self):
return self.rdata
def rawData(self):
return self.rr_name + pack('!HHLH',self.rr_type, self.rr_class, self.ttl, self.rdlength) + self.rdata
class NBNodeStatusResponse(NBResourceRecord):
def __init__(self, data = 0):
NBResourceRecord.__init__(self,data)
self.num_names = 0
self.node_names = [ ]
self.statistics = ''
self.mac = '00-00-00-00-00-00'
try:
if data:
self._data = self.get_rdata()
self.num_names = unpack('>B',self._data[:1])[0]
offset = 1
for i in range(0, self.num_names):
name = self._data[offset:offset + 15]
type,flags = unpack('>BH', self._data[offset + 15: offset + 18])
offset += 18
self.node_names.append(NBNodeEntry(name, type ,flags))
self.set_mac_in_hexa(self.get_unit_id())
except Exception,e:
raise NetBIOSError( 'Wrong packet format ' )
def set_mac_in_hexa(self, data):
data_aux = ''
for d in data:
if data_aux == '':
data_aux = '%02x' % ord(d)
else:
data_aux += '-%02x' % ord(d)
self.mac = string.upper(data_aux)
def get_num_names(self):
return self.num_names
def get_mac(self):
return self.mac
def set_num_names(self, num):
self.num_names = num
def get_node_names(self):
return self.node_names
def add_node_name(self,node_names):
self.node_names.append(node_names)
self.num_names += 1
def rawData(self):
res = pack('!B', self.num_names)
for i in range(0, self.num_names):
res += self.node_names[i].rawData()
return res
class NBPositiveNameQueryResponse(NBResourceRecord):
def __init__(self,data = 0):
NBResourceRecord.__init__(self,data)
self.add_entries = [ ]
if data:
self._data = self.get_rdata()
class NetBIOSPacket:
""" This is a packet as defined in RFC 1002 """
def __init__(self, data = 0):
self.name_trn_id = 0x0 # Transaction ID for Name Service Transaction.
# Requestor places a unique value for each active
# transaction. Responder puts NAME_TRN_ID value
# from request packet in response packet.
self.opcode = 0 # Packet type code
self.nm_flags = 0 # Flags for operation
self.rcode = 0 # Result codes of request.
self.qdcount = 0 # Unsigned 16 bit integer specifying the number of entries in the question section of a Name
self.ancount = 0 # Unsigned 16 bit integer specifying the number of
# resource records in the answer section of a Name
# Service packet.
self.nscount = 0 # Unsigned 16 bit integer specifying the number of
# resource records in the authority section of a
# Name Service packet.
self.arcount = 0 # Unsigned 16 bit integer specifying the number of
# resource records in the additional records
# section of a Name Service packeT.
self.questions = ''
self.answers = ''
if data == 0:
self._data = ''
else:
try:
self._data = data
self.opcode = ord(data[2]) >> 3
self.nm_flags = ((ord(data[2]) & 0x3) << 4) | ((ord(data[3]) & 0xf0) >> 4)
self.name_trn_id = unpack('>H', self._data[:2])[0]
self.rcode = ord(data[3]) & 0x0f
self.qdcount = unpack('>H', self._data[4:6])[0]
self.ancount = unpack('>H', self._data[6:8])[0]
self.nscount = unpack('>H', self._data[8:10])[0]
self.arcount = unpack('>H', self._data[10:12])[0]
self.answers = self._data[12:]
except Exception,e:
raise NetBIOSError( 'Wrong packet format ' )
def set_opcode(self, opcode):
self.opcode = opcode
def set_trn_id(self, trn):
self.name_trn_id = trn
def set_nm_flags(self, nm_flags):
self.nm_flags = nm_flags
def set_rcode(self, rcode):
self.rcode = rcode
def addQuestion(self, question, qtype, qclass):
self.qdcount = self.qdcount + 1
self.questions += question + pack('!HH',qtype,qclass)
def get_trn_id(self):
return self.name_trn_id
def get_rcode(self):
return self.rcode
def get_nm_flags(self):
return self.nm_flags
def get_opcode(self):
return self.opcode
def get_qdcount(self):
return self.qdcount
def get_ancount(self):
return self.ancount
def get_nscount(self):
return self.nscount
def get_arcount(self):
return self.arcount
def rawData(self):
secondWord = self.opcode << 11
secondWord = secondWord | (self.nm_flags << 4)
secondWord = secondWord | self.rcode
data = pack('!HHHHHH', self.name_trn_id, secondWord , self.qdcount, self.ancount, self.nscount, self.arcount) + self.questions + self.answers
return data
def get_answers(self):
return self.answers
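# Hedged sketch: assembling a broadcast name query with the class above
# (transaction id and name are illustrative; encode_name is defined later
# in this module):
#   p = NetBIOSPacket()
#   p.set_trn_id(0x1234)
#   p.set_nm_flags(NM_FLAGS_BROADCAST)
#   p.addQuestion(encode_name('FILESERVER', TYPE_SERVER, None), QUESTION_TYPE_NB, QUESTION_CLASS_IN)
#   wire = p.rawData()  # 12-byte header followed by the question section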
class NBHostEntry:
def __init__(self, nbname, nametype, ip):
self.__nbname = nbname
self.__nametype = nametype
self.__ip = ip
def get_nbname(self):
return self.__nbname
def get_nametype(self):
return self.__nametype
def get_ip(self):
return self.__ip
def __repr__(self):
return '<NBHostEntry instance: NBname="' + self.__nbname + '", IP="' + self.__ip + '">'
class NBNodeEntry:
def __init__(self, nbname, nametype, flags):
self.__nbname = string.ljust(nbname,17)
self.__nametype = nametype
self.__flags = flags
self.__isgroup = flags & 0x8000
self.__nodetype = flags & 0x6000
self.__deleting = flags & 0x1000
self.__isconflict = flags & 0x0800
self.__isactive = flags & 0x0400
self.__ispermanent = flags & 0x0200
def get_nbname(self):
return self.__nbname
def get_nametype(self):
return self.__nametype
def is_group(self):
return self.__isgroup
def get_nodetype(self):
return self.__nodetype
def is_deleting(self):
return self.__deleting
def is_conflict(self):
return self.__isconflict
def is_active(self):
return self.__isactive
def is_permanent(self):
return self.__ispermanent
def set_nbname(self, name):
self.__nbname = string.ljust(name,17)
def set_nametype(self, type):
self.__nametype = type
def set_flags(self,flags):
self.__flags = flags
def __repr__(self):
s = '<NBNodeEntry instance: NBname="' + self.__nbname + '" NameType="' + NAME_TYPES[self.__nametype] + '"'
if self.__isactive:
s = s + ' ACTIVE'
if self.__isgroup:
s = s + ' GROUP'
if self.__isconflict:
s = s + ' CONFLICT'
if self.__deleting:
s = s + ' DELETING'
return s
def rawData(self):
return self.__nbname + pack('!BH',self.__nametype, self.__flags)
class NetBIOS:
# Creates a NetBIOS instance without specifying any default NetBIOS domain nameserver.
# All queries will be sent through the servport.
def __init__(self, servport = NETBIOS_NS_PORT):
#s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#has_bind = 1
#for i in range(0, 10):
# We try to bind to a port for 10 tries
# try:
# s.bind(( INADDR_ANY, randint(10000, 60000) ))
# s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
# has_bind = 1
# except socket.error, ex:
# pass
#if not has_bind:
# raise NetBIOSError, ( 'Cannot bind to a good UDP port', ERRCLASS_OS, errno.EAGAIN )
#self.__sock = s
self.__servport = servport
self.__nameserver = None
self.__broadcastaddr = BROADCAST_ADDR
self.mac = '00-00-00-00-00-00'
def _setup_connection(self, dstaddr):
port = randint(10000, 60000)
af, socktype, proto, canonname, sa = socket.getaddrinfo(dstaddr, port, 0, socket.SOCK_DGRAM)[0]
s = socket.socket(af, socktype, proto)
has_bind = 0
for i in range(0, 10):
# We try to bind to a port for 10 tries
try:
s.bind(( INADDR_ANY, randint(10000, 60000) ))
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
has_bind = 1
break
except socket.error, ex:
pass
if not has_bind:
raise NetBIOSError, ( 'Cannot bind to a good UDP port', ERRCLASS_OS, errno.EAGAIN )
self.__sock = s
# Set the default NetBIOS domain nameserver.
def set_nameserver(self, nameserver):
self.__nameserver = nameserver
# Return the default NetBIOS domain nameserver, or None if none is specified.
def get_nameserver(self):
return self.__nameserver
# Set the broadcast address to be used for query.
def set_broadcastaddr(self, broadcastaddr):
self.__broadcastaddr = broadcastaddr
# Return the broadcast address to be used, or BROADCAST_ADDR if default broadcast address is used.
def get_broadcastaddr(self):
return self.__broadcastaddr
# Returns a list of NBHostEntry instances containing the host information for nbname.
# If a NetBIOS domain nameserver has been specified, it will be used for the query.
# Otherwise, the query is broadcasted on the broadcast address.
def gethostbyname(self, nbname, type = TYPE_WORKSTATION, scope = None, timeout = 1):
return self.__queryname(nbname, self.__nameserver, type, scope, timeout)
# Returns a list of NBNodeEntry instances containing node status information for nbname.
# If destaddr contains an IP address, then this will become an unicast query on the destaddr.
# Raises NetBIOSTimeout if timeout (in secs) is reached.
# Raises NetBIOSError for other errors
def getnodestatus(self, nbname, destaddr = None, type = TYPE_WORKSTATION, scope = None, timeout = 1):
if destaddr:
return self.__querynodestatus(nbname, destaddr, type, scope, timeout)
else:
return self.__querynodestatus(nbname, self.__nameserver, type, scope, timeout)
def getnetbiosname(self, ip):
entries = self.getnodestatus('*',ip)
entries = filter(lambda x:x.get_nametype() == TYPE_SERVER, entries)
return entries[0].get_nbname().strip()
def getmacaddress(self):
return self.mac
def __queryname(self, nbname, destaddr, type, scope, timeout):
self._setup_connection(destaddr)
netbios_name = string.upper(nbname)
trn_id = randint(1, 32000)
p = NetBIOSPacket()
p.set_trn_id(trn_id)
qn_label = encode_name(netbios_name, type, scope)
p.addQuestion(qn_label, QUESTION_TYPE_NB, QUESTION_CLASS_IN)
if not destaddr:
p.set_nm_flags(NM_FLAGS_BROADCAST)
destaddr = self.__broadcastaddr
wildcard_query = netbios_name == '*'
req = p.rawData()
self.__sock.sendto(req, 0, ( destaddr, self.__servport ))
addrs = [ ]
tries = 3
while 1:
try:
ready, _, _ = select.select([ self.__sock.fileno() ], [ ] , [ ], timeout)
if not ready:
if tries and not wildcard_query:
# Retry again until tries == 0
self.__sock.sendto(req, 0, ( destaddr, self.__servport ))
tries = tries - 1
elif wildcard_query:
return addrs
else:
raise NetBIOSTimeout
else:
data, _ = self.__sock.recvfrom(65536, 0)
self.__sock.close()
res = NetBIOSPacket(data)
if res.get_trn_id() == p.get_trn_id():
if res.get_rcode():
if res.get_rcode() == 0x03:
return None
else:
raise NetBIOSError, ( 'Negative name query response', ERRCLASS_QUERY, res.get_rcode() )
answ = NBPositiveNameQueryResponse(res.get_answers())
if not wildcard_query:
return addrs
except select.error, ex:
if ex[0] != errno.EINTR and ex[0] != errno.EAGAIN:
raise NetBIOSError, ( 'Error occurs while waiting for response', ERRCLASS_OS, ex[0] )
except socket.error, ex:
pass
def __querynodestatus(self, nbname, destaddr, type, scope, timeout):
self._setup_connection(destaddr)
trn_id = randint(1, 32000)
p = NetBIOSPacket()
p.set_trn_id(trn_id)
netbios_name = string.upper(nbname)
qn_label = encode_name(netbios_name, type, scope)
p.addQuestion(qn_label, QUESTION_TYPE_NBSTAT, QUESTION_CLASS_IN)
if not destaddr:
p.set_nm_flags(NM_FLAGS_BROADCAST)
destaddr = self.__broadcastaddr
req = p.rawData()
tries = 3
while 1:
try:
self.__sock.sendto(req, 0, ( destaddr, self.__servport ))
ready, _, _ = select.select([ self.__sock.fileno() ], [ ] , [ ], timeout)
if not ready:
if tries:
# Retry again until tries == 0
tries = tries - 1
else:
raise NetBIOSTimeout
else:
try:
data, _ = self.__sock.recvfrom(65536, 0)
except Exception, e:
raise NetBIOSError, "recvfrom error: %s" % str(e)
self.__sock.close()
res = NetBIOSPacket(data)
if res.get_trn_id() == p.get_trn_id():
if res.get_rcode():
if res.get_rcode() == 0x03:
# I'm just guessing here
raise NetBIOSError, "Cannot get data from server"
else:
raise NetBIOSError, ( 'Negative name query response', ERRCLASS_QUERY, res.get_rcode() )
answ = NBNodeStatusResponse(res.get_answers())
self.mac = answ.get_mac()
return answ.get_node_names()
except select.error, ex:
if ex[0] != errno.EINTR and ex[0] != errno.EAGAIN:
raise NetBIOSError, ( 'Error occurs while waiting for response', ERRCLASS_OS, ex[0] )
except socket.error, ex:
raise NetBIOSError, 'Connection error: %s' % str(ex)
def encode_name(name, type, scope):
if name == '*':
name = name + '\0' * 15
elif len(name) > 15:
name = name[:15] + chr(type)
else:
name = string.ljust(name, 15) + chr(type)
encoded_name = chr(len(name) * 2) + re.sub('.', _do_first_level_encoding, name)
if scope:
encoded_scope = ''
for s in string.split(scope, '.'):
encoded_scope = encoded_scope + chr(len(s)) + s
return encoded_name + encoded_scope + '\0'
else:
return encoded_name + '\0'
def _do_first_level_encoding(m):
s = ord(m.group(0))
return string.uppercase[s >> 4] + string.uppercase[s & 0x0f]
def decode_name(name):
name_length = ord(name[0])
assert name_length == 32
decoded_name = re.sub('..', _do_first_level_decoding, name[1:33])
if name[33] == '\0':
return 34, decoded_name, ''
else:
decoded_domain = ''
offset = 34
while 1:
domain_length = ord(name[offset])
if domain_length == 0:
break
decoded_domain += '.' + name[offset + 1:offset + 1 + domain_length]
offset = offset + domain_length + 1
return offset + 1, decoded_name, decoded_domain
def _do_first_level_decoding(m):
s = m.group(0)
return chr(((ord(s[0]) - ord('A')) << 4) | (ord(s[1]) - ord('A')))
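# Hedged round-trip sketch of the RFC 1001 "first level" encoding above
# (names are space-padded to 15 chars plus a type byte, then half-ASCII
# encoded into 32 bytes behind a length byte):
#   >>> wire = encode_name('FOO', TYPE_SERVER, None)
#   >>> decode_name(wire)[1][:15].rstrip()
#   'FOO'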
class NetBIOSSessionPacket:
def __init__(self, data = 0):
self.type = 0x0
self.flags = 0x0
self.length = 0x0
if data == 0:
self._trailer = ''
else:
try:
self.type = ord(data[0])
self.flags = ord(data[1])
self.length = unpack('!H', data[2:4])[0]
self._trailer = data[4:]
except:
raise NetBIOSError( 'Wrong packet format ' )
def set_type(self, type):
self.type = type
def get_type(self):
return self.type
def rawData(self):
data = pack('!BBH',self.type,self.flags,self.length) + self._trailer
return data
def set_trailer(self,data):
self._trailer = data
self.length = len(data)
def get_length(self):
return self.length
def get_trailer(self):
return self._trailer
class NetBIOSSession:
def __init__(self, myname, remote_name, remote_host, remote_type = TYPE_SERVER, sess_port = NETBIOS_SESSION_PORT, timeout = None, local_type = TYPE_WORKSTATION, sock = None):
if len(myname) > 15:
self.__myname = string.upper(myname[:15])
else:
self.__myname = string.upper(myname)
self.__local_type = local_type
assert remote_name
# if destination port is SMB_SESSION_PORT and the remote name is *SMBSERVER, change it
# to the host's IP address to help work around the client mistake ;)
if remote_name == '*SMBSERVER' and sess_port == SMB_SESSION_PORT:
remote_name = remote_host
# If remote name is *SMBSERVER, try to query its real name; if it can't be resolved, continue and hope for the best
if remote_name == '*SMBSERVER':
nb = NetBIOS()
try:
res = nb.getnetbiosname(remote_host)
except:
res = None
pass
if res is not None:
remote_name = res
if len(remote_name) > 15:
self.__remote_name = string.upper(remote_name[:15])
else:
self.__remote_name = string.upper(remote_name)
self.__remote_type = remote_type
self.__remote_host = remote_host
if sock is not None:
# We are acting as a server
self._sock = sock
else:
self._sock = self._setup_connection((remote_host, sess_port))
if sess_port == NETBIOS_SESSION_PORT:
self._request_session(remote_type, local_type, timeout)
def get_myname(self):
return self.__myname
def get_mytype(self):
return self.__local_type
def get_remote_host(self):
return self.__remote_host
def get_remote_name(self):
return self.__remote_name
def get_remote_type(self):
return self.__remote_type
def close(self):
self._sock.close()
def get_socket(self):
return self._sock
class NetBIOSUDPSessionPacket(Structure):
TYPE_DIRECT_UNIQUE = 16
TYPE_DIRECT_GROUP = 17
FLAGS_MORE_FRAGMENTS = 1
FLAGS_FIRST_FRAGMENT = 2
FLAGS_B_NODE = 0
structure = (
('Type','B=16'), # Direct Unique Datagram
('Flags','B=2'), # FLAGS_FIRST_FRAGMENT
('ID','<H'),
('_SourceIP','>L'),
('SourceIP','"'),
('SourcePort','>H=138'),
('DataLength','>H-Data'),
('Offset','>H=0'),
('SourceName','z'),
('DestinationName','z'),
('Data',':'),
)
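# Hedged reading of the Structure mini-language used above: 'B=16' is an
# unsigned byte defaulting to 16, '<H'/'>H'/'>L' little/big-endian integers,
# 'z' a zero-terminated string, ':' raw bytes, and '>H-Data' a big-endian
# length derived from the 'Data' field; SourceIP is carried on the wire via
# the computed _SourceIP long (see getData below).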
def getData(self):
addr = self['SourceIP'].split('.')
addr = [int(x) for x in addr]
addr = (addr[0] << 24) | (addr[1] << 16) | (addr[2] << 8) | addr[3]
self['_SourceIP'] = addr
return Structure.getData(self)
def get_trailer(self):
return self['Data']
class NetBIOSUDPSession(NetBIOSSession):
def _setup_connection(self, peer):
af, socktype, proto, canonname, sa = socket.getaddrinfo(peer[0], peer[1], 0, socket.SOCK_DGRAM)[0]
sock = socket.socket(af, socktype, proto)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((INADDR_ANY, 138))
self.peer = peer
return sock
def _request_session(self, remote_type, local_type, timeout = None):
pass
def next_id(self):
# hasattr must check the name-mangled attribute; '__dgram_id' alone never matches
if not hasattr(self, '_NetBIOSUDPSession__dgram_id'):
self.__dgram_id = randint(1, 65535)
answer = self.__dgram_id
self.__dgram_id += 1
return answer
def send_packet(self, data):
# Yes... I know...
self._sock.connect(self.peer)
p = NetBIOSUDPSessionPacket()
p['ID'] = self.next_id()
p['SourceIP'] = self._sock.getsockname()[0]
p['SourceName'] = encode_name(self.get_myname(), self.get_mytype(), '')[:-1]
p['DestinationName'] = encode_name(self.get_remote_name(), self.get_remote_type(), '')[:-1]
p['Data'] = data
self._sock.sendto(str(p), self.peer)
self._sock.close()
self._sock = self._setup_connection(self.peer)
def recv_packet(self, timeout = None):
# The next loop is a workaround for a bigger problem:
# When data reaches higher layers, the lower headers are lost,
# and with them, for example, the source IP. Hence, SMB users
# can't know where packets are coming from... we need a better
# solution, right now, we will filter everything except packets
# coming from the remote_host specified in __init__()
while 1:
data, peer = self._sock.recvfrom(8192)
if peer == self.peer: break
return NetBIOSUDPSessionPacket(data)
class NetBIOSTCPSession(NetBIOSSession):
def __init__(self, myname, remote_name, remote_host, remote_type = TYPE_SERVER, sess_port = NETBIOS_SESSION_PORT, timeout = None, local_type = TYPE_WORKSTATION, sock = None, select_poll = False):
self.__select_poll = select_poll
if (self.__select_poll):
self.read_function = self.polling_read
else:
self.read_function = self.non_polling_read
NetBIOSSession.__init__(self, myname, remote_name, remote_host, remote_type = remote_type, sess_port = sess_port, timeout = timeout, local_type = local_type, sock=sock)
def _setup_connection(self, peer):
af, socktype, proto, canonname, sa = socket.getaddrinfo(peer[0], peer[1], 0, socket.SOCK_STREAM)[0]
sock = socket.socket(af, socktype, proto)
sock.connect(sa)
return sock
def send_packet(self, data):
p = NetBIOSSessionPacket()
p.set_type(NETBIOS_SESSION_MESSAGE)
p.set_trailer(data)
self._sock.send(p.rawData())
def recv_packet(self, timeout = None):
data = self.__read(timeout)
return NetBIOSSessionPacket(data)
def _request_session(self, remote_type, local_type, timeout = None):
p = NetBIOSSessionPacket()
remote_name = encode_name(self.get_remote_name(), remote_type, '')
myname = encode_name(self.get_myname(), local_type, '')
p.set_type(NETBIOS_SESSION_REQUEST)
p.set_trailer(remote_name + myname)
self._sock.send(p.rawData())
while 1:
p = self.recv_packet(timeout)
if p.get_type() == NETBIOS_SESSION_NEGATIVE_RESPONSE:
raise NetBIOSError, ( 'Cannot request session', ERRCLASS_SESSION, ord(p.get_trailer()[0]) )
elif p.get_type() == NETBIOS_SESSION_POSITIVE_RESPONSE:
break
else:
# Ignore all other messages, most probably keepalive messages
pass
def polling_read(self, read_length, timeout):
data = ''
if (timeout is None):
timeout = 3600
time_left = timeout
CHUNK_TIME = 0.025
bytes_left = read_length
while bytes_left > 0:
try:
ready, _, _ = select.select([self._sock.fileno() ], [ ], [ ], 0)
if not ready:
if time_left <= 0:
raise NetBIOSTimeout
else:
time.sleep(CHUNK_TIME)
time_left = time_left - CHUNK_TIME
continue
received = self._sock.recv(bytes_left)
if len(received) == 0:
raise NetBIOSError, ( 'Error while reading from remote', ERRCLASS_OS, None)
data = data + received
bytes_left = read_length - len(data)
except select.error, ex:
if ex[0] != errno.EINTR and ex[0] != errno.EAGAIN:
raise NetBIOSError, ( 'Error occurs while reading from remote', ERRCLASS_OS, ex[0] )
return data
def non_polling_read(self, read_length, timeout):
data = ''
bytes_left = read_length
while bytes_left > 0:
try:
ready, _, _ = select.select([self._sock.fileno() ], [ ], [ ], timeout)
if not ready:
raise NetBIOSTimeout
received = self._sock.recv(bytes_left)
if len(received) == 0:
raise NetBIOSError, ( 'Error while reading from remote', ERRCLASS_OS, None)
data = data + received
bytes_left = read_length - len(data)
except select.error, ex:
if ex[0] != errno.EINTR and ex[0] != errno.EAGAIN:
raise NetBIOSError, ( 'Error occurs while reading from remote', ERRCLASS_OS, ex[0] )
return data
def __read(self, timeout = None):
data = self.read_function(4, timeout)
type, flags, length = unpack('>ccH', data)
if ord(flags) & 0x01:
length = length | 0x10000
data2 = self.read_function(length, timeout)
return data + data2
ERRCLASS_QUERY = 0x00
ERRCLASS_SESSION = 0xf0
ERRCLASS_OS = 0xff
QUERY_ERRORS = { 0x01: 'Request format error. Please file a bug report.',
0x02: 'Internal server error',
0x03: 'Name does not exist',
0x04: 'Unsupported request',
0x05: 'Request refused'
}
SESSION_ERRORS = { 0x80: 'Not listening on called name',
0x81: 'Not listening for calling name',
0x82: 'Called name not present',
0x83: 'Called name present, but insufficient resources',
0x8f: 'Unspecified error'
}
def main():
print
if __name__ == '__main__':
main()
|