repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
reinaH/osf.io | website/addons/dataverse/tests/utils.py | 16 | 4845 | import mock
from dataverse import Connection, Dataverse, Dataset, DataverseFile
from tests.factories import ExternalAccountFactory
from website.addons.base.testing import AddonTestCase
class DataverseAddonTestCase(AddonTestCase):
    """Base test case for the Dataverse add-on.

    Pre-populates user and node settings with fixed example values so
    subclasses start from a known configuration.
    """

    ADDON_SHORT_NAME = 'dataverse'

    def set_user_settings(self, settings):
        """Give the user settings a valid-looking API token."""
        settings.api_token = 'snowman-frosty'

    def set_node_settings(self, settings):
        """Point the node settings at a fixed example dataverse/dataset."""
        for field, value in (
            ('dataverse_alias', 'ALIAS2'),
            ('dataverse', 'Example 2'),
            ('dataset_doi', 'doi:12.3456/DVN/00001'),
            ('dataset_id', '18'),
            ('dataset', 'Example (DVN/00001)'),
        ):
            setattr(settings, field, value)
def create_external_account(host='foo.bar.baz', token='doremi-abc-123'):
    """Create an external account for Dataverse, with fields populated the
    same way as `dataverse_add_user_account` does.
    """
    account_fields = {
        'provider': 'dataverse',
        'provider_name': 'Dataverse',
        'display_name': host,
        'oauth_key': host,
        'oauth_secret': token,
        # Note: provider_id in the addon is currently the same as
        # oauth_secret, but here we let it be generated by sequence to
        # avoid running into duplicate modular ODM entries.
    }
    return ExternalAccountFactory(**account_fields)
def create_mock_connection(token='snowman-frosty'):
    """
    Create a mock dataverse connection.

    Pass any credentials other than the default parameters and the
    connection will return None.
    """
    if token != 'snowman-frosty':
        return None

    connection = mock.create_autospec(Connection)
    connection.token = token
    connection.get_dataverses.return_value = [
        create_mock_dataverse('Example {0}'.format(number))
        for number in (1, 2, 3)
    ]

    def _get_dataverse(alias):
        # Dataverses are matched on the trailing character, mirroring the
        # 'Example <n>' / 'ALIAS<n>' naming scheme used above.
        if alias is None:
            return None
        for dataverse in connection.get_dataverses():
            if dataverse.title[-1] == alias[-1]:
                return dataverse
        return None

    connection.get_dataverse = mock.MagicMock(side_effect=_get_dataverse)
    connection.get_dataverse.return_value = create_mock_dataverse()
    return connection
def create_mock_dataverse(title='Example Dataverse 0'):
    """Build an autospecced Dataverse whose alias is derived from *title*."""
    dataverse = mock.create_autospec(Dataverse)
    type(dataverse).title = mock.PropertyMock(return_value=title)
    type(dataverse).is_published = mock.PropertyMock(return_value=True)
    # The alias reuses the last character of the title, e.g. 'ALIAS1' for
    # 'Example 1'.
    type(dataverse).alias = mock.PropertyMock(
        return_value='ALIAS{}'.format(title[-1]))

    dataverse.get_datasets.return_value = [
        create_mock_dataset('DVN/0000{0}'.format(number))
        for number in (1, 2, 3)
    ]

    def _get_dataset_by_doi(doi):
        for dataset in dataverse.get_datasets():
            if dataset.doi == doi:
                return dataset
        return None

    dataverse.get_dataset_by_doi = mock.MagicMock(
        side_effect=_get_dataset_by_doi)
    return dataverse
def create_mock_dataset(id='DVN/12345'):
    """Build an autospecced Dataset for *id*.

    Returns None (implicitly) when the id does not contain 'DVN', so
    callers asking for an invalid dataset get nothing back.
    """
    dataset = mock.create_autospec(Dataset)
    dataset.citation = 'Example Citation for {0}'.format(id)
    dataset.title = 'Example ({0})'.format(id)
    dataset.doi = 'doi:12.3456/{0}'.format(id)
    dataset.id = '18'
    dataset.get_state.return_value = 'DRAFT'

    def _create_file(name, published=False):
        # The requested file name is ignored; only the published flag
        # decides which canned file fixture comes back.
        if published:
            return create_mock_published_file()
        return create_mock_draft_file()

    def _create_files(published=False):
        return [_create_file('name.txt', published)]

    dataset.get_files = mock.MagicMock(side_effect=_create_files)
    dataset.get_file = mock.MagicMock(side_effect=_create_file)
    dataset.get_file_by_id = mock.MagicMock(side_effect=_create_file)

    # Fail (fall through to None) if not given a valid ID
    if 'DVN' in id:
        return dataset
def create_mock_draft_file(id='54321'):
    """Return an autospecced, unpublished DataverseFile named 'file.txt'."""
    draft = mock.create_autospec(DataverseFile)
    draft.name = 'file.txt'
    draft.id = id
    draft.is_published = False
    return draft
def create_mock_published_file(id='54321'):
    """Return an autospecced, published DataverseFile named 'published.txt'."""
    published = mock.create_autospec(DataverseFile)
    published.name = 'published.txt'
    published.id = id
    published.is_published = True
    return published
# Canned file-browser (HGrid-style) response payload for a single file;
# presumably consumed by the add-on's view tests — confirm against callers.
mock_responses = {
    'contents': {
        u'kind': u'item',
        u'name': u'file.txt',
        u'ext': u'.txt',
        u'file_id': u'54321',
        u'urls': {u'download': u'/project/xxxxx/dataverse/file/54321/download/',
                  u'delete': u'/api/v1/project/xxxxx/dataverse/file/54321/',
                  u'view': u'/project/xxxxx/dataverse/file/54321/'},
        u'permissions': {u'edit': False, u'view': True},
        u'addon': u'dataverse',
        u'hasPublishedFiles': True,
        u'state': 'published',
    }
}
| apache-2.0 |
Xeralux/tensorflow | tensorflow/contrib/boosted_trees/estimator_batch/custom_export_strategy_test.py | 47 | 9513 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the conversion code and for feature importances export.
Tests that cover conversion from TFBT format to a tensorflow.contrib.
decision_tree generic_tree_model format and feature importances export.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from google.protobuf import text_format
from tensorflow.contrib.boosted_trees.estimator_batch import custom_export_strategy
from tensorflow.contrib.boosted_trees.proto import tree_config_pb2
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class ConvertModelTest(test_util.TensorFlowTestCase):
  """Covers TFBT -> generic_tree_model conversion and feature importances."""

  def _make_trees(self):
    """Build a two-tree DecisionTreeEnsembleConfig fixture.

    Tree 0 is a single leaf; tree 1 exercises every split type (dense
    float, sparse float with default-left and default-right, and
    categorical).  Returns the parsed proto plus the feature column names
    in the order the columns were added.
    """
    dtec_str = """
      trees {
        nodes {
          leaf {
            vector {
              value: -1
            }
          }
        }
      }
      trees {
        nodes {
          dense_float_binary_split {
            feature_column: 0
            threshold: 1740.0
            left_id: 1
            right_id: 2
          }
          node_metadata {
            gain: 500
          }
        }
        nodes {
          leaf {
            vector {
              value: 0.6
            }
          }
        }
        nodes {
          sparse_float_binary_split_default_left {
            split {
              feature_column: 0
              threshold: 1500.0
              left_id: 3
              right_id: 4
            }
          }
          node_metadata {
            gain: 500
          }
        }
        nodes {
          categorical_id_binary_split {
            feature_column: 0
            feature_id: 5
            left_id: 5
            right_id: 6
          }
          node_metadata {
            gain: 500
          }
        }
        nodes {
          leaf {
            vector {
              value: 0.8
            }
          }
        }
        nodes {
          leaf {
            vector {
              value: 0.5
            }
          }
        }
        nodes {
          sparse_float_binary_split_default_right {
            split {
              feature_column: 1
              dimension_id:3
              threshold: -0.4
              left_id: 7
              right_id: 8
            }
          }
          node_metadata {
            gain: 3600
          }
        }
        nodes {
          leaf {
            vector {
              value: 0.36
            }
          }
        }
        nodes {
          leaf {
            vector {
              value: 18
            }
          }
        }
      }
      tree_weights: 1.0
      tree_weights: 0.1
    """
    dtec = tree_config_pb2.DecisionTreeEnsembleConfig()
    text_format.Merge(dtec_str, dtec)
    feature_columns = [
        "feature_b",
        "feature_a",
        "feature_a_m",
        "feature_d",
    ]
    return dtec, feature_columns

  def testConvertModel(self):
    """Conversion renames per-dimension features and scales leaves by weight."""
    dtec, feature_columns = self._make_trees()
    # Assume 2 sparse float columns, one with 1 dimension, the second one with
    # 5 dimensions.
    # The feature columns in the order they were added.
    out = custom_export_strategy.convert_to_universal_format(
        dtec, feature_columns, 1, 2, 1)
    # Features a and a_m are sparse float features, a_m is multidimensional.
    # Leaf values below are the fixture's leaves scaled by the tree weights
    # (1.0 and 0.1), e.g. 0.6 -> 0.06 and 18 -> 1.8.
    expected_tree = """
      features { key: "feature_a_0" }
      features { key: "feature_a_m_3" }
      features { key: "feature_b" }
      features { key: "feature_d" }
      model {
        ensemble {
          summation_combination_technique {
          }
          members {
            submodel {
              decision_tree {
                nodes {
                  node_id {
                  }
                  leaf {
                    vector {
                      value {
                        float_value: -1.0
                      }
                    }
                  }
                }
              }
            }
            submodel_id {
            }
          }
          members {
            submodel {
              decision_tree {
                nodes {
                  node_id {
                  }
                  binary_node {
                    left_child_id {
                      value: 1
                    }
                    right_child_id {
                      value: 2
                    }
                    inequality_left_child_test {
                      feature_id {
                        id {
                          value: "feature_b"
                        }
                      }
                      threshold {
                        float_value: 1740.0
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 1
                  }
                  leaf {
                    vector {
                      value {
                        float_value: 0.06
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 2
                  }
                  binary_node {
                    left_child_id {
                      value: 3
                    }
                    right_child_id {
                      value: 4
                    }
                    inequality_left_child_test {
                      feature_id {
                        id {
                          value: "feature_a_0"
                        }
                      }
                      threshold {
                        float_value: 1500.0
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 3
                  }
                  binary_node {
                    left_child_id {
                      value: 5
                    }
                    right_child_id {
                      value: 6
                    }
                    default_direction: RIGHT
                    custom_left_child_test {
                      [type.googleapis.com/tensorflow.decision_trees.MatchingValuesTest] {
                        feature_id {
                          id {
                            value: "feature_d"
                          }
                        }
                        value {
                          int64_value: 5
                        }
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 4
                  }
                  leaf {
                    vector {
                      value {
                        float_value: 0.08
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 5
                  }
                  leaf {
                    vector {
                      value {
                        float_value: 0.05
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 6
                  }
                  binary_node {
                    left_child_id {
                      value: 7
                    }
                    right_child_id {
                      value: 8
                    }
                    default_direction: RIGHT
                    inequality_left_child_test {
                      feature_id {
                        id {
                          value: "feature_a_m_3"
                        }
                      }
                      threshold {
                        float_value: -0.4
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 7
                  }
                  leaf {
                    vector {
                      value {
                        float_value: 0.036
                      }
                    }
                  }
                }
                nodes {
                  node_id {
                    value: 8
                  }
                  leaf {
                    vector {
                      value {
                        float_value: 1.8
                      }
                    }
                  }
                }
              }
            }
            submodel_id {
              value: 1
            }
          }
        }
      }"""
    self.assertProtoEquals(expected_tree, out)

  def testFeatureImportance(self):
    """Importances are gains summed per feature, scaled by tree weight."""
    dtec, feature_columns = self._make_trees()
    feature_importances = custom_export_strategy._get_feature_importances(
        dtec, feature_columns, 1, 2, 1)
    self.assertItemsEqual(
        ["feature_b", "feature_a_0", "feature_a_m_3", "feature_d"],
        feature_importances.keys())
    # Gains of 500 at weight 0.1 give 50; the 3600 gain gives 360.
    self.assertAlmostEqual(50.0, feature_importances["feature_b"], places=4)
    self.assertAlmostEqual(50.0, feature_importances["feature_a_0"], places=4)
    self.assertAlmostEqual(50.0, feature_importances["feature_d"], places=4)
    self.assertAlmostEqual(
        360.0, feature_importances["feature_a_m_3"], places=4)
# Allow running this test file directly.
if __name__ == "__main__":
  googletest.main()
| apache-2.0 |
indictranstech/internal-erpnext | erpnext/crm/doctype/newsletter/test_newsletter.py | 1 | 1301 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, unittest
from erpnext.crm.doctype.newsletter.newsletter import unsubscribe
from urllib import unquote
class TestNewsletter(unittest.TestCase):
	"""Integration tests for Newsletter sending and unsubscribe handling.

	Relies on the "_Test Newsletter List" fixture (see test_dependencies);
	presumably that list has three subscribers — confirm against fixtures.
	"""

	def setUp(self):
		# Re-subscribe everyone so each test starts from a clean slate.
		frappe.db.sql("update `tabNewsletter List Subscriber` set unsubscribed = 0")

	def test_send(self):
		self.send_newsletter()
		# One queued Bulk Email per subscriber.
		self.assertEquals(len(frappe.get_all("Bulk Email")), 3)

	def test_unsubscribe(self):
		# test unsubscribe
		self.send_newsletter()
		# Recover the recipient email from the signed unsubscribe query
		# string produced while sending.
		email = unquote(frappe.local.flags.signed_query_string.split("email=")[1].split("&")[0])
		unsubscribe(email, "_Test Newsletter List")
		# After one unsubscribe, a re-send queues one fewer email.
		self.send_newsletter()
		self.assertEquals(len(frappe.get_all("Bulk Email")), 2)

	def send_newsletter(self):
		"""Create and send a fresh test newsletter, clearing the queue first."""
		frappe.db.sql("delete from `tabBulk Email`")
		frappe.delete_doc("Newsletter", "_Test Newsletting")
		newsletter = frappe.get_doc({
			"doctype": "Newsletter",
			"subject": "_Test Newsletting",
			"newsletter_list": "_Test Newsletter List",
			"send_from": "Test Sender <test_sender@example.com>",
			"message": "Testing my news."
		}).insert(ignore_permissions=True)
		newsletter.send_emails()
test_dependencies = ["Newsletter List"]
| agpl-3.0 |
socialwifi/dila | dila/data/alembic/versions/85e1d78ef077_add_po_metadata.py | 1 | 1264 | """Add po metadata.
Revision ID: 85e1d78ef077
Revises: deb5c1dad94a
Create Date: 2017-05-15 08:10:55.332994
"""
# revision identifiers, used by Alembic.
revision = '85e1d78ef077'
down_revision = 'deb5c1dad94a'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create the ``po_metadata`` table: one key/value row per
    (resource, language) pair, for storing PO-file header metadata.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('po_metadata',
        # Surrogate key generated server-side (requires the uuid-ossp
        # extension for uuid_generate_v4()).
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('resource_pk', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('language_pk', sa.Integer(), nullable=False),
        sa.Column('key', sa.Text(), nullable=False),
        sa.Column('value', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['language_pk'], ['language.id'], ),
        sa.ForeignKeyConstraint(['resource_pk'], ['resource.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # Each (resource, language, key) triple may appear only once.
        sa.UniqueConstraint('resource_pk', 'language_pk', 'key', name='resource_language_key_uc')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``po_metadata`` table (reverse of :func:`upgrade`)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('po_metadata')
    # ### end Alembic commands ###
| bsd-3-clause |
radomd92/botjagwar | api/translation/core.py | 1 | 12807 | # coding: utf8
import asyncio
import logging
import requests
from aiohttp import ClientSession
from api import entryprocessor
from api.exceptions import NoWordException
from api.output import Output
from api.servicemanager import DictionaryServiceManager
from database.exceptions.http import WordDoesNotExistException
from object_model.word import Entry
log = logging.getLogger(__name__)

# Default on-disk location of the entry-translator configuration.
default_data_file = '/opt/botjagwar/conf/entry_translator/'
# Languages written in Cyrillic script; their titles need combining-accent
# stripping (see _generate_redirections / _get_unaccented_word below).
CYRILLIC_ALPHABET_LANGUAGES = ['ru', 'uk', 'bg', 'be']
# Languages never translated from/into (copied onto Translation instances).
LANGUAGE_BLACKLIST = ['fr', 'en', 'sh', 'ar', 'de', 'zh']
# NOTE(review): instantiating the service manager at import time is a
# module-level side effect — confirm it is cheap and config is available.
URL_HEAD = DictionaryServiceManager().get_url_head()
# Wiki language edition this bot writes to (Malagasy).
WORKING_WIKI_LANGUAGE = 'mg'
class Translation:
    """Translates wiktionary entries and pages into the working wiki
    language (Malagasy), persisting results through ``Output``.
    """

    def __init__(self):
        """Translate words and pages into Malagasy.

        (Original docstring was in Malagasy: "Mandika teny ary pejy @ teny
        malagasy".)
        """
        # NOTE(review): super(self.__class__, self) breaks under
        # subclassing (infinite recursion); harmless while the class has
        # no subclasses.
        super(self.__class__, self).__init__()
        self.output = Output()
        self.language_blacklist = LANGUAGE_BLACKLIST
        self.loop = asyncio.get_event_loop()

    def _save_translation_from_bridge_language(self, infos: Entry):
        # The on-wiki save path below is disabled; only the local database
        # write remains active.
        # summary = "Dikan-teny avy amin'ny dikan-teny avy amin'i %s.wiktionary" % infos.origin_wiktionary_edition
        # summary += " (%s)" % get_version()
        # wikipage = self.output.wikipage(infos)
        # target_language_page = pwbot.Page(pwbot.Site(WORKING_WIKI_LANGUAGE, 'wiktionary'), infos.entry)
        # try:
        #     if target_language_page.exists():
        #         page_content = target_language_page.get()
        #         if page_content.find('{{=%s=}}' % infos.language) != -1:
        #             self.output.db(infos)
        #             return
        #         else:
        #             wikipage += page_content
        #             summary = "+" + summary
        # except pwbot.exceptions.IsRedirectPage:
        #     infos.entry = target_language_page.getRedirectTarget().title()
        #     self.output.db(infos)
        #     self._save_translation_from_bridge_language(infos)
        #     return
        #
        # except pwbot.exceptions.InvalidTitle as exc:
        #     log.exception(exc)
        #     return
        #
        # except Exception as exc:
        #     log.exception(exc)
        #     return
        #
        # target_language_page.put_async(wikipage, summary)
        self.output.db(infos)

    def _save_translation_from_page(self, infos: Entry):
        # As above: the wiki write is disabled, only the DB write runs.
        # summary = "Dikan-teny avy amin'ny pejy avy amin'i %s.wiktionary" % infos.language
        # summary += " (%s)" % get_version()
        # wikipage = self.output.wikipage(infos)
        # target_language_page = pwbot.Page(pwbot.Site(WORKING_WIKI_LANGUAGE, 'wiktionary'), infos.entry)
        # if target_language_page.exists():
        #     page_content = target_language_page.get()
        #     if page_content.find('{{=%s=}}' % infos.language) != -1:
        #         self.output.db(infos)
        #         return
        #     else:
        #         wikipage += page_content
        #         wikipage, edit_summary = Autoformat(wikipage).wikitext()
        #         summary = "+" + summary + ", %s" % edit_summary
        #
        # target_language_page.put_async(wikipage, summary)
        self.output.db(infos)

    def process_entry_in_native_language(self, content: str, title: str, language: str, unknowns: list):
        """
        Yields each translation found
        :param content: wikitext of the source page
        :param title: source page title (the word being translated)
        :param language: language code of the source wiki
        :param unknowns: list mutated in place with untranslatable words
        :return:
        """
        wiktionary_processor_class = entryprocessor.WiktionaryProcessorFactory.create(language)
        wiktionary_processor = wiktionary_processor_class()
        try:
            wiktionary_processor.set_text(content)
            wiktionary_processor.set_title(title)
            translations = wiktionary_processor.retrieve_translations()
        except Exception as exc:
            log.exception(exc)
            return
        for translation in translations:
            entry = translation.entry
            pos = translation.part_of_speech
            entry_language = translation.language
            if entry_language in self.language_blacklist:  # check in language blacklist
                continue
            try:
                # Keep only target-language definitions whose part of
                # speech matches this translation's.
                target_language_translations = [
                    t['definition'] for t in
                    self.translate_word(title, language)
                    if t['part_of_speech'] == str(pos)
                ]
            except NoWordException as exc:
                log.debug('No translation found for %s in %s' % (title, language))
                # NOTE(review): membership is checked on `title` but a
                # (title, language) tuple is appended — duplicates are
                # possible; confirm intended.
                if title not in unknowns:
                    unknowns.append((title, language))
                break
            infos = Entry(
                entry=entry,
                part_of_speech=str(pos),
                entry_definition=target_language_translations,
                language=entry_language,
                origin_wiktionary_edition=language,
                origin_wiktionary_page_name=title)
            yield infos

    def process_infos(self, infos):
        """Persist *infos* unless the entry already exists; always returns 1."""
        resp = requests.get(URL_HEAD + '/entry/%s/%s' % (infos.language, infos.entry))
        # Anything other than "word does not exist" means the entry is
        # already known, so skip the writes.
        if resp.status_code != WordDoesNotExistException.status_code:
            return 1
        self.output.db(infos)
        _generate_redirections(infos)
        self._save_translation_from_bridge_language(infos)
        return 1

    def process_entry_in_foreign_language(self, entry: Entry, title: str, language: str, unknowns: list):
        """Translate a foreign-language entry via its first definition.

        Returns a new Entry with translated definitions, or None when the
        language is blacklisted or no matching translation exists.
        """
        if entry.language in self.language_blacklist:
            log.debug("language '%s' is blacklisted, so not translating or processing." % language)
            return
        try:
            log.debug("Translating word in foreign language (%s in '%s')" % (entry.entry_definition[0], language))
            target_language_translations = []
            for translation in self.translate_word(entry.entry_definition[0], language):
                if translation['part_of_speech'] == entry.part_of_speech:
                    target_language_translations.append(translation['definition'])
            if len(target_language_translations) == 0:
                log.debug("No matching translations found")
                return
        except NoWordException:
            log.debug("No translation found")
            # NOTE(review): same membership/append mismatch as in
            # process_entry_in_native_language — confirm intended.
            if title not in unknowns:
                unknowns.append((entry.entry_definition[0], language))
            return
        infos = Entry(
            entry=title,
            part_of_speech=str(entry.part_of_speech),
            entry_definition=target_language_translations,
            language=entry.language,
            origin_wiktionary_edition=language,
            origin_wiktionary_page_name=entry.entry_definition[0])
        return infos

    def process_wiktionary_wiki_page(self, wiki_page):
        """Process a pywikibot page object; returns (unknowns, count)."""
        unknowns = []
        try:
            language = wiki_page.site.language()
        except Exception as exc:
            log.error("Couldn't get language.")
            log.exception(exc)
            return unknowns, 0
        # BEGINNING
        ret = 0
        wiktionary_processor_class = entryprocessor.WiktionaryProcessorFactory.create(language)
        wiktionary_processor = wiktionary_processor_class()
        # Skip non-article pages (titles with a namespace prefix).
        if wiki_page.title().find(':') != -1:
            return unknowns, ret
        if wiki_page.namespace() != 0:
            return unknowns, ret
        wiktionary_processor.process(wiki_page)
        try:
            entries = wiktionary_processor.getall()
        except Exception as exc:
            log.error("getall() failed.")
            log.exception(exc)
            return unknowns, ret
        for entry in entries:
            if entry.entry is None or entry.entry_definition is None:
                continue
            # Attempt a translation of a possible non-lemma entry.
            # Part of the effort to integrate word_forms.py in the IRC bot.
            if entry.language == language:  # if entry in the content language
                for info in self.process_entry_in_native_language(
                        wiki_page.get(), wiki_page.title(), language, unknowns):
                    ret += self.process_infos(info)
            else:
                info = self.process_entry_in_foreign_language(
                    entry, wiki_page.title(), language, unknowns)
                if info is not None:
                    _generate_redirections(info)
                    self._save_translation_from_bridge_language(info)
                    self._save_translation_from_page(info)
                    ret += 1
        # Malagasy language pages
        # self.update_malagasy_word(translations_in_target_language)
        return unknowns, ret

    @staticmethod
    def translate_word(word: str, language: str):
        """Fetch deduplicated translations of *word* from the dictionary
        service; raises NoWordException when none exist.
        """
        url = URL_HEAD + '/translations/%s/%s/%s' % (language, WORKING_WIKI_LANGUAGE, word)
        resp = requests.get(url)
        if resp.status_code == WordDoesNotExistException.status_code:
            raise NoWordException()
        translations_json = resp.json()
        translations = []
        if len(translations_json) < 1:
            raise NoWordException()
        else:
            for t in translations_json:
                q = {
                    'part_of_speech': t['part_of_speech'],
                    'definition': t['definition']
                }
                if q not in translations:
                    translations.append(q)
        log.debug(str(translations))
        return translations

    async def _translate_word(self, word: str, language: str):
        """Async twin of translate_word, using aiohttp instead of requests."""
        url = URL_HEAD + '/translations/%s/%s/%s' % (language, WORKING_WIKI_LANGUAGE, word)
        async with ClientSession() as client_session:
            async with client_session.get(url) as resp:
                if resp.status == WordDoesNotExistException.status_code:
                    raise NoWordException()
                translations_json = await resp.json()
                translations = []
                if len(translations_json) < 1:
                    raise NoWordException()
                else:
                    for t in translations_json:
                        q = {
                            'part_of_speech': t['part_of_speech'],
                            'definition': t['definition']
                        }
                        if q not in translations:
                            translations.append(q)
                return translations

    def process_wiktionary_wikitext(self, title: str, language: str, content: str):
        """
        Attempt to make a simplified version of all the methods above
        :param title: page title (the word)
        :param language: language code of the source wiki
        :param content: raw wikitext of the page
        :return: -1 on processing failure, otherwise None
        """
        wiktionary_processor_class = entryprocessor.WiktionaryProcessorFactory.create(language)
        wiktionary_processor = wiktionary_processor_class()
        wiktionary_processor.set_text(content)
        wiktionary_processor.set_title(title)
        try:
            entries = wiktionary_processor.getall()
        except Exception as exc:
            log.exception(exc)
            return -1
        # NOTE(review): looks like a debug leftover — consider log.debug.
        print(entries)
        for entry in entries:
            if entry.entry is None or entry.entry_definition is None:
                continue
            if entry.language == language:  # if entry in the content language
                # NOTE(review): stray 'pass' — appears to be a leftover;
                # confirm against the repository history.
                pass
                for info in self.process_entry_in_native_language(content, title, language, []):
                    self.process_infos(info)
            else:
                info = self.process_entry_in_foreign_language(entry, title, language, [])
                if info is not None:
                    self.output.db(info)
                    _generate_redirections(info)
                    self._save_translation_from_bridge_language(info)
                    self._save_translation_from_page(info)
        # Malagasy language pages
        # self.update_malagasy_word(translations_in_target_language)
def _generate_redirections(infos):
    """Normalise the entry title for Cyrillic-script languages.

    Strips combining acute/grave accents and maps Latin 'ae' (æ) to its
    Cyrillic counterpart (ӕ), then updates ``infos.entry`` in place when
    the normalised title differs.
    """
    target = infos.entry
    if infos.language in CYRILLIC_ALPHABET_LANGUAGES:
        for accent_mark in "́̀":
            if accent_mark in target:
                target = target.replace(accent_mark, "")
        if "æ" in target:
            target = target.replace("æ", "ӕ")
    if infos.entry != target:
        # On-wiki redirect creation is disabled; only the title is fixed.
        # page = pwbot.Page(pwbot.Site(WORKING_WIKI_LANGUAGE, 'wiktionary'), infos.entry)
        # if not page.exists():
        #     page.put_async("#FIHODINANA [[%s]]" % redirection_target, "fihodinana")
        infos.entry = target
def _get_unaccented_word(word):
for char in "́̀":
if word.find(char) != -1:
word = word.replace(char, "")
return word
| mit |
Earlz/ninjacoin-exploit | share/qt/extract_strings_qt.py | 1294 | 1784 | #!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
# Generated C++ file holding the translatable strings.
OUT_CPP="src/qt/bitcoinstrings.cpp"
# Representation of an empty msgid in parsed 'po' output.
EMPTY=['""']
def parse_po(text):
    """
    Parse 'po' format produced by xgettext.
    Return a list of (msgid, msgstr) tuples.
    """
    messages = []
    current_id, current_str = [], []
    reading_id = reading_str = False
    for raw_line in text.split('\n'):
        line = raw_line.rstrip('\r')
        if line.startswith('msgid '):
            if reading_str:
                # The previous entry is complete; store it before
                # starting a new one.
                messages.append((current_id, current_str))
                reading_str = False
            # message start
            reading_id = True
            current_id = [line[6:]]
        elif line.startswith('msgstr '):
            reading_id = False
            reading_str = True
            current_str = [line[7:]]
        elif line.startswith('"'):
            # Quoted continuation lines extend whichever field is open.
            if reading_id:
                current_id.append(line)
            if reading_str:
                current_str.append(line)
    if reading_str:
        messages.append((current_id, current_str))
    return messages
# Collect every C++ source/header, run xgettext over them, and emit the
# translatable strings as QT_TRANSLATE_NOOP entries for Qt Linguist.
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')

# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()

messages = parse_po(out)

f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {')
for (msgid, msgstr) in messages:
    # Skip the po header entry (empty msgid).
    if msgid != EMPTY:
        f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
| mit |
jgoclawski/django | django/conf/locale/ko/formats.py | 404 | 2320 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y년 n월 j일'
TIME_FORMAT = 'A g:i'
DATETIME_FORMAT = 'Y년 n월 j일 g:i A'
YEAR_MONTH_FORMAT = 'Y년 n월'
MONTH_DAY_FORMAT = 'n월 j일'
SHORT_DATE_FORMAT = 'Y-n-j.'
SHORT_DATETIME_FORMAT = 'Y-n-j H:i'
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y',  # '2006-10-25', '10/25/2006', '10/25/06'
    # '%b %d %Y', '%b %d, %Y',  # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',  # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',  # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',  # '25 October 2006', '25 October, 2006'
    '%Y년 %m월 %d일',  # '2006년 10월 25일', with localized suffix.
]
TIME_INPUT_FORMATS = [
    '%H:%M:%S',  # '14:30:59'
    '%H:%M:%S.%f',  # '14:30:59.000200'
    '%H:%M',  # '14:30'
    '%H시 %M분 %S초',  # '14시 30분 59초'
    '%H시 %M분',  # '14시 30분'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',  # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',  # '2006-10-25 14:30'
    '%Y-%m-%d',  # '2006-10-25'
    '%m/%d/%Y %H:%M:%S',  # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M:%S.%f',  # '10/25/2006 14:30:59.000200'
    '%m/%d/%Y %H:%M',  # '10/25/2006 14:30'
    '%m/%d/%Y',  # '10/25/2006'
    '%m/%d/%y %H:%M:%S',  # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M:%S.%f',  # '10/25/06 14:30:59.000200'
    '%m/%d/%y %H:%M',  # '10/25/06 14:30'
    '%m/%d/%y',  # '10/25/06'
    '%Y년 %m월 %d일 %H시 %M분 %S초',  # '2006년 10월 25일 14시 30분 59초'
    '%Y년 %m월 %d일 %H시 %M분',  # '2006년 10월 25일 14시 30분'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| bsd-3-clause |
frewsxcv/WeasyPrint | weasyprint/layout/min_max.py | 5 | 1773 | # coding: utf8
"""
weasyprint.layout.min_max
-------------------------
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import functools
def handle_min_max_width(function):
    """Decorate a function that sets the used width of a box to handle
    {min,max}-width.

    After the wrapped function runs, the resulting width is checked
    against the box's max-width and then min-width; on violation, the
    width is clamped, the computed margins are restored, and the wrapped
    function is run again.
    """
    @functools.wraps(function)
    def wrapper(box, *args):
        saved_margins = (box.margin_left, box.margin_right)

        def rerun_with(clamped_width):
            # Clamp, restore the originally computed margins, then let
            # the wrapped function recompute the layout.
            box.width = clamped_width
            box.margin_left, box.margin_right = saved_margins
            return function(box, *args)

        result = function(box, *args)
        if box.width > box.max_width:
            result = rerun_with(box.max_width)
        if box.width < box.min_width:
            result = rerun_with(box.min_width)
        return result

    # Expose the raw, unclamped implementation for callers that need it.
    wrapper.without_min_max = function
    return wrapper
def handle_min_max_height(function):
    """Decorate a function that sets the used height of a box to handle
    {min,max}-height.

    Mirrors handle_min_max_width: clamp to max-height first, then to
    min-height, restoring the computed margins and re-running the wrapped
    function after each clamp.
    """
    @functools.wraps(function)
    def wrapper(box, *args):
        saved_margins = (box.margin_top, box.margin_bottom)

        def rerun_with(clamped_height):
            box.height = clamped_height
            box.margin_top, box.margin_bottom = saved_margins
            return function(box, *args)

        result = function(box, *args)
        if box.height > box.max_height:
            result = rerun_with(box.max_height)
        if box.height < box.min_height:
            result = rerun_with(box.min_height)
        return result

    # Expose the raw, unclamped implementation for callers that need it.
    wrapper.without_min_max = function
    return wrapper
| bsd-3-clause |
saleemjaveds/https-github.com-openstack-nova | nova/vnc/xvp_proxy.py | 16 | 6189 | #!/usr/bin/env python
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Eventlet WSGI Services to proxy VNC for XCP protocol."""
import socket
import eventlet
import eventlet.green
import eventlet.greenio
import eventlet.wsgi
from oslo.config import cfg
import webob
from nova.consoleauth import rpcapi as consoleauth_rpcapi
from nova import context
from nova.i18n import _
from nova.openstack.common import log as logging
from nova import version
from nova import wsgi
LOG = logging.getLogger(__name__)

# Listen address/port options for the XCP VNC proxy WSGI service.
xvp_proxy_opts = [
    cfg.IntOpt('xvpvncproxy_port',
               default=6081,
               help='Port that the XCP VNC proxy should bind to'),
    cfg.StrOpt('xvpvncproxy_host',
               default='0.0.0.0',
               help='Address that the XCP VNC proxy should bind to'),
]

CONF = cfg.CONF
CONF.register_opts(xvp_proxy_opts)
class XCPVNCProxy(object):
    """Class to use the xvp auth protocol to proxy instance vnc consoles."""

    def one_way_proxy(self, source, dest):
        """Proxy tcp connection from source to dest."""
        while True:
            try:
                d = source.recv(32384)
            except Exception:
                d = None
            # If recv fails, send a write shutdown the other direction
            if d is None or len(d) == 0:
                dest.shutdown(socket.SHUT_WR)
                break
            # If send fails, terminate proxy in both directions
            try:
                # sendall raises an exception on write error, unlike send
                dest.sendall(d)
            except Exception:
                source.close()
                dest.close()
                break

    def handshake(self, req, connect_info, sockets):
        """Execute hypervisor-specific vnc auth handshaking (if needed).

        On success, stores the client and server sockets in *sockets*;
        on failure, returns without populating them.
        """
        host = connect_info['host']
        port = int(connect_info['port'])
        server = eventlet.connect((host, port))
        # Handshake as necessary
        if connect_info.get('internal_access_path'):
            server.sendall("CONNECT %s HTTP/1.1\r\n\r\n" %
                           connect_info['internal_access_path'])
            # Read one byte at a time until the end of the HTTP headers,
            # bounded at 4096 bytes.
            data = ""
            while True:
                b = server.recv(1)
                if b:
                    data += b
                    if data.find("\r\n\r\n") != -1:
                        # NOTE(review): str.find returns -1 when '200' is
                        # absent, which is truthy — so this error branch
                        # only fires when the status line *starts* with
                        # '200'. Looks inverted; confirm intended.
                        if not data.split("\r\n")[0].find("200"):
                            LOG.audit(_("Error in handshake: %s"), data)
                            return
                        break
                if not b or len(data) > 4096:
                    LOG.audit(_("Error in handshake: %s"), data)
                    return
        client = req.environ['eventlet.input'].get_socket()
        client.sendall("HTTP/1.1 200 OK\r\n\r\n")
        sockets['client'] = client
        sockets['server'] = server

    def proxy_connection(self, req, connect_info, start_response):
        """Spawn bi-directional vnc proxy."""
        sockets = {}
        t0 = eventlet.spawn(self.handshake, req, connect_info, sockets)
        t0.wait()

        if not sockets.get('client') or not sockets.get('server'):
            LOG.audit(_("Invalid request: %s"), req)
            start_response('400 Invalid Request',
                           [('content-type', 'text/html')])
            return "Invalid Request"

        client = sockets['client']
        server = sockets['server']

        # Pump bytes in both directions until either side closes.
        t1 = eventlet.spawn(self.one_way_proxy, client, server)
        t2 = eventlet.spawn(self.one_way_proxy, server, client)
        t1.wait()
        t2.wait()

        # Make sure our sockets are closed
        server.close()
        client.close()

    def __call__(self, environ, start_response):
        # WSGI entry point: validate the console token, then hand the
        # connection to the bi-directional proxy.
        try:
            req = webob.Request(environ)
            LOG.audit(_("Request: %s"), req)
            token = req.params.get('token')
            if not token:
                LOG.audit(_("Request made with missing token: %s"), req)
                start_response('400 Invalid Request',
                               [('content-type', 'text/html')])
                return "Invalid Request"
            ctxt = context.get_admin_context()
            api = consoleauth_rpcapi.ConsoleAuthAPI()
            connect_info = api.check_token(ctxt, token)
            if not connect_info:
                LOG.audit(_("Request made with invalid token: %s"), req)
                start_response('401 Not Authorized',
                               [('content-type', 'text/html')])
                return "Not Authorized"
            return self.proxy_connection(req, connect_info, start_response)
        except Exception as e:
            LOG.audit(_("Unexpected error: %s"), e)
class SafeHttpProtocol(eventlet.wsgi.HttpProtocol):
    """HttpProtocol wrapper to suppress IOErrors.

    The proxy code above always shuts down client connections, so we catch
    the IOError that raises when the SocketServer tries to flush the
    connection.
    """
    def finish(self):
        try:
            eventlet.green.BaseHTTPServer.BaseHTTPRequestHandler.finish(self)
        except IOError:
            # Expected: the proxy already shut down the client socket.
            pass
        eventlet.greenio.shutdown_safe(self.connection)
        self.connection.close()
def get_wsgi_server():
    """Build the WSGI server hosting the XCP VNC proxy application."""
    LOG.audit(_("Starting nova-xvpvncproxy node (version %s)"),
              version.version_string_with_package())

    # SafeHttpProtocol suppresses IOErrors raised when the proxy closes
    # client sockets before the HTTP layer flushes them.
    return wsgi.Server("XCP VNC Proxy",
                       XCPVNCProxy(),
                       protocol=SafeHttpProtocol,
                       host=CONF.xvpvncproxy_host,
                       port=CONF.xvpvncproxy_port)
| apache-2.0 |
SummerLW/Perf-Insight-Report | telemetry/telemetry/internal/backends/chrome_inspector/inspector_page_unittest.py | 28 | 1635 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.testing import tab_test_case
from telemetry.util import image_util
class InspectorPageTest(tab_test_case.TabTestCase):
  """Integration tests for tab navigation/inspection against a live browser.

  Each test drives ``self._tab`` (provided by TabTestCase) against small
  fixture pages such as blank.html and page_with_link.html.
  """

  def testPageNavigateToNormalUrl(self):
    self.Navigate('blank.html')

  def testCustomActionToNavigate(self):
    self.Navigate('page_with_link.html')
    self.assertEquals(
        self._tab.EvaluateJavaScript('document.location.pathname;'),
        '/page_with_link.html')

    # Clicking the link must trigger a real navigation to blank.html.
    self._tab.ExecuteJavaScript('document.getElementById("clickme").click();')
    self._tab.WaitForNavigate()

    self.assertEquals(
        self._tab.EvaluateJavaScript('document.location.pathname;'),
        '/blank.html')

  def testGetCookieByName(self):
    self.Navigate('blank.html')
    self._tab.ExecuteJavaScript('document.cookie="foo=bar"')
    self.assertEquals(self._tab.GetCookieByName('foo'), 'bar')

  def testScriptToEvaluateOnCommit(self):
    # The script must run at commit time, before document-ready completes.
    self.Navigate('blank.html',
                  script_to_evaluate_on_commit='var foo = "bar";')
    self._tab.WaitForDocumentReadyStateToBeComplete()
    self.assertEquals(self._tab.EvaluateJavaScript('foo'), 'bar')

  @decorators.Disabled('chromeos')  # crbug.com/483212
  def testCaptureScreenshot(self):
    if not self._tab.screenshot_supported:
      return
    self.Navigate('green_rect.html')
    res = image_util.Pixels(self._tab.Screenshot())
    # green_rect.html paints pure green at the origin: RGB == (0, 255, 0).
    self.assertEquals(0x00, res[0])
    self.assertEquals(0xFF, res[1])
    self.assertEquals(0x00, res[2])
| bsd-3-clause |
mapaction/map_review | maps/migrations/0003_auto_20141123_1852.py | 2 | 1150 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import multiselectfield.db.fields
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds two optional MultiSelectField
    # columns to the Map model (affected-population disaggregation and
    # humanitarian profile level-1 categories).

    dependencies = [
        ('maps', '0002_auto_20141123_1844'),
    ]

    operations = [
        migrations.AddField(
            model_name='map',
            name='disaggregated_affected_population_types',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, max_length=16, null=True, choices=[(b'Age', b'Age'), (b'Gender', b'Gender'), (b'Other', b'Other')]),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='map',
            name='humanitarian_profile_level_1_types',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, max_length=103, null=True, choices=[(b'Numbers of dead', b'Numbers of dead'), (b'Numbers of missing/injured', b'Numbers of missing/injured'), (b'Numbers of displaced', b'Numbers of displaced'), (b'Number affected but not displaced', b'Number affected but not displaced'), (b'Other', b'Other')]),
            preserve_default=True,
        ),
    ]
| gpl-2.0 |
denisff/python-for-android | python3-alpha/python3-src/Lib/turtledemo/peace.py | 65 | 1117 | #!/usr/bin/env python3
""" turtle-example-suite:
tdemo_peace.py
A very simple drawing suitable as a beginner's
programming example.
Uses only commands, which are also available in
old turtle.py.
Intentionally no variables are used except for the
colorloop:
"""
from turtle import *
def main():
    """Draw a peace sign over a rainbow-striped background."""
    # One background stripe per color, painted bottom to top.
    peacecolors = ("red3", "orange", "yellow",
                   "seagreen4", "orchid4",
                   "royalblue1", "dodgerblue4")

    reset()
    # NOTE(review): the binding `s` is unused; Screen() just returns the
    # singleton screen object -- confirm the line can be dropped.
    s = Screen()
    up()
    goto(-320,-195)
    width(70)

    for pcolor in peacecolors:
        color(pcolor)
        down()
        forward(640)
        up()
        backward(640)
        left(90)
        forward(66)
        right(90)

    # Peace symbol: white circle plus three radial strokes.
    width(25)
    color("white")
    goto(0,-170)
    down()

    circle(170)

    left(90)
    forward(340)
    up()

    left(180)
    forward(170)
    right(45)
    down()
    forward(170)
    up()

    backward(170)
    left(90)
    down()
    forward(170)
    up()

    goto(0,300) # vanish if hideturtle() is not available ;-)
    return "Done!!"
if __name__ == "__main__":
    main()
    # Keep the window open until the user closes it.
    mainloop()
| apache-2.0 |
shishaochen/TensorFlow-0.8-Win | tensorflow/python/training/rmsprop.py | 9 | 4019 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""One-line documentation for rmsprop module.
rmsprop algorithm [tieleman2012rmsprop]
A detailed description of rmsprop.
- maintain a moving (discounted) average of the square of gradients
- divide gradient by the root of this average
mean_square = decay * mean_square{t-1} + (1-decay) * gradient ** 2
mom = momentum * mom{t-1} + learning_rate * g_t / sqrt(mean_square + epsilon)
delta = - mom
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import math_ops
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops
class RMSPropOptimizer(optimizer.Optimizer):
  """Optimizer that implements the RMSProp algorithm.

  See the [paper]
  (http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf).

  @@__init__
  """

  def __init__(self,
               learning_rate,
               decay=0.9,
               momentum=0.0,
               epsilon=1e-10,
               use_locking=False,
               name="RMSProp"):
    """Construct a new RMSProp optimizer.

    Args:
      learning_rate: A Tensor or a floating point value.  The learning rate.
      decay: Discounting factor for the history/coming gradient
      momentum: A scalar tensor.
      epsilon: Small value to avoid zero denominator.
      use_locking: If True use locks for update operation.
      name: Optional name prefix for the operations created when applying
        gradients. Defaults to "RMSProp".
    """
    super(RMSPropOptimizer, self).__init__(use_locking, name)
    self._learning_rate = learning_rate
    self._decay = decay
    self._momentum = momentum
    self._epsilon = epsilon

    # Tensors for learning rate and momentum.  Created in _prepare.
    self._learning_rate_tensor = None
    self._decay_tensor = None
    self._momentum_tensor = None
    self._epsilon_tensor = None

  def _create_slots(self, var_list):
    # Per-variable state: "rms" starts at 1.0 so the very first update is
    # not divided by a near-zero mean square; "momentum" starts at zero.
    for v in var_list:
      val = constant_op.constant(1.0, dtype=v.dtype, shape=v.get_shape())
      self._get_or_make_slot(v, val, "rms", self._name)
      self._zeros_slot(v, "momentum", self._name)

  def _prepare(self):
    # Convert Python-number hyperparameters to tensors once per apply call.
    self._learning_rate_tensor = ops.convert_to_tensor(self._learning_rate,
                                                       name="learning_rate")
    self._decay_tensor = ops.convert_to_tensor(self._decay, name="decay")
    self._momentum_tensor = ops.convert_to_tensor(self._momentum,
                                                  name="momentum")
    self._epsilon_tensor = ops.convert_to_tensor(self._epsilon,
                                                 name="epsilon")

  def _apply_dense(self, grad, var):
    rms = self.get_slot(var, "rms")
    mom = self.get_slot(var, "momentum")
    # Delegate to the fused kernel; hyperparameters are cast to the
    # variable's base dtype so non-float32 variables work.
    return training_ops.apply_rms_prop(
        var, rms, mom,
        math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
        math_ops.cast(self._decay_tensor, var.dtype.base_dtype),
        math_ops.cast(self._momentum_tensor, var.dtype.base_dtype),
        math_ops.cast(self._epsilon_tensor, var.dtype.base_dtype),
        grad, use_locking=self._use_locking).op

  def _apply_sparse(self, grad, var):
    # Sparse (IndexedSlices) gradients are not supported by this optimizer.
    raise NotImplementedError()
| apache-2.0 |
Designist/sympy | sympy/series/tests/test_demidovich.py | 116 | 4679 | from sympy import limit, Symbol, oo, sqrt, Rational, log, exp, cos, sin, tan, \
pi, asin, together, root
# Numbers listed with the tests refer to problem numbers in the book
# "Anti-demidovich, problemas resueltos, Ed. URSS"

# Shared symbol used by every limit test below.
x = Symbol("x")
def test_leadterm():
    # leadterm(x) returns (coefficient, exponent) of the lowest-order term.
    assert (3 + 2*x**(log(3)/log(2) - 1)).leadterm(x) == (3, 0)
def root3(x):
    """Return the cube root of x."""
    return root(x, 3)
def root4(x):
    """Return the fourth root of x."""
    return root(x, 4)
def test_Limits_simple_0():
    # Ratio of exponentials at oo: the larger base (3) dominates.
    assert limit((2**(x + 1) + 3**(x + 1))/(2**x + 3**x), x, oo) == 3  # 175
def test_Limits_simple_1():
    """Rational and radical limits at infinity (problems 172-181)."""
    assert limit((x + 1)*(x + 2)*(x + 3)/x**3, x, oo) == 1  # 172
    assert limit(sqrt(x + 1) - sqrt(x), x, oo) == 0  # 179
    assert limit((2*x - 3)*(3*x + 5)*(4*x - 6)/(3*x**3 + x - 1), x, oo) == 8  # Primjer 1
    assert limit(x/root3(x**3 + 10), x, oo) == 1  # Primjer 2
    assert limit((x + 1)**2/(x**2 + 1), x, oo) == 1  # 181
def test_Limits_simple_2():
    """More rational/radical limits at infinity (problems 182-190)."""
    assert limit(1000*x/(x**2 - 1), x, oo) == 0  # 182
    assert limit((x**2 - 5*x + 1)/(3*x + 7), x, oo) == oo  # 183
    assert limit((2*x**2 - x + 3)/(x**3 - 8*x + 5), x, oo) == 0  # 184
    assert limit((2*x**2 - 3*x - 4)/sqrt(x**4 + 1), x, oo) == 2  # 186
    assert limit((2*x + 3)/(x + root3(x)), x, oo) == 2  # 187
    assert limit(x**2/(10 + x*sqrt(x)), x, oo) == oo  # 188
    assert limit(root3(x**2 + 1)/(x + 1), x, oo) == 0  # 189
    assert limit(sqrt(x)/sqrt(x + sqrt(x + sqrt(x))), x, oo) == 1  # 190
def test_Limits_simple_3a():
    """Factorable 0/0 form at x = a (problem 196; regression for issue 3513)."""
    a = Symbol('a')
    #issue 3513
    assert together(limit((x**2 - (a + 1)*x + a)/(x**3 - a**3), x, a)) == \
        (a - 1)/(3*a**2)  # 196
def test_Limits_simple_3b():
    """0/0 forms resolved by algebraic simplification (problems 197-202)."""
    h = Symbol("h")
    assert limit(((x + h)**3 - x**3)/h, h, 0) == 3*x**2  # 197
    assert limit((1/(1 - x) - 3/(1 - x**3)), x, 1) == -1  # 198
    assert limit((sqrt(1 + x) - 1)/(root3(1 + x) - 1), x, 0) == Rational(3)/2  # Primer 4
    assert limit((sqrt(x) - 1)/(x - 1), x, 1) == Rational(1)/2  # 199
    assert limit((sqrt(x) - 8)/(root3(x) - 4), x, 64) == 3  # 200
    assert limit((root3(x) - 1)/(root4(x) - 1), x, 1) == Rational(4)/3  # 201
    assert limit(
        (root3(x**2) - 2*root3(x) + 1)/(x - 1)**2, x, 1) == Rational(1)/9  # 202
def test_Limits_simple_4a():
    """Radical 0/0 and oo - oo forms (problems 205-213)."""
    a = Symbol('a')
    assert limit((sqrt(x) - sqrt(a))/(x - a), x, a) == 1/(2*sqrt(a))  # Primer 5
    assert limit((sqrt(x) - 1)/(root3(x) - 1), x, 1) == Rational(3)/2  # 205
    assert limit((sqrt(1 + x) - sqrt(1 - x))/x, x, 0) == 1  # 207
    assert limit(sqrt(x**2 - 5*x + 6) - x, x, oo) == -Rational(5)/2  # 213
def test_limits_simple_4aa():
    # oo * 0 form rationalized via the conjugate (problem 214).
    assert limit(x*(sqrt(x**2 + 1) - x), x, oo) == Rational(1)/2  # 214
def test_Limits_simple_4b():
    """oo - oo form with a cube root (problem 215; regression for issue 3511)."""
    #issue 3511
    assert limit(x - root3(x**3 - 1), x, oo) == 0  # 215
def test_Limits_simple_4c():
    """log(1 + e**x)/x at both infinities (problem 267a/b)."""
    assert limit(log(1 + exp(x))/x, x, -oo) == 0  # 267a
    assert limit(log(1 + exp(x))/x, x, oo) == 1  # 267b
def test_bounded():
    """Limits of bounded-times-vanishing products (problems 216b, 227a)."""
    assert limit(sin(x)/x, x, oo) == 0  # 216b
    assert limit(x*sin(1/x), x, 0) == 0  # 227a
def test_f1a():
    """1**oo-style power limit (Primer 7; regression for issue 3508)."""
    # The unused local ``h = Symbol("h")`` was removed: nothing in this
    # test references it.
    #issue 3508:
    assert limit((sin(2*x)/x)**(1 + x), x, 0) == 2  # Primer 7
def test_f1a2():
    """1**oo-style power limit at infinity (Primer 9; issue 3509)."""
    #issue 3509:
    assert limit(((x - 1)/(x + 1))**x, x, oo) == exp(-2)  # Primer 9
def test_f1b():
    """Trigonometric limits, including difference quotients (216a-240)."""
    m = Symbol("m")
    n = Symbol("n")
    h = Symbol("h")
    a = Symbol("a")
    assert limit(sin(x)/x, x, 2) == sin(2)/2  # 216a
    assert limit(sin(3*x)/x, x, 0) == 3  # 217
    assert limit(sin(5*x)/sin(2*x), x, 0) == Rational(5)/2  # 218
    assert limit(sin(pi*x)/sin(3*pi*x), x, 0) == Rational(1)/3  # 219
    assert limit(x*sin(pi/x), x, oo) == pi  # 220
    assert limit((1 - cos(x))/x**2, x, 0) == Rational(1, 2)  # 221
    assert limit(x*sin(1/x), x, oo) == 1  # 227b
    assert limit((cos(m*x) - cos(n*x))/x**2, x, 0) == ((n**2 - m**2)/2)  # 232
    assert limit((tan(x) - sin(x))/x**3, x, 0) == Rational(1, 2)  # 233
    assert limit((x - sin(2*x))/(x + sin(3*x)), x, 0) == -Rational(1, 4)  # 237
    assert limit((1 - sqrt(cos(x)))/x**2, x, 0) == Rational(1, 4)  # 239
    assert limit((sqrt(1 + sin(x)) - sqrt(1 - sin(x)))/x, x, 0) == 1  # 240
    assert limit((1 + h/x)**x, x, oo) == exp(h)  # Primer 9
    # Difference quotients: these limits are the derivatives of sin and cos.
    assert limit((sin(x) - sin(a))/(x - a), x, a) == cos(a)  # 222, *176
    assert limit((cos(x) - cos(a))/(x - a), x, a) == -sin(a)  # 223
    assert limit((sin(x + h) - sin(x))/h, h, 0) == cos(x)  # 225
def test_f2a():
    # Power limit where the base tends to 1/2, so the x**2 power vanishes.
    assert limit(((x + 1)/(2*x + 1))**(x**2), x, oo) == 0  # Primer 8
def test_f2():
    """Mixed radical/trigonometric 0/0 form (problem *184)."""
    assert limit((sqrt(
        cos(x)) - root3(cos(x)))/(sin(x)**2), x, 0) == -Rational(1, 12)  # *184
def test_f3():
    """asin(a*x)/x at 0 equals a (regression for issue 3504)."""
    a = Symbol('a')
    #issue 3504
    assert limit(asin(a*x)/x, x, 0) == a
| bsd-3-clause |
CatBakun/AutobahnPython | examples/websocket/multiproto/server1.py | 19 | 3850 | ###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS, \
HttpException
class BaseService:
   """
   Default no-op service: concrete services override the hooks they need.
   """

   def __init__(self, proto):
      ## remember the owning protocol so handlers can reply through it
      self.proto = proto

   def onOpen(self):
      pass

   def onMessage(self, msg, isBinary):
      pass

   def onClose(self, wasClean, code, reason):
      pass
class Echo1Service(BaseService):
   """
   Echo service variant 1: tags each message and sends it back.
   """

   def onMessage(self, msg, isBinary):
      ## reply via the owning protocol, prefixed with this service's name
      echoed = "Echo 1 - " + msg
      self.proto.sendMessage(echoed)
class Echo2Service(BaseService):
   """
   Echo service variant 2: tags each message and sends it back.
   """

   def onMessage(self, msg, isBinary):
      ## reply via the owning protocol, prefixed with this service's name
      echoed = "Echo 2 - " + msg
      self.proto.sendMessage(echoed)
class ServiceServerProtocol(WebSocketServerProtocol):
   """
   Dispatches each WebSocket connection to a service selected by URL path.
   Unknown paths are rejected during the opening handshake with HTTP 404.
   """

   ## maps request path -> service class
   SERVICEMAP = {'/echo1': Echo1Service,
                 '/echo2': Echo2Service}

   def __init__(self):
      ## set during onConnect(); None means no service was matched
      self.service = None

   def onConnect(self, connectionRequest):
      ## connectionRequest has all the information from the initial
      ## WebSocket opening handshake ..
      print connectionRequest.peer
      print connectionRequest.peerstr
      print connectionRequest.headers
      print connectionRequest.host
      print connectionRequest.path
      print connectionRequest.params
      print connectionRequest.version
      print connectionRequest.origin
      print connectionRequest.protocols
      print connectionRequest.extensions

      ## We map to services based on path component of the URL the
      ## WebSocket client requested. This is just an example. We could
      ## use other information from connectionRequest, such has HTTP headers,
      ## WebSocket subprotocol, WebSocket origin etc etc
      ##
      if self.SERVICEMAP.has_key(connectionRequest.path):
         self.service = self.SERVICEMAP[connectionRequest.path](self)
      else:
         err = "No service under %s" % connectionRequest.path
         print err
         raise HttpException(404, err)

   ## The remaining handlers simply forward to the selected service.

   def onOpen(self):
      if self.service:
         self.service.onOpen()

   def onMessage(self, msg, isBinary):
      if self.service:
         self.service.onMessage(msg, isBinary)

   def onClose(self, wasClean, code, reason):
      if self.service:
         self.service.onClose(wasClean, code, reason)
if __name__ == '__main__':

   # "debug" on the command line enables Twisted logging and verbose
   # Autobahn code paths.
   if len(sys.argv) > 1 and sys.argv[1] == 'debug':
      log.startLogging(sys.stdout)
      debug = True
   else:
      debug = False

   factory = WebSocketServerFactory("ws://localhost:9000",
                                    debug = debug,
                                    debugCodePaths = debug)

   factory.protocol = ServiceServerProtocol
   factory.setProtocolOptions(allowHixie76 = True, failByDrop = False)
   listenWS(factory)

   # Also serve the current directory over plain HTTP on port 8080.
   webdir = File(".")
   web = Site(webdir)
   reactor.listenTCP(8080, web)

   reactor.run()
| apache-2.0 |
simonwydooghe/ansible | test/units/modules/network/fortios/test_fortios_webfilter_content_header.py | 21 | 8109 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_webfilter_content_header
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Patch the module's Connection class for every test in this file."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_webfilter_content_header.Connection')
    return connection_class_mock


# NOTE(review): this passes the fixture *function* itself, not a mock
# instance; the handler apparently only needs a connection-like object
# because its methods are patched per-test -- confirm.
fos_instance = FortiOSHandler(connection_mock)
def test_webfilter_content_header_creation(mocker):
    """state=present with a successful POST reports changed=True."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'webfilter_content_header': {
            'comment': 'Optional comments.',
            'id': '4',
            'name': 'default_name_5'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_webfilter_content_header.fortios_webfilter(input_data, fos_instance)

    expected_data = {
        'comment': 'Optional comments.',
        'id': '4',
        'name': 'default_name_5'
    }

    set_method_mock.assert_called_with('webfilter', 'content-header', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_webfilter_content_header_creation_fails(mocker):
    """state=present with a failing POST (HTTP 500) reports an error."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'webfilter_content_header': {
            'comment': 'Optional comments.',
            'id': '4',
            'name': 'default_name_5'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_webfilter_content_header.fortios_webfilter(input_data, fos_instance)

    expected_data = {
        'comment': 'Optional comments.',
        'id': '4',
        'name': 'default_name_5'
    }

    set_method_mock.assert_called_with('webfilter', 'content-header', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_webfilter_content_header_removal(mocker):
    """state=absent with a successful DELETE reports changed=True."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'webfilter_content_header': {
            'comment': 'Optional comments.',
            'id': '4',
            'name': 'default_name_5'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_webfilter_content_header.fortios_webfilter(input_data, fos_instance)

    delete_method_mock.assert_called_with('webfilter', 'content-header', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_webfilter_content_header_deletion_fails(mocker):
    """state=absent with a failing DELETE (HTTP 500) reports an error."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'webfilter_content_header': {
            'comment': 'Optional comments.',
            'id': '4',
            'name': 'default_name_5'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_webfilter_content_header.fortios_webfilter(input_data, fos_instance)

    delete_method_mock.assert_called_with('webfilter', 'content-header', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_webfilter_content_header_idempotent(mocker):
    """An unchanged object (HTTP 404 on update) reports changed=False, no error."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'webfilter_content_header': {
            'comment': 'Optional comments.',
            'id': '4',
            'name': 'default_name_5'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_webfilter_content_header.fortios_webfilter(input_data, fos_instance)

    expected_data = {
        'comment': 'Optional comments.',
        'id': '4',
        'name': 'default_name_5'
    }

    set_method_mock.assert_called_with('webfilter', 'content-header', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_webfilter_content_header_filter_foreign_attributes(mocker):
    """Attributes not in the module schema are stripped before the POST."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'webfilter_content_header': {
            'random_attribute_not_valid': 'tag',
            'comment': 'Optional comments.',
            'id': '4',
            'name': 'default_name_5'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_webfilter_content_header.fortios_webfilter(input_data, fos_instance)

    # 'random_attribute_not_valid' must not appear in the payload.
    expected_data = {
        'comment': 'Optional comments.',
        'id': '4',
        'name': 'default_name_5'
    }

    set_method_mock.assert_called_with('webfilter', 'content-header', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| gpl-3.0 |
eharney/cinder | cinder/api/contrib/volume_mig_status_attribute.py | 8 | 2148 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.api import extensions
from cinder.api.openstack import wsgi
# Policy check deciding whether the extension attributes are shown.
authorize = extensions.soft_extension_authorizer('volume',
                                                 'volume_mig_status_attribute')
class VolumeMigStatusAttributeController(wsgi.Controller):
    """Adds migration_status and _name_id to volume show/detail responses."""

    def _add_volume_mig_status_attribute(self, req, resp_volume):
        # Copy migration fields from the cached DB volume into the response
        # under the extension's namespaced keys.
        db_volume = req.get_db_volume(resp_volume['id'])
        key = "%s:migstat" % Volume_mig_status_attribute.alias
        resp_volume[key] = db_volume['migration_status']
        key = "%s:name_id" % Volume_mig_status_attribute.alias
        resp_volume[key] = db_volume['_name_id']

    @wsgi.extends
    def show(self, req, resp_obj, id):
        context = req.environ['cinder.context']
        if authorize(context):
            self._add_volume_mig_status_attribute(req, resp_obj.obj['volume'])

    @wsgi.extends
    def detail(self, req, resp_obj):
        context = req.environ['cinder.context']
        if authorize(context):
            for vol in list(resp_obj.obj['volumes']):
                self._add_volume_mig_status_attribute(req, vol)
class Volume_mig_status_attribute(extensions.ExtensionDescriptor):
    """Expose migration_status as an attribute of a volume."""

    name = "VolumeMigStatusAttribute"
    alias = "os-vol-mig-status-attr"
    updated = "2013-08-08T00:00:00+00:00"

    def get_controller_extensions(self):
        # Register the controller above against the 'volumes' resource.
        controller = VolumeMigStatusAttributeController()
        extension = extensions.ControllerExtension(self, 'volumes', controller)
        return [extension]
| apache-2.0 |
sedden/django-basic-apps | basic/bookmarks/views.py | 10 | 1569 | from django.views.generic import date_based, list_detail
from basic.bookmarks.models import *
def bookmark_list(request, page=0):
    # NOTE(review): every other view in this module filters with
    # Bookmark.objects.published(); this one lists all bookmarks --
    # confirm that is intentional.
    return list_detail.object_list(
        request,
        queryset=Bookmark.objects.all(),
        paginate_by=20,
        page=page,
    )
bookmark_list.__doc__ = list_detail.object_list.__doc__
def bookmark_archive_year(request, year):
    # Yearly archive of published bookmarks, keyed on the created date.
    return date_based.archive_year(
        request,
        year=year,
        date_field='created',
        queryset=Bookmark.objects.published(),
        make_object_list=True,
    )
bookmark_archive_year.__doc__ = date_based.archive_year.__doc__
def bookmark_archive_month(request, year, month):
    # Monthly archive of published bookmarks, keyed on the created date.
    return date_based.archive_month(
        request,
        year=year,
        month=month,
        date_field='created',
        queryset=Bookmark.objects.published(),
    )
bookmark_archive_month.__doc__ = date_based.archive_month.__doc__
def bookmark_archive_day(request, year, month, day):
    # Daily archive of published bookmarks, keyed on the created date.
    return date_based.archive_day(
        request,
        year=year,
        month=month,
        day=day,
        date_field='created',
        queryset=Bookmark.objects.published(),
    )
bookmark_archive_day.__doc__ = date_based.archive_day.__doc__
def bookmark_detail(request, object_id, year, month, day):
    # Detail view for one published bookmark, addressed by date + id.
    return date_based.object_detail(
        request,
        year=year,
        month=month,
        day=day,
        date_field='created',
        object_id=object_id,
        queryset=Bookmark.objects.published(),
    )
bookmark_detail.__doc__ = date_based.object_detail.__doc__ | bsd-3-clause |
yojota/volatility | volatility/plugins/overlays/windows/vista_sp12_x86_syscalls.py | 45 | 39669 | # Volatility
# Copyright (c) 2008-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
syscalls = [
[
'NtAcceptConnectPort', # 0x0
'NtAccessCheck', # 0x1
'NtAccessCheckAndAuditAlarm', # 0x2
'NtAccessCheckByType', # 0x3
'NtAccessCheckByTypeAndAuditAlarm', # 0x4
'NtAccessCheckByTypeResultList', # 0x5
'NtAccessCheckByTypeResultListAndAuditAlarm', # 0x6
'NtAccessCheckByTypeResultListAndAuditAlarmByHandle', # 0x7
'NtAddAtom', # 0x8
'NtAddBootEntry', # 0x9
'NtAddDriverEntry', # 0xa
'NtAdjustGroupsToken', # 0xb
'NtAdjustPrivilegesToken', # 0xc
'NtAlertResumeThread', # 0xd
'NtAlertThread', # 0xe
'NtAllocateLocallyUniqueId', # 0xf
'NtAllocateUserPhysicalPages', # 0x10
'NtAllocateUuids', # 0x11
'NtAllocateVirtualMemory', # 0x12
'NtAlpcAcceptConnectPort', # 0x13
'NtAlpcCancelMessage', # 0x14
'NtAlpcConnectPort', # 0x15
'NtAlpcCreatePort', # 0x16
'NtAlpcCreatePortSection', # 0x17
'NtAlpcCreateResourceReserve', # 0x18
'NtAlpcCreateSectionView', # 0x19
'NtAlpcCreateSecurityContext', # 0x1a
'NtAlpcDeletePortSection', # 0x1b
'NtAlpcDeleteResourceReserve', # 0x1c
'NtAlpcDeleteSectionView', # 0x1d
'NtAlpcDeleteSecurityContext', # 0x1e
'NtAlpcDisconnectPort', # 0x1f
'NtAlpcImpersonateClientOfPort', # 0x20
'NtAlpcOpenSenderProcess', # 0x21
'NtAlpcOpenSenderThread', # 0x22
'NtAlpcQueryInformation', # 0x23
'NtAlpcQueryInformationMessage', # 0x24
'NtAlpcRevokeSecurityContext', # 0x25
'NtAlpcSendWaitReceivePort', # 0x26
'NtAlpcSetInformation', # 0x27
'NtApphelpCacheControl', # 0x28
'NtAreMappedFilesTheSame', # 0x29
'NtAssignProcessToJobObject', # 0x2a
'NtCallbackReturn', # 0x2b
'NtCancelDeviceWakeupRequest', # 0x2c
'NtCancelIoFile', # 0x2d
'NtCancelTimer', # 0x2e
'NtClearEvent', # 0x2f
'NtClose', # 0x30
'NtCloseObjectAuditAlarm', # 0x31
'NtCompactKeys', # 0x32
'NtCompareTokens', # 0x33
'NtCompleteConnectPort', # 0x34
'NtCompressKey', # 0x35
'NtConnectPort', # 0x36
'NtContinue', # 0x37
'NtCreateDebugObject', # 0x38
'NtCreateDirectoryObject', # 0x39
'NtCreateEvent', # 0x3a
'NtCreateEventPair', # 0x3b
'NtCreateFile', # 0x3c
'NtCreateIoCompletion', # 0x3d
'NtCreateJobObject', # 0x3e
'NtCreateJobSet', # 0x3f
'NtCreateKey', # 0x40
'NtCreateKeyTransacted', # 0x41
'NtCreateMailslotFile', # 0x42
'NtCreateMutant', # 0x43
'NtCreateNamedPipeFile', # 0x44
'NtCreatePrivateNamespace', # 0x45
'NtCreatePagingFile', # 0x46
'NtCreatePort', # 0x47
'NtCreateProcess', # 0x48
'NtCreateProcessEx', # 0x49
'NtCreateProfile', # 0x4a
'NtCreateSection', # 0x4b
'NtCreateSemaphore', # 0x4c
'NtCreateSymbolicLinkObject', # 0x4d
'NtCreateThread', # 0x4e
'NtCreateTimer', # 0x4f
'NtCreateToken', # 0x50
'NtCreateTransaction', # 0x51
'NtOpenTransaction', # 0x52
'NtQueryInformationTransaction', # 0x53
'NtQueryInformationTransactionManager', # 0x54
'NtPrePrepareEnlistment', # 0x55
'NtPrepareEnlistment', # 0x56
'NtCommitEnlistment', # 0x57
'NtReadOnlyEnlistment', # 0x58
'NtRollbackComplete', # 0x59
'NtRollbackEnlistment', # 0x5a
'NtCommitTransaction', # 0x5b
'NtRollbackTransaction', # 0x5c
'NtPrePrepareComplete', # 0x5d
'NtPrepareComplete', # 0x5e
'NtCommitComplete', # 0x5f
'NtSinglePhaseReject', # 0x60
'NtSetInformationTransaction', # 0x61
'NtSetInformationTransactionManager', # 0x62
'NtSetInformationResourceManager', # 0x63
'NtCreateTransactionManager', # 0x64
'NtOpenTransactionManager', # 0x65
'NtRenameTransactionManager', # 0x66
'NtRollforwardTransactionManager', # 0x67
'NtRecoverEnlistment', # 0x68
'NtRecoverResourceManager', # 0x69
'NtRecoverTransactionManager', # 0x6a
'NtCreateResourceManager', # 0x6b
'NtOpenResourceManager', # 0x6c
'NtGetNotificationResourceManager', # 0x6d
'NtQueryInformationResourceManager', # 0x6e
'NtCreateEnlistment', # 0x6f
'NtOpenEnlistment', # 0x70
'NtSetInformationEnlistment', # 0x71
'NtQueryInformationEnlistment', # 0x72
'NtCreateWaitablePort', # 0x73
'NtDebugActiveProcess', # 0x74
'NtDebugContinue', # 0x75
'NtDelayExecution', # 0x76
'NtDeleteAtom', # 0x77
'NtDeleteBootEntry', # 0x78
'NtDeleteDriverEntry', # 0x79
'NtDeleteFile', # 0x7a
'NtDeleteKey', # 0x7b
'NtDeletePrivateNamespace', # 0x7c
'NtDeleteObjectAuditAlarm', # 0x7d
'NtDeleteValueKey', # 0x7e
'NtDeviceIoControlFile', # 0x7f
'NtDisplayString', # 0x80
'NtDuplicateObject', # 0x81
'NtDuplicateToken', # 0x82
'NtEnumerateBootEntries', # 0x83
'NtEnumerateDriverEntries', # 0x84
'NtEnumerateKey', # 0x85
'NtEnumerateSystemEnvironmentValuesEx', # 0x86
'NtEnumerateTransactionObject', # 0x87
'NtEnumerateValueKey', # 0x88
'NtExtendSection', # 0x89
'NtFilterToken', # 0x8a
'NtFindAtom', # 0x8b
'NtFlushBuffersFile', # 0x8c
'NtFlushInstructionCache', # 0x8d
'NtFlushKey', # 0x8e
'NtFlushProcessWriteBuffers', # 0x8f
'NtFlushVirtualMemory', # 0x90
'NtFlushWriteBuffer', # 0x91
'NtFreeUserPhysicalPages', # 0x92
'NtFreeVirtualMemory', # 0x93
'NtFreezeRegistry', # 0x94
'NtFreezeTransactions', # 0x95
'NtFsControlFile', # 0x96
'NtGetContextThread', # 0x97
'NtGetDevicePowerState', # 0x98
'NtGetNlsSectionPtr', # 0x99
'NtGetPlugPlayEvent', # 0x9a
'NtGetWriteWatch', # 0x9b
'NtImpersonateAnonymousToken', # 0x9c
'NtImpersonateClientOfPort', # 0x9d
'NtImpersonateThread', # 0x9e
'NtInitializeNlsFiles', # 0x9f
'NtInitializeRegistry', # 0xa0
'NtInitiatePowerAction', # 0xa1
'NtIsProcessInJob', # 0xa2
'NtIsSystemResumeAutomatic', # 0xa3
'NtListenPort', # 0xa4
'NtLoadDriver', # 0xa5
'NtLoadKey', # 0xa6
'NtLoadKey2', # 0xa7
'NtLoadKeyEx', # 0xa8
'NtLockFile', # 0xa9
'NtLockProductActivationKeys', # 0xaa
'NtLockRegistryKey', # 0xab
'NtLockVirtualMemory', # 0xac
'NtMakePermanentObject', # 0xad
'NtMakeTemporaryObject', # 0xae
'NtMapUserPhysicalPages', # 0xaf
'NtMapUserPhysicalPagesScatter', # 0xb0
'NtMapViewOfSection', # 0xb1
'NtModifyBootEntry', # 0xb2
'NtModifyDriverEntry', # 0xb3
'NtNotifyChangeDirectoryFile', # 0xb4
'NtNotifyChangeKey', # 0xb5
'NtNotifyChangeMultipleKeys', # 0xb6
'NtOpenDirectoryObject', # 0xb7
'NtOpenEvent', # 0xb8
'NtOpenEventPair', # 0xb9
'NtOpenFile', # 0xba
'NtOpenIoCompletion', # 0xbb
'NtOpenJobObject', # 0xbc
'NtOpenKey', # 0xbd
'NtOpenKeyTransacted', # 0xbe
'NtOpenMutant', # 0xbf
'NtOpenPrivateNamespace', # 0xc0
'NtOpenObjectAuditAlarm', # 0xc1
'NtOpenProcess', # 0xc2
'NtOpenProcessToken', # 0xc3
'NtOpenProcessTokenEx', # 0xc4
'NtOpenSection', # 0xc5
'NtOpenSemaphore', # 0xc6
'NtOpenSession', # 0xc7
'NtOpenSymbolicLinkObject', # 0xc8
'NtOpenThread', # 0xc9
'NtOpenThreadToken', # 0xca
'NtOpenThreadTokenEx', # 0xcb
'NtOpenTimer', # 0xcc
'NtPlugPlayControl', # 0xcd
'NtPowerInformation', # 0xce
'NtPrivilegeCheck', # 0xcf
'NtPrivilegeObjectAuditAlarm', # 0xd0
'NtPrivilegedServiceAuditAlarm', # 0xd1
'NtProtectVirtualMemory', # 0xd2
'NtPulseEvent', # 0xd3
'NtQueryAttributesFile', # 0xd4
'NtQueryBootEntryOrder', # 0xd5
'NtQueryBootOptions', # 0xd6
'NtQueryDebugFilterState', # 0xd7
'NtQueryDefaultLocale', # 0xd8
'NtQueryDefaultUILanguage', # 0xd9
'NtQueryDirectoryFile', # 0xda
'NtQueryDirectoryObject', # 0xdb
'NtQueryDriverEntryOrder', # 0xdc
'NtQueryEaFile', # 0xdd
'NtQueryEvent', # 0xde
'NtQueryFullAttributesFile', # 0xdf
'NtQueryInformationAtom', # 0xe0
'NtQueryInformationFile', # 0xe1
'NtQueryInformationJobObject', # 0xe2
'NtQueryInformationPort', # 0xe3
'NtQueryInformationProcess', # 0xe4
'NtQueryInformationThread', # 0xe5
'NtQueryInformationToken', # 0xe6
'NtQueryInstallUILanguage', # 0xe7
'NtQueryIntervalProfile', # 0xe8
'NtQueryIoCompletion', # 0xe9
'NtQueryKey', # 0xea
'NtQueryMultipleValueKey', # 0xeb
'NtQueryMutant', # 0xec
'NtQueryObject', # 0xed
'NtQueryOpenSubKeys', # 0xee
'NtQueryOpenSubKeysEx', # 0xef
'NtQueryPerformanceCounter', # 0xf0
'NtQueryQuotaInformationFile', # 0xf1
'NtQuerySection', # 0xf2
'NtQuerySecurityObject', # 0xf3
'NtQuerySemaphore', # 0xf4
'NtQuerySymbolicLinkObject', # 0xf5
'NtQuerySystemEnvironmentValue', # 0xf6
'NtQuerySystemEnvironmentValueEx', # 0xf7
'NtQuerySystemInformation', # 0xf8
'NtQuerySystemTime', # 0xf9
'NtQueryTimer', # 0xfa
'NtQueryTimerResolution', # 0xfb
'NtQueryValueKey', # 0xfc
'NtQueryVirtualMemory', # 0xfd
'NtQueryVolumeInformationFile', # 0xfe
'NtQueueApcThread', # 0xff
'NtRaiseException', # 0x100
'NtRaiseHardError', # 0x101
'NtReadFile', # 0x102
'NtReadFileScatter', # 0x103
'NtReadRequestData', # 0x104
'NtReadVirtualMemory', # 0x105
'NtRegisterThreadTerminatePort', # 0x106
'NtReleaseMutant', # 0x107
'NtReleaseSemaphore', # 0x108
'NtRemoveIoCompletion', # 0x109
'NtRemoveProcessDebug', # 0x10a
'NtRenameKey', # 0x10b
'NtReplaceKey', # 0x10c
'NtReplacePartitionUnit', # 0x10d
'NtReplyPort', # 0x10e
'NtReplyWaitReceivePort', # 0x10f
'NtReplyWaitReceivePortEx', # 0x110
'NtReplyWaitReplyPort', # 0x111
'NtRequestDeviceWakeup', # 0x112
'NtRequestPort', # 0x113
'NtRequestWaitReplyPort', # 0x114
'NtRequestWakeupLatency', # 0x115
'NtResetEvent', # 0x116
'NtResetWriteWatch', # 0x117
'NtRestoreKey', # 0x118
'NtResumeProcess', # 0x119
'NtResumeThread', # 0x11a
'NtSaveKey', # 0x11b
'NtSaveKeyEx', # 0x11c
'NtSaveMergedKeys', # 0x11d
'NtSecureConnectPort', # 0x11e
'NtSetBootEntryOrder', # 0x11f
'NtSetBootOptions', # 0x120
'NtSetContextThread', # 0x121
'NtSetDebugFilterState', # 0x122
'NtSetDefaultHardErrorPort', # 0x123
'NtSetDefaultLocale', # 0x124
'NtSetDefaultUILanguage', # 0x125
'NtSetDriverEntryOrder', # 0x126
'NtSetEaFile', # 0x127
'NtSetEvent', # 0x128
'NtSetEventBoostPriority', # 0x129
'NtSetHighEventPair', # 0x12a
'NtSetHighWaitLowEventPair', # 0x12b
'NtSetInformationDebugObject', # 0x12c
'NtSetInformationFile', # 0x12d
'NtSetInformationJobObject', # 0x12e
'NtSetInformationKey', # 0x12f
'NtSetInformationObject', # 0x130
'NtSetInformationProcess', # 0x131
'NtSetInformationThread', # 0x132
'NtSetInformationToken', # 0x133
'NtSetIntervalProfile', # 0x134
'NtSetIoCompletion', # 0x135
'NtSetLdtEntries', # 0x136
'NtSetLowEventPair', # 0x137
'NtSetLowWaitHighEventPair', # 0x138
'NtSetQuotaInformationFile', # 0x139
'NtSetSecurityObject', # 0x13a
'NtSetSystemEnvironmentValue', # 0x13b
'NtSetSystemEnvironmentValueEx', # 0x13c
'NtSetSystemInformation', # 0x13d
'NtSetSystemPowerState', # 0x13e
'NtSetSystemTime', # 0x13f
'NtSetThreadExecutionState', # 0x140
'NtSetTimer', # 0x141
'NtSetTimerResolution', # 0x142
'NtSetUuidSeed', # 0x143
'NtSetValueKey', # 0x144
'NtSetVolumeInformationFile', # 0x145
'NtShutdownSystem', # 0x146
'NtSignalAndWaitForSingleObject', # 0x147
'NtStartProfile', # 0x148
'NtStopProfile', # 0x149
'NtSuspendProcess', # 0x14a
'NtSuspendThread', # 0x14b
'NtSystemDebugControl', # 0x14c
'NtTerminateJobObject', # 0x14d
'NtTerminateProcess', # 0x14e
'NtTerminateThread', # 0x14f
'NtTestAlert', # 0x150
'NtThawRegistry', # 0x151
'NtThawTransactions', # 0x152
'NtTraceEvent', # 0x153
'NtTraceControl', # 0x154
'NtTranslateFilePath', # 0x155
'NtUnloadDriver', # 0x156
'NtUnloadKey', # 0x157
'NtUnloadKey2', # 0x158
'NtUnloadKeyEx', # 0x159
'NtUnlockFile', # 0x15a
'NtUnlockVirtualMemory', # 0x15b
'NtUnmapViewOfSection', # 0x15c
'NtVdmControl', # 0x15d
'NtWaitForDebugEvent', # 0x15e
'NtWaitForMultipleObjects', # 0x15f
'NtWaitForSingleObject', # 0x160
'NtWaitHighEventPair', # 0x161
'NtWaitLowEventPair', # 0x162
'NtWriteFile', # 0x163
'NtWriteFileGather', # 0x164
'NtWriteRequestData', # 0x165
'NtWriteVirtualMemory', # 0x166
'NtYieldExecution', # 0x167
'NtCreateKeyedEvent', # 0x168
'NtOpenKeyedEvent', # 0x169
'NtReleaseKeyedEvent', # 0x16a
'NtWaitForKeyedEvent', # 0x16b
'NtQueryPortInformationProcess', # 0x16c
'NtGetCurrentProcessorNumber', # 0x16d
'NtWaitForMultipleObjects32', # 0x16e
'NtGetNextProcess', # 0x16f
'NtGetNextThread', # 0x170
'NtCancelIoFileEx', # 0x171
'NtCancelSynchronousIoFile', # 0x172
'NtRemoveIoCompletionEx', # 0x173
'NtRegisterProtocolAddressInformation', # 0x174
'NtPropagationComplete', # 0x175
'NtPropagationFailed', # 0x176
'NtCreateWorkerFactory', # 0x177
'NtReleaseWorkerFactoryWorker', # 0x178
'NtWaitForWorkViaWorkerFactory', # 0x179
'NtSetInformationWorkerFactory', # 0x17a
'NtQueryInformationWorkerFactory', # 0x17b
'NtWorkerFactoryWorkerReady', # 0x17c
'NtShutdownWorkerFactory', # 0x17d
'NtCreateThreadEx', # 0x17e
'NtCreateUserProcess', # 0x17f
'NtQueryLicenseValue', # 0x180
'NtMapCMFModule', # 0x181
'NtIsUILanguageComitted', # 0x182
'NtFlushInstallUILanguage', # 0x183
'NtGetMUIRegistryInfo', # 0x184
'NtAcquireCMFViewOwnership', # 0x185
'NtReleaseCMFViewOwnership', # 0x186
],
[
'NtGdiAbortDoc', # 0x0
'NtGdiAbortPath', # 0x1
'NtGdiAddFontResourceW', # 0x2
'NtGdiAddRemoteFontToDC', # 0x3
'NtGdiAddFontMemResourceEx', # 0x4
'NtGdiRemoveMergeFont', # 0x5
'NtGdiAddRemoteMMInstanceToDC', # 0x6
'NtGdiAlphaBlend', # 0x7
'NtGdiAngleArc', # 0x8
'NtGdiAnyLinkedFonts', # 0x9
'NtGdiFontIsLinked', # 0xa
'NtGdiArcInternal', # 0xb
'NtGdiBeginPath', # 0xc
'NtGdiBitBlt', # 0xd
'NtGdiCancelDC', # 0xe
'NtGdiCheckBitmapBits', # 0xf
'NtGdiCloseFigure', # 0x10
'NtGdiClearBitmapAttributes', # 0x11
'NtGdiClearBrushAttributes', # 0x12
'NtGdiColorCorrectPalette', # 0x13
'NtGdiCombineRgn', # 0x14
'NtGdiCombineTransform', # 0x15
'NtGdiComputeXformCoefficients', # 0x16
'NtGdiConfigureOPMProtectedOutput', # 0x17
'NtGdiConsoleTextOut', # 0x18
'NtGdiConvertMetafileRect', # 0x19
'NtGdiCreateBitmap', # 0x1a
'NtGdiCreateClientObj', # 0x1b
'NtGdiCreateColorSpace', # 0x1c
'NtGdiCreateColorTransform', # 0x1d
'NtGdiCreateCompatibleBitmap', # 0x1e
'NtGdiCreateCompatibleDC', # 0x1f
'NtGdiCreateDIBBrush', # 0x20
'NtGdiCreateDIBitmapInternal', # 0x21
'NtGdiCreateDIBSection', # 0x22
'NtGdiCreateEllipticRgn', # 0x23
'NtGdiCreateHalftonePalette', # 0x24
'NtGdiCreateHatchBrushInternal', # 0x25
'NtGdiCreateMetafileDC', # 0x26
'NtGdiCreateOPMProtectedOutputs', # 0x27
'NtGdiCreatePaletteInternal', # 0x28
'NtGdiCreatePatternBrushInternal', # 0x29
'NtGdiCreatePen', # 0x2a
'NtGdiCreateRectRgn', # 0x2b
'NtGdiCreateRoundRectRgn', # 0x2c
'NtGdiCreateServerMetaFile', # 0x2d
'NtGdiCreateSolidBrush', # 0x2e
'NtGdiD3dContextCreate', # 0x2f
'NtGdiD3dContextDestroy', # 0x30
'NtGdiD3dContextDestroyAll', # 0x31
'NtGdiD3dValidateTextureStageState', # 0x32
'NtGdiD3dDrawPrimitives2', # 0x33
'NtGdiDdGetDriverState', # 0x34
'NtGdiDdAddAttachedSurface', # 0x35
'NtGdiDdAlphaBlt', # 0x36
'NtGdiDdAttachSurface', # 0x37
'NtGdiDdBeginMoCompFrame', # 0x38
'NtGdiDdBlt', # 0x39
'NtGdiDdCanCreateSurface', # 0x3a
'NtGdiDdCanCreateD3DBuffer', # 0x3b
'NtGdiDdColorControl', # 0x3c
'NtGdiDdCreateDirectDrawObject', # 0x3d
'NtGdiDdCreateSurface', # 0x3e
'NtGdiDdCreateD3DBuffer', # 0x3f
'NtGdiDdCreateMoComp', # 0x40
'NtGdiDdCreateSurfaceObject', # 0x41
'NtGdiDdDeleteDirectDrawObject', # 0x42
'NtGdiDdDeleteSurfaceObject', # 0x43
'NtGdiDdDestroyMoComp', # 0x44
'NtGdiDdDestroySurface', # 0x45
'NtGdiDdDestroyD3DBuffer', # 0x46
'NtGdiDdEndMoCompFrame', # 0x47
'NtGdiDdFlip', # 0x48
'NtGdiDdFlipToGDISurface', # 0x49
'NtGdiDdGetAvailDriverMemory', # 0x4a
'NtGdiDdGetBltStatus', # 0x4b
'NtGdiDdGetDC', # 0x4c
'NtGdiDdGetDriverInfo', # 0x4d
'NtGdiDdGetDxHandle', # 0x4e
'NtGdiDdGetFlipStatus', # 0x4f
'NtGdiDdGetInternalMoCompInfo', # 0x50
'NtGdiDdGetMoCompBuffInfo', # 0x51
'NtGdiDdGetMoCompGuids', # 0x52
'NtGdiDdGetMoCompFormats', # 0x53
'NtGdiDdGetScanLine', # 0x54
'NtGdiDdLock', # 0x55
'NtGdiDdLockD3D', # 0x56
'NtGdiDdQueryDirectDrawObject', # 0x57
'NtGdiDdQueryMoCompStatus', # 0x58
'NtGdiDdReenableDirectDrawObject', # 0x59
'NtGdiDdReleaseDC', # 0x5a
'NtGdiDdRenderMoComp', # 0x5b
'NtGdiDdResetVisrgn', # 0x5c
'NtGdiDdSetColorKey', # 0x5d
'NtGdiDdSetExclusiveMode', # 0x5e
'NtGdiDdSetGammaRamp', # 0x5f
'NtGdiDdCreateSurfaceEx', # 0x60
'NtGdiDdSetOverlayPosition', # 0x61
'NtGdiDdUnattachSurface', # 0x62
'NtGdiDdUnlock', # 0x63
'NtGdiDdUnlockD3D', # 0x64
'NtGdiDdUpdateOverlay', # 0x65
'NtGdiDdWaitForVerticalBlank', # 0x66
'NtGdiDvpCanCreateVideoPort', # 0x67
'NtGdiDvpColorControl', # 0x68
'NtGdiDvpCreateVideoPort', # 0x69
'NtGdiDvpDestroyVideoPort', # 0x6a
'NtGdiDvpFlipVideoPort', # 0x6b
'NtGdiDvpGetVideoPortBandwidth', # 0x6c
'NtGdiDvpGetVideoPortField', # 0x6d
'NtGdiDvpGetVideoPortFlipStatus', # 0x6e
'NtGdiDvpGetVideoPortInputFormats', # 0x6f
'NtGdiDvpGetVideoPortLine', # 0x70
'NtGdiDvpGetVideoPortOutputFormats', # 0x71
'NtGdiDvpGetVideoPortConnectInfo', # 0x72
'NtGdiDvpGetVideoSignalStatus', # 0x73
'NtGdiDvpUpdateVideoPort', # 0x74
'NtGdiDvpWaitForVideoPortSync', # 0x75
'NtGdiDvpAcquireNotification', # 0x76
'NtGdiDvpReleaseNotification', # 0x77
'NtGdiDxgGenericThunk', # 0x78
'NtGdiDeleteClientObj', # 0x79
'NtGdiDeleteColorSpace', # 0x7a
'NtGdiDeleteColorTransform', # 0x7b
'NtGdiDeleteObjectApp', # 0x7c
'NtGdiDescribePixelFormat', # 0x7d
'NtGdiDestroyOPMProtectedOutput', # 0x7e
'NtGdiGetPerBandInfo', # 0x7f
'NtGdiDoBanding', # 0x80
'NtGdiDoPalette', # 0x81
'NtGdiDrawEscape', # 0x82
'NtGdiEllipse', # 0x83
'NtGdiEnableEudc', # 0x84
'NtGdiEndDoc', # 0x85
'NtGdiEndPage', # 0x86
'NtGdiEndPath', # 0x87
'NtGdiEnumFontChunk', # 0x88
'NtGdiEnumFontClose', # 0x89
'NtGdiEnumFontOpen', # 0x8a
'NtGdiEnumObjects', # 0x8b
'NtGdiEqualRgn', # 0x8c
'NtGdiEudcLoadUnloadLink', # 0x8d
'NtGdiExcludeClipRect', # 0x8e
'NtGdiExtCreatePen', # 0x8f
'NtGdiExtCreateRegion', # 0x90
'NtGdiExtEscape', # 0x91
'NtGdiExtFloodFill', # 0x92
'NtGdiExtGetObjectW', # 0x93
'NtGdiExtSelectClipRgn', # 0x94
'NtGdiExtTextOutW', # 0x95
'NtGdiFillPath', # 0x96
'NtGdiFillRgn', # 0x97
'NtGdiFlattenPath', # 0x98
'NtGdiFlush', # 0x99
'NtGdiForceUFIMapping', # 0x9a
'NtGdiFrameRgn', # 0x9b
'NtGdiFullscreenControl', # 0x9c
'NtGdiGetAndSetDCDword', # 0x9d
'NtGdiGetAppClipBox', # 0x9e
'NtGdiGetBitmapBits', # 0x9f
'NtGdiGetBitmapDimension', # 0xa0
'NtGdiGetBoundsRect', # 0xa1
'NtGdiGetCertificate', # 0xa2
'NtGdiGetCertificateSize', # 0xa3
'NtGdiGetCharABCWidthsW', # 0xa4
'NtGdiGetCharacterPlacementW', # 0xa5
'NtGdiGetCharSet', # 0xa6
'NtGdiGetCharWidthW', # 0xa7
'NtGdiGetCharWidthInfo', # 0xa8
'NtGdiGetColorAdjustment', # 0xa9
'NtGdiGetColorSpaceforBitmap', # 0xaa
'NtGdiGetCOPPCompatibleOPMInformation', # 0xab
'NtGdiGetDCDword', # 0xac
'NtGdiGetDCforBitmap', # 0xad
'NtGdiGetDCObject', # 0xae
'NtGdiGetDCPoint', # 0xaf
'NtGdiGetDeviceCaps', # 0xb0
'NtGdiGetDeviceGammaRamp', # 0xb1
'NtGdiGetDeviceCapsAll', # 0xb2
'NtGdiGetDIBitsInternal', # 0xb3
'NtGdiGetETM', # 0xb4
'NtGdiGetEudcTimeStampEx', # 0xb5
'NtGdiGetFontData', # 0xb6
'NtGdiGetFontResourceInfoInternalW', # 0xb7
'NtGdiGetGlyphIndicesW', # 0xb8
'NtGdiGetGlyphIndicesWInternal', # 0xb9
'NtGdiGetGlyphOutline', # 0xba
'NtGdiGetOPMInformation', # 0xbb
'NtGdiGetKerningPairs', # 0xbc
'NtGdiGetLinkedUFIs', # 0xbd
'NtGdiGetMiterLimit', # 0xbe
'NtGdiGetMonitorID', # 0xbf
'NtGdiGetNearestColor', # 0xc0
'NtGdiGetNearestPaletteIndex', # 0xc1
'NtGdiGetObjectBitmapHandle', # 0xc2
'NtGdiGetOPMRandomNumber', # 0xc3
'NtGdiGetOutlineTextMetricsInternalW', # 0xc4
'NtGdiGetPath', # 0xc5
'NtGdiGetPixel', # 0xc6
'NtGdiGetRandomRgn', # 0xc7
'NtGdiGetRasterizerCaps', # 0xc8
'NtGdiGetRealizationInfo', # 0xc9
'NtGdiGetRegionData', # 0xca
'NtGdiGetRgnBox', # 0xcb
'NtGdiGetServerMetaFileBits', # 0xcc
'NtGdiGetSpoolMessage', # 0xcd
'NtGdiGetStats', # 0xce
'NtGdiGetStockObject', # 0xcf
'NtGdiGetStringBitmapW', # 0xd0
'NtGdiGetSuggestedOPMProtectedOutputArraySize', # 0xd1
'NtGdiGetSystemPaletteUse', # 0xd2
'NtGdiGetTextCharsetInfo', # 0xd3
'NtGdiGetTextExtent', # 0xd4
'NtGdiGetTextExtentExW', # 0xd5
'NtGdiGetTextFaceW', # 0xd6
'NtGdiGetTextMetricsW', # 0xd7
'NtGdiGetTransform', # 0xd8
'NtGdiGetUFI', # 0xd9
'NtGdiGetEmbUFI', # 0xda
'NtGdiGetUFIPathname', # 0xdb
'NtGdiGetEmbedFonts', # 0xdc
'NtGdiChangeGhostFont', # 0xdd
'NtGdiAddEmbFontToDC', # 0xde
'NtGdiGetFontUnicodeRanges', # 0xdf
'NtGdiGetWidthTable', # 0xe0
'NtGdiGradientFill', # 0xe1
'NtGdiHfontCreate', # 0xe2
'NtGdiIcmBrushInfo', # 0xe3
'NtGdiInit', # 0xe4
'NtGdiInitSpool', # 0xe5
'NtGdiIntersectClipRect', # 0xe6
'NtGdiInvertRgn', # 0xe7
'NtGdiLineTo', # 0xe8
'NtGdiMakeFontDir', # 0xe9
'NtGdiMakeInfoDC', # 0xea
'NtGdiMaskBlt', # 0xeb
'NtGdiModifyWorldTransform', # 0xec
'NtGdiMonoBitmap', # 0xed
'NtGdiMoveTo', # 0xee
'NtGdiOffsetClipRgn', # 0xef
'NtGdiOffsetRgn', # 0xf0
'NtGdiOpenDCW', # 0xf1
'NtGdiPatBlt', # 0xf2
'NtGdiPolyPatBlt', # 0xf3
'NtGdiPathToRegion', # 0xf4
'NtGdiPlgBlt', # 0xf5
'NtGdiPolyDraw', # 0xf6
'NtGdiPolyPolyDraw', # 0xf7
'NtGdiPolyTextOutW', # 0xf8
'NtGdiPtInRegion', # 0xf9
'NtGdiPtVisible', # 0xfa
'NtGdiQueryFonts', # 0xfb
'NtGdiQueryFontAssocInfo', # 0xfc
'NtGdiRectangle', # 0xfd
'NtGdiRectInRegion', # 0xfe
'NtGdiRectVisible', # 0xff
'NtGdiRemoveFontResourceW', # 0x100
'NtGdiRemoveFontMemResourceEx', # 0x101
'NtGdiResetDC', # 0x102
'NtGdiResizePalette', # 0x103
'NtGdiRestoreDC', # 0x104
'NtGdiRoundRect', # 0x105
'NtGdiSaveDC', # 0x106
'NtGdiScaleViewportExtEx', # 0x107
'NtGdiScaleWindowExtEx', # 0x108
'NtGdiSelectBitmap', # 0x109
'NtGdiSelectBrush', # 0x10a
'NtGdiSelectClipPath', # 0x10b
'NtGdiSelectFont', # 0x10c
'NtGdiSelectPen', # 0x10d
'NtGdiSetBitmapAttributes', # 0x10e
'NtGdiSetBitmapBits', # 0x10f
'NtGdiSetBitmapDimension', # 0x110
'NtGdiSetBoundsRect', # 0x111
'NtGdiSetBrushAttributes', # 0x112
'NtGdiSetBrushOrg', # 0x113
'NtGdiSetColorAdjustment', # 0x114
'NtGdiSetColorSpace', # 0x115
'NtGdiSetDeviceGammaRamp', # 0x116
'NtGdiSetDIBitsToDeviceInternal', # 0x117
'NtGdiSetFontEnumeration', # 0x118
'NtGdiSetFontXform', # 0x119
'NtGdiSetIcmMode', # 0x11a
'NtGdiSetLinkedUFIs', # 0x11b
'NtGdiSetMagicColors', # 0x11c
'NtGdiSetMetaRgn', # 0x11d
'NtGdiSetMiterLimit', # 0x11e
'NtGdiGetDeviceWidth', # 0x11f
'NtGdiMirrorWindowOrg', # 0x120
'NtGdiSetLayout', # 0x121
'NtGdiSetOPMSigningKeyAndSequenceNumbers', # 0x122
'NtGdiSetPixel', # 0x123
'NtGdiSetPixelFormat', # 0x124
'NtGdiSetRectRgn', # 0x125
'NtGdiSetSystemPaletteUse', # 0x126
'NtGdiSetTextJustification', # 0x127
'NtGdiSetupPublicCFONT', # 0x128
'NtGdiSetVirtualResolution', # 0x129
'NtGdiSetSizeDevice', # 0x12a
'NtGdiStartDoc', # 0x12b
'NtGdiStartPage', # 0x12c
'NtGdiStretchBlt', # 0x12d
'NtGdiStretchDIBitsInternal', # 0x12e
'NtGdiStrokeAndFillPath', # 0x12f
'NtGdiStrokePath', # 0x130
'NtGdiSwapBuffers', # 0x131
'NtGdiTransformPoints', # 0x132
'NtGdiTransparentBlt', # 0x133
'NtGdiUnloadPrinterDriver', # 0x134
'NtGdiUnmapMemFont', # 0x135
'NtGdiUnrealizeObject', # 0x136
'NtGdiUpdateColors', # 0x137
'NtGdiWidenPath', # 0x138
'NtUserActivateKeyboardLayout', # 0x139
'NtUserAddClipboardFormatListener', # 0x13a
'NtUserAlterWindowStyle', # 0x13b
'NtUserAssociateInputContext', # 0x13c
'NtUserAttachThreadInput', # 0x13d
'NtUserBeginPaint', # 0x13e
'NtUserBitBltSysBmp', # 0x13f
'NtUserBlockInput', # 0x140
'NtUserBuildHimcList', # 0x141
'NtUserBuildHwndList', # 0x142
'NtUserBuildNameList', # 0x143
'NtUserBuildPropList', # 0x144
'NtUserCallHwnd', # 0x145
'NtUserCallHwndLock', # 0x146
'NtUserCallHwndOpt', # 0x147
'NtUserCallHwndParam', # 0x148
'NtUserCallHwndParamLock', # 0x149
'NtUserCallMsgFilter', # 0x14a
'NtUserCallNextHookEx', # 0x14b
'NtUserCallNoParam', # 0x14c
'NtUserCallOneParam', # 0x14d
'NtUserCallTwoParam', # 0x14e
'NtUserChangeClipboardChain', # 0x14f
'NtUserChangeDisplaySettings', # 0x150
'NtUserCheckAccessForIntegrityLevel', # 0x151
'NtUserCheckDesktopByThreadId', # 0x152
'NtUserCheckWindowThreadDesktop', # 0x153
'NtUserCheckImeHotKey', # 0x154
'NtUserCheckMenuItem', # 0x155
'NtUserChildWindowFromPointEx', # 0x156
'NtUserClipCursor', # 0x157
'NtUserCloseClipboard', # 0x158
'NtUserCloseDesktop', # 0x159
'NtUserCloseWindowStation', # 0x15a
'NtUserConsoleControl', # 0x15b
'NtUserConvertMemHandle', # 0x15c
'NtUserCopyAcceleratorTable', # 0x15d
'NtUserCountClipboardFormats', # 0x15e
'NtUserCreateAcceleratorTable', # 0x15f
'NtUserCreateCaret', # 0x160
'NtUserCreateDesktopEx', # 0x161
'NtUserCreateInputContext', # 0x162
'NtUserCreateLocalMemHandle', # 0x163
'NtUserCreateWindowEx', # 0x164
'NtUserCreateWindowStation', # 0x165
'NtUserDdeInitialize', # 0x166
'NtUserDeferWindowPos', # 0x167
'NtUserDefSetText', # 0x168
'NtUserDeleteMenu', # 0x169
'NtUserDestroyAcceleratorTable', # 0x16a
'NtUserDestroyCursor', # 0x16b
'NtUserDestroyInputContext', # 0x16c
'NtUserDestroyMenu', # 0x16d
'NtUserDestroyWindow', # 0x16e
'NtUserDisableThreadIme', # 0x16f
'NtUserDispatchMessage', # 0x170
'NtUserDoSoundConnect', # 0x171
'NtUserDoSoundDisconnect', # 0x172
'NtUserDragDetect', # 0x173
'NtUserDragObject', # 0x174
'NtUserDrawAnimatedRects', # 0x175
'NtUserDrawCaption', # 0x176
'NtUserDrawCaptionTemp', # 0x177
'NtUserDrawIconEx', # 0x178
'NtUserDrawMenuBarTemp', # 0x179
'NtUserEmptyClipboard', # 0x17a
'NtUserEnableMenuItem', # 0x17b
'NtUserEnableScrollBar', # 0x17c
'NtUserEndDeferWindowPosEx', # 0x17d
'NtUserEndMenu', # 0x17e
'NtUserEndPaint', # 0x17f
'NtUserEnumDisplayDevices', # 0x180
'NtUserEnumDisplayMonitors', # 0x181
'NtUserEnumDisplaySettings', # 0x182
'NtUserEvent', # 0x183
'NtUserExcludeUpdateRgn', # 0x184
'NtUserFillWindow', # 0x185
'NtUserFindExistingCursorIcon', # 0x186
'NtUserFindWindowEx', # 0x187
'NtUserFlashWindowEx', # 0x188
'NtUserFrostCrashedWindow', # 0x189
'NtUserGetAltTabInfo', # 0x18a
'NtUserGetAncestor', # 0x18b
'NtUserGetAppImeLevel', # 0x18c
'NtUserGetAsyncKeyState', # 0x18d
'NtUserGetAtomName', # 0x18e
'NtUserGetCaretBlinkTime', # 0x18f
'NtUserGetCaretPos', # 0x190
'NtUserGetClassInfoEx', # 0x191
'NtUserGetClassName', # 0x192
'NtUserGetClipboardData', # 0x193
'NtUserGetClipboardFormatName', # 0x194
'NtUserGetClipboardOwner', # 0x195
'NtUserGetClipboardSequenceNumber', # 0x196
'NtUserGetClipboardViewer', # 0x197
'NtUserGetClipCursor', # 0x198
'NtUserGetComboBoxInfo', # 0x199
'NtUserGetControlBrush', # 0x19a
'NtUserGetControlColor', # 0x19b
'NtUserGetCPD', # 0x19c
'NtUserGetCursorFrameInfo', # 0x19d
'NtUserGetCursorInfo', # 0x19e
'NtUserGetDC', # 0x19f
'NtUserGetDCEx', # 0x1a0
'NtUserGetDoubleClickTime', # 0x1a1
'NtUserGetForegroundWindow', # 0x1a2
'NtUserGetGuiResources', # 0x1a3
'NtUserGetGUIThreadInfo', # 0x1a4
'NtUserGetIconInfo', # 0x1a5
'NtUserGetIconSize', # 0x1a6
'NtUserGetImeHotKey', # 0x1a7
'NtUserGetImeInfoEx', # 0x1a8
'NtUserGetInternalWindowPos', # 0x1a9
'NtUserGetKeyboardLayoutList', # 0x1aa
'NtUserGetKeyboardLayoutName', # 0x1ab
'NtUserGetKeyboardState', # 0x1ac
'NtUserGetKeyNameText', # 0x1ad
'NtUserGetKeyState', # 0x1ae
'NtUserGetListBoxInfo', # 0x1af
'NtUserGetMenuBarInfo', # 0x1b0
'NtUserGetMenuIndex', # 0x1b1
'NtUserGetMenuItemRect', # 0x1b2
'NtUserGetMessage', # 0x1b3
'NtUserGetMouseMovePointsEx', # 0x1b4
'NtUserGetObjectInformation', # 0x1b5
'NtUserGetOpenClipboardWindow', # 0x1b6
'NtUserGetPriorityClipboardFormat', # 0x1b7
'NtUserGetProcessWindowStation', # 0x1b8
'NtUserGetRawInputBuffer', # 0x1b9
'NtUserGetRawInputData', # 0x1ba
'NtUserGetRawInputDeviceInfo', # 0x1bb
'NtUserGetRawInputDeviceList', # 0x1bc
'NtUserGetRegisteredRawInputDevices', # 0x1bd
'NtUserGetScrollBarInfo', # 0x1be
'NtUserGetSystemMenu', # 0x1bf
'NtUserGetThreadDesktop', # 0x1c0
'NtUserGetThreadState', # 0x1c1
'NtUserGetTitleBarInfo', # 0x1c2
'NtUserGetUpdatedClipboardFormats', # 0x1c3
'NtUserGetUpdateRect', # 0x1c4
'NtUserGetUpdateRgn', # 0x1c5
'NtUserGetWindowDC', # 0x1c6
'NtUserGetWindowPlacement', # 0x1c7
'NtUserGetWOWClass', # 0x1c8
'NtUserGhostWindowFromHungWindow', # 0x1c9
'NtUserHardErrorControl', # 0x1ca
'NtUserHideCaret', # 0x1cb
'NtUserHiliteMenuItem', # 0x1cc
'NtUserHungWindowFromGhostWindow', # 0x1cd
'NtUserImpersonateDdeClientWindow', # 0x1ce
'NtUserInitialize', # 0x1cf
'NtUserInitializeClientPfnArrays', # 0x1d0
'NtUserInitTask', # 0x1d1
'NtUserInternalGetWindowText', # 0x1d2
'NtUserInternalGetWindowIcon', # 0x1d3
'NtUserInvalidateRect', # 0x1d4
'NtUserInvalidateRgn', # 0x1d5
'NtUserIsClipboardFormatAvailable', # 0x1d6
'NtUserKillTimer', # 0x1d7
'NtUserLoadKeyboardLayoutEx', # 0x1d8
'NtUserLockWindowStation', # 0x1d9
'NtUserLockWindowUpdate', # 0x1da
'NtUserLockWorkStation', # 0x1db
'NtUserLogicalToPhysicalPoint', # 0x1dc
'NtUserMapVirtualKeyEx', # 0x1dd
'NtUserMenuItemFromPoint', # 0x1de
'NtUserMessageCall', # 0x1df
'NtUserMinMaximize', # 0x1e0
'NtUserMNDragLeave', # 0x1e1
'NtUserMNDragOver', # 0x1e2
'NtUserModifyUserStartupInfoFlags', # 0x1e3
'NtUserMoveWindow', # 0x1e4
'NtUserNotifyIMEStatus', # 0x1e5
'NtUserNotifyProcessCreate', # 0x1e6
'NtUserNotifyWinEvent', # 0x1e7
'NtUserOpenClipboard', # 0x1e8
'NtUserOpenDesktop', # 0x1e9
'NtUserOpenInputDesktop', # 0x1ea
'NtUserOpenThreadDesktop', # 0x1eb
'NtUserOpenWindowStation', # 0x1ec
'NtUserPaintDesktop', # 0x1ed
'NtUserPaintMonitor', # 0x1ee
'NtUserPeekMessage', # 0x1ef
'NtUserPhysicalToLogicalPoint', # 0x1f0
'NtUserPostMessage', # 0x1f1
'NtUserPostThreadMessage', # 0x1f2
'NtUserPrintWindow', # 0x1f3
'NtUserProcessConnect', # 0x1f4
'NtUserQueryInformationThread', # 0x1f5
'NtUserQueryInputContext', # 0x1f6
'NtUserQuerySendMessage', # 0x1f7
'NtUserQueryWindow', # 0x1f8
'NtUserRealChildWindowFromPoint', # 0x1f9
'NtUserRealInternalGetMessage', # 0x1fa
'NtUserRealWaitMessageEx', # 0x1fb
'NtUserRedrawWindow', # 0x1fc
'NtUserRegisterClassExWOW', # 0x1fd
'NtUserRegisterErrorReportingDialog', # 0x1fe
'NtUserRegisterUserApiHook', # 0x1ff
'NtUserRegisterHotKey', # 0x200
'NtUserRegisterRawInputDevices', # 0x201
'NtUserRegisterTasklist', # 0x202
'NtUserRegisterWindowMessage', # 0x203
'NtUserRemoveClipboardFormatListener', # 0x204
'NtUserRemoveMenu', # 0x205
'NtUserRemoveProp', # 0x206
'NtUserResolveDesktop', # 0x207
'NtUserResolveDesktopForWOW', # 0x208
'NtUserSBGetParms', # 0x209
'NtUserScrollDC', # 0x20a
'NtUserScrollWindowEx', # 0x20b
'NtUserSelectPalette', # 0x20c
'NtUserSendInput', # 0x20d
'NtUserSetActiveWindow', # 0x20e
'NtUserSetAppImeLevel', # 0x20f
'NtUserSetCapture', # 0x210
'NtUserSetClassLong', # 0x211
'NtUserSetClassWord', # 0x212
'NtUserSetClipboardData', # 0x213
'NtUserSetClipboardViewer', # 0x214
'NtUserSetConsoleReserveKeys', # 0x215
'NtUserSetCursor', # 0x216
'NtUserSetCursorContents', # 0x217
'NtUserSetCursorIconData', # 0x218
'NtUserSetFocus', # 0x219
'NtUserSetImeHotKey', # 0x21a
'NtUserSetImeInfoEx', # 0x21b
'NtUserSetImeOwnerWindow', # 0x21c
'NtUserSetInformationProcess', # 0x21d
'NtUserSetInformationThread', # 0x21e
'NtUserSetInternalWindowPos', # 0x21f
'NtUserSetKeyboardState', # 0x220
'NtUserSetMenu', # 0x221
'NtUserSetMenuContextHelpId', # 0x222
'NtUserSetMenuDefaultItem', # 0x223
'NtUserSetMenuFlagRtoL', # 0x224
'NtUserSetObjectInformation', # 0x225
'NtUserSetParent', # 0x226
'NtUserSetProcessWindowStation', # 0x227
'NtUserGetProp', # 0x228
'NtUserSetProp', # 0x229
'NtUserSetScrollInfo', # 0x22a
'NtUserSetShellWindowEx', # 0x22b
'NtUserSetSysColors', # 0x22c
'NtUserSetSystemCursor', # 0x22d
'NtUserSetSystemMenu', # 0x22e
'NtUserSetSystemTimer', # 0x22f
'NtUserSetThreadDesktop', # 0x230
'NtUserSetThreadLayoutHandles', # 0x231
'NtUserSetThreadState', # 0x232
'NtUserSetTimer', # 0x233
'NtUserSetProcessDPIAware', # 0x234
'NtUserSetWindowFNID', # 0x235
'NtUserSetWindowLong', # 0x236
'NtUserSetWindowPlacement', # 0x237
'NtUserSetWindowPos', # 0x238
'NtUserSetWindowRgn', # 0x239
'NtUserGetWindowRgnEx', # 0x23a
'NtUserSetWindowRgnEx', # 0x23b
'NtUserSetWindowsHookAW', # 0x23c
'NtUserSetWindowsHookEx', # 0x23d
'NtUserSetWindowStationUser', # 0x23e
'NtUserSetWindowWord', # 0x23f
'NtUserSetWinEventHook', # 0x240
'NtUserShowCaret', # 0x241
'NtUserShowScrollBar', # 0x242
'NtUserShowWindow', # 0x243
'NtUserShowWindowAsync', # 0x244
'NtUserSoundSentry', # 0x245
'NtUserSwitchDesktop', # 0x246
'NtUserSystemParametersInfo', # 0x247
'NtUserTestForInteractiveUser', # 0x248
'NtUserThunkedMenuInfo', # 0x249
'NtUserThunkedMenuItemInfo', # 0x24a
'NtUserToUnicodeEx', # 0x24b
'NtUserTrackMouseEvent', # 0x24c
'NtUserTrackPopupMenuEx', # 0x24d
'NtUserCalcMenuBar', # 0x24e
'NtUserPaintMenuBar', # 0x24f
'NtUserTranslateAccelerator', # 0x250
'NtUserTranslateMessage', # 0x251
'NtUserUnhookWindowsHookEx', # 0x252
'NtUserUnhookWinEvent', # 0x253
'NtUserUnloadKeyboardLayout', # 0x254
'NtUserUnlockWindowStation', # 0x255
'NtUserUnregisterClass', # 0x256
'NtUserUnregisterUserApiHook', # 0x257
'NtUserUnregisterHotKey', # 0x258
'NtUserUpdateInputContext', # 0x259
'NtUserUpdateInstance', # 0x25a
'NtUserUpdateLayeredWindow', # 0x25b
'NtUserGetLayeredWindowAttributes', # 0x25c
'NtUserSetLayeredWindowAttributes', # 0x25d
'NtUserUpdatePerUserSystemParameters', # 0x25e
'NtUserUserHandleGrantAccess', # 0x25f
'NtUserValidateHandleSecure', # 0x260
'NtUserValidateRect', # 0x261
'NtUserValidateTimerCallback', # 0x262
'NtUserVkKeyScanEx', # 0x263
'NtUserWaitForInputIdle', # 0x264
'NtUserWaitForMsgAndEvent', # 0x265
'NtUserWaitMessage', # 0x266
'NtUserWin32PoolAllocationStats', # 0x267
'NtUserWindowFromPhysicalPoint', # 0x268
'NtUserWindowFromPoint', # 0x269
'NtUserYieldTask', # 0x26a
'NtUserRemoteConnect', # 0x26b
'NtUserRemoteRedrawRectangle', # 0x26c
'NtUserRemoteRedrawScreen', # 0x26d
'NtUserRemoteStopScreenUpdates', # 0x26e
'NtUserCtxDisplayIOCtl', # 0x26f
'NtUserRegisterSessionPort', # 0x270
'NtUserUnregisterSessionPort', # 0x271
'NtUserUpdateWindowTransform', # 0x272
'NtUserDwmStartRedirection', # 0x273
'NtUserDwmStopRedirection', # 0x274
'NtUserDwmHintDxUpdate', # 0x275
'NtUserDwmGetDxRgn', # 0x276
'NtUserGetWindowMinimizeRect', # 0x277
'NtGdiEngAssociateSurface', # 0x278
'NtGdiEngCreateBitmap', # 0x279
'NtGdiEngCreateDeviceSurface', # 0x27a
'NtGdiEngCreateDeviceBitmap', # 0x27b
'NtGdiEngCreatePalette', # 0x27c
'NtGdiEngComputeGlyphSet', # 0x27d
'NtGdiEngCopyBits', # 0x27e
'NtGdiEngDeletePalette', # 0x27f
'NtGdiEngDeleteSurface', # 0x280
'NtGdiEngEraseSurface', # 0x281
'NtGdiEngUnlockSurface', # 0x282
'NtGdiEngLockSurface', # 0x283
'NtGdiEngBitBlt', # 0x284
'NtGdiEngStretchBlt', # 0x285
'NtGdiEngPlgBlt', # 0x286
'NtGdiEngMarkBandingSurface', # 0x287
'NtGdiEngStrokePath', # 0x288
'NtGdiEngFillPath', # 0x289
'NtGdiEngStrokeAndFillPath', # 0x28a
'NtGdiEngPaint', # 0x28b
'NtGdiEngLineTo', # 0x28c
'NtGdiEngAlphaBlend', # 0x28d
'NtGdiEngGradientFill', # 0x28e
'NtGdiEngTransparentBlt', # 0x28f
'NtGdiEngTextOut', # 0x290
'NtGdiEngStretchBltROP', # 0x291
'NtGdiXLATEOBJ_cGetPalette', # 0x292
'NtGdiXLATEOBJ_iXlate', # 0x293
'NtGdiXLATEOBJ_hGetColorTransform', # 0x294
'NtGdiCLIPOBJ_bEnum', # 0x295
'NtGdiCLIPOBJ_cEnumStart', # 0x296
'NtGdiCLIPOBJ_ppoGetPath', # 0x297
'NtGdiEngDeletePath', # 0x298
'NtGdiEngCreateClip', # 0x299
'NtGdiEngDeleteClip', # 0x29a
'NtGdiBRUSHOBJ_ulGetBrushColor', # 0x29b
'NtGdiBRUSHOBJ_pvAllocRbrush', # 0x29c
'NtGdiBRUSHOBJ_pvGetRbrush', # 0x29d
'NtGdiBRUSHOBJ_hGetColorTransform', # 0x29e
'NtGdiXFORMOBJ_bApplyXform', # 0x29f
'NtGdiXFORMOBJ_iGetXform', # 0x2a0
'NtGdiFONTOBJ_vGetInfo', # 0x2a1
'NtGdiFONTOBJ_pxoGetXform', # 0x2a2
'NtGdiFONTOBJ_cGetGlyphs', # 0x2a3
'NtGdiFONTOBJ_pifi', # 0x2a4
'NtGdiFONTOBJ_pfdg', # 0x2a5
'NtGdiFONTOBJ_pQueryGlyphAttrs', # 0x2a6
'NtGdiFONTOBJ_pvTrueTypeFontFile', # 0x2a7
'NtGdiFONTOBJ_cGetAllGlyphHandles', # 0x2a8
'NtGdiSTROBJ_bEnum', # 0x2a9
'NtGdiSTROBJ_bEnumPositionsOnly', # 0x2aa
'NtGdiSTROBJ_bGetAdvanceWidths', # 0x2ab
'NtGdiSTROBJ_vEnumStart', # 0x2ac
'NtGdiSTROBJ_dwGetCodePage', # 0x2ad
'NtGdiPATHOBJ_vGetBounds', # 0x2ae
'NtGdiPATHOBJ_bEnum', # 0x2af
'NtGdiPATHOBJ_vEnumStart', # 0x2b0
'NtGdiPATHOBJ_vEnumStartClipLines', # 0x2b1
'NtGdiPATHOBJ_bEnumClipLines', # 0x2b2
'NtGdiGetDhpdev', # 0x2b3
'NtGdiEngCheckAbort', # 0x2b4
'NtGdiHT_Get8BPPFormatPalette', # 0x2b5
'NtGdiHT_Get8BPPMaskPalette', # 0x2b6
'NtGdiUpdateTransform', # 0x2b7
'NtGdiSetPUMPDOBJ', # 0x2b8
'NtGdiBRUSHOBJ_DeleteRbrush', # 0x2b9
'NtGdiUMPDEngFreeUserMem', # 0x2ba
'NtGdiDrawStream', # 0x2bb
'NtGdiDwmGetDirtyRgn', # 0x2bc
'NtGdiDwmGetSurfaceData', # 0x2bd
'NtGdiDdDDICreateAllocation', # 0x2be
'NtGdiDdDDIQueryResourceInfo', # 0x2bf
'NtGdiDdDDIOpenResource', # 0x2c0
'NtGdiDdDDIDestroyAllocation', # 0x2c1
'NtGdiDdDDISetAllocationPriority', # 0x2c2
'NtGdiDdDDIQueryAllocationResidency', # 0x2c3
'NtGdiDdDDICreateDevice', # 0x2c4
'NtGdiDdDDIDestroyDevice', # 0x2c5
'NtGdiDdDDICreateContext', # 0x2c6
'NtGdiDdDDIDestroyContext', # 0x2c7
'NtGdiDdDDICreateSynchronizationObject', # 0x2c8
'NtGdiDdDDIDestroySynchronizationObject', # 0x2c9
'NtGdiDdDDIWaitForSynchronizationObject', # 0x2ca
'NtGdiDdDDISignalSynchronizationObject', # 0x2cb
'NtGdiDdDDIGetRuntimeData', # 0x2cc
'NtGdiDdDDIQueryAdapterInfo', # 0x2cd
'NtGdiDdDDILock', # 0x2ce
'NtGdiDdDDIUnlock', # 0x2cf
'NtGdiDdDDIGetDisplayModeList', # 0x2d0
'NtGdiDdDDISetDisplayMode', # 0x2d1
'NtGdiDdDDIGetMultisampleMethodList', # 0x2d2
'NtGdiDdDDIPresent', # 0x2d3
'NtGdiDdDDIRender', # 0x2d4
'NtGdiDdDDIOpenAdapterFromDeviceName', # 0x2d5
'NtGdiDdDDIOpenAdapterFromHdc', # 0x2d6
'NtGdiDdDDICloseAdapter', # 0x2d7
'NtGdiDdDDIGetSharedPrimaryHandle', # 0x2d8
'NtGdiDdDDIEscape', # 0x2d9
'NtGdiDdDDIQueryStatistics', # 0x2da
'NtGdiDdDDISetVidPnSourceOwner', # 0x2db
'NtGdiDdDDIGetPresentHistory', # 0x2dc
'NtGdiDdDDICreateOverlay', # 0x2dd
'NtGdiDdDDIUpdateOverlay', # 0x2de
'NtGdiDdDDIFlipOverlay', # 0x2df
'NtGdiDdDDIDestroyOverlay', # 0x2e0
'NtGdiDdDDIWaitForVerticalBlankEvent', # 0x2e1
'NtGdiDdDDISetGammaRamp', # 0x2e2
'NtGdiDdDDIGetDeviceState', # 0x2e3
'NtGdiDdDDICreateDCFromMemory', # 0x2e4
'NtGdiDdDDIDestroyDCFromMemory', # 0x2e5
'NtGdiDdDDISetContextSchedulingPriority', # 0x2e6
'NtGdiDdDDIGetContextSchedulingPriority', # 0x2e7
'NtGdiDdDDISetProcessSchedulingPriorityClass', # 0x2e8
'NtGdiDdDDIGetProcessSchedulingPriorityClass', # 0x2e9
'NtGdiDdDDIReleaseProcessVidPnSourceOwners', # 0x2ea
'NtGdiDdDDIGetScanLine', # 0x2eb
'NtGdiDdDDISetQueuedLimit', # 0x2ec
'NtGdiDdDDIPollDisplayChildren', # 0x2ed
'NtGdiDdDDIInvalidateActiveVidPn', # 0x2ee
'NtGdiDdDDICheckOcclusion', # 0x2ef
'NtGdiDdDDIWaitForIdle', # 0x2f0
'NtGdiDdDDICheckMonitorPowerState', # 0x2f1
'NtGdiDdDDICheckExclusiveOwnership', # 0x2f2
'NtGdiDdDDISetDisplayPrivateDriverFormat', # 0x2f3
'NtGdiDdDDISharedPrimaryLockNotification', # 0x2f4
'NtGdiDdDDISharedPrimaryUnLockNotification', # 0x2f5
'DxgStubEnableDirectDrawRedirection', # 0x2f6
'DxgStubDeleteDirectDrawObject', # 0x2f7
'NtGdiGetNumberOfPhysicalMonitors', # 0x2f8
'NtGdiGetPhysicalMonitors', # 0x2f9
'NtGdiGetPhysicalMonitorDescription', # 0x2fa
'NtGdiDestroyPhysicalMonitor', # 0x2fb
'NtGdiDDCCIGetVCPFeature', # 0x2fc
'NtGdiDDCCISetVCPFeature', # 0x2fd
'NtGdiDDCCISaveCurrentSettings', # 0x2fe
'NtGdiDDCCIGetCapabilitiesStringLength', # 0x2ff
'NtGdiDDCCIGetCapabilitiesString', # 0x300
'NtGdiDDCCIGetTimingReport', # 0x301
'NtUserSetMirrorRendering', # 0x302
'NtUserShowSystemCursor', # 0x303
],
]
| gpl-2.0 |
petrutlucian94/nova_dev | nova/objects/virtual_interface.py | 16 | 3601 | # Copyright (C) 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import exception
from nova.objects import base
from nova.objects import fields
class VirtualInterface(base.NovaPersistentObject, base.NovaObject):
    """Nova object wrapping one row of the virtual_interfaces DB table."""
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.IntegerField(),
        # Interface (MAC) address; the DB column is nullable.
        'address': fields.StringField(nullable=True),
        'network_id': fields.IntegerField(),
        'instance_uuid': fields.UUIDField(),
        'uuid': fields.UUIDField(),
    }

    @staticmethod
    def _from_db_object(context, vif, db_vif):
        """Copy every declared field from the DB row onto *vif*.

        Resets change tracking afterwards so the freshly loaded object
        is not reported as having pending updates.
        """
        for field in vif.fields:
            vif[field] = db_vif[field]
        vif._context = context
        vif.obj_reset_changes()
        return vif

    @base.remotable_classmethod
    def get_by_id(cls, context, vif_id):
        """Return the VirtualInterface with the given DB id, or None."""
        db_vif = db.virtual_interface_get(context, vif_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_uuid(cls, context, vif_uuid):
        """Return the VirtualInterface with the given UUID, or None."""
        db_vif = db.virtual_interface_get_by_uuid(context, vif_uuid)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_address(cls, context, address):
        """Return the VirtualInterface with the given address, or None."""
        db_vif = db.virtual_interface_get_by_address(context, address)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable_classmethod
    def get_by_instance_and_network(cls, context, instance_uuid, network_id):
        """Return the interface for (instance, network), or None."""
        db_vif = db.virtual_interface_get_by_instance_and_network(context,
                instance_uuid, network_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)

    @base.remotable
    def create(self, context):
        """Persist this object as a new DB row.

        :raises: exception.ObjectActionError if the object already has an
                 'id' set, i.e. it was previously created.
        """
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        updates = self.obj_get_changes()
        db_vif = db.virtual_interface_create(context, updates)
        self._from_db_object(context, self, db_vif)

    @base.remotable_classmethod
    def delete_by_instance_uuid(cls, context, instance_uuid):
        """Delete all virtual interfaces belonging to the given instance."""
        db.virtual_interface_delete_by_instance(context, instance_uuid)
class VirtualInterfaceList(base.ObjectListBase, base.NovaObject):
    """List object holding VirtualInterface instances."""
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'objects': fields.ListOfObjectsField('VirtualInterface'),
    }
    child_versions = {
        '1.0': '1.0',
    }

    @base.remotable_classmethod
    def get_all(cls, context):
        """Return every virtual interface in the database."""
        rows = db.virtual_interface_get_all(context)
        return base.obj_make_list(context, cls(), VirtualInterface, rows)

    @base.remotable_classmethod
    def get_by_instance_uuid(cls, context, instance_uuid, use_slave=False):
        """Return all virtual interfaces attached to the given instance."""
        rows = db.virtual_interface_get_by_instance(
            context, instance_uuid, use_slave=use_slave)
        return base.obj_make_list(context, cls(), VirtualInterface, rows)
| apache-2.0 |
SnappleCap/oh-mainline | mysite/base/migrations/0004_shrink_timestamp_text_column.py | 17 | 1721 | # This file is part of OpenHatch.
# Copyright (C) 2010 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.base.models import *
class Migration:
    """South schema migration: shrink Timestamp.key from 255 to 64 chars.

    Migration files are a historical record of applied schema changes;
    the frozen ORM dict below must stay exactly as South generated it.
    """

    def forwards(self, orm):
        # Changing field 'Timestamp.key'
        # (to signature: django.db.models.fields.CharField(unique=True, max_length=64))
        db.alter_column('base_timestamp', 'key', orm['base.timestamp:key'])

    def backwards(self, orm):
        # Changing field 'Timestamp.key'
        # (to signature: django.db.models.fields.CharField(max_length=255, unique=True))
        db.alter_column('base_timestamp', 'key', orm['base.timestamp:key'])

    # Frozen model definitions captured at migration-creation time.
    models = {
        'base.timestamp': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {})
        }
    }

    complete_apps = ['base']
| agpl-3.0 |
safwanrahman/kuma | kuma/dashboards/forms.py | 4 | 1734 | from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from kuma.core.form_fields import StrippedCharField
# Locale filter choices: a "match any locale" sentinel followed by every
# language configured for the site.
LANG_CHOICES = [('', _('All Locales'))] + settings.LANGUAGES

# Granularity choices for the comparison period that precedes the selected
# date range; the empty value disables the comparison.
PERIOD_CHOICES = [
    ('', _('None')),
    ('hour', _('Hour')),
    ('day', _('Day')),
    ('week', _('Week')),
    ('month', _('30 days')),
]
class RevisionDashboardForm(forms.Form):
    """Filter form for the revision dashboard.

    Lets the user narrow the revision list by locale, author name, topic,
    a start/end date window, a preceding comparison period and an
    author class (all / known / unknown).
    """
    ALL_AUTHORS = 0
    KNOWN_AUTHORS = 1
    UNKNOWN_AUTHORS = 2
    AUTHOR_CHOICES = [
        (ALL_AUTHORS, _('All Authors')),
        (KNOWN_AUTHORS, _('Known Authors')),
        (UNKNOWN_AUTHORS, _('Unknown Authors')),
    ]
    locale = forms.ChoiceField(
        choices=LANG_CHOICES,
        # Required for non-translations, which is
        # enforced in Document.clean().
        required=False,
        label=_(u'Locale:'))
    user = StrippedCharField(
        min_length=1, max_length=255,
        required=False,
        label=_(u'User:'))
    topic = StrippedCharField(
        min_length=1, max_length=255,
        required=False,
        label=_(u'Topic:'))
    # Fix: the HTML5 "pattern" attributes are raw strings now.  "\d" is an
    # invalid escape sequence in a plain str literal (DeprecationWarning on
    # modern Python); the rendered pattern value itself is unchanged.
    start_date = forms.DateField(
        required=False, label=_(u'Start Date:'),
        input_formats=['%m/%d/%Y'],
        widget=forms.TextInput(attrs={'pattern': r'\d{1,2}/\d{1,2}/\d{4}'}))
    end_date = forms.DateField(
        required=False, label=_(u'End Date:'),
        input_formats=['%m/%d/%Y'],
        widget=forms.TextInput(attrs={'pattern': r'\d{1,2}/\d{1,2}/\d{4}'}))
    preceding_period = forms.ChoiceField(
        choices=PERIOD_CHOICES,
        required=False,
        label=_(u'Preceding Period:'))
    authors = forms.ChoiceField(
        choices=AUTHOR_CHOICES,
        required=False,
        label=_(u'Authors'))
| mpl-2.0 |
angelapper/odoo | addons/crm_claim/report/crm_claim_report.py | 42 | 3581 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields,osv
from openerp import tools
# Claim priority selection values, ordered from lowest to highest.
AVAILABLE_PRIORITIES = [
    ('0', 'Low'),
    ('1', 'Normal'),
    ('2', 'High')
]
class crm_claim_report(osv.osv):
    """ CRM Claim Report

    Read-only reporting model backed by a SQL view: ``_auto = False`` keeps
    the ORM from creating a table, and ``init()`` (re)creates the
    ``crm_claim_report`` view it reads from.
    """
    _name = "crm.claim.report"
    _auto = False
    _description = "CRM Claim Report"
    _columns = {
        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'team_id': fields.many2one('crm.team', 'Team', oldname='section_id', readonly=True),
        'nbr': fields.integer('# of Claims', readonly=True),  # TDE FIXME master: rename into nbr_claims
        # Fix: 'company_id' was declared twice in this dict literal with an
        # identical definition; the duplicate has been removed.  Behavior is
        # unchanged (the later duplicate key silently won anyway).
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True, select=True),
        'claim_date': fields.datetime('Claim Date', readonly=True),
        'delay_close': fields.float('Delay to close', digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to close the case"),
        'stage_id': fields.many2one ('crm.claim.stage', 'Stage', readonly=True,domain="[('team_ids','=',team_id)]"),
        'categ_id': fields.many2one('crm.claim.category', 'Category',readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'priority': fields.selection(AVAILABLE_PRIORITIES, 'Priority'),
        'type_action': fields.selection([('correction','Corrective Action'),('prevention','Preventive Action')], 'Action Type'),
        'date_closed': fields.datetime('Close Date', readonly=True, select=True),
        'date_deadline': fields.date('Deadline', readonly=True, select=True),
        'delay_expected': fields.float('Overpassed Deadline',digits=(16,2),readonly=True, group_operator="avg"),
        'email': fields.integer('# Emails', size=128, readonly=True),
        'subject': fields.char('Claim Subject', readonly=True)
    }

    def init(self, cr):
        """ Display Number of cases And Team Name
        @param cr: the current row, from the database cursor,
        """
        tools.drop_view_if_exists(cr, 'crm_claim_report')
        cr.execute("""
create or replace view crm_claim_report as (
select
min(c.id) as id,
c.date as claim_date,
c.date_closed as date_closed,
c.date_deadline as date_deadline,
c.user_id,
c.stage_id,
c.team_id,
c.partner_id,
c.company_id,
c.categ_id,
c.name as subject,
count(*) as nbr,
c.priority as priority,
c.type_action as type_action,
c.create_date as create_date,
avg(extract('epoch' from (c.date_closed-c.create_date)))/(3600*24) as delay_close,
(SELECT count(id) FROM mail_message WHERE model='crm.claim' AND res_id=c.id) AS email,
extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24) as delay_expected
from
crm_claim c
group by c.date,\
c.user_id,c.team_id, c.stage_id,\
c.categ_id,c.partner_id,c.company_id,c.create_date,
c.priority,c.type_action,c.date_deadline,c.date_closed,c.id
)""")
| agpl-3.0 |
verpoorten/immobilier | main/layout.py | 1 | 1238 | ##############################################################################
#
# Immobilier it's an application
# designed to manage the core business of property management, buildings,
# rental agreement and so on.
#
# Copyright (C) 2016-2018 Verpoorten Leïla
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django import shortcuts
from random import randint
def render(request, template, values):
    """Render *template* with *values* after injecting a random 'js' token.

    The token is an integer in [0, 100] stored under the 'js' key of the
    caller's context dict (the dict is mutated in place) — presumably used
    by templates as a cache-busting suffix; TODO confirm usage.
    """
    cache_buster = randint(0, 100)
    values['js'] = cache_buster
    return shortcuts.render(request, template, values)
b-me/django | django/contrib/gis/maps/google/zoom.py | 527 | 6676 | from __future__ import unicode_literals
from math import atan, exp, log, pi, sin
from django.contrib.gis.geos import GEOSGeometry, LinearRing, Point, Polygon
from django.contrib.gis.maps.google.gmap import GoogleMapException
from django.utils.six.moves import range
# Constants used for degree to radian conversion, and vice-versa.
DTOR = pi / 180.
RTOD = 180. / pi
class GoogleZoom(object):
    """
    GoogleZoom is a utility for performing operations related to the zoom
    levels on Google Maps.

    This class is inspired by the OpenStreetMap Mapnik tile generation routine
    `generate_tiles.py`, and the article "How Big Is the World" (Hack #16) in
    "Google Maps Hacks" by Rich Gibson and Schuyler Erle.

    `generate_tiles.py` may be found at:
    http://trac.openstreetmap.org/browser/applications/rendering/mapnik/generate_tiles.py

    "Google Maps Hacks" may be found at http://safari.oreilly.com/0596101619
    """

    def __init__(self, num_zoom=19, tilesize=256):
        "Initializes the Google Zoom object."
        # Google's tilesize is 256x256, square tiles are assumed.
        self._tilesize = tilesize
        # The number of zoom levels
        self._nzoom = num_zoom
        # Initializing arrays to hold the parameters for each one of the
        # zoom levels.
        self._degpp = []  # Degrees per pixel
        self._radpp = []  # Radians per pixel
        self._npix = []   # 1/2 the number of pixels for a tile at the given zoom level
        # Incrementing through the zoom levels and populating the parameter arrays.
        z = tilesize  # The number of pixels per zoom level.
        for i in range(num_zoom):
            # Getting the degrees and radians per pixel, and the 1/2 the number of
            # for every zoom level.
            self._degpp.append(z / 360.)  # degrees per pixel
            self._radpp.append(z / (2 * pi))  # radians per pixel
            self._npix.append(z / 2)  # number of pixels to center of tile
            # Multiplying `z` by 2 for the next iteration.
            z *= 2

    def __len__(self):
        "Returns the number of zoom levels."
        return self._nzoom

    def get_lon_lat(self, lonlat):
        "Unpacks longitude, latitude from GEOS Points and 2-tuples."
        # Accepts either a GEOS Point or a plain (lon, lat) 2-tuple.
        if isinstance(lonlat, Point):
            lon, lat = lonlat.coords
        else:
            lon, lat = lonlat
        return lon, lat

    def lonlat_to_pixel(self, lonlat, zoom):
        "Converts a longitude, latitude coordinate pair for the given zoom level."
        # Setting up, unpacking the longitude, latitude values and getting the
        # number of pixels for the given zoom level.
        lon, lat = self.get_lon_lat(lonlat)
        npix = self._npix[zoom]
        # Calculating the pixel x coordinate by multiplying the longitude value
        # with the number of degrees/pixel at the given zoom level.
        px_x = round(npix + (lon * self._degpp[zoom]))
        # Creating the factor, and ensuring that 1 or -1 is not passed in as the
        # base to the logarithm.  Here's why:
        #  if fac = -1, we'll get log(0) which is undefined;
        #  if fac = 1, our logarithm base will be divided by 0, also undefined.
        fac = min(max(sin(DTOR * lat), -0.9999), 0.9999)
        # Calculating the pixel y coordinate (Mercator projection).
        px_y = round(npix + (0.5 * log((1 + fac) / (1 - fac)) * (-1.0 * self._radpp[zoom])))
        # Returning the pixel x, y to the caller of the function.
        return (px_x, px_y)

    def pixel_to_lonlat(self, px, zoom):
        "Converts a pixel to a longitude, latitude pair at the given zoom level."
        if len(px) != 2:
            raise TypeError('Pixel should be a sequence of two elements.')
        # Getting the number of pixels for the given zoom level.
        npix = self._npix[zoom]
        # Calculating the longitude value, using the degrees per pixel.
        lon = (px[0] - npix) / self._degpp[zoom]
        # Calculating the latitude value (inverse Mercator projection).
        lat = RTOD * (2 * atan(exp((px[1] - npix) / (-1.0 * self._radpp[zoom]))) - 0.5 * pi)
        # Returning the longitude, latitude coordinate pair.
        return (lon, lat)

    def tile(self, lonlat, zoom):
        """
        Returns a Polygon corresponding to the region represented by a fictional
        Google Tile for the given longitude/latitude pair and zoom level. This
        tile is used to determine the size of a tile at the given point.
        """
        # The given lonlat is the center of the tile.
        delta = self._tilesize / 2
        # Getting the pixel coordinates corresponding to the
        # the longitude/latitude.
        px = self.lonlat_to_pixel(lonlat, zoom)
        # Getting the lower-left and upper-right lat/lon coordinates
        # for the bounding box of the tile.
        ll = self.pixel_to_lonlat((px[0] - delta, px[1] - delta), zoom)
        ur = self.pixel_to_lonlat((px[0] + delta, px[1] + delta), zoom)
        # Constructing the Polygon, representing the tile and returning.
        # SRID 4326 == WGS84 lon/lat.
        return Polygon(LinearRing(ll, (ll[0], ur[1]), ur, (ur[0], ll[1]), ll), srid=4326)

    def get_zoom(self, geom):
        "Returns the optimal Zoom level for the given geometry."
        # Checking the input type.
        if not isinstance(geom, GEOSGeometry) or geom.srid != 4326:
            raise TypeError('get_zoom() expects a GEOS Geometry with an SRID of 4326.')
        # Getting the envelope for the geometry, and its associated width, height
        # and centroid.
        env = geom.envelope
        env_w, env_h = self.get_width_height(env.extent)
        center = env.centroid
        for z in range(self._nzoom):
            # Getting the tile at the zoom level.
            tile_w, tile_h = self.get_width_height(self.tile(center, z).extent)
            # When we span more than one tile, this is an approximately good
            # zoom level.
            if (env_w > tile_w) or (env_h > tile_h):
                if z == 0:
                    raise GoogleMapException('Geometry width and height should not exceed that of the Earth.')
                # The previous zoom level was the last one that contained
                # the whole geometry in a single tile.
                return z - 1
        # Otherwise, we've zoomed in to the max.
        return self._nzoom - 1

    def get_width_height(self, extent):
        """
        Returns the width and height for the given extent.

        *extent* is a 4-tuple (xmin, ymin, xmax, ymax); distances are
        computed between GEOS Points, in the units of the coordinate system.
        """
        # Getting the lower-left, upper-left, and upper-right
        # coordinates from the extent.
        ll = Point(extent[:2])
        ul = Point(extent[0], extent[3])
        ur = Point(extent[2:])
        # Calculating the width and height.
        height = ll.distance(ul)
        width = ul.distance(ur)
        return width, height
| bsd-3-clause |
broferek/ansible | lib/ansible/modules/remote_management/ipmi/ipmi_boot.py | 47 | 5712 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipmi_boot
short_description: Management of order of boot devices
description:
- Use this module to manage order of boot devices
version_added: "2.2"
options:
name:
description:
- Hostname or ip address of the BMC.
required: true
port:
description:
- Remote RMCP port.
default: 623
user:
description:
- Username to use to connect to the BMC.
required: true
password:
description:
- Password to connect to the BMC.
required: true
bootdev:
description:
- Set boot device to use on next reboot
required: true
choices:
- network -- Request network boot
- floppy -- Boot from floppy
- hd -- Boot from hard drive
- safe -- Boot from hard drive, requesting 'safe mode'
- optical -- boot from CD/DVD/BD drive
- setup -- Boot into setup utility
- default -- remove any IPMI directed boot device request
state:
description:
- Whether to ensure that boot devices is desired.
default: present
choices:
- present -- Request system turn on
- absent -- Request system turn on
persistent:
description:
- If set, ask that system firmware uses this device beyond next boot.
Be aware many systems do not honor this.
type: bool
default: 'no'
uefiboot:
description:
- If set, request UEFI boot explicitly.
Strictly speaking, the spec suggests that if not set, the system should BIOS boot and offers no "don't care" option.
In practice, this flag not being set does not preclude UEFI boot on any system I've encountered.
type: bool
default: 'no'
requirements:
- "python >= 2.6"
- pyghmi
author: "Bulat Gaifullin (@bgaifullin) <gaifullinbf@gmail.com>"
'''
RETURN = '''
bootdev:
description: The boot device name which will be used beyond next boot.
returned: success
type: str
sample: default
persistent:
description: If True, system firmware will use this device beyond next boot.
returned: success
type: bool
sample: false
uefimode:
description: If True, system firmware will use UEFI boot explicitly beyond next boot.
returned: success
type: bool
sample: false
'''
EXAMPLES = '''
# Ensure bootdevice is HD.
- ipmi_boot:
name: test.testdomain.com
user: admin
password: password
bootdev: hd
# Ensure bootdevice is not Network
- ipmi_boot:
name: test.testdomain.com
user: admin
password: password
bootdev: network
state: absent
'''
import traceback
PYGHMI_IMP_ERR = None
try:
from pyghmi.ipmi import command
except ImportError:
PYGHMI_IMP_ERR = traceback.format_exc()
command = None
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
def main():
    """Ansible entry point: reconcile the BMC's boot-device with the request.

    state=present ensures the requested bootdev (with persistence/UEFI
    flags) is configured; state=absent resets a matching directed-boot
    request back to 'default'.  Supports check mode.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            port=dict(default=623, type='int'),
            user=dict(required=True, no_log=True),
            password=dict(required=True, no_log=True),
            state=dict(default='present', choices=['present', 'absent']),
            bootdev=dict(required=True, choices=['network', 'hd', 'floppy', 'safe', 'optical', 'setup', 'default']),
            persistent=dict(default=False, type='bool'),
            uefiboot=dict(default=False, type='bool')
        ),
        supports_check_mode=True,
    )

    # pyghmi failed to import at module load; surface the saved traceback now.
    if command is None:
        module.fail_json(msg=missing_required_lib('pyghmi'), exception=PYGHMI_IMP_ERR)

    name = module.params['name']
    port = module.params['port']
    user = module.params['user']
    password = module.params['password']
    state = module.params['state']
    bootdev = module.params['bootdev']
    persistent = module.params['persistent']
    uefiboot = module.params['uefiboot']
    request = dict()

    # 'absent' is implemented by setting bootdev to 'default', so requesting
    # the removal of 'default' itself is contradictory and rejected.
    if state == 'absent' and bootdev == 'default':
        module.fail_json(msg="The bootdev 'default' cannot be used with state 'absent'.")

    # --- run command ---
    try:
        ipmi_cmd = command.Command(
            bmc=name, userid=user, password=password, port=port
        )
        module.debug('ipmi instantiated - name: "%s"' % name)
        current = ipmi_cmd.get_bootdev()
        # uefimode may not supported by BMC, so use desired value as default
        current.setdefault('uefimode', uefiboot)
        if state == 'present' and current != dict(bootdev=bootdev, persistent=persistent, uefimode=uefiboot):
            request = dict(bootdev=bootdev, uefiboot=uefiboot, persist=persistent)
        elif state == 'absent' and current['bootdev'] == bootdev:
            request = dict(bootdev='default')
        else:
            # Already in the desired state: report no change and exit.
            module.exit_json(changed=False, **current)

        if module.check_mode:
            # Check mode: report what would be set without touching the BMC.
            response = dict(bootdev=request['bootdev'])
        else:
            response = ipmi_cmd.set_bootdev(**request)

        if 'error' in response:
            module.fail_json(msg=response['error'])

        # Echo the effective persistence/UEFI settings back in the result.
        if 'persist' in request:
            response['persistent'] = request['persist']
        if 'uefiboot' in request:
            response['uefimode'] = request['uefiboot']

        module.exit_json(changed=True, **response)
    except Exception as e:
        module.fail_json(msg=str(e))


if __name__ == '__main__':
    main()
| gpl-3.0 |
nattee/cafe-grader-web | lib/assets/Lib/encodings/cp037.py | 37 | 13428 | """ Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: delegates to the C-implemented charmap codec using the
    # module-level cp037 lookup tables.  (File generated by gencodec.py.)

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # cp037 is a single-byte charmap encoding, so each chunk can be encoded
    # independently -- no state needs to be kept between calls.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]


class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte decoding is likewise stateless between chunks.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # No overrides needed: Codec supplies encode/decode, StreamWriter the I/O.
    pass


class StreamReader(Codec,codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry registered for 'cp037' by the encodings
    package's codec search function."""
    return codecs.CodecInfo(
        name='cp037',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x9c' # 0x04 -> CONTROL
'\t' # 0x05 -> HORIZONTAL TABULATION
'\x86' # 0x06 -> CONTROL
'\x7f' # 0x07 -> DELETE
'\x97' # 0x08 -> CONTROL
'\x8d' # 0x09 -> CONTROL
'\x8e' # 0x0A -> CONTROL
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x9d' # 0x14 -> CONTROL
'\x85' # 0x15 -> CONTROL
'\x08' # 0x16 -> BACKSPACE
'\x87' # 0x17 -> CONTROL
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x92' # 0x1A -> CONTROL
'\x8f' # 0x1B -> CONTROL
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
'\x80' # 0x20 -> CONTROL
'\x81' # 0x21 -> CONTROL
'\x82' # 0x22 -> CONTROL
'\x83' # 0x23 -> CONTROL
'\x84' # 0x24 -> CONTROL
'\n' # 0x25 -> LINE FEED
'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
'\x1b' # 0x27 -> ESCAPE
'\x88' # 0x28 -> CONTROL
'\x89' # 0x29 -> CONTROL
'\x8a' # 0x2A -> CONTROL
'\x8b' # 0x2B -> CONTROL
'\x8c' # 0x2C -> CONTROL
'\x05' # 0x2D -> ENQUIRY
'\x06' # 0x2E -> ACKNOWLEDGE
'\x07' # 0x2F -> BELL
'\x90' # 0x30 -> CONTROL
'\x91' # 0x31 -> CONTROL
'\x16' # 0x32 -> SYNCHRONOUS IDLE
'\x93' # 0x33 -> CONTROL
'\x94' # 0x34 -> CONTROL
'\x95' # 0x35 -> CONTROL
'\x96' # 0x36 -> CONTROL
'\x04' # 0x37 -> END OF TRANSMISSION
'\x98' # 0x38 -> CONTROL
'\x99' # 0x39 -> CONTROL
'\x9a' # 0x3A -> CONTROL
'\x9b' # 0x3B -> CONTROL
'\x14' # 0x3C -> DEVICE CONTROL FOUR
'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
'\x9e' # 0x3E -> CONTROL
'\x1a' # 0x3F -> SUBSTITUTE
' ' # 0x40 -> SPACE
'\xa0' # 0x41 -> NO-BREAK SPACE
'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
'\xa2' # 0x4A -> CENT SIGN
'.' # 0x4B -> FULL STOP
'<' # 0x4C -> LESS-THAN SIGN
'(' # 0x4D -> LEFT PARENTHESIS
'+' # 0x4E -> PLUS SIGN
'|' # 0x4F -> VERTICAL LINE
'&' # 0x50 -> AMPERSAND
'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
'!' # 0x5A -> EXCLAMATION MARK
'$' # 0x5B -> DOLLAR SIGN
'*' # 0x5C -> ASTERISK
')' # 0x5D -> RIGHT PARENTHESIS
';' # 0x5E -> SEMICOLON
'\xac' # 0x5F -> NOT SIGN
'-' # 0x60 -> HYPHEN-MINUS
'/' # 0x61 -> SOLIDUS
'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
'\xa6' # 0x6A -> BROKEN BAR
',' # 0x6B -> COMMA
'%' # 0x6C -> PERCENT SIGN
'_' # 0x6D -> LOW LINE
'>' # 0x6E -> GREATER-THAN SIGN
'?' # 0x6F -> QUESTION MARK
'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
'`' # 0x79 -> GRAVE ACCENT
':' # 0x7A -> COLON
'#' # 0x7B -> NUMBER SIGN
'@' # 0x7C -> COMMERCIAL AT
"'" # 0x7D -> APOSTROPHE
'=' # 0x7E -> EQUALS SIGN
'"' # 0x7F -> QUOTATION MARK
'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
'a' # 0x81 -> LATIN SMALL LETTER A
'b' # 0x82 -> LATIN SMALL LETTER B
'c' # 0x83 -> LATIN SMALL LETTER C
'd' # 0x84 -> LATIN SMALL LETTER D
'e' # 0x85 -> LATIN SMALL LETTER E
'f' # 0x86 -> LATIN SMALL LETTER F
'g' # 0x87 -> LATIN SMALL LETTER G
'h' # 0x88 -> LATIN SMALL LETTER H
'i' # 0x89 -> LATIN SMALL LETTER I
'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
'\xb1' # 0x8F -> PLUS-MINUS SIGN
'\xb0' # 0x90 -> DEGREE SIGN
'j' # 0x91 -> LATIN SMALL LETTER J
'k' # 0x92 -> LATIN SMALL LETTER K
'l' # 0x93 -> LATIN SMALL LETTER L
'm' # 0x94 -> LATIN SMALL LETTER M
'n' # 0x95 -> LATIN SMALL LETTER N
'o' # 0x96 -> LATIN SMALL LETTER O
'p' # 0x97 -> LATIN SMALL LETTER P
'q' # 0x98 -> LATIN SMALL LETTER Q
'r' # 0x99 -> LATIN SMALL LETTER R
'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
'\xb8' # 0x9D -> CEDILLA
'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
'\xa4' # 0x9F -> CURRENCY SIGN
'\xb5' # 0xA0 -> MICRO SIGN
'~' # 0xA1 -> TILDE
's' # 0xA2 -> LATIN SMALL LETTER S
't' # 0xA3 -> LATIN SMALL LETTER T
'u' # 0xA4 -> LATIN SMALL LETTER U
'v' # 0xA5 -> LATIN SMALL LETTER V
'w' # 0xA6 -> LATIN SMALL LETTER W
'x' # 0xA7 -> LATIN SMALL LETTER X
'y' # 0xA8 -> LATIN SMALL LETTER Y
'z' # 0xA9 -> LATIN SMALL LETTER Z
'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
'\xbf' # 0xAB -> INVERTED QUESTION MARK
'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
'\xae' # 0xAF -> REGISTERED SIGN
'^' # 0xB0 -> CIRCUMFLEX ACCENT
'\xa3' # 0xB1 -> POUND SIGN
'\xa5' # 0xB2 -> YEN SIGN
'\xb7' # 0xB3 -> MIDDLE DOT
'\xa9' # 0xB4 -> COPYRIGHT SIGN
'\xa7' # 0xB5 -> SECTION SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
'[' # 0xBA -> LEFT SQUARE BRACKET
']' # 0xBB -> RIGHT SQUARE BRACKET
'\xaf' # 0xBC -> MACRON
'\xa8' # 0xBD -> DIAERESIS
'\xb4' # 0xBE -> ACUTE ACCENT
'\xd7' # 0xBF -> MULTIPLICATION SIGN
'{' # 0xC0 -> LEFT CURLY BRACKET
'A' # 0xC1 -> LATIN CAPITAL LETTER A
'B' # 0xC2 -> LATIN CAPITAL LETTER B
'C' # 0xC3 -> LATIN CAPITAL LETTER C
'D' # 0xC4 -> LATIN CAPITAL LETTER D
'E' # 0xC5 -> LATIN CAPITAL LETTER E
'F' # 0xC6 -> LATIN CAPITAL LETTER F
'G' # 0xC7 -> LATIN CAPITAL LETTER G
'H' # 0xC8 -> LATIN CAPITAL LETTER H
'I' # 0xC9 -> LATIN CAPITAL LETTER I
'\xad' # 0xCA -> SOFT HYPHEN
'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
'}' # 0xD0 -> RIGHT CURLY BRACKET
'J' # 0xD1 -> LATIN CAPITAL LETTER J
'K' # 0xD2 -> LATIN CAPITAL LETTER K
'L' # 0xD3 -> LATIN CAPITAL LETTER L
'M' # 0xD4 -> LATIN CAPITAL LETTER M
'N' # 0xD5 -> LATIN CAPITAL LETTER N
'O' # 0xD6 -> LATIN CAPITAL LETTER O
'P' # 0xD7 -> LATIN CAPITAL LETTER P
'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
'R' # 0xD9 -> LATIN CAPITAL LETTER R
'\xb9' # 0xDA -> SUPERSCRIPT ONE
'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
'\\' # 0xE0 -> REVERSE SOLIDUS
'\xf7' # 0xE1 -> DIVISION SIGN
'S' # 0xE2 -> LATIN CAPITAL LETTER S
'T' # 0xE3 -> LATIN CAPITAL LETTER T
'U' # 0xE4 -> LATIN CAPITAL LETTER U
'V' # 0xE5 -> LATIN CAPITAL LETTER V
'W' # 0xE6 -> LATIN CAPITAL LETTER W
'X' # 0xE7 -> LATIN CAPITAL LETTER X
'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
'\xb2' # 0xEA -> SUPERSCRIPT TWO
'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
'0' # 0xF0 -> DIGIT ZERO
'1' # 0xF1 -> DIGIT ONE
'2' # 0xF2 -> DIGIT TWO
'3' # 0xF3 -> DIGIT THREE
'4' # 0xF4 -> DIGIT FOUR
'5' # 0xF5 -> DIGIT FIVE
'6' # 0xF6 -> DIGIT SIX
'7' # 0xF7 -> DIGIT SEVEN
'8' # 0xF8 -> DIGIT EIGHT
'9' # 0xF9 -> DIGIT NINE
'\xb3' # 0xFA -> SUPERSCRIPT THREE
'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| mit |
malelew/UCLA_Dining_Web_App | ENV/lib/python2.7/site-packages/pip/commands/completion.py | 435 | 1991 | from __future__ import absolute_import
import sys
from pip.basecommand import Command
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""
COMPLETION_SCRIPTS = {
'bash': """
_pip_completion()
{
COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
COMP_CWORD=$COMP_CWORD \\
PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""}
class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command to be used for command completion'
    hidden = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)
        opts = self.cmd_opts
        # One store_const option per supported shell; each sets 'shell'.
        for long_flag, short_flag, shell_name in (
                ('--bash', '-b', 'bash'),
                ('--zsh', '-z', 'zsh')):
            opts.add_option(
                long_flag, short_flag,
                action='store_const',
                const=shell_name,
                dest='shell',
                help='Emit completion code for %s' % shell_name)
        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell not in shells:
            # Unknown/missing shell: tell the user which flags are accepted.
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
            return
        script = COMPLETION_SCRIPTS.get(options.shell, '')
        print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
| mit |
grepme/CMPUT410Lab01 | virt_env/virt1/lib/python2.7/site-packages/PasteScript-1.7.5-py2.7.egg/paste/script/default_sysconfig.py | 6 | 1418 | # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
This module contains default sysconfig settings.
The command object is inserted into this module as a global variable
``paste_command``, and can be used inside functions.
"""
def add_custom_options(parser):
    """Hook for sysconfig files to extend the command's option parser.

    ``parser`` is an ``optparse.OptionParser`` instance; add any extra
    options to it in place.  The default implementation adds nothing.
    """
    return None
def default_config_filename(installer):
    """Suggest a configuration file/directory when none was given explicitly.

    Returning ``None`` expresses no preference; the first sysconfig module
    returning something else wins.  Implementations overriding this should
    honour ``installer.expect_config_directory`` as well.
    """
    preferred = installer.default_config_filename
    return preferred
def install_variables(installer):
    """Return extra template variables for later steps of the process
    (e.g. filling a configuration file).

    Results from all sysconfig files are merged; the default contributes
    nothing.
    """
    return dict()
def post_setup_hook(installer, config_file):
    """Final hook, invoked at the very end of ``paster setup-app``.

    Useful e.g. for registering the freshly set-up application somewhere
    global.  Does nothing by default.
    """
    return None
| apache-2.0 |
utopiaprince/micropython | tests/pyb/can.py | 25 | 4205 | from pyb import CAN
import pyb
# test we can correctly create by id or name
# Construct by numeric id or port name; invalid values must raise ValueError.
for bus in (-1, 0, 1, 2, 3, "YA", "YB", "YC"):
    try:
        CAN(bus, CAN.LOOPBACK)
        print("CAN", bus)
    except ValueError:
        print("ValueError", bus)

CAN.initfilterbanks(14)
can = CAN(1)
print(can)
can.init(CAN.LOOPBACK)
print(can)
print(can.any(0))

# Catch-all filter: accept every standard id into FIFO 0.
can.setfilter(0, CAN.MASK16, 0, (0, 0, 0, 0))

can.send('abcd', 123, timeout=5000)
print(can.any(0))
print(can.recv(0))

# NOTE(review): ids outside 0..0x7FF are sent below and the received frames
# are printed — presumably the id gets masked; confirm against expected output.
can.send('abcd', -1, timeout=5000)
print(can.recv(0))

can.send('abcd', 0x7FF + 1, timeout=5000)
print(can.recv(0))

# Test too long message: payloads over 8 bytes must be rejected.
try:
    can.send('abcdefghi', 0x7FF, timeout=5000)
except ValueError:
    print('passed')
else:
    print('failed')
del can

# Testing extended (29-bit) IDs: an id just above the standard range is legal.
can = CAN(1, CAN.LOOPBACK, extframe = True)
# Catch all filter
can.setfilter(0, CAN.MASK32, 0, (0, 0))
print(can)

try:
    can.send('abcde', 0x7FF + 1, timeout=5000)
except ValueError:
    print('failed')
else:
    r = can.recv(0)
    if r[0] == 0x7FF+1 and r[3] == b'abcde':
        print('passed')
    else:
        print('failed, wrong data received')
del can

# Test RxCallbacks: FIFO 0 accepts ids 1-4, FIFO 1 accepts ids 5-8.
can = CAN(1, CAN.LOOPBACK)
can.setfilter(0, CAN.LIST16, 0, (1, 2, 3, 4))
can.setfilter(1, CAN.LIST16, 1, (5, 6, 7, 8))
def cb0(bus, reason):
    """RX callback for FIFO 0: announce itself, then name the reason."""
    print('cb0')
    reason_names = {0: 'pending', 1: 'full', 2: 'overflow'}
    if reason in reason_names:
        print(reason_names[reason])
def cb1(bus, reason):
    """RX callback for FIFO 1: announce itself, then name the reason."""
    print('cb1')
    for code, label in ((0, 'pending'), (1, 'full'), (2, 'overflow')):
        if reason == code:
            print(label)
def cb0a(bus, reason):
    """Replacement FIFO-0 callback swapped in mid-test."""
    print('cb0a')
    labels = ('pending', 'full', 'overflow')
    if reason in (0, 1, 2):
        print(labels[reason])
def cb1a(bus, reason):
    """Replacement FIFO-1 callback swapped in mid-test."""
    print('cb1a')
    # The reason codes are mutually exclusive, so an elif chain is equivalent.
    if reason == 0:
        print('pending')
    elif reason == 1:
        print('full')
    elif reason == 2:
        print('overflow')
can.rxcallback(0, cb0)
can.rxcallback(1, cb1)

# Fill FIFO 0 to trigger pending/full, swap in cb0a, then overflow it;
# FIFO 1 gets the same treatment with cb1/cb1a.
can.send('11111111',1, timeout=5000)
can.send('22222222',2, timeout=5000)
can.send('33333333',3, timeout=5000)
can.rxcallback(0, cb0a)
can.send('44444444',4, timeout=5000)

can.send('55555555',5, timeout=5000)
can.send('66666666',6, timeout=5000)
can.send('77777777',7, timeout=5000)
can.rxcallback(1, cb1a)
can.send('88888888',8, timeout=5000)

# Drain both FIFOs (three frames each fit before overflow).
print(can.recv(0))
print(can.recv(0))
print(can.recv(0))
print(can.recv(1))
print(can.recv(1))
print(can.recv(1))

# After draining, new frames must fire the (replaced) callbacks again.
can.send('11111111',1, timeout=5000)
can.send('55555555',5, timeout=5000)

print(can.recv(0))
print(can.recv(1))

del can

# Testing asynchronous send (timeout=0 returns immediately).
can = CAN(1, CAN.LOOPBACK)
can.setfilter(0, CAN.MASK16, 0, (0, 0, 0, 0))

while can.any(0):
    can.recv(0)

can.send('abcde', 1, timeout=0)
print(can.any(0))
while not can.any(0):
    pass
print(can.recv(0))

# Queueing more frames than there are TX mailboxes must raise EBUSY (16).
try:
    can.send('abcde', 2, timeout=0)
    can.send('abcde', 3, timeout=0)
    can.send('abcde', 4, timeout=0)
    can.send('abcde', 5, timeout=0)
except OSError as e:
    if str(e) == '16':
        print('passed')
    else:
        print('failed')

pyb.delay(500)
while can.any(0):
    print(can.recv(0))

# Testing rtr (remote transmission request) filter flags on both buses.
bus1 = CAN(1, CAN.LOOPBACK)
bus2 = CAN(2, CAN.LOOPBACK, extframe = True)
while bus1.any(0):
    bus1.recv(0)
while bus2.any(0):
    bus2.recv(0)
bus1.setfilter(0, CAN.LIST16, 0, (1, 2, 3, 4))
bus1.setfilter(1, CAN.LIST16, 0, (5, 6, 7, 8), rtr=(True, True, True, True))
bus1.setfilter(2, CAN.MASK16, 0, (64, 64, 32, 32), rtr=(False, True))
bus2.setfilter(0, CAN.LIST32, 0, (1, 2), rtr=(True, True))
bus2.setfilter(1, CAN.LIST32, 0, (3, 4), rtr=(True, False))
bus2.setfilter(2, CAN.MASK32, 0, (16, 16), rtr=(False,))
bus2.setfilter(2, CAN.MASK32, 0, (32, 32), rtr=(True,))

# RTR frames matching an rtr=True filter are received; others are dropped.
bus1.send('',1,rtr=True)
print(bus1.any(0))
bus1.send('',5,rtr=True)
print(bus1.recv(0))
bus1.send('',6,rtr=True)
print(bus1.recv(0))
bus1.send('',7,rtr=True)
print(bus1.recv(0))
bus1.send('',16,rtr=True)
print(bus1.any(0))
bus1.send('',32,rtr=True)
print(bus1.recv(0))
bus2.send('',1,rtr=True)
print(bus2.recv(0))
bus2.send('',2,rtr=True)
print(bus2.recv(0))
bus2.send('',3,rtr=True)
print(bus2.recv(0))
bus2.send('',4,rtr=True)
print(bus2.any(0))
| mit |
abartlet/samba | buildtools/wafsamba/tests/__init__.py | 47 | 1184 | # Copyright (C) 2012 Jelmer Vernooij <jelmer@samba.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Tests for wafsamba."""
from unittest import (
TestCase,
TestLoader,
)
def test_suite():
    """Build a suite containing all wafsamba test modules."""
    loader = TestLoader()
    result = loader.suiteClass()
    module_names = [
        'wafsamba.tests.test_%s' % name
        for name in ('abi', 'bundled', 'utils')
    ]
    result.addTests(loader.loadTestsFromNames(module_names))
    return result
| gpl-3.0 |
ferabra/edx-platform | common/lib/xmodule/xmodule/tests/test_stringify.py | 187 | 1256 | """
Tests stringify functions used in xmodule html
"""
from nose.tools import assert_equals # pylint: disable=no-name-in-module
from lxml import etree
from xmodule.stringify import stringify_children
def test_stringify():
    # stringify_children must return exactly the inner markup of the root tag.
    inner = 'Hi <div x="foo">there <span>Bruce</span><b>!</b></div>'
    document = '<html a="b" foo="bar">%s</html>' % inner
    root = etree.fromstring(document)
    assert_equals(stringify_children(root), inner)
def test_stringify_again():
    # NOTE(review): this first literal is immediately shadowed by the
    # simplified literal below; only the second one is actually parsed.
    html = r"""<html name="Voltage Source Answer" >A voltage source is non-linear!
<div align="center">
<img src="/static/images/circuits/voltage-source.png"/>
\(V=V_C\)
</div>
But it is <a href="http://mathworld.wolfram.com/AffineFunction.html">affine</a>,
which means linear except for an offset.
</html>
"""
    html = """<html>A voltage source is non-linear!
<div align="center">
</div>
But it is <a href="http://mathworld.wolfram.com/AffineFunction.html">affine</a>,
which means linear except for an offset.
</html>
"""
    xml = etree.fromstring(html)
    out = stringify_children(xml)
    print "output:"
    print out
    # Tracking strange content repeating bug
    # Should appear once
    assert_equals(out.count("But it is "), 1)
| agpl-3.0 |
mtp401/airflow | setup.py | 1 | 5424 | from setuptools import setup, find_packages, Command
from setuptools.command.test import test as TestCommand
import os
import sys
# Kept manually in sync with airflow.__version__
version = '1.7.0'
class Tox(TestCommand):
    """``python setup.py test`` runner that delegates to tox."""

    user_options = [('tox-args=', None, "Arguments to pass to tox")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.tox_args = ''

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported lazily so tox is only required when tests actually run.
        import tox
        sys.exit(tox.cmdline(args=self.tox_args.split()))
class CleanCommand(Command):
    """Custom clean command to tidy up the project root.

    Uses the standard library (``glob``/``shutil``) instead of shelling out
    to ``rm -vrf``, so it also works on platforms without a POSIX shell.
    """
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Local imports keep setup.py's import-time dependencies minimal.
        import glob
        import shutil

        # Same targets the old `rm -vrf` shell command removed.
        for pattern in ('./build', './dist', './*.pyc', './*.tgz',
                        './*.egg-info'):
            for path in glob.glob(pattern):
                print('removing %s' % path)
                if os.path.isdir(path) and not os.path.islink(path):
                    shutil.rmtree(path)
                else:
                    os.remove(path)
# ---------------------------------------------------------------------------
# Optional dependency groups ("extras"): install e.g. `pip install airflow[hive]`.
# NOTE: this list used to be assigned to the bare name `async`, which became a
# reserved keyword in Python 3.7+ and made this file a SyntaxError there.  The
# extras key exposed to users is still the string 'async'.
# ---------------------------------------------------------------------------
async_packages = [
    'greenlet>=0.4.9',
    'eventlet>= 0.9.7',
    'gevent>=0.13'
]
celery = [
    'celery>=3.1.17',
    'flower>=0.7.3'
]
crypto = ['cryptography>=0.9.3']
doc = [
    'sphinx>=1.2.3',
    'sphinx-argparse>=0.1.13',
    'sphinx-rtd-theme>=0.1.6',
    'Sphinx-PyPI-upload>=0.2.1'
]
docker = ['docker-py>=1.6.0']
druid = ['pydruid>=0.2.1']
gcloud = [
    'gcloud>=0.11.0',
]
gcp_api = [
    'httplib2',
    'google-api-python-client<=1.4.2',
    'oauth2client>=1.5.2, <2.0.0',
    'PyOpenSSL',
]
hdfs = ['snakebite>=2.7.8']
webhdfs = ['hdfs[dataframe,avro,kerberos]>=2.0.4']
hive = [
    'hive-thrift-py>=0.0.1',
    'pyhive>=0.1.3',
    'impyla>=0.13.3',
    'unicodecsv>=0.14.1'
]
jdbc = ['jaydebeapi>=0.2.0']
mssql = ['pymssql>=2.1.1', 'unicodecsv>=0.14.1']
mysql = ['mysqlclient>=1.3.6']
rabbitmq = ['librabbitmq>=1.6.1']
oracle = ['cx_Oracle>=5.1.2']
postgres = ['psycopg2>=2.6']
s3 = [
    'boto>=2.36.0',
    'filechunkio>=1.6',
]
samba = ['pysmbclient>=0.1.3']
slack = ['slackclient>=1.0.0']
statsd = ['statsd>=3.0.1, <4.0']
vertica = ['vertica-python>=0.5.1']
ldap = ['ldap3>=0.9.9.1']
kerberos = ['pykerberos>=1.1.8',
            'thrift_sasl>=0.2.0',
            'snakebite[kerberos]>=2.7.8']
password = [
    'bcrypt>=2.0.0',
    'flask-bcrypt>=0.7.1',
]
github_enterprise = ['Flask-OAuthlib>=0.9.1']
qds = ['qds-sdk>=1.9.0']

# Convenience aggregates built from the groups above.
all_dbs = postgres + mysql + hive + mssql + hdfs + vertica
devel = ['lxml>=3.3.4', 'nose', 'nose-parameterized', 'mock']
devel_minreq = devel + mysql + doc + password + s3
devel_hadoop = devel_minreq + hive + hdfs + webhdfs + kerberos
devel_all = devel + all_dbs + doc + samba + s3 + slack + crypto + oracle + docker

setup(
    name='airflow',
    description='Programmatically author, schedule and monitor data pipelines',
    license='Apache License 2.0',
    version=version,
    packages=find_packages(),
    package_data={'': ['airflow/alembic.ini']},
    include_package_data=True,
    zip_safe=False,
    scripts=['airflow/bin/airflow'],
    install_requires=[
        'alembic>=0.8.3, <0.9',
        'babel>=1.3, <2.0',
        'chartkick>=0.4.2, < 0.5',
        'croniter>=0.3.8, <0.4',
        'dill>=0.2.2, <0.3',
        'python-daemon>=2.1.1, <2.2',
        'flask>=0.10.1, <0.11',
        'flask-admin>=1.4.0, <2.0.0',
        'flask-cache>=0.13.1, <0.14',
        'flask-login==0.2.11',
        'future>=0.15.0, <0.16',
        'funcsigs>=0.4, <1',
        'gunicorn>=19.3.0, <19.4.0',  # 19.4.? seemed to have issues
        'jinja2>=2.7.3, <3.0',
        'markdown>=2.5.2, <3.0',
        'pandas>=0.15.2, <1.0.0',
        'pygments>=2.0.1, <3.0',
        'python-dateutil>=2.3, <3',
        'requests>=2.5.1, <3',
        'setproctitle>=1.1.8, <2',
        'sqlalchemy>=0.9.8',
        'thrift>=0.9.2, <0.10',
        'Flask-WTF==0.12'
    ],
    extras_require={
        'all': devel_all,
        'all_dbs': all_dbs,
        'async': async_packages,
        'celery': celery,
        'crypto': crypto,
        'devel': devel_minreq,
        'devel_hadoop': devel_hadoop,
        'doc': doc,
        'docker': docker,
        'druid': druid,
        'gcloud': gcloud,
        'gcp_api': gcp_api,
        'hdfs': hdfs,
        'hive': hive,
        'jdbc': jdbc,
        'mssql': mssql,
        'mysql': mysql,
        'oracle': oracle,
        'postgres': postgres,
        'rabbitmq': rabbitmq,
        's3': s3,
        'samba': samba,
        'slack': slack,
        'statsd': statsd,
        'vertica': vertica,
        'ldap': ldap,
        'webhdfs': webhdfs,
        'kerberos': kerberos,
        'password': password,
        'github_enterprise': github_enterprise,
        'qds': qds
    },
    # NOTE(review): `classifiers` is a set literal here; setuptools expects a
    # list.  Left as-is to keep this change limited to the keyword fix.
    classifiers={
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Topic :: System :: Monitoring',
    },
    author='Maxime Beauchemin',
    author_email='maximebeauchemin@gmail.com',
    url='https://github.com/airbnb/airflow',
    download_url=(
        'https://github.com/airbnb/airflow/tarball/' + version),
    cmdclass={'test': Tox,
              'extra_clean': CleanCommand,
              },
)
| apache-2.0 |
openprocurement/restkit | tests/010-test-proxies.py | 4 | 3207 | # -*- coding: utf-8 -*-
#
# This file is part of restkit released under the MIT license.
# See the NOTICE for more information.
import t
from _server_test import HOST, PORT
from restkit.contrib import wsgi_proxy
root_uri = "http://%s:%s" % (HOST, PORT)
def with_webob(func):
    """Decorator: build a fresh WebOb GET request aimed at the test server
    and pass it to the wrapped test as its only argument."""
    def wrapper(*args, **kwargs):
        from webob import Request
        req = Request.blank('/')
        req.environ['SERVER_NAME'] = '%s:%s' % (HOST, PORT)
        return func(req)
    # Python 2 spelling; preserves the test's name for nose discovery.
    wrapper.func_name = func.func_name
    return wrapper
@with_webob
def test_001(req):
    # GET through the proxy; the backend echoes the request path.
    req.path_info = '/query'
    proxy = wsgi_proxy.Proxy()
    resp = req.get_response(proxy)
    body = resp.body
    assert 'path: /query' in body, str(resp)
@with_webob
def test_002(req):
    # POST is forwarded when allowed, refused with 403 otherwise.
    req.path_info = '/json'
    req.environ['CONTENT_TYPE'] = 'application/json'
    req.method = 'POST'
    req.body = 'test post'
    proxy = wsgi_proxy.Proxy(allowed_methods=['POST'])
    resp = req.get_response(proxy)
    body = resp.body
    assert resp.content_length == 9, str(resp)
    proxy = wsgi_proxy.Proxy(allowed_methods=['GET'])
    resp = req.get_response(proxy)
    assert resp.status.startswith('403'), resp.status
@with_webob
def test_003(req):
    # PUT mirrors the POST case of test_002.
    req.path_info = '/json'
    req.environ['CONTENT_TYPE'] = 'application/json'
    req.method = 'PUT'
    req.body = 'test post'
    proxy = wsgi_proxy.Proxy(allowed_methods=['PUT'])
    resp = req.get_response(proxy)
    body = resp.body
    assert resp.content_length == 9, str(resp)
    proxy = wsgi_proxy.Proxy(allowed_methods=['GET'])
    resp = req.get_response(proxy)
    assert resp.status.startswith('403'), resp.status
@with_webob
def test_004(req):
    # HEAD is forwarded and returns the backend's content type.
    req.path_info = '/ok'
    req.method = 'HEAD'
    proxy = wsgi_proxy.Proxy(allowed_methods=['HEAD'])
    resp = req.get_response(proxy)
    body = resp.body
    assert resp.content_type == 'text/plain', str(resp)
@with_webob
def test_005(req):
    # DELETE is forwarded when allowed, refused with 403 otherwise.
    req.path_info = '/delete'
    req.method = 'DELETE'
    proxy = wsgi_proxy.Proxy(allowed_methods=['DELETE'])
    resp = req.get_response(proxy)
    body = resp.body
    assert resp.content_type == 'text/plain', str(resp)
    proxy = wsgi_proxy.Proxy(allowed_methods=['GET'])
    resp = req.get_response(proxy)
    assert resp.status.startswith('403'), resp.status
@with_webob
def test_006(req):
    # A redirect from the backend must keep pointing at the proxied host.
    req.path_info = '/redirect'
    req.method = 'GET'
    proxy = wsgi_proxy.Proxy(allowed_methods=['GET'])
    resp = req.get_response(proxy)
    body = resp.body
    assert resp.location == '%s/complete_redirect' % root_uri, str(resp)
@with_webob
def test_007(req):
    # Like test_006, but the backend redirects via an absolute URL.
    req.path_info = '/redirect_to_url'
    req.method = 'GET'
    proxy = wsgi_proxy.Proxy(allowed_methods=['GET'])
    resp = req.get_response(proxy)
    body = resp.body
    print resp.location
    assert resp.location == '%s/complete_redirect' % root_uri, str(resp)
@with_webob
def test_008(req):
    # With strip_script_name=True, the SCRIPT_NAME prefix is re-added to
    # redirect locations.
    req.path_info = '/redirect_to_url'
    req.script_name = '/name'
    req.method = 'GET'
    proxy = wsgi_proxy.Proxy(allowed_methods=['GET'], strip_script_name=True)
    resp = req.get_response(proxy)
    body = resp.body
    assert resp.location == '%s/name/complete_redirect' % root_uri, str(resp)
| apache-2.0 |
fin/froide | froide/foirequest/south_migrations/0013_auto__add_field_foimessage_status.py | 6 | 16723 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from froide.helper.auth_migration_util import USER_DB_NAME
APP_MODEL, APP_MODEL_NAME = 'account.User', 'account.user'
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply: add the nullable ``status`` column to FoiMessage."""
    # Adding field 'FoiMessage.status'
    db.add_column('foirequest_foimessage', 'status', self.gf('django.db.models.fields.CharField')(default=None, max_length=50, null=True, blank=True), keep_default=False)
def backwards(self, orm):
    """Revert: drop the ``status`` column from FoiMessage again."""
    # Deleting field 'FoiMessage.status'
    db.delete_column('foirequest_foimessage', 'status')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
APP_MODEL_NAME: {
'Meta': {'object_name': 'User', 'db_table': "'%s'" % USER_DB_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'foirequest.foiattachment': {
'Meta': {'ordering': "('name',)", 'object_name': 'FoiAttachment'},
'belongs_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foirequest.FoiMessage']", 'null': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'filetype': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'foirequest.foievent': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'FoiEvent'},
'context_json': ('django.db.models.fields.TextField', [], {}),
'event_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publicbody.PublicBody']", 'null': 'True', 'blank': 'True'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foirequest.FoiRequest']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % APP_MODEL, 'null': 'True', 'blank': 'True'})
},
'foirequest.foimessage': {
'Meta': {'ordering': "('timestamp',)", 'object_name': 'FoiMessage'},
'html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_postal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_response': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'original': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'plaintext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'recipient': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'recipient_public_body': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'received_messages'", 'null': 'True', 'to': "orm['publicbody.PublicBody']"}),
'redacted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foirequest.FoiRequest']"}),
'sender_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'sender_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'sender_public_body': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'send_messages'", 'null': 'True', 'to': "orm['publicbody.PublicBody']"}),
'sender_user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % APP_MODEL, 'null': 'True', 'blank': 'True'}),
'sent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'})
},
'foirequest.foirequest': {
'Meta': {'ordering': "('last_message',)", 'object_name': 'FoiRequest'},
'checked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'costs': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'due_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'first_message': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_foi': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'last_message': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publicbody.FoiLaw']", 'null': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publicbody.PublicBody']", 'null': 'True', 'blank': 'True'}),
'refusal_reason': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'resolution': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resolved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'secret_address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']", 'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % APP_MODEL, 'null': 'True'}),
'visibility': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'foirequest.publicbodysuggestion': {
'Meta': {'ordering': "('timestamp',)", 'object_name': 'PublicBodySuggestion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publicbody.PublicBody']"}),
'reason': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foirequest.FoiRequest']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % APP_MODEL, 'null': 'True'})
},
'publicbody.foilaw': {
'Meta': {'object_name': 'FoiLaw'},
'combined': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['publicbody.FoiLaw']", 'symmetrical': 'False', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'letter_end': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'letter_start': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'long_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'max_response_time': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_response_time_unit': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'meta': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'priority': ('django.db.models.fields.SmallIntegerField', [], {'default': '3'}),
'refusal_reasons': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['sites.Site']", 'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'publicbody.publicbody': {
'Meta': {'object_name': 'PublicBody'},
'_created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'public_body_creators'", 'null': 'True', 'to': "orm['%s']" % APP_MODEL}),
'_updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'public_body_updaters'", 'null': 'True', 'to': "orm['%s']" % APP_MODEL}),
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'classification': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'classification_slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'contact': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'depth': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'geography': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'laws': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['publicbody.FoiLaw']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'number_of_requests': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'children'", 'null': 'True', 'blank': 'True', 'to': "orm['publicbody.PublicBody']"}),
'root': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'descendants'", 'null': 'True', 'blank': 'True', 'to': "orm['publicbody.PublicBody']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['sites.Site']", 'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'topic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['publicbody.PublicBodyTopic']", 'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'website_dump': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'publicbody.publicbodytopic': {
'Meta': {'object_name': 'PublicBodyTopic'},
'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['foirequest']
| mit |
yashLadha/coala | tests/coalaDeleteOrigTest.py | 27 | 1908 | import tempfile
import unittest
import os
import re
from coalib import coala_delete_orig
from coala_utils.ContextManagers import retrieve_stderr
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
class coalaDeleteOrigTest(unittest.TestCase):
    """Tests for the coala_delete_orig entry point."""

    def setUp(self):
        # Minimal section pointing at a (fake) config file.
        self.section = Section('default')
        self.section.append(Setting('config', '/path/to/file'))

    @unittest.mock.patch('os.getcwd')
    def test_nonexistent_coafile(self, cwd_mock):
        # Without a resolvable working directory no coafile can be found.
        cwd_mock.return_value = None
        self.assertEqual(coala_delete_orig.main(), 255)

    @unittest.mock.patch('coalib.parsing.Globbing.glob')
    def test_remove_exception(self, glob_mock):
        def run_and_expect_warning():
            with retrieve_stderr() as stderr:
                exit_code = coala_delete_orig.main(section=self.section)
                self.assertEqual(exit_code, 0)
                self.assertIn("Couldn't delete", stderr.getvalue())

        # A path that does not exist cannot be removed.
        glob_mock.return_value = ['non_existent_file']
        run_and_expect_warning()

        # Neither can a directory (os.remove fails on it).
        with tempfile.TemporaryDirectory() as dirname:
            glob_mock.return_value = [dirname]
            run_and_expect_warning()

    def test_normal_running(self):
        with tempfile.TemporaryDirectory() as project_dir:
            fd, orig_path = tempfile.mkstemp(suffix='.orig', dir=project_dir)
            os.close(fd)
            section = Section('')
            section.append(Setting('project_dir', re.escape(project_dir)))
            self.assertEqual(coala_delete_orig.main(section=section), 0)
            self.assertFalse(os.path.isfile(orig_path))
| agpl-3.0 |
D-Pointer/imperium-server | python-server/udp_handler.py | 1 | 3390 |
from twisted.internet.protocol import DatagramProtocol
import socket
import struct
import datetime
import udp_packet
class UdpHandler (DatagramProtocol):
    """Relays low-latency UDP game traffic between two paired players.

    Each handler owns one UDP socket bound to an ephemeral port. PING
    packets are answered directly with a PONG echoing the sender's
    timestamp; DATA packets are forwarded verbatim to the opponent's
    handler. Traffic counters are maintained in the shared,
    lock-protected statistics object.
    """
    # wire size of the leading packet-type byte ('!B')
    PACKET_TYPE_SIZE = struct.calcsize( '!B' )
    def __init__(self, statistics, logger, reactor):
        self.statistics = statistics
        self.logger = logger
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Make the port non-blocking and start it listening on an ephemeral port
        self.socket.setblocking(False)
        self.socket.bind( ('0.0.0.0', 0) )
        # hand the file descriptor to the reactor and register us as the handler
        self.port = reactor.adoptDatagramPort( self.socket.fileno(), socket.AF_INET, self )
        # the address of the own player (learned from the first datagram received)
        self.address = None
        # the opponent UdpHandler (wired up externally; may still be unset)
        self.opponent = None
    def startProtocol(self):
        """Called by Twisted once the transport is ready."""
        self.logger.info( "start UDP protocol" )
    def datagramReceived(self, data, addr):
        """Dispatch one received datagram: answer PINGs, forward DATA."""
        self.logger.debug("received %d bytes from %s", len(data), addr )
        # save the sender address if this is the first datagram we see
        if self.address == None:
            self.address = addr
        offset = 0
        packetType, = struct.unpack_from( '!B', data, 0 )
        offset += UdpHandler.PACKET_TYPE_SIZE
        if packetType == udp_packet.UdpPacket.PING:
            # echo the timestamp back so the client can measure round-trip time
            timestamp, = struct.unpack_from( '!I', data, offset )
            self.logger.debug( "sending pong to %s:%d for timestamp %d", addr[0], addr[1], timestamp )
            response = struct.pack( '!BI', udp_packet.UdpPacket.PONG, timestamp )
            self.transport.write( response, addr )
            # update statistics
            self.statistics.lock()
            self.statistics.udpBytesReceived += len(data)
            self.statistics.udpBytesSent += len(response)
            self.statistics.udpLastReceived = datetime.datetime.now()
            self.statistics.udpLastSent = self.statistics.udpLastReceived
            self.statistics.release()
        elif packetType == udp_packet.UdpPacket.DATA:
            # precautions: the opponent may not have sent anything yet, so
            # its address can still be unknown and the packet is dropped
            if not self.opponent.address:
                self.logger.warn( "no opponent UDP handler yet" )
            else:
                self.opponent.transport.write( data, self.opponent.address )
            # update statistics
            self.statistics.lock()
            self.statistics.udpPacketsReceived += 1
            self.statistics.udpBytesReceived += len(data)
            self.statistics.udpLastReceived = datetime.datetime.now()
            self.statistics.release()
            # opponent stats
            # NOTE(review): these "sent" counters are bumped even when the
            # packet was dropped above because the opponent address was
            # unknown -- confirm this is intended
            self.opponent.statistics.lock()
            self.opponent.statistics.udpPacketsSent += 1
            self.opponent.statistics.udpBytesSent += len(data)
            self.opponent.statistics.udpLastSent = self.statistics.udpLastReceived
            self.opponent.statistics.release()
    def cleanup (self):
        """Close the socket and drop all references held by this handler."""
        self.logger.debug( "cleaning up UDP connection to %s:%d", self.address[0], self.address[1] )
        if self.socket:
            self.socket.close()
            self.socket = None
        self.opponent = None
        self.port = None
    def getLocalPort(self):
        """Return the local port number the UDP socket is bound to."""
        return self.socket.getsockname()[ 1 ]
    def sendStartPackets (self):
        """Not implemented yet."""
        self.logger.debug("TODO: send to us and opponent" ) | gpl-2.0 |
yohanesyuen/mal | rpython/env.py | 50 | 1432 | from mal_types import MalType, MalSym, MalList, throw_str
# Environment
class Env():
    """Lexically scoped symbol environment for the mal interpreter.

    Maps symbol names to MalType values and chains to an optional outer
    environment for lookups. This is RPython code: the isinstance asserts
    double as type annotations for the translator, so keep them in place.
    """
    def __init__(self, outer=None, binds=None, exprs=None):
        # data: symbol name -> bound MalType for this scope
        self.data = {}
        self.outer = outer or None
        if binds:
            assert isinstance(binds, MalList) and isinstance(exprs, MalList)
            for i in range(len(binds)):
                bind = binds[i]
                if not isinstance(bind, MalSym):
                    throw_str("env bind value is not a symbol")
                # "&" marks a variadic parameter: bind the following symbol
                # to all remaining expressions and stop
                if bind.value == u"&":
                    bind = binds[i+1]
                    if not isinstance(bind, MalSym):
                        throw_str("env bind value is not a symbol")
                    self.data[bind.value] = exprs.slice(i)
                    break
                else:
                    self.data[bind.value] = exprs[i]
    def find(self, key):
        # Return the innermost Env that defines key, or None.
        assert isinstance(key, MalSym)
        if key.value in self.data: return self
        elif self.outer: return self.outer.find(key)
        else: return None
    def set(self, key, value):
        # Bind key to value in this scope; returns the value.
        assert isinstance(key, MalSym)
        assert isinstance(value, MalType)
        self.data[key.value] = value
        return value
    def get(self, key):
        # Look key up through the scope chain; reports an error via
        # throw_str when the symbol is unbound.
        assert isinstance(key, MalSym)
        env = self.find(key)
        if not env: throw_str("'" + str(key.value) + "' not found")
        return env.data[key.value]
| mpl-2.0 |
mrphrazer/miasm | miasm/loader/new_cstruct.py | 4 | 8816 | #! /usr/bin/env python
from __future__ import print_function
import re
import struct
from miasm.core.utils import force_bytes
from future.utils import PY3, viewitems, with_metaclass
# Conversion tables between symbolic field-type names and struct formats.
# NOTE: type2realtype holds heterogeneous values -- bare struct chars
# ('B', 'h', ...) map to their bit width (an int), while the symbolic names
# ('u32', 's16', 'd', 'f', 'q', 'ptr') map to struct format characters.
type2realtype = {}
size2type = {}      # bit width -> unsigned struct format char
size2type_s = {}    # bit width -> signed struct format char
# unsigned struct chars: record bit width and the width -> char table
for t in 'B', 'H', 'I', 'Q':
    s = struct.calcsize(t)
    type2realtype[t] = s * 8
    size2type[s * 8] = t
# signed struct chars: same, into the signed width -> char table
for t in 'b', 'h', 'i', 'q':
    s = struct.calcsize(t)
    type2realtype[t] = s * 8
    size2type_s[s * 8] = t
type2realtype['u08'] = size2type[8]
type2realtype['u16'] = size2type[16]
type2realtype['u32'] = size2type[32]
type2realtype['u64'] = size2type[64]
type2realtype['s08'] = size2type_s[8]
type2realtype['s16'] = size2type_s[16]
type2realtype['s32'] = size2type_s[32]
type2realtype['s64'] = size2type_s[64]
type2realtype['d'] = 'd'
type2realtype['f'] = 'f'
# NOTE(review): this overwrites the bit width registered for 'q' by the
# signed loop above with the raw format char -- looks intentional, confirm.
type2realtype['q'] = 'q'
type2realtype['ptr'] = 'ptr'
# 0 -> little endian, 1 -> big endian (struct byte-order prefixes)
sex_types = {0: '<', 1: '>'}
def fix_size(fields, wsize):
    """Return *fields* with every symbolic type resolved for *wsize*.

    Fixed-size byte strings (e.g. "16s") pass through untouched, "ptr"
    becomes the unsigned integer type matching the word size, and every
    other name is looked up in type2realtype. Unknown names raise
    ValueError.
    """
    resolved = []
    for fname, ftype in fields:
        if ftype.endswith("s"):
            # raw byte-string fields are already valid struct formats
            pass
        elif ftype == "ptr":
            ftype = size2type[wsize]
        elif ftype in type2realtype:
            ftype = type2realtype[ftype]
        else:
            raise ValueError("unknown Cstruct type", ftype)
        resolved.append((fname, ftype))
    return resolved
def real_fmt(fmt, wsize):
    """Translate a symbolic field format into a struct format string.

    "ptr" resolves to the unsigned integer type of the native word size
    *wsize*, names known to type2realtype are translated, and anything
    else (e.g. "12s") is returned unchanged.
    """
    if fmt == "ptr":
        return size2type[wsize]
    if fmt in type2realtype:
        return type2realtype[fmt]
    return fmt
# Global registry of every CStruct subclass (name -> class), populated by
# Cstruct_Metaclass.__new__ so fields can reference sibling structs by name.
all_cstructs = {}
class Cstruct_Metaclass(type):
    """Metaclass for CStruct subclasses.

    For every ``(name, fmt[, count])`` entry of the class' ``_fields`` it
    synthesizes a property ``<name>`` backed by the instance attribute
    ``<name>_value`` (honoring optional user-supplied ``get_<name>``/
    ``set_<name>``/``del_<name>`` accessors), and registers the class in
    the global ``all_cstructs`` table so other structures can reference
    it by name.
    """

    # suffix of the instance attribute storing each field's raw value
    field_suffix = "_value"

    def __new__(cls, name, bases, dct):
        for fields in dct['_fields']:
            fname = fields[0]
            # these two names are reserved for linking nested structures
            if fname in ['parent', 'parent_head']:
                raise ValueError('field name will confuse internal structs',
                                 repr(fname))
            # fname is captured as a default argument so every property
            # closes over its own field name
            dct[fname] = property(dct.pop("get_" + fname,
                                          lambda self, fname=fname: getattr(
                                              self, fname + self.__class__.field_suffix)),
                                  dct.pop("set_" + fname,
                                          lambda self, v, fname=fname: setattr(
                                              self, fname + self.__class__.field_suffix, v)),
                                  dct.pop("del_" + fname, None))
        o = super(Cstruct_Metaclass, cls).__new__(cls, name, bases, dct)
        if name != "CStruct":
            all_cstructs[name] = o
        return o

    def unpack_l(cls, s, off=0, parent_head=None, _sex=None, _wsize=None):
        """Unpack one structure from the bytes *s* starting at *off*.

        Returns ``(instance, length_consumed)``. Endianness ``_sex`` and
        word size ``_wsize`` are inherited from *parent_head* when not
        given, defaulting to little endian / 32 bit.
        """
        if _sex is None and _wsize is None:
            # inherit sex and word size from the parent structure
            if parent_head is not None:
                _sex = parent_head._sex
                _wsize = parent_head._wsize
            else:
                _sex = 0
                _wsize = 32
        c = cls(_sex=_sex, _wsize=_wsize)
        if parent_head is None:
            parent_head = c
        c.parent_head = parent_head
        of1 = off
        for field in c._fields:
            cpt = None
            if len(field) == 2:
                fname, ffmt = field
            elif len(field) == 3:
                fname, ffmt, cpt = field
            if ffmt in type2realtype or (isinstance(ffmt, str) and re.match(r'\d+s', ffmt)):
                # basic types, optionally repeated cpt(c) times
                if cpt:
                    value = []
                    i = 0
                    while i < cpt(c):
                        fmt = real_fmt(ffmt, _wsize)
                        of2 = of1 + struct.calcsize(fmt)
                        value.append(struct.unpack(c.sex + fmt, s[of1:of2])[0])
                        of1 = of2
                        i += 1
                else:
                    fmt = real_fmt(ffmt, _wsize)
                    of2 = of1 + struct.calcsize(fmt)
                    # of2 is an exclusive end offset and may legitimately
                    # equal len(s) when the field ends exactly at the end
                    # of the buffer
                    if not (0 <= of1 < len(s) and 0 <= of2 <= len(s)):
                        raise RuntimeError("not enough data")
                    value = struct.unpack(c.sex + fmt, s[of1:of2])[0]
            elif ffmt == "sz":  # null terminated special case
                of2 = s.find(b'\x00', of1)
                if of2 == -1:
                    raise ValueError('no null char in string!')
                of2 += 1
                value = s[of1:of2 - 1]
            elif ffmt in all_cstructs:
                of2 = of1
                # nested structures, optionally repeated cpt(c) times
                if cpt:
                    value = []
                    i = 0
                    while i < cpt(c):
                        v, l = all_cstructs[ffmt].unpack_l(
                            s, of1, parent_head, _sex, _wsize)
                        v.parent = c
                        value.append(v)
                        of2 = of1 + l
                        of1 = of2
                        i += 1
                else:
                    value, l = all_cstructs[ffmt].unpack_l(
                        s, of1, parent_head, _sex, _wsize)
                    value.parent = c
                    of2 = of1 + l
            elif isinstance(ffmt, tuple):
                # custom (getter, setter) pair: getter returns (value, next offset)
                f_get, f_set = ffmt
                value, of2 = f_get(c, s, of1)
            else:
                raise ValueError('unknown class', ffmt)
            of1 = of2
            setattr(c, fname + c.__class__.field_suffix, value)
        return c, of2 - off

    def unpack(cls, s, off=0, parent_head=None, _sex=None, _wsize=None):
        """Like unpack_l() but return only the unpacked instance."""
        c, l = cls.unpack_l(s, off=off,
                            parent_head=parent_head, _sex=_sex, _wsize=_wsize)
        return c
class CStruct(with_metaclass(Cstruct_Metaclass, object)):
    """Base class for declaratively described binary structures.

    Subclasses describe their layout in ``_fields`` as
    ``(name, fmt[, count])`` tuples; the metaclass turns each entry into a
    property and registers the class in ``all_cstructs``. Instances are
    built field by field (keyword arguments) and serialized with pack(),
    or parsed from bytes with the class-level unpack()/unpack_l().
    """

    # subclasses may force an explicit struct byte-order prefix here,
    # overriding the _sex derived one
    _packformat = ""
    # layout description, filled in by subclasses
    _fields = []

    def __init__(self, parent_head=None, _sex=None, _wsize=None, **kargs):
        self.parent_head = parent_head
        self._size = None
        kargs = dict(kargs)
        # if sex or word size is not given, inherit them from the parent
        if _sex is None and _wsize is None:
            if parent_head:
                _sex = parent_head._sex
                _wsize = parent_head._wsize
            else:
                # default sex & size: little endian, 32 bit
                _sex = 0
                _wsize = 32
        # _sex is 0 or 1, sex is the struct prefix '<' or '>'
        self._sex = _sex
        self._wsize = _wsize
        if self._packformat:
            self.sex = self._packformat
        else:
            self.sex = sex_types[_sex]
        # initialize every field's backing attribute, then apply overrides
        for f in self._fields:
            setattr(self, f[0] + self.__class__.field_suffix, None)
        if kargs:
            for k, v in viewitems(kargs):
                self.__dict__[k + self.__class__.field_suffix] = v

    def pack(self):
        """Serialize the structure to bytes, field by field."""
        out = b''
        for field in self._fields:
            cpt = None
            if len(field) == 2:
                fname, ffmt = field
            elif len(field) == 3:
                fname, ffmt, cpt = field
            value = getattr(self, fname + self.__class__.field_suffix)
            if ffmt in type2realtype or (isinstance(ffmt, str) and re.match(r'\d+s', ffmt)):
                # basic types, optionally repeated
                fmt = real_fmt(ffmt, self._wsize)
                if cpt is None:
                    if value is None:
                        # unset field: emit zero padding of the right size
                        o = struct.calcsize(fmt) * b"\x00"
                    elif ffmt.endswith('s'):
                        new_value = force_bytes(value)
                        o = struct.pack(self.sex + fmt, new_value)
                    else:
                        o = struct.pack(self.sex + fmt, value)
                else:
                    o = b""
                    for v in value:
                        # test each element individually so unset entries
                        # are zero-padded like unset scalar fields
                        if v is None:
                            o += struct.calcsize(fmt) * b"\x00"
                        else:
                            o += struct.pack(self.sex + fmt, v)
            elif ffmt == "sz":  # null terminated special case
                o = value + b'\x00'
            elif ffmt in all_cstructs:
                # nested structures
                if cpt is None:
                    o = bytes(value)
                else:
                    o = b""
                    for v in value:
                        o += bytes(v)
            elif isinstance(ffmt, tuple):
                # custom (getter, setter) pair: setter produces the bytes
                f_get, f_set = ffmt
                o = f_set(self, value)
            else:
                raise ValueError('unknown class', ffmt)
            out += o
        return out

    def __bytes__(self):
        return self.pack()

    def __str__(self):
        # Python 3: human readable repr; Python 2: the raw packed bytes
        if PY3:
            return repr(self)
        return self.__bytes__()

    def __len__(self):
        return len(self.pack())

    def __repr__(self):
        return "<%s=%s>" % (self.__class__.__name__, "/".join(
            repr(getattr(self, x[0])) for x in self._fields)
        )

    def __getitem__(self, item):  # to work with format strings
        return getattr(self, item)
| gpl-2.0 |
Spleen64/Sick-Beard | lib/requests/packages/chardet2/langcyrillicmodel.py | 63 | 17764 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
win1251_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
latin5_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
macCyrillic_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
IBM855_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
IBM866_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest sequences: 0.1237%
# negative sequences: 0.0009%
RussianLangModel = ( \
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)
# Model descriptors consumed by chardet's single-byte charset prober. All
# Cyrillic encodings share the same Russian bigram matrix (RussianLangModel);
# only the byte-value -> frequency-order table differs per charset.
Koi8rModel = {
    'charToOrderMap': KOI8R_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "KOI8-R"
}

Win1251CyrillicModel = {
    'charToOrderMap': win1251_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "windows-1251"
}

Latin5CyrillicModel = {
    'charToOrderMap': latin5_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "ISO-8859-5"
}

MacCyrillicModel = {
    'charToOrderMap': macCyrillic_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "MacCyrillic"
}

Ibm866Model = {
    'charToOrderMap': IBM866_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "IBM866"
}

Ibm855Model = {
    'charToOrderMap': IBM855_CharToOrderMap,
    'precedenceMatrix': RussianLangModel,
    'mTypicalPositiveRatio': 0.976601,
    'keepEnglishLetter': False,
    'charsetName': "IBM855"
}
| gpl-3.0 |
eusi/MissionPlanerHM | Lib/site-packages/scipy/integrate/ode.py | 55 | 25735 | # Authors: Pearu Peterson, Pauli Virtanen, John Travers
"""
First-order ODE integrators.
User-friendly interface to various numerical integrators for solving a
system of first order ODEs with prescribed initial conditions::
d y(t)[i]
--------- = f(t,y(t))[i],
d t
y(t=0)[i] = y0[i],
where::
i = 0, ..., len(y0) - 1
class ode
---------
A generic interface class to numeric integrators. It has the following
methods::
integrator = ode(f,jac=None)
integrator = integrator.set_integrator(name,**params)
integrator = integrator.set_initial_value(y0,t0=0.0)
integrator = integrator.set_f_params(*args)
integrator = integrator.set_jac_params(*args)
y1 = integrator.integrate(t1,step=0,relax=0)
flag = integrator.successful()
class complex_ode
-----------------
This class has the same generic interface as ode, except it can handle complex
f, y and Jacobians by transparently translating them into the equivalent
real valued system. It supports the real valued solvers (i.e not zvode) and is
an alternative to ode with the zvode solver, sometimes performing better.
"""
integrator_info = \
"""
Available integrators
---------------------
vode
~~~~
Real-valued Variable-coefficient Ordinary Differential Equation
solver, with fixed-leading-coefficient implementation. It provides
implicit Adams method (for non-stiff problems) and a method based on
backward differentiation formulas (BDF) (for stiff problems).
Source: http://www.netlib.org/ode/vode.f
This integrator accepts the following parameters in set_integrator()
method of the ode class:
- atol : float or sequence
absolute tolerance for solution
- rtol : float or sequence
relative tolerance for solution
- lband : None or int
- uband : None or int
  Jacobian band width, jac[i,j] != 0 for i-lband <= j <= i+uband.
Setting these requires your jac routine to return the jacobian
in packed format, jac_packed[i-j+lband, j] = jac[i,j].
- method: 'adams' or 'bdf'
Which solver to use, Adams (non-stiff) or BDF (stiff)
- with_jacobian : bool
Whether to use the jacobian
- nsteps : int
Maximum number of (internally defined) steps allowed during one
call to the solver.
- first_step : float
- min_step : float
- max_step : float
Limits for the step sizes used by the integrator.
- order : int
Maximum order used by the integrator,
order <= 12 for Adams, <= 5 for BDF.
zvode
~~~~~
Complex-valued Variable-coefficient Ordinary Differential Equation
solver, with fixed-leading-coefficient implementation. It provides
implicit Adams method (for non-stiff problems) and a method based on
backward differentiation formulas (BDF) (for stiff problems).
Source: http://www.netlib.org/ode/zvode.f
This integrator accepts the same parameters in set_integrator()
as the "vode" solver.
:Note:
When using ZVODE for a stiff system, it should only be used for
the case in which the function f is analytic, that is, when each f(i)
is an analytic function of each y(j). Analyticity means that the
partial derivative df(i)/dy(j) is a unique complex number, and this
fact is critical in the way ZVODE solves the dense or banded linear
systems that arise in the stiff case. For a complex stiff ODE system
in which f is not analytic, ZVODE is likely to have convergence
failures, and for this problem one should instead use DVODE on the
equivalent real system (in the real and imaginary parts of y).
dopri5
~~~~~~
Numerical solution of a system of first order
ordinary differential equations y'=f(x,y).
this is an explicit runge-kutta method of order (4)5
due to Dormand & Prince (with stepsize control and
dense output).
Authors: E. Hairer and G. Wanner
Universite de Geneve, Dept. de Mathematiques
CH-1211 Geneve 24, Switzerland
e-mail: ernst.hairer@math.unige.ch
gerhard.wanner@math.unige.ch
This code is described in:
E. Hairer, S.P. Norsett and G. Wanner, Solving Ordinary
Differential Equations i. Nonstiff Problems. 2nd edition.
Springer Series in Computational Mathematics,
Springer-Verlag (1993)
This integrator accepts the following parameters in set_integrator()
method of the ode class:
- atol : float or sequence
absolute tolerance for solution
- rtol : float or sequence
relative tolerance for solution
- nsteps : int
Maximum number of (internally defined) steps allowed during one
call to the solver.
- first_step : float
- max_step : float
- safety : float
Safety factor on new step selection (default 0.9)
- ifactor : float
- dfactor : float
Maximum factor to increase/decrease step size by in one step
- beta : float
Beta parameter for stabilised step size control.
dop853
~~~~~~
Numerical solution of a system of first order
ordinary differential equations y'=f(x,y).
this is an explicit runge-kutta method of order 8(5,3)
due to Dormand & Prince (with stepsize control and
dense output).
Options and references the same as dopri5.
"""
if __doc__:
__doc__ += integrator_info
# XXX: Integrators must have:
# ===========================
# cvode - C version of vode and vodpk with many improvements.
# Get it from http://www.netlib.org/ode/cvode.tar.gz
# To wrap cvode to Python, one must write extension module by
# hand. Its interface is too much 'advanced C' that using f2py
# would be too complicated (or impossible).
#
# How to define a new integrator:
# ===============================
#
# class myodeint(IntegratorBase):
#
# runner = <odeint function> or None
#
# def __init__(self,...): # required
# <initialize>
#
# def reset(self,n,has_jac): # optional
# # n - the size of the problem (number of equations)
# # has_jac - whether user has supplied its own routine for Jacobian
# <allocate memory,initialize further>
#
# def run(self,f,jac,y0,t0,t1,f_params,jac_params): # required
# # this method is called to integrate from t=t0 to t=t1
# # with initial condition y0. f and jac are user-supplied functions
# # that define the problem. f_params,jac_params are additional
# # arguments
# # to these functions.
# <calculate y1>
# if <calculation was unsuccessful>:
# self.success = 0
# return t1,y1
#
# # In addition, one can define step() and run_relax() methods (they
# # take the same arguments as run()) if the integrator can support
# # these features (see IntegratorBase doc strings).
#
# if myodeint.runner:
# IntegratorBase.integrator_classes.append(myodeint)
__all__ = ['ode', 'complex_ode']
__version__ = "$Id$"
__docformat__ = "restructuredtext en"
import re
import warnings
from numpy import asarray, array, zeros, int32, isscalar, real, imag
import vode as _vode
import _dop
#------------------------------------------------------------------------------
# User interface
#------------------------------------------------------------------------------
class ode(object):
    """\
A generic interface class to numeric integrators.

The actual integration work is delegated to a backend (an IntegratorBase
subclass) selected with set_integrator(); this class only holds the
problem definition (f, jac, their extra parameters) and the current
state (t, y).

See also
--------
odeint : an integrator with a simpler interface based on lsoda from ODEPACK
quad : for finding the area under a curve

Examples
--------
A problem to integrate and the corresponding jacobian:

>>> from scipy import eye
>>> from scipy.integrate import ode
>>>
>>> y0, t0 = [1.0j, 2.0], 0
>>>
>>> def f(t, y, arg1):
>>>     return [1j*arg1*y[0] + y[1], -arg1*y[1]**2]
>>> def jac(t, y, arg1):
>>>     return [[1j*arg1, 1], [0, -arg1*2*y[1]]]

The integration:

>>> r = ode(f, jac).set_integrator('zvode', method='bdf', with_jacobian=True)
>>> r.set_initial_value(y0, t0).set_f_params(2.0).set_jac_params(2.0)
>>> t1 = 10
>>> dt = 1
>>> while r.successful() and r.t < t1:
>>>     r.integrate(r.t+dt)
>>>     print r.t, r.y
    """
    if __doc__:
        __doc__ += integrator_info

    def __init__(self, f, jac=None):
        """
        Define equation y' = f(y,t) where (optional) jac = df/dy.

        Parameters
        ----------
        f : f(t, y, *f_args)
            Rhs of the equation. t is a scalar, y.shape == (n,).
            f_args is set by calling set_f_params(*args)
        jac : jac(t, y, *jac_args)
            Jacobian of the rhs, jac[i,j] = d f[i] / d y[j]
            jac_args is set by calling set_jac_params(*args)
        """
        self.stiff = 0
        self.f = f
        self.jac = jac
        self.f_params = ()
        self.jac_params = ()
        self.y = []

    def set_initial_value(self, y, t=0.0):
        """Set initial conditions y(t) = y."""
        if isscalar(y):
            y = [y]
        n_prev = len(self.y)
        if not n_prev:
            # No backend chosen yet: an empty name prefix-matches any
            # registered integrator class, so this picks the first available.
            self.set_integrator('') # find first available integrator
        self.y = asarray(y, self._integrator.scalar)
        self.t = t
        # Backend workspaces depend on problem size and Jacobian availability.
        self._integrator.reset(len(self.y),self.jac is not None)
        return self

    def set_integrator(self, name, **integrator_params):
        """
        Set integrator by name.

        Parameters
        ----------
        name : str
            Name of the integrator.
        integrator_params :
            Additional parameters for the integrator.
        """
        integrator = find_integrator(name)
        if integrator is None:
            # FIXME: this really should be raise an exception. Will that break
            # any code?
            warnings.warn('No integrator name match with %r or is not '
                          'available.' % name)
        else:
            self._integrator = integrator(**integrator_params)
            if not len(self.y):
                # Provide a dummy state so successful()/integrate() do not
                # fail before set_initial_value() is called.
                self.t = 0.0
                self.y = array([0.0], self._integrator.scalar)
            self._integrator.reset(len(self.y),self.jac is not None)
        return self

    def integrate(self, t, step=0, relax=0):
        """Find y=y(t), set y as an initial condition, and return y."""
        # Choose the most specific driver the backend supports: step() takes a
        # single internal step; run_relax() may overshoot t; run() hits t
        # exactly.
        if step and self._integrator.supports_step:
            mth = self._integrator.step
        elif relax and self._integrator.supports_run_relax:
            mth = self._integrator.run_relax
        else:
            mth = self._integrator.run
        # A dummy callable stands in for a missing Jacobian; backends that
        # would call it are configured (via reset) not to.
        self.y,self.t = mth(self.f,self.jac or (lambda :None),
                            self.y,self.t,t,
                            self.f_params,self.jac_params)
        return self.y

    def successful(self):
        """Check if integration was successful."""
        try:
            self._integrator
        except AttributeError:
            self.set_integrator('')
        return self._integrator.success==1

    def set_f_params(self,*args):
        """Set extra parameters for user-supplied function f."""
        self.f_params = args
        return self

    def set_jac_params(self,*args):
        """Set extra parameters for user-supplied function jac."""
        self.jac_params = args
        return self
class complex_ode(ode):
    """A wrapper of ode for complex systems.

    The complex system of size n is transparently mapped onto a real system
    of size 2n with interleaved real/imaginary parts, so the real-valued
    solvers (vode, dopri5, dop853) can be used.  For usage examples, see
    `ode`.
    """
    def __init__(self, f, jac=None):
        """
        Define equation y' = f(y,t), where y and f can be complex.

        Parameters
        ----------
        f : f(t, y, *f_args)
            Rhs of the equation. t is a scalar, y.shape == (n,).
            f_args is set by calling set_f_params(*args)
        jac : jac(t, y, *jac_args)
            Jacobian of the rhs, jac[i,j] = d f[i] / d y[j]
            jac_args is set by calling set_jac_params(*args)
        """
        self.cf = f
        self.cjac = jac
        # The base class only ever sees the real-valued wrappers.
        if jac is not None:
            ode.__init__(self, self._wrap, self._wrap_jac)
        else:
            ode.__init__(self, self._wrap, None)

    def _wrap(self, t, y, *f_args):
        # Rebuild the complex state from interleaved reals, evaluate the
        # user's rhs, then split the result back into the real buffer.
        f = self.cf(*((t, y[::2] + 1j*y[1::2]) + f_args))
        self.tmp[::2] = real(f)
        self.tmp[1::2] = imag(f)
        return self.tmp

    def _wrap_jac(self, t, y, *jac_args):
        # For an analytic complex Jacobian J = A + iB, the equivalent real
        # Jacobian is made of 2x2 blocks [[A, -B], [B, A]].
        jac = self.cjac(*((t, y[::2] + 1j*y[1::2]) + jac_args))
        self.jac_tmp[1::2,1::2] = self.jac_tmp[::2,::2] = real(jac)
        self.jac_tmp[1::2,::2] = imag(jac)
        self.jac_tmp[::2,1::2] = -self.jac_tmp[1::2,::2]
        return self.jac_tmp

    def set_integrator(self, name, **integrator_params):
        """
        Set integrator by name.

        Parameters
        ----------
        name : str
            Name of the integrator
        integrator_params :
            Additional parameters for the integrator.

        Raises
        ------
        ValueError
            If the complex-native 'zvode' integrator is requested; it must be
            used with `ode` directly.
        """
        if name == 'zvode':
            # Fixed message: it previously read "not zode", naming a class
            # that does not exist.
            raise ValueError("zvode should be used with ode, not complex_ode")
        return ode.set_integrator(self, name, **integrator_params)

    def set_initial_value(self, y, t=0.0):
        """Set initial conditions y(t) = y."""
        y = asarray(y)
        # Interleave: tmp[0::2] holds real parts, tmp[1::2] imaginary parts.
        self.tmp = zeros(y.size*2, 'float')
        self.tmp[::2] = real(y)
        self.tmp[1::2] = imag(y)
        if self.cjac is not None:
            self.jac_tmp = zeros((y.size*2, y.size*2), 'float')
        return ode.set_initial_value(self, self.tmp, t)

    def integrate(self, t, step=0, relax=0):
        """Find y=y(t), set y as an initial condition, and return y."""
        y = ode.integrate(self, t, step, relax)
        # Fold the interleaved real solution back into complex form.
        return y[::2] + 1j*y[1::2]
#------------------------------------------------------------------------------
# ODE integrators
#------------------------------------------------------------------------------
def find_integrator(name):
    """Return the first registered integrator class whose name matches
    `name` (case-insensitive prefix match via re.match), or None."""
    candidates = (klass for klass in IntegratorBase.integrator_classes
                  if re.match(name, klass.__name__, re.I))
    return next(candidates, None)
class IntegratorBase(object):
    # Abstract base for integrator backends.  Concrete subclasses append
    # themselves to `integrator_classes` at import time when their Fortran
    # runner is available; find_integrator() searches that registry.
    runner = None # runner is None => integrator is not available
    success = None # success==1 if integrator was called successfully
    supports_run_relax = None
    supports_step = None
    integrator_classes = []
    # dtype used for the state vector y (overridden to complex by zvode).
    scalar = float

    def reset(self,n,has_jac):
        """Prepare integrator for call: allocate memory, set flags, etc.
        n - number of equations.
        has_jac - if user has supplied function for evaluating Jacobian.
        """

    def run(self,f,jac,y0,t0,t1,f_params,jac_params):
        """Integrate from t=t0 to t=t1 using y0 as an initial condition.
        Return 2-tuple (y1,t1) where y1 is the result and t=t1
        defines the stoppage coordinate of the result.
        """
        raise NotImplementedError('all integrators must define '
                                  'run(f,jac,t0,t1,y0,f_params,jac_params)')

    def step(self,f,jac,y0,t0,t1,f_params,jac_params):
        """Make one integration step and return (y1,t1)."""
        raise NotImplementedError('%s does not support step() method' %
                                  self.__class__.__name__)

    def run_relax(self,f,jac,y0,t0,t1,f_params,jac_params):
        """Integrate from t=t0 to t>=t1 and return (y1,t)."""
        raise NotImplementedError('%s does not support run_relax() method' %
                                  self.__class__.__name__)

    # XXX: __str__ method for getting visual state of the integrator
class vode(IntegratorBase):
    # Fortran entry point; None when the extension was built without dvode,
    # in which case this backend is simply never registered.
    runner = getattr(_vode,'dvode',None)
    # Human-readable translations of the negative ISTATE codes that DVODE
    # returns on failure.
    messages = {-1:'Excess work done on this call. (Perhaps wrong MF.)',
                -2:'Excess accuracy requested. (Tolerances too small.)',
                -3:'Illegal input detected. (See printed message.)',
                -4:'Repeated error test failures. (Check all input.)',
                -5:'Repeated convergence failures. (Perhaps bad'
                ' Jacobian supplied or wrong choice of MF or tolerances.)',
                -6:'Error weight became zero during problem. (Solution'
                ' component i vanished, and ATOL or ATOL(i) = 0.)'
                }
    supports_run_relax = 1
    supports_step = 1

    def __init__(self,
                 method = 'adams',
                 with_jacobian = 0,
                 rtol=1e-6,atol=1e-12,
                 lband=None,uband=None,
                 order = 12,
                 nsteps = 500,
                 max_step = 0.0, # corresponds to infinite
                 min_step = 0.0,
                 first_step = 0.0, # determined by solver
                 ):
        # NOTE(review): the user string is passed as the regex *pattern*, so
        # e.g. method='a' also selects Adams; the arguments look swapped but
        # this is long-standing behavior.
        if re.match(method,r'adams',re.I):
            self.meth = 1
        elif re.match(method,r'bdf',re.I):
            self.meth = 2
        else:
            raise ValueError('Unknown integration method %s' % method)
        self.with_jacobian = with_jacobian
        self.rtol = rtol
        self.atol = atol
        # DVODE naming: MU = upper band width, ML = lower band width.
        self.mu = uband
        self.ml = lband
        self.order = order
        self.nsteps = nsteps
        self.max_step = max_step
        self.min_step = min_step
        self.first_step = first_step
        self.success = 1

    def reset(self,n,has_jac):
        # Calculate parameters for Fortran subroutine dvode.
        # MITER selects the corrector iteration/Jacobian handling:
        # 0 = functional, 1/4 = user-supplied full/banded Jacobian,
        # 2/5 = internally generated full/banded, 3 = diagonal approximation.
        if has_jac:
            if self.mu is None and self.ml is None:
                miter = 1
            else:
                if self.mu is None: self.mu = 0
                if self.ml is None: self.ml = 0
                miter = 4
        else:
            if self.mu is None and self.ml is None:
                if self.with_jacobian:
                    miter = 2
                else:
                    miter = 0
            else:
                if self.mu is None: self.mu = 0
                if self.ml is None: self.ml = 0
                if self.ml==self.mu==0:
                    miter = 3
                else:
                    miter = 5
        # Method flag MF = 10*METH + MITER (METH: 1=Adams, 2=BDF).
        mf = 10*self.meth + miter
        # Real workspace length LRW per the table in the DVODE documentation.
        if mf==10:
            lrw = 20 + 16*n
        elif mf in [11,12]:
            lrw = 22 + 16*n + 2*n*n
        elif mf == 13:
            lrw = 22 + 17*n
        elif mf in [14,15]:
            lrw = 22 + 18*n + (3*self.ml+2*self.mu)*n
        elif mf == 20:
            lrw = 20 + 9*n
        elif mf in [21,22]:
            lrw = 22 + 9*n + 2*n*n
        elif mf == 23:
            lrw = 22 + 10*n
        elif mf in [24,25]:
            lrw = 22 + 11*n + (3*self.ml+2*self.mu)*n
        else:
            raise ValueError('Unexpected mf=%s' % mf)
        # Integer workspace: n extra slots needed whenever a Jacobian matrix
        # (full or banded) is stored.
        if miter in [0,3]:
            liw = 30
        else:
            liw = 30 + n
        rwork = zeros((lrw,), float)
        rwork[4] = self.first_step
        rwork[5] = self.max_step
        rwork[6] = self.min_step
        self.rwork = rwork
        iwork = zeros((liw,), int32)
        if self.ml is not None:
            iwork[0] = self.ml
        if self.mu is not None:
            iwork[1] = self.mu
        iwork[4] = self.order
        iwork[5] = self.nsteps
        iwork[6] = 2 # mxhnil
        self.iwork = iwork
        # [rtol, atol, ITASK, ISTATE, rwork, iwork, mf]; ITASK and ISTATE are
        # mutated in place by run()/step()/run_relax().
        self.call_args = [self.rtol,self.atol,1,1,self.rwork,self.iwork,mf]
        self.success = 1

    def run(self,*args):
        # args = (f, jac, y0, t0, t1, f_params, jac_params); the solver
        # arguments are spliced in between t1 and f_params.
        y1,t,istate = self.runner(*(args[:5]+tuple(self.call_args)+args[5:]))
        if istate <0:
            warnings.warn('vode: ' + self.messages.get(istate,'Unexpected istate=%s'%istate))
            self.success = 0
        else:
            self.call_args[3] = 2 # upgrade istate from 1 to 2
        return y1,t

    def step(self,*args):
        # ITASK=2: take one internal step and return.
        itask = self.call_args[2]
        self.call_args[2] = 2
        r = self.run(*args)
        self.call_args[2] = itask
        return r

    def run_relax(self,*args):
        # ITASK=3: stop at the first internal mesh point at or beyond t1.
        itask = self.call_args[2]
        self.call_args[2] = 3
        r = self.run(*args)
        self.call_args[2] = itask
        return r
# Register the backend only when the dvode Fortran routine was compiled in.
if vode.runner is not None:
    IntegratorBase.integrator_classes.append(vode)
class zvode(vode):
    """Complex-valued analogue of `vode`, wrapping the Fortran ZVODE solver.

    All option handling is inherited from `vode`; only the workspace layout
    differs (a complex ZWORK array plus a smaller real RWORK array).
    """
    runner = getattr(_vode,'zvode',None)
    supports_run_relax = 1
    supports_step = 1
    # State vectors are complex; ode.set_initial_value() uses this dtype.
    scalar = complex

    def reset(self, n, has_jac):
        # Calculate parameters for Fortran subroutine zvode.
        # MITER selection is identical to vode.reset(); see there for the
        # meaning of the codes.
        if has_jac:
            if self.mu is None and self.ml is None:
                miter = 1
            else:
                if self.mu is None: self.mu = 0
                if self.ml is None: self.ml = 0
                miter = 4
        else:
            if self.mu is None and self.ml is None:
                if self.with_jacobian:
                    miter = 2
                else:
                    miter = 0
            else:
                if self.mu is None: self.mu = 0
                if self.ml is None: self.ml = 0
                if self.ml==self.mu==0:
                    miter = 3
                else:
                    miter = 5
        mf = 10*self.meth + miter
        # Complex workspace length LZW per the table in the ZVODE docs.  The
        # negative-MF rows (Jacobian saved rather than recomputed) are kept
        # for completeness even though __init__ only produces positive MF.
        if mf in (10,):
            lzw = 15*n
        elif mf in (11, 12):
            lzw = 15*n + 2*n**2
        elif mf in (-11, -12):
            lzw = 15*n + n**2
        elif mf in (13,):
            lzw = 16*n
        elif mf in (14,15):
            lzw = 17*n + (3*self.ml + 2*self.mu)*n
        elif mf in (-14,-15):
            lzw = 16*n + (2*self.ml + self.mu)*n
        elif mf in (20,):
            lzw = 8*n
        elif mf in (21, 22):
            lzw = 8*n + 2*n**2
        elif mf in (-21,-22):
            lzw = 8*n + n**2
        elif mf in (23,):
            lzw = 9*n
        elif mf in (24, 25):
            lzw = 10*n + (3*self.ml + 2*self.mu)*n
        elif mf in (-24, -25):
            lzw = 9*n + (2*self.ml + self.mu)*n
        else:
            # Previously an unexpected MF silently left `lzw` unbound and
            # failed later with a NameError; fail fast like vode.reset().
            raise ValueError('Unexpected mf=%s' % mf)
        lrw = 20 + n
        # Integer workspace: n extra slots whenever a Jacobian is stored.
        if miter in (0, 3):
            liw = 30
        else:
            liw = 30 + n
        zwork = zeros((lzw,), complex)
        self.zwork = zwork
        rwork = zeros((lrw,), float)
        rwork[4] = self.first_step
        rwork[5] = self.max_step
        rwork[6] = self.min_step
        self.rwork = rwork
        iwork = zeros((liw,), int32)
        if self.ml is not None:
            iwork[0] = self.ml
        if self.mu is not None:
            iwork[1] = self.mu
        iwork[4] = self.order
        iwork[5] = self.nsteps
        iwork[6] = 2 # mxhnil
        self.iwork = iwork
        # [rtol, atol, ITASK, ISTATE, zwork, rwork, iwork, mf]; ITASK/ISTATE
        # are mutated by run()/step()/run_relax() inherited from vode.
        self.call_args = [self.rtol,self.atol,1,1,
                          self.zwork,self.rwork,self.iwork,mf]
        self.success = 1

    def run(self,*args):
        # args = (f, jac, y0, t0, t1, f_params, jac_params).
        y1,t,istate = self.runner(*(args[:5]+tuple(self.call_args)+args[5:]))
        if istate < 0:
            warnings.warn('zvode: ' +
                          self.messages.get(istate, 'Unexpected istate=%s'%istate))
            self.success = 0
        else:
            self.call_args[3] = 2 # upgrade istate from 1 to 2
        return y1, t
# Register the backend only when the zvode Fortran routine was compiled in.
if zvode.runner is not None:
    IntegratorBase.integrator_classes.append(zvode)
class dopri5(IntegratorBase):
    # Explicit Runge-Kutta method of order (4)5 by Dormand & Prince, with
    # step-size control; wraps the Fortran routine dopri5.
    runner = getattr(_dop,'dopri5',None)
    name = 'dopri5'
    # Translations of the IDID return codes from the Fortran solver.
    messages = { 1 : 'computation successful',
                 2 : 'comput. successful (interrupted by solout)',
                 -1 : 'input is not consistent',
                 -2 : 'larger nmax is needed',
                 -3 : 'step size becomes too small',
                 -4 : 'problem is probably stiff (interrupted)',
                 }

    def __init__(self,
                 rtol=1e-6,atol=1e-12,
                 nsteps = 500,
                 max_step = 0.0,
                 first_step = 0.0, # determined by solver
                 safety = 0.9,
                 ifactor = 10.0,
                 dfactor = 0.2,
                 beta = 0.0,
                 method = None # unused; accepted for interface parity
                 ):
        self.rtol = rtol
        self.atol = atol
        self.nsteps = nsteps
        self.max_step = max_step
        self.first_step = first_step
        self.safety = safety
        self.ifactor = ifactor
        self.dfactor = dfactor
        self.beta = beta
        self.success = 1

    def reset(self,n,has_jac):
        # WORK(2..7) carry the tuning parameters expected by the Fortran
        # code; the rest of the array is solver scratch space.
        work = zeros((8*n+21,), float)
        work[1] = self.safety
        work[2] = self.dfactor
        work[3] = self.ifactor
        work[4] = self.beta
        work[5] = self.max_step
        work[6] = self.first_step
        self.work = work
        iwork = zeros((21,), int32)
        iwork[0] = self.nsteps
        self.iwork = iwork
        self.call_args = [self.rtol,self.atol,self._solout,self.work,self.iwork]
        self.success = 1

    def run(self,f,jac,y0,t0,t1,f_params,jac_params):
        # jac, f_params and jac_params are ignored: this explicit method
        # needs no Jacobian, and extra f parameters are not forwarded.
        x,y,iwork,idid = self.runner(*((f,t0,y0,t1) + tuple(self.call_args)))
        if idid < 0:
            warnings.warn(self.name + ': ' +
                          self.messages.get(idid, 'Unexpected idid=%s'%idid))
            self.success = 0
        return y,x

    def _solout(self, *args):
        # dummy solout function (dense-output callback; intentionally a no-op)
        pass
# Register the backend only when the dopri5 Fortran routine was compiled in.
if dopri5.runner is not None:
    IntegratorBase.integrator_classes.append(dopri5)
class dop853(dopri5):
    # Explicit Runge-Kutta method of order 8(5,3) by Dormand & Prince.
    # Differs from dopri5 only in the Fortran entry point, default step
    # factors, and the (larger) workspace size in reset().
    runner = getattr(_dop,'dop853',None)
    name = 'dop853'

    def __init__(self,
                 rtol=1e-6,atol=1e-12,
                 nsteps = 500,
                 max_step = 0.0,
                 first_step = 0.0, # determined by solver
                 safety = 0.9,
                 ifactor = 6.0,
                 dfactor = 0.3,
                 beta = 0.0,
                 method = None # unused; accepted for interface parity
                 ):
        self.rtol = rtol
        self.atol = atol
        self.nsteps = nsteps
        self.max_step = max_step
        self.first_step = first_step
        self.safety = safety
        self.ifactor = ifactor
        self.dfactor = dfactor
        self.beta = beta
        self.success = 1

    def reset(self,n,has_jac):
        # Same WORK layout as dopri5.reset(), but the higher-order method
        # needs 11*n scratch entries instead of 8*n.
        work = zeros((11*n+21,), float)
        work[1] = self.safety
        work[2] = self.dfactor
        work[3] = self.ifactor
        work[4] = self.beta
        work[5] = self.max_step
        work[6] = self.first_step
        self.work = work
        iwork = zeros((21,), int32)
        iwork[0] = self.nsteps
        self.iwork = iwork
        self.call_args = [self.rtol,self.atol,self._solout,self.work,self.iwork]
        self.success = 1
# Register the backend only when the dop853 Fortran routine was compiled in.
if dop853.runner is not None:
    IntegratorBase.integrator_classes.append(dop853)
| gpl-3.0 |
heeraj123/oh-mainline | vendor/packages/django-debug-toolbar/debug_toolbar/panels/timer.py | 32 | 3432 | from __future__ import absolute_import, unicode_literals
try:
import resource # Not available on Win32 systems
except ImportError:
resource = None
import time
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import Panel
class TimerPanel(Panel):
    """
    Panel that displays the time a response took in milliseconds.

    Wall-clock time is always measured; CPU/context-switch statistics are
    collected via the `resource` module, which is unavailable on Win32 (in
    that case `has_content` is False and the detail view is disabled).
    """
    def nav_subtitle(self):
        stats = self.get_stats()
        if hasattr(self, '_start_rusage'):
            # rusage snapshots exist: show CPU time alongside total time.
            utime = self._end_rusage.ru_utime - self._start_rusage.ru_utime
            stime = self._end_rusage.ru_stime - self._start_rusage.ru_stime
            return _("CPU: %(cum)0.2fms (%(total)0.2fms)") % {
                'cum': (utime + stime) * 1000.0,
                'total': stats['total_time']
            }
        elif 'total_time' in stats:
            return _("Total: %0.2fms") % stats['total_time']
        else:
            return ''

    # NOTE: these class attributes intentionally appear mid-class in the
    # original source; moving them would be cosmetic only.
    has_content = resource is not None
    title = _("Time")
    template = 'debug_toolbar/panels/timer.html'

    @property
    def content(self):
        # Render the detail table from the stats recorded in
        # process_response(); keys here must match those recorded there.
        stats = self.get_stats()
        rows = (
            (_("User CPU time"), _("%(utime)0.3f msec") % stats),
            (_("System CPU time"), _("%(stime)0.3f msec") % stats),
            (_("Total CPU time"), _("%(total)0.3f msec") % stats),
            (_("Elapsed time"), _("%(total_time)0.3f msec") % stats),
            (_("Context switches"), _("%(vcsw)d voluntary, %(ivcsw)d involuntary") % stats),
        )
        return render_to_string(self.template, {'rows': rows})

    def process_request(self, request):
        # Snapshot wall clock (and resource usage when available) at the
        # start of the request; process_response() computes the deltas.
        self._start_time = time.time()
        if self.has_content:
            self._start_rusage = resource.getrusage(resource.RUSAGE_SELF)

    def process_response(self, request, response):
        stats = {}
        if hasattr(self, '_start_time'):
            stats['total_time'] = (time.time() - self._start_time) * 1000
        if hasattr(self, '_start_rusage'):
            self._end_rusage = resource.getrusage(resource.RUSAGE_SELF)
            stats['utime'] = 1000 * self._elapsed_ru('ru_utime')
            stats['stime'] = 1000 * self._elapsed_ru('ru_stime')
            stats['total'] = stats['utime'] + stats['stime']
            stats['vcsw'] = self._elapsed_ru('ru_nvcsw')
            stats['ivcsw'] = self._elapsed_ru('ru_nivcsw')
            stats['minflt'] = self._elapsed_ru('ru_minflt')
            stats['majflt'] = self._elapsed_ru('ru_majflt')
            # these are documented as not meaningful under Linux. If you're running BSD
            # feel free to enable them, and add any others that I hadn't gotten to before
            # I noticed that I was getting nothing but zeroes and that the docs agreed. :-(
            #
            # stats['blkin'] = self._elapsed_ru('ru_inblock')
            # stats['blkout'] = self._elapsed_ru('ru_oublock')
            # stats['swap'] = self._elapsed_ru('ru_nswap')
            # stats['rss'] = self._end_rusage.ru_maxrss
            # stats['srss'] = self._end_rusage.ru_ixrss
            # stats['urss'] = self._end_rusage.ru_idrss
            # stats['usrss'] = self._end_rusage.ru_isrss
        self.record_stats(stats)

    def _elapsed_ru(self, name):
        # Delta of one rusage field between request start and response end.
        return getattr(self._end_rusage, name) - getattr(self._start_rusage, name)
| agpl-3.0 |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/tornado/test/util_test.py | 40 | 5235 | # coding: utf-8
from __future__ import absolute_import, division, print_function, with_statement
import sys
from tornado.escape import utf8
from tornado.util import raise_exc_info, Configurable, u, exec_in, ArgReplacer
from tornado.test.util import unittest
try:
from cStringIO import StringIO # py2
except ImportError:
from io import StringIO # py3
class RaiseExcInfoTest(unittest.TestCase):
    """Tests for tornado.util.raise_exc_info."""
    def test_two_arg_exception(self):
        # This test would fail on python 3 if raise_exc_info were simply
        # a three-argument raise statement, because TwoArgException
        # doesn't have a "copy constructor"
        class TwoArgException(Exception):
            def __init__(self, a, b):
                super(TwoArgException, self).__init__()
                self.a, self.b = a, b
        try:
            raise TwoArgException(1, 2)
        except TwoArgException:
            # Capture the live exc_info triple while the exception is active.
            exc_info = sys.exc_info()
        try:
            raise_exc_info(exc_info)
            self.fail("didn't get expected exception")
        except TwoArgException as e:
            # The exact original exception object must be re-raised, not a
            # re-constructed copy.
            self.assertIs(e, exc_info[1])
class TestConfigurable(Configurable):
    """Root of the Configurable fixture hierarchy used by ConfigurableTest."""
    @classmethod
    def configurable_base(cls):
        return TestConfigurable
    @classmethod
    def configurable_default(cls):
        # Forward reference to TestConfig1 (defined below); resolved lazily
        # at call time, so the definition order is fine.
        return TestConfig1
class TestConfig1(TestConfigurable):
    """Default concrete implementation; `a` is its only option."""
    def initialize(self, a=None):
        self.a = a
class TestConfig2(TestConfigurable):
    """Alternate concrete implementation; `b` is its only option."""
    def initialize(self, b=None):
        self.b = b
class ConfigurableTest(unittest.TestCase):
    """Tests for tornado.util.Configurable's configure()/instantiation rules."""
    def setUp(self):
        # Configurable state is class-global; save it so each test starts
        # from the unconfigured default and cannot leak into other tests.
        self.saved = TestConfigurable._save_configuration()
    def tearDown(self):
        TestConfigurable._restore_configuration(self.saved)
    def checkSubclasses(self):
        # Helper (deliberately not named test_*): no matter how the base
        # class is configured, it should always be possible to instantiate
        # the subclasses directly.
        self.assertIsInstance(TestConfig1(), TestConfig1)
        self.assertIsInstance(TestConfig2(), TestConfig2)
        obj = TestConfig1(a=1)
        self.assertEqual(obj.a, 1)
        obj = TestConfig2(b=2)
        self.assertEqual(obj.b, 2)
    def test_default(self):
        # Unconfigured: instantiating the base yields configurable_default().
        obj = TestConfigurable()
        self.assertIsInstance(obj, TestConfig1)
        self.assertIs(obj.a, None)
        obj = TestConfigurable(a=1)
        self.assertIsInstance(obj, TestConfig1)
        self.assertEqual(obj.a, 1)
        self.checkSubclasses()
    def test_config_class(self):
        # configure(cls) redirects base instantiation to that subclass.
        TestConfigurable.configure(TestConfig2)
        obj = TestConfigurable()
        self.assertIsInstance(obj, TestConfig2)
        self.assertIs(obj.b, None)
        obj = TestConfigurable(b=2)
        self.assertIsInstance(obj, TestConfig2)
        self.assertEqual(obj.b, 2)
        self.checkSubclasses()
    def test_config_args(self):
        # configure(None, **kwargs) keeps the default class but binds
        # default keyword arguments for base-class instantiation.
        TestConfigurable.configure(None, a=3)
        obj = TestConfigurable()
        self.assertIsInstance(obj, TestConfig1)
        self.assertEqual(obj.a, 3)
        obj = TestConfigurable(a=4)
        self.assertIsInstance(obj, TestConfig1)
        self.assertEqual(obj.a, 4)
        self.checkSubclasses()
        # args bound in configure don't apply when using the subclass directly
        obj = TestConfig1()
        self.assertIs(obj.a, None)
    def test_config_class_args(self):
        # Both a class and bound arguments at once.
        TestConfigurable.configure(TestConfig2, b=5)
        obj = TestConfigurable()
        self.assertIsInstance(obj, TestConfig2)
        self.assertEqual(obj.b, 5)
        obj = TestConfigurable(b=6)
        self.assertIsInstance(obj, TestConfig2)
        self.assertEqual(obj.b, 6)
        self.checkSubclasses()
        # args bound in configure don't apply when using the subclass directly
        obj = TestConfig2()
        self.assertIs(obj.b, None)
class UnicodeLiteralTest(unittest.TestCase):
    """Tests for tornado.util.u(), the py2/py3-portable unicode literal."""
    def test_unicode_escapes(self):
        # U+00E9 (e-acute) must round-trip through utf8() to its two-byte
        # UTF-8 encoding.
        self.assertEqual(utf8(u('\u00e9')), b'\xc3\xa9')
class ExecInTest(unittest.TestCase):
    """Tests for tornado.util.exec_in."""
    # This test is python 2 only because there are no new future imports
    # defined in python 3 yet.
    @unittest.skipIf(sys.version_info >= print_function.getMandatoryRelease(),
                     'no testable future imports')
    def test_no_inherit_future(self):
        # This file has from __future__ import print_function...
        f = StringIO()
        print('hello', file=f)
        # ...but the template doesn't
        # (so 'print >> f' must still parse as the py2 statement form,
        # proving exec_in does not leak this module's __future__ flags).
        exec_in('print >> f, "world"', dict(f=f))
        self.assertEqual(f.getvalue(), 'hello\nworld\n')
class ArgReplacerTest(unittest.TestCase):
    """Tests for tornado.util.ArgReplacer: replace() returns
    (old_value, args, kwargs) with 'callback' swapped for the new value."""
    def setUp(self):
        def function(x, y, callback=None, z=None):
            pass
        self.replacer = ArgReplacer(function, 'callback')
    def test_omitted(self):
        # Argument absent: old value is None and it is injected via kwargs.
        self.assertEqual(self.replacer.replace('new', (1, 2), dict()),
                         (None, (1, 2), dict(callback='new')))
    def test_position(self):
        # Passed positionally: note args come back as a (mutated) list.
        self.assertEqual(self.replacer.replace('new', (1, 2, 'old', 3), dict()),
                         ('old', [1, 2, 'new', 3], dict()))
    def test_keyword(self):
        # Passed by keyword: args untouched, kwargs updated in place.
        self.assertEqual(self.replacer.replace('new', (1,),
                                               dict(y=2, callback='old', z=3)),
                         ('old', (1,), dict(y=2, callback='new', z=3)))
| gpl-3.0 |
mpurzynski/MozDef | mq/plugins/fluentdSqsFixup.py | 2 | 5436 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2015 Mozilla Corporation
#
# This script copies the format/handling mechanism of ipFixup.py (git f5734b0c7e412424b44a6d7af149de6250fc70a2)
import netaddr
from mozdef_util.utilities.toUTC import toUTC
def isIPv4(ip):
    """Return True if `ip` is a valid dotted-quad IPv4 address string.

    Any failure inside netaddr (e.g. a non-string argument) is treated as
    "not an IPv4 address" rather than propagated.
    """
    try:
        return netaddr.valid_ipv4(ip)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any ordinary error still means "not IPv4".
        return False
def addError(message, error):
    """Append an error note to a message's 'errors' list.

    The list is created on first use.  If an 'errors' key already exists
    but is not a list, the note is silently dropped (original behavior).
    """
    errors = message.setdefault('errors', [])
    if isinstance(errors, list):
        errors.append(error)
class message(object):
    """Normalize fluentd-forwarded SQS events into the mandatory MozDef
    message shape (summary/utctimestamp/hostname/.../details)."""
    def __init__(self):
        '''register our criteria for being passed a message
           as a list of lower case strings or values to match with an event's dictionary of keys or values
           set the priority if you have a preference for order of plugins to run.
           0 goes first, 100 is assumed/default if not sent
        '''
        # ask for anything that could house an IP address
        self.registration = ['nubis_events_non_prod', 'nubis_events_prod']
        self.priority = 15

    def onMessage(self, message, metadata):
        """
        Ensure all messages have the mandatory mozdef fields
        """
        # Making sufficiently sure this is a fluentd-forwarded message from
        # fluentd SQS plugin, so that we don't spend too much time on other
        # message types
        if 'az' not in message and 'instance_id' not in message and '__tag' not in message:
            return (message, metadata)
        if 'details' not in message:
            message['details'] = dict()
        if 'summary' not in message and 'message' in message:
            message['summary'] = message['message']
        if 'utctimestamp' not in message and 'time' in message:
            message['utctimestamp'] = toUTC(message['time']).isoformat()
        # Bro format of {u'Timestamp': 1.482437837e+18}
        if 'utctimestamp' not in message and 'Timestamp' in message:
            message['utctimestamp'] = toUTC(message['Timestamp']).isoformat()
        # host is used to store dns-style-ip entries in AWS, for ex
        # ip-10-162-8-26 is 10.162.8.26. obviously there is no strong guarantee
        # that this is always trusted. It's better than nothing though. At the
        # time of writing, there is no ipv6 support AWS-side for this kind of
        # field. It may be overridden later by a better field, if any exists
        if 'host' in message:
            tmp = message['host']
            if tmp.startswith('ip-'):
                ipText = tmp.split('ip-')[1].replace('-', '.')
                if isIPv4(ipText):
                    if 'destinationipaddress' not in message:
                        message['details']['destinationipaddress'] = ipText
                    if 'destinationipv4address' not in message:
                        message['details']['destinationipv4address'] = ipText
                else:
                    message['details']['destinationipaddress'] = '0.0.0.0'
                    message['details']['destinationipv4address'] = '0.0.0.0'
                    addError(message,
                             'plugin: {0} error: {1}:{2}'.format(
                                 'fluentSqsFixUp.py',
                                 'destinationipaddress is invalid',
                                 ipText))
            if 'hostname' not in message:
                message['hostname'] = tmp
        # All messages with __tag 'ec2.forward*' are actually syslog forwarded
        # messages, so classify as such
        if '__tag' in message:
            tmp = message['__tag']
            if tmp.startswith('ec2.forward'):
                message['category'] = 'syslog'
                message['source'] = 'syslog'
        if 'ident' in message:
            tmp = message['ident']
            message['details']['program'] = tmp
            if 'processname' not in message and 'program' in message['details']:
                message['processname'] = message['details']['program']
        if 'processid' not in message:
            # Default to 0 when no pid is available.  BUGFIX: the original
            # `if ... and 'pid' in message / else: processid = 0` clobbered an
            # already-present 'processid' with 0 whenever the condition on
            # 'pid' failed; an existing processid is now left untouched.
            message['processid'] = message.get('pid', 0)
        # Unknown really, but this field is mandatory.
        if 'severity' not in message:
            message['severity'] = 'INFO'
        # We already have the time of event stored in 'timestamp' so we don't
        # need 'time'
        if 'time' in message:
            message.pop('time')
        # Any remaining keys which aren't mandatory fields should be moved
        # to details
        # https://mozdef.readthedocs.io/en/latest/usage.html#mandatory-fields
        original_keys = list(message.keys())
        for key in original_keys:
            if key not in [
                    'summary',
                    'utctimestamp',
                    'hostname',
                    'category',
                    'source',
                    'processname',
                    'processid',
                    'severity',
                    'tags',
                    'details']:
                message['details'][key] = message[key]
                message.pop(key)
        return (message, metadata)
| mpl-2.0 |
jamesandariese/dd-agent | tests/checks/integration/test_etcd.py | 41 | 4101 | # 3p
from nose.plugins.attrib import attr
# project
from tests.checks.common import AgentCheckTest
@attr(requires='etcd')
class CheckEtcdTest(AgentCheckTest):
    """Integration tests for the etcd check; requires a local etcd on port 4001."""

    CHECK_NAME = "etcd"

    # Store-level counters from etcd's /v2/stats/store endpoint that the
    # check reports as 'etcd.store.<name>'.
    STORE_METRICS = [
        'compareanddelete.fail',
        'compareanddelete.success',
        'compareandswap.fail',
        'compareandswap.success',
        'create.fail',
        'create.success',
        'delete.fail',
        'delete.success',
        'expire.count',
        'gets.fail',
        'gets.success',
        'sets.fail',
        'sets.success',
        'update.fail',
        'update.success',
        'watchers',
    ]

    def __init__(self, *args, **kwargs):
        AgentCheckTest.__init__(self, *args, **kwargs)
        # Single-instance config pointing at the local etcd client port.
        self.config = {"instances": [{"url": "http://localhost:4001"}]}

    def test_metrics(self):
        """Every store/self metric and an OK service check must be emitted."""
        self.run_check_twice(self.config)

        tags = ['url:http://localhost:4001', 'etcd_state:leader']

        for mname in self.STORE_METRICS:
            self.assertMetric('etcd.store.%s' % mname, tags=tags, count=1)

        self.assertMetric('etcd.self.send.appendrequest.count', tags=tags, count=1)
        self.assertMetric('etcd.self.recv.appendrequest.count', tags=tags, count=1)

        self.assertServiceCheckOK(self.check.SERVICE_CHECK_NAME,
                                  count=1,
                                  tags=['url:http://localhost:4001'])

        self.coverage_report()

    # FIXME: not really an integration test, should be pretty easy
    # to spin up a cluster to test that.
    def test_followers(self):
        """Leader-side follower stats are turned into per-follower metrics."""
        # Canned payload in the shape of etcd's /v2/stats/leader response.
        mock = {
            "followers": {
                "etcd-node1": {
                    "counts": {
                        "fail": 1212,
                        "success": 4163176
                    },
                    "latency": {
                        "average": 2.7206299430775007,
                        "current": 1.486487,
                        "maximum": 2018.410279,
                        "minimum": 1.011763,
                        "standardDeviation": 6.246990702203536
                    }
                },
                "etcd-node3": {
                    "counts": {
                        "fail": 1378,
                        "success": 4164598
                    },
                    "latency": {
                        "average": 2.707100125761001,
                        "current": 1.666258,
                        "maximum": 1409.054765,
                        "minimum": 0.998415,
                        "standardDeviation": 5.910089773061448
                    }
                }
            },
            "leader": "etcd-node2"
        }

        # Bypass the HTTP call to the leader stats endpoint.
        mocks = {
            '_get_leader_metrics': lambda url, ssl, timeout: mock
        }

        self.run_check_twice(self.config, mocks=mocks)

        common_leader_tags = ['url:http://localhost:4001', 'etcd_state:leader']
        follower_tags = [
            common_leader_tags[:] + ['follower:etcd-node1'],
            common_leader_tags[:] + ['follower:etcd-node3'],
        ]

        for fol_tags in follower_tags:
            self.assertMetric('etcd.leader.counts.fail', count=1, tags=fol_tags)
            self.assertMetric('etcd.leader.counts.success', count=1, tags=fol_tags)
            self.assertMetric('etcd.leader.latency.avg', count=1, tags=fol_tags)
            self.assertMetric('etcd.leader.latency.min', count=1, tags=fol_tags)
            self.assertMetric('etcd.leader.latency.max', count=1, tags=fol_tags)
            self.assertMetric('etcd.leader.latency.stddev', count=1, tags=fol_tags)
            self.assertMetric('etcd.leader.latency.current', count=1, tags=fol_tags)

    def test_bad_config(self):
        """A bad URL must raise and emit a CRITICAL service check."""
        self.assertRaises(Exception,
                          lambda: self.run_check({"instances": [{"url": "http://localhost:4001/test"}]}))
        self.assertServiceCheckCritical(self.check.SERVICE_CHECK_NAME,
                                        count=1,
                                        tags=['url:http://localhost:4001/test/v2/stats/self'])
        self.coverage_report()
| bsd-3-clause |
yencarnacion/jaikuengine | .google_appengine/lib/django-1.2/tests/regressiontests/delete_regress/models.py | 52 | 1111 | from django.db import models
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
class Award(models.Model):
    # Generic award attachable to any model instance via contenttypes.
    name = models.CharField(max_length=25)
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType)
    content_object = generic.GenericForeignKey()
class AwardNote(models.Model):
    # Free-text note attached to an Award (cascade-deleted with it).
    award = models.ForeignKey(Award)
    note = models.CharField(max_length=100)
class Person(models.Model):
    # Person with a reverse generic relation to Award objects.
    name = models.CharField(max_length=25)
    awards = generic.GenericRelation(Award)
class Book(models.Model):
    # Minimal model used by the delete-regression tests.
    pagecount = models.IntegerField()
class Toy(models.Model):
    # Target side of the Child<->Toy m2m through PlayedWith.
    name = models.CharField(max_length=50)
class Child(models.Model):
    # Uses an explicit 'through' model for the m2m to exercise deletes
    # across intermediate tables.
    name = models.CharField(max_length=50)
    toys = models.ManyToManyField(Toy, through='PlayedWith')
class PlayedWith(models.Model):
    # Intermediate m2m model; custom db_column checks column-name handling
    # during cascading deletes.
    child = models.ForeignKey(Child)
    toy = models.ForeignKey(Toy)
    date = models.DateField(db_column='date_col')
class PlayedWithNote(models.Model):
    # Second-level dependency: deleting a Child must cascade through
    # PlayedWith into these notes.
    played = models.ForeignKey(PlayedWith)
    note = models.TextField()
| apache-2.0 |
stvstnfrd/edx-platform | openedx/core/djangoapps/api_admin/models.py | 1 | 9182 | """Models for API management."""
import logging
from smtplib import SMTPException
from config_models.models import ConfigurationModel
from django.conf import settings
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.urls import reverse
from django.utils.translation import ugettext as _u
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from six.moves.urllib.parse import urlunsplit # pylint: disable=import-error
from common.djangoapps.edxmako.shortcuts import render_to_string
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
log = logging.getLogger(__name__)
@python_2_unicode_compatible
class ApiAccessRequest(TimeStampedModel):
    """
    Model to track API access for a user.

    .. pii: Stores a website, company name, company address for this user
    .. pii_types: location, external_service, other
    .. pii_retirement: local_api
    """

    # Workflow states: every request starts PENDING; admins move it to
    # DENIED or APPROVED (see approve()/deny()).
    PENDING = u'pending'
    DENIED = u'denied'
    APPROVED = u'approved'

    STATUS_CHOICES = (
        (PENDING, _('Pending')),
        (DENIED, _('Denied')),
        (APPROVED, _('Approved')),
    )

    # One request per user; reverse accessor: user.api_access_request
    user = models.OneToOneField(User, related_name='api_access_request', on_delete=models.CASCADE)
    status = models.CharField(
        max_length=255,
        choices=STATUS_CHOICES,
        default=PENDING,
        db_index=True,
        help_text=_('Status of this API access request'),
    )
    website = models.URLField(help_text=_('The URL of the website associated with this API user.'))
    reason = models.TextField(help_text=_('The reason this user wants to access the API.'))
    company_name = models.CharField(max_length=255, default=u'')
    company_address = models.CharField(max_length=255, default=u'')
    site = models.ForeignKey(Site, on_delete=models.CASCADE)
    # Set by the pre_save email signal once a decision email has been sent.
    contacted = models.BooleanField(default=False)

    class Meta:
        get_latest_by = 'modified'
        ordering = ('-modified', '-created',)

    @classmethod
    def has_api_access(cls, user):
        """Returns whether or not this user has been granted API access.

        Arguments:
            user (User): The user to check access for.

        Returns:
            bool
        """
        return cls.api_access_status(user) == cls.APPROVED

    @classmethod
    def api_access_status(cls, user):
        """
        Returns the user's API access status, or None if they have not
        requested access.

        Arguments:
            user (User): The user to check access for.

        Returns:
            str or None
        """
        try:
            return cls.objects.get(user=user).status
        except cls.DoesNotExist:
            return None

    @classmethod
    def retire_user(cls, user):
        """
        Retires the user's API acccess request table for GDPR

        Arguments:
            user (User): The user linked to the data to retire in the model.

        Returns:
            True: If the user has a linked data in the model and retirement is successful
            False: user has no linked data in the model.
        """
        try:
            retire_target = cls.objects.get(user=user)
        except cls.DoesNotExist:
            return False
        else:
            # Blank out the PII fields but keep the row (status is retained).
            retire_target.website = ''
            retire_target.company_address = ''
            retire_target.company_name = ''
            retire_target.reason = ''
            retire_target.save()
            return True

    def approve(self):
        """Approve this request."""
        log.info(u'Approving API request from user [%s].', self.user.id)
        self.status = self.APPROVED
        self.save()

    def deny(self):
        """Deny this request."""
        log.info(u'Denying API request from user [%s].', self.user.id)
        self.status = self.DENIED
        self.save()

    def __str__(self):
        return u'ApiAccessRequest {website} [{status}]'.format(website=self.website, status=self.status)
@python_2_unicode_compatible
class ApiAccessConfig(ConfigurationModel):
    """
    Configuration for API management.

    Only the inherited 'enabled' flag is used; it gates the API-management
    feature globally.

    .. no_pii:
    """

    def __str__(self):
        return 'ApiAccessConfig [enabled={}]'.format(self.enabled)
@receiver(post_save, sender=ApiAccessRequest, dispatch_uid="api_access_request_post_save_email")
def send_request_email(sender, instance, created, **kwargs):  # pylint: disable=unused-argument
    """Notify the API access manager when a new request record is created."""
    # Only on initial creation -- later saves are status changes handled
    # by the pre_save receiver below.
    if created:
        _send_new_pending_email(instance)
@receiver(pre_save, sender=ApiAccessRequest, dispatch_uid="api_access_request_pre_save_email")
def send_decision_email(sender, instance, **kwargs):  # pylint: disable=unused-argument
    """ Send decision email after status changed. """
    # Email only when the status actually differs from the persisted row and
    # the user has not already been contacted about a decision.
    if instance.id and not instance.contacted:
        old_instance = ApiAccessRequest.objects.get(pk=instance.id)
        if instance.status != old_instance.status:
            _send_decision_email(instance)
def _send_new_pending_email(instance):
    """Email the API access manager about a newly created access request.

    The message links straight to the Django admin change page for the
    request so it can be approved or denied. SMTP failures are logged,
    never raised.
    """
    scheme = 'https' if settings.HTTPS == 'on' else 'http'
    admin_path = reverse('admin:api_admin_apiaccessrequest_change', args=(instance.id,))
    approval_url = urlunsplit((scheme, instance.site.domain, admin_path, '', ''))

    body = render_to_string(
        'api_admin/api_access_request_email_new_request.txt',
        {'approval_url': approval_url, 'api_request': instance},
    )
    subject = _u(u'API access request from {company}').format(company=instance.company_name)

    try:
        send_mail(
            subject,
            body,
            settings.API_ACCESS_FROM_EMAIL,
            [settings.API_ACCESS_MANAGER_EMAIL],
            fail_silently=False,
        )
    except SMTPException:
        log.exception(u'Error sending API user notification email for request [%s].', instance.id)
def _send_decision_email(instance):
    """Email the requesting user with the decision made about their request.

    The template is chosen by the request's current status. On a successful
    send, the instance is flagged as contacted (persisted by the caller's
    pending save); SMTP failures are logged and leave the flag unset so the
    email can be retried on the next status change.
    """
    scheme = 'https' if settings.HTTPS == 'on' else 'http'
    status_url = urlunsplit((scheme, instance.site.domain, reverse('api_admin:api-status'), '', ''))

    context = {
        'name': instance.user.username,
        'api_management_url': status_url,
        'authentication_docs_url': settings.AUTH_DOCUMENTATION_URL,
        'api_docs_url': settings.API_DOCUMENTATION_URL,
        'support_email_address': settings.API_ACCESS_FROM_EMAIL,
        'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
    }
    template_name = 'api_admin/api_access_request_email_{status}.txt'.format(status=instance.status)
    body = render_to_string(template_name, context)

    try:
        send_mail(
            _u('API access request'),
            body,
            settings.API_ACCESS_FROM_EMAIL,
            [instance.user.email],
            fail_silently=False,
        )
    except SMTPException:
        log.exception(u'Error sending API user notification email for request [%s].', instance.id)
    else:
        instance.contacted = True
@python_2_unicode_compatible
class Catalog(models.Model):
    """
    A (non-Django-managed) model for Catalogs in the course discovery service.

    .. no_pii:
    """

    id = models.IntegerField(primary_key=True)  # pylint: disable=invalid-name
    name = models.CharField(max_length=255, null=False, blank=False)
    query = models.TextField(null=False, blank=False)
    viewers = models.TextField()

    class Meta(object):
        # Catalogs live in course discovery, so we do not create any
        # tables in LMS. Instead we override the save method to not
        # touch the database, and use our API client to communicate
        # with discovery.
        managed = False

    def __init__(self, *args, **kwargs):
        # NOTE(review): when an 'attributes' payload is supplied (i.e. data
        # coming from the discovery API), Model.__init__ is deliberately NOT
        # called and the fields are assigned directly -- confirm nothing
        # relies on the skipped Django initialisation.
        attributes = kwargs.get('attributes')
        if attributes:
            self.id = attributes['id']  # pylint: disable=invalid-name
            self.name = attributes['name']
            self.query = attributes['query']
            self.viewers = attributes['viewers']
        else:
            super(Catalog, self).__init__(*args, **kwargs)  # lint-amnesty, pylint: disable=super-with-arguments

    def save(self, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ, unused-argument
        # Intentional no-op: persistence happens in the discovery service.
        return None

    @property
    def attributes(self):
        """Return a dictionary representation of this catalog."""
        return {
            'id': self.id,
            'name': self.name,
            'query': self.query,
            'viewers': self.viewers,
        }

    def __str__(self):
        return u'Catalog {name} [{query}]'.format(name=self.name, query=self.query)
| agpl-3.0 |
FreekingDean/home-assistant | homeassistant/components/device_tracker/demo.py | 30 | 1123 | """Demo platform for the device tracker."""
import random
from homeassistant.components.device_tracker import DOMAIN
def setup_scanner(hass, config, see):
    """Setup the demo tracker.

    Registers a 'demo' service that reports randomized sightings for two
    demo devices, and records one fixed sighting near home at startup.
    """
    def offset():
        """Return a small random GPS offset (either sign)."""
        return (random.randrange(500, 2000)) / 2e5 * random.choice((-1, 1))

    def random_see(dev_id, name):
        """Report a sighting with randomized position, accuracy and battery."""
        see(
            dev_id=dev_id,
            host_name=name,
            gps=(hass.config.latitude + offset(),
                 hass.config.longitude + offset()),
            gps_accuracy=random.randrange(50, 150),
            battery=random.randrange(10, 90)
        )

    def observe(call=None):
        """Randomize sightings for the two demo trackers."""
        random_see('demo_paulus', 'Paulus')
        random_see('demo_anne_therese', 'Anne Therese')

    observe()

    # One fixed, near-home sighting recorded only at setup time.
    see(
        dev_id='demo_home_boy',
        host_name='Home Boy',
        gps=[hass.config.latitude - 0.00002, hass.config.longitude + 0.00002],
        gps_accuracy=20,
        battery=53
    )

    hass.services.register(DOMAIN, 'demo', observe)

    return True
| mit |
larsmans/numpy | doc/summarize.py | 89 | 4836 | #!/usr/bin/env python
"""
summarize.py
Show a summary about which Numpy functions are documented and which are not.
"""
from __future__ import division, absolute_import, print_function
import os, glob, re, sys, inspect, optparse
import collections
sys.path.append(os.path.join(os.path.dirname(__file__), 'sphinxext'))
from sphinxext.phantom_import import import_phantom_module
from sphinxext.autosummary_generate import get_documented
CUR_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(CUR_DIR, 'source', 'reference')
SKIP_LIST = """
# --- aliases:
alltrue sometrue bitwise_not cumproduct
row_stack column_stack product rank
# -- skipped:
core lib f2py dual doc emath ma rec char distutils oldnumeric numarray
testing version matlib
add_docstring add_newdoc add_newdocs fastCopyAndTranspose pkgload
conjugate disp
int0 object0 unicode0 uint0 string_ string0 void0
flagsobj
setup PackageLoader
lib.scimath.arccos lib.scimath.arcsin lib.scimath.arccosh lib.scimath.arcsinh
lib.scimath.arctanh lib.scimath.log lib.scimath.log2 lib.scimath.log10
lib.scimath.logn lib.scimath.power lib.scimath.sqrt
# --- numpy.random:
random random.info random.mtrand random.ranf random.sample random.random
# --- numpy.fft:
fft fft.Tester fft.bench fft.fftpack fft.fftpack_lite fft.helper
fft.info fft.test
# --- numpy.linalg:
linalg linalg.Tester
linalg.bench linalg.info linalg.lapack_lite linalg.linalg linalg.test
# --- numpy.ctypeslib:
ctypeslib ctypeslib.test
""".split()
def main():
    """Parse options, run the documentation check, and print the report."""
    p = optparse.OptionParser(__doc__)
    p.add_option("-c", "--columns", action="store", type="int", dest="cols",
                 default=3, help="Maximum number of columns")
    options, args = p.parse_args()

    if len(args) != 0:
        p.error('Wrong number of arguments')

    # prepare: load the pre-generated API dump, if present, so items can be
    # resolved without importing a built numpy
    fn = os.path.join(CUR_DIR, 'dump.xml')
    if os.path.isfile(fn):
        import_phantom_module(fn)

    # check
    documented, undocumented = check_numpy()

    # report: group documented names by (filename, section, keyword)
    in_sections = {}
    for name, locations in documented.items():
        for (filename, section, keyword, toctree) in locations:
            in_sections.setdefault((filename, section, keyword), []).append(name)

    print("Documented")
    print("==========\n")

    last_filename = None
    for (filename, section, keyword), names in sorted(in_sections.items()):
        if filename != last_filename:
            print("--- %s\n" % filename)
            last_filename = filename
        print(" ** ", section)
        print(format_in_columns(sorted(names), options.cols))
        print("\n")

    print("")
    print("Undocumented")
    print("============\n")
    print(format_in_columns(sorted(undocumented.keys()), options.cols))
def check_numpy():
    """Cross-check Numpy's documented items against its actual public API.

    Returns
    -------
    documented : dict
        Names found in the reference .rst sources, mapped to their locations.
    undocumented : dict
        Public module/callable/class names with no documentation entry.

    In both dicts the leading 'numpy.' prefix is stripped from the keys.
    """
    documented = get_documented(glob.glob(SOURCE_DIR + '/*.rst'))
    undocumented = {}

    import numpy, numpy.fft, numpy.linalg, numpy.random

    for mod in [numpy, numpy.fft, numpy.linalg, numpy.random,
                numpy.ctypeslib, numpy.emath, numpy.ma]:
        undocumented.update(get_undocumented(documented, mod, skip=SKIP_LIST))

    # Strip the 'numpy.' prefix. Iterate over a snapshot of the keys:
    # deleting entries while iterating the live .keys() view raises
    # RuntimeError on Python 3.
    for d in (documented, undocumented):
        for k in list(d.keys()):
            if k.startswith('numpy.'):
                d[k[6:]] = d[k]
                del d[k]

    return documented, undocumented
def get_undocumented(documented, module, module_name=None, skip=None):
    """
    Find out which public items in *module* are not documented.

    Parameters
    ----------
    documented : dict
        Mapping whose keys are the fully-qualified documented item names.
    module : module
        The module to scan.
    module_name : str, optional
        Prefix used to build fully-qualified names; defaults to
        ``module.__name__``.
    skip : sequence of str, optional
        Fully-qualified names to ignore entirely.

    Returns
    -------
    undocumented : dict of bool
        Maps each *undocumented* item's full name to True. Documented and
        skipped items do not appear in the result.
    """
    undocumented = {}
    # Use None as the default instead of a shared mutable list.
    skip = skip or []

    if module_name is None:
        module_name = module.__name__

    for name in dir(module):
        obj = getattr(module, name)
        if name.startswith('_'):
            continue

        full_name = '.'.join([module_name, name])

        if full_name in skip:
            continue
        if full_name.startswith('numpy.') and full_name[6:] in skip:
            continue
        # Only modules, callables and classes are expected to be documented.
        # callable() replaces isinstance(obj, collections.Callable), whose
        # alias was removed from the `collections` namespace in Python 3.10.
        if not (inspect.ismodule(obj) or callable(obj) or inspect.isclass(obj)):
            continue

        if full_name not in documented:
            undocumented[full_name] = True

    return undocumented
def format_in_columns(lst, max_columns):
    """
    Format a list of items as a multi-column string.

    Parameters
    ----------
    lst : sequence
        Items to format; each is converted with str().
    max_columns : int
        Upper bound on the number of columns (at least 1 is always used).

    Returns
    -------
    str
        Newline-joined rows; items are laid out column-major and padded to
        a uniform width. Empty input yields an empty string.
    """
    lst = [str(_m) for _m in lst]
    if not lst:
        # max() below would raise ValueError on an empty sequence.
        return ""

    col_len = max(len(_m) for _m in lst) + 2
    # Fit as many columns as an 80-char line allows, clamped to [1, max_columns].
    ncols = min(80 // col_len, max_columns)
    if ncols <= 0:
        ncols = 1

    if len(lst) % ncols == 0:
        nrows = len(lst) // ncols
    else:
        nrows = 1 + len(lst) // ncols

    fmt = ' %%-%ds ' % (col_len - 2)
    lines = []
    for n in range(nrows):
        # Column-major layout: row n takes every nrows-th item.
        lines.append("".join([fmt % x for x in lst[n::nrows]]))
    return "\n".join(lines)
if __name__ == "__main__": main()
| bsd-3-clause |
dataxu/ansible | lib/ansible/modules/network/aci/aci_aep_to_domain.py | 26 | 8771 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_aep_to_domain
short_description: Bind AEPs to Physical or Virtual Domains (infra:RsDomP)
description:
- Bind AEPs to Physical or Virtual Domains on Cisco ACI fabrics.
notes:
- The C(aep) and C(domain) parameters should exist before using this module.
The M(aci_aep) and M(aci_domain) can be used for these.
- More information about the internal APIC class B(infra:RsDomP) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
version_added: '2.5'
options:
aep:
description:
- The name of the Attachable Access Entity Profile.
aliases: [ aep_name ]
domain:
description:
- Name of the physical or virtual domain being associated with the AEP.
aliases: [ domain_name, domain_profile ]
domain_type:
description:
- Determines if the Domain is physical (phys) or virtual (vmm).
choices: [ fc, l2dom, l3dom, phys, vmm ]
aliases: [ type ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
vm_provider:
description:
- The VM platform for VMM Domains.
- Support for Kubernetes was added in ACI v3.0.
- Support for CloudFoundry, OpenShift and Red Hat was added in ACI v3.1.
choices: [ cloudfoundry, kubernetes, microsoft, openshift, openstack, redhat, vmware ]
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Add AEP to domain binding
aci_aep_to_domain: &binding_present
host: apic
username: admin
password: SomeSecretPassword
aep: test_aep
domain: phys_dom
domain_type: phys
state: present
- name: Remove AEP to domain binding
aci_aep_to_domain: &binding_absent
host: apic
username: admin
password: SomeSecretPassword
aep: test_aep
domain: phys_dom
domain_type: phys
state: absent
- name: Query our AEP to domain binding
aci_aep_to_domain:
host: apic
username: admin
password: SomeSecretPassword
aep: test_aep
domain: phys_dom
domain_type: phys
state: query
- name: Query all AEP to domain bindings
aci_aep_to_domain: &binding_query
host: apic
username: admin
password: SomeSecretPassword
state: query
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
# Maps the module's lowercase 'vm_provider' choices to the capitalized
# provider names that APIC expects inside VMM domain DNs
# (uni/vmmp-<Provider>/dom-<domain>).
VM_PROVIDER_MAPPING = dict(
    cloudfoundry='CloudFoundry',
    kubernetes='Kubernetes',
    microsoft='Microsoft',
    openshift='OpenShift',
    openstack='OpenStack',
    redhat='Redhat',
    vmware='VMware',
)
def main():
    """Entry point: bind, unbind or query an AEP-to-domain association."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        aep=dict(type='str', aliases=['aep_name']),  # Not required for querying all objects
        domain=dict(type='str', aliases=['domain_name', 'domain_profile']),  # Not required for querying all objects
        domain_type=dict(type='str', choices=['fc', 'l2dom', 'l3dom', 'phys', 'vmm'], aliases=['type']),  # Not required for querying all objects
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        vm_provider=dict(type='str', choices=['cloudfoundry', 'kubernetes', 'microsoft', 'openshift', 'openstack', 'redhat', 'vmware']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['domain_type', 'vmm', ['vm_provider']],
            ['state', 'absent', ['aep', 'domain', 'domain_type']],
            ['state', 'present', ['aep', 'domain', 'domain_type']],
        ],
        required_together=[
            ['domain', 'domain_type'],
        ],
    )

    params = module.params
    aep = params['aep']
    domain = params['domain']
    domain_type = params['domain_type']
    vm_provider = params['vm_provider']
    state = params['state']

    # 'vm_provider' only makes sense for virtual (vmm) domains.
    if domain_type != 'vmm' and vm_provider is not None:
        module.fail_json(msg="Domain type '{0}' cannot have a 'vm_provider'".format(domain_type))

    # Build the target domain MO for URL construction. All non-VMM types
    # share the 'uni/<type>-<name>' shape; VMM domains embed the provider.
    if domain_type == 'vmm':
        domain_mo = 'uni/vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
    elif domain_type in ('fc', 'l2dom', 'l3dom', 'phys'):
        domain_mo = 'uni/{0}-{1}'.format(domain_type, domain)
    else:
        domain_mo = None

    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class='infraAttEntityP',
            aci_rn='infra/attentp-{0}'.format(aep),
            filter_target='eq(infraAttEntityP.name, "{0}")'.format(aep),
            module_object=aep,
        ),
        subclass_1=dict(
            aci_class='infraRsDomP',
            aci_rn='rsdomP-[{0}]'.format(domain_mo),
            filter_target='eq(infraRsDomP.tDn, "{0}")'.format(domain_mo),
            module_object=domain_mo,
        ),
    )

    aci.get_existing()

    if state == 'present':
        aci.payload(aci_class='infraRsDomP', class_config=dict(tDn=domain_mo))
        aci.get_diff(aci_class='infraRsDomP')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()

    aci.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
csrocha/OpenUpgrade | addons/website_event_track/controllers/event.py | 332 | 8323 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import collections
import datetime
import re
import pytz
import openerp
import openerp.tools
from openerp.addons.web import http
from openerp.addons.web.http import request
class website_event(http.Controller):
    """Website controllers for event tracks: detail page, agenda grid,
    track listing and the public call-for-proposals form."""

    @http.route(['''/event/<model("event.event"):event>/track/<model("event.track", "[('event_id','=',event[0])]"):track>'''], type='http', auth="public", website=True)
    def event_track_view(self, event, track, **post):
        """Render a single track's detail page."""
        track_obj = request.registry.get('event.track')
        # Re-browse as superuser so public visitors can read the track.
        track = track_obj.browse(request.cr, openerp.SUPERUSER_ID, track.id, context=request.context)
        values = { 'track': track, 'event': track.event_id, 'main_object': track }
        return request.website.render("website_event_track.track_view", values)

    def _prepare_calendar(self, event, event_track_ids):
        """Build the agenda grid data (one column per location) for one day.

        Returns a dict with 'locations' and 'dates'; each cell is a mutable
        list [track, start_date, end_date, rowspan] so rowspans can be
        adjusted in place while scanning tracks in chronological order.
        """
        local_tz = pytz.timezone(event.timezone_of_event or 'UTC')

        locations = {}                  # { location: [track, start_date, end_date, rowspan]}
        dates = []                      # [ (date, {}) ]
        for track in event_track_ids:
            locations.setdefault(track.location_id or False, [])

        forcetr = True
        for track in event_track_ids:
            # Convert the stored UTC datetime to the event's timezone.
            start_date = (datetime.datetime.strptime(track.date, '%Y-%m-%d %H:%M:%S')).replace(tzinfo=pytz.utc).astimezone(local_tz)
            end_date = start_date + datetime.timedelta(hours = (track.duration or 0.5))
            location = track.location_id or False
            locations.setdefault(location, [])

            # New TR, align all events
            if forcetr or (start_date>dates[-1][0]) or not location:
                dates.append((start_date, {}, bool(location)))
                for loc in locations.keys():
                    # Either stretch the cell still running past this row
                    # (rowspan += 1) or pad the column with an empty cell.
                    if locations[loc] and (locations[loc][-1][2] > start_date):
                        locations[loc][-1][3] += 1
                    elif not locations[loc] or locations[loc][-1][2] < start_date:
                        locations[loc].append([False, locations[loc] and locations[loc][-1][2] or dates[0][0], start_date, 1])
                        dates[-1][1][loc] = locations[loc][-1]
                forcetr = not bool(location)

            # Add event
            if locations[location] and locations[location][-1][1] > start_date:
                locations[location][-1][3] -= 1
            locations[location].append([track, start_date, end_date, 1])
            dates[-1][1][location] = locations[location][-1]
        return {
            'locations': locations,
            'dates': dates
        }

    # TODO: not implemented
    @http.route(['''/event/<model("event.event", "[('show_tracks','=',1)]"):event>/agenda'''], type='http', auth="public", website=True)
    def event_agenda(self, event, tag=None, **post):
        """Render the per-day agenda grid plus a speakers summary."""
        days_tracks = collections.defaultdict(lambda: [])
        # Group scheduled tracks by day (location-less tracks sort first).
        for track in sorted(event.track_ids, key=lambda x: (x.date, bool(x.location_id))):
            if not track.date: continue
            days_tracks[track.date[:10]].append(track)

        days = {}
        days_tracks_count = {}
        for day, tracks in days_tracks.iteritems():
            days_tracks_count[day] = len(tracks)
            days[day] = self._prepare_calendar(event, tracks)

        cr, uid, context = request.cr, request.uid, request.context
        track_obj = request.registry['event.track']
        tracks_ids = track_obj.search(cr, openerp.SUPERUSER_ID, [('event_id', '=', event.id)], context=context)
        # Concatenate speaker names per track, most recent speaker first.
        speakers = dict()
        for t in track_obj.browse(cr, openerp.SUPERUSER_ID, tracks_ids, context=context):
            acc = ""
            for speaker in t.speaker_ids:
                acc = speaker.name + u" – " + acc if acc else speaker.name
            speakers[t.id] = acc

        return request.website.render("website_event_track.agenda", {
            'event': event,
            'days': days,
            'days_nbr': days_tracks_count,
            'speakers': speakers,
            'tag': tag
        })

    @http.route([
        '''/event/<model("event.event", "[('show_tracks','=',1)]"):event>/track''',
        '''/event/<model("event.event", "[('show_tracks','=',1)]"):event>/track/tag/<model("event.track.tag"):tag>'''
    ], type='http', auth="public", website=True)
    def event_tracks(self, event, tag=None, **post):
        """List the event's tracks, optionally filtered by a tag."""
        searches = {}
        if tag:
            searches.update(tag=tag.id)
            track_obj = request.registry.get('event.track')
            track_ids = track_obj.search(request.cr, request.uid,
                [("id", "in", [track.id for track in event.track_ids]), ("tag_ids", "=", tag.id)], context=request.context)
            tracks = track_obj.browse(request.cr, request.uid, track_ids, context=request.context)
        else:
            tracks = event.track_ids

        def html2text(html):
            # Crude tag stripper used by the template for plain-text excerpts.
            return re.sub(r'<[^>]+>', "", html)

        values = {
            'event': event,
            'main_object': event,
            'tracks': tracks,
            'tags': event.tracks_tag_ids,
            'searches': searches,
            'html2text': html2text
        }
        return request.website.render("website_event_track.tracks", values)

    @http.route(['''/event/<model("event.event", "[('show_track_proposal','=',1)]"):event>/track_proposal'''], type='http', auth="public", website=True)
    def event_track_proposal(self, event, **post):
        """Render the public call-for-proposals form."""
        values = { 'event': event }
        return request.website.render("website_event_track.event_track_proposal", values)

    @http.route(['/event/<model("event.event"):event>/track_proposal/post'], type='http', auth="public", methods=['POST'], website=True)
    def event_track_proposal_post(self, event, **post):
        """Create a track from a submitted proposal and log the proposer."""
        cr, uid, context = request.cr, request.uid, request.context
        tobj = request.registry['event.track']

        tags = []
        for tag in event.allowed_track_tag_ids:
            if post.get('tag_'+str(tag.id)):
                tags.append(tag.id)

        # Escape all user-provided values before embedding them in HTML.
        e = openerp.tools.escape
        track_description = '''<section data-snippet-id="text-block">
    <div class="container">
        <div class="row">
            <div class="col-md-12 text-center">
                <h2>%s</h2>
            </div>
            <div class="col-md-12">
                <p>%s</p>
            </div>
            <div class="col-md-12">
                <h3>About The Author</h3>
                <p>%s</p>
            </div>
        </div>
    </div>
</section>''' % (e(post['track_name']),
                e(post['description']), e(post['biography']))

        track_id = tobj.create(cr, openerp.SUPERUSER_ID, {
            'name': post['track_name'],
            'event_id': event.id,
            'tag_ids': [(6, 0, tags)],
            'user_id': False,
            'description': track_description
        }, context=context)

        # Keep the proposer's contact details in the track's chatter.
        tobj.message_post(cr, openerp.SUPERUSER_ID, [track_id], body="""Proposed By: %s<br/>
          Mail: <a href="mailto:%s">%s</a><br/>
          Phone: %s""" % (e(post['partner_name']), e(post['email_from']),
          e(post['email_from']), e(post['phone'])), context=context)

        track = tobj.browse(cr, uid, track_id, context=context)
        values = {'track': track, 'event':event}
        return request.website.render("website_event_track.event_track_proposal_success", values)
| agpl-3.0 |
mosen/salt-osx | _states/gatekeeper.py | 1 | 1736 | """
Enable or disable gatekeeper system wide
:maintainer: Mosen <mosen@github.com>
:maturity: new
:platform: darwin
"""
import salt.utils
__virtualname__ = 'gatekeeper'
def __virtual__():
    """Advertise this state module only on macOS hosts."""
    if salt.utils.platform.is_darwin():
        return __virtualname__
    return False
def enabled(name):
    '''
    Enforce gatekeeper as being enabled.

    name
        State identifier; not otherwise used.
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    # Already enabled: succeed with no changes.
    if __salt__['spctl.enabled']():
        ret['result'] = True
        ret['comment'] = 'Gatekeeper is already enabled'
        return ret

    ret['changes']['old'] = {'enabled': False}
    ret['changes']['new'] = {'enabled': True}
    ret['comment'] = 'Gatekeeper has been enabled'

    # Truthiness test instead of '== True' (PEP8 E712); in test mode only
    # report what would change.
    if __opts__['test']:
        ret['comment'] = 'Gatekeeper will be enabled'
        ret['result'] = None
        return ret

    __salt__['spctl.enable']()
    ret['result'] = True
    return ret
def disabled(name):
    '''
    Enforce Gatekeeper (spctl assessment) as being disabled.

    name
        Required by the salt state system; not otherwise used.
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    spctl_enabled = __salt__['spctl.enabled']()

    if not spctl_enabled:
        # Already in the desired state -- nothing to change.
        ret['result'] = True
        ret['comment'] = 'Gatekeeper is already disabled'
        return ret

    ret['changes']['old'] = {'enabled': True}
    ret['changes']['new'] = {'enabled': False}
    ret['comment'] = 'Gatekeeper has been disabled'

    # Test mode: report the pending change without applying it.
    # (Was ``__opts__['test'] == True``; truthiness test is the PEP 8 form.)
    if __opts__['test']:
        ret['comment'] = 'Gatekeeper will be disabled'
        ret['result'] = None
        return ret

    # NOTE(review): the return value of spctl.disable was previously bound to
    # an unused local; result is reported as True unconditionally, matching
    # the original behavior.
    __salt__['spctl.disable']()
    ret['result'] = True
    return ret
| mit |
RCOSDP/waterbutler | tests/providers/s3/test_provider.py | 1 | 52609 | import os
import io
import xml
import json
import time
import base64
import hashlib
import aiohttpretty
from http import client
from urllib import parse
from unittest import mock
import pytest
from boto.compat import BytesIO
from boto.utils import compute_md5
from waterbutler.providers.s3 import S3Provider
from waterbutler.core.path import WaterButlerPath
from waterbutler.core import streams, metadata, exceptions
from waterbutler.providers.s3 import settings as pd_settings
from tests.utils import MockCoroutine
from tests.providers.s3.fixtures import (auth,
settings,
credentials,
file_content,
folder_metadata,
folder_metadata,
version_metadata,
create_session_resp,
folder_and_contents,
complete_upload_resp,
file_header_metadata,
file_metadata_object,
folder_item_metadata,
generic_http_403_resp,
generic_http_404_resp,
list_parts_resp_empty,
folder_empty_metadata,
single_version_metadata,
revision_metadata_object,
upload_parts_headers_list,
list_parts_resp_not_empty,
folder_key_metadata_object,
folder_single_item_metadata,
file_metadata_headers_object,
)
@pytest.fixture
def mock_time(monkeypatch):
    # Freeze ``time.time`` so that boto's signed, expiring S3 URLs are
    # deterministic across a test run.
    mock_time = mock.Mock(return_value=1454684930.0)
    monkeypatch.setattr(time, 'time', mock_time)
@pytest.fixture
def provider(auth, credentials, settings):
    # Build an S3Provider with region detection stubbed out so that no
    # HTTP request is issued during fixture construction.
    provider = S3Provider(auth, credentials, settings)
    provider._check_region = MockCoroutine()
    return provider
@pytest.fixture
def file_like(file_content):
    # In-memory binary file object wrapping the ``file_content`` fixture.
    return io.BytesIO(file_content)
@pytest.fixture
def file_stream(file_like):
    # WaterButler stream wrapper around the in-memory file object.
    return streams.FileStreamReader(file_like)
def location_response(location):
    """Render the XML body S3 returns for a GetBucketLocation request."""
    template = (
        '<?xml version="1.0" encoding="UTF-8"?>\n'
        '<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
        '{}</LocationConstraint>'
    )
    return template.format(location)
def list_objects_response(keys, truncated=False):
    """Build a ListObjects (GET Bucket) XML response listing *keys*.

    :param keys: iterable of object keys, one ``<Contents>`` entry each
    :param truncated: value rendered into ``<IsTruncated>``, signalling
        a paginated listing to the provider
    :rtype: bytes (UTF-8 encoded XML)
    """
    response = '''<?xml version="1.0" encoding="UTF-8"?>
    <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
      <Name>bucket</Name>
      <Prefix/>
      <Marker/>
      <MaxKeys>1000</MaxKeys>'''

    response += '<IsTruncated>' + str(truncated).lower() + '</IsTruncated>'
    response += ''.join(map(
        lambda x: '<Contents><Key>{}</Key></Contents>'.format(x),
        keys
    ))
    response += '</ListBucketResult>'
    return response.encode('utf-8')
def bulk_delete_body(keys):
    """Build the payload and headers for an S3 multi-object-delete POST.

    Returns a ``(payload, headers)`` tuple where *payload* is the
    UTF-8-encoded ``<Delete>`` XML body and *headers* carries the
    Content-Length, Content-MD5 and Content-Type that S3 requires.
    """
    object_tags = ''.join(
        '<Object><Key>{}</Key></Object>'.format(key) for key in keys
    )
    body = '<?xml version="1.0" encoding="UTF-8"?>'
    body += '<Delete>' + object_tags + '</Delete>'
    encoded = body.encode('utf-8')

    digest = base64.b64encode(hashlib.md5(encoded).digest())
    request_headers = {
        'Content-Type': 'text/xml',
        'Content-MD5': digest.decode('ascii'),
        'Content-Length': str(len(encoded)),
    }
    return (encoded, request_headers)
def list_upload_chunks_body(parts_metadata):
    """Build a canned ListParts XML response plus the headers S3 sends.

    ``parts_metadata`` is currently unused; the body is a fixed two-part
    listing.  Returns a ``(payload, headers)`` tuple.
    """
    payload = '''<?xml version="1.0" encoding="UTF-8"?>
    <ListPartsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
      <Bucket>example-bucket</Bucket>
      <Key>example-object</Key>
      <UploadId>XXBsb2FkIElEIGZvciBlbHZpbmcncyVcdS1tb3ZpZS5tMnRzEEEwbG9hZA</UploadId>
      <Initiator>
          <ID>arn:aws:iam::111122223333:user/some-user-11116a31-17b5-4fb7-9df5-b288870f11xx</ID>
          <DisplayName>umat-user-11116a31-17b5-4fb7-9df5-b288870f11xx</DisplayName>
      </Initiator>
      <Owner>
        <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
        <DisplayName>someName</DisplayName>
      </Owner>
      <StorageClass>STANDARD</StorageClass>
      <PartNumberMarker>1</PartNumberMarker>
      <NextPartNumberMarker>3</NextPartNumberMarker>
      <MaxParts>2</MaxParts>
      <IsTruncated>false</IsTruncated>
      <Part>
        <PartNumber>2</PartNumber>
        <LastModified>2010-11-10T20:48:34.000Z</LastModified>
        <ETag>"7778aef83f66abc1fa1e8477f296d394"</ETag>
        <Size>10485760</Size>
      </Part>
      <Part>
        <PartNumber>3</PartNumber>
        <LastModified>2010-11-10T20:48:33.000Z</LastModified>
        <ETag>"aaaa18db4cc2f85cedef654fccc4a4x8"</ETag>
        <Size>10485760</Size>
      </Part>
    </ListPartsResult>
    '''.encode('utf-8')

    # boto's compute_md5 returns (hexdigest, base64digest, size); S3 wants
    # the base64 form in Content-MD5.
    md5 = compute_md5(BytesIO(payload))

    headers = {
        'Content-Length': str(len(payload)),
        'Content-MD5': md5[1],
        'Content-Type': 'text/xml',
    }
    return payload, headers
def build_folder_params(path):
    """Query parameters that scope an S3 listing to *path*'s direct children."""
    return {'delimiter': '/', 'prefix': path.path}
class TestRegionDetection:
    """Verify ``_check_region`` swaps the connection host per bucket region."""

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    @pytest.mark.parametrize("region_name,host", [
        ('', 's3.amazonaws.com'),
        ('EU', 's3-eu-west-1.amazonaws.com'),
        ('us-east-2', 's3-us-east-2.amazonaws.com'),
        ('us-west-1', 's3-us-west-1.amazonaws.com'),
        ('us-west-2', 's3-us-west-2.amazonaws.com'),
        ('ca-central-1', 's3-ca-central-1.amazonaws.com'),
        ('eu-central-1', 's3-eu-central-1.amazonaws.com'),
        ('eu-west-2', 's3-eu-west-2.amazonaws.com'),
        ('ap-northeast-1', 's3-ap-northeast-1.amazonaws.com'),
        ('ap-northeast-2', 's3-ap-northeast-2.amazonaws.com'),
        ('ap-south-1', 's3-ap-south-1.amazonaws.com'),
        ('ap-southeast-1', 's3-ap-southeast-1.amazonaws.com'),
        ('ap-southeast-2', 's3-ap-southeast-2.amazonaws.com'),
        ('sa-east-1', 's3-sa-east-1.amazonaws.com'),
    ])
    async def test_region_host(self, auth, credentials, settings, region_name, host, mock_time):
        # Build a real provider here (the shared ``provider`` fixture stubs
        # out _check_region, which is exactly what this test exercises).
        provider = S3Provider(auth, credentials, settings)
        # NOTE(review): orig_host is captured but never asserted against.
        orig_host = provider.connection.host

        region_url = provider.bucket.generate_url(
            100,
            'GET',
            query_parameters={'location': ''},
        )
        # Mock the GetBucketLocation call with the parametrized region.
        aiohttpretty.register_uri('GET',
                                  region_url,
                                  status=200,
                                  body=location_response(region_name))

        await provider._check_region()
        assert provider.connection.host == host
class TestValidatePath:
    """Path validation: v1 (strict kind checking) and v0 semantics."""

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_validate_v1_path_file(self, provider, file_header_metadata, mock_time):
        """A file path validates via HEAD; its folder form must 404."""
        file_path = 'foobah'
        params = {'prefix': '/' + file_path + '/', 'delimiter': '/'}
        good_metadata_url = provider.bucket.new_key('/' + file_path).generate_url(100, 'HEAD')
        bad_metadata_url = provider.bucket.generate_url(100)
        aiohttpretty.register_uri('HEAD', good_metadata_url, headers=file_header_metadata)
        aiohttpretty.register_uri('GET', bad_metadata_url, params=params, status=404)

        assert WaterButlerPath('/') == await provider.validate_v1_path('/')

        try:
            wb_path_v1 = await provider.validate_v1_path('/' + file_path)
        except Exception as exc:
            pytest.fail(str(exc))

        with pytest.raises(exceptions.NotFoundError) as exc:
            await provider.validate_v1_path('/' + file_path + '/')

        assert exc.value.code == client.NOT_FOUND

        wb_path_v0 = await provider.validate_path('/' + file_path)
        assert wb_path_v1 == wb_path_v0

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_validate_v1_path_folder(self, provider, folder_metadata, mock_time):
        """A folder path validates via listing; its file form must 404."""
        folder_path = 'Photos'
        params = {'prefix': '/' + folder_path + '/', 'delimiter': '/'}
        good_metadata_url = provider.bucket.generate_url(100)
        bad_metadata_url = provider.bucket.new_key('/' + folder_path).generate_url(100, 'HEAD')
        aiohttpretty.register_uri(
            'GET', good_metadata_url, params=params,
            body=folder_metadata, headers={'Content-Type': 'application/xml'}
        )
        aiohttpretty.register_uri('HEAD', bad_metadata_url, status=404)

        try:
            wb_path_v1 = await provider.validate_v1_path('/' + folder_path + '/')
        except Exception as exc:
            pytest.fail(str(exc))

        with pytest.raises(exceptions.NotFoundError) as exc:
            await provider.validate_v1_path('/' + folder_path)

        assert exc.value.code == client.NOT_FOUND

        wb_path_v0 = await provider.validate_path('/' + folder_path + '/')
        assert wb_path_v1 == wb_path_v0

    @pytest.mark.asyncio
    async def test_normal_name(self, provider, mock_time):
        """A plain file path parses into name/parent/kind correctly."""
        path = await provider.validate_path('/this/is/a/path.txt')
        assert path.name == 'path.txt'
        assert path.parent.name == 'a'
        assert path.is_file
        assert not path.is_dir
        assert not path.is_root

    @pytest.mark.asyncio
    async def test_folder(self, provider, mock_time):
        """A trailing slash marks the path as a folder."""
        path = await provider.validate_path('/this/is/a/folder/')
        assert path.name == 'folder'
        assert path.parent.name == 'a'
        assert not path.is_file
        assert path.is_dir
        assert not path.is_root

    @pytest.mark.asyncio
    async def test_root(self, provider, mock_time):
        """The bare slash resolves to the root path object.

        Fixed: this test was a copy-paste duplicate of ``test_folder`` and
        never actually exercised the root path.
        """
        path = await provider.validate_path('/')
        assert not path.is_file
        assert path.is_dir
        assert path.is_root
class TestCRUD:
    """Download, upload (contiguous and chunked), and delete behavior.

    Fixes applied: ``test_single_item_folder_delete`` previously ended by
    re-registering the POST mock instead of asserting it was called
    (copy-paste bug); ``test_delete_comfirm_delete`` renamed to fix the
    typo in its name.
    """

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_download(self, provider, mock_time):
        """Plain download streams the object body."""
        path = WaterButlerPath('/muhtriangle')
        response_headers = {'response-content-disposition': 'attachment'}
        url = provider.bucket.new_key(path.path).generate_url(100,
                                                              response_headers=response_headers)
        aiohttpretty.register_uri('GET', url, body=b'delicious', auto_length=True)

        result = await provider.download(path)
        content = await result.read()

        assert content == b'delicious'

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_download_range(self, provider, mock_time):
        """Ranged download sends a Range header and yields a partial stream."""
        path = WaterButlerPath('/muhtriangle')
        response_headers = {'response-content-disposition': 'attachment'}
        url = provider.bucket.new_key(path.path).generate_url(100,
                                                              response_headers=response_headers)
        aiohttpretty.register_uri('GET', url, body=b'de', auto_length=True, status=206)

        result = await provider.download(path, range=(0, 1))
        assert result.partial
        content = await result.read()

        assert content == b'de'
        assert aiohttpretty.has_call(method='GET', uri=url, headers={'Range': 'bytes=0-1'})

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_download_version(self, provider, mock_time):
        """Download of a specific revision adds the versionId parameter."""
        path = WaterButlerPath('/muhtriangle')
        url = provider.bucket.new_key(path.path).generate_url(
            100,
            query_parameters={'versionId': 'someversion'},
            response_headers={'response-content-disposition': 'attachment'},
        )
        aiohttpretty.register_uri('GET', url, body=b'delicious', auto_length=True)

        result = await provider.download(path, revision='someversion')
        content = await result.read()

        assert content == b'delicious'

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_download_display_name(self, provider, mock_time):
        """displayName overrides the filename in Content-Disposition."""
        path = WaterButlerPath('/muhtriangle')
        response_headers = {'response-content-disposition': "attachment; filename*=UTF-8''tuna"}
        url = provider.bucket.new_key(path.path).generate_url(100,
                                                              response_headers=response_headers)
        aiohttpretty.register_uri('GET', url, body=b'delicious', auto_length=True)

        result = await provider.download(path, displayName='tuna')
        content = await result.read()

        assert content == b'delicious'

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_download_not_found(self, provider, mock_time):
        """A 404 from S3 surfaces as DownloadError."""
        path = WaterButlerPath('/muhtriangle')
        response_headers = {'response-content-disposition': 'attachment'}
        url = provider.bucket.new_key(path.path).generate_url(100,
                                                              response_headers=response_headers)
        aiohttpretty.register_uri('GET', url, status=404)

        with pytest.raises(exceptions.DownloadError):
            await provider.download(path)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_download_folder_400s(self, provider, mock_time):
        """Downloading a folder path is rejected with a 400."""
        with pytest.raises(exceptions.DownloadError) as e:
            await provider.download(WaterButlerPath('/cool/folder/mom/'))
        assert e.value.code == 400

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_upload_update(self,
                                 provider,
                                 file_content,
                                 file_stream,
                                 file_header_metadata,
                                 mock_time):
        """Uploading over an existing key reports created == False."""
        path = WaterButlerPath('/foobah')
        content_md5 = hashlib.md5(file_content).hexdigest()
        url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
        metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
        aiohttpretty.register_uri('HEAD', metadata_url, headers=file_header_metadata)
        header = {'ETag': '"{}"'.format(content_md5)}
        aiohttpretty.register_uri('PUT', url, status=201, headers=header)

        metadata, created = await provider.upload(file_stream, path)

        assert metadata.kind == 'file'
        assert not created
        assert aiohttpretty.has_call(method='PUT', uri=url)
        assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_upload_encrypted(self,
                                    provider,
                                    file_content,
                                    file_stream,
                                    file_header_metadata,
                                    mock_time):
        """Server-side encryption is requested and reported in metadata."""
        # Set trigger for encrypt_key=True in s3.provider.upload
        provider.encrypt_uploads = True
        path = WaterButlerPath('/foobah')
        content_md5 = hashlib.md5(file_content).hexdigest()
        url = provider.bucket.new_key(path.path).generate_url(100, 'PUT', encrypt_key=True)
        metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')

        # First HEAD 404s (file does not exist yet), second returns metadata.
        aiohttpretty.register_uri(
            'HEAD',
            metadata_url,
            responses=[
                {'status': 404},
                {'headers': file_header_metadata},
            ],
        )
        headers = {'ETag': '"{}"'.format(content_md5)}
        aiohttpretty.register_uri('PUT', url, status=200, headers=headers)

        metadata, created = await provider.upload(file_stream, path)

        assert metadata.kind == 'file'
        assert metadata.extra['encryption'] == 'AES256'
        assert created
        assert aiohttpretty.has_call(method='PUT', uri=url)
        assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.encrypt_uploads = False

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_limit_chunked(self, provider, file_stream, mock_time):
        """Streams over the contiguous limit go through _chunked_upload."""
        assert file_stream.size == 6
        provider.CONTIGUOUS_UPLOAD_SIZE_LIMIT = 5
        provider.CHUNK_SIZE = 2
        path = WaterButlerPath('/foobah')
        provider._chunked_upload = MockCoroutine()
        provider.metadata = MockCoroutine()

        await provider.upload(file_stream, path)

        assert provider._chunked_upload.called_with(file_stream, path)

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.CONTIGUOUS_UPLOAD_SIZE_LIMIT = pd_settings.CONTIGUOUS_UPLOAD_SIZE_LIMIT
        provider.CHUNK_SIZE = pd_settings.CHUNK_SIZE

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_limit_contiguous(self, provider, file_stream, mock_time):
        """Streams under the limit go through _contiguous_upload."""
        assert file_stream.size == 6
        provider.CONTIGUOUS_UPLOAD_SIZE_LIMIT = 10
        provider.CHUNK_SIZE = 2
        path = WaterButlerPath('/foobah')
        provider._contiguous_upload = MockCoroutine()
        provider.metadata = MockCoroutine()

        await provider.upload(file_stream, path)

        assert provider._contiguous_upload.called_with(file_stream, path)

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.CONTIGUOUS_UPLOAD_SIZE_LIMIT = pd_settings.CONTIGUOUS_UPLOAD_SIZE_LIMIT
        provider.CHUNK_SIZE = pd_settings.CHUNK_SIZE

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_create_upload_session_no_encryption(self, provider,
                                                                      create_session_resp,
                                                                      mock_time):
        """Session creation POSTs ?uploads and parses the upload id."""
        path = WaterButlerPath('/foobah')
        init_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'POST',
            query_parameters={'uploads': ''},
        )
        aiohttpretty.register_uri('POST', init_url, body=create_session_resp, status=200)

        session_id = await provider._create_upload_session(path)

        expected_session_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                              '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        assert aiohttpretty.has_call(method='POST', uri=init_url)
        assert session_id is not None
        assert session_id == expected_session_id

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_create_upload_session_with_encryption(self, provider,
                                                                        create_session_resp,
                                                                        mock_time):
        """Session creation passes encrypt_key when encryption is enabled."""
        provider.encrypt_uploads = True
        path = WaterButlerPath('/foobah')
        init_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'POST',
            query_parameters={'uploads': ''},
            encrypt_key=True
        )
        aiohttpretty.register_uri('POST', init_url, body=create_session_resp, status=200)

        session_id = await provider._create_upload_session(path)

        expected_session_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                              '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        assert aiohttpretty.has_call(method='POST', uri=init_url)
        assert session_id is not None
        assert session_id == expected_session_id

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.encrypt_uploads = False

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_upload_parts(self, provider, file_stream,
                                               upload_parts_headers_list):
        """A 6-byte stream with 2-byte chunks yields three part uploads."""
        assert file_stream.size == 6
        provider.CHUNK_SIZE = 2

        side_effect = json.loads(upload_parts_headers_list).get('headers_list')
        assert len(side_effect) == 3
        provider._upload_part = MockCoroutine(side_effect=side_effect)

        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        parts_metadata = await provider._upload_parts(file_stream, path, upload_id)

        assert provider._upload_part.call_count == 3
        assert len(parts_metadata) == 3
        assert parts_metadata == side_effect

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.CHUNK_SIZE = pd_settings.CHUNK_SIZE

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_upload_parts_remainder(self, provider,
                                                         upload_parts_headers_list):
        """The final part carries the remainder when size % CHUNK_SIZE != 0."""
        file_stream = streams.StringStream('abcdefghijklmnopqrst')
        assert file_stream.size == 20
        provider.CHUNK_SIZE = 9

        side_effect = json.loads(upload_parts_headers_list).get('headers_list')
        assert len(side_effect) == 3
        provider._upload_part = MockCoroutine(side_effect=side_effect)

        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        parts_metadata = await provider._upload_parts(file_stream, path, upload_id)

        assert provider._upload_part.call_count == 3
        provider._upload_part.assert_has_calls([
            mock.call(file_stream, path, upload_id, 1, 9),
            mock.call(file_stream, path, upload_id, 2, 9),
            mock.call(file_stream, path, upload_id, 3, 2),
        ])
        assert len(parts_metadata) == 3
        assert parts_metadata == side_effect

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.CHUNK_SIZE = pd_settings.CHUNK_SIZE

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_upload_part(self, provider, file_stream,
                                              upload_parts_headers_list,
                                              mock_time):
        """A single part PUT carries partNumber/uploadId and returns headers."""
        assert file_stream.size == 6
        provider.CHUNK_SIZE = 2

        path = WaterButlerPath('/foobah')
        chunk_number = 1
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        params = {
            'partNumber': str(chunk_number),
            'uploadId': upload_id,
        }
        headers = {'Content-Length': str(provider.CHUNK_SIZE)}
        upload_part_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'PUT',
            query_parameters=params,
            headers=headers
        )
        # aiohttp resp headers use upper case
        part_headers = json.loads(upload_parts_headers_list).get('headers_list')[0]
        part_headers = {k.upper(): v for k, v in part_headers.items()}
        aiohttpretty.register_uri('PUT', upload_part_url, status=200, headers=part_headers)

        part_metadata = await provider._upload_part(file_stream, path, upload_id, chunk_number,
                                                    provider.CHUNK_SIZE)

        assert aiohttpretty.has_call(method='PUT', uri=upload_part_url)
        assert part_headers == part_metadata

        # Fixtures are shared between tests. Need to revert the settings back.
        provider.CHUNK_SIZE = pd_settings.CHUNK_SIZE

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_chunked_upload_complete_multipart_upload(self, provider,
                                                            upload_parts_headers_list,
                                                            complete_upload_resp, mock_time):
        """Completion POSTs the part-number/ETag manifest built from headers."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        params = {'uploadId': upload_id}

        payload = '<?xml version="1.0" encoding="UTF-8"?>'
        payload += '<CompleteMultipartUpload>'
        # aiohttp resp headers are upper case
        headers_list = json.loads(upload_parts_headers_list).get('headers_list')
        headers_list = [{k.upper(): v for k, v in headers.items()} for headers in headers_list]
        for i, part in enumerate(headers_list):
            payload += '<Part>'
            payload += '<PartNumber>{}</PartNumber>'.format(i+1)  # part number must be >= 1
            payload += '<ETag>{}</ETag>'.format(xml.sax.saxutils.escape(part['ETAG']))
            payload += '</Part>'
        payload += '</CompleteMultipartUpload>'
        payload = payload.encode('utf-8')

        headers = {
            'Content-Length': str(len(payload)),
            'Content-MD5': compute_md5(BytesIO(payload))[1],
            'Content-Type': 'text/xml',
        }
        complete_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'POST',
            headers=headers,
            query_parameters=params
        )
        aiohttpretty.register_uri(
            'POST',
            complete_url,
            status=200,
            body=complete_upload_resp
        )

        await provider._complete_multipart_upload(path, upload_id, headers_list)

        assert aiohttpretty.has_call(method='POST', uri=complete_url, params=params)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_abort_chunked_upload_session_deleted(self, provider, generic_http_404_resp,
                                                        mock_time):
        """Abort succeeds when the session is already gone (list 404s)."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        abort_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'DELETE',
            query_parameters={'uploadId': upload_id}
        )
        list_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'GET',
            query_parameters={'uploadId': upload_id}
        )
        aiohttpretty.register_uri('DELETE', abort_url, status=204)
        aiohttpretty.register_uri('GET', list_url, body=generic_http_404_resp, status=404)

        aborted = await provider._abort_chunked_upload(path, upload_id)

        assert aiohttpretty.has_call(method='DELETE', uri=abort_url)
        assert aborted is True

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_abort_chunked_upload_list_empty(self, provider, list_parts_resp_empty,
                                                   mock_time):
        """Abort succeeds when no parts remain after the DELETE."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        abort_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'DELETE',
            query_parameters={'uploadId': upload_id}
        )
        list_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'GET',
            query_parameters={'uploadId': upload_id}
        )
        aiohttpretty.register_uri('DELETE', abort_url, status=204)
        aiohttpretty.register_uri('GET', list_url, body=list_parts_resp_empty, status=200)

        aborted = await provider._abort_chunked_upload(path, upload_id)

        assert aiohttpretty.has_call(method='DELETE', uri=abort_url)
        assert aiohttpretty.has_call(method='GET', uri=list_url)
        assert aborted is True

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_abort_chunked_upload_list_not_empty(self,
                                                       provider,
                                                       list_parts_resp_not_empty,
                                                       mock_time):
        """Abort reports failure when parts are still listed afterwards."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        abort_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'DELETE',
            query_parameters={'uploadId': upload_id}
        )
        list_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'GET',
            query_parameters={'uploadId': upload_id}
        )
        aiohttpretty.register_uri('DELETE', abort_url, status=204)
        aiohttpretty.register_uri('GET', list_url, body=list_parts_resp_not_empty, status=200)

        aborted = await provider._abort_chunked_upload(path, upload_id)

        assert aiohttpretty.has_call(method='DELETE', uri=abort_url)
        assert aborted is False

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_list_uploaded_chunks_session_not_found(self,
                                                          provider,
                                                          generic_http_404_resp,
                                                          mock_time):
        """Listing parts of a deleted session flags session_deleted."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        list_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'GET',
            query_parameters={'uploadId': upload_id}
        )
        aiohttpretty.register_uri('GET', list_url, body=generic_http_404_resp, status=404)

        resp_xml, session_deleted = await provider._list_uploaded_chunks(path, upload_id)

        assert aiohttpretty.has_call(method='GET', uri=list_url)
        assert resp_xml is not None
        assert session_deleted is True

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_list_uploaded_chunks_empty_list(self,
                                                   provider,
                                                   list_parts_resp_empty,
                                                   mock_time):
        """An empty part listing still returns XML with the session alive."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        list_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'GET',
            query_parameters={'uploadId': upload_id}
        )
        aiohttpretty.register_uri('GET', list_url, body=list_parts_resp_empty, status=200)

        resp_xml, session_deleted = await provider._list_uploaded_chunks(path, upload_id)

        assert aiohttpretty.has_call(method='GET', uri=list_url)
        assert resp_xml is not None
        assert session_deleted is False

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_list_uploaded_chunks_list_not_empty(self,
                                                       provider,
                                                       list_parts_resp_not_empty,
                                                       mock_time):
        """A non-empty part listing returns XML with the session alive."""
        path = WaterButlerPath('/foobah')
        upload_id = 'EXAMPLEJZ6e0YupT2h66iePQCc9IEbYbDUy4RTpMeoSMLPRp8Z5o1u' \
                    '8feSRonpvnWsKKG35tI2LB9VDPiCgTy.Gq2VxQLYjrue4Nq.NBdqI-'
        list_url = provider.bucket.new_key(path.path).generate_url(
            100,
            'GET',
            query_parameters={'uploadId': upload_id}
        )
        aiohttpretty.register_uri('GET', list_url, body=list_parts_resp_not_empty, status=200)

        resp_xml, session_deleted = await provider._list_uploaded_chunks(path, upload_id)

        assert aiohttpretty.has_call(method='GET', uri=list_url)
        assert resp_xml is not None
        assert session_deleted is False

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_delete(self, provider, mock_time):
        """Deleting a file issues a single DELETE on the key."""
        path = WaterButlerPath('/some-file')
        url = provider.bucket.new_key(path.path).generate_url(100, 'DELETE')
        aiohttpretty.register_uri('DELETE', url, status=200)

        await provider.delete(path)

        assert aiohttpretty.has_call(method='DELETE', uri=url)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_delete_confirm_delete(self, provider, folder_and_contents, mock_time):
        """Deleting the root requires the confirm_delete flag."""
        path = WaterButlerPath('/')

        query_url = provider.bucket.generate_url(100, 'GET')
        aiohttpretty.register_uri(
            'GET',
            query_url,
            params={'prefix': ''},
            body=folder_and_contents,
            status=200,
        )

        (payload, headers) = bulk_delete_body(
            ['thisfolder/', 'thisfolder/item1', 'thisfolder/item2']
        )
        delete_url = provider.bucket.generate_url(
            100,
            'POST',
            query_parameters={'delete': ''},
            headers=headers,
        )
        aiohttpretty.register_uri('POST', delete_url, status=204)

        with pytest.raises(exceptions.DeleteError):
            await provider.delete(path)

        await provider.delete(path, confirm_delete=1)

        assert aiohttpretty.has_call(method='POST', uri=delete_url)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_folder_delete(self, provider, folder_and_contents, mock_time):
        """Folder delete lists contents then bulk-deletes them."""
        path = WaterButlerPath('/some-folder/')

        params = {'prefix': 'some-folder/'}
        query_url = provider.bucket.generate_url(100, 'GET')
        aiohttpretty.register_uri(
            'GET',
            query_url,
            params=params,
            body=folder_and_contents,
            status=200,
        )

        query_params = {'delete': ''}
        (payload, headers) = bulk_delete_body(
            ['thisfolder/', 'thisfolder/item1', 'thisfolder/item2']
        )
        delete_url = provider.bucket.generate_url(
            100,
            'POST',
            query_parameters=query_params,
            headers=headers,
        )
        aiohttpretty.register_uri('POST', delete_url, status=204)

        await provider.delete(path)

        assert aiohttpretty.has_call(method='GET', uri=query_url, params=params)
        assert aiohttpretty.has_call(method='POST', uri=delete_url)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_single_item_folder_delete(self,
                                             provider,
                                             folder_single_item_metadata,
                                             mock_time):
        """A one-item folder is bulk-deleted by its single key."""
        path = WaterButlerPath('/single-thing-folder/')

        params = {'prefix': 'single-thing-folder/'}
        query_url = provider.bucket.generate_url(100, 'GET')
        aiohttpretty.register_uri(
            'GET',
            query_url,
            params=params,
            body=folder_single_item_metadata,
            status=200,
        )

        (payload, headers) = bulk_delete_body(
            ['my-image.jpg']
        )
        delete_url = provider.bucket.generate_url(
            100,
            'POST',
            query_parameters={'delete': ''},
            headers=headers,
        )
        aiohttpretty.register_uri('POST', delete_url, status=204)

        await provider.delete(path)

        assert aiohttpretty.has_call(method='GET', uri=query_url, params=params)
        # Fixed: this was a duplicated register_uri call; the POST was
        # never actually verified.
        assert aiohttpretty.has_call(method='POST', uri=delete_url)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_empty_folder_delete(self, provider, folder_empty_metadata, mock_time):
        """Deleting a folder with no listed contents raises NotFoundError."""
        path = WaterButlerPath('/empty-folder/')

        params = {'prefix': 'empty-folder/'}
        query_url = provider.bucket.generate_url(100, 'GET')
        aiohttpretty.register_uri(
            'GET',
            query_url,
            params=params,
            body=folder_empty_metadata,
            status=200,
        )

        with pytest.raises(exceptions.NotFoundError):
            await provider.delete(path)

        assert aiohttpretty.has_call(method='GET', uri=query_url, params=params)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_large_folder_delete(self, provider, mock_time):
        """Truncated listings are paged and deleted in two bulk batches."""
        path = WaterButlerPath('/some-folder/')

        query_url = provider.bucket.generate_url(100, 'GET')

        keys_one = [str(x) for x in range(2500, 3500)]
        response_one = list_objects_response(keys_one, truncated=True)
        params_one = {'prefix': 'some-folder/'}

        keys_two = [str(x) for x in range(3500, 3601)]
        response_two = list_objects_response(keys_two)
        # Second page resumes from the last key of the first page.
        params_two = {'prefix': 'some-folder/', 'marker': '3499'}

        aiohttpretty.register_uri(
            'GET',
            query_url,
            params=params_one,
            body=response_one,
            status=200,
        )
        aiohttpretty.register_uri(
            'GET',
            query_url,
            params=params_two,
            body=response_two,
            status=200,
        )

        query_params = {'delete': None}

        (payload_one, headers_one) = bulk_delete_body(keys_one)
        delete_url_one = provider.bucket.generate_url(
            100,
            'POST',
            query_parameters=query_params,
            headers=headers_one,
        )
        aiohttpretty.register_uri('POST', delete_url_one, status=204)

        (payload_two, headers_two) = bulk_delete_body(keys_two)
        delete_url_two = provider.bucket.generate_url(
            100,
            'POST',
            query_parameters=query_params,
            headers=headers_two,
        )
        aiohttpretty.register_uri('POST', delete_url_two, status=204)

        await provider.delete(path)

        assert aiohttpretty.has_call(method='GET', uri=query_url, params=params_one)
        assert aiohttpretty.has_call(method='GET', uri=query_url, params=params_two)
        assert aiohttpretty.has_call(method='POST', uri=delete_url_one)
        assert aiohttpretty.has_call(method='POST', uri=delete_url_two)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_accepts_url(self, provider, mock_time):
        """accept_url=True short-circuits download to a signed URL."""
        path = WaterButlerPath('/my-image')
        response_headers = {'response-content-disposition': 'attachment'}
        url = provider.bucket.new_key(path.path).generate_url(100,
                                                              'GET',
                                                              response_headers=response_headers)

        ret_url = await provider.download(path, accept_url=True)

        assert ret_url == url
class TestMetadata:
    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_metadata_folder(self, provider, folder_metadata, mock_time):
        """Folder listing returns mixed folder/file children with hashes."""
        path = WaterButlerPath('/darp/')
        url = provider.bucket.generate_url(100)
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, body=folder_metadata,
                                  headers={'Content-Type': 'application/xml'})

        result = await provider.metadata(path)

        assert isinstance(result, list)
        assert len(result) == 3
        # NOTE(review): leading space in ' photos' mirrors the fixture key.
        assert result[0].name == ' photos'
        assert result[1].name == 'my-image.jpg'
        assert result[2].extra['md5'] == '1b2cf535f27731c974343645a3985328'
        assert result[2].extra['hashes']['md5'] == '1b2cf535f27731c974343645a3985328'
    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_metadata_folder_self_listing(self, provider, folder_and_contents, mock_time):
        """A folder listing excludes the folder's own key from its children."""
        path = WaterButlerPath('/thisfolder/')
        url = provider.bucket.generate_url(100)
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, body=folder_and_contents)

        result = await provider.metadata(path)

        assert isinstance(result, list)
        assert len(result) == 2
        for fobj in result:
            assert fobj.name != path.path
    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_folder_metadata_folder_item(self, provider, folder_item_metadata, mock_time):
        """A listing containing only a sub-folder yields one folder entry."""
        path = WaterButlerPath('/')
        url = provider.bucket.generate_url(100)
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, body=folder_item_metadata,
                                  headers={'Content-Type': 'application/xml'})

        result = await provider.metadata(path)

        assert isinstance(result, list)
        assert len(result) == 1
        assert result[0].kind == 'folder'
    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_empty_metadata_folder(self, provider, folder_empty_metadata, mock_time):
        """An empty (non-root) folder produces an empty listing."""
        path = WaterButlerPath('/this-is-not-the-root/')
        metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
        url = provider.bucket.generate_url(100)
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, body=folder_empty_metadata,
                                  headers={'Content-Type': 'application/xml'})
        # NOTE(review): ``header=`` (singular) looks like a typo for
        # ``headers=`` and is presumably ignored by aiohttpretty -- confirm.
        aiohttpretty.register_uri('HEAD', metadata_url, header=folder_empty_metadata,
                                  headers={'Content-Type': 'application/xml'})

        result = await provider.metadata(path)

        assert isinstance(result, list)
        assert len(result) == 0
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_file(self, provider, file_header_metadata, mock_time):
path = WaterButlerPath('/Foo/Bar/my-image.jpg')
url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
aiohttpretty.register_uri('HEAD', url, headers=file_header_metadata)
result = await provider.metadata(path)
assert isinstance(result, metadata.BaseFileMetadata)
assert result.path == str(path)
assert result.name == 'my-image.jpg'
assert result.extra['md5'] == 'fba9dede5f27731c9771645a39863328'
assert result.extra['hashes']['md5'] == 'fba9dede5f27731c9771645a39863328'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_file_lastest_revision(self, provider, file_header_metadata, mock_time):
path = WaterButlerPath('/Foo/Bar/my-image.jpg')
url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
aiohttpretty.register_uri('HEAD', url, headers=file_header_metadata)
result = await provider.metadata(path, revision='Latest')
assert isinstance(result, metadata.BaseFileMetadata)
assert result.path == str(path)
assert result.name == 'my-image.jpg'
assert result.extra['md5'] == 'fba9dede5f27731c9771645a39863328'
assert result.extra['hashes']['md5'] == 'fba9dede5f27731c9771645a39863328'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_file_missing(self, provider, mock_time):
path = WaterButlerPath('/notfound.txt')
url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
aiohttpretty.register_uri('HEAD', url, status=404)
with pytest.raises(exceptions.MetadataError):
await provider.metadata(path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload(self,
provider,
file_content,
file_stream,
file_header_metadata,
mock_time):
path = WaterButlerPath('/foobah')
content_md5 = hashlib.md5(file_content).hexdigest()
url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
aiohttpretty.register_uri(
'HEAD',
metadata_url,
responses=[
{'status': 404},
{'headers': file_header_metadata},
],
)
headers = {'ETag': '"{}"'.format(content_md5)}
aiohttpretty.register_uri('PUT', url, status=200, headers=headers),
metadata, created = await provider.upload(file_stream, path)
assert metadata.kind == 'file'
assert created
assert aiohttpretty.has_call(method='PUT', uri=url)
assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_checksum_mismatch(self,
provider,
file_stream,
file_header_metadata,
mock_time):
path = WaterButlerPath('/foobah')
url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
metadata_url = provider.bucket.new_key(path.path).generate_url(100, 'HEAD')
aiohttpretty.register_uri(
'HEAD',
metadata_url,
responses=[
{'status': 404},
{'headers': file_header_metadata},
],
)
aiohttpretty.register_uri('PUT', url, status=200, headers={'ETag': '"bad hash"'})
with pytest.raises(exceptions.UploadChecksumMismatchError):
await provider.upload(file_stream, path)
assert aiohttpretty.has_call(method='PUT', uri=url)
assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
class TestCreateFolder:
    """create_folder() error paths and the happy path."""

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_raise_409(self, provider, folder_metadata, mock_time):
        # An existing listing for the prefix means the folder already exists.
        path = WaterButlerPath('/alreadyexists/')
        url = provider.bucket.generate_url(100, 'GET')
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, body=folder_metadata,
                                  headers={'Content-Type': 'application/xml'})

        with pytest.raises(exceptions.FolderNamingConflict) as e:
            await provider.create_folder(path)

        assert e.value.code == 409
        assert e.value.message == ('Cannot create folder "alreadyexists", because a file or '
                                   'folder already exists with that name')

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_must_start_with_slash(self, provider, mock_time):
        # A non-directory path (no trailing slash) is rejected up front.
        path = WaterButlerPath('/alreadyexists')

        with pytest.raises(exceptions.CreateFolderError) as e:
            await provider.create_folder(path)

        assert e.value.code == 400
        assert e.value.message == 'Path must be a directory'

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_errors_out(self, provider, mock_time):
        # Existence check 404s, then the PUT itself fails with 403.
        path = WaterButlerPath('/alreadyexists/')
        url = provider.bucket.generate_url(100, 'GET')
        params = build_folder_params(path)
        create_url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
        aiohttpretty.register_uri('GET', url, params=params, status=404)
        aiohttpretty.register_uri('PUT', create_url, status=403)

        with pytest.raises(exceptions.CreateFolderError) as e:
            await provider.create_folder(path)

        assert e.value.code == 403

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_errors_out_metadata(self, provider, mock_time):
        # A failure during the pre-existence check propagates as MetadataError.
        path = WaterButlerPath('/alreadyexists/')
        url = provider.bucket.generate_url(100, 'GET')
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, status=403)

        with pytest.raises(exceptions.MetadataError) as e:
            await provider.create_folder(path)

        assert e.value.code == 403

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_creates(self, provider, mock_time):
        # 404 on the existence check, successful PUT -> folder metadata back.
        path = WaterButlerPath('/doesntalreadyexists/')
        url = provider.bucket.generate_url(100, 'GET')
        params = build_folder_params(path)
        create_url = provider.bucket.new_key(path.path).generate_url(100, 'PUT')
        aiohttpretty.register_uri('GET', url, params=params, status=404)
        aiohttpretty.register_uri('PUT', create_url, status=200)

        resp = await provider.create_folder(path)

        assert resp.kind == 'folder'
        assert resp.name == 'doesntalreadyexists'
        assert resp.path == '/doesntalreadyexists/'
class TestOperations:
    """intra_copy, revision listing and capability predicates."""

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_intra_copy(self, provider, file_header_metadata, mock_time):
        # Server-side copy: PUT on the destination key carrying an
        # x-amz-copy-source header, then HEAD for the new metadata.
        source_path = WaterButlerPath('/source')
        dest_path = WaterButlerPath('/dest')
        metadata_url = provider.bucket.new_key(dest_path.path).generate_url(100, 'HEAD')
        aiohttpretty.register_uri('HEAD', metadata_url, headers=file_header_metadata)

        header_path = '/' + os.path.join(provider.settings['bucket'], source_path.path)
        headers = {'x-amz-copy-source': parse.quote(header_path)}
        url = provider.bucket.new_key(dest_path.path).generate_url(100, 'PUT', headers=headers)
        aiohttpretty.register_uri('PUT', url, status=200)

        metadata, exists = await provider.intra_copy(provider, source_path, dest_path)

        assert provider._check_region.called
        assert metadata.kind == 'file'
        # `exists` is False because the destination did not exist beforehand.
        assert not exists
        assert aiohttpretty.has_call(method='HEAD', uri=metadata_url)
        assert aiohttpretty.has_call(method='PUT', uri=url, headers=headers)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_version_metadata(self, provider, version_metadata, mock_time):
        # revisions() hits the ?versions listing and returns one entry each.
        path = WaterButlerPath('/my-image.jpg')
        url = provider.bucket.generate_url(100, 'GET', query_parameters={'versions': ''})
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET', url, params=params, status=200, body=version_metadata)

        data = await provider.revisions(path)

        assert isinstance(data, list)
        assert len(data) == 3

        for item in data:
            assert hasattr(item, 'extra')
            assert hasattr(item, 'version')
            assert hasattr(item, 'version_identifier')

        assert aiohttpretty.has_call(method='GET', uri=url, params=params)

    @pytest.mark.asyncio
    @pytest.mark.aiohttpretty
    async def test_single_version_metadata(self, provider, single_version_metadata, mock_time):
        # Same as above but for an object with exactly one version.
        path = WaterButlerPath('/single-version.file')
        url = provider.bucket.generate_url(100, 'GET', query_parameters={'versions': ''})
        params = build_folder_params(path)
        aiohttpretty.register_uri('GET',
                                  url,
                                  params=params,
                                  status=200,
                                  body=single_version_metadata)

        data = await provider.revisions(path)

        assert isinstance(data, list)
        assert len(data) == 1

        for item in data:
            assert hasattr(item, 'extra')
            assert hasattr(item, 'version')
            assert hasattr(item, 'version_identifier')

        assert aiohttpretty.has_call(method='GET', uri=url, params=params)

    def test_can_intra_move(self, provider):
        # Files can be moved server-side; folders cannot.
        file_path = WaterButlerPath('/my-image.jpg')
        folder_path = WaterButlerPath('/folder/', folder=True)

        assert provider.can_intra_move(provider)
        assert provider.can_intra_move(provider, file_path)
        assert not provider.can_intra_move(provider, folder_path)

    def test_can_intra_copy(self, provider):
        # Same capability matrix as intra_move.
        file_path = WaterButlerPath('/my-image.jpg')
        folder_path = WaterButlerPath('/folder/', folder=True)

        assert provider.can_intra_copy(provider)
        assert provider.can_intra_copy(provider, file_path)
        assert not provider.can_intra_copy(provider, folder_path)

    def test_can_duplicate_names(self, provider):
        # S3 allows a file and a folder prefix with the same name.
        assert provider.can_duplicate_names()
| apache-2.0 |
ted-ross/summit2017 | applications/client-once.py | 1 | 1985 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function, unicode_literals
import optparse
from proton import Message
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
class Client(MessagingHandler):
    """One-shot AMQP request/reply client.

    Opens a dynamic reply queue, sends a single request to the "Service"
    address, prints the reply, and closes the connection once the request
    is settled.
    """

    def __init__(self, url):
        super(Client, self).__init__()
        self.url = url
        self.sent = False  # guards against sending more than one request

    def on_start(self, event):
        # Connect, then ask the peer for a dynamically named reply queue.
        container = event.container
        self.conn = container.connect(self.url)
        self.receiver = container.create_receiver(self.conn, None, dynamic=True)

    def on_link_opened(self, event):
        # Only act once our dynamic receiver is up: its remote source
        # address becomes the reply-to for the outgoing request.
        if event.receiver != self.receiver:
            return
        self.reply_to = event.receiver.remote_source.address
        self.sender = event.container.create_sender(self.conn, "Service")

    def on_sendable(self, event):
        if self.sent:
            return
        request = Message(reply_to=self.reply_to, correlation_id=0,
                          body="Client Request")
        self.sender.send(request)
        self.sent = True

    def on_message(self, event):
        print("%s" % event.message.body)

    def on_settled(self, event):
        # The request was settled by the peer; we are done.
        self.conn.close()
Container(Client("amq02.lab.eng.rdu2.redhat.com")).run()
| apache-2.0 |
richardnpaul/FWL-Website | lib/python2.7/site-packages/django/contrib/gis/sitemaps/kml.py | 482 | 2481 | from django.core import urlresolvers
from django.contrib.sitemaps import Sitemap
from django.contrib.gis.db.models.fields import GeometryField
from django.db import models
class KMLSitemap(Sitemap):
    """A minimal hook to produce KML sitemaps."""
    geo_format = 'kml'

    def __init__(self, locations=None):
        # With no explicit locations, scan every installed model for
        # geometry fields.
        self.locations = self._build_kml_sources(locations)

    def _build_kml_sources(self, sources):
        """Return (app_label, module_name, field_name) triples.

        Each triple identifies one GeometryField found in `sources`.
        When `sources` is None, every installed model is inspected.
        """
        if sources is None:
            sources = models.get_models()

        kml_sources = []
        for source in sources:
            if isinstance(source, models.base.ModelBase):
                meta = source._meta
                kml_sources.extend(
                    (meta.app_label, meta.module_name, field.name)
                    for field in meta.fields
                    if isinstance(field, GeometryField)
                )
            elif isinstance(source, (list, tuple)):
                if len(source) != 3:
                    raise ValueError('Must specify a 3-tuple of (app_label, module_name, field_name).')
                kml_sources.append(source)
            else:
                raise TypeError('KML Sources must be a model or a 3-tuple.')
        return kml_sources

    def get_urls(self, page=1, site=None):
        """Tag every URL entry with this sitemap's `geo_format`."""
        urls = Sitemap.get_urls(self, page=page, site=site)
        for url in urls:
            url['geo_format'] = self.geo_format
        return urls

    def items(self):
        return self.locations

    def location(self, obj):
        # obj is one (app_label, module_name, field_name) triple.
        return urlresolvers.reverse(
            'django.contrib.gis.sitemaps.views.%s' % self.geo_format,
            kwargs={'label': obj[0],
                    'model': obj[1],
                    'field_name': obj[2]})
class KMZSitemap(KMLSitemap):
    # Identical to KMLSitemap but serves compressed (KMZ) output.
    geo_format = 'kmz'
| gpl-3.0 |
Hemisphere-Project/HPlayer2 | profiles/xpa.py | 1 | 4228 | from core.engine.hplayer import HPlayer2
from core.engine import network
import os, sys, types, platform
# DIRECTORY / FILE: media roots are the synced profile folder plus any USB key.
profilename = os.path.basename(__file__).split('.')[0]
base_path = ['/data/sync/'+profilename, '/data/usb']

# INIT HPLAYER with the profile-specific persistent settings file.
hplayer = HPlayer2(base_path, "/data/hplayer2-"+profilename+".cfg")

# PLAYERS: mpv for media playback, midi for MIDI events.
player = hplayer.addPlayer('mpv','mpv')
midi = hplayer.addPlayer('midi','midi')

# Interfaces: zyre mesh on wlan0, web UI, local input devices.
hplayer.addInterface('zyre', 'wlan0')
hplayer.addInterface('http2', 8080)
# hplayer.addInterface('http', 8037)
hplayer.addInterface('keyboard')
if HPlayer2.isRPi():
    # Raspberry Pi only: keypad LCD and GPIO trigger on pin 21 (310ms debounce).
    hplayer.addInterface('keypad')
    hplayer.addInterface('gpio', [21], 310)

# MASTER / SLAVE sequencer: updated by the zyre leader election below.
iamLeader = False
# Broadcast Order on OSC/Zyre to other Pi's
#
def broadcast(path, *args):
    """Relay a command to every peer over the zyre mesh.

    Play commands are scheduled with a 200ms delay so that all peers
    fire in sync despite network latency.
    """
    node = hplayer.interface('zyre').node
    if path.startswith('play'):
        node.broadcast(path, list(args), 200)   # WARNING LATENCY !!
    else:
        node.broadcast(path, list(args))
# Detect if i am zyre Leader
@hplayer.on('zyre.event')
def leadSequencer(*data):
    # 'from' == 'self' means this node currently holds zyre leadership.
    global iamLeader
    iamLeader = (data[0]['from'] == 'self')
# Receive a sequence command -> do Play !
@hplayer.on('zyre.playdir')
def doPlay(*data):
    # print(data)
    s = data[0]
    # Each device plays only the files prefixed with its own hostname.
    hplayer.playlist.play( hplayer.files.selectDir(s)+'/'+HPlayer2.name()+'*' )
# Receive an exit command -> last seq
@hplayer.on('zyre.end')
def doExit():
    # Directory index 2 holds the closing sequence.
    hplayer.playlist.play( hplayer.files.selectDir(2)+'/'+HPlayer2.name()+'*' )
# Media end: next dir / or loop (based on directory name)
@hplayer.on('playlist.end')
# @midi.on('stop')
def endSequence():
    # Only the elected zyre leader drives the sequence for the whole mesh.
    if not iamLeader:
        return
    if 'loop' in hplayer.files.currentDir():
        # Directories named with 'loop' replay themselves.
        broadcast('playdir', hplayer.files.currentIndex())
    elif hplayer.files.currentIndex() == 2:
        # Index 2 is the closing sequence (see doExit): wrap back to start.
        broadcast('playdir', 0)
    else:
        broadcast('playdir', hplayer.files.nextIndex())
# Bind Keypad / GPIO events: arrows select a directory, select stops,
# GPIO 21 triggers the closing sequence.
#
hplayer.on('keypad.left', lambda: broadcast('playdir', 0))
hplayer.on('keypad.up', lambda: broadcast('playdir', 1))
hplayer.on('keypad.down', lambda: broadcast('playdir', 2))
hplayer.on('keypad.right', lambda: broadcast('playdir', 3))
hplayer.on('keypad.select', lambda: broadcast('stop'))
hplayer.on('gpio.21-on', lambda: broadcast('end'))

# Keyboard: numeric keypad digits 0-9 select directories, ENTER stops,
# DOT triggers the end sequence, +/- adjust volume (hold auto-repeats).
#
hplayer.on('keyboard.KEY_KP0-down', lambda: broadcast('playdir', 0))
hplayer.on('keyboard.KEY_KP1-down', lambda: broadcast('playdir', 1))
hplayer.on('keyboard.KEY_KP2-down', lambda: broadcast('playdir', 2))
hplayer.on('keyboard.KEY_KP3-down', lambda: broadcast('playdir', 3))
hplayer.on('keyboard.KEY_KP4-down', lambda: broadcast('playdir', 4))
hplayer.on('keyboard.KEY_KP5-down', lambda: broadcast('playdir', 5))
hplayer.on('keyboard.KEY_KP6-down', lambda: broadcast('playdir', 6))
hplayer.on('keyboard.KEY_KP7-down', lambda: broadcast('playdir', 7))
hplayer.on('keyboard.KEY_KP8-down', lambda: broadcast('playdir', 8))
hplayer.on('keyboard.KEY_KP9-down', lambda: broadcast('playdir', 9))
hplayer.on('keyboard.KEY_KPENTER-down', lambda: broadcast('stop'))
hplayer.on('keyboard.KEY_KPDOT-down', lambda: broadcast('end'))
hplayer.on('keyboard.KEY_KPPLUS-down', lambda: broadcast('volume', hplayer.settings.get('volume')+1))
hplayer.on('keyboard.KEY_KPPLUS-hold', lambda: broadcast('volume', hplayer.settings.get('volume')+1))
hplayer.on('keyboard.KEY_KPMINUS-down', lambda: broadcast('volume', hplayer.settings.get('volume')-1))
hplayer.on('keyboard.KEY_KPMINUS-hold', lambda: broadcast('volume', hplayer.settings.get('volume')-1))
# PATCH Keypad LCD update
def lcd_update(self):
    """Render the two 16-char LCD lines: dir+volume / media+peer count."""
    # Line 1: current directory (13 chars) + volume (3 chars, right-aligned).
    scene = hplayer.files.currentDir().ljust(13, ' ')[:13]
    volume = str(hplayer.settings.get('volume')).rjust(3, ' ')[:3]

    # Line 2: media name without extension (14 chars) + active peers (2 chars).
    media = player.status()['media']
    if media:
        title = os.path.basename(media)[:-4]
    else:
        title = '-stop-'
    peers = str(hplayer.interface('zyre').activeCount()).rjust(2, ' ')[:2]

    return [scene + volume, title.ljust(14, ' ')[:14] + peers]
if hplayer.isRPi():
    # Monkey-patch the keypad interface so it renders with lcd_update above.
    hplayer.interface('keypad').update = types.MethodType(lcd_update, hplayer.interface('keypad'))

# RUN (blocking)
hplayer.run() # TODO: non blocking
| gpl-3.0 |
kalahbrown/HueBigSQL | desktop/core/ext-py/lxml/doc/mklatex.py | 28 | 10218 | # The script builds the LaTeX documentation.
# Testing:
# python mklatex.py latex .. 1.0
from docstructure import SITE_STRUCTURE, BASENAME_MAP
import os, shutil, re, sys, datetime
try:
    set
except NameError:
    # Python 2.3
    from sets import Set as set

# Name of the generated LaTeX master document.
TARGET_FILE = "lxmldoc.tex"

# Command-line options handed to docutils' rst2latex converter.
RST2LATEX_OPTIONS = " ".join([
#    "--no-toc-backlinks",
    "--strip-comments",
    "--language en",
#    "--date",
    "--use-latex-footnotes",
    "--use-latex-citations",
    "--use-latex-toc",
    "--font-encoding=T1",
    "--output-encoding=utf-8",
    "--input-encoding=utf-8",
    "--graphicx-option=pdftex",
    ])

htmlnsmap = {"h" : "http://www.w3.org/1999/xhtml"}

# Pre-bound regex substitution helpers used during postprocessing.
replace_invalid = re.compile(r'[-_/.\s\\]').sub
replace_content = re.compile("\{[^\}]*\}").sub
replace_epydoc_macros = re.compile(r'(,\s*amssymb|dvips\s*,\s*)').sub
replace_rst_macros = re.compile(r'(\\usepackage\{color}|\\usepackage\[[^]]*]\{hyperref})').sub

# Local copy of the site-wide basename map, with the API docs renamed.
BASENAME_MAP = BASENAME_MAP.copy()
BASENAME_MAP.update({'api' : 'lxmlapi'})

# LaTeX snippets
DOCUMENT_CLASS = r"""
\documentclass[10pt,english]{report}
\usepackage[a4paper]{geometry}
\parindent0pt
\parskip1ex
"""

PYGMENTS_IMPORT = r"""
\usepackage{fancyvrb}
\input{_part_pygments.tex}
"""

EPYDOC_IMPORT = r"""
\input{_part_epydoc.tex}
"""
def write_chapter(master, title, filename):
    """Emit a \\chapter heading that \\inputs the '_part_' file for `filename`."""
    directory = os.path.dirname(filename)
    part_name = os.path.join(directory, "_part_" + os.path.basename(filename))
    template = "\n\\chapter{%s}\n\\label{%s}\n\\input{%s}\n"
    master.write(template % (title, part_name, part_name))
# the program ----

def rest2latex(script, source_path, dest_path):
    # Shell out to docutils' rst2latex with the fixed option set above.
    # NOTE(review): command built by string interpolation -- paths with
    # spaces or shell metacharacters would break; fine for internal use.
    command = ('%s %s %s %s > %s' %
               (sys.executable, script, RST2LATEX_OPTIONS,
                source_path, dest_path))
    os.system(command)
def build_pygments_macros(filename):
    """Write pygments' LaTeX style definitions to `filename`."""
    from pygments.formatters import LatexFormatter
    text = LatexFormatter().get_style_defs()
    f = file(filename, "w")
    f.write(text)
    f.write('\n')
    f.close()
def copy_epydoc_macros(src, dest, existing_header_lines):
    """Extract the epydoc LaTeX preamble from `src` into `dest`.

    Drops everything that would clash with the master document
    (\\documentclass, \\makeindex, inputenc, comments, duplicate
    \\usepackage lines already present in `existing_header_lines`)
    and switches the hyperref link colour from black to blue.
    """
    doc = file(src, 'r')
    out = file(dest, "w")
    for line in doc:
        if line.startswith('%% generator') or line.startswith('% generated by '):
            # Everything from the generator marker on is document body.
            break
        if line.startswith('%') or \
               r'\documentclass' in line or \
               r'\makeindex' in line or \
               r'{inputenc}' in line:
            continue
        if line.startswith(r'\usepackage'):
            if line in existing_header_lines:
                continue
            if '{hyperref}' in line:
                line = line.replace('black', 'blue')
        out.write( replace_epydoc_macros('', line) )
    out.close()
    doc.close()
def noop(value):
    """Identity line filter: the default `process_line` for tex_postprocess.

    Fix: the parameter was named `input`, shadowing the builtin; renamed
    (all call sites pass it positionally, so this is safe).
    """
    return value

# Monotonic counter used to generate unique LaTeX listing-counter names.
counter_no = 0
def tex_postprocess(src, dest, want_header = False, process_line=noop):
    """
    Postprocessing of the LaTeX file generated from ReST.

    Reads file src and saves to dest only the true content
    (without the document header and final) - so it is suitable
    to be used as part of the longer document.

    Returns the title of document

    If want_header is set, returns also the document header (as
    the list of lines).
    """
    title = ''
    header = []
    add_header_line = header.append
    # Each processed document gets a unique listing counter name so the
    # concatenated master file does not redefine "listcnt0" repeatedly.
    global counter_no
    counter_no = counter_no + 1
    counter_text = "listcnt%d" % counter_no

    search_title = re.compile(r'\\title{([^}]*)}').search
    skipping = re.compile(r'(\\end{document}|\\tableofcontents)').search

    src = file(src)
    dest = file(dest, "w")

    # First pass: consume the preamble up to \maketitle, optionally
    # collecting header lines (with rst-specific macros stripped).
    iter_lines = iter(src.readlines())
    for l in iter_lines:
        l = process_line(l)
        if not l:
            continue
        if want_header:
            add_header_line(replace_rst_macros('', l))
        m = search_title(l)
        if m:
            # NOTE(review): group(0) keeps the full '\title{...}' macro,
            # not just the captured title text -- confirm this is what the
            # downstream interpolation expects.
            title = m.group(0)
        if l.startswith("\\maketitle"):
            break

    # Second pass: copy the body, skipping \tableofcontents and stopping
    # before \end{document} / the 'old-versions' anchor.
    for l in iter_lines:
        l = process_line(l)
        if skipping(l):
            # To-Do minitoc instead of tableofcontents
            continue
        elif "\hypertarget{old-versions}" in l:
            break
        elif "listcnt0" in l:
            l = l.replace("listcnt0", counter_text)
        dest.write(l)

    if not title:
        raise Exception("Bueee, no title")
    return title, header
def publish(dirname, lxml_path, release):
    """Build the complete LaTeX book for one lxml release.

    dirname   -- output directory for all generated .tex parts
    lxml_path -- checkout root (doc sources live in <lxml_path>/doc)
    release   -- version string used in the book title and ChangeLog name
    """
    if not os.path.exists(dirname):
        os.mkdir(dirname)

    book_title = "lxml %s" % release

    doc_dir = os.path.join(lxml_path, 'doc')
    script = os.path.join(doc_dir, 'rest2latex.py')

    pubkey = os.path.join(doc_dir, 'pubkey.asc')
    shutil.copy(pubkey, dirname)

    # build pygments macros
    build_pygments_macros(os.path.join(dirname, '_part_pygments.tex'))

    # Used in postprocessing of generated LaTeX files
    header = []
    titles = {}

    # Rewrite hyperlinks so cross-document refs become \hyperref labels
    # and non-HTML targets point at the public website.
    replace_interdoc_hyperrefs = re.compile(
        r'\\href\{([^/}]+)[.]([^./}]+)\}').sub
    replace_docinternal_hyperrefs = re.compile(
        r'\\href\{\\#([^}]+)\}').sub
    replace_image_paths = re.compile(
        r'^(\\includegraphics{)').sub

    def build_hyperref(match):
        basename, extension = match.groups()
        outname = BASENAME_MAP.get(basename, basename)
        if '#' in extension:
            anchor = extension.split('#')[-1]
            return r"\hyperref[%s]" % anchor
        elif extension != 'html':
            return r'\href{http://codespeak.net/lxml/%s.%s}' % (
                outname, extension)
        else:
            return r"\hyperref[_part_%s.tex]" % outname

    def fix_relative_hyperrefs(line):
        line = replace_image_paths(r'\1../html/', line)
        if r'\href' not in line:
            return line
        line = replace_interdoc_hyperrefs(build_hyperref, line)
        return replace_docinternal_hyperrefs(r'\hyperref[\1]', line)

    # Building pages
    # NOTE(review): have_epydoc_macros is never read -- looks like leftover.
    have_epydoc_macros = False
    for section, text_files in SITE_STRUCTURE:
        for filename in text_files:
            if filename.startswith('@'):
                continue
                #page_title = filename[1:]
                #url = href_map[page_title]
                #build_menu_entry(page_title, url, section_head)

            basename = os.path.splitext(os.path.basename(filename))[0]
            basename = BASENAME_MAP.get(basename, basename)
            outname = basename + '.tex'
            outpath = os.path.join(dirname, outname)
            path = os.path.join(doc_dir, filename)

            print "Creating %s" % outname
            rest2latex(script, path, outpath)

            final_name = os.path.join(dirname, os.path.dirname(outname),
                                      "_part_%s" % os.path.basename(outname))

            # Only the first document contributes the shared header.
            title, hd = tex_postprocess(outpath, final_name,
                                        want_header = not header,
                                        process_line=fix_relative_hyperrefs)
            if not header:
                header = hd
            titles[outname] = title

    # integrate generated API docs
    print "Integrating API docs"
    apidocsname = 'api.tex'
    apipath = os.path.join(dirname, apidocsname)
    tex_postprocess(apipath, os.path.join(dirname, "_part_%s" % apidocsname),
                    process_line=fix_relative_hyperrefs)
    copy_epydoc_macros(apipath, os.path.join(dirname, '_part_epydoc.tex'),
                       set(header))

    # convert CHANGES.txt
    print "Integrating ChangeLog"
    find_version_title = re.compile(
        r'(.*\\section\{)([0-9][^\} ]*)\s+\(([^)]+)\)(\}.*)').search
    def fix_changelog(line):
        # Turn '\section{1.2 (2007-01-01)}' into a readable heading and
        # demote subsections to unnumbered ones.
        m = find_version_title(line)
        if m:
            line = "%sChanges in version %s, released %s%s" % m.groups()
        else:
            line = line.replace(r'\subsection{', r'\subsection*{')
        return line

    chgname = 'changes-%s.tex' % release
    chgpath = os.path.join(dirname, chgname)
    rest2latex(script,
               os.path.join(lxml_path, 'CHANGES.txt'),
               chgpath)
    tex_postprocess(chgpath, os.path.join(dirname, "_part_%s" % chgname),
                    process_line=fix_changelog)

    # Writing a master file: reuse the collected header, swapping in our
    # document class, title page, date and pygments/epydoc support.
    print "Building %s\n" % TARGET_FILE
    master = file( os.path.join(dirname, TARGET_FILE), "w")
    for hln in header:
        if hln.startswith(r"\documentclass"):
            #hln = hln.replace('article', 'book')
            hln = DOCUMENT_CLASS
        elif hln.startswith("%% generator ") or hln.startswith("% generated "):
            master.write(EPYDOC_IMPORT)
        elif hln.startswith(r"\begin{document}"):
            # pygments and epydoc support
            master.write(PYGMENTS_IMPORT)
        elif hln.startswith(r"\title{"):
            hln = replace_content(
                r'{%s\\\\\\vspace{1cm}\\includegraphics[width=2.5cm]{../html/tagpython-big.png}}' % book_title, hln)
        elif hln.startswith(r"\date{"):
            hln = replace_content(
                r'{%s}' % datetime.date.today().isoformat(), hln)
        elif hln.startswith("pdftitle"):
            hln = replace_content(
                r'{%s}' % book_title, hln)
        master.write(hln)

    master.write("\\setcounter{page}{2}\n")
    master.write("\\tableofcontents\n")

    # One \part per site section, one \chapter per document.
    for section, text_files in SITE_STRUCTURE:
        master.write("\n\n\\part{%s}\n" % section)
        for filename in text_files:
            if filename.startswith('@'):
                continue
                #print "Not yet implemented: %s" % filename[1:]
                #page_title = filename[1:]
                #url = href_map[page_title]
                #build_menu_entry(page_title, url, section_head)
            else:
                basename = os.path.splitext(os.path.basename(filename))[0]
                basename = BASENAME_MAP.get(basename, basename)
                outname = basename + '.tex'
                write_chapter(master, titles[outname], outname)

    master.write("\\appendix\n")
    master.write("\\begin{appendix}\n")

    write_chapter(master, "Changes", chgname)
    write_chapter(master, "Generated API documentation", apidocsname)

    master.write("\\end{appendix}\n")
    master.write("\\end{document}\n")
if __name__ == '__main__':
    # Usage: mklatex.py <output-dir> <lxml-checkout-path> <release>
    publish(sys.argv[1], sys.argv[2], sys.argv[3])
| apache-2.0 |
mancoast/CPythonPyc_test | cpython/243_test_mutants.py | 24 | 8409 | from test.test_support import verbose, TESTFN
import random
import os
# From SF bug #422121: Insecurities in dict comparison.
# Safety of code doing comparisons has been an historical Python weak spot.
# The problem is that comparison of structures written in C *naturally*
# wants to hold on to things like the size of the container, or "the
# biggest" containee so far, across a traversal of the container; but
# code to do containee comparisons can call back into Python and mutate
# the container in arbitrary ways while the C loop is in midstream. If the
# C code isn't extremely paranoid about digging things out of memory on
# each trip, and artificially boosting refcounts for the duration, anything
# from infinite loops to OS crashes can result (yes, I use Windows <wink>).
#
# The other problem is that code designed to provoke a weakness is usually
# white-box code, and so catches only the particular vulnerabilities the
# author knew to protect against. For example, Python's list.sort() code
# went thru many iterations as one "new" vulnerability after another was
# discovered.
#
# So the dict comparison test here uses a black-box approach instead,
# generating dicts of various sizes at random, and performing random
# mutations on them at random times. This proved very effective,
# triggering at least six distinct failure modes the first 20 times I
# ran it. Indeed, at the start, the driver never got beyond 6 iterations
# before the test died.
# The dicts are global to make it easy to mutate tham from within functions.
dict1 = {}
dict2 = {}

# The current set of keys in dict1 and dict2. These are materialized as
# lists to make it easy to pick a dict key at random.
dict1keys = []
dict2keys = []

# Global flag telling maybe_mutate() whether to *consider* mutating.
mutate = 0
# If global mutate is true, consider mutating a dict. May or may not
# mutate a dict even if mutate is true. If it does decide to mutate a
# dict, it picks one of {dict1, dict2} at random, and deletes a random
# entry from it; or, more rarely, adds a random element.

def maybe_mutate():
    global mutate
    if not mutate:
        return
    if random.random() < 0.5:
        return

    # Pick the victim dict at random.
    if random.random() < 0.5:
        target, keys = dict1, dict1keys
    else:
        target, keys = dict2, dict2keys

    if random.random() < 0.2:
        # Insert a new key.
        mutate = 0   # disable mutation until key inserted
        while 1:
            newkey = Horrid(random.randrange(100))
            if newkey not in target:
                break
        target[newkey] = Horrid(random.randrange(100))
        keys.append(newkey)
        mutate = 1

    elif keys:
        # Delete a key at random.
        i = random.randrange(len(keys))
        key = keys[i]
        del target[key]
        # CAUTION: don't use keys.remove(key) here. Or do <wink>. The
        # point is that .remove() would trigger more comparisons, and so
        # also more calls to this routine. We're mutating often enough
        # without that.
        del keys[i]
# A horrid class that triggers random mutations of dict1 and dict2 when
# instances are compared.

class Horrid:
    def __init__(self, i):
        # Comparison outcomes are determined by the value of i.
        self.i = i

        # An artificial hashcode is selected at random so that we don't
        # have any systematic relationship between comparison outcomes
        # (based on self.i and other.i) and relative position within the
        # hash vector (based on hashcode).
        self.hashcode = random.randrange(1000000000)

    def __hash__(self):
        return self.hashcode

    def __cmp__(self, other):
        maybe_mutate()   # The point of the test.
        return cmp(self.i, other.i)

    def __repr__(self):
        return "Horrid(%d)" % self.i
# Fill dict d with numentries (Horrid(i), Horrid(j)) key-value pairs,
# where i and j are selected at random from the candidates list.
# Return d.keys() after filling.

def fill_dict(d, candidates, numentries):
    d.clear()
    for i in xrange(numentries):
        d[Horrid(random.choice(candidates))] = \
            Horrid(random.choice(candidates))
    return d.keys()
# Test one pair of randomly generated dicts, each with n entries.
# Note that dict comparison is trivial if they don't have the same number
# of entires (then the "shorter" dict is instantly considered to be the
# smaller one, without even looking at the entries).

def test_one(n):
    global mutate, dict1, dict2, dict1keys, dict2keys

    # Fill the dicts without mutating them.
    mutate = 0
    dict1keys = fill_dict(dict1, range(n), n)
    dict2keys = fill_dict(dict2, range(n), n)

    # Enable mutation, then compare the dicts so long as they have the
    # same size.
    mutate = 1
    if verbose:
        print "trying w/ lengths", len(dict1), len(dict2),
    while dict1 and len(dict1) == len(dict2):
        if verbose:
            print ".",
        # The comparison itself triggers maybe_mutate() via Horrid.__cmp__.
        c = cmp(dict1, dict2)
    if verbose:
        print
# Run test_one n times. At the start (before the bugs were fixed), 20
# consecutive runs of this test each blew up on or before the sixth time
# test_one was run. So n doesn't have to be large to get an interesting
# test.
# OTOH, calling with large n is also interesting, to ensure that the fixed
# code doesn't hold on to refcounts *too* long (in which case memory would
# leak).

def test(n):
    for i in xrange(n):
        test_one(random.randrange(1, 100))
# See last comment block for clues about good values for n.
test(100)
##########################################################################
# Another segfault bug, distilled by Michael Hudson from a c.l.py post.
# Child.__getattr__ mutates the parent's __dict__ *while* it is being
# printed, exercising the tp_print slot's robustness.

class Child:
    def __init__(self, parent):
        self.__dict__['parent'] = parent
    def __getattr__(self, attr):
        self.parent.a = 1
        self.parent.b = 1
        self.parent.c = 1
        self.parent.d = 1
        self.parent.e = 1
        self.parent.f = 1
        self.parent.g = 1
        self.parent.h = 1
        self.parent.i = 1
        return getattr(self.parent, attr)

class Parent:
    def __init__(self):
        self.a = Child(self)

# Hard to say what this will print! May vary from time to time. But
# we're specifically trying to test the tp_print slot here, and this is
# the clearest way to do it. We print the result to a temp file so that
# the expected-output file doesn't need to change.
f = open(TESTFN, "w")
print >> f, Parent().__dict__
f.close()
os.unlink(TESTFN)
##########################################################################
# And another core-dumper from Michael Hudson.
# Machiavelli.__repr__ clears the dict while str(dict) is iterating it.

dict = {}

# Force dict to malloc its table.
for i in range(1, 10):
    dict[i] = i

f = open(TESTFN, "w")

class Machiavelli:
    def __repr__(self):
        dict.clear()

        # Michael sez: "doesn't crash without this. don't know why."
        # Tim sez: "luck of the draw; crashes with or without for me."
        print >> f

        return `"machiavelli"`

    def __hash__(self):
        return 0

dict[Machiavelli()] = Machiavelli()
print >> f, str(dict)
f.close()
os.unlink(TESTFN)
del f, dict
##########################################################################
# And another core-dumper from Michael Hudson.
dict = {}
# let's force dict to malloc its table
for i in range(1, 10):
    dict[i] = i
class Machiavelli2:
    # __eq__ clears the dict while a lookup is comparing keys; the
    # expected outcome is a clean KeyError, not an interpreter crash.
    def __eq__(self, other):
        dict.clear()
        return 1
    def __hash__(self):
        return 0
dict[Machiavelli2()] = Machiavelli2()
try:
    dict[Machiavelli2()]
except KeyError:
    pass
del dict
##########################################################################
# And another core-dumper from Michael Hudson.
dict = {}
# let's force dict to malloc its table
for i in range(1, 10):
    dict[i] = i
class Machiavelli3:
    # Like Machiavelli2, but only clears the dict when a matching id is
    # compared, so the mutation happens mid-way through probing.
    def __init__(self, id):
        self.id = id
    def __eq__(self, other):
        if self.id == other.id:
            dict.clear()
            return 1
        else:
            return 0
    def __repr__(self):
        return "%s(%s)"%(self.__class__.__name__, self.id)
    def __hash__(self):
        return 0
dict[Machiavelli3(1)] = Machiavelli3(0)
dict[Machiavelli3(2)] = Machiavelli3(0)
f = open(TESTFN, "w")
try:
    try:
        # Python 2 print chevron; may raise KeyError after the clear().
        print >> f, dict[Machiavelli3(2)]
    except KeyError:
        pass
finally:
    f.close()
os.unlink(TESTFN)
del dict
| gpl-3.0 |
sclabs/sccms-nonrel | django/contrib/gis/tests/geo3d/models.py | 404 | 1835 | from django.contrib.gis.db import models
class City3D(models.Model):
    """City located by a 3D (x, y, z) point geometry."""
    name = models.CharField(max_length=30)
    point = models.PointField(dim=3)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class Interstate2D(models.Model):
    """Interstate as a 2D line in geographic coordinates (SRID 4269)."""
    name = models.CharField(max_length=30)
    line = models.LineStringField(srid=4269)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class Interstate3D(models.Model):
    """Interstate as a 3D line in geographic coordinates (SRID 4269)."""
    name = models.CharField(max_length=30)
    line = models.LineStringField(dim=3, srid=4269)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class InterstateProj2D(models.Model):
    """Interstate as a 2D line in a projected system (SRID 32140)."""
    name = models.CharField(max_length=30)
    line = models.LineStringField(srid=32140)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class InterstateProj3D(models.Model):
    """Interstate as a 3D line in a projected system (SRID 32140)."""
    name = models.CharField(max_length=30)
    line = models.LineStringField(dim=3, srid=32140)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class Polygon2D(models.Model):
    """Named 2D polygon (SRID 32140)."""
    name = models.CharField(max_length=30)
    poly = models.PolygonField(srid=32140)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class Polygon3D(models.Model):
    """Named 3D polygon (SRID 32140)."""
    name = models.CharField(max_length=30)
    poly = models.PolygonField(dim=3, srid=32140)
    objects = models.GeoManager()
    def __unicode__(self):
        return self.name
class Point2D(models.Model):
    """Bare 2D point, default SRID."""
    point = models.PointField()
    objects = models.GeoManager()
class Point3D(models.Model):
    """Bare 3D point, default SRID."""
    point = models.PointField(dim=3)
    objects = models.GeoManager()
class MultiPoint3D(models.Model):
    """Collection of 3D points."""
    mpoint = models.MultiPointField(dim=3)
    objects = models.GeoManager()
| bsd-3-clause |
grupoprog3/proyecto_final | Entrega Final/flask/Lib/site-packages/sqlalchemy/testing/provision.py | 28 | 9388 | from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
from sqlalchemy import exc
from sqlalchemy.util import compat
from . import config, engines
import os
import time
import logging
log = logging.getLogger(__name__)
FOLLOWER_IDENT = None
class register(object):
    """Backend-dispatching decorator registry.

    Maps database backend names ("postgresql", "mysql", ...) to handler
    functions.  Calling the registry routes to the handler registered for
    the config's backend, falling back to the wildcard "*" entry.
    """

    def __init__(self):
        # backend name -> handler callable
        self.fns = {}

    @classmethod
    def init(cls, fn):
        """Create a new registry whose default ("*") handler is *fn*."""
        return register().for_db("*")(fn)

    def for_db(self, dbname):
        """Decorator: register the wrapped function under *dbname*."""
        def decorate(fn):
            self.fns[dbname] = fn
            return self
        return decorate

    def __call__(self, cfg, *arg):
        # Accept a URL string, a URL object, or a test config object.
        if isinstance(cfg, compat.string_types):
            url = sa_url.make_url(cfg)
        elif isinstance(cfg, sa_url.URL):
            url = cfg
        else:
            url = cfg.db.url
        backend = url.get_backend_name()
        handler = self.fns[backend] if backend in self.fns else self.fns['*']
        return handler(cfg, *arg)
def create_follower_db(follower_ident):
    # Create the follower's database on every distinct host/database
    # combination used by the registered test configs.
    for cfg in _configs_for_db_operation():
        _create_db(cfg, cfg.db, follower_ident)
def configure_follower(follower_ident):
    # Point every registered config at the follower's schemas.
    for cfg in config.Config.all_configs():
        _configure_follower(cfg, follower_ident)
def setup_config(db_url, options, file_config, follower_ident):
    # Build and register a test Config for db_url.  When running as a
    # follower process, rewrite the URL to the follower's database first.
    if follower_ident:
        db_url = _follower_url_from_main(db_url, follower_ident)
    db_opts = {}
    _update_db_opts(db_url, db_opts)
    eng = engines.testing_engine(db_url, db_opts)
    _post_configure_engine(db_url, eng, follower_ident)
    # Verify connectivity up front so a bad URL fails fast.
    eng.connect().close()
    cfg = config.Config.register(eng, db_opts, options, file_config)
    if follower_ident:
        _configure_follower(cfg, follower_ident)
    return cfg
def drop_follower_db(follower_ident):
    # Drop the follower's database everywhere create_follower_db made one.
    for cfg in _configs_for_db_operation():
        _drop_db(cfg, cfg.db, follower_ident)
def _configs_for_db_operation():
    # Yield one config per distinct (backend, username, host, database)
    # tuple so create/drop operations run once per physical database.
    # Engine pools are disposed before and after so no connections are
    # held open while databases are created or dropped.
    hosts = set()
    for cfg in config.Config.all_configs():
        cfg.db.dispose()
    for cfg in config.Config.all_configs():
        url = cfg.db.url
        backend = url.get_backend_name()
        host_conf = (
            backend,
            url.username, url.host, url.database)
        if host_conf not in hosts:
            yield cfg
            hosts.add(host_conf)
    for cfg in config.Config.all_configs():
        cfg.db.dispose()
@register.init
def _create_db(cfg, eng, ident):
    # Default handler: each backend must register its own creation routine.
    raise NotImplementedError("no DB creation routine for cfg: %s" % eng.url)
@register.init
def _drop_db(cfg, eng, ident):
    raise NotImplementedError("no DB drop routine for cfg: %s" % eng.url)
@register.init
def _update_db_opts(db_url, db_opts):
    # Default: no engine-option tweaks for this backend.
    pass
@register.init
def _configure_follower(cfg, ident):
    pass
@register.init
def _post_configure_engine(url, engine, follower_ident):
    pass
@register.init
def _follower_url_from_main(url, ident):
    # Default: the follower database is simply named after the follower
    # ident.  NOTE(review): mutates the URL object in place -- valid for
    # the SQLAlchemy versions this file targets, where URL is mutable.
    url = sa_url.make_url(url)
    url.database = ident
    return url
@_update_db_opts.for_db("mssql")
def _mssql_update_db_opts(db_url, db_opts):
    db_opts['legacy_schema_aliasing'] = False
@_follower_url_from_main.for_db("sqlite")
def _sqlite_follower_url_from_main(url, ident):
    # Memory databases are already per-process; file databases get a
    # per-follower file instead.
    url = sa_url.make_url(url)
    if not url.database or url.database == ':memory:':
        return url
    else:
        return sa_url.make_url("sqlite:///%s.db" % ident)
@_post_configure_engine.for_db("sqlite")
def _sqlite_post_configure_engine(url, engine, follower_ident):
    from sqlalchemy import event
    @event.listens_for(engine, "connect")
    def connect(dbapi_connection, connection_record):
        # use file DBs in all cases, memory acts kind of strangely
        # as an attached
        if not follower_ident:
            dbapi_connection.execute(
                'ATTACH DATABASE "test_schema.db" AS test_schema')
        else:
            dbapi_connection.execute(
                'ATTACH DATABASE "%s_test_schema.db" AS test_schema'
                % follower_ident)
@_create_db.for_db("postgresql")
def _pg_create_db(cfg, eng, ident):
    # CREATE DATABASE cannot run inside a transaction; use autocommit.
    with eng.connect().execution_options(
            isolation_level="AUTOCOMMIT") as conn:
        try:
            _pg_drop_db(cfg, conn, ident)
        except Exception:
            # best effort -- the database may simply not exist yet
            pass
        currentdb = conn.scalar("select current_database()")
        # Postgres refuses to copy a template database while other
        # sessions are connected to it; retry briefly to ride that out.
        for attempt in range(3):
            try:
                conn.execute(
                    "CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
            except exc.OperationalError as err:
                if attempt != 2 and "accessed by other users" in str(err):
                    time.sleep(.2)
                    continue
                else:
                    raise
            else:
                break
@_create_db.for_db("mysql")
def _mysql_create_db(cfg, eng, ident):
    with eng.connect() as conn:
        try:
            _mysql_drop_db(cfg, conn, ident)
        except Exception:
            pass
        # main database plus two companion schemas used by schema tests
        conn.execute("CREATE DATABASE %s" % ident)
        conn.execute("CREATE DATABASE %s_test_schema" % ident)
        conn.execute("CREATE DATABASE %s_test_schema_2" % ident)
@_configure_follower.for_db("mysql")
def _mysql_configure_follower(config, ident):
    config.test_schema = "%s_test_schema" % ident
    config.test_schema_2 = "%s_test_schema_2" % ident
@_create_db.for_db("sqlite")
def _sqlite_create_db(cfg, eng, ident):
    # SQLite files are created lazily on first connect; nothing to do.
    pass
@_drop_db.for_db("postgresql")
def _pg_drop_db(cfg, eng, ident):
    with eng.connect().execution_options(
            isolation_level="AUTOCOMMIT") as conn:
        # Terminate lingering sessions on the target database first,
        # otherwise DROP DATABASE fails with "accessed by other users".
        conn.execute(
            text(
                "select pg_terminate_backend(pid) from pg_stat_activity "
                "where usename=current_user and pid != pg_backend_pid() "
                "and datname=:dname"
            ), dname=ident)
        conn.execute("DROP DATABASE %s" % ident)
@_drop_db.for_db("sqlite")
def _sqlite_drop_db(cfg, eng, ident):
    # Followers use per-ident schema files; the main process uses <ident>.db.
    if ident:
        os.remove("%s_test_schema.db" % ident)
    else:
        os.remove("%s.db" % ident)
@_drop_db.for_db("mysql")
def _mysql_drop_db(cfg, eng, ident):
    with eng.connect() as conn:
        # drop the companion schemas before the main database
        conn.execute("DROP DATABASE %s_test_schema" % ident)
        conn.execute("DROP DATABASE %s_test_schema_2" % ident)
        conn.execute("DROP DATABASE %s" % ident)
@_create_db.for_db("oracle")
def _oracle_create_db(cfg, eng, ident):
    # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or
    # similar, so that the default tablespace is not "system"; reflection will
    # fail otherwise
    with eng.connect() as conn:
        # one main user plus two extra users standing in for test schemas
        conn.execute("create user %s identified by xe" % ident)
        conn.execute("create user %s_ts1 identified by xe" % ident)
        conn.execute("create user %s_ts2 identified by xe" % ident)
        conn.execute("grant dba to %s" % (ident, ))
        conn.execute("grant unlimited tablespace to %s" % ident)
        conn.execute("grant unlimited tablespace to %s_ts1" % ident)
        conn.execute("grant unlimited tablespace to %s_ts2" % ident)
@_configure_follower.for_db("oracle")
def _oracle_configure_follower(config, ident):
    config.test_schema = "%s_ts1" % ident
    config.test_schema_2 = "%s_ts2" % ident
def _ora_drop_ignore(conn, dbname):
    # Best-effort drop of an Oracle user; True on success, False otherwise.
    try:
        conn.execute("drop user %s cascade" % dbname)
        log.info("Reaped db: %s", dbname)
        return True
    except exc.DatabaseError as err:
        log.warning("couldn't drop db: %s", err)
        return False
@_drop_db.for_db("oracle")
def _oracle_drop_db(cfg, eng, ident):
    with eng.connect() as conn:
        # cx_Oracle seems to occasionally leak open connections when a large
        # suite it run, even if we confirm we have zero references to
        # connection objects.
        # while there is a "kill session" command in Oracle,
        # it unfortunately does not release the connection sufficiently.
        _ora_drop_ignore(conn, ident)
        _ora_drop_ignore(conn, "%s_ts1" % ident)
        _ora_drop_ignore(conn, "%s_ts2" % ident)
def reap_oracle_dbs(eng, idents_file):
    # Drop leftover TEST_* users recorded in idents_file that no longer
    # have an active session.
    log.info("Reaping Oracle dbs...")
    with eng.connect() as conn:
        with open(idents_file) as file_:
            idents = set(line.strip() for line in file_)
        log.info("identifiers in file: %s", ", ".join(idents))
        to_reap = conn.execute(
            "select u.username from all_users u where username "
            "like 'TEST_%' and not exists (select username "
            "from v$session where username=u.username)")
        all_names = set([username.lower() for (username, ) in to_reap])
        to_drop = set()
        for name in all_names:
            if name.endswith("_ts1") or name.endswith("_ts2"):
                # companion schemas are handled with their main ident below
                continue
            elif name in idents:
                to_drop.add(name)
                if "%s_ts1" % name in all_names:
                    to_drop.add("%s_ts1" % name)
                if "%s_ts2" % name in all_names:
                    to_drop.add("%s_ts2" % name)
        dropped = total = 0
        for total, username in enumerate(to_drop, 1):
            if _ora_drop_ignore(conn, username):
                dropped += 1
        log.info(
            "Dropped %d out of %d stale databases detected", dropped, total)
@_follower_url_from_main.for_db("oracle")
def _oracle_follower_url_from_main(url, ident):
    # Followers connect as their own Oracle user (schema == user).
    url = sa_url.make_url(url)
    url.username = ident
    url.password = 'xe'
    return url
| apache-2.0 |
skycucumber/Messaging-Gateway | webapp/venv/lib/python2.7/site-packages/sqlalchemy/sql/naming.py | 33 | 4588 | # sqlalchemy/naming.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Establish constraint and index naming conventions.
"""
from .schema import Constraint, ForeignKeyConstraint, PrimaryKeyConstraint, \
UniqueConstraint, CheckConstraint, Index, Table, Column
from .. import event, events
from .. import exc
from .elements import _truncated_label, _defer_name, _defer_none_name, conv
import re
class ConventionDict(object):
    """Token source used to render a naming-convention template string.

    Instances are passed to ``convention % ConventionDict(...)`` so that
    template tokens such as ``%(table_name)s`` or ``%(column_0_name)s``
    are resolved via ``__getitem__`` below.
    """
    def __init__(self, const, table, convention):
        self.const = const
        self._is_fk = isinstance(const, ForeignKeyConstraint)
        self.table = table
        self.convention = convention
        self._const_name = const.name
    def _key_table_name(self):
        # %(table_name)s
        return self.table.name
    def _column_X(self, idx):
        # The idx'th column taking part in the constraint; for foreign
        # keys this is the parent column of the idx'th FK element.
        if self._is_fk:
            fk = self.const.elements[idx]
            return fk.parent
        else:
            return list(self.const.columns)[idx]
    def _key_constraint_name(self):
        # %(constraint_name)s - only legal when the constraint was given
        # an explicit name.
        if isinstance(self._const_name, (type(None), _defer_none_name)):
            raise exc.InvalidRequestError(
                "Naming convention including "
                "%(constraint_name)s token requires that "
                "constraint is explicitly named."
            )
        if not isinstance(self._const_name, conv):
            # Side effect: clear the plain name on the constraint so the
            # convention-generated name can take its place.
            self.const.name = None
        return self._const_name
    def _key_column_X_name(self, idx):
        # %(column_N_name)s
        return self._column_X(idx).name
    def _key_column_X_label(self, idx):
        # %(column_N_label)s
        return self._column_X(idx)._label
    def _key_referred_table_name(self):
        # %(referred_table_name)s - table referenced by the first FK element.
        fk = self.const.elements[0]
        refs = fk.target_fullname.split(".")
        if len(refs) == 3:
            refschema, reftable, refcol = refs
        else:
            reftable, refcol = refs
        return reftable
    def _key_referred_column_X_name(self, idx):
        # %(referred_column_N_name)s
        fk = self.const.elements[idx]
        refs = fk.target_fullname.split(".")
        if len(refs) == 3:
            refschema, reftable, refcol = refs
        else:
            reftable, refcol = refs
        return refcol
    def __getitem__(self, key):
        # Resolution order: user-supplied callables in the convention
        # dict, then fixed _key_* accessors, then "column_<N>_..." style
        # templated keys mapped onto the _key_..._X accessors.
        if key in self.convention:
            return self.convention[key](self.const, self.table)
        elif hasattr(self, '_key_%s' % key):
            return getattr(self, '_key_%s' % key)()
        else:
            col_template = re.match(r".*_?column_(\d+)_.+", key)
            if col_template:
                idx = col_template.group(1)
                attr = "_key_" + key.replace(idx, "X")
                idx = int(idx)
                if hasattr(self, attr):
                    return getattr(self, attr)(idx)
        raise KeyError(key)
# Constraint/index class -> short prefix used as a convention-dict key.
_prefix_dict = {
    Index: "ix",
    PrimaryKeyConstraint: "pk",
    CheckConstraint: "ck",
    UniqueConstraint: "uq",
    ForeignKeyConstraint: "fk"
}
def _get_convention(dict_, key):
    # Walk the constraint class's MRO looking for a convention registered
    # either under the short prefix ("fk", "ix", ...) or under the class
    # object itself.
    for super_ in key.__mro__:
        if super_ in _prefix_dict and _prefix_dict[super_] in dict_:
            return dict_[_prefix_dict[super_]]
        elif super_ in dict_:
            return dict_[super_]
    else:
        # for/else: reached only when the MRO is exhausted with no match.
        return None
def _constraint_name_for_table(const, table):
    # Derive the convention-based name for const on table, or None when
    # no convention applies.  An existing ``conv``-marked name wins.
    metadata = table.metadata
    convention = _get_convention(metadata.naming_convention, type(const))
    if isinstance(const.name, conv):
        return const.name
    elif convention is not None and (
        const.name is None or not isinstance(const.name, conv) and
        "constraint_name" in convention
    ):
        return conv(
            convention % ConventionDict(const, table,
                                        metadata.naming_convention)
        )
    elif isinstance(convention, _defer_none_name):
        return None
@event.listens_for(Constraint, "after_parent_attach")
@event.listens_for(Index, "after_parent_attach")
def _constraint_name(const, table):
    # Fires when a constraint/index is attached; applies the naming
    # convention once the constraint is associated with an actual Table.
    if isinstance(table, Column):
        # for column-attached constraint, set another event
        # to link the column attached to the table as this constraint
        # associated with the table.
        event.listen(table, "after_parent_attach",
                     lambda col, table: _constraint_name(const, table)
                     )
    elif isinstance(table, Table):
        if isinstance(const.name, (conv, _defer_name)):
            return
        newname = _constraint_name_for_table(const, table)
        if newname is not None:
            const.name = newname
| gpl-2.0 |
RuiNascimento/krepo | script.module.lambdascrapers/lib/lambdascrapers/sources_notworking/lambdascrapers(11_9)/flixanity.py | 4 | 5330 | # -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @Daddy_Blamo wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import base64
import json
import re
import time
import traceback
import urllib
import urlparse

from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import dom_parser2
from resources.lib.modules import log_utils
class source:
    """123movies/Flixanity scraper for the Placenta/LambdaScrapers framework.

    movie()/tvshow()/episode() build page URLs, sources() lists hoster
    links found on a watch page, and resolve() exchanges a chosen link
    for the final playable stream URL.

    Fix: the exception handlers below call ``log_utils.log`` which was
    never imported at module level (NameError masked the graceful
    ``return``); the import is added to the module's import block.  Bare
    ``except:`` clauses are narrowed to ``except Exception:``.
    """
    def __init__(self):
        self.priority = 1
        self.language = ['en']
        # Known mirror domains served by this provider.
        self.domains = ['movieshd.tv', 'movieshd.is', 'movieshd.watch', 'flixanity.is', 'flixanity.me','istream.is','flixanity.online','flixanity.cc','123movies.it']
        self.base_link = 'http://123movieser.com'
        self.search_link = '/watch/%s-%s-online-free-123movies.html'

    def movie(self, imdb, title, localtitle, aliases, year):
        """Return the watch-page URL for a movie, or None on failure."""
        try:
            clean_title = cleantitle.geturl(title)
            url = urlparse.urljoin(self.base_link, (self.search_link %(clean_title,year)))
            return url
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('Flixanity - Exception: \n' + str(failure))
            return

    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        """Pack show metadata into a querystring consumed later by episode()."""
        try:
            aliases.append({'country': 'us', 'title': tvshowtitle})
            url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'aliases': aliases}
            url = urllib.urlencode(url)
            return url
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('Flixanity - Exception: \n' + str(failure))
            return

    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        """Resolve the per-episode page URL from the packed show data."""
        try:
            if url is None: return
            url = urlparse.parse_qs(url)
            url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
            clean_title = cleantitle.geturl(url['tvshowtitle'])+'-s%02d' % int(season)
            url = urlparse.urljoin(self.base_link, (self.search_link %(clean_title,url['year'])))
            r = client.request(url)
            r = dom_parser2.parse_dom(r, 'div', {'id': 'ip_episode'})
            r = [dom_parser2.parse_dom(i, 'a', req=['href']) for i in r if i]
            # Pick the anchor whose text matches the requested episode.
            for i in r[0]:
                if i.content == 'Episode %s'%episode:
                    url = i.attrs['href']
            return url
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('Flixanity - Exception: \n' + str(failure))
            return

    def sources(self, url, hostDict, hostprDict):
        """Scrape the hoster list from a watch page into source dicts."""
        try:
            sources = []
            if url is None: return sources
            r = client.request(url)
            # Page marks HD releases with a "<p>HD</p>" badge.
            quality = re.findall(">(\w+)<\/p",r)
            if quality[0] == "HD":
                quality = "720p"
            else:
                quality = "SD"
            r = dom_parser2.parse_dom(r, 'div', {'id': 'servers-list'})
            r = [dom_parser2.parse_dom(i, 'a', req=['href']) for i in r if i]
            for i in r[0]:
                # Carry the data-* attributes through to resolve().
                url = {'url': i.attrs['href'], 'data-film': i.attrs['data-film'], 'data-server': i.attrs['data-server'], 'data-name' : i.attrs['data-name']}
                url = urllib.urlencode(url)
                sources.append({'source': i.content, 'quality': quality, 'language': 'en', 'url': url, 'direct': False, 'debridonly': False})
            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('Flixanity - Exception: \n' + str(failure))
            return

    def resolve(self, url):
        """Exchange a sources() entry for the final playable stream URL."""
        try:
            urldata = urlparse.parse_qs(url)
            urldata = dict((i, urldata[i][0]) for i in urldata)
            post = {'ipplugins': 1,'ip_film': urldata['data-film'], 'ip_server': urldata['data-server'], 'ip_name': urldata['data-name'],'fix': "0"}
            # Three-step handshake against the site's player API.
            p1 = client.request('http://123movieser.com/ip.file/swf/plugins/ipplugins.php', post=post, referer=urldata['url'], XHR=True)
            p1 = json.loads(p1)
            p2 = client.request('http://123movieser.com/ip.file/swf/ipplayer/ipplayer.php?u=%s&s=%s&n=0' %(p1['s'],urldata['data-server']))
            p2 = json.loads(p2)
            p3 = client.request('http://123movieser.com/ip.file/swf/ipplayer/api.php?hash=%s' %(p2['hash']))
            p3 = json.loads(p3)
            n = p3['status']
            if n == False:
                # Retry with the alternate player slot when status is False.
                p2 = client.request('http://123movieser.com/ip.file/swf/ipplayer/ipplayer.php?u=%s&s=%s&n=1' %(p1['s'],urldata['data-server']))
                p2 = json.loads(p2)
            url = "https:%s" %p2["data"].replace("\/","/")
            return url
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('Flixanity - Exception: \n' + str(failure))
            return
soulxu/libvirt-xuhj | examples/python/dominfo.py | 19 | 2063 | #!/usr/bin/env python
# dominfo - print some information about a domain
import libvirt
import sys
import os
import libxml2
import pdb
def usage():
    # Command-line help (Python 2 print statements throughout this script).
    print 'Usage: %s DOMAIN' % sys.argv[0]
    print ' Print information about the domain DOMAIN'
def print_section(title):
    # Section header with an underline.
    print "\n%s" % title
    print "=" * 60
def print_entry(key, value):
    # Aligned key/value output line.
    print "%-10s %-10s" % (key, value)
def print_xml(key, ctx, path):
    # Evaluate an XPath expression against the domain XML and print the
    # first result (or "Unknown"); returns the printed value so callers
    # can branch on it.
    res = ctx.xpathEval(path)
    if res is None or len(res) == 0:
        value="Unknown"
    else:
        value = res[0].content
    print_entry(key, value)
    return value
# --- main script (Python 2) ---
if len(sys.argv) != 2:
    usage()
    sys.exit(2)
name = sys.argv[1]
# Connect to libvirt
conn = libvirt.openReadOnly(None)
if conn == None:
    print 'Failed to open connection to the hypervisor'
    sys.exit(1)
try:
    dom = conn.lookupByName(name)
# Annoyingly, libvirt prints its own error message here
except libvirt.libvirtError:
    print "Domain %s is not runing" % name
    sys.exit(0)
# Basic runtime info: state, memory, vcpus.
info = dom.info()
print_section("Domain info")
print_entry("State:", info[0])
print_entry("MaxMem:", info[1])
print_entry("UsedMem:", info[2])
print_entry("VCPUs:", info[3])
# Read some info from the XML desc
xmldesc = dom.XMLDesc(0)
doc = libxml2.parseDoc(xmldesc)
ctx = doc.xpathNewContext()
print_section("Kernel")
print_xml("Type:", ctx, "/domain/os/type")
print_xml("Kernel:", ctx, "/domain/os/kernel")
print_xml("initrd:", ctx, "/domain/os/initrd")
print_xml("cmdline:", ctx, "/domain/os/cmdline")
print_section("Devices")
devs = ctx.xpathEval("/domain/devices/*")
for d in devs:
    # Per-device attributes depend on the device type attribute.
    ctx.setContextNode(d)
    #pdb.set_trace()
    type = print_xml("Type:", ctx, "@type")
    if type == "file":
        print_xml("Source:", ctx, "source/@file")
        print_xml("Target:", ctx, "target/@dev")
    elif type == "block":
        print_xml("Source:", ctx, "source/@dev")
        print_xml("Target:", ctx, "target/@dev")
    elif type == "bridge":
        print_xml("Source:", ctx, "source/@bridge")
        print_xml("MAC Addr:", ctx, "mac/@address")
charnley/rmsd | rmsd/calculate_rmsd.py | 1 | 48929 | #!/usr/bin/env python
__doc__ = """
Calculate Root-mean-square deviation (RMSD) between structure A and B, in XYZ
or PDB format, using transformation and rotation.
For more information, usage, example and citation read more at
https://github.com/charnley/rmsd
"""
__version__ = "1.4"
import argparse
import copy
import gzip
import pathlib
import re
import sys
import numpy as np
from scipy.optimize import linear_sum_assignment
from scipy.spatial import distance_matrix
from scipy.spatial.distance import cdist
try:
import qml
except ImportError:
qml = None
# Rotation method identifiers accepted on the command line.
METHOD_KABSCH = "kabsch"
METHOD_QUATERNION = "quaternion"
METHOD_NOROTATION = "none"
ROTATION_METHODS = [METHOD_KABSCH, METHOD_QUATERNION, METHOD_NOROTATION]
# Atom-reordering method identifiers.
REORDER_NONE = "none"
REORDER_QML = "qml"
REORDER_HUNGARIAN = "hungarian"
REORDER_INERTIA_HUNGARIAN = "inertia-hungarian"
REORDER_BRUTE = "brute"
REORDER_DISTANCE = "distance"
REORDER_METHODS = [
    REORDER_NONE,
    REORDER_QML,
    REORDER_HUNGARIAN,
    REORDER_INERTIA_HUNGARIAN,
    REORDER_BRUTE,
    REORDER_DISTANCE,
]
# The 6 axis permutations and 8 sign combinations examined when checking
# axis reorderings and reflections of a structure.
AXIS_SWAPS = np.array([[0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 1, 0], [2, 0, 1]])
AXIS_REFLECTIONS = np.array(
    [
        [1, 1, 1],
        [-1, 1, 1],
        [1, -1, 1],
        [1, 1, -1],
        [-1, -1, 1],
        [-1, 1, -1],
        [1, -1, -1],
        [-1, -1, -1],
    ]
)
ELEMENT_WEIGHTS = {
1: 1.00797,
2: 4.00260,
3: 6.941,
4: 9.01218,
5: 10.81,
6: 12.011,
7: 14.0067,
8: 15.9994,
9: 18.998403,
10: 20.179,
11: 22.98977,
12: 24.305,
13: 26.98154,
14: 28.0855,
15: 30.97376,
16: 32.06,
17: 35.453,
19: 39.0983,
18: 39.948,
20: 40.08,
21: 44.9559,
22: 47.90,
23: 50.9415,
24: 51.996,
25: 54.9380,
26: 55.847,
28: 58.70,
27: 58.9332,
29: 63.546,
30: 65.38,
31: 69.72,
32: 72.59,
33: 74.9216,
34: 78.96,
35: 79.904,
36: 83.80,
37: 85.4678,
38: 87.62,
39: 88.9059,
40: 91.22,
41: 92.9064,
42: 95.94,
43: 98,
44: 101.07,
45: 102.9055,
46: 106.4,
47: 107.868,
48: 112.41,
49: 114.82,
50: 118.69,
51: 121.75,
53: 126.9045,
52: 127.60,
54: 131.30,
55: 132.9054,
56: 137.33,
57: 138.9055,
58: 140.12,
59: 140.9077,
60: 144.24,
61: 145,
62: 150.4,
63: 151.96,
64: 157.25,
65: 158.9254,
66: 162.50,
67: 164.9304,
68: 167.26,
69: 168.9342,
70: 173.04,
71: 174.967,
72: 178.49,
73: 180.9479,
74: 183.85,
75: 186.207,
76: 190.2,
77: 192.22,
78: 195.09,
79: 196.9665,
80: 200.59,
81: 204.37,
82: 207.2,
83: 208.9804,
84: 209,
85: 210,
86: 222,
87: 223,
88: 226.0254,
89: 227.0278,
91: 231.0359,
90: 232.0381,
93: 237.0482,
92: 238.029,
94: 242,
95: 243,
97: 247,
96: 247,
102: 250,
98: 251,
99: 252,
108: 255,
109: 256,
100: 257,
101: 258,
103: 260,
104: 261,
107: 262,
105: 262,
106: 263,
110: 269,
111: 272,
112: 277,
}
ELEMENT_NAMES = {
1: "H",
2: "He",
3: "Li",
4: "Be",
5: "B",
6: "C",
7: "N",
8: "O",
9: "F",
10: "Ne",
11: "Na",
12: "Mg",
13: "Al",
14: "Si",
15: "P",
16: "S",
17: "Cl",
18: "Ar",
19: "K",
20: "Ca",
21: "Sc",
22: "Ti",
23: "V",
24: "Cr",
25: "Mn",
26: "Fe",
27: "Co",
28: "Ni",
29: "Cu",
30: "Zn",
31: "Ga",
32: "Ge",
33: "As",
34: "Se",
35: "Br",
36: "Kr",
37: "Rb",
38: "Sr",
39: "Y",
40: "Zr",
41: "Nb",
42: "Mo",
43: "Tc",
44: "Ru",
45: "Rh",
46: "Pd",
47: "Ag",
48: "Cd",
49: "In",
50: "Sn",
51: "Sb",
52: "Te",
53: "I",
54: "Xe",
55: "Cs",
56: "Ba",
57: "La",
58: "Ce",
59: "Pr",
60: "Nd",
61: "Pm",
62: "Sm",
63: "Eu",
64: "Gd",
65: "Tb",
66: "Dy",
67: "Ho",
68: "Er",
69: "Tm",
70: "Yb",
71: "Lu",
72: "Hf",
73: "Ta",
74: "W",
75: "Re",
76: "Os",
77: "Ir",
78: "Pt",
79: "Au",
80: "Hg",
81: "Tl",
82: "Pb",
83: "Bi",
84: "Po",
85: "At",
86: "Rn",
87: "Fr",
88: "Ra",
89: "Ac",
90: "Th",
91: "Pa",
92: "U",
93: "Np",
94: "Pu",
95: "Am",
96: "Cm",
97: "Bk",
98: "Cf",
99: "Es",
100: "Fm",
101: "Md",
102: "No",
103: "Lr",
104: "Rf",
105: "Db",
106: "Sg",
107: "Bh",
108: "Hs",
109: "Mt",
110: "Ds",
111: "Rg",
112: "Cn",
114: "Uuq",
116: "Uuh",
}
NAMES_ELEMENT = {value: key for key, value in ELEMENT_NAMES.items()}
def str_atom(atom):
    """
    Convert an atomic number to its element symbol.

    (Fixes the original docstring, which documented the inverse
    conversion.)

    Parameters
    ----------
    atom : int
        Atomic number, e.g. 6.

    Returns
    -------
    str
        Element symbol, e.g. "C".
    """
    return ELEMENT_NAMES[atom]
def int_atom(atom):
    """
    Convert an element symbol to its atomic number.

    (Fixes the original docstring, which documented the inverse
    conversion.)

    Parameters
    ----------
    atom : str
        Element symbol in any case, e.g. "c" or "C".

    Returns
    -------
    int
        Atomic number, e.g. 6.
    """
    # capitalize() normalizes e.g. "cl"/"CL" to the "Cl" table key.
    return NAMES_ELEMENT[atom.capitalize()]
def rmsd(V, W):
    """
    Root-mean-square deviation between two paired coordinate sets.

    Parameters
    ----------
    V : array
        (N,D) matrix, where N is points and D is dimension.
    W : array
        (N,D) matrix, where N is points and D is dimension.

    Returns
    -------
    float
        Root-mean-square deviation between the two vectors.
    """
    # Note: normalized by the number of points N, not by N*D.
    delta = np.asarray(V) - np.asarray(W)
    return np.sqrt((delta ** 2).sum() / len(V))
def kabsch_rmsd(P, Q, W=None, translate=False):
    """
    RMSD of P against Q after optimal superposition via Kabsch.

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.
    W : array or None
        Optional (N,) per-point weight vector.
    translate : bool
        If True, center both coordinate sets on their centroids first.

    Returns
    -------
    float
        Root-mean-square deviation after rotation.
    """
    if translate:
        P = P - centroid(P)
        Q = Q - centroid(Q)
    # The weighted case delegates entirely to the weighted Kabsch routine.
    if W is not None:
        return kabsch_weighted_rmsd(P, Q, W)
    return rmsd(kabsch_rotate(P, Q), Q)
def kabsch_rotate(P, Q):
    """
    Rotate matrix P onto matrix Q using the Kabsch algorithm.

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.

    Returns
    -------
    array
        P rotated by the optimal rotation, shape (N,D).
    """
    return np.dot(P, kabsch(P, Q))
def kabsch_fit(P, Q, W=None):
    """
    Superimpose P onto Q (rotation plus translation) via Kabsch.

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.
    W : array or None
        Optional (N,) per-point weight vector.

    Returns
    -------
    array
        P rotated and translated into Q's frame, shape (N,D).
    """
    if W is not None:
        return kabsch_weighted_fit(P, Q, W, return_rmsd=False)
    # Center both sets, rotate P, then shift into Q's original frame.
    q_center = centroid(Q)
    aligned = kabsch_rotate(P - centroid(P), Q - q_center)
    return aligned + q_center
def kabsch(P, Q):
    """
    Optimal rotation matrix mapping P onto Q (Kabsch algorithm).

    Both point sets are assumed already translated to a common centroid.
    The covariance matrix is decomposed by SVD and the sign of the
    determinant product is used to avoid returning an improper rotation
    (reflection).  See http://en.wikipedia.org/wiki/Kabsch_algorithm

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.

    Returns
    -------
    array
        (D,D) rotation matrix U such that P @ U best matches Q.
    """
    # Covariance of the paired point sets.
    H = np.dot(np.transpose(P), Q)
    U_svd, S, Vt = np.linalg.svd(H)
    # Flip the last singular direction if the result would be a reflection,
    # guaranteeing a right-handed rotation.
    if np.linalg.det(U_svd) * np.linalg.det(Vt) < 0.0:
        S[-1] = -S[-1]
        U_svd[:, -1] = -U_svd[:, -1]
    return np.dot(U_svd, Vt)
def kabsch_weighted(P, Q, W=None):
    """
    Weighted Kabsch: rotation, translation and RMSD mapping P onto Q.

    Each vector set is an (N,D) matrix.  An optional (N,) weight vector W
    may be provided.  P and Q do NOT need to be pre-centered.  Returns
    the rotation matrix U, translation vector V, and the RMS deviation
    between Q and P', where P' = P * U + V.
    For more info see http://en.wikipedia.org/wiki/Kabsch_algorithm

    Improvements over the original: the covariance matrix is computed as
    a single matrix product instead of an O(9N) Python triple loop, and
    the input weight array is no longer shadowed by the SVD output.

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.
    W : array or None
        (N) vector, where N is points.

    Returns
    -------
    U : matrix
        Rotation matrix (D,D)
    V : vector
        Translation vector (D)
    RMSD : float
        Root mean squared deviation between P and Q
    """
    # Default to uniform weights 1/N; tile to (N, 3) as the math below
    # multiplies element-wise against the coordinate arrays.
    if W is None:
        W = np.ones(len(P)) / len(P)
    W = np.array([W, W, W]).T
    # iw normalizes by total weight (3/sum because W is tiled over 3 axes).
    iw = 3.0 / W.sum()
    # Weighted covariance: C[i, k] = sum_j P[j, i] * W[j, i] * Q[j, k].
    C = np.dot((P * W).T, Q)
    # Weighted centroids and weighted sums of squared deviations.
    CMP = (P * W).sum(axis=0)
    CMQ = (Q * W).sum(axis=0)
    PSQ = (P * P * W).sum() - (CMP * CMP).sum() * iw
    QSQ = (Q * Q * W).sum() - (CMQ * CMQ).sum() * iw
    # Center the covariance matrix.
    C = (C - np.outer(CMP, CMQ) * iw) * iw
    # Optimal rotation via SVD, with the usual determinant sign correction
    # to guarantee a proper (right-handed) rotation.
    V_svd, S, W_svd = np.linalg.svd(C)
    if np.linalg.det(V_svd) * np.linalg.det(W_svd) < 0.0:
        S[-1] = -S[-1]
        V_svd[:, -1] = -V_svd[:, -1]
    U = np.dot(V_svd, W_svd)
    # RMSD from the weighted sums; clamp tiny negatives from round-off.
    msd = (PSQ + QSQ) * iw - 2.0 * S.sum()
    if msd < 0.0:
        msd = 0.0
    rmsd_ = np.sqrt(msd)
    # Translation completing the map of P onto Q's frame.
    V = (CMP - np.dot(U, CMQ)) * iw
    return U, V, rmsd_
def kabsch_weighted_fit(P, Q, W=None, return_rmsd=False):
    """
    Fit P onto Q using the weighted Kabsch transform.

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.
    W : vector
        Optional (N,) per-point weights.
    return_rmsd : bool
        If True, also return the RMSD of the fit.

    Returns
    -------
    array
        P transformed into Q's frame; with return_rmsd=True, a
        (coordinates, rmsd) pair.
    """
    # Solve for the transform mapping Q onto P, then apply its inverse
    # orientation to P (matches the original formulation).
    rot, trans, err = kabsch_weighted(Q, P, W)
    fitted = np.dot(P, rot.T) + trans
    if return_rmsd:
        return fitted, err
    return fitted
def kabsch_weighted_rmsd(P, Q, W=None):
    """
    RMSD between P and Q after weighted Kabsch superposition.

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.
    W : vector
        Optional (N,) per-point weights.

    Returns
    -------
    float
        Weighted root-mean-square deviation.
    """
    _, _, w_rmsd = kabsch_weighted(P, Q, W)
    return w_rmsd
def quaternion_rmsd(P, Q):
    """
    Rotate matrix P onto Q with the quaternion method, then compute the RMSD.
    Based on doi:10.1016/1049-9660(91)90036-O

    Parameters
    ----------
    P : array
        (N,D) matrix, where N is points and D is dimension.
    Q : array
        (N,D) matrix, where N is points and D is dimension.

    Returns
    -------
    rmsd : float
    """
    rotation = quaternion_rotate(P, Q)
    rotated = np.dot(P, rotation)
    return rmsd(rotated, Q)
def quaternion_transform(r):
    """
    Build the 3x3 rotation matrix encoded by quaternion r.

    Note: translation is zero when the centroids of the two molecules
    coincide, which the callers arrange beforehand.
    """
    # W(r)^T Q(r) is a 4x4 matrix whose upper-left 3x3 block is the rotation.
    w_transposed = makeW(*r).T
    q_matrix = makeQ(*r)
    return w_transposed.dot(q_matrix)[:3, :3]
def makeW(r1, r2, r3, r4=0):
    """
    Auxiliary 4x4 matrix W(q) used in the quaternion-based rotation.

    The quaternion is (r1, r2, r3, r4) with r4 the scalar part
    (defaults to 0 for pure-vector quaternions, i.e. coordinates).
    """
    row0 = [r4, r3, -r2, r1]
    row1 = [-r3, r4, r1, r2]
    row2 = [r2, -r1, r4, r3]
    row3 = [-r1, -r2, -r3, r4]
    return np.asarray([row0, row1, row2, row3])
def makeQ(r1, r2, r3, r4=0):
    """
    Auxiliary 4x4 matrix Q(q) used in the quaternion-based rotation.

    The quaternion is (r1, r2, r3, r4) with r4 the scalar part
    (defaults to 0 for pure-vector quaternions, i.e. coordinates).
    """
    row0 = [r4, -r3, r2, r1]
    row1 = [r3, r4, -r1, r2]
    row2 = [-r2, r1, r4, r3]
    row3 = [-r1, -r2, -r3, r4]
    return np.asarray([row0, row1, row2, row3])
def quaternion_rotate(X, Y):
    """
    Calculate the rotation matrix that rotates X onto Y via quaternions.

    Parameters
    ----------
    X : array
        (N,D) matrix, where N is points and D is dimension.
    Y : array
        (N,D) matrix, where N is points and D is dimension.

    Returns
    -------
    rot : matrix
        Rotation matrix (D,D)
    """
    n_points = X.shape[0]
    W = np.asarray([makeW(*Y[k]) for k in range(n_points)])
    Q = np.asarray([makeQ(*X[k]) for k in range(n_points)])
    # Accumulate Q_k^T W_k over all points; the eigenvector of the largest
    # eigenvalue of the sum is the optimal quaternion.
    products = np.asarray([np.dot(Q[k].T, W[k]) for k in range(n_points)])
    A = np.sum(products, axis=0)
    eigenvalues, eigenvectors = np.linalg.eigh(A)
    best_quaternion = eigenvectors[:, eigenvalues.argmax()]
    return quaternion_transform(best_quaternion)
def centroid(X):
    """
    Centroid (mean position over all points, per coordinate direction)
    of the vectorset X.
    https://en.wikipedia.org/wiki/Centroid

    C = sum(X)/len(X)

    Parameters
    ----------
    X : array
        (N,D) matrix, where N is points and D is dimension.

    Returns
    -------
    C : array
        (D,) centroid vector
    """
    return np.mean(X, axis=0)
def hungarian_vectors(p_vecs, q_vecs, sigma=1e-0, use_kernel=True):
    """
    Hungarian cost assignment over a molecular-similarity kernel.
    Note: assumes p and q contain atoms of the same type.

    Parameters
    ----------
    p_vecs : array
        (N,L) matrix, where N is no. of atoms and L is representation length
    q_vecs : array
        (N,L) matrix, where N is no. of atoms and L is representation length
    sigma : float
        Kernel width for the Laplacian kernel (only used with use_kernel).
    use_kernel : bool
        If True, build the cost from a QML Laplacian kernel; otherwise use
        plain Euclidean distances.

    Returns
    -------
    indices_b : array
        (N) view vector of reordered assignment
    """
    if use_kernel:
        # Similarity kernel -> cost matrix: cost = 1 - K
        cost = qml.kernels.laplacian_kernel(p_vecs, q_vecs, sigma)
        cost *= -1.0
        cost += 1.0
    else:
        cost = distance_matrix(p_vecs, q_vecs)
    # Optimal assignment between atoms of the first structure and the trial
    # structure; only the column permutation is needed.
    _, indices_b = linear_sum_assignment(cost)
    return indices_b
def reorder_similarity(p_atoms, q_atoms, p_coord, q_coord, use_kernel=True):
    """
    Re-order the atoms of Q to match P using QML FCHL-ACSF representations
    and Hungarian assignment per element type.

    Parameters
    ----------
    p_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    q_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    p_coord : array
        (N,D) matrix, where N is points and D is dimension
    q_coord : array
        (N,D) matrix, where N is points and D is dimension
    use_kernel : bool
        Forwarded to hungarian_vectors (kernel vs distance cost).

    Returns
    -------
    view_reorder : array
        (N,1) matrix, reordered indexes of atom alignment based on the
        coordinates of the atoms
    """
    if qml is None:
        raise ImportError(
            "QML is not installed. Package is avaliable from"
            "\n github.com/qmlcode/qml"
            "\n pip install qml"
        )
    # Normalize element symbols to atomic numbers for QML.
    if isinstance(p_atoms[0], str):
        p_atoms = np.array([int_atom(atom) for atom in p_atoms])
        q_atoms = np.array([int_atom(atom) for atom in q_atoms])
    elements = np.unique(p_atoms)
    n_atoms = p_atoms.shape[0]
    cutoff = 20.0
    fchl_kwargs = {
        "elements": elements,
        "pad": n_atoms,
        "rcut": cutoff,
        "acut": cutoff,
    }
    p_vecs = qml.representations.generate_fchl_acsf(p_atoms, p_coord, **fchl_kwargs)
    q_vecs = qml.representations.generate_fchl_acsf(q_atoms, q_coord, **fchl_kwargs)
    # Full-size view filled in element-by-element.
    view_reorder = np.zeros(q_atoms.shape, dtype=int)
    for element in elements:
        (p_idx,) = np.where(p_atoms == element)
        (q_idx,) = np.where(q_atoms == element)
        assignment = hungarian_vectors(p_vecs[p_idx], q_vecs[q_idx], use_kernel=use_kernel)
        view_reorder[p_idx] = q_idx[assignment]
    return view_reorder
def reorder_distance(p_atoms, q_atoms, p_coord, q_coord):
    """
    Re-order the atoms of Q to match P, by element type and then by each
    atom's distance from the origin (structures are pre-centered by callers).

    Parameters
    ----------
    p_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    q_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    p_coord : array
        (N,D) matrix, where N is points and D is dimension
    q_coord : array
        (N,D) matrix, where N is points and D is dimension

    Returns
    -------
    view_reorder : array
        (N,1) matrix, reordered indexes of atom alignment based on the
        coordinates of the atoms
    """
    # Full-size view filled in element-by-element.
    view_reorder = np.zeros(q_atoms.shape, dtype=int)
    for element in np.unique(p_atoms):
        (p_idx,) = np.where(p_atoms == element)
        (q_idx,) = np.where(q_atoms == element)
        # Distance of each atom from the origin.
        p_norms = np.linalg.norm(p_coord[p_idx], axis=1)
        q_norms = np.linalg.norm(q_coord[q_idx], axis=1)
        order_p = np.argsort(p_norms)
        order_q = np.argsort(q_norms)
        # Map Q's distance ordering back onto P's original atom order.
        view = order_q[np.argsort(order_p)]
        view_reorder[p_idx] = q_idx[view]
    return view_reorder
def hungarian(A, B):
    """
    Hungarian (linear sum assignment) reordering.
    Assume A and B are coordinates for atoms of SAME type only.
    """
    # Pairwise Euclidean distances between the first structure and the trial
    # structure form the assignment cost matrix.
    cost = cdist(A, B, "euclidean")
    _, col_indices = linear_sum_assignment(cost)
    return col_indices
def reorder_hungarian(p_atoms, q_atoms, p_coord, q_coord):
    """
    Re-order the atoms of Q to match P, solving one linear sum assignment
    per element type.

    Parameters
    ----------
    p_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    q_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    p_coord : array
        (N,D) matrix, where N is points and D is dimension
    q_coord : array
        (N,D) matrix, where N is points and D is dimension

    Returns
    -------
    view_reorder : array
        (N,1) matrix, reordered indexes of atom alignment based on the
        coordinates of the atoms
    """
    # -1 marks "not assigned yet"; every slot is overwritten below.
    view_reorder = np.full(q_atoms.shape, -1, dtype=int)
    for element in np.unique(p_atoms):
        (p_idx,) = np.where(p_atoms == element)
        (q_idx,) = np.where(q_atoms == element)
        assignment = hungarian(p_coord[p_idx], q_coord[q_idx])
        view_reorder[p_idx] = q_idx[assignment]
    return view_reorder
def reorder_inertia_hungarian(p_atoms, q_atoms, p_coord, q_coord):
    """
    Align the principal inertia axes of P and Q, then re-order Q's atoms
    with the Hungarian method. Both the parallel and anti-parallel axis
    alignments are tried; the one with the lower Kabsch RMSD wins.

    Parameters
    ----------
    p_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    q_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    p_coord : array
        (N,D) matrix, where N is points and D is dimension
    q_coord : array
        (N,D) matrix, where N is points and D is dimension

    Returns
    -------
    view_reorder : array
        (N,1) matrix, reordered indexes of atom alignment based on the
        coordinates of the atoms
    """
    p_axis = get_principal_axis(p_atoms, p_coord)
    q_axis = get_principal_axis(q_atoms, q_coord)
    best_rmsd = np.inf
    best_review = None
    # Principal axes carry an arbitrary sign: try aligning q_axis both
    # parallel (+1) and anti-parallel (-1) to p_axis.
    for sign in (1.0, -1.0):
        U = rotation_matrix_vectors(p_axis, sign * q_axis)
        rotated = np.dot(q_coord, U)
        review = reorder_hungarian(p_atoms, q_atoms, p_coord, rotated)
        score = kabsch_rmsd(p_coord, rotated[review])
        # "<=" so the anti-parallel candidate wins exact ties, matching the
        # original if/else ordering.
        if score <= best_rmsd:
            best_rmsd = score
            best_review = review
    return best_review
def generate_permutations(elements, n):
    """
    Heap's algorithm: lazily yield all n! permutations of *elements*.
    https://en.wikipedia.org/wiki/Heap%27s_algorithm

    Note: the SAME list object is yielded every time, permuted in place;
    callers must copy it if they want to keep a permutation.
    """
    counters = [0] * n
    yield elements
    idx = 0
    while idx < n:
        if counters[idx] < idx:
            # Even level swaps with the first element, odd level with the
            # element at counters[idx] -- the core of Heap's algorithm.
            swap_with = 0 if idx % 2 == 0 else counters[idx]
            elements[swap_with], elements[idx] = elements[idx], elements[swap_with]
            yield elements
            counters[idx] += 1
            idx = 0
        else:
            counters[idx] = 0
            idx += 1
def brute_permutation(A, B):
    """
    Find the row ordering of B that minimizes the Kabsch RMSD against A by
    brute-force enumeration of all permutations.

    Parameters
    ----------
    A : array
        (N,D) matrix, where N is points and D is dimension
    B : array
        (N,D) matrix, where N is points and D is dimension

    Returns
    -------
    view : array
        (N,1) matrix, reordered view of B projected to A
    """
    best_rmsd = np.inf
    best_view = None
    num_atoms = A.shape[0]
    ordering = list(range(num_atoms))
    for candidate in generate_permutations(ordering, num_atoms):
        score = kabsch_rmsd(A, B[candidate])
        if score < best_rmsd:
            best_rmsd = score
            # generate_permutations mutates its list in place, so keep a copy.
            best_view = list(candidate)
    return best_view
def reorder_brute(p_atoms, q_atoms, p_coord, q_coord):
    """
    Re-order the atoms of Q to match P by brute-force permutation search,
    done independently for each element type.

    Parameters
    ----------
    p_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    q_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    p_coord : array
        (N,D) matrix, where N is points and D is dimension
    q_coord : array
        (N,D) matrix, where N is points and D is dimension

    Returns
    -------
    view_reorder : array
        (N,1) matrix, reordered indexes of atom alignment based on the
        coordinates of the atoms
    """
    # -1 marks "not assigned yet"; every slot is overwritten below.
    view_reorder = np.full(q_atoms.shape, -1, dtype=int)
    for element in np.unique(p_atoms):
        (p_idx,) = np.where(p_atoms == element)
        (q_idx,) = np.where(q_atoms == element)
        view = brute_permutation(p_coord[p_idx], q_coord[q_idx])
        view_reorder[p_idx] = q_idx[view]
    return view_reorder
def check_reflections(
    p_atoms,
    q_atoms,
    p_coord,
    q_coord,
    reorder_method=reorder_hungarian,
    rotation_method=kabsch_rmsd,
    keep_stereo=False,
):
    """
    Minimize RMSD using reflection planes for molecule P and Q
    Warning: This will affect stereo-chemistry

    Parameters
    ----------
    p_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    q_atoms : array
        (N,1) matrix, where N is points holding the atoms' names
    p_coord : array
        (N,D) matrix, where N is points and D is dimension
    q_coord : array
        (N,D) matrix, where N is points and D is dimension
    reorder_method : callable or None
        Atom re-ordering strategy applied to each transformed copy of Q;
        None skips re-ordering.
    rotation_method : callable or None
        RMSD-after-rotation function; None compares coordinates directly.
    keep_stereo : bool
        If True, skip combinations that would invert chirality.

    Returns
    -------
    min_rmsd
    min_swap
    min_reflection
    min_review
    """
    min_rmsd = np.inf
    min_swap = None
    min_reflection = None
    min_review = None
    tmp_review = None
    # Parity (+1/-1) of each axis permutation / reflection; a combined
    # parity of -1 marks an improper transform (mirror image).
    swap_mask = [1, -1, -1, 1, -1, 1]
    reflection_mask = [1, -1, -1, -1, 1, 1, 1, -1]
    for swap, i in zip(AXIS_SWAPS, swap_mask):
        for reflection, j in zip(AXIS_REFLECTIONS, reflection_mask):
            # skip enantiomers: i*j == -1 means the combination mirrors Q
            if keep_stereo and i * j == -1:
                continue
            # Work on copies so the caller's Q is never mutated.
            tmp_atoms = copy.copy(q_atoms)
            tmp_coord = copy.deepcopy(q_coord)
            tmp_coord = tmp_coord[:, swap]
            tmp_coord = np.dot(tmp_coord, np.diag(reflection))
            tmp_coord -= centroid(tmp_coord)
            # Reorder
            if reorder_method is not None:
                tmp_review = reorder_method(p_atoms, tmp_atoms, p_coord, tmp_coord)
                tmp_coord = tmp_coord[tmp_review]
                tmp_atoms = tmp_atoms[tmp_review]
            # Rotation
            if rotation_method is None:
                this_rmsd = rmsd(p_coord, tmp_coord)
            else:
                this_rmsd = rotation_method(p_coord, tmp_coord)
            if this_rmsd < min_rmsd:
                min_rmsd = this_rmsd
                min_swap = swap
                min_reflection = reflection
                min_review = tmp_review
    # Sanity check: after the winning re-ordering the atom labels must match.
    # NOTE(review): when reorder_method is None, min_review stays None and
    # q_atoms[None] relies on numpy newaxis broadcasting -- confirm intended.
    if not (p_atoms == q_atoms[min_review]).all():
        print("error: Not aligned")
        quit()
    return min_rmsd, min_swap, min_reflection, min_review
def rotation_matrix_vectors(v1, v2):
    """
    Rotation matrix taking v1 onto v2, via Rodrigues' rotation formula.
    (see https://math.stackexchange.com/a/476311)

    Parameters
    ----------
    v1 : array
        Dim 3 float array
    v2 : array
        Dim 3 float array

    Returns
    -------
    output : 3x3 matrix
        Rotation matrix
    """
    if (v1 == v2).all():
        # Identical vectors: identity rotation.
        return np.eye(3)
    if (v1 == -v2).all():
        # Opposite vectors: Rodrigues' formula degenerates (sin == 0),
        # so return a rotation of pi around the y-axis instead.
        return np.array([[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]])
    axis = np.cross(v1, v2)
    sin_angle = np.linalg.norm(axis)
    cos_angle = np.vdot(v1, v2)
    skew = np.array(
        [
            [0.0, -axis[2], axis[1]],
            [axis[2], 0.0, -axis[0]],
            [-axis[1], axis[0], 0.0],
        ]
    )
    factor = (1.0 - cos_angle) / (sin_angle * sin_angle)
    return np.eye(3) + skew + np.dot(skew, skew) * factor
def get_cm(atoms, V):
    """
    Center of mass of V.

    Parameters
    ----------
    atoms : list
        List of atomic types (symbols or atomic numbers)
    V : array
        (N,3) matrix of atomic coordinates

    Returns
    -------
    output : (3) array
        The CM vector
    """
    # Normalize element symbols to atomic numbers for the weight lookup.
    if isinstance(atoms[0], str):
        atoms = [int_atom(atom) for atom in atoms]
    masses = [ELEMENT_WEIGHTS[number] for number in atoms]
    return np.average(V, axis=0, weights=masses)
def get_inertia_tensor(atoms, V):
    """
    Tensor of inertia of V about its center of mass.

    Parameters
    ----------
    atoms : list
        List of atomic types (symbols or atomic numbers)
    V : array
        (N,3) matrix of atomic coordinates

    Returns
    -------
    output : 3x3 float matrix
        The tensor of inertia
    """
    # Normalize element symbols to atomic numbers for the weight lookup.
    if isinstance(atoms[0], str):
        atoms = [int_atom(atom) for atom in atoms]
    centered = V - get_cm(atoms, V)
    ixx = 0.0
    iyy = 0.0
    izz = 0.0
    ixy = 0.0
    ixz = 0.0
    iyz = 0.0
    for species, (x, y, z) in zip(atoms, centered):
        mass = ELEMENT_WEIGHTS[species]
        # Diagonal terms: m * (distance from the respective axis)^2
        ixx += mass * (y * y + z * z)
        iyy += mass * (x * x + z * z)
        izz += mass * (x * x + y * y)
        # Off-diagonal (product of inertia) terms are negated by convention.
        ixy -= mass * x * y
        ixz -= mass * x * z
        iyz -= mass * y * z
    return np.array([[ixx, ixy, ixz], [ixy, iyy, iyz], [ixz, iyz, izz]])
def get_principal_axis(atoms, V):
    """
    Get the molecule's principal axis: the eigenvector of the inertia
    tensor belonging to the largest eigenvalue.

    Parameters
    ----------
    atoms : list
        List of atomic types (symbols or atomic numbers)
    V : array
        (N,3) matrix of atomic coordinates

    Returns
    -------
    output : array
        Array of dim 3 containing the principal axis
    """
    inertia = get_inertia_tensor(atoms, V)
    eigval, eigvec = np.linalg.eig(inertia)
    # Bug fix: np.linalg.eig returns eigenvectors as COLUMNS (eigvec[:, i]
    # pairs with eigval[i]); the previous row indexing eigvec[argmax] did
    # not return an eigenvector of the inertia tensor.
    return eigvec[:, np.argmax(eigval)]
def set_coordinates(atoms, V, title="", decimals=8):
    """
    Format coordinates V with corresponding atoms as an XYZ-format string.

    Parameters
    ----------
    atoms : list
        List of atomic types
    V : array
        (N,3) matrix of atomic coordinates
    title : string (optional)
        Title of molecule
    decimals : int (optional)
        number of decimals for the coordinates

    Returns
    -------
    output : str
        Molecule in XYZ format
    """
    n_atoms, _ = V.shape
    # XYZ wants element symbols; convert atomic numbers if needed.
    if not isinstance(atoms[0], str):
        atoms = [str_atom(atom) for atom in atoms]
    line_fmt = "{:<2}" + (" {:15." + str(decimals) + "f}") * 3
    lines = [str(n_atoms), title]
    for i in range(n_atoms):
        lines.append(line_fmt.format(atoms[i], V[i, 0], V[i, 1], V[i, 2]))
    return "\n".join(lines)
def print_coordinates(atoms, V, title=""):
    """
    Print coordinates V with corresponding atoms to stdout in XYZ format.

    Parameters
    ----------
    atoms : list
        List of element types
    V : array
        (N,3) matrix of atomic coordinates
    title : string (optional)
        Title of molecule
    """
    xyz = set_coordinates(atoms, V, title=title)
    print(xyz)
def get_coordinates(filename, fmt, is_gzip=False, return_atoms_as_int=False):
    """
    Get coordinates from filename in format fmt. Supports XYZ and PDB.

    Parameters
    ----------
    filename : string
        Filename to read
    fmt : string
        Format of filename. Either xyz or pdb.
    is_gzip : bool
        If True, the file is gzip-compressed.
    return_atoms_as_int : bool
        If True, return atomic numbers instead of element symbols.

    Returns
    -------
    atoms : list
        List of atomic types
    V : array
        (N,3) where N is number of atoms
    """
    # Dispatch table over supported formats.
    parsers = {"xyz": get_coordinates_xyz, "pdb": get_coordinates_pdb}
    if fmt not in parsers:
        exit("Could not recognize file format: {:s}".format(fmt))
    return parsers[fmt](filename, is_gzip=is_gzip, return_atoms_as_int=return_atoms_as_int)
def get_coordinates_pdb(filename, is_gzip=False, return_atoms_as_int=False):
    """
    Get coordinates from the first chain in a pdb file
    and return a vectorset with all the coordinates.

    Parameters
    ----------
    filename : string
        Filename to read
    is_gzip : bool
        If True, open the file with gzip (e.g. foo.pdb.gz).
    return_atoms_as_int : bool
        If True, return atomic numbers instead of element symbols.

    Returns
    -------
    atoms : ndarray
        Atom types (symbols, or atomic numbers if return_atoms_as_int)
    V : ndarray
        (N,3) where N is number of atoms
    """
    # PDB files tend to be a bit of a mess. The x, y and z coordinates
    # are supposed to be in column 31-38, 39-46 and 47-54, but this is
    # not always the case.
    # Because of this the three first columns containing a decimal is used.
    # Since the format doesn't require a space between columns, we use the
    # above column indices as a fallback.
    x_column = None
    V = list()
    # Same with atoms and atom naming.
    # The most robust way to do this is probably
    # to assume that the atomtype is given in column 3.
    atoms = list()
    if is_gzip:
        openfunc = gzip.open
        openarg = "rt"
    else:
        openfunc = open
        openarg = "r"
    with openfunc(filename, openarg) as f:
        lines = f.readlines()
    for line in lines:
        # Stop at the end of the first chain/model.
        if line.startswith("TER") or line.startswith("END"):
            break
        if line.startswith("ATOM"):
            tokens = line.split()
            # Try to get the atomtype
            try:
                atom = tokens[2][0]
                if atom in ("H", "C", "N", "O", "S", "P"):
                    atoms.append(atom)
                else:
                    # e.g. 1HD1
                    atom = tokens[2][1]
                    if atom == "H":
                        atoms.append(atom)
                    else:
                        # Bug fix: the original raised a bare Exception here,
                        # which the handler below (except ValueError) never
                        # caught, so unknown atom types crashed with a raw
                        # traceback instead of the intended error message.
                        raise ValueError
            except (ValueError, IndexError):
                # IndexError covers truncated ATOM lines (missing tokens[2]).
                msg = f"error: Parsing atomtype for the following line:" f" \n{line}"
                exit(msg)
            if x_column is None:
                try:
                    # look for x column
                    for i, x in enumerate(tokens):
                        if "." in x and "." in tokens[i + 1] and "." in tokens[i + 2]:
                            x_column = i
                            break
                except IndexError:
                    msg = "error: Parsing coordinates " "for the following line:" f"\n{line}"
                    exit(msg)
            # Try to read the coordinates
            try:
                V.append(np.asarray(tokens[x_column : x_column + 3], dtype=float))
            except ValueError:
                # If that doesn't work, use hardcoded indices
                try:
                    x = line[30:38]
                    y = line[38:46]
                    z = line[46:54]
                    V.append(np.asarray([x, y, z], dtype=float))
                except ValueError:
                    msg = "error: Parsing input " "for the following line:" f"\n{line}"
                    exit(msg)
    if return_atoms_as_int:
        atoms = [int_atom(atom) for atom in atoms]
    V = np.asarray(V)
    atoms = np.asarray(atoms)
    assert V.shape[0] == atoms.size
    return atoms, V
def get_coordinates_xyz(filename, is_gzip=False, return_atoms_as_int=False):
    """
    Get coordinates from filename and return a vectorset with all the
    coordinates, in XYZ format.

    Parameters
    ----------
    filename : string
        Filename to read
    is_gzip : bool
        If True, open the file with gzip (e.g. foo.xyz.gz).
    return_atoms_as_int : bool
        If True, return atomic numbers instead of element symbols.

    Returns
    -------
    atoms : ndarray
        Atom types (symbols, or atomic numbers if return_atoms_as_int)
    V : ndarray
        (N,3) where N is number of atoms
    """
    if is_gzip:
        openfunc = gzip.open
        openarg = "rt"
    else:
        openfunc = open
        openarg = "r"
    f = openfunc(filename, openarg)
    V = list()
    atoms = list()
    n_atoms = 0
    # The first line holds the number of atoms to read.
    try:
        n_atoms = int(f.readline())
    except ValueError:
        exit("error: Could not obtain the number of atoms in the .xyz file.")
    # Skip the title line
    f.readline()
    # Never read beyond the declared atom count.
    for lines_read, line in enumerate(f):
        if lines_read == n_atoms:
            break
        tokens = line.split()
        if len(tokens) < 4:
            # Element and numbers are fused or oddly spaced: fall back to regex.
            symbol = re.findall(r"[a-zA-Z]+", line)[0]
            symbol = symbol.upper()
            numbers = re.findall(r"[-]?\d+\.\d*(?:[Ee][-\+]\d+)?", line)
            numbers = [float(number) for number in numbers]
        else:
            symbol = tokens[0]
            numbers = [float(number) for number in tokens[1:]]
        # A line is only valid with at least three coordinates.
        if len(numbers) >= 3:
            V.append(np.array(numbers)[:3])
            atoms.append(symbol)
        else:
            msg = (
                f"Reading the .xyz file failed in line {lines_read + 2}."
                "Please check the format."
            )
            exit(msg)
    f.close()
    try:
        # I've seen examples where XYZ are written with integer atoms types
        atoms = [int(atom) for atom in atoms]
        atoms = [str_atom(atom) for atom in atoms]
    except ValueError:
        # Correct atom spelling
        atoms = [atom.capitalize() for atom in atoms]
    if return_atoms_as_int:
        atoms = [int_atom(atom) for atom in atoms]
    return np.array(atoms), np.array(V)
def parse_arguments(args=None):
    """
    Build the calculate_rmsd argument parser, parse *args* (or sys.argv),
    validate option combinations, and infer the file format from the file
    extension when --format is not given.

    Parameters
    ----------
    args : list of str or None
        Argument vector; None means parse sys.argv.

    Returns
    -------
    argparse.Namespace
        Parsed and validated options, with rotation/reorder_method
        lowercased and format/format_is_gzip filled in.
    """
    description = __doc__
    version_msg = f"""
rmsd {__version__}
See https://github.com/charnley/rmsd for citation information
"""
    epilog = """
"""
    valid_reorder_methods = ", ".join(REORDER_METHODS)
    valid_rotation_methods = ", ".join(ROTATION_METHODS)
    parser = argparse.ArgumentParser(
        usage="calculate_rmsd [options] FILE_A FILE_B",
        description=description,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=epilog,
    )
    # Input structures
    parser.add_argument(
        "structure_a",
        metavar="FILE_A",
        type=str,
        help="structures in .xyz or .pdb format",
    )
    parser.add_argument("structure_b", metavar="FILE_B", type=str)
    # Admin
    parser.add_argument("-v", "--version", action="version", version=version_msg)
    # Rotation
    parser.add_argument(
        "-r",
        "--rotation",
        action="store",
        default="kabsch",
        help=(
            "select rotation method. Valid methods are "
            f"{valid_rotation_methods}. "
            "Default is Kabsch."
        ),
        metavar="METHOD",
    )
    # Reorder arguments
    parser.add_argument(
        "-e",
        "--reorder",
        action="store_true",
        help="align the atoms of molecules",
    )
    parser.add_argument(
        "--reorder-method",
        action="store",
        default="hungarian",
        metavar="METHOD",
        help=(
            "select reorder method. Valid method are "
            f"{valid_reorder_methods}. "
            "Default is Hungarian."
        ),
    )
    parser.add_argument(
        "-ur",
        "--use-reflections",
        action="store_true",
        help=(
            "scan through reflections in planes "
            "(eg Y transformed to -Y -> X, -Y, Z) "
            "and axis changes, (eg X and Z coords exchanged -> Z, Y, X). "
            "This will affect stereo-chemistry."
        ),
    )
    parser.add_argument(
        "-urks",
        "--use-reflections-keep-stereo",
        action="store_true",
        help=(
            "scan through reflections in planes "
            "(eg Y transformed to -Y -> X, -Y, Z) "
            "and axis changes, (eg X and Z coords exchanged -> Z, Y, X). "
            "Stereo-chemistry will be kept."
        ),
    )
    # Filter: the three atom-selection options are mutually exclusive.
    index_group = parser.add_mutually_exclusive_group()
    index_group.add_argument(
        "-nh",
        "--ignore-hydrogen",
        "--no-hydrogen",
        action="store_true",
        help="ignore hydrogens when calculating RMSD",
    )
    index_group.add_argument(
        "--remove-idx",
        nargs="+",
        type=int,
        help="index list of atoms NOT to consider",
        metavar="IDX",
    )
    index_group.add_argument(
        "--add-idx",
        nargs="+",
        type=int,
        help="index list of atoms to consider",
        metavar="IDX",
    )
    parser.add_argument(
        "--format",
        action="store",
        help="format of input files. valid format are xyz and pdb",
        metavar="FMT",
    )
    # Hidden flag, normally inferred from a .gz extension below.
    parser.add_argument(
        "--format-is-gzip",
        action="store_true",
        default=False,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        "-p",
        "--output",
        "--print",
        action="store_true",
        help=(
            "print out structure B, "
            "centered and rotated unto structure A's coordinates "
            "in XYZ format"
        ),
    )
    if args is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(args)
    # Check illegal combinations: printing a reordered structure is not
    # possible when atoms were excluded from the calculation.
    if args.output and args.reorder and (args.ignore_hydrogen or args.add_idx or args.remove_idx):
        print(
            "error: Cannot reorder atoms and print structure, "
            "when excluding atoms (such as --ignore-hydrogen)"
        )
        sys.exit()
    if (
        args.use_reflections
        and args.output
        and (args.ignore_hydrogen or args.add_idx or args.remove_idx)
    ):
        print(
            "error: Cannot use reflections on atoms and print, "
            "when excluding atoms (such as --ignore-hydrogen)"
        )
        sys.exit()
    # Check methods
    args.rotation = args.rotation.lower()
    if args.rotation not in ROTATION_METHODS:
        print(
            f"error: Unknown rotation method: '{args.rotation}'. "
            f"Please use {valid_rotation_methods}"
        )
        sys.exit()
    # Check reorder methods
    args.reorder_method = args.reorder_method.lower()
    if args.reorder_method not in REORDER_METHODS:
        print(
            f'error: Unknown reorder method: "{args.reorder_method}". '
            f"Please use {valid_reorder_methods}"
        )
        sys.exit()
    # Check fileformat: infer from FILE_A's extension, treating foo.xyz.gz
    # as gzipped xyz.
    if args.format is None:
        filename = args.structure_a
        suffixes = pathlib.Path(filename).suffixes
        if len(suffixes) == 0:
            ext = None
        elif suffixes[-1] == ".gz":
            args.format_is_gzip = True
            ext = suffixes[-2].strip(".")
        else:
            ext = suffixes[-1].strip(".")
        args.format = ext
    return args
def main(args=None):
    """
    Command-line entry point for calculate_rmsd.

    Reads two structures, optionally filters atoms (--ignore-hydrogen,
    --remove-idx, --add-idx), optionally re-orders Q's atoms onto P, and
    prints either the minimal RMSD or structure B rotated onto A.

    Parameters
    ----------
    args : list of str or None
        Argument vector; None means parse sys.argv.
    """
    # Parse arguments
    args = parse_arguments(args)
    # As default, load the extension as format
    # Parse pdb.gz and xyz.gz as pdb and xyz formats
    p_all_atoms, p_all = get_coordinates(
        args.structure_a,
        args.format,
        is_gzip=args.format_is_gzip,
        return_atoms_as_int=True,
    )
    q_all_atoms, q_all = get_coordinates(
        args.structure_b,
        args.format,
        is_gzip=args.format_is_gzip,
        return_atoms_as_int=True,
    )
    p_size = p_all.shape[0]
    q_size = q_all.shape[0]
    # RMSD is only defined for equally-sized structures.
    if not p_size == q_size:
        print("error: Structures not same size")
        sys.exit()
    if np.count_nonzero(p_all_atoms != q_all_atoms) and not args.reorder:
        msg = """
error: Atoms are not in the same order.
Use --reorder to align the atoms (can be expensive for large structures).
Please see --help or documentation for more information or
https://github.com/charnley/rmsd for further examples.
"""
        print(msg)
        sys.exit()
    # Set local view
    p_view = None
    q_view = None
    if args.ignore_hydrogen:
        # Requires atomic numbers (hydrogen == 1), not symbols.
        assert type(p_all_atoms[0]) != str
        assert type(q_all_atoms[0]) != str
        p_view = np.where(p_all_atoms != 1)
        q_view = np.where(q_all_atoms != 1)
    elif args.remove_idx:
        index = range(p_size)
        index = set(index) - set(args.remove_idx)
        index = list(index)
        p_view = index
        q_view = index
    elif args.add_idx:
        p_view = args.add_idx
        q_view = args.add_idx
    # Set local view
    if p_view is None:
        p_coord = copy.deepcopy(p_all)
        q_coord = copy.deepcopy(q_all)
        p_atoms = copy.deepcopy(p_all_atoms)
        q_atoms = copy.deepcopy(q_all_atoms)
    else:
        # Work on copies of the filtered sub-structures only.
        p_coord = copy.deepcopy(p_all[p_view])
        q_coord = copy.deepcopy(q_all[q_view])
        p_atoms = copy.deepcopy(p_all_atoms[p_view])
        q_atoms = copy.deepcopy(q_all_atoms[q_view])
    # Recenter to centroid
    p_cent = centroid(p_coord)
    q_cent = centroid(q_coord)
    p_coord -= p_cent
    q_coord -= q_cent
    # set rotation method
    if args.rotation.lower() == METHOD_KABSCH:
        rotation_method = kabsch_rmsd
    elif args.rotation.lower() == METHOD_QUATERNION:
        rotation_method = quaternion_rmsd
    else:
        rotation_method = None
    # set reorder method
    if not args.reorder:
        reorder_method = None
    elif args.reorder_method == REORDER_QML:
        reorder_method = reorder_similarity
    elif args.reorder_method == REORDER_HUNGARIAN:
        reorder_method = reorder_hungarian
    elif args.reorder_method == REORDER_INERTIA_HUNGARIAN:
        reorder_method = reorder_inertia_hungarian
    elif args.reorder_method == REORDER_BRUTE:
        reorder_method = reorder_brute
    elif args.reorder_method == REORDER_DISTANCE:
        reorder_method = reorder_distance
    # Save the resulting RMSD
    result_rmsd = None
    if args.use_reflections:
        result_rmsd, _, _, q_review = check_reflections(
            p_atoms,
            q_atoms,
            p_coord,
            q_coord,
            reorder_method=reorder_method,
            rotation_method=rotation_method,
        )
    elif args.use_reflections_keep_stereo:
        result_rmsd, _, _, q_review = check_reflections(
            p_atoms,
            q_atoms,
            p_coord,
            q_coord,
            reorder_method=reorder_method,
            rotation_method=rotation_method,
            keep_stereo=True,
        )
    elif args.reorder:
        q_review = reorder_method(p_atoms, q_atoms, p_coord, q_coord)
        q_coord = q_coord[q_review]
        q_atoms = q_atoms[q_review]
        if not all(p_atoms == q_atoms):
            print(
                "error: Structure not aligned. "
                "Please submit bug report at "
                "http://github.com/charnley/rmsd"
            )
            sys.exit()
    # print result
    if args.output:
        if args.reorder:
            # --print needs a reordering of the FULL atom list; a filtered
            # view (e.g. --ignore-hydrogen) cannot be mapped back.
            if q_review.shape[0] != q_all.shape[0]:
                print("error: Reorder length error. " "Full atom list needed for --print")
                quit()
            q_all = q_all[q_review]
            q_all_atoms = q_all_atoms[q_review]
        # Get rotation matrix
        U = kabsch(q_coord, p_coord)
        # recenter all atoms and rotate all atoms
        q_all -= q_cent
        q_all = np.dot(q_all, U)
        # center q on p's original coordinates
        q_all += p_cent
        # done and done
        xyz = set_coordinates(q_all_atoms, q_all, title=f"{args.structure_b} - modified")
        print(xyz)
    else:
        # NOTE(review): "if result_rmsd" is falsy for an exact 0.0 RMSD, so a
        # perfect match from check_reflections is recomputed below; harmless
        # but worth confirming it is intentional.
        if result_rmsd:
            pass
        elif rotation_method is None:
            result_rmsd = rmsd(p_coord, q_coord)
        else:
            result_rmsd = rotation_method(p_coord, q_coord)
        print("{0}".format(result_rmsd))
if __name__ == "__main__":
    main()
| bsd-2-clause |
edouard-lopez/ansible-modules-core | commands/script.py | 125 | 2330 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: script
version_added: "0.9"
short_description: Runs a local script on a remote node after transferring it
description:
- "The M(script) module takes the script name followed by a list of
space-delimited arguments. "
- "The local script at path will be transferred to the remote node and then executed. "
- "The given script will be processed through the shell environment on the remote node. "
- "This module does not require python on the remote system, much like
the M(raw) module. "
options:
free_form:
description:
- path to the local script file followed by optional arguments.
required: true
default: null
aliases: []
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: no
default: null
version_added: "1.5"
removes:
description:
- a filename, when it does not exist, this step will B(not) be run.
required: no
default: null
version_added: "1.5"
notes:
- It is usually preferable to write Ansible modules than pushing scripts. Convert your script to an Ansible module for bonus points!
author:
- Ansible Core Team
- Michael DeHaan
"""
EXAMPLES = '''
# Example from Ansible Playbooks
- script: /some/local/script.sh --some-arguments 1234
# Run a script that creates a file, but only if the file is not yet created
- script: /some/local/create_file.sh --some-arguments 1234 creates=/the/created/file.txt
# Run a script that removes a file, but only if the file is not yet removed
- script: /some/local/remove_file.sh --some-arguments 1234 removes=/the/removed/file.txt
'''
| gpl-3.0 |
iamutkarshtiwari/sympy | sympy/functions/elementary/tests/test_hyperbolic.py | 44 | 25001 | from sympy import symbols, Symbol, sinh, nan, oo, zoo, pi, asinh, acosh, log, sqrt, \
coth, I, cot, E, tanh, tan, cosh, cos, S, sin, Rational, atanh, acoth, \
Integer, O, exp, sech, sec, csch, asech, acos, expand_mul
from sympy.utilities.pytest import raises
def test_sinh():
    """Exact-value checks for sinh: special values, odd parity, vanishing at
    integer multiples of pi*I, and the reduction sinh(I*x) == I*sin(x)."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    # Special values and infinities
    assert sinh(nan) == nan
    assert sinh(zoo) == nan
    assert sinh(oo) == oo
    assert sinh(-oo) == -oo
    assert sinh(0) == 0
    # Odd function: sinh(-z) == -sinh(z)
    assert sinh(1) == sinh(1)
    assert sinh(-1) == -sinh(1)
    assert sinh(x) == sinh(x)
    assert sinh(-x) == -sinh(x)
    assert sinh(pi) == sinh(pi)
    assert sinh(-pi) == -sinh(pi)
    assert sinh(2**1024 * E) == sinh(2**1024 * E)
    assert sinh(-2**1024 * E) == -sinh(2**1024 * E)
    # sinh vanishes at integer multiples of pi*I
    assert sinh(pi*I) == 0
    assert sinh(-pi*I) == 0
    assert sinh(2*pi*I) == 0
    assert sinh(-2*pi*I) == 0
    assert sinh(-3*10**73*pi*I) == 0
    assert sinh(7*10**103*pi*I) == 0
    # Exact values at rational multiples of pi*I
    assert sinh(pi*I/2) == I
    assert sinh(-pi*I/2) == -I
    assert sinh(5*pi*I/2) == I
    assert sinh(7*pi*I/2) == -I
    assert sinh(pi*I/3) == S.Half*sqrt(3)*I
    assert sinh(-2*pi*I/3) == -S.Half*sqrt(3)*I
    assert sinh(pi*I/4) == S.Half*sqrt(2)*I
    assert sinh(-pi*I/4) == -S.Half*sqrt(2)*I
    assert sinh(17*pi*I/4) == S.Half*sqrt(2)*I
    assert sinh(-3*pi*I/4) == -S.Half*sqrt(2)*I
    assert sinh(pi*I/6) == S.Half*I
    assert sinh(-pi*I/6) == -S.Half*I
    assert sinh(7*pi*I/6) == -S.Half*I
    assert sinh(-5*pi*I/6) == -S.Half*I
    assert sinh(pi*I/105) == sin(pi/105)*I
    assert sinh(-pi*I/105) == -sin(pi/105)*I
    # Reduction sinh(I*x) == I*sin(x), including symbolic integer k
    assert sinh(2 + 3*I) == sinh(2 + 3*I)
    assert sinh(x*I) == sin(x)*I
    assert sinh(k*pi*I) == 0
    assert sinh(17*k*pi*I) == 0
    assert sinh(k*pi*I/2) == sin(k*pi/2)*I
def test_sinh_series():
    """Maclaurin expansion of sinh to order O(x**10)."""
    x = Symbol('x')
    expected = x + x**3/6 + x**5/120 + x**7/5040 + x**9/362880 + O(x**10)
    assert sinh(x).series(x, 0, 10) == expected
def test_cosh():
    """Spot-check automatic evaluation of cosh: NaN/infinities, even symmetry,
    and exact values at rational multiples of pi*I (where cosh reduces to cos)."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert cosh(nan) == nan
    assert cosh(zoo) == nan
    assert cosh(oo) == oo
    assert cosh(-oo) == oo
    assert cosh(0) == 1
    # unevaluated / even-symmetry cases
    assert cosh(1) == cosh(1)
    assert cosh(-1) == cosh(1)
    assert cosh(x) == cosh(x)
    assert cosh(-x) == cosh(x)
    assert cosh(pi*I) == cos(pi)
    assert cosh(-pi*I) == cos(pi)
    assert cosh(2**1024 * E) == cosh(2**1024 * E)
    assert cosh(-2**1024 * E) == cosh(2**1024 * E)
    # cosh at odd multiples of pi*I/2 vanishes (cos of odd pi/2)
    assert cosh(pi*I/2) == 0
    assert cosh(-pi*I/2) == 0
    assert cosh((-3*10**73 + 1)*pi*I/2) == 0
    assert cosh((7*10**103 + 1)*pi*I/2) == 0
    assert cosh(pi*I) == -1
    assert cosh(-pi*I) == -1
    assert cosh(5*pi*I) == -1
    assert cosh(8*pi*I) == 1
    assert cosh(pi*I/3) == S.Half
    assert cosh(-2*pi*I/3) == -S.Half
    assert cosh(pi*I/4) == S.Half*sqrt(2)
    assert cosh(-pi*I/4) == S.Half*sqrt(2)
    assert cosh(11*pi*I/4) == -S.Half*sqrt(2)
    assert cosh(-3*pi*I/4) == -S.Half*sqrt(2)
    assert cosh(pi*I/6) == S.Half*sqrt(3)
    assert cosh(-pi*I/6) == S.Half*sqrt(3)
    assert cosh(7*pi*I/6) == -S.Half*sqrt(3)
    assert cosh(-5*pi*I/6) == -S.Half*sqrt(3)
    assert cosh(pi*I/105) == cos(pi/105)
    assert cosh(-pi*I/105) == cos(pi/105)
    assert cosh(2 + 3*I) == cosh(2 + 3*I)
    # purely imaginary symbolic arguments reduce to cos
    assert cosh(x*I) == cos(x)
    assert cosh(k*pi*I) == cos(k*pi)
    assert cosh(17*k*pi*I) == cos(17*k*pi)
    assert cosh(k*pi) == cosh(k*pi)
def test_cosh_series():
    """Maclaurin expansion of cosh to order O(x**10)."""
    x = Symbol('x')
    expected = 1 + x**2/2 + x**4/24 + x**6/720 + x**8/40320 + O(x**10)
    assert cosh(x).series(x, 0, 10) == expected
def test_tanh():
    """Spot-check automatic evaluation of tanh: saturation at +-1 for real
    infinities, odd symmetry, and reduction to tan on imaginary arguments."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert tanh(nan) == nan
    assert tanh(zoo) == nan
    assert tanh(oo) == 1
    assert tanh(-oo) == -1
    assert tanh(0) == 0
    # unevaluated / odd-symmetry cases
    assert tanh(1) == tanh(1)
    assert tanh(-1) == -tanh(1)
    assert tanh(x) == tanh(x)
    assert tanh(-x) == -tanh(x)
    assert tanh(pi) == tanh(pi)
    assert tanh(-pi) == -tanh(pi)
    assert tanh(2**1024 * E) == tanh(2**1024 * E)
    assert tanh(-2**1024 * E) == -tanh(2**1024 * E)
    assert tanh(pi*I) == 0
    assert tanh(-pi*I) == 0
    assert tanh(2*pi*I) == 0
    assert tanh(-2*pi*I) == 0
    assert tanh(-3*10**73*pi*I) == 0
    assert tanh(7*10**103*pi*I) == 0
    # tanh at odd multiples of pi*I/2 stays unevaluated (pole of tan)
    assert tanh(pi*I/2) == tanh(pi*I/2)
    assert tanh(-pi*I/2) == -tanh(pi*I/2)
    assert tanh(5*pi*I/2) == tanh(5*pi*I/2)
    assert tanh(7*pi*I/2) == tanh(7*pi*I/2)
    assert tanh(pi*I/3) == sqrt(3)*I
    assert tanh(-2*pi*I/3) == sqrt(3)*I
    assert tanh(pi*I/4) == I
    assert tanh(-pi*I/4) == -I
    assert tanh(17*pi*I/4) == I
    assert tanh(-3*pi*I/4) == I
    assert tanh(pi*I/6) == I/sqrt(3)
    assert tanh(-pi*I/6) == -I/sqrt(3)
    assert tanh(7*pi*I/6) == I/sqrt(3)
    assert tanh(-5*pi*I/6) == I/sqrt(3)
    assert tanh(pi*I/105) == tan(pi/105)*I
    assert tanh(-pi*I/105) == -tan(pi/105)*I
    assert tanh(2 + 3*I) == tanh(2 + 3*I)
    # purely imaginary symbolic arguments reduce to tan
    assert tanh(x*I) == tan(x)*I
    assert tanh(k*pi*I) == 0
    assert tanh(17*k*pi*I) == 0
    assert tanh(k*pi*I/2) == tan(k*pi/2)*I
def test_tanh_series():
    """Maclaurin expansion of tanh to order O(x**10)."""
    x = Symbol('x')
    expected = x - x**3/3 + 2*x**5/15 - 17*x**7/315 + 62*x**9/2835 + O(x**10)
    assert tanh(x).series(x, 0, 10) == expected
def test_coth():
    """Spot-check automatic evaluation of coth: pole at 0, saturation at +-1
    for real infinities, and reduction to -I*cot on imaginary arguments."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert coth(nan) == nan
    assert coth(zoo) == nan
    assert coth(oo) == 1
    assert coth(-oo) == -1
    # NOTE(review): the first assertion is a tautology; the second pins the
    # actual pole value at the origin.
    assert coth(0) == coth(0)
    assert coth(0) == zoo
    assert coth(1) == coth(1)
    assert coth(-1) == -coth(1)
    assert coth(x) == coth(x)
    assert coth(-x) == -coth(x)
    assert coth(pi*I) == -I*cot(pi)
    assert coth(-pi*I) == cot(pi)*I
    assert coth(2**1024 * E) == coth(2**1024 * E)
    assert coth(-2**1024 * E) == -coth(2**1024 * E)
    assert coth(pi*I) == -I*cot(pi)
    assert coth(-pi*I) == I*cot(pi)
    assert coth(2*pi*I) == -I*cot(2*pi)
    assert coth(-2*pi*I) == I*cot(2*pi)
    assert coth(-3*10**73*pi*I) == I*cot(3*10**73*pi)
    assert coth(7*10**103*pi*I) == -I*cot(7*10**103*pi)
    # coth vanishes at odd multiples of pi*I/2
    assert coth(pi*I/2) == 0
    assert coth(-pi*I/2) == 0
    assert coth(5*pi*I/2) == 0
    assert coth(7*pi*I/2) == 0
    assert coth(pi*I/3) == -I/sqrt(3)
    assert coth(-2*pi*I/3) == -I/sqrt(3)
    assert coth(pi*I/4) == -I
    assert coth(-pi*I/4) == I
    assert coth(17*pi*I/4) == -I
    assert coth(-3*pi*I/4) == -I
    assert coth(pi*I/6) == -sqrt(3)*I
    assert coth(-pi*I/6) == sqrt(3)*I
    assert coth(7*pi*I/6) == -sqrt(3)*I
    assert coth(-5*pi*I/6) == -sqrt(3)*I
    assert coth(pi*I/105) == -cot(pi/105)*I
    assert coth(-pi*I/105) == cot(pi/105)*I
    assert coth(2 + 3*I) == coth(2 + 3*I)
    # purely imaginary symbolic arguments reduce to -I*cot
    assert coth(x*I) == -cot(x)*I
    assert coth(k*pi*I) == -cot(k*pi)*I
    assert coth(17*k*pi*I) == -cot(17*k*pi)*I
    # NOTE(review): duplicate of the k*pi*I assertion two lines above.
    assert coth(k*pi*I) == -cot(k*pi)*I
def test_coth_series():
    """Laurent expansion of coth (simple pole at 0) to order O(x**8)."""
    x = Symbol('x')
    expected = 1/x + x/3 - x**3/45 + 2*x**5/945 - x**7/4725 + O(x**8)
    assert coth(x).series(x, 0, 8) == expected
def test_csch():
    """Spot-check automatic evaluation of csch: pole at 0, decay to 0 at real
    infinities, odd symmetry, and reduction to -I/sin on imaginary arguments."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    n = Symbol('n', positive=True)
    assert csch(nan) == nan
    assert csch(zoo) == nan
    assert csch(oo) == 0
    assert csch(-oo) == 0
    assert csch(0) == zoo
    assert csch(-1) == -csch(1)
    assert csch(-x) == -csch(x)
    assert csch(-pi) == -csch(pi)
    assert csch(-2**1024 * E) == -csch(2**1024 * E)
    # poles at integer multiples of pi*I (zeros of sin)
    assert csch(pi*I) == zoo
    assert csch(-pi*I) == zoo
    assert csch(2*pi*I) == zoo
    assert csch(-2*pi*I) == zoo
    assert csch(-3*10**73*pi*I) == zoo
    assert csch(7*10**103*pi*I) == zoo
    assert csch(pi*I/2) == -I
    assert csch(-pi*I/2) == I
    assert csch(5*pi*I/2) == -I
    assert csch(7*pi*I/2) == I
    assert csch(pi*I/3) == -2/sqrt(3)*I
    assert csch(-2*pi*I/3) == 2/sqrt(3)*I
    assert csch(pi*I/4) == -sqrt(2)*I
    assert csch(-pi*I/4) == sqrt(2)*I
    assert csch(7*pi*I/4) == sqrt(2)*I
    assert csch(-3*pi*I/4) == sqrt(2)*I
    assert csch(pi*I/6) == -2*I
    assert csch(-pi*I/6) == 2*I
    assert csch(7*pi*I/6) == 2*I
    assert csch(-7*pi*I/6) == -2*I
    assert csch(-5*pi*I/6) == 2*I
    assert csch(pi*I/105) == -1/sin(pi/105)*I
    assert csch(-pi*I/105) == 1/sin(pi/105)*I
    # purely imaginary symbolic arguments reduce to -I/sin
    assert csch(x*I) == -1/sin(x)*I
    assert csch(k*pi*I) == zoo
    assert csch(17*k*pi*I) == zoo
    assert csch(k*pi*I/2) == -1/sin(k*pi/2)*I
    # positivity assumption propagates to realness
    assert csch(n).is_real is True
def test_csch_series():
    """Laurent expansion of csch (simple pole at 0) to order O(x**10)."""
    x = Symbol('x')
    expected = (1/x - x/6 + 7*x**3/360 - 31*x**5/15120 + 127*x**7/604800
                - 73*x**9/3421440 + O(x**10))
    assert csch(x).series(x, 0, 10) == expected
def test_sech():
    """Spot-check automatic evaluation of sech: decay to 0 at real infinities,
    even symmetry, poles at odd pi*I/2, and reduction to 1/cos on imaginary
    arguments."""
    x, y = symbols('x, y')
    k = Symbol('k', integer=True)
    n = Symbol('n', positive=True)
    assert sech(nan) == nan
    assert sech(zoo) == nan
    assert sech(oo) == 0
    assert sech(-oo) == 0
    assert sech(0) == 1
    assert sech(-1) == sech(1)
    assert sech(-x) == sech(x)
    assert sech(pi*I) == sec(pi)
    assert sech(-pi*I) == sec(pi)
    assert sech(-2**1024 * E) == sech(2**1024 * E)
    # poles at odd multiples of pi*I/2 (zeros of cos)
    assert sech(pi*I/2) == zoo
    assert sech(-pi*I/2) == zoo
    assert sech((-3*10**73 + 1)*pi*I/2) == zoo
    assert sech((7*10**103 + 1)*pi*I/2) == zoo
    assert sech(pi*I) == -1
    assert sech(-pi*I) == -1
    assert sech(5*pi*I) == -1
    assert sech(8*pi*I) == 1
    assert sech(pi*I/3) == 2
    assert sech(-2*pi*I/3) == -2
    assert sech(pi*I/4) == sqrt(2)
    assert sech(-pi*I/4) == sqrt(2)
    assert sech(5*pi*I/4) == -sqrt(2)
    assert sech(-5*pi*I/4) == -sqrt(2)
    assert sech(pi*I/6) == 2/sqrt(3)
    assert sech(-pi*I/6) == 2/sqrt(3)
    assert sech(7*pi*I/6) == -2/sqrt(3)
    assert sech(-5*pi*I/6) == -2/sqrt(3)
    assert sech(pi*I/105) == 1/cos(pi/105)
    assert sech(-pi*I/105) == 1/cos(pi/105)
    # purely imaginary symbolic arguments reduce to 1/cos
    assert sech(x*I) == 1/cos(x)
    assert sech(k*pi*I) == 1/cos(k*pi)
    assert sech(17*k*pi*I) == 1/cos(17*k*pi)
    # positivity assumption propagates to realness
    assert sech(n).is_real is True
def test_sech_series():
    """Maclaurin expansion of sech to order O(x**10)."""
    x = Symbol('x')
    expected = 1 - x**2/2 + 5*x**4/24 - 61*x**6/720 + 277*x**8/8064 + O(x**10)
    assert sech(x).series(x, 0, 10) == expected
def test_asinh():
    """Spot-check asinh: odd symmetry, logarithmic values at +-1, and exact
    pi*I fractions at special imaginary points."""
    x, y = symbols('x,y')
    assert asinh(x) == asinh(x)
    assert asinh(-x) == -asinh(x)
    assert asinh(nan) == nan
    assert asinh( 0) == 0
    assert asinh(+1) == log(sqrt(2) + 1)
    assert asinh(-1) == log(sqrt(2) - 1)
    assert asinh(I) == pi*I/2
    assert asinh(-I) == -pi*I/2
    assert asinh(I/2) == pi*I/6
    assert asinh(-I/2) == -pi*I/6
    # behavior at the infinities
    assert asinh(oo) == oo
    assert asinh(-oo) == -oo
    assert asinh(I*oo) == oo
    assert asinh(-I *oo) == -oo
    assert asinh(zoo) == zoo
    assert asinh(I *(sqrt(3) - 1)/(2**(S(3)/2))) == pi*I/12
    assert asinh(-I *(sqrt(3) - 1)/(2**(S(3)/2))) == -pi*I/12
    assert asinh(I*(sqrt(5) - 1)/4) == pi*I/10
    assert asinh(-I*(sqrt(5) - 1)/4) == -pi*I/10
    assert asinh(I*(sqrt(5) + 1)/4) == 3*pi*I/10
    assert asinh(-I*(sqrt(5) + 1)/4) == -3*pi*I/10
def test_asinh_series():
    """Maclaurin expansion of asinh and its taylor_term recurrence."""
    x = Symbol('x')
    expected = x - x**3/6 + 3*x**5/40 - 5*x**7/112 + O(x**8)
    assert asinh(x).series(x, 0, 8) == expected
    fifth_term = asinh(x).taylor_term(5, x)
    assert fifth_term == 3*x**5/40
    assert asinh(x).taylor_term(7, x, fifth_term, 0) == -5*x**7/112
def test_acosh():
    """Spot-check acosh at special points (values cross-checked against
    functions.wolfram.com, see URL below) plus numeric evaluation."""
    # TODO please write more tests -- see issue 3751
    # From http://functions.wolfram.com/ElementaryFunctions/ArcCosh/03/01/
    # at specific points
    x = Symbol('x')
    assert acosh(-x) == acosh(-x)
    assert acosh(1) == 0
    assert acosh(-1) == pi*I
    assert acosh(0) == I*pi/2
    assert acosh(Rational(1, 2)) == I*pi/3
    assert acosh(Rational(-1, 2)) == 2*pi*I/3
    assert acosh(zoo) == oo
    assert acosh(I) == log(I*(1 + sqrt(2)))
    assert acosh(-I) == log(-I*(1 + sqrt(2)))
    assert acosh((sqrt(3) - 1)/(2*sqrt(2))) == 5*pi*I/12
    assert acosh(-(sqrt(3) - 1)/(2*sqrt(2))) == 7*pi*I/12
    assert acosh(sqrt(2)/2) == I*pi/4
    assert acosh(-sqrt(2)/2) == 3*I*pi/4
    assert acosh(sqrt(3)/2) == I*pi/6
    assert acosh(-sqrt(3)/2) == 5*I*pi/6
    assert acosh(sqrt(2 + sqrt(2))/2) == I*pi/8
    assert acosh(-sqrt(2 + sqrt(2))/2) == 7*I*pi/8
    assert acosh(sqrt(2 - sqrt(2))/2) == 3*I*pi/8
    assert acosh(-sqrt(2 - sqrt(2))/2) == 5*I*pi/8
    assert acosh((1 + sqrt(3))/(2*sqrt(2))) == I*pi/12
    assert acosh(-(1 + sqrt(3))/(2*sqrt(2))) == 11*I*pi/12
    assert acosh((sqrt(5) + 1)/4) == I*pi/5
    assert acosh(-(sqrt(5) + 1)/4) == 4*I*pi/5
    # 6-digit numerical evaluation at imaginary arguments
    assert str(acosh(5*I).n(6)) == '2.31244 + 1.5708*I'
    assert str(acosh(-5*I).n(6)) == '2.31244 - 1.5708*I'
def test_acosh_infinities():
    """acosh maps every infinite direction to +oo."""
    for infinite in (oo, -oo, I*oo, -I*oo):
        assert acosh(infinite) == oo
def test_acosh_series():
    """Maclaurin expansion of acosh and its taylor_term recurrence."""
    x = Symbol('x')
    expected = -I*x + pi*I/2 - I*x**3/6 - 3*I*x**5/40 - 5*I*x**7/112 + O(x**8)
    assert acosh(x).series(x, 0, 8) == expected
    fifth_term = acosh(x).taylor_term(5, x)
    assert fifth_term == -3*I*x**5/40
    assert acosh(x).taylor_term(7, x, fifth_term, 0) == -5*I*x**7/112
    # TODO please write more tests -- see issue 3751
def test_asech():
    """Spot-check asech: fixed-point values, the acosh(1/x) and I*acos(x)
    identities, round-trips sech(asech(x))/x == 1, and numeric evaluation."""
    x = Symbol('x')
    assert asech(-x) == asech(-x)
    # values at fixed points
    assert asech(1) == 0
    assert asech(-1) == pi*I
    assert asech(0) == oo
    assert asech(2) == I*pi/3
    assert asech(-2) == 2*I*pi / 3
    assert asech(I) == log(1 + sqrt(2)) - I*pi/2
    assert asech(-I) == log(1 + sqrt(2)) + I*pi/2
    assert asech(sqrt(2) - sqrt(6)) == 11*I*pi / 12
    assert asech(sqrt(2 - 2/sqrt(5))) == I*pi / 10
    assert asech(-sqrt(2 - 2/sqrt(5))) == 9*I*pi / 10
    assert asech(2 / sqrt(2 + sqrt(2))) == I*pi / 8
    assert asech(-2 / sqrt(2 + sqrt(2))) == 7*I*pi / 8
    assert asech(sqrt(5) - 1) == I*pi / 5
    assert asech(1 - sqrt(5)) == 4*I*pi / 5
    assert asech(-sqrt(2*(2 + sqrt(2)))) == 5*I*pi / 8
    # properties
    # asech(x) == acosh(1/x)
    assert asech(sqrt(2)) == acosh(1/sqrt(2))
    assert asech(2/sqrt(3)) == acosh(sqrt(3)/2)
    assert asech(2/sqrt(2 + sqrt(2))) == acosh(sqrt(2 + sqrt(2))/2)
    assert asech(S(2)) == acosh(1/S(2))
    # asech(x) == I*acos(x)
    # (Note: the exact formula is asech(x) == +/- I*acos(x))
    assert asech(-sqrt(2)) == I*acos(-1/sqrt(2))
    assert asech(-2/sqrt(3)) == I*acos(-sqrt(3)/2)
    assert asech(-S(2)) == I*acos(-S.Half)
    assert asech(-2/sqrt(2)) == I*acos(-sqrt(2)/2)
    # sech(asech(x)) / x == 1
    assert expand_mul(sech(asech(sqrt(6) - sqrt(2))) / (sqrt(6) - sqrt(2))) == 1
    assert expand_mul(sech(asech(sqrt(6) + sqrt(2))) / (sqrt(6) + sqrt(2))) == 1
    assert (sech(asech(sqrt(2 + 2/sqrt(5)))) / (sqrt(2 + 2/sqrt(5)))).simplify() == 1
    assert (sech(asech(-sqrt(2 + 2/sqrt(5)))) / (-sqrt(2 + 2/sqrt(5)))).simplify() == 1
    assert (sech(asech(sqrt(2*(2 + sqrt(2))))) / (sqrt(2*(2 + sqrt(2))))).simplify() == 1
    assert expand_mul(sech(asech((1 + sqrt(5)))) / ((1 + sqrt(5)))) == 1
    assert expand_mul(sech(asech((-1 - sqrt(5)))) / ((-1 - sqrt(5)))) == 1
    assert expand_mul(sech(asech((-sqrt(6) - sqrt(2)))) / ((-sqrt(6) - sqrt(2)))) == 1
    # numerical evaluation
    assert str(asech(5*I).n(6)) == '0.19869 - 1.5708*I'
    assert str(asech(-5*I).n(6)) == '0.19869 + 1.5708*I'
def test_asech_infinities():
    """asech at the infinities: real directions give I*pi/2, zoo gives nan."""
    for infinite in (oo, -oo):
        assert asech(infinite) == I*pi/2
    assert asech(zoo) == nan
def test_atanh():
    """Spot-check atanh at special points (values cross-checked against
    functions.wolfram.com, see URL below), infinities, and odd symmetry."""
    # TODO please write more tests -- see issue 3751
    # From http://functions.wolfram.com/ElementaryFunctions/ArcTanh/03/01/
    # at specific points
    x = Symbol('x')
    #at specific points
    assert atanh(0) == 0
    assert atanh(I) == I*pi/4
    assert atanh(-I) == -I*pi/4
    assert atanh(1) == oo
    assert atanh(-1) == -oo
    # at infinites
    assert atanh(I*oo) == I*pi/2
    assert atanh(-I*oo) == -I*pi/2
    assert atanh(zoo) == nan
    #properties
    assert atanh(-x) == -atanh(x)
    assert atanh(I/sqrt(3)) == I*pi/6
    assert atanh(-I/sqrt(3)) == -I*pi/6
    assert atanh(I*sqrt(3)) == I*pi/3
    assert atanh(-I*sqrt(3)) == -I*pi/3
    assert atanh(I*(1 + sqrt(2))) == 3*pi*I/8
    assert atanh(I*(sqrt(2) - 1)) == pi*I/8
    assert atanh(I*(1 - sqrt(2))) == -pi*I/8
    assert atanh(-I*(1 + sqrt(2))) == -3*pi*I/8
    assert atanh(I*sqrt(5 + 2*sqrt(5))) == 2*I*pi/5
    assert atanh(-I*sqrt(5 + 2*sqrt(5))) == -2*I*pi/5
    assert atanh(I*(2 - sqrt(3))) == pi*I/12
    assert atanh(I*(sqrt(3) - 2)) == -pi*I/12
    # also asserted in test_atanh_infinities below
    assert atanh(oo) == -I*pi/2
def test_atanh_series():
    """Maclaurin expansion of atanh to order O(x**10)."""
    x = Symbol('x')
    expected = x + x**3/3 + x**5/5 + x**7/7 + x**9/9 + O(x**10)
    assert atanh(x).series(x, 0, 10) == expected
def test_atanh_infinities():
    """atanh limits along the real axis land on the branch-cut values."""
    for argument, value in ((oo, -I*pi/2), (-oo, I*pi/2)):
        assert atanh(argument) == value
    # TODO please write more tests -- see issue 3751
def test_acoth():
    """Spot-check acoth at special points (values cross-checked against
    functions.wolfram.com, see URL below), infinities, and odd symmetry."""
    # TODO please write more tests -- see issue 3751
    # From http://functions.wolfram.com/ElementaryFunctions/ArcCoth/03/01/
    # at specific points
    x = Symbol('x')
    #at specific points
    assert acoth(0) == I*pi/2
    assert acoth(I) == -I*pi/4
    assert acoth(-I) == I*pi/4
    assert acoth(1) == oo
    assert acoth(-1) == -oo
    # at infinites
    assert acoth(oo) == 0
    assert acoth(-oo) == 0
    assert acoth(I*oo) == 0
    assert acoth(-I*oo) == 0
    assert acoth(zoo) == 0
    #properties
    assert acoth(-x) == -acoth(x)
    assert acoth(I/sqrt(3)) == -I*pi/3
    assert acoth(-I/sqrt(3)) == I*pi/3
    assert acoth(I*sqrt(3)) == -I*pi/6
    assert acoth(-I*sqrt(3)) == I*pi/6
    assert acoth(I*(1 + sqrt(2))) == -pi*I/8
    assert acoth(-I*(sqrt(2) + 1)) == pi*I/8
    assert acoth(I*(1 - sqrt(2))) == 3*pi*I/8
    assert acoth(I*(sqrt(2) - 1)) == -3*pi*I/8
    assert acoth(I*sqrt(5 + 2*sqrt(5))) == -I*pi/10
    assert acoth(-I*sqrt(5 + 2*sqrt(5))) == I*pi/10
    assert acoth(I*(2 + sqrt(3))) == -pi*I/12
    assert acoth(-I*(2 + sqrt(3))) == pi*I/12
    assert acoth(I*(2 - sqrt(3))) == -5*pi*I/12
    assert acoth(I*(sqrt(3) - 2)) == 5*pi*I/12
def test_acoth_series():
    """Expansion of acoth around 0 (constant branch term I*pi/2) to O(x**10)."""
    x = Symbol('x')
    expected = I*pi/2 + x + x**3/3 + x**5/5 + x**7/7 + x**9/9 + O(x**10)
    assert acoth(x).series(x, 0, 10) == expected
def test_inverses():
    """Each hyperbolic function reports its functional inverse; cosh,
    being non-injective, exposes none."""
    x = Symbol('x')
    raises(AttributeError, lambda: cosh(x).inverse())
    inverse_pairs = (
        (sinh, asinh),
        (tanh, atanh),
        (coth, acoth),
        (asinh, sinh),
        (acosh, cosh),
        (atanh, tanh),
        (acoth, coth),
        (asech, sech),
    )
    for func, inverse in inverse_pairs:
        assert func(x).inverse() == inverse
def test_leading_term():
    """Leading terms at x -> 0 for the hyperbolic family, plus the rule that
    expressions not depending on x are their own leading term."""
    x = Symbol('x')
    assert cosh(x).as_leading_term(x) == 1
    assert coth(x).as_leading_term(x) == 1/x
    assert acosh(x).as_leading_term(x) == I*pi/2
    assert acoth(x).as_leading_term(x) == I*pi/2
    # odd functions with unit slope at the origin
    for func in [sinh, tanh, asinh, atanh]:
        assert func(x).as_leading_term(x) == x
    # arguments like 1/x or constants: the expression is returned unchanged
    for func in [sinh, cosh, tanh, coth, asinh, acosh, atanh, acoth]:
        for arg in (1/x, S.Half):
            eq = func(arg)
            assert eq.as_leading_term(x) == eq
    for func in [csch, sech]:
        eq = func(S.Half)
        assert eq.as_leading_term(x) == eq
def test_complex():
    """Conjugation and real/imaginary expansion of the hyperbolic family on
    z = a + b*I with real a, b."""
    a, b = symbols('a,b', real=True)
    z = a + b*I
    # conjugate commutes with the function for real-coefficient arguments
    for func in [sinh, cosh, tanh, coth, sech, csch]:
        assert func(z).conjugate() == func(a - b*I)
    for deep in [True, False]:
        assert sinh(z).expand(
            complex=True, deep=deep) == sinh(a)*cos(b) + I*cosh(a)*sin(b)
        assert cosh(z).expand(
            complex=True, deep=deep) == cosh(a)*cos(b) + I*sinh(a)*sin(b)
        assert tanh(z).expand(complex=True, deep=deep) == sinh(a)*cosh(
            a)/(cos(b)**2 + sinh(a)**2) + I*sin(b)*cos(b)/(cos(b)**2 + sinh(a)**2)
        assert coth(z).expand(complex=True, deep=deep) == sinh(a)*cosh(
            a)/(sin(b)**2 + sinh(a)**2) - I*sin(b)*cos(b)/(sin(b)**2 + sinh(a)**2)
        assert csch(z).expand(complex=True, deep=deep) == cos(b) * sinh(a) / (sin(b)**2\
            *cosh(a)**2 + cos(b)**2 * sinh(a)**2) - I*sin(b) * cosh(a) / (sin(b)**2\
            *cosh(a)**2 + cos(b)**2 * sinh(a)**2)
        assert sech(z).expand(complex=True, deep=deep) == cos(b) * cosh(a) / (sin(b)**2\
            *sinh(a)**2 + cos(b)**2 * cosh(a)**2) - I*sin(b) * sinh(a) / (sin(b)**2\
            *sinh(a)**2 + cos(b)**2 * cosh(a)**2)
def test_complex_2899():
    """Regression (issue 2899): complex expansion of a purely real argument
    must leave the expression unchanged."""
    a, b = symbols('a,b', real=True)
    hyperbolics = (sinh, cosh, tanh, coth)
    for deep in (True, False):
        for func in hyperbolics:
            expanded = func(a).expand(complex=True, deep=deep)
            assert expanded == func(a)
def test_simplifications():
    """Compositions func(inverse(x)) simplify to the standard algebraic
    closed forms."""
    x = Symbol('x')
    assert sinh(asinh(x)) == x
    assert sinh(acosh(x)) == sqrt(x - 1) * sqrt(x + 1)
    assert sinh(atanh(x)) == x/sqrt(1 - x**2)
    assert sinh(acoth(x)) == 1/(sqrt(x - 1) * sqrt(x + 1))
    assert cosh(asinh(x)) == sqrt(1 + x**2)
    assert cosh(acosh(x)) == x
    assert cosh(atanh(x)) == 1/sqrt(1 - x**2)
    assert cosh(acoth(x)) == x/(sqrt(x - 1) * sqrt(x + 1))
    assert tanh(asinh(x)) == x/sqrt(1 + x**2)
    assert tanh(acosh(x)) == sqrt(x - 1) * sqrt(x + 1) / x
    assert tanh(atanh(x)) == x
    assert tanh(acoth(x)) == 1/x
    assert coth(asinh(x)) == sqrt(1 + x**2)/x
    assert coth(acosh(x)) == x/(sqrt(x - 1) * sqrt(x + 1))
    assert coth(atanh(x)) == 1/x
    assert coth(acoth(x)) == x
    assert csch(asinh(x)) == 1/x
    assert csch(acosh(x)) == 1/(sqrt(x - 1) * sqrt(x + 1))
    assert csch(atanh(x)) == sqrt(1 - x**2)/x
    assert csch(acoth(x)) == sqrt(x - 1) * sqrt(x + 1)
    assert sech(asinh(x)) == 1/sqrt(1 + x**2)
    assert sech(acosh(x)) == 1/x
    assert sech(atanh(x)) == sqrt(1 - x**2)
    assert sech(acoth(x)) == sqrt(x - 1) * sqrt(x + 1)/x
def test_issue_4136():
    """Regression (issue 4136): cosh(asinh(3/2)) keeps the sqrt(13/4) form."""
    inner = asinh(Integer(3)/2)
    assert cosh(inner) == sqrt(Integer(13)/4)
def test_sinh_rewrite():
    """Rewrites of sinh in terms of exp, cosh, tanh, and coth."""
    x = Symbol('x')
    as_exp = (exp(x) - exp(-x))/2
    assert sinh(x).rewrite(exp) == as_exp == sinh(x).rewrite('tractable')
    assert sinh(x).rewrite(cosh) == -I*cosh(x + I*pi/2)
    th = tanh(S.Half*x)
    assert sinh(x).rewrite(tanh) == 2*th/(1 - th**2)
    ch = coth(S.Half*x)
    assert sinh(x).rewrite(coth) == 2*ch/(ch**2 - 1)
def test_cosh_rewrite():
    """Rewrites of cosh in terms of exp, sinh, tanh, and coth."""
    x = Symbol('x')
    as_exp = (exp(x) + exp(-x))/2
    assert cosh(x).rewrite(exp) == as_exp == cosh(x).rewrite('tractable')
    assert cosh(x).rewrite(sinh) == -I*sinh(x + I*pi/2)
    th2 = tanh(S.Half*x)**2
    assert cosh(x).rewrite(tanh) == (1 + th2)/(1 - th2)
    ch2 = coth(S.Half*x)**2
    assert cosh(x).rewrite(coth) == (ch2 + 1)/(ch2 - 1)
def test_tanh_rewrite():
    """Rewrites of tanh in terms of exp, sinh, cosh, and coth."""
    x = Symbol('x')
    as_exp = (exp(x) - exp(-x))/(exp(x) + exp(-x))
    assert tanh(x).rewrite(exp) == as_exp == tanh(x).rewrite('tractable')
    assert tanh(x).rewrite(sinh) == I*sinh(x)/sinh(I*pi/2 - x)
    assert tanh(x).rewrite(cosh) == I*cosh(I*pi/2 - x)/cosh(x)
    assert tanh(x).rewrite(coth) == 1/coth(x)
def test_coth_rewrite():
    """Rewrites of coth in terms of exp, sinh, cosh, and tanh."""
    x = Symbol('x')
    as_exp = (exp(x) + exp(-x))/(exp(x) - exp(-x))
    assert coth(x).rewrite(exp) == as_exp == coth(x).rewrite('tractable')
    assert coth(x).rewrite(sinh) == -I*sinh(I*pi/2 - x)/sinh(x)
    assert coth(x).rewrite(cosh) == -I*cosh(x)/cosh(I*pi/2 - x)
    assert coth(x).rewrite(tanh) == 1/tanh(x)
def test_csch_rewrite():
    """Rewrites of csch in terms of exp, cosh, tanh, and coth."""
    x = Symbol('x')
    as_exp = 1 / (exp(x)/2 - exp(-x)/2)
    assert csch(x).rewrite(exp) == as_exp == csch(x).rewrite('tractable')
    assert csch(x).rewrite(cosh) == I/cosh(x + I*pi/2)
    th = tanh(S.Half*x)
    assert csch(x).rewrite(tanh) == (1 - th**2)/(2*th)
    ch = coth(S.Half*x)
    assert csch(x).rewrite(coth) == (ch**2 - 1)/(2*ch)
def test_sech_rewrite():
    """Rewrites of sech in terms of exp, sinh, tanh, and coth."""
    x = Symbol('x')
    as_exp = 1 / (exp(x)/2 + exp(-x)/2)
    assert sech(x).rewrite(exp) == as_exp == sech(x).rewrite('tractable')
    assert sech(x).rewrite(sinh) == I/sinh(x + I*pi/2)
    th2 = tanh(S.Half*x)**2
    assert sech(x).rewrite(tanh) == (1 - th2)/(1 + th2)
    ch2 = coth(S.Half*x)**2
    assert sech(x).rewrite(coth) == (ch2 - 1)/(ch2 + 1)
def test_derivs():
    """First derivatives of the hyperbolic functions and their inverses."""
    x = Symbol('x')
    assert coth(x).diff(x) == -sinh(x)**(-2)
    assert sinh(x).diff(x) == cosh(x)
    assert cosh(x).diff(x) == sinh(x)
    assert tanh(x).diff(x) == -tanh(x)**2 + 1
    assert csch(x).diff(x) == -coth(x)*csch(x)
    assert sech(x).diff(x) == -tanh(x)*sech(x)
    assert acoth(x).diff(x) == 1/(-x**2 + 1)
    assert asinh(x).diff(x) == 1/sqrt(x**2 + 1)
    assert acosh(x).diff(x) == 1/sqrt(x**2 - 1)
    assert atanh(x).diff(x) == 1/(-x**2 + 1)
    assert asech(x).diff(x) == -1/(x*sqrt(1 - x**2))
def test_sinh_expansion():
    """Addition and multiple-angle trig expansion of sinh."""
    x, y = symbols('x,y')
    assert sinh(x + y).expand(trig=True) == sinh(x)*cosh(y) + cosh(x)*sinh(y)
    assert sinh(2*x).expand(trig=True) == 2*sinh(x)*cosh(x)
    triple = sinh(3*x).expand(trig=True).expand()
    assert triple == sinh(x)**3 + 3*sinh(x)*cosh(x)**2
def test_cosh_expansion():
    """Addition and multiple-angle trig expansion of cosh."""
    x, y = symbols('x,y')
    assert cosh(x + y).expand(trig=True) == cosh(x)*cosh(y) + sinh(x)*sinh(y)
    assert cosh(2*x).expand(trig=True) == cosh(x)**2 + sinh(x)**2
    triple = cosh(3*x).expand(trig=True).expand()
    assert triple == 3*sinh(x)**2*cosh(x) + cosh(x)**3
| bsd-3-clause |
ajduncan/downspout | downspout/utils.py | 1 | 4412 | #!/usr/bin/env python
"""Utilities for downloading, saving and tagging files from the cloud."""
from collections import defaultdict
import importlib
import json
import os
import sys
import string
import time
import requests
import stagger
from downspout import settings
def tree():
    """Return an autovivifying nested mapping: reading a missing key creates
    a fresh subtree (PEP 8 / E731: a def, not a lambda bound to a name)."""
    return defaultdict(tree)
def safe_filename(filename):
    """Return *filename* with spaces turned into underscores and every
    character outside ASCII letters, digits, and ``-_.()`` removed."""
    allowed = "-_.(){0}{1}".format(string.ascii_letters, string.digits)
    underscored = filename.replace(' ', '_')
    return ''.join(ch for ch in underscored if ch in allowed)
# cleaned up
# http://stackoverflow.com/questions/20801034/how-to-measure-download-speed-and-progress-using-requests
def get_file(track_folder, track_filename, artist, title, url):
    """Download *url* into ``track_folder/track_filename`` (skipping files
    that already exist), ID3-tag the result with *artist*/*title*, and
    return the elapsed download time in seconds.

    Returns 0.0 when the file was already present or the download failed
    (the original crashed with UnboundLocalError on both of those paths,
    because ``elapsed`` was only assigned on a successful download).
    """
    elapsed = 0.0  # defined up front so the final return is always valid
    try:
        os.makedirs(track_folder, exist_ok=True)
        filename = "{0}/{1}".format(track_folder, track_filename)
        if not os.path.isfile(filename):
            short_url = (url[:50] + ' ...') if len(url) > 50 else url
            print("Saving {0} from {1} to {2}".format(
                track_filename, short_url, track_folder))
            with open(filename, 'wb') as f:
                # time.clock() was deprecated and removed in Python 3.8;
                # perf_counter() is the documented replacement.
                start = time.perf_counter()
                r = requests.get(url, stream=True)
                total_length = r.headers.get('content-length')
                dl = 0
                if total_length is None:  # no content length header
                    f.write(r.content)
                else:
                    for chunk in r.iter_content(1024):
                        dl += len(chunk)
                        f.write(chunk)
                        done = int(50 * dl / int(total_length))
                        # clamp the denominator so a sub-resolution first
                        # chunk cannot divide by zero
                        rate = dl // max(time.perf_counter() - start, 1e-9)
                        sys.stdout.write("\r[{0}{1}] {2} bps".format(
                            '.' * done, ' ' * (50 - done), rate))
                print('')
                elapsed = time.perf_counter() - start
            # tag after the ``with`` block so the file handle is closed first
            tagfile(filename, artist, title)
            print("Download completed in: {}".format(round(elapsed, 2)))
        else:
            print("Already downloaded: {}".format(filename))
    except Exception as exc:
        # narrowed from a silent bare ``except: pass`` -- report the failure
        # but keep the best-effort contract of not raising to the caller
        print("Error downloading {0}: {1}".format(url, exc))
    print('')
    return elapsed
# provided ID3 information, tag the file.
def tagfile(filename, artist, title):
    """Write an ID3v2.4 tag carrying *artist* and *title* onto *filename*.

    Failures are reported but never raised -- tagging is best-effort.
    """
    try:
        tag = stagger.Tag24()  # build a fresh v2.4 tag; read_tag(filename) not needed
        tag.artist = artist
        tag.title = title
        tag.write(filename)
    except Exception:
        # narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # still propagate
        print("Error tagging file: {}".format(sys.exc_info()[0]))
def download_from_metadata(metadata, artist, service):
    """Download every track listed under ``metadata[artist]['tracks']``.

    *service* is accepted for interface compatibility with callers but is not
    needed here -- all location data already lives in the metadata mapping.
    Individual track failures are reported and skipped so one bad entry does
    not abort the whole batch.
    """
    tracks = metadata[artist]['tracks']
    for track_title in tracks:
        track = tracks[track_title]  # hoist the repeated nested lookups
        try:
            get_file(track['track_folder'], track['track_filename'],
                     artist, track_title, track['url'])
        except Exception as exc:
            # narrowed from a silent bare ``except: pass``
            print("Skipping {0}: {1}".format(track_title, exc))
    print('')
    print('')
# print/dump metadata
def dump_metadata(metadata):
    """Pretty-print *metadata* as sorted, indented JSON."""
    formatted = json.dumps(metadata, sort_keys=True, indent=4)
    print(formatted)
# provided artist and service, return metadata about the artists tracks, albums, etc.
def metadata_by_artist(service, artist):
    """Return track/album metadata for *artist* from *service*, or None when
    the ``downspout.<service>`` backend module cannot be imported."""
    module_name = 'downspout.' + service
    try:
        backend = importlib.import_module(module_name)
    except ImportError:
        print("Service unknown: '{}'".format(service))
        return None
    fetcher = getattr(backend, service + '_fetch_metadata', lambda: None)
    return fetcher(artist)
# fetch all media for artist from service
def fetch(service, artist):
    """Fetch all media for *artist* from *service*.

    Returns the metadata mapping on success, False when none was retrieved.
    """
    metadata = metadata_by_artist(service, artist)
    if not metadata:
        return False
    download_from_metadata(metadata, artist, service)
    return metadata
# fetch everything from file
def fetch_all(filename):
    """Fetch media for every ``service, artist`` line in *filename*.

    Each line is comma separated; whitespace around both fields is ignored.
    Returns True once the whole file has been processed (per-entry failures
    are reported by the callees).
    """
    # ``with`` closes the handle even on error; the original leaked it
    with open(filename, 'r') as records:
        for media in records:
            [service, artist] = [item.strip() for item in media.split(',')]
            metadata = metadata_by_artist(service, artist)
            if metadata:
                # dump_metadata(metadata)
                download_from_metadata(metadata, artist, service)
    return True
| mit |
isrohutamahopetechnik/MissionPlanner | Lib/site-packages/numpy/ma/__init__.py | 54 | 1510 | """
=============
Masked Arrays
=============
Arrays sometimes contain invalid or missing data. When doing operations
on such arrays, we wish to suppress invalid values, which is the purpose masked
arrays fulfill (an example of typical use is given below).
For example, examine the following array:
>>> x = np.array([2, 1, 3, np.nan, 5, 2, 3, np.nan])
When we try to calculate the mean of the data, the result is undetermined:
>>> np.mean(x)
nan
The mean is calculated using roughly ``np.sum(x)/len(x)``, but since
any number added to ``NaN`` [1]_ produces ``NaN``, this doesn't work. Enter
masked arrays:
>>> m = np.ma.masked_array(x, np.isnan(x))
>>> m
masked_array(data = [2.0 1.0 3.0 -- 5.0 2.0 3.0 --],
mask = [False False False True False False False True],
fill_value=1e+20)
Here, we construct a masked array that suppress all ``NaN`` values. We
may now proceed to calculate the mean of the other values:
>>> np.mean(m)
2.6666666666666665
.. [1] Not-a-Number, a floating point value that is the result of an
invalid operation.
"""
# Package metadata kept in the historical SVN keyword format.
__author__ = "Pierre GF Gerard-Marchant ($Author: jarrod.millman $)"
__version__ = '1.0'
__revision__ = "$Revision: 3473 $"
__date__ = '$Date: 2007-10-29 17:18:13 +0200 (Mon, 29 Oct 2007) $'
# Python 2 implicit relative imports: pull the masked-array machinery
# (core) and the extra helpers (extras) into this package namespace.
import core
from core import *
import extras
from extras import *
# Re-export the submodules themselves plus everything they declare public.
__all__ = ['core', 'extras']
__all__ += core.__all__
__all__ += extras.__all__
# Hook numpy's test/benchmark runners up to this subpackage.
from numpy.testing import Tester
test = Tester(__file__).test
bench = Tester(__file__).bench
| gpl-3.0 |
Pulgama/supriya | supriya/daw/RackDevice.py | 1 | 1657 | from typing import List, Tuple
from uqbar.containers import UniqueTreeTuple
from .AudioChain import AudioChain
from .Chain import Chain
from .ChainContainer import ChainContainer
from .Device import Device
from .DeviceType import DeviceType
from .InstrumentChain import InstrumentChain
from .MidiChain import MidiChain
from .MixerContext import MixerContext
from .Note import Note
class RackDevice(Device, UniqueTreeTuple, MixerContext):
    """A device hosting parallel chains of nested devices (a "rack")."""

    ### INITIALIZER ###

    def __init__(self):
        Device.__init__(self)
        MixerContext.__init__(self)

    ### PRIVATE PROPERTIES ###

    @property
    def _device_type(self):
        # Infer the rack's device type from the class of chain it holds.
        chain_class = self.chains._node_class
        if chain_class is AudioChain:
            return DeviceType.AUDIO
        elif chain_class is InstrumentChain:
            return DeviceType.INSTRUMENT
        elif chain_class is MidiChain:
            return DeviceType.MIDI
        raise ValueError(chain_class)

    ### PUBLIC METHODS ###

    def add_chain(self) -> Chain:
        """Append a new, empty chain of this rack's chain class; return it."""
        chain = self.chains._node_class()
        self.chains.append(chain)
        return chain

    def perform(
        self, moment, start_notes, stop_notes
    ) -> List[Tuple["Device", List[Note], List[Note]]]:
        """Route note starts/stops to the first device of each chain; when no
        chain can take them, fall through to the next sibling device."""
        results = []
        for chain in self.chains:
            if chain.devices:
                results.append((chain.devices[0], start_notes, stop_notes))
        if not results:
            next_device = self.next_device()
            return [(next_device, start_notes, stop_notes)]
        return results

    ### PUBLIC PROPERTIES ###

    @property
    def chains(self) -> ChainContainer:
        # BUG FIX: the original returned ``self.chains`` -- the property
        # itself -- recursing forever on first access. Subclasses are
        # expected to store the container on ``self._chains``; TODO confirm
        # the attribute name against the subclass initializers.
        return self._chains
| mit |
paninetworks/neutron | neutron/tests/unit/plugins/ml2/drivers/openvswitch/agent/ovs_test_base.py | 13 | 2160 | # Copyright (C) 2014,2015 VA Linux Systems Japan K.K.
# Copyright (C) 2014 Fumihiko Kakuma <kakuma at valinux co jp>
# Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import importutils
from neutron.tests import base
_AGENT_PACKAGE = 'neutron.plugins.ml2.drivers.openvswitch.agent'
_AGENT_NAME = _AGENT_PACKAGE + '.ovs_neutron_agent'
_DVR_AGENT_NAME = ('neutron.plugins.ml2.drivers.openvswitch.agent.'
'ovs_dvr_neutron_agent')
class OVSAgentConfigTestBase(base.BaseTestCase):
    """Base test case that imports the OVS agent modules by dotted path so
    each test run sees freshly imported agent code."""
    def setUp(self):
        super(OVSAgentConfigTestBase, self).setUp()
        self.mod_agent = importutils.import_module(_AGENT_NAME)
        self.mod_dvr_agent = importutils.import_module(_DVR_AGENT_NAME)
class OVSAgentTestBase(OVSAgentConfigTestBase):
    """Adds bridge-class resolution on top of the module imports.

    Subclasses provide the ``_BR_INT_CLASS``/``_BR_PHYS_CLASS``/
    ``_BR_TUN_CLASS`` dotted paths (see OVSOFCtlTestBase below).
    """
    def setUp(self):
        super(OVSAgentTestBase, self).setUp()
        self.br_int_cls = importutils.import_class(self._BR_INT_CLASS)
        self.br_phys_cls = importutils.import_class(self._BR_PHYS_CLASS)
        self.br_tun_cls = importutils.import_class(self._BR_TUN_CLASS)
    def _bridge_classes(self):
        # Bundle the resolved bridge classes in the mapping shape the agent
        # under test expects.
        return {
            'br_int': self.br_int_cls,
            'br_phys': self.br_phys_cls,
            'br_tun': self.br_tun_cls,
        }
class OVSOFCtlTestBase(OVSAgentTestBase):
_DRIVER_PACKAGE = _AGENT_PACKAGE + '.openflow.ovs_ofctl'
_BR_INT_CLASS = _DRIVER_PACKAGE + '.br_int.OVSIntegrationBridge'
_BR_TUN_CLASS = _DRIVER_PACKAGE + '.br_tun.OVSTunnelBridge'
_BR_PHYS_CLASS = _DRIVER_PACKAGE + '.br_phys.OVSPhysicalBridge'
| apache-2.0 |
madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/functionslocal/vtable/getschema.py | 1 | 1778 | import setpath
import functions
import json
registered=True
#It returns the columns names of the table (either on a string or on a table)
# getschema outputformat=1 select * from table; -->retrun string col1,col2,col3
# getschema outputformat=0 select * from table; -->return table
# getschema select * from table; --> return table
class getschema(functions.vtable.vtbase.VT):
    """Virtual-table operator yielding the column names of a query's result.

    Usage::

        getschema outputformat=1 select * from table  -> one row 'col1,col2,...'
        getschema outputformat=0 select * from table  -> one row per column
        getschema select * from table                 -> one row per column
    """
    def VTiter(self, *parsedArgs, **envars):
        largs, dictargs = self.full_parse(parsedArgs)
        if 'query' not in dictargs:
            raise functions.OperatorError(__name__.rsplit('.')[-1], "No query argument ")
        query = dictargs['query']
        outputformat = 0
        if 'outputformat' in dictargs:
            outputformat = int(dictargs['outputformat'])
        cur = envars['db'].cursor()
        cur.execute(query)
        schema = cur.getdescriptionsafe()
        # Column names, both as a list and as one comma-joined string
        # (the original built these with an index loop and string +=).
        names = [str(col[0]) for col in schema]
        joined = ','.join(names)
        # First yield is the output schema of this operator itself.
        yield [('schema',)]
        if outputformat == 0:
            for name in names:
                yield (name,)
        if outputformat == 1:
            yield (joined,)
def Source():
    """Entry point used by madis to register this virtual-table operator."""
    return functions.vtable.vtbase.VTGenerator(getschema)
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
import setpath
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.tes | mit |
MayOneUS/pledgeservice | lib/mailchimp/requests/api.py | 361 | 4344 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.
    Returns :class:`Response <Response>` object.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) Float describing the timeout of the request in seconds.
    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    """
    # Every top-level call uses a throwaway Session.  Close it once the
    # request completes so its transport adapters / connection pools are
    # released deterministically instead of leaking until garbage
    # collection (this is the fix requests itself adopted upstream).
    session = sessions.Session()
    try:
        return session.request(method=method, url=url, **kwargs)
    finally:
        session.close()
def get(url, **kwargs):
    """Sends a GET request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    # Follow redirects unless the caller explicitly said otherwise.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, **kwargs)


def options(url, **kwargs):
    """Sends an OPTIONS request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('options', url, **kwargs)


def head(url, **kwargs):
    """Sends a HEAD request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    # HEAD defaults to NOT following redirects.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)


def post(url, data=None, **kwargs):
    """Sends a POST request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return request('post', url, data=data, **kwargs)


def put(url, data=None, **kwargs):
    """Sends a PUT request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    """Sends a PATCH request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    """Sends a DELETE request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    """
    return request('delete', url, **kwargs)
| apache-2.0 |
chatea/NewsMap | lib/bs4/builder/_htmlparser.py | 71 | 9102 | """Use the HTMLParser library to parse HTML files that aren't too bad."""
__all__ = [
'HTMLParserTreeBuilder',
]
from HTMLParser import HTMLParser
try:
from HTMLParser import HTMLParseError
except ImportError, e:
# HTMLParseError is removed in Python 3.5. Since it can never be
# thrown in 3.5, we can just define our own class as a placeholder.
class HTMLParseError(Exception):
pass
import sys
import warnings
# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
# strict=False may only be passed on Python 3.2.3+ (http://bugs.python.org/issue13273);
# earlier 3.2 releases are monkeypatched at the bottom of this file instead.
CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3
# Python 3.3 still accepts 'strict' but deprecates it, so we stop passing it.
CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3
# Python 3.4+ takes 'convert_charrefs'; we pass False so entity handling
# stays in our own handle_charref/handle_entityref callbacks.
CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4
from bs4.element import (
CData,
Comment,
Declaration,
Doctype,
ProcessingInstruction,
)
from bs4.dammit import EntitySubstitution, UnicodeDammit
from bs4.builder import (
HTML,
HTMLTreeBuilder,
STRICT,
)
HTMLPARSER = 'html.parser'
class BeautifulSoupHTMLParser(HTMLParser):
    """HTMLParser subclass that forwards parse events to the BeautifulSoup
    object stored on ``self.soup`` (assigned by HTMLParserTreeBuilder.feed()).
    """

    def handle_starttag(self, name, attrs):
        # XXX namespace
        attr_dict = {}
        for key, value in attrs:
            # Change None attribute values to the empty string
            # for consistency with the other tree builders.
            if value is None:
                value = ''
            attr_dict[key] = value
            # NOTE(review): dead assignment -- 'attrvalue' is never read
            # after this point; candidate for removal.
            attrvalue = '""'
        self.soup.handle_starttag(name, None, None, attr_dict)

    def handle_endtag(self, name):
        self.soup.handle_endtag(name)

    def handle_data(self, data):
        self.soup.handle_data(data)

    def handle_charref(self, name):
        # XXX workaround for a bug in HTMLParser. Remove this once
        # it's fixed in all supported versions.
        # http://bugs.python.org/issue13633
        # Numeric character reference: 'x'/'X' prefix means hexadecimal.
        if name.startswith('x'):
            real_name = int(name.lstrip('x'), 16)
        elif name.startswith('X'):
            real_name = int(name.lstrip('X'), 16)
        else:
            real_name = int(name)
        try:
            data = unichr(real_name)
        except (ValueError, OverflowError), e:
            # Code point outside the representable range: emit U+FFFD.
            data = u"\N{REPLACEMENT CHARACTER}"
        self.handle_data(data)

    def handle_entityref(self, name):
        # Known named entities become their character; unknown ones are
        # passed through verbatim as "&name;".
        character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
        if character is not None:
            data = character
        else:
            data = "&%s;" % name
        self.handle_data(data)

    def handle_comment(self, data):
        # Flush any pending text, then record the comment as a Comment node.
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(Comment)

    def handle_decl(self, data):
        # "<!DOCTYPE html>" arrives here as "DOCTYPE html"; strip the keyword.
        self.soup.endData()
        if data.startswith("DOCTYPE "):
            data = data[len("DOCTYPE "):]
        elif data == 'DOCTYPE':
            # i.e. "<!DOCTYPE>"
            data = ''
        self.soup.handle_data(data)
        self.soup.endData(Doctype)

    def unknown_decl(self, data):
        # CDATA sections come through here; anything else is a Declaration.
        if data.upper().startswith('CDATA['):
            cls = CData
            data = data[len('CDATA['):]
        else:
            cls = Declaration
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(cls)

    def handle_pi(self, data):
        # Processing instruction, e.g. "<?xml ...?>".
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(ProcessingInstruction)
class HTMLParserTreeBuilder(HTMLTreeBuilder):
    """Tree builder backed by Python's built-in HTMLParser module."""

    is_xml = False
    picklable = True
    NAME = HTMLPARSER
    features = [NAME, HTML, STRICT]

    def __init__(self, *args, **kwargs):
        # Only pass constructor flags the running interpreter supports
        # (see the version probes near the top of this module).
        if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED:
            kwargs['strict'] = False
        if CONSTRUCTOR_TAKES_CONVERT_CHARREFS:
            kwargs['convert_charrefs'] = False
        self.parser_args = (args, kwargs)

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None, exclude_encodings=None):
        """
        :return: A 4-tuple (markup, original encoding, encoding
        declared within markup, whether any characters had to be
        replaced with REPLACEMENT CHARACTER).
        """
        if isinstance(markup, unicode):
            # Already decoded text; nothing to sniff.
            yield (markup, None, None, False)
            return

        # Let UnicodeDammit guess the encoding, trying caller hints first.
        try_encodings = [user_specified_encoding, document_declared_encoding]
        dammit = UnicodeDammit(markup, try_encodings, is_html=True,
                               exclude_encodings=exclude_encodings)
        yield (dammit.markup, dammit.original_encoding,
               dammit.declared_html_encoding,
               dammit.contains_replacement_characters)

    def feed(self, markup):
        # A fresh parser instance per document; on failure, point the user
        # at external parsers before re-raising.
        args, kwargs = self.parser_args
        parser = BeautifulSoupHTMLParser(*args, **kwargs)
        parser.soup = self.soup
        try:
            parser.feed(markup)
        except HTMLParseError, e:
            warnings.warn(RuntimeWarning(
                "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
            raise e
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
    import re
    # Attribute-matching regex backported from Python 3.2.3; tolerant of
    # sloppy markup between attributes.
    attrfind_tolerant = re.compile(
        r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
        r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
    HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant

    locatestarttagend = re.compile(r"""
  <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
  (?:\s+ # whitespace before attribute name
    (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
      (?:\s*=\s* # value indicator
        (?:'[^']*' # LITA-enclosed value
          |\"[^\"]*\" # LIT-enclosed value
          |[^'\">\s]+ # bare value
         )
       )?
     )
   )*
  \s* # trailing whitespace
""", re.VERBOSE)
    BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend

    from html.parser import tagfind, attrfind

    def parse_starttag(self, i):
        # Backport of HTMLParser.parse_starttag() from Python 3.2.3.
        # NOTE(review): defined at module level, so 'self.__starttag_text'
        # is NOT name-mangled here -- confirm downstream readers access the
        # same unmangled attribute name.
        self.__starttag_text = None
        endpos = self.check_for_whole_start_tag(i)
        if endpos < 0:
            return endpos
        rawdata = self.rawdata
        self.__starttag_text = rawdata[i:endpos]
        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        match = tagfind.match(rawdata, i+1)
        assert match, 'unexpected call to parse_starttag()'
        k = match.end()
        self.lasttag = tag = rawdata[i+1:k].lower()
        while k < endpos:
            # Strict mode uses the stdlib regex; otherwise the tolerant one.
            if self.strict:
                m = attrfind.match(rawdata, k)
            else:
                m = attrfind_tolerant.match(rawdata, k)
            if not m:
                break
            attrname, rest, attrvalue = m.group(1, 2, 3)
            if not rest:
                attrvalue = None
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
            if attrvalue:
                attrvalue = self.unescape(attrvalue)
            attrs.append((attrname.lower(), attrvalue))
            k = m.end()
        end = rawdata[k:endpos].strip()
        if end not in (">", "/>"):
            # Malformed start tag: report (strict) or emit as raw data.
            lineno, offset = self.getpos()
            if "\n" in self.__starttag_text:
                lineno = lineno + self.__starttag_text.count("\n")
                offset = len(self.__starttag_text) \
                         - self.__starttag_text.rfind("\n")
            else:
                offset = offset + len(self.__starttag_text)
            if self.strict:
                self.error("junk characters in start tag: %r"
                           % (rawdata[k:endpos][:20],))
            self.handle_data(rawdata[i:endpos])
            return endpos
        if end.endswith('/>'):
            # XHTML-style empty tag: <span attr="value" />
            self.handle_startendtag(tag, attrs)
        else:
            self.handle_starttag(tag, attrs)
            if tag in self.CDATA_CONTENT_ELEMENTS:
                self.set_cdata_mode(tag)
        return endpos

    def set_cdata_mode(self, elem):
        # Everything up to the matching close tag is raw character data.
        self.cdata_elem = elem.lower()
        self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)

    BeautifulSoupHTMLParser.parse_starttag = parse_starttag
    BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode

    # The patched parser behaves like 3.2.3's strict-capable constructor.
    CONSTRUCTOR_TAKES_STRICT = True
| apache-2.0 |
r3tard/BartusBot | lib/pyasn1/type/tag.py | 162 | 4499 | # ASN.1 types tags
from operator import getitem
from pyasn1 import error
# Tag class bits (the high two bits of a BER identifier octet).
tagClassUniversal = 0x00
tagClassApplication = 0x40
tagClassContext = 0x80
tagClassPrivate = 0xC0

# Tag format bit: primitive (simple) vs. constructed encoding.
tagFormatSimple = 0x00
tagFormatConstructed = 0x20

# How a tag was attached to a type (implicit/explicit/untagged).
tagCategoryImplicit = 0x01
tagCategoryExplicit = 0x02
tagCategoryUntagged = 0x04
class Tag:
    """An ASN.1 tag: an immutable (tagClass, tagFormat, tagId) triple.

    Instances compare and hash on their public ``uniq`` key, which is
    (tagClass, tagId) -- the format bit does not participate in equality.
    """

    def __init__(self, tagClass, tagFormat, tagId):
        """
        :param tagClass: one of the tagClass* constants
        :param tagFormat: tagFormatSimple or tagFormatConstructed
        :param tagId: non-negative tag number
        :raises error.PyAsn1Error: if tagId is negative
        """
        if tagId < 0:
            raise error.PyAsn1Error(
                'Negative tag ID (%s) not allowed' % (tagId,)
            )
        self.__tag = (tagClass, tagFormat, tagId)
        self.uniq = (tagClass, tagId)
        self.__hashedUniqTag = hash(self.uniq)

    def __str__(self):
        return '[%s:%s:%s]' % self.__tag

    def __repr__(self):
        return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
            (self.__class__.__name__,) + self.__tag
        )

    # These is really a hotspot -- expose public "uniq" attribute to save on
    # function calls
    def __eq__(self, other): return self.uniq == other.uniq
    def __ne__(self, other): return self.uniq != other.uniq
    def __lt__(self, other): return self.uniq < other.uniq
    def __le__(self, other): return self.uniq <= other.uniq
    def __gt__(self, other): return self.uniq > other.uniq
    def __ge__(self, other): return self.uniq >= other.uniq
    def __hash__(self): return self.__hashedUniqTag
    def __getitem__(self, idx): return self.__tag[idx]

    def __and__(self, otherTag):
        """Component-wise bitwise AND with another (tagClass, tagFormat, tagId).

        Bug fix: the previous implementation computed ``self.__tag & tagClass``
        etc. -- ANDing the whole tuple with an int -- which unconditionally
        raised TypeError.  Each component is now combined individually,
        mirroring __or__ below.
        """
        (tagClass, tagFormat, tagId) = otherTag
        return self.__class__(
            self.__tag[0] & tagClass,
            self.__tag[1] & tagFormat,
            self.__tag[2] & tagId
        )

    def __or__(self, otherTag):
        """Component-wise bitwise OR with another (tagClass, tagFormat, tagId)."""
        (tagClass, tagFormat, tagId) = otherTag
        return self.__class__(
            self.__tag[0] | tagClass,
            self.__tag[1] | tagFormat,
            self.__tag[2] | tagId
        )

    def asTuple(self): return self.__tag  # __getitem__() is slow
class TagSet:
    """An ordered, immutable sequence of Tag objects applied to one type.

    ``baseTag`` records the type's original tag; ``superTags`` hold the
    effective tags in order.
    """
    def __init__(self, baseTag=(), *superTags):
        self.__baseTag = baseTag
        self.__superTags = superTags
        self.__hashedSuperTags = hash(superTags)
        # Flattened concatenation of every tag's (class, id) pair; the
        # comparison operators below work on this single tuple.
        _uniq = ()
        for t in superTags:
            _uniq = _uniq + t.uniq
        self.uniq = _uniq
        self.__lenOfSuperTags = len(superTags)

    def __str__(self):
        return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]'

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            '(), ' + ', '.join([repr(x) for x in self.__superTags])
        )

    def __add__(self, superTag):
        # Append a tag, producing a new TagSet (instances are immutable).
        return self.__class__(
            self.__baseTag, *self.__superTags + (superTag,)
        )

    def __radd__(self, superTag):
        # Prepend a tag, producing a new TagSet.
        return self.__class__(
            self.__baseTag, *(superTag,) + self.__superTags
        )

    def tagExplicitly(self, superTag):
        """Return a new TagSet with *superTag* appended as an explicit tag.

        Explicit tagging wraps the value, so the added tag is forced to the
        constructed format; UNIVERSAL-class tags are rejected.
        """
        tagClass, tagFormat, tagId = superTag
        if tagClass == tagClassUniversal:
            raise error.PyAsn1Error(
                'Can\'t tag with UNIVERSAL-class tag'
            )
        if tagFormat != tagFormatConstructed:
            superTag = Tag(tagClass, tagFormatConstructed, tagId)
        return self + superTag

    def tagImplicitly(self, superTag):
        """Return a new TagSet with the last tag replaced by *superTag*.

        The replacement keeps the format bit (primitive/constructed) of the
        tag it replaces.
        """
        tagClass, tagFormat, tagId = superTag
        if self.__superTags:
            superTag = Tag(tagClass, self.__superTags[-1][1], tagId)
        return self[:-1] + superTag

    def getBaseTag(self): return self.__baseTag

    def __getitem__(self, idx):
        if isinstance(idx, slice):
            # Slicing yields a TagSet (same base tag, sliced super tags);
            # tagImplicitly() relies on this.
            return self.__class__(
                self.__baseTag, *getitem(self.__superTags, idx)
            )
        return self.__superTags[idx]

    def __eq__(self, other): return self.uniq == other.uniq
    def __ne__(self, other): return self.uniq != other.uniq
    def __lt__(self, other): return self.uniq < other.uniq
    def __le__(self, other): return self.uniq <= other.uniq
    def __gt__(self, other): return self.uniq > other.uniq
    def __ge__(self, other): return self.uniq >= other.uniq
    def __hash__(self): return self.__hashedSuperTags
    def __len__(self): return self.__lenOfSuperTags

    def isSuperTagSetOf(self, tagSet):
        """Return 1 if the first len(self) tags of *tagSet* equal this
        set's tags, else None -- callers rely only on truthiness.
        """
        if len(tagSet) < self.__lenOfSuperTags:
            return
        idx = self.__lenOfSuperTags - 1
        while idx >= 0:
            if self.__superTags[idx] != tagSet[idx]:
                return
            idx = idx - 1
        return 1
def initTagSet(tag):
    """Build a single-tag TagSet whose base tag and sole super tag are *tag*."""
    return TagSet(tag, tag)
| apache-2.0 |
Zord13appdesa/python-for-android | python-build/python-libs/gdata/src/gdata/youtube/__init__.py | 297 | 25623 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = ('api.stephaniel@gmail.com (Stephanie Liu)'
', api.jhartmann@gmail.com (Jochen Hartmann)')
import atom
import gdata
import gdata.media as Media
import gdata.geo as Geo
# XML namespace for the yt: elements defined in this module.
YOUTUBE_NAMESPACE = 'http://gdata.youtube.com/schemas/2007'
# Clark-notation key used to look up the yt:format extension attribute.
YOUTUBE_FORMAT = '{http://gdata.youtube.com/schemas/2007}format'
# Category scheme URIs distinguishing developer tags and subscription types.
YOUTUBE_DEVELOPER_TAG_SCHEME = '%s/%s' % (YOUTUBE_NAMESPACE,
                                          'developertags.cat')
YOUTUBE_SUBSCRIPTION_TYPE_SCHEME = '%s/%s' % (YOUTUBE_NAMESPACE,
                                              'subscriptiontypes.cat')
# Simple yt:* elements.  Each subclass only overrides the XML tag name and
# namespace; the element's value is carried as text content (inherited
# from atom.AtomBase).
class Username(atom.AtomBase):
  """The YouTube Username element"""
  _tag = 'username'
  _namespace = YOUTUBE_NAMESPACE

class QueryString(atom.AtomBase):
  """The YouTube QueryString element"""
  _tag = 'queryString'
  _namespace = YOUTUBE_NAMESPACE

class FirstName(atom.AtomBase):
  """The YouTube FirstName element"""
  _tag = 'firstName'
  _namespace = YOUTUBE_NAMESPACE

class LastName(atom.AtomBase):
  """The YouTube LastName element"""
  _tag = 'lastName'
  _namespace = YOUTUBE_NAMESPACE

class Age(atom.AtomBase):
  """The YouTube Age element"""
  _tag = 'age'
  _namespace = YOUTUBE_NAMESPACE

class Books(atom.AtomBase):
  """The YouTube Books element"""
  _tag = 'books'
  _namespace = YOUTUBE_NAMESPACE

class Gender(atom.AtomBase):
  """The YouTube Gender element"""
  _tag = 'gender'
  _namespace = YOUTUBE_NAMESPACE

class Company(atom.AtomBase):
  """The YouTube Company element"""
  _tag = 'company'
  _namespace = YOUTUBE_NAMESPACE

class Hobbies(atom.AtomBase):
  """The YouTube Hobbies element"""
  _tag = 'hobbies'
  _namespace = YOUTUBE_NAMESPACE

class Hometown(atom.AtomBase):
  """The YouTube Hometown element"""
  _tag = 'hometown'
  _namespace = YOUTUBE_NAMESPACE

class Location(atom.AtomBase):
  """The YouTube Location element"""
  _tag = 'location'
  _namespace = YOUTUBE_NAMESPACE

class Movies(atom.AtomBase):
  """The YouTube Movies element"""
  _tag = 'movies'
  _namespace = YOUTUBE_NAMESPACE

class Music(atom.AtomBase):
  """The YouTube Music element"""
  _tag = 'music'
  _namespace = YOUTUBE_NAMESPACE

class Occupation(atom.AtomBase):
  """The YouTube Occupation element"""
  _tag = 'occupation'
  _namespace = YOUTUBE_NAMESPACE

class School(atom.AtomBase):
  """The YouTube School element"""
  _tag = 'school'
  _namespace = YOUTUBE_NAMESPACE

class Relationship(atom.AtomBase):
  """The YouTube Relationship element"""
  _tag = 'relationship'
  _namespace = YOUTUBE_NAMESPACE

class Recorded(atom.AtomBase):
  """The YouTube Recorded element"""
  _tag = 'recorded'
  _namespace = YOUTUBE_NAMESPACE
class Statistics(atom.AtomBase):
  """The YouTube Statistics element."""
  _tag = 'statistics'
  _namespace = YOUTUBE_NAMESPACE
  _attributes = atom.AtomBase._attributes.copy()
  # Maps XML attribute names to Python attribute names.
  _attributes['viewCount'] = 'view_count'
  _attributes['videoWatchCount'] = 'video_watch_count'
  _attributes['subscriberCount'] = 'subscriber_count'
  _attributes['lastWebAccess'] = 'last_web_access'
  _attributes['favoriteCount'] = 'favorite_count'

  def __init__(self, view_count=None, video_watch_count=None,
      favorite_count=None, subscriber_count=None, last_web_access=None,
      extension_elements=None, extension_attributes=None, text=None):
    # NOTE(review): counter values are stored exactly as parsed (strings),
    # not converted to int -- confirm callers expect strings.
    self.view_count = view_count
    self.video_watch_count = video_watch_count
    self.subscriber_count = subscriber_count
    self.last_web_access = last_web_access
    self.favorite_count = favorite_count
    atom.AtomBase.__init__(self, extension_elements=extension_elements,
        extension_attributes=extension_attributes, text=text)
# More simple yt:* flag/value elements; value is the element's text content.
class Status(atom.AtomBase):
  """The YouTube Status element"""
  _tag = 'status'
  _namespace = YOUTUBE_NAMESPACE

class Position(atom.AtomBase):
  """The YouTube Position element. The position in a playlist feed."""
  _tag = 'position'
  _namespace = YOUTUBE_NAMESPACE

class Racy(atom.AtomBase):
  """The YouTube Racy element."""
  _tag = 'racy'
  _namespace = YOUTUBE_NAMESPACE

class Description(atom.AtomBase):
  """The YouTube Description element."""
  _tag = 'description'
  _namespace = YOUTUBE_NAMESPACE

class Private(atom.AtomBase):
  """The YouTube Private element."""
  _tag = 'private'
  _namespace = YOUTUBE_NAMESPACE

class NoEmbed(atom.AtomBase):
  """The YouTube VideoShare element. Whether a video can be embedded or not."""
  _tag = 'noembed'
  _namespace = YOUTUBE_NAMESPACE
class Comments(atom.AtomBase):
  """The GData Comments element"""
  _tag = 'comments'
  _namespace = gdata.GDATA_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  # A gd:comments element carries a gd:feedLink pointing at the comment feed.
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                       [gdata.FeedLink])

  def __init__(self, feed_link=None, extension_elements=None,
      extension_attributes=None, text=None):
    self.feed_link = feed_link
    atom.AtomBase.__init__(self, extension_elements=extension_elements,
        extension_attributes=extension_attributes, text=text)

class Rating(atom.AtomBase):
  """The GData Rating element"""
  _tag = 'rating'
  _namespace = gdata.GDATA_NAMESPACE
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['min'] = 'min'
  _attributes['max'] = 'max'
  _attributes['numRaters'] = 'num_raters'
  _attributes['average'] = 'average'

  # NOTE(review): the 'min'/'max' parameter names shadow builtins but are
  # part of the public keyword interface and must not be renamed.
  def __init__(self, min=None, max=None,
      num_raters=None, average=None, extension_elements=None,
      extension_attributes=None, text=None):
    self.min = min
    self.max = max
    self.num_raters = num_raters
    self.average = average
    atom.AtomBase.__init__(self, extension_elements=extension_elements,
        extension_attributes=extension_attributes, text=text)
class YouTubePlaylistVideoEntry(gdata.GDataEntry):
  """Represents a YouTubeVideoEntry on a YouTubePlaylist."""
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  # Child element -> (python attribute, converter class) mappings.
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                       [gdata.FeedLink])
  _children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
                                                      Description)
  _children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
  _children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
  _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
  _children['{%s}location' % YOUTUBE_NAMESPACE] = ('location', Location)
  _children['{%s}position' % YOUTUBE_NAMESPACE] = ('position', Position)
  _children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)

  def __init__(self, author=None, category=None, content=None,
      atom_id=None, link=None, published=None, title=None,
      updated=None, feed_link=None, description=None,
      rating=None, comments=None, statistics=None,
      location=None, position=None, media=None,
      extension_elements=None, extension_attributes=None):
    self.feed_link = feed_link
    self.description = description
    self.rating = rating
    self.comments = comments
    self.statistics = statistics
    self.location = location
    self.position = position
    self.media = media
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id,
                              link=link, published=published, title=title,
                              updated=updated,
                              extension_elements=extension_elements,
                              extension_attributes=extension_attributes)
class YouTubeVideoCommentEntry(gdata.GDataEntry):
  """Represents a comment on YouTube."""
  # No extra children/attributes beyond a generic GData entry.
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()

class YouTubeSubscriptionEntry(gdata.GDataEntry):
  """Represents a subscription entry on YouTube."""
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
  _children['{%s}queryString' % YOUTUBE_NAMESPACE] = (
      'query_string', QueryString)
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                       [gdata.FeedLink])

  def __init__(self, author=None, category=None, content=None,
      atom_id=None, link=None, published=None, title=None,
      updated=None, username=None, query_string=None, feed_link=None,
      extension_elements=None, extension_attributes=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title, updated=updated)
    self.username = username
    self.query_string = query_string
    self.feed_link = feed_link

  def GetSubscriptionType(self):
    """Retrieve the type of this subscription.

    Returns:
      A string that is either 'channel, 'query' or 'favorites'
    """
    # Implicitly returns None when no category uses the subscription-type
    # scheme.
    for category in self.category:
      if category.scheme == YOUTUBE_SUBSCRIPTION_TYPE_SCHEME:
        return category.term
class YouTubeVideoResponseEntry(gdata.GDataEntry):
  """Represents a video response. """
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
  _children['{%s}noembed' % YOUTUBE_NAMESPACE] = ('noembed', NoEmbed)
  _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
  _children['{%s}racy' % YOUTUBE_NAMESPACE] = ('racy', Racy)
  _children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)

  def __init__(self, author=None, category=None, content=None, atom_id=None,
      link=None, published=None, title=None, updated=None, rating=None,
      noembed=None, statistics=None, racy=None, media=None,
      extension_elements=None, extension_attributes=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title, updated=updated)
    self.rating = rating
    self.noembed = noembed
    self.statistics = statistics
    self.racy = racy
    # Always has a media group, even when the XML omitted one.
    self.media = media or Media.Group()

class YouTubeContactEntry(gdata.GDataEntry):
  """Represents a contact entry."""
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
  _children['{%s}status' % YOUTUBE_NAMESPACE] = ('status', Status)

  def __init__(self, author=None, category=None, content=None, atom_id=None,
      link=None, published=None, title=None, updated=None,
      username=None, status=None, extension_elements=None,
      extension_attributes=None, text=None):
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title, updated=updated)
    self.username = username
    self.status = status
class YouTubeVideoEntry(gdata.GDataEntry):
  """Represents a video on YouTube."""
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
  _children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
  _children['{%s}noembed' % YOUTUBE_NAMESPACE] = ('noembed', NoEmbed)
  _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
  _children['{%s}recorded' % YOUTUBE_NAMESPACE] = ('recorded', Recorded)
  _children['{%s}racy' % YOUTUBE_NAMESPACE] = ('racy', Racy)
  _children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
  _children['{%s}where' % gdata.geo.GEORSS_NAMESPACE] = ('geo', Geo.Where)

  def __init__(self, author=None, category=None, content=None, atom_id=None,
      link=None, published=None, title=None, updated=None, rating=None,
      noembed=None, statistics=None, racy=None, media=None, geo=None,
      recorded=None, comments=None, extension_elements=None,
      extension_attributes=None):
    self.rating = rating
    self.noembed = noembed
    self.statistics = statistics
    self.racy = racy
    self.comments = comments
    # Always has a media group, even when the XML omitted one.
    self.media = media or Media.Group()
    self.geo = geo
    self.recorded = recorded
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id, link=link,
                              published=published, title=title, updated=updated,
                              extension_elements=extension_elements,
                              extension_attributes=extension_attributes)

  def GetSwfUrl(self):
    """Return the URL for the embeddable Video

    Returns:
      URL of the embeddable video
    """
    # yt:format '5' marks the embeddable SWF content stream.
    # NOTE(review): implicitly returns None when media content exists but
    # no format-5 entry does; raises KeyError if a content element lacks
    # the format attribute -- confirm that is acceptable to callers.
    if self.media.content:
      for content in self.media.content:
        if content.extension_attributes[YOUTUBE_FORMAT] == '5':
          return content.url
    else:
      return None

  def AddDeveloperTags(self, developer_tags):
    """Add a developer tag for this entry.

    Developer tags can only be set during the initial upload.

    Arguments:
      developer_tags: A list of developer tags as strings.

    Returns:
      A list of all developer tags for this video entry.
    """
    for tag_text in developer_tags:
      self.media.category.append(gdata.media.Category(
          text=tag_text, label=tag_text, scheme=YOUTUBE_DEVELOPER_TAG_SCHEME))
    return self.GetDeveloperTags()

  def GetDeveloperTags(self):
    """Retrieve developer tags for this video entry."""
    # NOTE(review): returns None (implicitly) rather than [] when there
    # are no developer tags.
    developer_tags = []
    for category in self.media.category:
      if category.scheme == YOUTUBE_DEVELOPER_TAG_SCHEME:
        developer_tags.append(category)
    if len(developer_tags) > 0:
      return developer_tags

  def GetYouTubeCategoryAsString(self):
    """Convenience method to return the YouTube category as string.

    YouTubeVideoEntries can contain multiple Category objects with differing
    schemes. This method returns only the category with the correct
    scheme, ignoring developer tags.
    """
    for category in self.media.category:
      if category.scheme != YOUTUBE_DEVELOPER_TAG_SCHEME:
        return category.text
class YouTubeUserEntry(gdata.GDataEntry):
  """Represents a user on YouTube."""
  _tag = gdata.GDataEntry._tag
  _namespace = gdata.GDataEntry._namespace
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  # Profile fields are all optional yt:* child elements.
  _children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
  _children['{%s}firstName' % YOUTUBE_NAMESPACE] = ('first_name', FirstName)
  _children['{%s}lastName' % YOUTUBE_NAMESPACE] = ('last_name', LastName)
  _children['{%s}age' % YOUTUBE_NAMESPACE] = ('age', Age)
  _children['{%s}books' % YOUTUBE_NAMESPACE] = ('books', Books)
  _children['{%s}gender' % YOUTUBE_NAMESPACE] = ('gender', Gender)
  _children['{%s}company' % YOUTUBE_NAMESPACE] = ('company', Company)
  _children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
                                                      Description)
  _children['{%s}hobbies' % YOUTUBE_NAMESPACE] = ('hobbies', Hobbies)
  _children['{%s}hometown' % YOUTUBE_NAMESPACE] = ('hometown', Hometown)
  _children['{%s}location' % YOUTUBE_NAMESPACE] = ('location', Location)
  _children['{%s}movies' % YOUTUBE_NAMESPACE] = ('movies', Movies)
  _children['{%s}music' % YOUTUBE_NAMESPACE] = ('music', Music)
  _children['{%s}occupation' % YOUTUBE_NAMESPACE] = ('occupation', Occupation)
  _children['{%s}school' % YOUTUBE_NAMESPACE] = ('school', School)
  _children['{%s}relationship' % YOUTUBE_NAMESPACE] = ('relationship',
                                                       Relationship)
  _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                       [gdata.FeedLink])
  _children['{%s}thumbnail' % gdata.media.MEDIA_NAMESPACE] = ('thumbnail',
                                                              Media.Thumbnail)

  def __init__(self, author=None, category=None, content=None, atom_id=None,
      link=None, published=None, title=None, updated=None,
      username=None, first_name=None, last_name=None, age=None,
      books=None, gender=None, company=None, description=None,
      hobbies=None, hometown=None, location=None, movies=None,
      music=None, occupation=None, school=None, relationship=None,
      statistics=None, feed_link=None, extension_elements=None,
      extension_attributes=None, text=None):
    self.username = username
    self.first_name = first_name
    self.last_name = last_name
    self.age = age
    self.books = books
    self.gender = gender
    self.company = company
    self.description = description
    self.hobbies = hobbies
    self.hometown = hometown
    self.location = location
    self.movies = movies
    self.music = music
    self.occupation = occupation
    self.school = school
    self.relationship = relationship
    self.statistics = statistics
    self.feed_link = feed_link
    gdata.GDataEntry.__init__(self, author=author, category=category,
                              content=content, atom_id=atom_id,
                              link=link, published=published,
                              title=title, updated=updated,
                              extension_elements=extension_elements,
                              extension_attributes=extension_attributes,
                              text=text)
class YouTubeVideoFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a video feed on YouTube."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [YouTubeVideoEntry])
class YouTubePlaylistEntry(gdata.GDataEntry):
    """Represents a playlist in YouTube."""
    # Extend the base GData entry parser with the yt:description,
    # yt:private and gd:feedLink extension elements.
    _tag = gdata.GDataEntry._tag
    _namespace = gdata.GDataEntry._namespace
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
                                                        Description)
    _children['{%s}private' % YOUTUBE_NAMESPACE] = ('private',
                                                    Private)
    _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
                                                         [gdata.FeedLink])

    def __init__(self, author=None, category=None, content=None,
                 atom_id=None, link=None, published=None, title=None,
                 updated=None, private=None, feed_link=None,
                 description=None, extension_elements=None,
                 extension_attributes=None):
        """Constructor.

        Stores the playlist-specific fields (description, private flag
        and nested feed links), then delegates the common Atom/GData
        fields to gdata.GDataEntry.
        """
        self.description = description
        self.private = private
        self.feed_link = feed_link
        gdata.GDataEntry.__init__(self, author=author, category=category,
                                  content=content, atom_id=atom_id,
                                  link=link, published=published, title=title,
                                  updated=updated,
                                  extension_elements=extension_elements,
                                  extension_attributes=extension_attributes)
class YouTubePlaylistFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Represents a feed of a user's playlists """
    # Same parser tables as a generic GData feed, with entries parsed
    # as YouTubePlaylistEntry objects.
    _tag = gdata.GDataFeed._tag
    _namespace = gdata.GDataFeed._namespace
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [YouTubePlaylistEntry])
class YouTubePlaylistVideoFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Represents a feed of video entry on a playlist."""
    # Entries are the videos contained in one playlist.
    _tag = gdata.GDataFeed._tag
    _namespace = gdata.GDataFeed._namespace
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [YouTubePlaylistVideoEntry])
class YouTubeContactFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Represents a feed of a users contacts."""
    # Entries are parsed as YouTubeContactEntry objects.
    _tag = gdata.GDataFeed._tag
    _namespace = gdata.GDataFeed._namespace
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [YouTubeContactEntry])
class YouTubeSubscriptionFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Represents a feed of a users subscriptions."""
    # Entries are parsed as YouTubeSubscriptionEntry objects.
    _tag = gdata.GDataFeed._tag
    _namespace = gdata.GDataFeed._namespace
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [YouTubeSubscriptionEntry])
class YouTubeVideoCommentFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Represents a feed of comments for a video."""
    # Entries are parsed as YouTubeVideoCommentEntry objects.
    _tag = gdata.GDataFeed._tag
    _namespace = gdata.GDataFeed._namespace
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [YouTubeVideoCommentEntry])
class YouTubeVideoResponseFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Represents a feed of video responses."""
    # Entries are parsed as YouTubeVideoResponseEntry objects.
    _tag = gdata.GDataFeed._tag
    _namespace = gdata.GDataFeed._namespace
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [YouTubeVideoResponseEntry])
def YouTubeVideoFeedFromString(xml_string):
    """Parse an XML string into a YouTubeVideoFeed."""
    return atom.CreateClassFromXMLString(YouTubeVideoFeed, xml_string)


def YouTubeVideoEntryFromString(xml_string):
    """Parse an XML string into a YouTubeVideoEntry."""
    return atom.CreateClassFromXMLString(YouTubeVideoEntry, xml_string)


def YouTubeContactFeedFromString(xml_string):
    """Parse an XML string into a YouTubeContactFeed."""
    return atom.CreateClassFromXMLString(YouTubeContactFeed, xml_string)


def YouTubeContactEntryFromString(xml_string):
    """Parse an XML string into a YouTubeContactEntry."""
    return atom.CreateClassFromXMLString(YouTubeContactEntry, xml_string)


def YouTubeVideoCommentFeedFromString(xml_string):
    """Parse an XML string into a YouTubeVideoCommentFeed."""
    return atom.CreateClassFromXMLString(YouTubeVideoCommentFeed, xml_string)


def YouTubeVideoCommentEntryFromString(xml_string):
    """Parse an XML string into a YouTubeVideoCommentEntry."""
    return atom.CreateClassFromXMLString(YouTubeVideoCommentEntry, xml_string)


def YouTubeUserFeedFromString(xml_string):
    """Parse an XML string into a user's uploads feed."""
    # NOTE(review): this parses into YouTubeVideoFeed, not a dedicated
    # "user feed" class.  A user's uploads feed does contain video
    # entries, so this is presumably intentional -- confirm no
    # YouTubeUserFeed class was meant to exist here.
    return atom.CreateClassFromXMLString(YouTubeVideoFeed, xml_string)


def YouTubeUserEntryFromString(xml_string):
    """Parse an XML string into a YouTubeUserEntry."""
    return atom.CreateClassFromXMLString(YouTubeUserEntry, xml_string)


def YouTubePlaylistFeedFromString(xml_string):
    """Parse an XML string into a YouTubePlaylistFeed."""
    return atom.CreateClassFromXMLString(YouTubePlaylistFeed, xml_string)


def YouTubePlaylistVideoFeedFromString(xml_string):
    """Parse an XML string into a YouTubePlaylistVideoFeed."""
    return atom.CreateClassFromXMLString(YouTubePlaylistVideoFeed, xml_string)


def YouTubePlaylistEntryFromString(xml_string):
    """Parse an XML string into a YouTubePlaylistEntry."""
    return atom.CreateClassFromXMLString(YouTubePlaylistEntry, xml_string)


def YouTubePlaylistVideoEntryFromString(xml_string):
    """Parse an XML string into a YouTubePlaylistVideoEntry."""
    return atom.CreateClassFromXMLString(YouTubePlaylistVideoEntry, xml_string)


def YouTubeSubscriptionFeedFromString(xml_string):
    """Parse an XML string into a YouTubeSubscriptionFeed."""
    return atom.CreateClassFromXMLString(YouTubeSubscriptionFeed, xml_string)


def YouTubeSubscriptionEntryFromString(xml_string):
    """Parse an XML string into a YouTubeSubscriptionEntry."""
    return atom.CreateClassFromXMLString(YouTubeSubscriptionEntry, xml_string)


def YouTubeVideoResponseFeedFromString(xml_string):
    """Parse an XML string into a YouTubeVideoResponseFeed."""
    return atom.CreateClassFromXMLString(YouTubeVideoResponseFeed, xml_string)


def YouTubeVideoResponseEntryFromString(xml_string):
    """Parse an XML string into a YouTubeVideoResponseEntry."""
    return atom.CreateClassFromXMLString(YouTubeVideoResponseEntry, xml_string)
| apache-2.0 |
miing/mci_migo | api/v10/forms.py | 1 | 2136 | # Copyright 2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
# We use Django forms for webservice input validation
from django import forms
from django.forms import fields
from django.utils.translation import ugettext as _
from identityprovider.models.captcha import Captcha
from identityprovider.validators import validate_password_policy
class WebserviceCreateAccountForm(forms.Form):
    """Validates input for the account-creation webservice endpoint.

    This is a plain Django form used for webservice input validation
    (not rendered as HTML): field constraints mirror what the API
    accepts, and clean() additionally verifies the captcha solution.
    """

    email = fields.EmailField()
    # Password must satisfy the site-wide password policy validator.
    password = fields.CharField(max_length=256,
                                validators=[validate_password_policy])
    captcha_id = fields.CharField(max_length=1024)
    captcha_solution = fields.CharField(max_length=256)
    # Filled in by SSO itself, never by the client (see clean() below).
    remote_ip = fields.CharField(max_length=256)
    displayname = fields.CharField(max_length=256, required=False)
    # Omitted/empty platform falls back to 'desktop' via empty_value.
    platform = fields.TypedChoiceField(choices=[
        ('web', 'Web'), ('desktop', 'Desktop'), ('mobile', 'Mobile')],
        empty_value='desktop', required=False)
    validate_redirect_to = fields.CharField(required=False)

    def clean_validate_redirect_to(self):
        """Normalize the optional redirect target."""
        validate_redirect_to = self.cleaned_data.get('validate_redirect_to')
        # return None instead of '' as the default value
        if not validate_redirect_to:
            validate_redirect_to = None
        return validate_redirect_to

    def clean(self):
        """Cross-field validation: verify the captcha solution."""
        cleaned_data = self.cleaned_data
        captcha_id = cleaned_data.get('captcha_id')
        captcha_solution = cleaned_data.get('captcha_solution')
        # The remote IP address is absolutely required, and comes from
        # SSO itself, not from the client. If it's missing, it's a
        # programming error, and should not be returned to the client
        # as a validation error. So, we use a normal key lookup here.
        remote_ip = cleaned_data['remote_ip']
        captcha = Captcha(captcha_id)
        email = cleaned_data.get('email', '')
        if captcha.verify(captcha_solution, remote_ip, email):
            return cleaned_data
        # not verified
        raise forms.ValidationError(_("Wrong captcha solution."))
| agpl-3.0 |
akeyong/vnpy | vn.api/vn.lhang/vnlhang.py | 10 | 10163 | # encoding: utf-8
import urllib
import hashlib
import requests
from Queue import Queue, Empty
from threading import Thread
from time import sleep
LHANG_API_ROOT ="https://api.lhang.com/v1/"
FUNCTION_TICKER = ('ticker.do', 'get')
FUNCTION_DEPTH = ('depth.do', 'get')
FUNCTION_TRADES = ('trades.do', 'get')
FUNCTION_KLINE = ('kline.do', 'get')
FUNCTION_USERINFO = ('user_info.do', 'post')
FUNCTION_CREATEORDER = ('create_order.do', 'post')
FUNCTION_CANCELORDER = ('cancel_order.do', 'post')
FUNCTION_ORDERSINFO = ('orders_info.do', 'post')
FUNCTION_ORDERSINFOHISTORY = ('orders_info_history.do', 'post')
#----------------------------------------------------------------------
def signature(params, secretKey):
    """Build the request signature (MD5 hex digest).

    Per the lhang.com signing scheme: sort the parameters by key
    (ascending), append the secret key as 'secret_key', urlencode the
    result, and MD5 it.  Python 2 only (iteritems, urllib.urlencode).
    """
    params = sorted(params.iteritems(), key=lambda d:d[0], reverse=False)
    params.append(('secret_key', secretKey))
    message = urllib.urlencode(params)

    m = hashlib.md5()
    m.update(message)
    # NOTE(review): the digest() result is discarded -- this call looks
    # like dead code; hexdigest() below is what is actually used.
    m.digest()

    sig=m.hexdigest()
    return sig
########################################################################
class LhangApi(object):
""""""
DEBUG = True
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
self.apiKey = ''
self.secretKey = ''
self.interval = 1 # 每次请求的间隔等待
self.active = False # API工作状态
self.reqID = 0 # 请求编号
self.reqQueue = Queue() # 请求队列
self.reqThread = Thread(target=self.processQueue) # 请求处理线程
#----------------------------------------------------------------------
def init(self, apiKey, secretKey, interval):
"""初始化"""
self.apiKey = apiKey
self.secretKey = secretKey
self.interval = interval
self.active = True
self.reqThread.start()
#----------------------------------------------------------------------
def exit(self):
"""退出"""
self.active = False
if self.reqThread.isAlive():
self.reqThread.join()
#----------------------------------------------------------------------
def processRequest(self, req):
"""处理请求"""
# 读取方法和参数
api, method = req['function']
params = req['params']
url = LHANG_API_ROOT + api
# 在参数中增加必须的字段
params['api_key'] = self.apiKey
# 添加签名
sign = signature(params, self.secretKey)
params['sign'] = sign
# 发送请求
payload = urllib.urlencode(params)
r = requests.request(method, url, params=payload)
if r.status_code == 200:
data = r.json()
return data
else:
return None
#----------------------------------------------------------------------
def processQueue(self):
"""处理请求队列中的请求"""
while self.active:
try:
req = self.reqQueue.get(block=True, timeout=1) # 获取请求的阻塞为一秒
callback = req['callback']
reqID = req['reqID']
data = self.processRequest(req)
# 请求失败
if data is None:
error = u'请求失败'
self.onError(error, req, reqID)
elif 'error_code' in data:
error = u'请求出错,错误代码:%s' % data['error_code']
self.onError(error, req, reqID)
# 请求成功
else:
if self.DEBUG:
print callback.__name__
callback(data, req, reqID)
# 流控等待
sleep(self.interval)
except Empty:
pass
#----------------------------------------------------------------------
def sendRequest(self, function, params, callback):
"""发送请求"""
# 请求编号加1
self.reqID += 1
# 生成请求字典并放入队列中
req = {}
req['function'] = function
req['params'] = params
req['callback'] = callback
req['reqID'] = self.reqID
self.reqQueue.put(req)
# 返回请求编号
return self.reqID
#----------------------------------------------------------------------
def onError(self, error, req, reqID):
"""错误推送"""
print error, req, reqID
###############################################
# 行情接口
###############################################
#----------------------------------------------------------------------
def getTicker(self, symbol):
"""查询行情"""
function = FUNCTION_TICKER
params = {'symbol': symbol}
callback = self.onGetTicker
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def getDepth(self, symbol, size, merge):
"""查询深度"""
function = FUNCTION_DEPTH
params = {
'symbol': symbol,
'size': size,
'mege': merge
}
callback = self.onGetDepth
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def getTrades(self, symbol, size, time):
"""查询历史成交"""
function = FUNCTION_TRADES
params = {
'symbol': symbol,
'size': size,
'time': time
}
callback = self.onGetTrades
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def getKline(self, symbol, size, type_, time):
"""查询K线"""
function = FUNCTION_TRADES
params = {
'symbol': symbol,
'size': size,
'type': type_,
'time': time
}
callback = self.onGetKline
return self.sendRequest(function, params, callback)
#----------------------------------------------------------------------
def onGetTicker(self, data, req, reqID):
"""查询行情回调"""
print data, reqID
# ----------------------------------------------------------------------
def onGetDepth(self, data, req, reqID):
"""查询深度回调"""
print data, reqID
# ----------------------------------------------------------------------
def onGetTrades(self, data, req, reqID):
"""查询历史成交"""
print data, reqID
# ----------------------------------------------------------------------
def onGetKline(self, data, req, reqID):
"""查询K线回报"""
print data, reqID
###############################################
# 交易接口
###############################################
# ----------------------------------------------------------------------
def getUserInfo(self):
"""查询K线"""
function = FUNCTION_USERINFO
params = {}
callback = self.onGetUserInfo
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def createOrder(self, symbol, type_, price, amount):
"""发送委托"""
function = FUNCTION_CREATEORDER
params = {
'symbol': symbol,
'type': type_,
'price': price,
'amount': amount
}
callback = self.onCreateOrder
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def cancelOrder(self, symbol, orderId):
"""撤单"""
function = FUNCTION_CANCELORDER
params = {
'symbol': symbol,
'order_id': orderId
}
callback = self.onCancelOrder
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def getOrdersInfo(self, symbol, orderId):
"""查询委托"""
function = FUNCTION_ORDERSINFO
params = {
'symbol': symbol,
'order_id': orderId
}
callback = self.onGetOrdersInfo
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def getOrdersInfoHistory(self, symbol, status, currentPage, pageLength):
"""撤单"""
function = FUNCTION_ORDERSINFOHISTORY
params = {
'symbol': symbol,
'status': status,
'current_page': currentPage,
'page_length': pageLength
}
callback = self.onGetOrdersInfoHistory
return self.sendRequest(function, params, callback)
# ----------------------------------------------------------------------
def onGetUserInfo(self, data, req, reqID):
"""查询K线回报"""
print data, reqID
# ----------------------------------------------------------------------
def onCreateOrder(self, data, req, reqID):
"""委托回报"""
print data, reqID
# ----------------------------------------------------------------------
def onCancelOrder(self, data, req, reqID):
"""撤单回报"""
print data, reqID
# ----------------------------------------------------------------------
def onGetOrdersInfo(self, data, req, reqID):
"""查询委托回报"""
print data, reqID
# ----------------------------------------------------------------------
def onGetOrdersInfoHistory(self, data, req, reqID):
"""撤单回报"""
print data, reqID
| mit |
freedomflyer/test | examples/delay.py | 1 | 2489 | import sys
import matplotlib
sys.path.append('..')
from src.sim import Sim
from src import node
from src import link
from src import packet
from networks.network import Network
import random
class Generator(object):
    """Poisson packet generator: emits packets at an exponential rate.

    Each handled 'generate' event sends one fixed-size packet toward
    `destination` and reschedules itself with an exponentially
    distributed inter-arrival time of mean 1/load.
    """

    def __init__(self,node,destination,load,duration):
        self.node = node                # source node that sends the packets
        self.load = load                # target rate, packets per second
        self.duration = duration       # how long (sim seconds) to generate
        self.start = 0                 # generation start time
        self.ident = 1                 # per-packet sequence number
        self.destination = destination  # destination address

    def handle(self,event):
        """Send one packet, then reschedule the next generation event."""
        # quit if done
        now = Sim.scheduler.current_time()
        if (now - self.start) > self.duration:
            return

        # generate a packet
        self.ident += 1
        p = packet.Packet(destination_address=self.destination,ident=self.ident,protocol='delay',length=1000)
        Sim.scheduler.add(delay=0, event=p, handler=self.node.send_packet)
        # schedule the next time we should generate a packet
        # (exponential inter-arrival => Poisson arrivals at rate `load`)
        Sim.scheduler.add(delay=random.expovariate(self.load), event='generate', handler=self.handle)
class DelayHandler(object):
    """Receives 'delay' packets and prints a per-packet delay breakdown.

    Output columns: iteration, current sim time, packet ident, creation
    time, total elapsed time, transmission delay, propagation delay and
    queueing delay.  (Python 2 print statements.)
    """

    def __init__(self):
        self.iteration = 0  # count of packets received so far
        print "It\tCurrent Time\tPacket Ident\tCreated At\tElapsed Time\tTransm Delay\tProp Delay\tQueue Delay"

    def receive_packet(self, packet):
        """Called by the node for each delivered packet; print its stats."""
        self.iteration += 1
        print "%d\t%f\t%f\t%f\t%f\t%f\t%f\t%f" % \
            (self.iteration, Sim.scheduler.current_time(), packet.ident, packet.created, \
             (Sim.scheduler.current_time() - packet.created), packet.transmission_delay, \
             packet.propagation_delay, packet.queueing_delay)
if __name__ == '__main__':
    # Simulation driver: one-hop network, Poisson traffic at 80% load,
    # measuring per-packet delay components at the receiver.
    Sim.scheduler.reset()

    # setup network
    net = Network('../networks/one-hop.txt')

    # setup routes (bidirectional forwarding between n1 and n2)
    n1 = net.get_node('n1')
    n2 = net.get_node('n2')
    n1.add_forwarding_entry(address=n2.get_address('n1'),link=n1.links[0])
    n2.add_forwarding_entry(address=n1.get_address('n2'),link=n2.links[0])

    # setup app: the receiver prints delay stats for each packet
    d = DelayHandler()
    net.nodes['n2'].add_protocol(protocol="delay",handler=d)

    # setup packet generator
    destination = n2.get_address('n1')
    # 1 Mbps link / (1000-byte packets * 8 bits) = max packets per second
    max_rate = 1000000/(1000*8)
    load = .8 *max_rate  # drive the link at 80% utilization
    g = Generator(node=n1,destination=destination,load=load,duration=10)
    Sim.scheduler.add(delay=0, event='generate', handler=g.handle)

    # run the simulation
    Sim.scheduler.run()

# Sample output rows (iteration, time, ident, created, elapsed,
# transmission, propagation, queueing):
#141 9.989235 142.000000 9.980235 0.009000 0.008000 0.001000 0.000000
#962 10.009724 963.000000 9.995443 0.014281 0.008000 0.001000 0.005281
yawnosnorous/python-for-android | python-build/python-libs/gdata/samples/oauth/2_legged_oauth.py | 128 | 2463 | #!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'e.bidelman (Eric Bidelman)'

# Demonstrates 2-legged OAuth (domain-wide delegation) against the
# Contacts and Documents List Data APIs: the domain consumer key/secret
# signs requests on behalf of `requestor_id` with no per-user token.
# (Python 2 sample script.)

import gdata.contacts
import gdata.contacts.service
import gdata.docs
import gdata.docs.service

# NOTE(review): gdata.auth is referenced below but not imported
# explicitly -- presumably pulled in by the imports above; verify.
CONSUMER_KEY = 'yourdomain.com'
CONSUMER_SECRET = 'YOUR_CONSUMER_KEY'
SIG_METHOD = gdata.auth.OAuthSignatureMethod.HMAC_SHA1

# The domain user the 2-legged requests are made on behalf of.
requestor_id = 'any.user@yourdomain.com'

# Contacts Data API ============================================================
contacts = gdata.contacts.service.ContactsService()
contacts.SetOAuthInputParameters(
    SIG_METHOD, CONSUMER_KEY, consumer_secret=CONSUMER_SECRET,
    two_legged_oauth=True, requestor_id=requestor_id)

# GET - fetch user's contact list
print "\nList of contacts for %s:" % (requestor_id,)
feed = contacts.GetContactsFeed()
for entry in feed.entry:
    print entry.title.text

# GET - fetch another user's contact list (switch requestor in place)
requestor_id = 'another_user@yourdomain.com'
print "\nList of contacts for %s:" % (requestor_id,)
contacts.GetOAuthInputParameters().requestor_id = requestor_id
feed = contacts.GetContactsFeed()
for entry in feed.entry:
    print entry.title.text

# Google Documents List Data API ===============================================
docs = gdata.docs.service.DocsService()
docs.SetOAuthInputParameters(
    SIG_METHOD, CONSUMER_KEY, consumer_secret=CONSUMER_SECRET,
    two_legged_oauth=True, requestor_id=requestor_id)

# POST - upload a document
print "\nUploading document to %s's Google Documents account:" % (requestor_id,)
ms = gdata.MediaSource(
    file_path='/path/to/test.txt',
    content_type=gdata.docs.service.SUPPORTED_FILETYPES['TXT'])
entry = docs.UploadDocument(ms, 'Company Perks')
print 'Document now accessible online at:', entry.GetAlternateLink().href

# GET - fetch user's document list
print "\nList of Google Documents for %s" % (requestor_id,)
feed = docs.GetDocumentListFeed()
for entry in feed.entry:
    print entry.title.text
Arno-Nymous/pyload | module/plugins/hoster/QuickshareCz.py | 8 | 3501 | # -*- coding: utf-8 -*-
import re
from ..internal.SimpleHoster import SimpleHoster
class QuickshareCz(SimpleHoster):
    __name__ = "QuickshareCz"
    __type__ = "hoster"
    __version__ = "0.64"
    __status__ = "testing"

    __pattern__ = r'http://(?:[^/]*\.)?quickshare\.cz/stahnout-soubor/.+'
    __config__ = [("activated", "bool", "Activated", True),
                  ("use_premium", "bool", "Use premium account if available", True),
                  ("fallback", "bool",
                   "Fallback to free download if premium fails", True),
                  ("chk_filesize", "bool", "Check file size", True),
                  ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10)]

    __description__ = """Quickshare.cz hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("zoidberg", "zoidberg@mujmail.cz")]

    # Regexes scraped from the download page: file name, file size and
    # the JS-redirect marker used when a file is gone.
    NAME_PATTERN = r'<th width="145px">Název:</th>\s*<td style="word-wrap:break-word;">(?P<N>.+?)</td>'
    SIZE_PATTERN = r'<th>Velikost:</th>\s*<td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</td>'
    OFFLINE_PATTERN = r'<script type="text/javascript">location\.href=\'/chyba\';</script>'

    def process(self, pyfile):
        """Main entry: fetch the page, pick premium/free path, download."""
        self.data = self.load(pyfile.url)
        self.get_fileInfo()

        #: Parse js variables embedded in the page (IDs, server, credit)
        self.jsvars = dict(
            (x, y.strip("'")) for x, y in re.findall(
                r"var (\w+) = ([\d.]+|'.+?')", self.data))
        self.log_debug(self.jsvars)

        pyfile.name = self.jsvars['ID3']

        #: Determine download type - free or premium; fall back to free
        #: when not logged in or out of credit.
        if self.premium:
            if 'UU_prihlasen' in self.jsvars:
                if self.jsvars['UU_prihlasen'] == "0":
                    self.log_warning(_("User not logged in"))
                    self.account.relogin()
                    self.retry()
                elif float(self.jsvars['UU_kredit']) < float(self.jsvars['kredit_odecet']):
                    self.log_warning(_("Not enough credit left"))
                    self.premium = False

        if self.premium:
            self.handle_premium(pyfile)
        else:
            self.handle_free(pyfile)

        # A downloaded body starting with "Chyba!" ("Error!") means the
        # server returned an error page instead of the file.
        if self.scan_download(
                {'error': re.compile(r'\AChyba!')}, read_size=100):
            self.fail(_("File not found or plugin defect"))

    def handle_free(self, pyfile):
        """Free download: POST the page IDs, follow the redirect."""
        #: Get download url
        download_url = '%s/download.php' % self.jsvars['server']
        data = dict(
            (x, self.jsvars[x]) for x in self.jsvars if x in (
                "ID1", "ID2", "ID3", "ID4"))
        self.log_debug("FREE URL1:" + download_url, data)

        header = self.load(download_url, post=data, just_header=True)
        self.link = header.get('location')
        if not self.link:
            self.fail(_("File not found"))

        self.log_debug("FREE URL2:" + self.link)

        #: Check errors encoded in the redirect target (/chyba/<code>)
        m = re.search(r'/chyba/(\d+)', self.link)
        if m is not None:
            if m.group(1) == "1":
                self.retry(60, 2 * 60, "This IP is already downloading")
            elif m.group(1) == "2":
                self.retry(60, 60, "No free slots available")
            else:
                self.fail(_("Error %d") % m.group(1))

    def handle_premium(self, pyfile):
        """Premium download: hit the premium endpoint with the page IDs."""
        download_url = '%s/download_premium.php' % self.jsvars['server']
        data = dict(
            (x, self.jsvars[x]) for x in self.jsvars if x in (
                "ID1", "ID2", "ID4", "ID5"))
        self.download(download_url, get=data)
| gpl-3.0 |
mawentao007/reading_grab | test/spider_error.py | 13 | 2221 | from grab.spider import Spider, Task
import logging
from test.util import BaseGrabTestCase, build_spider
# That URLs breaks Grab's URL normalization process
# with error "label empty or too long"
INVALID_URL = 'http://13354&altProductId=6423589&productId=6423589'\
'&altProductStoreId=13713&catalogId=10001'\
'&categoryId=28678&productStoreId=13713'\
'http://www.textbooksnow.com/webapp/wcs/stores'\
'/servlet/ProductDisplay?langId=-1&storeId='
class SpiderErrorTestCase(BaseGrabTestCase):
    """Regression tests: the spider must survive un-normalizable URLs
    (see INVALID_URL above) without crashing, both when the URL comes
    from a task generator and when it arrives via an HTTP redirect."""

    def setUp(self):
        self.server.reset()

    def test_generator_with_invalid_url(self):
        """Spider should not crash when a generated task URL is invalid."""
        class SomeSpider(Spider):
            def task_generator(self):
                yield Task('page', url=INVALID_URL)

        bot = build_spider(SomeSpider)
        bot.run()

    def test_redirect_with_invalid_url(self):
        """Spider should not crash on a 301 redirect to an invalid URL."""
        server = self.server

        class TestSpider(Spider):
            def task_generator(self):
                self.done_counter = 0
                yield Task('page', url=server.get_url())

            def task_page(self, grab, task):
                pass

        # First response only: redirect to the broken location.
        self.server.response_once['code'] = 301
        self.server.response_once['headers'] = [
            ('Location', INVALID_URL),
        ]
        bot = build_spider(TestSpider, network_try_limit=1)
        bot.run()
'''
# That test case ruins the spider instance :(
def test_redirect_with_invalid_byte(self):
url = self.server.get_url()
invalid_url = b'http://\xa0' + url.encode('ascii')
def callback(server):
server.set_status(301)
server.add_header('Location', invalid_url)
server.write('')
server.finish()
class TestSpider(Spider):
def task_generator(self):
#yield Task('page', url='http://www.tripadvisor.com/ShowUrl?&excludeFromVS=false&odc=BusinessListingsUrl&d=4289178&url=1')
#yield Task('page', invalid_url)
yield Task('page', url)
def task_page(self, grab, task):
pass
self.server.response['callback'] = callback
bot = TestSpider()
bot.run()
'''
| mit |
chidea/GoPythonDLLWrapper | bin/lib/xmlrpc/server.py | 96 | 36597 | r"""XML-RPC Servers.
This module can be used to create simple XML-RPC servers
by creating a server and either installing functions, a
class instance, or by extending the SimpleXMLRPCServer
class.
It can also be used to handle XML-RPC requests in a CGI
environment using CGIXMLRPCRequestHandler.
The Doc* classes can be used to create XML-RPC servers that
serve pydoc-style documentation in response to HTTP
GET requests. This documentation is dynamically generated
based on the functions and methods registered with the
server.
A list of possible usage patterns follows:
1. Install functions:
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_function(pow)
server.register_function(lambda x,y: x+y, 'add')
server.serve_forever()
2. Install an instance:
class MyFuncs:
def __init__(self):
# make all of the sys functions available through sys.func_name
import sys
self.sys = sys
def _listMethods(self):
# implement this method so that system.listMethods
# knows to advertise the sys methods
return list_public_methods(self) + \
['sys.' + method for method in list_public_methods(self.sys)]
def pow(self, x, y): return pow(x, y)
def add(self, x, y) : return x + y
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(MyFuncs())
server.serve_forever()
3. Install an instance with custom dispatch method:
class Math:
def _listMethods(self):
# this method must be present for system.listMethods
# to work
return ['add', 'pow']
def _methodHelp(self, method):
# this method must be present for system.methodHelp
# to work
if method == 'add':
return "add(2,3) => 5"
elif method == 'pow':
return "pow(x, y[, z]) => number"
else:
# By convention, return empty
# string if no help is available
return ""
def _dispatch(self, method, params):
if method == 'pow':
return pow(*params)
elif method == 'add':
return params[0] + params[1]
else:
raise ValueError('bad method')
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(Math())
server.serve_forever()
4. Subclass SimpleXMLRPCServer:
class MathServer(SimpleXMLRPCServer):
def _dispatch(self, method, params):
try:
# We are forcing the 'export_' prefix on methods that are
# callable through XML-RPC to prevent potential security
# problems
func = getattr(self, 'export_' + method)
except AttributeError:
raise Exception('method "%s" is not supported' % method)
else:
return func(*params)
def export_add(self, x, y):
return x + y
server = MathServer(("localhost", 8000))
server.serve_forever()
5. CGI script:
server = CGIXMLRPCRequestHandler()
server.register_function(pow)
server.handle_request()
"""
# Written by Brian Quinlan (brian@sweetapp.com).
# Based on code written by Fredrik Lundh.
from xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode
from http.server import BaseHTTPRequestHandler
import http.server
import socketserver
import sys
import os
import re
import pydoc
import inspect
import traceback
try:
import fcntl
except ImportError:
fcntl = None
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Resolves a dotted attribute name to an object.  Raises
    an AttributeError if any attribute in the chain starts with a '_'.

    If the optional allow_dotted_names argument is false, dots are not
    supported and this function operates similar to getattr(obj, attr).
    """
    parts = attr.split('.') if allow_dotted_names else [attr]

    for name in parts:
        # Leading-underscore names are private by convention and must
        # never be reachable through XML-RPC dispatch.
        if name.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % name
                )
        obj = getattr(obj, name)
    return obj
def list_public_methods(obj):
    """Returns a list of attribute strings, found in the specified
    object, which represent callable attributes"""

    public = []
    for name in dir(obj):
        # Skip anything private by convention.
        if name.startswith('_'):
            continue
        if callable(getattr(obj, name)):
            public.append(name)
    return public
class SimpleXMLRPCDispatcher:
"""Mix-in class that dispatches XML-RPC requests.
This class is used to register XML-RPC method handlers
and then to dispatch them. This class doesn't need to be
instanced directly when used by SimpleXMLRPCServer but it
can be instanced when used by the MultiPathXMLRPCServer
"""
    def __init__(self, allow_none=False, encoding=None,
                 use_builtin_types=False):
        """Set up an empty dispatcher.

        allow_none        -- marshal None as <nil/> in responses
        encoding          -- response encoding (default 'utf-8')
        use_builtin_types -- decode <dateTime>/<base64> to datetime/bytes
        """
        self.funcs = {}        # name -> function registry
        self.instance = None   # optional registered instance (fallback)
        self.allow_none = allow_none
        self.encoding = encoding or 'utf-8'
        self.use_builtin_types = use_builtin_types

    def register_instance(self, instance, allow_dotted_names=False):
        """Registers an instance to respond to XML-RPC requests.

        Only one instance can be installed at a time.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called. Methods beginning with an '_'
        are considered private and will not be called by
        SimpleXMLRPCServer.

        If a registered function matches a XML-RPC request, then it
        will be called instead of the registered instance.

        If the optional allow_dotted_names argument is true and the
        instance does not have a _dispatch method, method names
        containing dots are supported and resolved, as long as none of
        the name segments start with an '_'.

            *** SECURITY WARNING: ***

            Enabling the allow_dotted_names options allows intruders
            to access your module's global variables and may allow
            intruders to execute arbitrary code on your machine.  Only
            use this option on a secure, closed network.

        """
        self.instance = instance
        self.allow_dotted_names = allow_dotted_names

    def register_function(self, function, name=None):
        """Registers a function to respond to XML-RPC requests.

        The optional name argument can be used to set a Unicode name
        for the function.
        """
        # Default to the function's own __name__ when no name is given.
        if name is None:
            name = function.__name__
        self.funcs[name] = function

    def register_introspection_functions(self):
        """Registers the XML-RPC introspection methods in the system
        namespace.

        see http://xmlrpc.usefulinc.com/doc/reserved.html
        """
        self.funcs.update({'system.listMethods' : self.system_listMethods,
                           'system.methodSignature' : self.system_methodSignature,
                           'system.methodHelp' : self.system_methodHelp})

    def register_multicall_functions(self):
        """Registers the XML-RPC multicall method in the system
        namespace.

        see http://www.xmlrpc.com/discuss/msgReader$1208"""
        self.funcs.update({'system.multicall' : self.system_multicall})

    def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
        """Dispatches an XML-RPC method from marshalled (XML) data.

        XML-RPC methods are dispatched from the marshalled (XML) data
        using the _dispatch method and the result is returned as
        marshalled data. For backwards compatibility, a dispatch
        function can be provided as an argument (see comment in
        SimpleXMLRPCRequestHandler.do_POST) but overriding the
        existing method through subclassing is the preferred means
        of changing method dispatch behavior.
        """
        try:
            params, method = loads(data, use_builtin_types=self.use_builtin_types)

            # generate response
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            # wrap response in a singleton tuple
            response = (response,)
            response = dumps(response, methodresponse=1,
                             allow_none=self.allow_none, encoding=self.encoding)
        except Fault as fault:
            # A Fault raised by the handler is a legitimate XML-RPC
            # response; marshal it as such.
            response = dumps(fault, allow_none=self.allow_none,
                             encoding=self.encoding)
        except:
            # report exception back to server
            # (deliberately broad: any failure must still produce a
            # well-formed Fault response for the remote caller)
            exc_type, exc_value, exc_tb = sys.exc_info()
            response = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none,
                )

        return response.encode(self.encoding)

    def system_listMethods(self):
        """system.listMethods() => ['add', 'subtract', 'multiple']

        Returns a list of the methods supported by the server."""

        methods = set(self.funcs.keys())
        if self.instance is not None:
            # Instance can implement _listMethod to return a list of
            # methods
            if hasattr(self.instance, '_listMethods'):
                methods |= set(self.instance._listMethods())
            # if the instance has a _dispatch method then we
            # don't have enough information to provide a list
            # of methods
            elif not hasattr(self.instance, '_dispatch'):
                methods |= set(list_public_methods(self.instance))
        return sorted(methods)

    def system_methodSignature(self, method_name):
        """system.methodSignature('add') => [double, int, int]

        Returns a list describing the signature of the method. In the
        above example, the add method takes two integers as arguments
        and returns a double result.

        This server does NOT support system.methodSignature."""

        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html

        return 'signatures not supported'

    def system_methodHelp(self, method_name):
        """system.methodHelp('add') => "Adds two integers together"

        Returns a string containing documentation for the specified method."""

        method = None
        # Registered functions take precedence over the instance.
        if method_name in self.funcs:
            method = self.funcs[method_name]
        elif self.instance is not None:
            # Instance can implement _methodHelp to return help for a method
            if hasattr(self.instance, '_methodHelp'):
                return self.instance._methodHelp(method_name)
            # if the instance has a _dispatch method then we
            # don't have enough information to provide help
            elif not hasattr(self.instance, '_dispatch'):
                try:
                    method = resolve_dotted_attribute(
                                self.instance,
                                method_name,
                                self.allow_dotted_names
                                )
                except AttributeError:
                    pass

        # Note that we aren't checking that the method actually
        # be a callable object of some kind
        if method is None:
            return ""
        else:
            return pydoc.getdoc(method)
def system_multicall(self, call_list):
"""system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]
Allows the caller to package multiple XML-RPC calls into a single
request.
See http://www.xmlrpc.com/discuss/msgReader$1208
"""
results = []
for call in call_list:
method_name = call['methodName']
params = call['params']
try:
# XXX A marshalling error in any response will fail the entire
# multicall. If someone cares they should fix this.
results.append([self._dispatch(method_name, params)])
except Fault as fault:
results.append(
{'faultCode' : fault.faultCode,
'faultString' : fault.faultString}
)
except:
exc_type, exc_value, exc_tb = sys.exc_info()
results.append(
{'faultCode' : 1,
'faultString' : "%s:%s" % (exc_type, exc_value)}
)
return results
def _dispatch(self, method, params):
"""Dispatches the XML-RPC method.
XML-RPC calls are forwarded to a registered function that
matches the called XML-RPC method name. If no such function
exists then the call is forwarded to the registered instance,
if available.
If the registered instance has a _dispatch method then that
method will be called with the name of the XML-RPC method and
its parameters as a tuple
e.g. instance._dispatch('add',(2,3))
If the registered instance does not have a _dispatch method
then the instance will be searched to find a matching method
and, if found, will be called.
Methods beginning with an '_' are considered private and will
not be called.
"""
func = None
try:
# check to see if a matching function has been registered
func = self.funcs[method]
except KeyError:
if self.instance is not None:
# check for a _dispatch method
if hasattr(self.instance, '_dispatch'):
return self.instance._dispatch(method, params)
else:
# call instance method directly
try:
func = resolve_dotted_attribute(
self.instance,
method,
self.allow_dotted_names
)
except AttributeError:
pass
if func is not None:
return func(*params)
else:
raise Exception('method "%s" is not supported' % method)
class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler):
    """Simple XML-RPC request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.
    """
    # Class attribute listing the accessible path components;
    # paths not on this list will result in a 404 error.
    rpc_paths = ('/', '/RPC2')
    #if not None, encode responses larger than this, if possible
    encode_threshold = 1400 #a common MTU
    #Override form StreamRequestHandler: full buffering of output
    #and no Nagle.
    wbufsize = -1
    disable_nagle_algorithm = True
    # a re to match a gzip Accept-Encoding
    aepattern = re.compile(r"""
                            \s* ([^\s;]+) \s*            #content-coding
                            (;\s* q \s*=\s* ([0-9\.]+))? #q
                            """, re.VERBOSE | re.IGNORECASE)
    def accept_encodings(self):
        """Parse the Accept-Encoding header into a {coding: q-value} dict.

        Entries that do not match the pattern are skipped; a missing
        q-value defaults to 1.0.
        """
        r = {}
        ae = self.headers.get("Accept-Encoding", "")
        for e in ae.split(","):
            match = self.aepattern.match(e)
            if match:
                v = match.group(3)
                v = float(v) if v else 1.0
                r[match.group(1)] = v
        return r
    def is_rpc_path_valid(self):
        """Return True if self.path is allowed to serve XML-RPC requests."""
        if self.rpc_paths:
            return self.path in self.rpc_paths
        else:
            # If .rpc_paths is empty, just assume all paths are legal
            return True
    def do_POST(self):
        """Handles the HTTP POST request.

        Attempts to interpret all HTTP POST requests as XML-RPC calls,
        which are forwarded to the server's _dispatch method for handling.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return
        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10*1024*1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                chunk = self.rfile.read(chunk_size)
                if not chunk:
                    # Peer closed the connection early; dispatch whatever
                    # was received so far.
                    break
                L.append(chunk)
                size_remaining -= len(L[-1])
            data = b''.join(L)
            data = self.decode_request_content(data)
            if data is None:
                return #response has been sent
            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                data, getattr(self, '_dispatch', None), self.path
            )
        except Exception as e: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            # Send information about the exception if requested
            if hasattr(self.server, '_send_traceback_header') and \
                    self.server._send_traceback_header:
                self.send_header("X-exception", str(e))
                trace = traceback.format_exc()
                trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII')
                self.send_header("X-traceback", trace)
            self.send_header("Content-length", "0")
            self.end_headers()
        else:
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            if self.encode_threshold is not None:
                if len(response) > self.encode_threshold:
                    # Compress only when the client advertised gzip support
                    # with a non-zero q-value.
                    q = self.accept_encodings().get("gzip", 0)
                    if q:
                        try:
                            response = gzip_encode(response)
                            self.send_header("Content-Encoding", "gzip")
                        except NotImplementedError:
                            pass
            self.send_header("Content-length", str(len(response)))
            self.end_headers()
            self.wfile.write(response)
    def decode_request_content(self, data):
        """Decode *data* according to the request's Content-Encoding.

        Returns the decoded bytes, or None after an error response has
        already been sent (unsupported or malformed encoding).
        """
        #support gzip encoding of request
        encoding = self.headers.get("content-encoding", "identity").lower()
        if encoding == "identity":
            return data
        if encoding == "gzip":
            try:
                return gzip_decode(data)
            except NotImplementedError:
                self.send_response(501, "encoding %r not supported" % encoding)
            except ValueError:
                self.send_response(400, "error decoding gzip content")
        else:
            self.send_response(501, "encoding %r not supported" % encoding)
        self.send_header("Content-length", "0")
        self.end_headers()
    def report_404 (self):
        """Send a minimal 404 text/plain response."""
        # Report a 404 error
        self.send_response(404)
        response = b'No such page'
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
    def log_request(self, code='-', size='-'):
        """Selectively log an accepted request (honours server.logRequests)."""
        if self.server.logRequests:
            BaseHTTPRequestHandler.log_request(self, code, size)
class SimpleXMLRPCServer(socketserver.TCPServer,
                         SimpleXMLRPCDispatcher):
    """Simple XML-RPC server.

    Serves XML-RPC requests by dispatching them to registered functions or
    to a single registered instance.  Override the _dispatch method
    inherited from SimpleXMLRPCDispatcher to customise that behaviour.
    """

    allow_reuse_address = True

    # Warning: this is for debugging purposes only! Never set this to True in
    # production code, as will be sending out sensitive information (exception
    # and stack trace details) when exceptions are raised inside
    # SimpleXMLRPCRequestHandler.do_POST
    _send_traceback_header = False

    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        self.logRequests = logRequests

        # Initialise the dispatcher first so request handling is fully
        # configured before the listening socket is created below.
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding,
                                        use_builtin_types)
        socketserver.TCPServer.__init__(self, addr, requestHandler,
                                        bind_and_activate)
class MultiPathXMLRPCServer(SimpleXMLRPCServer):
    """Multipath XML-RPC Server

    Routes requests to one of several SimpleXMLRPCDispatcher instances
    keyed by HTTP request path, so a single port can host two or more
    'virtual XML-RPC servers'.  Make sure that the requestHandler accepts
    the paths in question.
    """

    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests,
                                    allow_none, encoding, bind_and_activate,
                                    use_builtin_types)
        # Maps request path -> dispatcher instance.
        self.dispatchers = {}
        self.allow_none = allow_none
        self.encoding = encoding or 'utf-8'

    def add_dispatcher(self, path, dispatcher):
        """Attach *dispatcher* to *path* and return it."""
        self.dispatchers[path] = dispatcher
        return dispatcher

    def get_dispatcher(self, path):
        """Return the dispatcher registered for *path* (KeyError if none)."""
        return self.dispatchers[path]

    def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
        """Forward the marshalled request to the dispatcher for *path*.

        Each dispatcher is expected to handle its own errors; anything that
        still escapes (e.g. an unknown path) is reported as Fault(1, ...).
        """
        try:
            return self.dispatchers[path]._marshaled_dispatch(
                data, dispatch_method, path)
        except:
            # report low level exception back to server
            exc_type, exc_value = sys.exc_info()[:2]
            fault_xml = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none)
            return fault_xml.encode(self.encoding)
class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
    """Simple handler for XML-RPC data passed through CGI."""

    def __init__(self, allow_none=False, encoding=None, use_builtin_types=False):
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding,
                                        use_builtin_types)

    def handle_xmlrpc(self, request_text):
        """Dispatch a single XML-RPC request and print the HTTP reply."""
        response = self._marshaled_dispatch(request_text)

        print('Content-Type: text/xml')
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        # The marshalled body is bytes, so it is written through the binary
        # buffer underlying the text-mode stdout.
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()

    def handle_get(self):
        """Answer an HTTP GET with a 400 error page.

        XML-RPC only uses the POST method, so GET is always rejected.
        """
        code = 400
        message, explain = BaseHTTPRequestHandler.responses[code]

        response = http.server.DEFAULT_ERROR_MESSAGE % {
            'code': code,
            'message': message,
            'explain': explain,
        }
        response = response.encode('utf-8')
        print('Status: %d %s' % (code, message))
        print('Content-Type: %s' % http.server.DEFAULT_ERROR_CONTENT_TYPE)
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()

    def handle_request(self, request_text=None):
        """Handle one CGI request, reading POST data from stdin if needed.

        GET requests (detected through the REQUEST_METHOD environment
        variable) are routed to handle_get; everything else is treated as
        an XML-RPC POST whose body is *request_text* or stdin.
        """
        if request_text is None and \
                os.environ.get('REQUEST_METHOD', None) == 'GET':
            self.handle_get()
            return

        # POST data is normally available through stdin
        try:
            length = int(os.environ.get('CONTENT_LENGTH', None))
        except (ValueError, TypeError):
            length = -1
        if request_text is None:
            request_text = sys.stdin.read(length)
        self.handle_xmlrpc(request_text)
# -----------------------------------------------------------------------------
# Self documenting XML-RPC Server.
class ServerHTMLDoc(pydoc.HTMLDoc):
    """Class used to generate pydoc HTML document for a server"""

    def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
        """Mark up some plain text, given a context of symbols to look for.
        Each context dictionary maps object names to anchor names."""
        # The mutable default dicts are only ever read, so sharing them
        # across calls is harmless.
        escape = escape or self.escape
        results = []
        here = 0

        # XXX Note that this regular expression does not allow for the
        # hyperlinking of arbitrary strings being used as method
        # names. Only methods with names consisting of word characters
        # and '.'s are hyperlinked.
        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                                r'RFC[- ]?(\d+)|'
                                r'PEP[- ]?(\d+)|'
                                r'(self\.)?((?:\w|\.)+))\b')
        while 1:
            match = pattern.search(text, here)
            if not match: break
            start, end = match.span()
            results.append(escape(text[here:start]))

            all, scheme, rfc, pep, selfdot, name = match.groups()
            if scheme:
                # Bug fix: double quotes inside the URL must be entity-escaped
                # so they cannot terminate the href attribute early; the
                # previous replace('"', '"') was a no-op.
                url = escape(all).replace('"', '&quot;')
                results.append('<a href="%s">%s</a>' % (url, url))
            elif rfc:
                url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif pep:
                url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif text[end:end+1] == '(':
                results.append(self.namelink(name, methods, funcs, classes))
            elif selfdot:
                results.append('self.<strong>%s</strong>' % name)
            else:
                results.append(self.namelink(name, classes))
            here = end
        results.append(escape(text[here:]))
        return ''.join(results)

    def docroutine(self, object, name, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object."""
        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''
        title = '<a name="%s"><strong>%s</strong></a>' % (
            self.escape(anchor), self.escape(name))

        # NOTE(review): inspect.formatargspec is deprecated and removed in
        # Python 3.11+ -- confirm the Python versions this file must support.
        if inspect.ismethod(object):
            args = inspect.getfullargspec(object)
            # exclude the argument bound to the instance, it will be
            # confusing to the non-Python user
            argspec = inspect.formatargspec (
                    args.args[1:],
                    args.varargs,
                    args.varkw,
                    args.defaults,
                    annotations=args.annotations,
                    formatvalue=self.formatvalue
                )
        elif inspect.isfunction(object):
            args = inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args.args, args.varargs, args.varkw, args.defaults,
                annotations=args.annotations,
                formatvalue=self.formatvalue)
        else:
            argspec = '(...)'

        if isinstance(object, tuple):
            # The doc generator may pass an (argstring, docstring) pair
            # instead of a real callable; either element may be None.
            argspec = object[0] or argspec
            docstring = object[1] or ""
        else:
            docstring = pydoc.getdoc(object)

        decl = title + argspec + (note and self.grey(
            '<font face="helvetica, arial">%s</font>' % note))

        doc = self.markup(
            docstring, self.preformat, funcs, classes, methods)
        doc = doc and '<dd><tt>%s</tt></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)

    def docserver(self, server_name, package_documentation, methods):
        """Produce HTML documentation for an XML-RPC server."""
        fdict = {}
        for key, value in methods.items():
            fdict[key] = '#-' + key
            fdict[value] = fdict[key]

        server_name = self.escape(server_name)
        head = '<big><big><strong>%s</strong></big></big>' % server_name
        # NOTE(review): pydoc.HTMLDoc.heading/bigsection dropped their color
        # arguments in Python 3.11 -- confirm the supported versions.
        result = self.heading(head, '#ffffff', '#7799ee')

        doc = self.markup(package_documentation, self.preformat, fdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc

        contents = []
        method_items = sorted(methods.items())
        for key, value in method_items:
            contents.append(self.docroutine(value, key, funcs=fdict))
        result = result + self.bigsection(
            'Methods', '#ffffff', '#eeaa77', ''.join(contents))

        return result
class XMLRPCDocGenerator:
    """Generates documentation for an XML-RPC server.

    This class is designed as mix-in and should not
    be constructed directly.
    """
    def __init__(self):
        # setup variables used for HTML documentation
        self.server_name = 'XML-RPC Server Documentation'
        self.server_documentation = \
            "This server exports the following methods through the XML-RPC "\
            "protocol."
        self.server_title = 'XML-RPC Server Documentation'
    def set_server_title(self, server_title):
        """Set the HTML title of the generated server documentation"""
        self.server_title = server_title
    def set_server_name(self, server_name):
        """Set the name of the generated HTML server documentation"""
        self.server_name = server_name
    def set_server_documentation(self, server_documentation):
        """Set the documentation string for the entire server."""
        self.server_documentation = server_documentation
    def generate_html_documentation(self):
        """generate_html_documentation() => html documentation for the server

        Generates HTML documentation for the server using introspection for
        installed functions and instances that do not implement the
        _dispatch method. Alternatively, instances can choose to implement
        the _get_method_argstring(method_name) method to provide the
        argument string used in the documentation and the
        _methodHelp(method_name) method to provide the help text used
        in the documentation."""
        methods = {}
        # self.system_listMethods, self.funcs and self.instance come from
        # the dispatcher class this mix-in is combined with.
        for method_name in self.system_listMethods():
            if method_name in self.funcs:
                method = self.funcs[method_name]
            elif self.instance is not None:
                # method_info is an (argstring, documentation) pair; either
                # element may remain None if the instance cannot supply it.
                method_info = [None, None] # argspec, documentation
                if hasattr(self.instance, '_get_method_argstring'):
                    method_info[0] = self.instance._get_method_argstring(method_name)
                if hasattr(self.instance, '_methodHelp'):
                    method_info[1] = self.instance._methodHelp(method_name)
                method_info = tuple(method_info)
                if method_info != (None, None):
                    method = method_info
                elif not hasattr(self.instance, '_dispatch'):
                    try:
                        method = resolve_dotted_attribute(
                                    self.instance,
                                    method_name
                                    )
                    except AttributeError:
                        # Fall back to the empty pair; docroutine renders it
                        # as '(...)' with no help text.
                        method = method_info
                else:
                    method = method_info
            else:
                assert 0, "Could not find method in self.functions and no "\
                          "instance installed"
            methods[method_name] = method
        documenter = ServerHTMLDoc()
        documentation = documenter.docserver(
                                self.server_name,
                                self.server_documentation,
                                methods
                            )
        return documenter.page(self.server_title, documentation)
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    """XML-RPC and documentation request handler class.

    POST requests are decoded as XML-RPC calls (inherited behaviour);
    GET requests are answered with the server's generated HTML
    documentation.
    """

    def do_GET(self):
        """Serve the server's HTML documentation for an HTTP GET."""
        # Reject paths outside rpc_paths with a 404, mirroring do_POST.
        if not self.is_rpc_path_valid():
            self.report_404()
            return

        body = self.server.generate_html_documentation().encode('utf-8')
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)
class DocXMLRPCServer( SimpleXMLRPCServer,
                       XMLRPCDocGenerator):
    """XML-RPC and HTML documentation server.

    Extends SimpleXMLRPCServer so that, via DocXMLRPCRequestHandler, GET
    requests are answered with self-generated HTML documentation.
    """

    def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests,
                                    allow_none, encoding, bind_and_activate,
                                    use_builtin_types)
        XMLRPCDocGenerator.__init__(self)
class DocCGIXMLRPCRequestHandler(   CGIXMLRPCRequestHandler,
                                    XMLRPCDocGenerator):
    """Handler for XML-RPC data and documentation requests passed through
    CGI"""

    def __init__(self):
        CGIXMLRPCRequestHandler.__init__(self)
        XMLRPCDocGenerator.__init__(self)

    def handle_get(self):
        """Answer an HTTP GET with self-generated HTML documentation."""
        body = self.generate_html_documentation().encode('utf-8')

        print('Content-Type: text/html')
        print('Content-Length: %d' % len(body))
        print()
        sys.stdout.flush()
        # Documentation bytes bypass the text wrapper around stdout.
        sys.stdout.buffer.write(body)
        sys.stdout.buffer.flush()
if __name__ == '__main__':
    import datetime
    # Demo: expose a builtin (pow), a lambda under an explicit name ('add'),
    # an instance with dotted-name lookup enabled, and system.multicall.
    class ExampleService:
        def getData(self):
            return '42'
        class currentTime:
            # Reachable as currentTime.getCurrentTime over XML-RPC thanks to
            # allow_dotted_names=True below.
            @staticmethod
            def getCurrentTime():
                return datetime.datetime.now()
    server = SimpleXMLRPCServer(("localhost", 8000))
    server.register_function(pow)
    server.register_function(lambda x,y: x+y, 'add')
    server.register_instance(ExampleService(), allow_dotted_names=True)
    server.register_multicall_functions()
    print('Serving XML-RPC on localhost port 8000')
    print('It is advisable to run this example server within a secure, closed network.')
    try:
        # Blocks until interrupted; Ctrl-C shuts the server down cleanly.
        server.serve_forever()
    except KeyboardInterrupt:
        print("\nKeyboard interrupt received, exiting.")
        server.server_close()
        sys.exit(0)
| mit |
Mattze96/youtube-dl | youtube_dl/extractor/kickstarter.py | 111 | 2654 | # encoding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class KickStarterIE(InfoExtractor):
    _VALID_URL = r'https?://www\.kickstarter\.com/projects/(?P<id>[^/]*)/.*'
    _TESTS = [{
        'url': 'https://www.kickstarter.com/projects/1404461844/intersection-the-story-of-josh-grant?ref=home_location',
        'md5': 'c81addca81327ffa66c642b5d8b08cab',
        'info_dict': {
            'id': '1404461844',
            'ext': 'mp4',
            'title': 'Intersection: The Story of Josh Grant by Kyle Cowling',
            'description': (
                'A unique motocross documentary that examines the '
                'life and mind of one of sports most elite athletes: Josh Grant.'
            ),
        },
    }, {
        'note': 'Embedded video (not using the native kickstarter video service)',
        'url': 'https://www.kickstarter.com/projects/597507018/pebble-e-paper-watch-for-iphone-and-android/posts/659178',
        'info_dict': {
            'id': '78704821',
            'ext': 'mp4',
            'uploader_id': 'pebble',
            'uploader': 'Pebble Technology',
            'title': 'Pebble iOS Notifications',
        }
    }, {
        'url': 'https://www.kickstarter.com/projects/1420158244/power-drive-2000/widget/video.html',
        'info_dict': {
            'id': '1420158244',
            'ext': 'mp4',
            'title': 'Power Drive 2000',
        },
        'expected_warnings': ['OpenGraph description'],
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        page = self._download_webpage(url, video_id)

        # The optional trailing "— Kickstarter" suffix is excluded from the
        # captured title.
        title = self._html_search_regex(
            r'<title>\s*(.*?)(?:\s*— Kickstarter)?\s*</title>',
            page, 'title')

        native_url = self._search_regex(
            r'data-video-url="(.*?)"',
            page, 'video URL', default=None)
        if native_url is None:
            # No native kickstarter player on the page: hand the URL over
            # to the generic extractor so any embedded video is picked up.
            return {
                '_type': 'url_transparent',
                'ie_key': 'Generic',
                'url': url,
                'title': title,
            }

        thumbnail = self._og_search_thumbnail(page, default=None)
        if thumbnail is None:
            # Fall back to the poster image when OpenGraph data is absent.
            thumbnail = self._html_search_regex(
                r'<img[^>]+class="[^"]+\s*poster\s*[^"]+"[^>]+src="([^"]+)"',
                page, 'thumbnail image', fatal=False)

        return {
            'id': video_id,
            'url': native_url,
            'title': title,
            'description': self._og_search_description(page),
            'thumbnail': thumbnail,
        }
| unlicense |
skycucumber/Messaging-Gateway | webapp/venv/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.py | 171 | 4326 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
    """Win32 console color constants (the low 3 bits of an attribute byte).

    Values mirror the FOREGROUND_* composition order in wincon.h.
    """
    BLACK = 0
    BLUE = 1
    GREEN = 2
    CYAN = 3
    RED = 4
    MAGENTA = 5
    YELLOW = 6
    GREY = 7
# from wincon.h
class WinStyle(object):
    """Win32 console intensity flag (FOREGROUND_INTENSITY in wincon.h)."""
    NORMAL = 0x00 # dim text, dim background
    BRIGHT = 0x08 # bright text, dim background
class WinTerm(object):
    """Tracks and applies console text attributes through the win32 API.

    The console attribute byte packs the foreground color (low 3 bits),
    background color (bits 4-6) and the brightness flag (0x08); this class
    keeps a Python-side copy of the three pieces so each can be changed
    independently and the combined value pushed back with
    SetConsoleTextAttribute.
    """

    def __init__(self):
        # Capture the attributes active at startup so reset_all() can
        # restore them later.
        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
        self.set_attrs(self._default)
        self._default_fore = self._fore
        self._default_back = self._back
        self._default_style = self._style

    def get_attrs(self):
        """Pack the tracked fore/back/style values into one attribute byte."""
        return self._fore + self._back * 16 + self._style

    def set_attrs(self, value):
        """Unpack an attribute byte into the tracked fore/back/style values."""
        self._fore = value & 7
        self._back = (value >> 4) & 7
        self._style = value & WinStyle.BRIGHT

    def reset_all(self, on_stderr=None):
        """Restore the attributes captured at startup (written to stdout)."""
        self.set_attrs(self._default)
        self.set_console(attrs=self._default)

    def fore(self, fore=None, on_stderr=False):
        """Set the foreground color (startup default when *fore* is None)."""
        if fore is None:
            fore = self._default_fore
        self._fore = fore
        self.set_console(on_stderr=on_stderr)

    def back(self, back=None, on_stderr=False):
        """Set the background color (startup default when *back* is None)."""
        if back is None:
            back = self._default_back
        self._back = back
        self.set_console(on_stderr=on_stderr)

    def style(self, style=None, on_stderr=False):
        """Set the brightness style (startup default when *style* is None)."""
        if style is None:
            style = self._default_style
        self._style = style
        self.set_console(on_stderr=on_stderr)

    def set_console(self, attrs=None, on_stderr=False):
        """Push the current (or the given) attribute byte to the console."""
        if attrs is None:
            attrs = self.get_attrs()
        handle = win32.STDERR if on_stderr else win32.STDOUT
        win32.SetConsoleTextAttribute(handle, attrs)

    def get_position(self, handle):
        """Return the cursor position for *handle*, converted to 1-based."""
        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
        # Because Windows coordinates are 0-based,
        # and win32.SetConsoleCursorPosition expects 1-based.
        position.X += 1
        position.Y += 1
        return position

    def set_cursor_position(self, position=None, on_stderr=False):
        """Move the cursor to *position* (a (Y, X) pair or COORD-like)."""
        if position is None:
            # I'm not currently tracking the position, so there is no default.
            # position = self.get_position()
            return
        handle = win32.STDERR if on_stderr else win32.STDOUT
        win32.SetConsoleCursorPosition(handle, position)

    def cursor_up(self, num_rows=0, on_stderr=False):
        """Move the cursor up by *num_rows* rows (no-op for 0)."""
        if num_rows == 0:
            return
        handle = win32.STDERR if on_stderr else win32.STDOUT
        position = self.get_position(handle)
        adjusted_position = (position.Y - num_rows, position.X)
        self.set_cursor_position(adjusted_position, on_stderr)

    def erase_data(self, mode=0, on_stderr=False):
        """Clear the screen, ANSI 'ED' style.

        *mode* may be a plain int or, as the ANSI parser passes it, a
        sequence of params whose first element is the mode.  Only mode 2
        (clear the whole screen and home the cursor) is implemented; modes
        0 (cursor to end) and 1 (start to cursor) return without doing
        anything.  From the API it should be possible to compute a
        different byte count relative to the cursor to support them.
        """
        # Bug fix: the original indexed mode[0] unconditionally, raising
        # TypeError for the documented int default (0) or any bare int,
        # and IndexError for an empty params sequence.
        if isinstance(mode, (list, tuple)):
            mode = mode[0] if mode else 0
        if mode != 2:
            return
        handle = win32.STDERR if on_stderr else win32.STDOUT
        # here's where we'll home the cursor
        coord_screen = win32.COORD(0, 0)
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # get the number of character cells in the current buffer
        dw_con_size = csbi.dwSize.X * csbi.dwSize.Y
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', dw_con_size, coord_screen)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), dw_con_size, coord_screen)
        # put the cursor at (0, 0)
        win32.SetConsoleCursorPosition(handle, (coord_screen.X, coord_screen.Y))
| gpl-2.0 |
tensorflow/examples | tensorflow_examples/lite/model_maker/third_party/recommendation/ml/model/recommendation_model_launcher.py | 1 | 12499 | # Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Personalized recommendation model runner based on Tensorflow keras API."""
import os
import time
from typing import List
from absl import app
from absl import flags
import tensorflow as tf
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.configs import input_config_pb2
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.configs import model_config as model_config_class
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.model import input_pipeline
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.model import losses
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.model import metrics
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.model import recommendation_model
from google.protobuf import text_format
FLAGS = flags.FLAGS
def define_flags():
  """Registers the command-line flags used by the launcher.

  Must be called once before absl parses argv; parsed values are read
  through the module-level FLAGS object.
  """
  flags.DEFINE_string('training_data_filepattern', None,
                      'File pattern of the training data.')
  # Bug fix: help text previously said "training data" (copy/paste error).
  flags.DEFINE_string('testing_data_filepattern', None,
                      'File pattern of the testing data.')
  flags.DEFINE_string('model_dir', None, 'Directory to store checkpoints.')
  flags.DEFINE_string('export_dir', None, 'Directory for the exported model.')
  flags.DEFINE_integer('batch_size', 1, 'Training batch size.')
  flags.DEFINE_float('learning_rate', 0.1, 'Learning rate.')
  flags.DEFINE_integer('steps_per_epoch', 10,
                       'Number of steps to run in each epoch.')
  flags.DEFINE_integer('num_epochs', 10000, 'Number of training epochs.')
  flags.DEFINE_integer('num_eval_steps', 1000, 'Number of eval steps.')
  flags.DEFINE_enum('run_mode', 'train_and_eval',
                    ['train_and_eval', 'export', 'export_tflite'],
                    'Mode of the launcher, default value is: train_and_eval')
  flags.DEFINE_float('gradient_clip_norm', 1.0,
                     'gradient_clip_norm <= 0 meaning no clip.')
  flags.DEFINE_string('vocab_dir', None,
                      'Path of the directory storing vocabulary files.')
  flags.DEFINE_string('input_config_file', None,
                      'Path to the input config pbtxt'
                      'file.')
  flags.DEFINE_list('hidden_layer_dims', None, 'Hidden layer dimensions.')
  flags.DEFINE_list('eval_top_k', None, 'Top k to evaluate.')
  # The CNN- and LSTM-specific flags below are only consulted when the
  # corresponding encoder type is configured.
  flags.DEFINE_list(
      'conv_num_filter_ratios', None,
      'Number of filter ratios for the Conv1D layer, this'
      'flag is only required if CNN encoder type is used.')
  flags.DEFINE_integer(
      'conv_kernel_size', 4,
      'Size of the Conv1D layer kernel size, this flag is only'
      'required if CNN encoder type is used.')
  flags.DEFINE_integer(
      'lstm_num_units', 4, 'Number of units for the LSTM layer,'
      'this flag is only required if LSTM encoder type is used.')
  flags.DEFINE_integer('num_predictions', 5,
                       'Num of top predictions to output.')
  flags.DEFINE_string('checkpoint_path', '', 'Path to the checkpoint.')
class SimpleCheckpoint(tf.keras.callbacks.Callback):
  """Keras callback to save tf.train.Checkpoints."""

  def __init__(self, checkpoint_manager):
    super(SimpleCheckpoint, self).__init__()
    self.checkpoint_manager = checkpoint_manager

  def on_epoch_end(self, epoch, logs=None):
    """Write a checkpoint numbered by the optimizer's step counter."""
    manager = self.checkpoint_manager
    step = manager._step_counter.numpy()  # pylint: disable=protected-access
    manager.save(checkpoint_number=step)
def _get_optimizer(learning_rate: float, gradient_clip_norm: float):
  """Create the Adagrad optimizer, clipping gradients when norm > 0."""
  if gradient_clip_norm > 0:
    return tf.keras.optimizers.Adagrad(learning_rate,
                                       clipnorm=gradient_clip_norm)
  return tf.keras.optimizers.Adagrad(learning_rate)
def _get_metrics(eval_top_k: List[int]):
  """Build eval metrics over both full-vocabulary and in-batch candidates."""
  metrics_list = []
  # Full-vocabulary metrics first, then their in-batch counterparts.
  for k in eval_top_k:
    metrics_list.append(
        metrics.GlobalRecall(name=f'Global_Recall/Recall_{k}', top_k=k))
  metrics_list.append(metrics.GlobalMeanRank(name='global_mean_rank'))
  for k in eval_top_k:
    metrics_list.append(
        metrics.BatchRecall(name=f'Batch_Recall/Recall_{k}', top_k=k))
  metrics_list.append(metrics.BatchMeanRank(name='batch_mean_rank'))
  return metrics_list
def compile_model(model, eval_top_k, learning_rate, gradient_clip_norm):
  """Compile keras model."""
  optimizer = _get_optimizer(learning_rate=learning_rate,
                             gradient_clip_norm=gradient_clip_norm)
  model.compile(
      optimizer=optimizer,
      loss=losses.GlobalSoftmax(),
      metrics=_get_metrics(eval_top_k))
def build_keras_model(input_config: input_config_pb2.InputConfig,
                      model_config: model_config_class.ModelConfig):
  """Construct and compile the recommendation keras model.

  The model architecture comes from *model_config*, input feature
  processing from *input_config*; learning rate and gradient clipping are
  taken from the command-line FLAGS.

  Args:
    input_config: The configuration object(input_config_pb2.InputConfig)
      that holds parameters for model input feature processing.
    model_config: A ModelConfig object that holds parameters to set up the
      model architecture.

  Returns:
    The compiled keras model.
  """
  model = recommendation_model.RecommendationModel(
      input_config=input_config,
      model_config=model_config)
  compile_model(
      model,
      eval_top_k=model_config.eval_top_k,
      learning_rate=FLAGS.learning_rate,
      gradient_clip_norm=FLAGS.gradient_clip_norm)
  return model
def get_callbacks(keras_model: tf.keras.Model,
                  model_dir: str):
  """Sets up callbacks for training and evaluation.

  Args:
    keras_model: The compiled model whose weights and optimizer state are
      checkpointed.
    model_dir: Directory receiving checkpoints and TensorBoard summaries.

  Returns:
    A list of [TensorBoard summary callback, per-epoch checkpoint callback].
  """
  summary_dir = os.path.join(model_dir, 'summaries')
  summary_callback = tf.keras.callbacks.TensorBoard(summary_dir)
  checkpoint = tf.train.Checkpoint(
      model=keras_model, optimizer=keras_model.optimizer)
  # max_to_keep=None retains all checkpoints; checkpoint_interval=0 leaves
  # the save cadence to SimpleCheckpoint (which saves at each epoch end).
  checkpoint_manager = tf.train.CheckpointManager(
      checkpoint,
      directory=model_dir,
      max_to_keep=None,
      step_counter=keras_model.optimizer.iterations,
      checkpoint_interval=0)
  checkpoint_callback = SimpleCheckpoint(checkpoint_manager)
  return [summary_callback, checkpoint_callback]
def train_and_eval(model: tf.keras.Model,
                   model_dir: str,
                   train_input_dataset: tf.data.Dataset,
                   eval_input_dataset: tf.data.Dataset,
                   steps_per_epoch: int,
                   epochs: int,
                   eval_steps: int):
  """Train and evaluate.

  Args:
    model: Compiled keras model to fit.
    model_dir: Directory for checkpoints and summaries (used via callbacks).
    train_input_dataset: Dataset supplying training batches.
    eval_input_dataset: Dataset supplying validation batches.
    steps_per_epoch: Number of training steps per epoch.
    epochs: Number of epochs to train.
    eval_steps: Number of validation steps run after each epoch.

  Returns:
    The trained model.
  """
  callbacks = get_callbacks(model, model_dir)
  history = model.fit(
      x=train_input_dataset,
      validation_data=eval_input_dataset,
      steps_per_epoch=steps_per_epoch,
      epochs=epochs,
      validation_steps=eval_steps,
      callbacks=callbacks)
  # The History object is only logged; callers receive the model itself.
  tf.get_logger().info(history)
  return model
def save_model(checkpoint_path: str, export_dir: str,
               input_config: input_config_pb2.InputConfig,
               model_config: model_config_class.ModelConfig):
  """Export to savedmodel.

  Args:
    checkpoint_path: The path to the checkpoint that the model will be exported
      based on.
    export_dir: The directory to export models to.
    input_config: The input config of the model.
    model_config: The configuration to set up the model.
  """
  # Rebuild the architecture from config, then restore the trained weights
  # from the checkpoint before exporting.
  model = recommendation_model.RecommendationModel(
      input_config=input_config,
      model_config=model_config)
  checkpoint = tf.train.Checkpoint(model=model)
  checkpoint.restore(checkpoint_path).run_restore_ops()
  # Export the model's `serve` function as the default serving signature.
  input_specs = input_pipeline.get_serving_input_specs(input_config)
  signatures = {
      tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
          model.serve.get_concrete_function(**input_specs)
  }
  tf.saved_model.save(model, export_dir=export_dir, signatures=signatures)
def export_tflite(export_dir):
  """Converts the SavedModel under `export_dir` into a TFLite flatbuffer.

  The converted model is written next to the SavedModel as 'model.tflite'.

  Args:
    export_dir: the model exportation dir, where saved_model is located.
  """
  flatbuffer = tf.lite.TFLiteConverter.from_saved_model(export_dir).convert()
  output_path = os.path.join(export_dir, 'model.tflite')
  with tf.io.gfile.GFile(output_path, 'wb') as output_file:
    output_file.write(flatbuffer)
def export(checkpoint_path: str, input_config: input_config_pb2.InputConfig,
           model_config: model_config_class.ModelConfig, export_dir: str):
  """Export to tensorflow saved model and TFLite model.

  Args:
    checkpoint_path: The path to the checkpoint that the model will be exported
      based on.
    input_config: The input config of the model.
    model_config: The configuration to set up the model.
    export_dir: The directory to store the exported model, If not set, model is
      exported to the model_dir with timestamp.
  """
  logger = tf.get_logger()
  if not export_dir:
    # Default destination: timestamped subdirectory under the model dir.
    export_dir = os.path.join(FLAGS.model_dir, 'export', str(int(time.time())))
  logger.info('Exporting model to dir: {}'.format(export_dir))
  save_model(
      checkpoint_path=checkpoint_path,
      export_dir=export_dir,
      input_config=input_config,
      model_config=model_config)
  logger.info('Converting model to tflite model.')
  # TFLite conversion reads the SavedModel that save_model() just wrote.
  export_tflite(export_dir)
def load_input_config():
  """Loads the input feature configuration proto from --input_config_file.

  Returns:
    The parsed input_config_pb2.InputConfig message.

  Raises:
    ValueError: If --input_config_file is not set.
  """
  # Raise explicitly instead of using `assert`: assertions are stripped when
  # Python runs with -O, which would turn a missing flag into an obscure
  # file-open error instead of a clear message.
  if not FLAGS.input_config_file:
    raise ValueError('input_config_file cannot be empty.')
  with tf.io.gfile.GFile(FLAGS.input_config_file, 'rb') as reader:
    return text_format.Parse(reader.read(), input_config_pb2.InputConfig())
def prepare_model_config():
  """Builds a ModelConfig from command-line flags.

  List-valued flags arrive as strings and are converted to ints here.

  Returns:
    A model_config_class.ModelConfig populated from FLAGS.
  """
  return model_config_class.ModelConfig(
      hidden_layer_dims=[int(x) for x in FLAGS.hidden_layer_dims],
      eval_top_k=[int(x) for x in FLAGS.eval_top_k],
      conv_num_filter_ratios=[int(x) for x in FLAGS.conv_num_filter_ratios],
      conv_kernel_size=FLAGS.conv_kernel_size,
      lstm_num_units=FLAGS.lstm_num_units,
      num_predictions=FLAGS.num_predictions)
def main(_):
  """Launcher entry point: trains/evaluates and/or exports the model.

  Behavior is selected by FLAGS.run_mode:
    * 'train_and_eval': train the model, then export the latest checkpoint
      (if one was written).
    * 'export': export FLAGS.checkpoint_path, falling back to the latest
      checkpoint in FLAGS.model_dir.
  """
  logger = tf.get_logger()
  # Ensure the output directories exist (note: gfile.mkdir is not recursive).
  if not tf.io.gfile.exists(FLAGS.model_dir):
    tf.io.gfile.mkdir(FLAGS.model_dir)
  if not tf.io.gfile.exists(FLAGS.export_dir):
    tf.io.gfile.mkdir(FLAGS.export_dir)
  input_config = load_input_config()
  model_config = prepare_model_config()
  logger.info('Setting up train and eval input datasets.')
  train_input_dataset = input_pipeline.get_input_dataset(
      data_filepattern=FLAGS.training_data_filepattern,
      input_config=input_config,
      vocab_file_dir=FLAGS.vocab_dir,
      batch_size=FLAGS.batch_size)
  eval_input_dataset = input_pipeline.get_input_dataset(
      data_filepattern=FLAGS.testing_data_filepattern,
      input_config=input_config,
      vocab_file_dir=FLAGS.vocab_dir,
      batch_size=FLAGS.batch_size)
  logger.info('Build keras model for mode: {}.'.format(FLAGS.run_mode))
  model = build_keras_model(
      input_config=input_config, model_config=model_config)
  if FLAGS.run_mode == 'train_and_eval':
    train_and_eval(
        model=model,
        model_dir=FLAGS.model_dir,
        train_input_dataset=train_input_dataset,
        eval_input_dataset=eval_input_dataset,
        steps_per_epoch=FLAGS.steps_per_epoch,
        epochs=FLAGS.num_epochs,
        eval_steps=FLAGS.num_eval_steps)
    # Export the freshly trained model, if training wrote a checkpoint.
    latest_checkpoint_path = tf.train.latest_checkpoint(FLAGS.model_dir)
    if latest_checkpoint_path:
      export(
          checkpoint_path=latest_checkpoint_path,
          input_config=input_config,
          model_config=model_config,
          export_dir=FLAGS.export_dir)
  elif FLAGS.run_mode == 'export':
    # Prefer an explicitly provided checkpoint; otherwise use the latest.
    checkpoint_path = (
        FLAGS.checkpoint_path if FLAGS.checkpoint_path else
        tf.train.latest_checkpoint(FLAGS.model_dir))
    export(
        checkpoint_path=checkpoint_path,
        input_config=input_config,
        model_config=model_config,
        export_dir=FLAGS.export_dir)
  else:
    logger.error('Unsupported launcher run model {}.'.format(FLAGS.run_mode))
if __name__ == '__main__':
  # Register command-line flags (defined elsewhere in this module) before
  # absl's app.run parses them and invokes main().
  define_flags()
  app.run(main)
| apache-2.0 |
nhenezi/kuma | vendor/lib/python/south/utils.py | 32 | 1969 | """
Generally helpful utility functions.
"""
def _ask_for_it_by_name(name):
    """Resolve a dotted path to a module, or to an attribute of a module."""
    parts = name.split(".")
    if len(parts) == 1:
        # No dots: the name refers to a module directly.
        return __import__(parts[0], {}, {}, parts[0])
    # Import the parent module with the last segment in the fromlist, then
    # fetch that segment (a submodule or an attribute) off the parent.
    parent_path = ".".join(parts[:-1])
    module = __import__(parent_path, {}, {}, parts[-1])
    return getattr(module, parts[-1])


def ask_for_it_by_name(name):
    """Resolve a dotted path, caching the result across calls."""
    cache = ask_for_it_by_name.cache
    if name not in cache:
        cache[name] = _ask_for_it_by_name(name)
    return cache[name]

# Shared memoization table for ask_for_it_by_name.
ask_for_it_by_name.cache = {}
def get_attribute(item, attribute):
    """
    Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.)
    """
    current = item
    for segment in attribute.split("."):
        current = getattr(current, segment)
    return current
def auto_through(field):
    "Returns if the M2M class passed in has an autogenerated through table or not."
    through = field.rel.through
    # Django 1.0/1.1: automatic tables leave `through` unset.
    if not through:
        return True
    # Django 1.2+: the through model's _meta carries an auto_created flag.
    meta = getattr(through, "_meta", None)
    return getattr(meta, "auto_created", False)
def auto_model(model):
    "Returns if the given model was automatically generated."
    meta = model._meta
    return getattr(meta, "auto_created", False)
def memoize(function):
    "Standard memoization decorator."
    # The cached value is stored on the *instance* under "_<name>", so each
    # instance evaluates the wrapped zero-argument method at most once.
    name = function.__name__
    _name = '_' + name
    def method(self):
        if not hasattr(self, _name):
            value = function(self)
            setattr(self, _name, value)
        return getattr(self, _name)
    def invalidate():
        # NOTE(review): this checks/deletes the attribute on the wrapper
        # function object, not on any instance, so per-instance cached
        # values are apparently never cleared -- confirm whether callers
        # actually rely on invalidation before changing it.
        if hasattr(method, _name):
            delattr(method, _name)
    # Preserve the wrapped function's identity for introspection.
    method.__name__ = function.__name__
    method.__doc__ = function.__doc__
    method._invalidate = invalidate
    return method
| mpl-2.0 |
mattvick/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/discardlocalchanges_unittest.py | 124 | 4574 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.mocktool import MockOptions, MockTool
from webkitpy.tool.steps.discardlocalchanges import DiscardLocalChanges
from webkitpy.common.system.executive import ScriptError
class DiscardLocalChangesTest(unittest.TestCase):
    """Tests for the DiscardLocalChanges step.

    Each test wires a Mock SCM into a MockTool and verifies whether
    DiscardLocalChanges.run() discards local changes, does nothing, or
    raises ScriptError, depending on the clean/force_clean options and on
    whether the checkout has working-directory changes or local commits.
    """

    def _make_step(self, working_changes, local_commits, force_clean):
        """Return (tool, step) with a mock SCM reporting the given state."""
        tool = MockTool()
        tool._scm = Mock()
        tool._scm.has_working_directory_changes = lambda: working_changes
        tool._scm.has_local_commits = lambda: local_commits
        step = DiscardLocalChanges(
            tool, MockOptions(clean=True, force_clean=force_clean))
        return tool, step

    def test_skip_on_clean(self):
        # With clean=False the step is a no-op regardless of SCM state.
        tool = MockTool()
        tool._scm = Mock()
        step = DiscardLocalChanges(tool, MockOptions(clean=False))
        step.run({})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 0)

    def test_working_changes_exist_with_force(self):
        tool, step = self._make_step(
            working_changes=True, local_commits=False, force_clean=True)
        step.run({})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 1)

    def test_local_commits_exist_with_force(self):
        tool, step = self._make_step(
            working_changes=False, local_commits=True, force_clean=True)
        step.run({})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 1)

    def test_local_commits_and_working_changes_exist_with_force(self):
        tool, step = self._make_step(
            working_changes=True, local_commits=True, force_clean=True)
        step.run({})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 1)

    def test_no_changes_exist_with_force(self):
        tool, step = self._make_step(
            working_changes=False, local_commits=False, force_clean=True)
        step.run({})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 1)

    def test_error_working_changes_exist_without_force(self):
        tool, step = self._make_step(
            working_changes=True, local_commits=False, force_clean=False)
        self.assertRaises(ScriptError, step.run, {})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 0)

    def test_error_local_commits_exist_without_force(self):
        tool, step = self._make_step(
            working_changes=False, local_commits=True, force_clean=False)
        self.assertRaises(ScriptError, step.run, {})
        self.assertEqual(tool._scm.discard_local_changes.call_count, 0)
| bsd-3-clause |
foobacca/django-cms | cms/migrations/0025_placeholder_migration.py | 385 | 19523 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration (intentionally a no-op placeholder)."""
        # Dummy migration
        pass
    def backwards(self, orm):
        """Reverse the migration (intentionally a no-op placeholder)."""
        # Dummy migration
        pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'})
},
'auth.permission': {
'Meta': {
'ordering': "('content_type__app_label', 'content_type__model', 'codename')",
'unique_together': "(('content_type', 'codename'),)",
'object_name': 'Permission'},
'codename': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['contenttypes.ContentType']"}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [],
{'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Group']", 'symmetrical': 'False',
'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'password': (
'django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': (
'django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.CMSPlugin']", 'null': 'True',
'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': "orm['sites.Site']",
'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')",
'object_name': 'Page'},
'changed_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'created_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'limit_visibility_in_menu': (
'django.db.models.fields.SmallIntegerField', [],
{'default': 'None', 'null': 'True', 'db_index': 'True',
'blank': 'True'}),
'login_required': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '80',
'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'blank': 'True', 'related_name': "'children'",
'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['cms.Placeholder']",
'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'published': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'publisher_public': (
'django.db.models.fields.related.OneToOneField', [],
{'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True',
'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '40', 'null': 'True',
'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'template': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.pagemoderator': {
'Meta': {'object_name': 'PageModerator'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderate_children': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_descendants': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_page': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']"})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')",
'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [],
{'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [],
{'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': (
'django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': ['auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_users'",
'to': "orm['auth.User']"}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['auth.User']", 'unique': 'True',
'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_usergroups'",
'to': "orm['auth.User']"}),
'group_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['auth.Group']", 'unique': 'True',
'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': (
'django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)",
'object_name': 'Title'},
'application_urls': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '200',
'null': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'meta_keywords': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': (
'django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': (
'django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"},
'app_label': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site',
'db_table': "'django_site'"},
'domain': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
| bsd-3-clause |
dparlevliet/zelenka-report-storage | server-local/twisted/test/test_ftp.py | 7 | 125122 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
FTP tests.
"""
import os
import errno
from StringIO import StringIO
import getpass
from zope.interface import implements
from zope.interface.verify import verifyClass
from twisted.trial import unittest, util
from twisted.python.randbytes import insecureRandom
from twisted.cred.portal import IRealm
from twisted.protocols import basic
from twisted.internet import reactor, task, protocol, defer, error
from twisted.internet.interfaces import IConsumer
from twisted.cred.error import UnauthorizedLogin
from twisted.cred import portal, checkers, credentials
from twisted.python import failure, filepath, runtime
from twisted.test import proto_helpers
from twisted.protocols import ftp, loopback
# Suppression used by tests exercising the deprecated
# FTPClient.changeDirectory API, so the DeprecationWarning does not fail
# or pollute those tests.
_changeDirectorySuppression = util.suppress(
    category=DeprecationWarning,
    message=(
        r"FTPClient\.changeDirectory is deprecated in Twisted 8\.2 and "
        r"newer\. Use FTPClient\.cwd instead\."))

# POSIX-only tests set their `skip` attribute to this value; it is a skip
# message on Windows and None (no skip) elsewhere.
if runtime.platform.isWindows():
    nonPOSIXSkip = "Cannot run on Windows"
else:
    nonPOSIXSkip = None
class Dummy(basic.LineReceiver):
    """
    Line receiver that records every line and raw chunk it receives, for
    later inspection by tests.
    """
    logname = None

    def __init__(self):
        # Complete lines and raw-mode chunks received, in arrival order.
        self.lines = []
        self.rawData = []

    def connectionMade(self):
        self.f = self.factory # to save typing in pdb :-)

    def lineReceived(self,line):
        self.lines.append(line)

    def rawDataReceived(self, data):
        self.rawData.append(data)

    def lineLengthExceeded(self, line):
        # Ignore overlong lines rather than acting on them.
        pass
class _BufferingProtocol(protocol.Protocol):
    """
    Protocol that accumulates all received bytes in C{self.buffer} and
    fires C{self.d} with itself when the connection is lost.
    """
    def connectionMade(self):
        self.buffer = ''
        self.d = defer.Deferred()

    def dataReceived(self, data):
        self.buffer += data

    def connectionLost(self, reason):
        self.d.callback(self)
class FTPServerTestCase(unittest.TestCase):
    """
    Simple tests for an FTP server with the default settings.

    @ivar clientFactory: class used as ftp client.
    """
    clientFactory = ftp.FTPClientBasic
    userAnonymous = "anonymous"

    def setUp(self):
        """
        Start a real FTP server on an ephemeral local port, connect a client
        to it, and schedule cleanup of both sides.  Returns a Deferred that
        fires once the server protocol is built and the client is connected.
        """
        # Create a directory
        self.directory = self.mktemp()
        os.mkdir(self.directory)
        self.dirPath = filepath.FilePath(self.directory)
        # Start the server
        p = portal.Portal(ftp.FTPRealm(
            anonymousRoot=self.directory,
            userHome=self.directory,
        ))
        p.registerChecker(checkers.AllowAnonymousAccess(),
                          credentials.IAnonymous)
        users_checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
        self.username = "test-user"
        self.password = "test-password"
        users_checker.addUser(self.username, self.password)
        p.registerChecker(users_checker, credentials.IUsernamePassword)
        self.factory = ftp.FTPFactory(portal=p,
                                      userAnonymous=self.userAnonymous)
        port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
        self.addCleanup(port.stopListening)
        # Hook the server's buildProtocol to make the protocol instance
        # accessible to tests.
        buildProtocol = self.factory.buildProtocol
        d1 = defer.Deferred()
        def _rememberProtocolInstance(addr):
            # Done hooking this.
            del self.factory.buildProtocol
            protocol = buildProtocol(addr)
            self.serverProtocol = protocol.wrappedProtocol
            def cleanupServer():
                # Drop the server side of the connection if still open.
                if self.serverProtocol.transport is not None:
                    self.serverProtocol.transport.loseConnection()
            self.addCleanup(cleanupServer)
            d1.callback(None)
            return protocol
        self.factory.buildProtocol = _rememberProtocolInstance
        # Connect a client to it
        portNum = port.getHost().port
        clientCreator = protocol.ClientCreator(reactor, self.clientFactory)
        d2 = clientCreator.connectTCP("127.0.0.1", portNum)
        def gotClient(client):
            self.client = client
            self.addCleanup(self.client.transport.loseConnection)
        d2.addCallback(gotClient)
        return defer.gatherResults([d1, d2])

    def assertCommandResponse(self, command, expectedResponseLines,
                              chainDeferred=None):
        """Asserts that sending an FTP command receives the expected
        response.

        Returns a Deferred. Optionally accepts a deferred to chain its actions
        to.
        """
        if chainDeferred is None:
            chainDeferred = defer.succeed(None)
        def queueCommand(ignored):
            d = self.client.queueStringCommand(command)
            def gotResponse(responseLines):
                self.assertEqual(expectedResponseLines, responseLines)
            return d.addCallback(gotResponse)
        return chainDeferred.addCallback(queueCommand)

    def assertCommandFailed(self, command, expectedResponse=None,
                            chainDeferred=None):
        """
        Like L{assertCommandResponse}, but expects the command to fail with
        L{ftp.CommandFailed}; optionally checks the failure's response lines.
        """
        if chainDeferred is None:
            chainDeferred = defer.succeed(None)
        def queueCommand(ignored):
            return self.client.queueStringCommand(command)
        chainDeferred.addCallback(queueCommand)
        self.assertFailure(chainDeferred, ftp.CommandFailed)
        def failed(exception):
            if expectedResponse is not None:
                self.assertEqual(
                    expectedResponse, exception.args[0])
        return chainDeferred.addCallback(failed)

    def _anonymousLogin(self):
        # Perform the USER/PASS exchange for the anonymous account.
        d = self.assertCommandResponse(
            'USER anonymous',
            ['331 Guest login ok, type your email address as password.'])
        return self.assertCommandResponse(
            'PASS test@twistedmatrix.com',
            ['230 Anonymous login ok, access restrictions apply.'],
            chainDeferred=d)

    def _userLogin(self):
        """Authenticates the FTP client using the test account."""
        d = self.assertCommandResponse(
            'USER %s' % (self.username),
            ['331 Password required for %s.' % (self.username)])
        return self.assertCommandResponse(
            'PASS %s' % (self.password),
            ['230 User logged in, proceed'],
            chainDeferred=d)
class FTPAnonymousTestCase(FTPServerTestCase):
    """
    Simple tests for an FTP server with a non-default anonymous username.
    The anonymous username used in this test case is "guest".
    """
    userAnonymous = "guest"

    def test_anonymousLogin(self):
        """
        Tests that changing the anonymous username works.  The FTP server
        should not demand a real password for the username 'guest', instead
        letting it log in as anonymous with just an email address as the
        password.
        """
        d = self.assertCommandResponse(
            'USER guest',
            ['331 Guest login ok, type your email address as password.'])
        return self.assertCommandResponse(
            'PASS test@twistedmatrix.com',
            ['230 Anonymous login ok, access restrictions apply.'],
            chainDeferred=d)
class BasicFTPServerTestCase(FTPServerTestCase):
    def testNotLoggedInReply(self):
        """
        When not logged in, most commands other than USER and PASS should
        get NOT_LOGGED_IN errors, but some can be called before USER and PASS.
        """
        loginRequiredCommandList = ['CDUP', 'CWD', 'LIST', 'MODE', 'PASV',
                                    'PWD', 'RETR', 'STRU', 'SYST', 'TYPE']
        loginNotRequiredCommandList = ['FEAT']

        # Issue commands, check responses
        def checkFailResponse(exception, command):
            # A command that requires login must fail with a 530 reply.
            failureResponseLines = exception.args[0]
            self.failUnless(failureResponseLines[-1].startswith("530"),
                            "%s - Response didn't start with 530: %r"
                            % (command, failureResponseLines[-1],))

        def checkPassResponse(result, command):
            # A command allowed before login must not produce a 530 reply.
            result = result[0]
            self.failIf(result.startswith("530"),
                        "%s - Response start with 530: %r"
                        % (command, result,))

        deferreds = []
        for command in loginRequiredCommandList:
            deferred = self.client.queueStringCommand(command)
            self.assertFailure(deferred, ftp.CommandFailed)
            deferred.addCallback(checkFailResponse, command)
            deferreds.append(deferred)

        for command in loginNotRequiredCommandList:
            deferred = self.client.queueStringCommand(command)
            deferred.addCallback(checkPassResponse, command)
            deferreds.append(deferred)

        return defer.DeferredList(deferreds, fireOnOneErrback=True)
def testPASSBeforeUSER(self):
"""
Issuing PASS before USER should give an error.
"""
return self.assertCommandFailed(
'PASS foo',
["503 Incorrect sequence of commands: "
"USER required before PASS"])
def testNoParamsForUSER(self):
"""
Issuing USER without a username is a syntax error.
"""
return self.assertCommandFailed(
'USER',
['500 Syntax error: USER requires an argument.'])
def testNoParamsForPASS(self):
"""
Issuing PASS without a password is a syntax error.
"""
d = self.client.queueStringCommand('USER foo')
return self.assertCommandFailed(
'PASS',
['500 Syntax error: PASS requires an argument.'],
chainDeferred=d)
def testAnonymousLogin(self):
return self._anonymousLogin()
def testQuit(self):
"""
Issuing QUIT should return a 221 message.
"""
d = self._anonymousLogin()
return self.assertCommandResponse(
'QUIT',
['221 Goodbye.'],
chainDeferred=d)
def testAnonymousLoginDenied(self):
# Reconfigure the server to disallow anonymous access, and to have an
# IUsernamePassword checker that always rejects.
self.factory.allowAnonymous = False
denyAlwaysChecker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
self.factory.portal.registerChecker(denyAlwaysChecker,
credentials.IUsernamePassword)
# Same response code as allowAnonymous=True, but different text.
d = self.assertCommandResponse(
'USER anonymous',
['331 Password required for anonymous.'])
# It will be denied. No-one can login.
d = self.assertCommandFailed(
'PASS test@twistedmatrix.com',
['530 Sorry, Authentication failed.'],
chainDeferred=d)
# It's not just saying that. You aren't logged in.
d = self.assertCommandFailed(
'PWD',
['530 Please login with USER and PASS.'],
chainDeferred=d)
return d
def test_anonymousWriteDenied(self):
"""
When an anonymous user attempts to edit the server-side filesystem, they
will receive a 550 error with a descriptive message.
"""
d = self._anonymousLogin()
return self.assertCommandFailed(
'MKD newdir',
['550 Anonymous users are forbidden to change the filesystem'],
chainDeferred=d)
def testUnknownCommand(self):
d = self._anonymousLogin()
return self.assertCommandFailed(
'GIBBERISH',
["502 Command 'GIBBERISH' not implemented"],
chainDeferred=d)
def testRETRBeforePORT(self):
d = self._anonymousLogin()
return self.assertCommandFailed(
'RETR foo',
["503 Incorrect sequence of commands: "
"PORT or PASV required before RETR"],
chainDeferred=d)
def testSTORBeforePORT(self):
d = self._anonymousLogin()
return self.assertCommandFailed(
'STOR foo',
["503 Incorrect sequence of commands: "
"PORT or PASV required before STOR"],
chainDeferred=d)
def testBadCommandArgs(self):
d = self._anonymousLogin()
self.assertCommandFailed(
'MODE z',
["504 Not implemented for parameter 'z'."],
chainDeferred=d)
self.assertCommandFailed(
'STRU I',
["504 Not implemented for parameter 'I'."],
chainDeferred=d)
return d
def testDecodeHostPort(self):
self.assertEqual(ftp.decodeHostPort('25,234,129,22,100,23'),
('25.234.129.22', 25623))
nums = range(6)
for i in range(6):
badValue = list(nums)
badValue[i] = 256
s = ','.join(map(str, badValue))
self.assertRaises(ValueError, ftp.decodeHostPort, s)
def testPASV(self):
# Login
wfd = defer.waitForDeferred(self._anonymousLogin())
yield wfd
wfd.getResult()
# Issue a PASV command, and extract the host and port from the response
pasvCmd = defer.waitForDeferred(self.client.queueStringCommand('PASV'))
yield pasvCmd
responseLines = pasvCmd.getResult()
host, port = ftp.decodeHostPort(responseLines[-1][4:])
# Make sure the server is listening on the port it claims to be
self.assertEqual(port, self.serverProtocol.dtpPort.getHost().port)
# Semi-reasonable way to force cleanup
self.serverProtocol.transport.loseConnection()
testPASV = defer.deferredGenerator(testPASV)
def test_SYST(self):
"""SYST command will always return UNIX Type: L8"""
d = self._anonymousLogin()
self.assertCommandResponse('SYST', ["215 UNIX Type: L8"],
chainDeferred=d)
return d
def test_RNFRandRNTO(self):
"""
Sending the RNFR command followed by RNTO, with valid filenames, will
perform a successful rename operation.
"""
# Create user home folder with a 'foo' file.
self.dirPath.child(self.username).createDirectory()
self.dirPath.child(self.username).child('foo').touch()
d = self._userLogin()
self.assertCommandResponse(
'RNFR foo',
["350 Requested file action pending further information."],
chainDeferred=d)
self.assertCommandResponse(
'RNTO bar',
["250 Requested File Action Completed OK"],
chainDeferred=d)
def check_rename(result):
self.assertTrue(
self.dirPath.child(self.username).child('bar').exists())
return result
d.addCallback(check_rename)
return d
def test_RNFRwithoutRNTO(self):
"""
Sending the RNFR command followed by any command other than RNTO
should return an error informing users that RNFR should be followed
by RNTO.
"""
d = self._anonymousLogin()
self.assertCommandResponse(
'RNFR foo',
["350 Requested file action pending further information."],
chainDeferred=d)
self.assertCommandFailed(
'OTHER don-tcare',
["503 Incorrect sequence of commands: RNTO required after RNFR"],
chainDeferred=d)
return d
def test_portRangeForwardError(self):
"""
Exceptions other than L{error.CannotListenError} which are raised by
C{listenFactory} should be raised to the caller of L{FTP.getDTPPort}.
"""
def listenFactory(portNumber, factory):
raise RuntimeError()
self.serverProtocol.listenFactory = listenFactory
self.assertRaises(RuntimeError, self.serverProtocol.getDTPPort,
protocol.Factory())
def test_portRange(self):
"""
L{FTP.passivePortRange} should determine the ports which
L{FTP.getDTPPort} attempts to bind. If no port from that iterator can
be bound, L{error.CannotListenError} should be raised, otherwise the
first successful result from L{FTP.listenFactory} should be returned.
"""
def listenFactory(portNumber, factory):
if portNumber in (22032, 22033, 22034):
raise error.CannotListenError('localhost', portNumber, 'error')
return portNumber
self.serverProtocol.listenFactory = listenFactory
port = self.serverProtocol.getDTPPort(protocol.Factory())
self.assertEqual(port, 0)
self.serverProtocol.passivePortRange = xrange(22032, 65536)
port = self.serverProtocol.getDTPPort(protocol.Factory())
self.assertEqual(port, 22035)
self.serverProtocol.passivePortRange = xrange(22032, 22035)
self.assertRaises(error.CannotListenError,
self.serverProtocol.getDTPPort,
protocol.Factory())
def test_portRangeInheritedFromFactory(self):
"""
The L{FTP} instances created by L{ftp.FTPFactory.buildProtocol} have
their C{passivePortRange} attribute set to the same object the
factory's C{passivePortRange} attribute is set to.
"""
portRange = xrange(2017, 2031)
self.factory.passivePortRange = portRange
protocol = self.factory.buildProtocol(None)
self.assertEqual(portRange, protocol.wrappedProtocol.passivePortRange)
def testFEAT(self):
"""
When the server receives 'FEAT', it should report the list of supported
features. (Additionally, ensure that the server reports various
particular features that are supported by all Twisted FTP servers.)
"""
d = self.client.queueStringCommand('FEAT')
def gotResponse(responseLines):
self.assertEqual('211-Features:', responseLines[0])
self.assertTrue(' MDTM' in responseLines)
self.assertTrue(' PASV' in responseLines)
self.assertTrue(' TYPE A;I' in responseLines)
self.assertTrue(' SIZE' in responseLines)
self.assertEqual('211 End', responseLines[-1])
return d.addCallback(gotResponse)
def testOPTS(self):
"""
When the server receives 'OPTS something', it should report
that the FTP server does not support the option called 'something'.
"""
d = self._anonymousLogin()
self.assertCommandFailed(
'OPTS something',
["502 Option 'something' not implemented."],
chainDeferred=d,
)
return d
def test_STORreturnsErrorFromOpen(self):
"""
Any FTP error raised inside STOR while opening the file is returned
to the client.
"""
# We create a folder inside user's home folder and then
# we try to write a file with the same name.
# This will trigger an FTPCmdError.
self.dirPath.child(self.username).createDirectory()
self.dirPath.child(self.username).child('folder').createDirectory()
d = self._userLogin()
def sendPASV(result):
"""
Send the PASV command required before port.
"""
return self.client.queueStringCommand('PASV')
def mockDTPInstance(result):
"""
Fake an incoming connection and create a mock DTPInstance so
that PORT command will start processing the request.
"""
self.serverProtocol.dtpFactory.deferred.callback(None)
self.serverProtocol.dtpInstance = object()
return result
d.addCallback(sendPASV)
d.addCallback(mockDTPInstance)
self.assertCommandFailed(
'STOR folder',
["550 folder: is a directory"],
chainDeferred=d,
)
return d
def test_STORunknownErrorBecomesFileNotFound(self):
"""
Any non FTP error raised inside STOR while opening the file is
converted into FileNotFound error and returned to the client together
with the path.
The unknown error is logged.
"""
d = self._userLogin()
def failingOpenForWriting(ignore):
return defer.fail(AssertionError())
def sendPASV(result):
"""
Send the PASV command required before port.
"""
return self.client.queueStringCommand('PASV')
def mockDTPInstance(result):
"""
Fake an incoming connection and create a mock DTPInstance so
that PORT command will start processing the request.
"""
self.serverProtocol.dtpFactory.deferred.callback(None)
self.serverProtocol.dtpInstance = object()
self.serverProtocol.shell.openForWriting = failingOpenForWriting
return result
def checkLogs(result):
"""
Check that unknown errors are logged.
"""
logs = self.flushLoggedErrors()
self.assertEqual(1, len(logs))
self.assertIsInstance(logs[0].value, AssertionError)
d.addCallback(sendPASV)
d.addCallback(mockDTPInstance)
self.assertCommandFailed(
'STOR something',
["550 something: No such file or directory."],
chainDeferred=d,
)
d.addCallback(checkLogs)
return d
class FTPServerTestCaseAdvancedClient(FTPServerTestCase):
    """
    Test FTP server with the L{ftp.FTPClient} class.
    """
    clientFactory = ftp.FTPClient
    def test_anonymousSTOR(self):
        """
        Try to make an STOR as anonymous, and check that we got a permission
        denied error.
        """
        def eb(res):
            res.trap(ftp.CommandFailed)
            self.assertEqual(res.value.args[0][0],
                             '550 foo: Permission denied.')
        d1, d2 = self.client.storeFile('foo')
        d2.addErrback(eb)
        return defer.gatherResults([d1, d2])
    def test_STORtransferErrorIsReturned(self):
        """
        Any FTP error raised by STOR while transferring the file is returned
        to the client.
        """
        # Make a failing file writer.
        class FailingFileWriter(ftp._FileWriter):
            def receive(self):
                return defer.fail(ftp.IsADirectoryError("failing_file"))
        def failingSTOR(a, b):
            return defer.succeed(FailingFileWriter(None))
        # Monkey patch the shell so it returns a file writer that will
        # fail during transfer.
        self.patch(ftp.FTPAnonymousShell, 'openForWriting', failingSTOR)
        def eb(res):
            res.trap(ftp.CommandFailed)
            # The writer's error is logged exactly once...
            logs = self.flushLoggedErrors()
            self.assertEqual(1, len(logs))
            self.assertIsInstance(logs[0].value, ftp.IsADirectoryError)
            # ...and reported to the client as a 550 response.
            self.assertEqual(
                res.value.args[0][0],
                "550 failing_file: is a directory")
        d1, d2 = self.client.storeFile('failing_file')
        d2.addErrback(eb)
        return defer.gatherResults([d1, d2])
    def test_STORunknownTransferErrorBecomesAbort(self):
        """
        Any non FTP error raised by STOR while transferring the file is
        converted into a critical error and transfer is closed.
        The unknown error is logged.
        """
        class FailingFileWriter(ftp._FileWriter):
            def receive(self):
                # Non-FTP error: should be turned into a 426 abort.
                return defer.fail(AssertionError())
        def failingSTOR(a, b):
            return defer.succeed(FailingFileWriter(None))
        # Monkey patch the shell so it returns a file writer that will
        # fail during transfer.
        self.patch(ftp.FTPAnonymousShell, 'openForWriting', failingSTOR)
        def eb(res):
            res.trap(ftp.CommandFailed)
            logs = self.flushLoggedErrors()
            self.assertEqual(1, len(logs))
            self.assertIsInstance(logs[0].value, AssertionError)
            self.assertEqual(
                res.value.args[0][0],
                "426 Transfer aborted. Data connection closed.")
        d1, d2 = self.client.storeFile('failing_file')
        d2.addErrback(eb)
        return defer.gatherResults([d1, d2])
    def test_RETRreadError(self):
        """
        Any errors during reading a file inside a RETR should be returned to
        the client.
        """
        # Make a failing file reading.
        class FailingFileReader(ftp._FileReader):
            def send(self, consumer):
                return defer.fail(ftp.IsADirectoryError("blah"))
        def failingRETR(a, b):
            return defer.succeed(FailingFileReader(None))
        # Monkey patch the shell so it returns a file reader that will
        # fail.
        self.patch(ftp.FTPAnonymousShell, 'openForReading', failingRETR)
        def check_response(failure):
            self.flushLoggedErrors()
            failure.trap(ftp.CommandFailed)
            # The transfer announces itself (125) before the read fails (550).
            self.assertEqual(
                failure.value.args[0][0],
                "125 Data connection already open, starting transfer")
            self.assertEqual(
                failure.value.args[0][1],
                "550 blah: is a directory")
        proto = _BufferingProtocol()
        d = self.client.retrieveFile('failing_file', proto)
        d.addErrback(check_response)
        return d
class FTPServerPasvDataConnectionTestCase(FTPServerTestCase):
    """
    Data-transfer tests (LIST, NLST, RETR) using PASV-mode connections.
    """
    def _makeDataConnection(self, ignored=None):
        """
        Set up a passive-mode data connection and fire with the connected
        L{_BufferingProtocol} instance.
        """
        # Establish a passive data connection (i.e. client connecting to
        # server).
        d = self.client.queueStringCommand('PASV')
        def gotPASV(responseLines):
            host, port = ftp.decodeHostPort(responseLines[-1][4:])
            cc = protocol.ClientCreator(reactor, _BufferingProtocol)
            return cc.connectTCP('127.0.0.1', port)
        return d.addCallback(gotPASV)
    def _download(self, command, chainDeferred=None):
        """
        Run C{command} over a fresh data connection and fire with all of the
        bytes that arrived on that connection.
        """
        if chainDeferred is None:
            chainDeferred = defer.succeed(None)
        chainDeferred.addCallback(self._makeDataConnection)
        def queueCommand(downloader):
            # wait for the command to return, and the download connection to be
            # closed.
            d1 = self.client.queueStringCommand(command)
            d2 = downloader.d
            return defer.gatherResults([d1, d2])
        chainDeferred.addCallback(queueCommand)
        def downloadDone((ignored, downloader)):
            return downloader.buffer
        return chainDeferred.addCallback(downloadDone)
    def test_LISTEmpty(self):
        """
        When listing empty folders, LIST returns an empty response.
        """
        d = self._anonymousLogin()
        # No files, so the file listing should be empty
        self._download('LIST', chainDeferred=d)
        def checkEmpty(result):
            self.assertEqual('', result)
        return d.addCallback(checkEmpty)
    def test_LISTWithBinLsFlags(self):
        """
        LIST ignores requests for folder with names like '-al' and will list
        the content of current folder.
        """
        os.mkdir(os.path.join(self.directory, 'foo'))
        os.mkdir(os.path.join(self.directory, 'bar'))
        # Login
        d = self._anonymousLogin()
        self._download('LIST -aL', chainDeferred=d)
        def checkDownload(download):
            # The last space-separated field of each line is the name.
            names = []
            for line in download.splitlines():
                names.append(line.split(' ')[-1])
            self.assertEqual(2, len(names))
            self.assertIn('foo', names)
            self.assertIn('bar', names)
        return d.addCallback(checkDownload)
    def test_LISTWithContent(self):
        """
        LIST returns all folder's members, each member listed on a separate
        line and with name and other details.
        """
        os.mkdir(os.path.join(self.directory, 'foo'))
        os.mkdir(os.path.join(self.directory, 'bar'))
        # Login
        d = self._anonymousLogin()
        # We expect 2 lines because there are two files.
        self._download('LIST', chainDeferred=d)
        def checkDownload(download):
            self.assertEqual(2, len(download[:-2].split('\r\n')))
        d.addCallback(checkDownload)
        # Download a names-only listing.
        self._download('NLST ', chainDeferred=d)
        def checkDownload(download):
            filenames = download[:-2].split('\r\n')
            filenames.sort()
            self.assertEqual(['bar', 'foo'], filenames)
        d.addCallback(checkDownload)
        # Download a listing of the 'foo' subdirectory. 'foo' has no files, so
        # the file listing should be empty.
        self._download('LIST foo', chainDeferred=d)
        def checkDownload(download):
            self.assertEqual('', download)
        d.addCallback(checkDownload)
        # Change the current working directory to 'foo'.
        def chdir(ignored):
            return self.client.queueStringCommand('CWD foo')
        d.addCallback(chdir)
        # Download a listing from within 'foo', and again it should be empty,
        # because LIST uses the working directory by default.
        self._download('LIST', chainDeferred=d)
        def checkDownload(download):
            self.assertEqual('', download)
        return d.addCallback(checkDownload)
    def _listTestHelper(self, command, listOutput, expectedOutput):
        """
        Exercise handling by the implementation of I{LIST} or I{NLST} of certain
        return values and types from an L{IFTPShell.list} implementation.
        This will issue C{command} and assert that if the L{IFTPShell.list}
        implementation includes C{listOutput} as one of the file entries then
        the result given to the client is matches C{expectedOutput}.
        @param command: Either C{b"LIST"} or C{b"NLST"}
        @type command: L{bytes}
        @param listOutput: A value suitable to be used as an element of the list
            returned by L{IFTPShell.list}. Vary the values and types of the
            contents to exercise different code paths in the server's handling
            of this result.
        @param expectedOutput: A line of output to expect as a result of
            C{listOutput} being transformed into a response to the command
            issued.
        @type expectedOutput: L{bytes}
        @return: A L{Deferred} which fires when the test is done, either with an
            L{Failure} if the test failed or with a function object if it
            succeeds. The function object is the function which implements
            L{IFTPShell.list} (and is useful to make assertions about what
            warnings might have been emitted).
        @rtype: L{Deferred}
        """
        # Login
        d = self._anonymousLogin()
        def patchedList(segments, keys=()):
            return defer.succeed([listOutput])
        def loggedIn(result):
            # Replace the shell's list implementation after login, when the
            # shell exists.
            self.serverProtocol.shell.list = patchedList
            return result
        d.addCallback(loggedIn)
        self._download('%s something' % (command,), chainDeferred=d)
        def checkDownload(download):
            self.assertEqual(expectedOutput, download)
            return patchedList
        return d.addCallback(checkDownload)
    def test_LISTUnicode(self):
        """
        Unicode filenames returned from L{IFTPShell.list} are encoded using
        UTF-8 before being sent with the response.
        """
        return self._listTestHelper(
            "LIST",
            (u'my resum\xe9', (0, 1, 0777, 0, 0, 'user', 'group')),
            'drwxrwxrwx 0 user group '
            '0 Jan 01 1970 my resum\xc3\xa9\r\n')
    def test_LISTNonASCIIBytes(self):
        """
        When LIST receive a filename as byte string from L{IFTPShell.list}
        it will just pass the data to lower level without any change.
        """
        return self._listTestHelper(
            "LIST",
            ('my resum\xc3\xa9', (0, 1, 0777, 0, 0, 'user', 'group')),
            'drwxrwxrwx 0 user group '
            '0 Jan 01 1970 my resum\xc3\xa9\r\n')
    def testManyLargeDownloads(self):
        """
        RETR transfers files of a range of sizes without truncation.
        """
        # Login
        d = self._anonymousLogin()
        # Download a range of different size files
        for size in range(100000, 110000, 500):
            fObj = file(os.path.join(self.directory, '%d.txt' % (size,)), 'wb')
            fObj.write('x' * size)
            fObj.close()
            self._download('RETR %d.txt' % (size,), chainDeferred=d)
            # Bind size as a default argument so each callback checks the
            # file created during its own loop iteration.
            def checkDownload(download, size=size):
                self.assertEqual(size, len(download))
            d.addCallback(checkDownload)
        return d
    def test_downloadFolder(self):
        """
        When RETR is called for a folder, it will fail complaining that
        the path is a folder.
        """
        # Make a directory in the current working directory
        self.dirPath.child('foo').createDirectory()
        # Login
        d = self._anonymousLogin()
        d.addCallback(self._makeDataConnection)
        def retrFolder(downloader):
            downloader.transport.loseConnection()
            deferred = self.client.queueStringCommand('RETR foo')
            return deferred
        d.addCallback(retrFolder)
        def failOnSuccess(result):
            raise AssertionError('Downloading a folder should not succeed.')
        d.addCallback(failOnSuccess)
        def checkError(failure):
            failure.trap(ftp.CommandFailed)
            self.assertEqual(
                ['550 foo: is a directory'], failure.value.message)
            current_errors = self.flushLoggedErrors()
            self.assertEqual(
                0, len(current_errors),
                'No errors should be logged while downloading a folder.')
        d.addErrback(checkError)
        return d
    def test_NLSTEmpty(self):
        """
        NLST with no argument returns the directory listing for the current
        working directory.
        """
        # Login
        d = self._anonymousLogin()
        # Touch a file in the current working directory
        self.dirPath.child('test.txt').touch()
        # Make a directory in the current working directory
        self.dirPath.child('foo').createDirectory()
        self._download('NLST ', chainDeferred=d)
        def checkDownload(download):
            filenames = download[:-2].split('\r\n')
            filenames.sort()
            self.assertEqual(['foo', 'test.txt'], filenames)
        return d.addCallback(checkDownload)
    def test_NLSTNonexistent(self):
        """
        NLST on a non-existent file/directory returns nothing.
        """
        # Login
        d = self._anonymousLogin()
        self._download('NLST nonexistent.txt', chainDeferred=d)
        def checkDownload(download):
            self.assertEqual('', download)
        return d.addCallback(checkDownload)
    def test_NLSTUnicode(self):
        """
        NLST will receive Unicode filenames for IFTPShell.list, and will
        encode them using UTF-8.
        """
        return self._listTestHelper(
            "NLST",
            (u'my resum\xe9', (0, 1, 0777, 0, 0, 'user', 'group')),
            'my resum\xc3\xa9\r\n')
    def test_NLSTNonASCIIBytes(self):
        """
        NLST will just pass the non-Unicode data to lower level.
        """
        return self._listTestHelper(
            "NLST",
            ('my resum\xc3\xa9', (0, 1, 0777, 0, 0, 'user', 'group')),
            'my resum\xc3\xa9\r\n')
    def test_NLSTOnPathToFile(self):
        """
        NLST on an existent file returns only the path to that file.
        """
        # Login
        d = self._anonymousLogin()
        # Touch a file in the current working directory
        self.dirPath.child('test.txt').touch()
        self._download('NLST test.txt', chainDeferred=d)
        def checkDownload(download):
            filenames = download[:-2].split('\r\n')
            self.assertEqual(['test.txt'], filenames)
        return d.addCallback(checkDownload)
class FTPServerPortDataConnectionTestCase(FTPServerPasvDataConnectionTestCase):
    """
    Re-run the PASV data-connection tests using active (PORT) mode
    connections instead.
    """
    def setUp(self):
        """
        Track the data ports opened during a test so tearDown can close them.
        """
        self.dataPorts = []
        return FTPServerPasvDataConnectionTestCase.setUp(self)
    def _makeDataConnection(self, ignored=None):
        """
        Listen locally, issue a PORT command pointing at that listener, and
        fire with the L{_BufferingProtocol} built when the server connects.
        """
        # Establish an active data connection (i.e. server connecting to
        # client).
        deferred = defer.Deferred()
        class DataFactory(protocol.ServerFactory):
            protocol = _BufferingProtocol
            def buildProtocol(self, addr):
                p = protocol.ServerFactory.buildProtocol(self, addr)
                reactor.callLater(0, deferred.callback, p)
                return p
        dataPort = reactor.listenTCP(0, DataFactory(), interface='127.0.0.1')
        self.dataPorts.append(dataPort)
        cmd = 'PORT ' + ftp.encodeHostPort('127.0.0.1', dataPort.getHost().port)
        self.client.queueStringCommand(cmd)
        return deferred
    def tearDown(self):
        """
        Stop listening on every data port opened by the test, in addition to
        the superclass's cleanup.
        """
        l = [defer.maybeDeferred(port.stopListening) for port in self.dataPorts]
        d = defer.maybeDeferred(
            FTPServerPasvDataConnectionTestCase.tearDown, self)
        l.append(d)
        return defer.DeferredList(l, fireOnOneErrback=True)
    def testPORTCannotConnect(self):
        """
        A PORT command naming an address the server cannot connect to fails
        with a 425 response.
        """
        # Login
        d = self._anonymousLogin()
        # Listen on a port, and immediately stop listening as a way to find a
        # port number that is definitely closed.
        def loggedIn(ignored):
            port = reactor.listenTCP(0, protocol.Factory(),
                                     interface='127.0.0.1')
            portNum = port.getHost().port
            d = port.stopListening()
            d.addCallback(lambda _: portNum)
            return d
        d.addCallback(loggedIn)
        # Tell the server to connect to that port with a PORT command, and
        # verify that it fails with the right error.
        def gotPortNum(portNum):
            return self.assertCommandFailed(
                'PORT ' + ftp.encodeHostPort('127.0.0.1', portNum),
                ["425 Can't open data connection."])
        return d.addCallback(gotPortNum)
    def test_nlstGlobbing(self):
        """
        When Unix shell globbing is used with NLST only files matching the
        pattern will be returned.
        """
        self.dirPath.child('test.txt').touch()
        self.dirPath.child('ceva.txt').touch()
        self.dirPath.child('no.match').touch()
        d = self._anonymousLogin()
        self._download('NLST *.txt', chainDeferred=d)
        def checkDownload(download):
            filenames = download[:-2].split('\r\n')
            filenames.sort()
            self.assertEqual(['ceva.txt', 'test.txt'], filenames)
        return d.addCallback(checkDownload)
class DTPFactoryTests(unittest.TestCase):
    """
    Tests for L{ftp.DTPFactory}.
    """
    def setUp(self):
        """
        Create a fake protocol interpreter and a L{ftp.DTPFactory} instance to
        test.
        """
        self.reactor = task.Clock()
        class ProtocolInterpreter(object):
            dtpInstance = None
        self.protocolInterpreter = ProtocolInterpreter()
        self.factory = ftp.DTPFactory(
            self.protocolInterpreter, None, self.reactor)
    def test_setTimeout(self):
        """
        L{ftp.DTPFactory.setTimeout} uses the reactor passed to its initializer
        to set up a timed event to time out the DTP setup after the specified
        number of seconds.
        """
        # Make sure the factory's deferred fails with the right exception, and
        # make it so we can tell exactly when it fires.
        finished = []
        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
        d.addCallback(finished.append)
        self.factory.setTimeout(6)
        # Advance the clock almost to the timeout
        self.reactor.advance(5)
        # Nothing should have happened yet.
        self.assertFalse(finished)
        # Advance it to the configured timeout.
        self.reactor.advance(1)
        # Now the Deferred should have failed with TimeoutError.
        self.assertTrue(finished)
        # There should also be no calls left in the reactor.
        self.assertFalse(self.reactor.calls)
    def test_buildProtocolOnce(self):
        """
        A L{ftp.DTPFactory} instance's C{buildProtocol} method can be used once
        to create a L{ftp.DTP} instance.
        """
        # Named dtpInstance (not 'protocol') to avoid shadowing the
        # module-level twisted.internet.protocol import.
        dtpInstance = self.factory.buildProtocol(None)
        self.assertIsInstance(dtpInstance, ftp.DTP)
        # A subsequent call returns None.
        self.assertIdentical(self.factory.buildProtocol(None), None)
    def test_timeoutAfterConnection(self):
        """
        If a timeout has been set up using L{ftp.DTPFactory.setTimeout}, it is
        cancelled by L{ftp.DTPFactory.buildProtocol}.
        """
        self.factory.setTimeout(10)
        # buildProtocol is called only for its side effect of cancelling the
        # timeout; the returned protocol is intentionally discarded (it was
        # previously bound to an unused local shadowing the 'protocol'
        # module).
        self.factory.buildProtocol(None)
        # Make sure the call is no longer active.
        self.assertFalse(self.reactor.calls)
    def test_connectionAfterTimeout(self):
        """
        If L{ftp.DTPFactory.buildProtocol} is called after the timeout
        specified by L{ftp.DTPFactory.setTimeout} has elapsed, C{None} is
        returned.
        """
        # Handle the error so it doesn't get logged.
        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
        # Set up the timeout and then cause it to elapse so the Deferred does
        # fail.
        self.factory.setTimeout(10)
        self.reactor.advance(10)
        # Try to get a protocol - we should not be able to.
        self.assertIdentical(self.factory.buildProtocol(None), None)
        # Make sure the Deferred is doing the right thing.
        return d
    def test_timeoutAfterConnectionFailed(self):
        """
        L{ftp.DTPFactory.deferred} fails with L{PortConnectionError} when
        L{ftp.DTPFactory.clientConnectionFailed} is called. If the timeout
        specified with L{ftp.DTPFactory.setTimeout} expires after that, nothing
        additional happens.
        """
        finished = []
        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
        d.addCallback(finished.append)
        self.factory.setTimeout(10)
        self.assertFalse(finished)
        self.factory.clientConnectionFailed(None, None)
        self.assertTrue(finished)
        # Expiring the timeout after the failure must not fire anything else.
        self.reactor.advance(10)
        return d
    def test_connectionFailedAfterTimeout(self):
        """
        If L{ftp.DTPFactory.clientConnectionFailed} is called after the timeout
        specified by L{ftp.DTPFactory.setTimeout} has elapsed, nothing beyond
        the normal timeout before happens.
        """
        # Handle the error so it doesn't get logged.
        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
        # Set up the timeout and then cause it to elapse so the Deferred does
        # fail.
        self.factory.setTimeout(10)
        self.reactor.advance(10)
        # Now fail the connection attempt. This should do nothing. In
        # particular, it should not raise an exception.
        self.factory.clientConnectionFailed(None, defer.TimeoutError("foo"))
        # Give the Deferred to trial so it can make sure it did what we
        # expected.
        return d
class DTPTests(unittest.TestCase):
    """
    Tests for L{ftp.DTP}.
    The DTP instances in these tests are generated using
    DTPFactory.buildProtocol()
    """
    def setUp(self):
        """
        Create a fake protocol interpreter, a L{ftp.DTPFactory} instance,
        and dummy transport to help with tests.
        """
        self.reactor = task.Clock()
        class FakeProtocolInterpreter(object):
            dtpInstance = None
        self.protocolInterpreter = FakeProtocolInterpreter()
        self.factory = ftp.DTPFactory(
            self.protocolInterpreter, None, self.reactor)
        self.transport = proto_helpers.StringTransportWithDisconnection()
    def test_sendLineNewline(self):
        """
        L{ftp.DTP.sendLine} writes the line passed to it plus a line delimiter
        to its transport.
        """
        dtp = self.factory.buildProtocol(None)
        dtp.makeConnection(self.transport)
        content = 'line content'
        dtp.sendLine(content)
        # The transport must have received the line plus CRLF, nothing else.
        self.assertEqual(content + '\r\n', self.transport.value())
# -- Client Tests -----------------------------------------------------------
class PrintLines(protocol.Protocol):
    """
    Protocol which writes a fixed list of lines (CRLF-terminated) as soon as
    it is connected, then disconnects. Helper for FTPFileListingTests.
    """
    def __init__(self, lines):
        self._lines = lines
    def connectionMade(self):
        for text in self._lines:
            self.transport.write(text + "\r\n")
        self.transport.loseConnection()
class MyFTPFileListProtocol(ftp.FTPFileListProtocol):
    """
    File-listing parser which collects every line delivered to
    C{unknownLine} in C{self.other} so tests can inspect unparsable input.
    """
    def __init__(self):
        self.other = []
        ftp.FTPFileListProtocol.__init__(self)
    def unknownLine(self, line):
        self.other.append(line)
class FTPFileListingTests(unittest.TestCase):
def getFilesForLines(self, lines):
fileList = MyFTPFileListProtocol()
d = loopback.loopbackAsync(PrintLines(lines), fileList)
d.addCallback(lambda _: (fileList.files, fileList.other))
return d
def testOneLine(self):
# This example line taken from the docstring for FTPFileListProtocol
line = '-rw-r--r-- 1 root other 531 Jan 29 03:26 README'
def check(((file,), other)):
self.failIf(other, 'unexpect unparsable lines: %s' % repr(other))
self.failUnless(file['filetype'] == '-', 'misparsed fileitem')
self.failUnless(file['perms'] == 'rw-r--r--', 'misparsed perms')
self.failUnless(file['owner'] == 'root', 'misparsed fileitem')
self.failUnless(file['group'] == 'other', 'misparsed fileitem')
self.failUnless(file['size'] == 531, 'misparsed fileitem')
self.failUnless(file['date'] == 'Jan 29 03:26', 'misparsed fileitem')
self.failUnless(file['filename'] == 'README', 'misparsed fileitem')
self.failUnless(file['nlinks'] == 1, 'misparsed nlinks')
self.failIf(file['linktarget'], 'misparsed linktarget')
return self.getFilesForLines([line]).addCallback(check)
def testVariantLines(self):
line1 = 'drw-r--r-- 2 root other 531 Jan 9 2003 A'
line2 = 'lrw-r--r-- 1 root other 1 Jan 29 03:26 B -> A'
line3 = 'woohoo! '
def check(((file1, file2), (other,))):
self.failUnless(other == 'woohoo! \r', 'incorrect other line')
# file 1
self.failUnless(file1['filetype'] == 'd', 'misparsed fileitem')
self.failUnless(file1['perms'] == 'rw-r--r--', 'misparsed perms')
self.failUnless(file1['owner'] == 'root', 'misparsed owner')
self.failUnless(file1['group'] == 'other', 'misparsed group')
self.failUnless(file1['size'] == 531, 'misparsed size')
self.failUnless(file1['date'] == 'Jan 9 2003', 'misparsed date')
self.failUnless(file1['filename'] == 'A', 'misparsed filename')
self.failUnless(file1['nlinks'] == 2, 'misparsed nlinks')
self.failIf(file1['linktarget'], 'misparsed linktarget')
# file 2
self.failUnless(file2['filetype'] == 'l', 'misparsed fileitem')
self.failUnless(file2['perms'] == 'rw-r--r--', 'misparsed perms')
self.failUnless(file2['owner'] == 'root', 'misparsed owner')
self.failUnless(file2['group'] == 'other', 'misparsed group')
self.failUnless(file2['size'] == 1, 'misparsed size')
self.failUnless(file2['date'] == 'Jan 29 03:26', 'misparsed date')
self.failUnless(file2['filename'] == 'B', 'misparsed filename')
self.failUnless(file2['nlinks'] == 1, 'misparsed nlinks')
self.failUnless(file2['linktarget'] == 'A', 'misparsed linktarget')
return self.getFilesForLines([line1, line2, line3]).addCallback(check)
def testUnknownLine(self):
def check((files, others)):
self.failIf(files, 'unexpected file entries')
self.failUnless(others == ['ABC\r', 'not a file\r'],
'incorrect unparsable lines: %s' % repr(others))
return self.getFilesForLines(['ABC', 'not a file']).addCallback(check)
    def test_filenameWithUnescapedSpace(self):
        '''
        Will parse filenames and linktargets containing unescaped
        space characters.
        '''
        line1 = 'drw-r--r--   2 root     other        531 Jan  9  2003 A B'
        line2 = (
            'lrw-r--r--   1 root     other          1 Jan 29 03:26 '
            'B A -> D C/A B'
        )
        def check((files, others)):
            self.assertEqual([], others, 'unexpected others entries')
            # Spaces inside names are kept; only ' -> ' separates a link
            # name from its target.
            self.assertEqual(
                'A B', files[0]['filename'], 'misparsed filename')
            self.assertEqual(
                'B A', files[1]['filename'], 'misparsed filename')
            self.assertEqual(
                'D C/A B', files[1]['linktarget'], 'misparsed linktarget')
        return self.getFilesForLines([line1, line2]).addCallback(check)
    def test_filenameWithEscapedSpace(self):
        '''
        Will parse filenames and linktargets containing escaped
        space characters.
        '''
        line1 = 'drw-r--r--   2 root     other        531 Jan  9  2003 A\ B'
        line2 = (
            'lrw-r--r--   1 root     other          1 Jan 29 03:26 '
            'B A -> D\ C/A B'
        )
        def check((files, others)):
            self.assertEqual([], others, 'unexpected others entries')
            # The backslash escape is dropped in the parsed result:
            # 'A\ B' on the wire yields the filename 'A B'.
            self.assertEqual(
                'A B', files[0]['filename'], 'misparsed filename')
            self.assertEqual(
                'B A', files[1]['filename'], 'misparsed filename')
            self.assertEqual(
                'D C/A B', files[1]['linktarget'], 'misparsed linktarget')
        return self.getFilesForLines([line1, line2]).addCallback(check)
def testYear(self):
# This example derived from bug description in issue 514.
fileList = ftp.FTPFileListProtocol()
exampleLine = (
'-rw-r--r-- 1 root other 531 Jan 29 2003 README\n')
class PrintLine(protocol.Protocol):
def connectionMade(self):
self.transport.write(exampleLine)
self.transport.loseConnection()
def check(ignored):
file = fileList.files[0]
self.failUnless(file['size'] == 531, 'misparsed fileitem')
self.failUnless(file['date'] == 'Jan 29 2003', 'misparsed fileitem')
self.failUnless(file['filename'] == 'README', 'misparsed fileitem')
d = loopback.loopbackAsync(PrintLine(), fileList)
return d.addCallback(check)
class FTPClientTests(unittest.TestCase):
    """
    Tests for L{ftp.FTPClient} run against a canned server reached over a
    real TCP connection on the loopback interface.
    """

    def testFailedRETR(self):
        """
        When the server answers RETR with a 550, the Deferred returned by
        L{ftp.FTPClient.retrieveFile} fails with L{ftp.CommandFailed}.
        """
        f = protocol.Factory()
        f.noisy = 0
        port = reactor.listenTCP(0, f, interface="127.0.0.1")
        self.addCleanup(port.stopListening)
        portNum = port.getHost().port
        # This test data derived from a bug report by ranty on #twisted
        responses = ['220 ready, dude (vsFTPd 1.0.0: beat me, break me)',
                     # USER anonymous
                     '331 Please specify the password.',
                     # PASS twisted@twistedmatrix.com
                     '230 Login successful. Have fun.',
                     # TYPE I
                     '200 Binary it is, then.',
                     # PASV
                     '227 Entering Passive Mode (127,0,0,1,%d,%d)' %
                     (portNum >> 8, portNum & 0xff),
                     # RETR /file/that/doesnt/exist
                     '550 Failed to open file.']
        f.buildProtocol = lambda addr: PrintLines(responses)

        # The client under test is created by the ClientCreator below.
        # (An unused, directly-constructed FTPClient that used to be
        # instantiated here has been removed as dead code.)
        cc = protocol.ClientCreator(reactor, ftp.FTPClient, passive=1)
        d = cc.connectTCP('127.0.0.1', portNum)
        def gotClient(client):
            p = protocol.Protocol()
            return client.retrieveFile('/file/that/doesnt/exist', p)
        d.addCallback(gotClient)
        return self.assertFailure(d, ftp.CommandFailed)

    def test_errbacksUponDisconnect(self):
        """
        Test the ftp command errbacks when a connection lost happens during
        the operation.
        """
        ftpClient = ftp.FTPClient()
        tr = proto_helpers.StringTransportWithDisconnection()
        ftpClient.makeConnection(tr)
        tr.protocol = ftpClient
        d = ftpClient.list('some path', Dummy())
        m = []
        # Named 'reason' (not 'failure') to avoid shadowing the module.
        def _eb(reason):
            m.append(reason)
            return None
        d.addErrback(_eb)
        from twisted.internet.main import CONNECTION_LOST
        ftpClient.connectionLost(failure.Failure(CONNECTION_LOST))
        # The errback must already have run by now.
        self.failUnless(m, m)
        return d
class FTPClientTestCase(unittest.TestCase):
"""
Test advanced FTP client commands.
"""
def setUp(self):
"""
Create a FTP client and connect it to fake transport.
"""
self.client = ftp.FTPClient()
self.transport = proto_helpers.StringTransportWithDisconnection()
self.client.makeConnection(self.transport)
self.transport.protocol = self.client
    def tearDown(self):
        """
        Deliver disconnection notification to the client so that it can
        perform any cleanup which may be required.
        """
        # A real transport would do this when the connection closes; doing
        # it by hand keeps the client from holding pending state between
        # tests.
        self.client.connectionLost(error.ConnectionLost())
    def _testLogin(self):
        """
        Drive the canned anonymous login exchange: inject server responses
        and assert the client sends USER, then TYPE I.
        """
        # Nothing is sent before the server responds.
        self.assertEqual(self.transport.value(), '')
        self.client.lineReceived(
            '331 Guest login ok, type your email address as password.')
        self.assertEqual(self.transport.value(), 'USER anonymous\r\n')
        self.transport.clear()
        self.client.lineReceived(
            '230 Anonymous login ok, access restrictions apply.')
        self.assertEqual(self.transport.value(), 'TYPE I\r\n')
        self.transport.clear()
        self.client.lineReceived('200 Type set to I.')
    def test_CDUP(self):
        """
        Test the CDUP command.
        L{ftp.FTPClient.cdup} should return a Deferred which fires with a
        sequence of one element which is the string the server sent
        indicating that the command was executed successfully.
        (XXX - This is a bad API)
        """
        def cbCdup(res):
            self.assertEqual(res[0], '250 Requested File Action Completed OK')
        self._testLogin()
        d = self.client.cdup().addCallback(cbCdup)
        self.assertEqual(self.transport.value(), 'CDUP\r\n')
        self.transport.clear()
        # Feeding the 250 response completes the command and fires d.
        self.client.lineReceived('250 Requested File Action Completed OK')
        return d
    def test_failedCDUP(self):
        """
        Test L{ftp.FTPClient.cdup}'s handling of a failed CDUP command.
        When the CDUP command fails, the returned Deferred should errback
        with L{ftp.CommandFailed}.
        """
        self._testLogin()
        d = self.client.cdup()
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'CDUP\r\n')
        self.transport.clear()
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 ..: No such file or directory')
        return d
    def test_PWD(self):
        """
        Test the PWD command.
        L{ftp.FTPClient.pwd} should return a Deferred which fires with a
        sequence of one element which is a string representing the current
        working directory on the server.
        (XXX - This is a bad API)
        """
        def cbPwd(res):
            # The 257 reply carries the directory in quotes; it is
            # extracted with parsePWDResponse.
            self.assertEqual(ftp.parsePWDResponse(res[0]), "/bar/baz")
        self._testLogin()
        d = self.client.pwd().addCallback(cbPwd)
        self.assertEqual(self.transport.value(), 'PWD\r\n')
        self.client.lineReceived('257 "/bar/baz"')
        return d
    def test_failedPWD(self):
        """
        Test a failure in PWD command.
        When the PWD command fails, the returned Deferred should errback
        with L{ftp.CommandFailed}.
        """
        self._testLogin()
        d = self.client.pwd()
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PWD\r\n')
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 /bar/baz: No such file or directory')
        return d
    def test_CWD(self):
        """
        Test the CWD command.
        L{ftp.FTPClient.cwd} should return a Deferred which fires with a
        sequence of one element which is the string the server sent
        indicating that the command was executed successfully.
        (XXX - This is a bad API)
        """
        def cbCwd(res):
            self.assertEqual(res[0], '250 Requested File Action Completed OK')
        self._testLogin()
        d = self.client.cwd("bar/foo").addCallback(cbCwd)
        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
        # Feeding the 250 response completes the command and fires d.
        self.client.lineReceived('250 Requested File Action Completed OK')
        return d
    def test_failedCWD(self):
        """
        Test a failure in CWD command.
        When the CWD command fails, the returned Deferred should errback
        with L{ftp.CommandFailed}.
        """
        self._testLogin()
        d = self.client.cwd("bar/foo")
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 bar/foo: No such file or directory')
        return d
    def test_passiveRETR(self):
        """
        Test the RETR command in passive mode: get a file and verify its
        content.
        L{ftp.FTPClient.retrieveFile} should return a Deferred which fires
        with the protocol instance passed to it after the download has
        completed.
        (XXX - This API should be based on producers and consumers)
        """
        def cbRetr(res, proto):
            self.assertEqual(proto.buffer, 'x' * 1000)
        def cbConnect(host, port, factory):
            # Stand-in for the client's connectFactory: fakes the passive
            # data connection and delivers the file's bytes.
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            proto.dataReceived("x" * 1000)
            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
        self.client.connectFactory = cbConnect
        self._testLogin()
        proto = _BufferingProtocol()
        d = self.client.retrieveFile("spam", proto)
        d.addCallback(cbRetr, proto)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        # The 227 reply triggers the (fake) data connection, then RETR.
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
        self.transport.clear()
        self.client.lineReceived('226 Transfer Complete.')
        return d
    def test_RETR(self):
        """
        Test the RETR command in non-passive mode.
        Like L{test_passiveRETR} but in the configuration where the server
        establishes the data connection to the client, rather than the other
        way around.
        """
        self.client.passive = False
        def generatePort(portCmd):
            # Stand-in for generatePortCommand: fakes the active-mode data
            # connection and delivers the file's bytes.
            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
            portCmd.protocol.makeConnection(proto_helpers.StringTransport())
            portCmd.protocol.dataReceived("x" * 1000)
            portCmd.protocol.connectionLost(
                failure.Failure(error.ConnectionDone("")))
        def cbRetr(res, proto):
            self.assertEqual(proto.buffer, 'x' * 1000)
        self.client.generatePortCommand = generatePort
        self._testLogin()
        proto = _BufferingProtocol()
        d = self.client.retrieveFile("spam", proto)
        d.addCallback(cbRetr, proto)
        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
                         (ftp.encodeHostPort('127.0.0.1', 9876),))
        self.transport.clear()
        self.client.lineReceived('200 PORT OK')
        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
        self.transport.clear()
        self.client.lineReceived('226 Transfer Complete.')
        return d
    def test_failedRETR(self):
        """
        Try to RETR an unexisting file.
        L{ftp.FTPClient.retrieveFile} should return a Deferred which
        errbacks with L{ftp.CommandFailed} if the server indicates the file
        cannot be transferred for some reason.
        """
        def cbConnect(host, port, factory):
            # The data connection opens and closes without delivering data.
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
        self.client.connectFactory = cbConnect
        self._testLogin()
        proto = _BufferingProtocol()
        d = self.client.retrieveFile("spam", proto)
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
        self.transport.clear()
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 spam: No such file or directory')
        return d
    def test_lostRETR(self):
        """
        Try a RETR, but disconnect during the transfer.
        L{ftp.FTPClient.retrieveFile} should return a Deferred which
        errbacks with L{ftp.ConnectionLost}.
        """
        self.client.passive = False
        l = []
        def generatePort(portCmd):
            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
            tr = proto_helpers.StringTransportWithDisconnection()
            portCmd.protocol.makeConnection(tr)
            tr.protocol = portCmd.protocol
            portCmd.protocol.dataReceived("x" * 500)
            # Keep the data transport around so the test can sever it.
            l.append(tr)
        self.client.generatePortCommand = generatePort
        self._testLogin()
        proto = _BufferingProtocol()
        d = self.client.retrieveFile("spam", proto)
        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
                         (ftp.encodeHostPort('127.0.0.1', 9876),))
        self.transport.clear()
        self.client.lineReceived('200 PORT OK')
        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
        self.assert_(l)
        # Drop the data connection mid-transfer, then the control
        # connection; the Deferred must fail with ConnectionLost.
        l[0].loseConnection()
        self.transport.loseConnection()
        self.assertFailure(d, ftp.ConnectionLost)
        return d
    def test_passiveSTOR(self):
        """
        Test the STOR command: send a file and verify its content.
        L{ftp.FTPClient.storeFile} should return a two-tuple of Deferreds.
        The first of which should fire with a protocol instance when the
        data connection has been established and is responsible for sending
        the contents of the file. The second of which should fire when the
        upload has completed, the data connection has been closed, and the
        server has acknowledged receipt of the file.
        (XXX - storeFile should take a producer as an argument, instead, and
        only return a Deferred which fires when the upload has succeeded or
        failed).
        """
        tr = proto_helpers.StringTransport()
        def cbStore(sender):
            # Fires with the sender protocol once the data connection is
            # up; writes the payload and ends the transfer.
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            sender.transport.write("x" * 1000)
            sender.finish()
            sender.connectionLost(failure.Failure(error.ConnectionDone("")))
        def cbFinish(ign):
            # Everything the sender wrote must have reached the fake data
            # transport.
            self.assertEqual(tr.value(), "x" * 1000)
        def cbConnect(host, port, factory):
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(tr)
        self.client.connectFactory = cbConnect
        self._testLogin()
        d1, d2 = self.client.storeFile("spam")
        d1.addCallback(cbStore)
        d2.addCallback(cbFinish)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'STOR spam\r\n')
        self.transport.clear()
        self.client.lineReceived('226 Transfer Complete.')
        return defer.gatherResults([d1, d2])
    def test_failedSTOR(self):
        """
        Test a failure in the STOR command.
        If the server does not acknowledge successful receipt of the
        uploaded file, the second Deferred returned by
        L{ftp.FTPClient.storeFile} should errback with L{ftp.CommandFailed}.
        """
        tr = proto_helpers.StringTransport()
        def cbStore(sender):
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            sender.transport.write("x" * 1000)
            sender.finish()
            sender.connectionLost(failure.Failure(error.ConnectionDone("")))
        def cbConnect(host, port, factory):
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(tr)
        self.client.connectFactory = cbConnect
        self._testLogin()
        d1, d2 = self.client.storeFile("spam")
        d1.addCallback(cbStore)
        self.assertFailure(d2, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'STOR spam\r\n')
        self.transport.clear()
        # The server aborts instead of acknowledging, so d2 fails.
        self.client.lineReceived(
            '426 Transfer aborted.  Data connection closed.')
        return defer.gatherResults([d1, d2])
    def test_STOR(self):
        """
        Test the STOR command in non-passive mode.
        Like L{test_passiveSTOR} but in the configuration where the server
        establishes the data connection to the client, rather than the other
        way around.
        """
        tr = proto_helpers.StringTransport()
        self.client.passive = False
        def generatePort(portCmd):
            portCmd.text = 'PORT %s' % ftp.encodeHostPort('127.0.0.1', 9876)
            portCmd.protocol.makeConnection(tr)
        def cbStore(sender):
            # Drives the whole command/response exchange once the sender
            # protocol is available, then uploads and finishes.
            self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
                             (ftp.encodeHostPort('127.0.0.1', 9876),))
            self.transport.clear()
            self.client.lineReceived('200 PORT OK')
            self.assertEqual(self.transport.value(), 'STOR spam\r\n')
            self.transport.clear()
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            sender.transport.write("x" * 1000)
            sender.finish()
            sender.connectionLost(failure.Failure(error.ConnectionDone("")))
            self.client.lineReceived('226 Transfer Complete.')
        def cbFinish(ign):
            self.assertEqual(tr.value(), "x" * 1000)
        self.client.generatePortCommand = generatePort
        self._testLogin()
        d1, d2 = self.client.storeFile("spam")
        d1.addCallback(cbStore)
        d2.addCallback(cbFinish)
        return defer.gatherResults([d1, d2])
    def test_passiveLIST(self):
        """
        Test the LIST command.
        L{ftp.FTPClient.list} should return a Deferred which fires with a
        protocol instance which was passed to list after the command has
        succeeded.
        (XXX - This is a very unfortunate API; if my understanding is
        correct, the results are always at least line-oriented, so allowing
        a per-line parser function to be specified would make this simpler,
        but a default implementation should really be provided which knows
        how to deal with all the formats used in real servers, so
        application developers never have to care about this insanity.  It
        would also be nice to either get back a Deferred of a list of
        filenames or to be able to consume the files as they are received
        (which the current API does allow, but in a somewhat inconvenient
        fashion) -exarkun)
        """
        def cbList(res, fileList):
            fls = [f["filename"] for f in fileList.files]
            expected = ["foo", "bar", "baz"]
            expected.sort()
            fls.sort()
            self.assertEqual(fls, expected)
        def cbConnect(host, port, factory):
            # Fakes the passive data connection: feed three listing lines
            # and close.
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            sending = [
                '-rw-r--r--    0 spam      egg      100 Oct 10 2006 foo\r\n',
                '-rw-r--r--    3 spam      egg      100 Oct 10 2006 bar\r\n',
                '-rw-r--r--    4 spam      egg      100 Oct 10 2006 baz\r\n',
            ]
            for i in sending:
                proto.dataReceived(i)
            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
        self.client.connectFactory = cbConnect
        self._testLogin()
        fileList = ftp.FTPFileListProtocol()
        d = self.client.list('foo/bar', fileList).addCallback(cbList, fileList)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
        self.client.lineReceived('226 Transfer Complete.')
        return d
    def test_LIST(self):
        """
        Test the LIST command in non-passive mode.
        Like L{test_passiveLIST} but in the configuration where the server
        establishes the data connection to the client, rather than the other
        way around.
        """
        self.client.passive = False
        def generatePort(portCmd):
            # Fakes the active-mode data connection: feed three listing
            # lines and close.
            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
            portCmd.protocol.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            sending = [
                '-rw-r--r--    0 spam      egg      100 Oct 10 2006 foo\r\n',
                '-rw-r--r--    3 spam      egg      100 Oct 10 2006 bar\r\n',
                '-rw-r--r--    4 spam      egg      100 Oct 10 2006 baz\r\n',
            ]
            for i in sending:
                portCmd.protocol.dataReceived(i)
            portCmd.protocol.connectionLost(
                failure.Failure(error.ConnectionDone("")))
        def cbList(res, fileList):
            fls = [f["filename"] for f in fileList.files]
            expected = ["foo", "bar", "baz"]
            expected.sort()
            fls.sort()
            self.assertEqual(fls, expected)
        self.client.generatePortCommand = generatePort
        self._testLogin()
        fileList = ftp.FTPFileListProtocol()
        d = self.client.list('foo/bar', fileList).addCallback(cbList, fileList)
        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
                         (ftp.encodeHostPort('127.0.0.1', 9876),))
        self.transport.clear()
        self.client.lineReceived('200 PORT OK')
        self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
        self.transport.clear()
        self.client.lineReceived('226 Transfer Complete.')
        return d
    def test_failedLIST(self):
        """
        Test a failure in LIST command.
        L{ftp.FTPClient.list} should return a Deferred which fails with
        L{ftp.CommandFailed} if the server indicates the indicated path is
        invalid for some reason.
        """
        def cbConnect(host, port, factory):
            # The data connection opens and closes without delivering data.
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
        self.client.connectFactory = cbConnect
        self._testLogin()
        fileList = ftp.FTPFileListProtocol()
        d = self.client.list('foo/bar', fileList)
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 foo/bar: No such file or directory')
        return d
    def test_NLST(self):
        """
        Test the NLST command in non-passive mode.
        L{ftp.FTPClient.nlst} should return a Deferred which fires with a
        list of filenames when the list command has completed.
        """
        self.client.passive = False
        def generatePort(portCmd):
            # Fakes the active-mode data connection: feed three bare
            # filenames and close.
            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
            portCmd.protocol.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            portCmd.protocol.dataReceived('foo\r\n')
            portCmd.protocol.dataReceived('bar\r\n')
            portCmd.protocol.dataReceived('baz\r\n')
            portCmd.protocol.connectionLost(
                failure.Failure(error.ConnectionDone("")))
        def cbList(res, proto):
            fls = proto.buffer.splitlines()
            expected = ["foo", "bar", "baz"]
            expected.sort()
            fls.sort()
            self.assertEqual(fls, expected)
        self.client.generatePortCommand = generatePort
        self._testLogin()
        lstproto = _BufferingProtocol()
        d = self.client.nlst('foo/bar', lstproto).addCallback(cbList, lstproto)
        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
                         (ftp.encodeHostPort('127.0.0.1', 9876),))
        self.transport.clear()
        self.client.lineReceived('200 PORT OK')
        self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
        self.client.lineReceived('226 Transfer Complete.')
        return d
    def test_passiveNLST(self):
        """
        Test the NLST command in passive mode.
        Like L{test_NLST} but in the configuration where the client
        establishes the data connection to the server.
        """
        def cbList(res, proto):
            fls = proto.buffer.splitlines()
            expected = ["foo", "bar", "baz"]
            expected.sort()
            fls.sort()
            self.assertEqual(fls, expected)
        def cbConnect(host, port, factory):
            # Fakes the passive data connection: feed three bare filenames
            # and close.
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(proto_helpers.StringTransport())
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            proto.dataReceived('foo\r\n')
            proto.dataReceived('bar\r\n')
            proto.dataReceived('baz\r\n')
            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
        self.client.connectFactory = cbConnect
        self._testLogin()
        lstproto = _BufferingProtocol()
        d = self.client.nlst('foo/bar', lstproto).addCallback(cbList, lstproto)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
        self.client.lineReceived('226 Transfer Complete.')
        return d
    def test_failedNLST(self):
        """
        Test a failure in NLST command.
        L{ftp.FTPClient.nlst} should return a Deferred which fails with
        L{ftp.CommandFailed} if the server indicates the indicated path is
        invalid for some reason.
        """
        tr = proto_helpers.StringTransport()
        def cbConnect(host, port, factory):
            # The data connection opens and closes without delivering data.
            self.assertEqual(host, '127.0.0.1')
            self.assertEqual(port, 12345)
            proto = factory.buildProtocol((host, port))
            proto.makeConnection(tr)
            self.client.lineReceived(
                '150 File status okay; about to open data connection.')
            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
        self.client.connectFactory = cbConnect
        self._testLogin()
        lstproto = _BufferingProtocol()
        d = self.client.nlst('foo/bar', lstproto)
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PASV\r\n')
        self.transport.clear()
        self.client.lineReceived('227 Entering Passive Mode (%s).' %
                                 (ftp.encodeHostPort('127.0.0.1', 12345),))
        self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 foo/bar: No such file or directory')
        return d
    def test_changeDirectoryDeprecated(self):
        """
        L{ftp.FTPClient.changeDirectory} is deprecated and the direct caller of
        it is warned of this.
        """
        self._testLogin()
        # assertWarns invokes the given callable and returns its result,
        # here the Deferred from changeDirectory.
        d = self.assertWarns(
            DeprecationWarning,
            "FTPClient.changeDirectory is deprecated in Twisted 8.2 and "
            "newer.  Use FTPClient.cwd instead.",
            __file__,
            lambda: self.client.changeDirectory('.'))
        # This is necessary to make the Deferred fire.  The Deferred needs
        # to fire so that tearDown doesn't cause it to errback and fail this
        # or (more likely) a later test.
        self.client.lineReceived('250 success')
        return d
    def test_changeDirectory(self):
        """
        Test the changeDirectory method.
        L{ftp.FTPClient.changeDirectory} should return a Deferred which fires
        with True if succeeded.
        """
        def cbCd(res):
            self.assertEqual(res, True)
        self._testLogin()
        d = self.client.changeDirectory("bar/foo").addCallback(cbCd)
        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
        self.client.lineReceived('250 Requested File Action Completed OK')
        return d
    # Suppress the deprecation warning changeDirectory deliberately
    # triggers (see test_changeDirectoryDeprecated).
    test_changeDirectory.suppress = [_changeDirectorySuppression]
    def test_failedChangeDirectory(self):
        """
        Test a failure in the changeDirectory method.
        The behaviour here is the same as a failed CWD.
        """
        self._testLogin()
        d = self.client.changeDirectory("bar/foo")
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 bar/foo: No such file or directory')
        return d
    test_failedChangeDirectory.suppress = [_changeDirectorySuppression]
    def test_strangeFailedChangeDirectory(self):
        """
        Test a strange failure in changeDirectory method.
        L{ftp.FTPClient.changeDirectory} is stricter than CWD as it checks
        code 250 for success.
        """
        self._testLogin()
        d = self.client.changeDirectory("bar/foo")
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
        # A success-class code other than 250 still counts as a failure.
        self.client.lineReceived('252 I do what I want !')
        return d
    test_strangeFailedChangeDirectory.suppress = [_changeDirectorySuppression]
    def test_renameFromTo(self):
        """
        L{ftp.FTPClient.rename} issues I{RNTO} and I{RNFR} commands and returns
        a L{Deferred} which fires when a file has successfully been renamed.
        """
        self._testLogin()
        d = self.client.rename("/spam", "/ham")
        self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
        self.transport.clear()
        fromResponse = (
            '350 Requested file action pending further information.\r\n')
        # The intermediate 350 reply triggers the RNTO command.
        self.client.lineReceived(fromResponse)
        self.assertEqual(self.transport.value(), 'RNTO /ham\r\n')
        toResponse = (
            '250 Requested File Action Completed OK')
        self.client.lineReceived(toResponse)
        d.addCallback(self.assertEqual, ([fromResponse], [toResponse]))
        return d
    def test_renameFromToEscapesPaths(self):
        """
        L{ftp.FTPClient.rename} issues I{RNTO} and I{RNFR} commands with paths
        escaped according to U{http://cr.yp.to/ftp/filesystem.html}.
        """
        self._testLogin()
        fromFile = "/foo/ba\nr/baz"
        toFile = "/qu\nux"
        self.client.rename(fromFile, toFile)
        self.client.lineReceived("350 ")
        self.client.lineReceived("250 ")
        # Embedded newlines are transmitted as NUL bytes on the wire.
        self.assertEqual(
            self.transport.value(),
            "RNFR /foo/ba\x00r/baz\r\n"
            "RNTO /qu\x00ux\r\n")
    def test_renameFromToFailingOnFirstError(self):
        """
        The L{Deferred} returned by L{ftp.FTPClient.rename} is errbacked with
        L{CommandFailed} if the I{RNFR} command receives an error response code
        (for example, because the file does not exist).
        """
        self._testLogin()
        d = self.client.rename("/spam", "/ham")
        self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
        self.transport.clear()
        # The 550 errbacks the RNFR stage immediately.
        self.client.lineReceived('550 Requested file unavailable.\r\n')
        # The RNTO should not execute since the RNFR failed.
        self.assertEqual(self.transport.value(), '')
        return self.assertFailure(d, ftp.CommandFailed)
    def test_renameFromToFailingOnRenameTo(self):
        """
        The L{Deferred} returned by L{ftp.FTPClient.rename} is errbacked with
        L{CommandFailed} if the I{RNTO} command receives an error response code
        (for example, because the destination directory does not exist).
        """
        self._testLogin()
        d = self.client.rename("/spam", "/ham")
        self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
        self.transport.clear()
        # RNFR succeeds, so the client proceeds to RNTO; that one fails.
        self.client.lineReceived('350 Requested file action pending further information.\r\n')
        self.assertEqual(self.transport.value(), 'RNTO /ham\r\n')
        self.client.lineReceived('550 Requested file unavailable.\r\n')
        return self.assertFailure(d, ftp.CommandFailed)
def test_makeDirectory(self):
"""
L{ftp.FTPClient.makeDirectory} issues a I{MKD} command and returns a
L{Deferred} which is called back with the server's response if the
directory is created.
"""
self._testLogin()
d = self.client.makeDirectory("/spam")
self.assertEqual(self.transport.value(), 'MKD /spam\r\n')
self.client.lineReceived('257 "/spam" created.')
return d.addCallback(self.assertEqual, ['257 "/spam" created.'])
    def test_makeDirectoryPathEscape(self):
        """
        L{ftp.FTPClient.makeDirectory} escapes the path name it sends according
        to U{http://cr.yp.to/ftp/filesystem.html}.
        """
        self._testLogin()
        d = self.client.makeDirectory("/sp\nam")
        # The embedded newline is transmitted as a NUL byte.
        self.assertEqual(self.transport.value(), 'MKD /sp\x00am\r\n')
        # This is necessary to make the Deferred fire.  The Deferred needs
        # to fire so that tearDown doesn't cause it to errback and fail this
        # or (more likely) a later test.
        self.client.lineReceived('257 win')
        return d
def test_failedMakeDirectory(self):
"""
L{ftp.FTPClient.makeDirectory} returns a L{Deferred} which is errbacked
with L{CommandFailed} if the server returns an error response code.
"""
self._testLogin()
d = self.client.makeDirectory("/spam")
self.assertEqual(self.transport.value(), 'MKD /spam\r\n')
self.client.lineReceived('550 PERMISSION DENIED')
return self.assertFailure(d, ftp.CommandFailed)
def test_getDirectory(self):
"""
Test the getDirectory method.
L{ftp.FTPClient.getDirectory} should return a Deferred which fires with
the current directory on the server. It wraps PWD command.
"""
def cbGet(res):
self.assertEqual(res, "/bar/baz")
self._testLogin()
d = self.client.getDirectory().addCallback(cbGet)
self.assertEqual(self.transport.value(), 'PWD\r\n')
self.client.lineReceived('257 "/bar/baz"')
return d
    def test_failedGetDirectory(self):
        """
        Test a failure in getDirectory method.
        The behaviour should be the same as PWD.
        """
        self._testLogin()
        d = self.client.getDirectory()
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PWD\r\n')
        # The 550 response errbacks d with CommandFailed.
        self.client.lineReceived('550 /bar/baz: No such file or directory')
        return d
    def test_anotherFailedGetDirectory(self):
        """
        Test a different failure in getDirectory method.
        The response should be quoted to be parsed, so it returns an error
        otherwise.
        """
        self._testLogin()
        d = self.client.getDirectory()
        self.assertFailure(d, ftp.CommandFailed)
        self.assertEqual(self.transport.value(), 'PWD\r\n')
        # The path lacks the surrounding quotes, so parsing fails.
        self.client.lineReceived('257 /bar/baz')
        return d
def test_removeFile(self):
"""
L{ftp.FTPClient.removeFile} sends a I{DELE} command to the server for
the indicated file and returns a Deferred which fires after the server
sends a 250 response code.
"""
self._testLogin()
d = self.client.removeFile("/tmp/test")
self.assertEqual(self.transport.value(), 'DELE /tmp/test\r\n')
response = '250 Requested file action okay, completed.'
self.client.lineReceived(response)
return d.addCallback(self.assertEqual, [response])
    def test_failedRemoveFile(self):
        """
        If the server returns a response code other than 250 in response to a
        I{DELE} sent by L{ftp.FTPClient.removeFile}, the L{Deferred} returned
        by C{removeFile} is errbacked with a L{Failure} wrapping a
        L{CommandFailed}.
        """
        self._testLogin()
        d = self.client.removeFile("/tmp/test")
        self.assertEqual(self.transport.value(), 'DELE /tmp/test\r\n')
        response = '501 Syntax error in parameters or arguments.'
        self.client.lineReceived(response)
        d = self.assertFailure(d, ftp.CommandFailed)
        # The exception carries the offending response line.
        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
        return d
    def test_unparsableRemoveFileResponse(self):
        """
        If the server returns a response line which cannot be parsed, the
        L{Deferred} returned by L{ftp.FTPClient.removeFile} is errbacked with a
        L{BadResponse} containing the response.
        """
        self._testLogin()
        d = self.client.removeFile("/tmp/test")
        response = '765 blah blah blah'
        self.client.lineReceived(response)
        d = self.assertFailure(d, ftp.BadResponse)
        # The exception carries the offending response line.
        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
        return d
def test_multilineRemoveFileResponse(self):
"""
If the server returns multiple response lines, the L{Deferred} returned
by L{ftp.FTPClient.removeFile} is still fired with a true value if the
ultimate response code is 250.
"""
self._testLogin()
d = self.client.removeFile("/tmp/test")
response = ['250-perhaps a progress report',
'250 okay']
map(self.client.lineReceived, response)
return d.addCallback(self.assertTrue)
    def test_removeDirectory(self):
        """
        L{ftp.FTPClient.removeDirectory} sends a I{RMD} command to the server
        for the indicated directory and returns a Deferred which fires after
        the server sends a 250 response code.
        """
        self._testLogin()
        d = self.client.removeDirectory('/tmp/test')
        self.assertEqual(self.transport.value(), 'RMD /tmp/test\r\n')
        response = '250 Requested file action okay, completed.'
        self.client.lineReceived(response)
        # The Deferred fires with the list of response lines received.
        return d.addCallback(self.assertEqual, [response])
    def test_failedRemoveDirectory(self):
        """
        If the server returns a response code other than 250 in response to a
        I{RMD} sent by L{ftp.FTPClient.removeDirectory}, the L{Deferred}
        returned by C{removeDirectory} is errbacked with a L{Failure} wrapping
        a L{CommandFailed}.
        """
        self._testLogin()
        d = self.client.removeDirectory("/tmp/test")
        self.assertEqual(self.transport.value(), 'RMD /tmp/test\r\n')
        response = '501 Syntax error in parameters or arguments.'
        self.client.lineReceived(response)
        d = self.assertFailure(d, ftp.CommandFailed)
        # The exception carries the offending response lines as its args.
        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
        return d
    def test_unparsableRemoveDirectoryResponse(self):
        """
        If the server returns a response line which cannot be parsed, the
        L{Deferred} returned by L{ftp.FTPClient.removeDirectory} is errbacked
        with a L{BadResponse} containing the response.
        """
        self._testLogin()
        d = self.client.removeDirectory("/tmp/test")
        # 765 is not a response code removeDirectory knows how to interpret.
        response = '765 blah blah blah'
        self.client.lineReceived(response)
        d = self.assertFailure(d, ftp.BadResponse)
        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
        return d
def test_multilineRemoveDirectoryResponse(self):
"""
If the server returns multiple response lines, the L{Deferred} returned
by L{ftp.FTPClient.removeDirectory} is still fired with a true value
if the ultimate response code is 250.
"""
self._testLogin()
d = self.client.removeDirectory("/tmp/test")
response = ['250-perhaps a progress report',
'250 okay']
map(self.client.lineReceived, response)
return d.addCallback(self.assertTrue)
class FTPClientBasicTests(unittest.TestCase):
    """
    Tests for L{ftp.FTPClientBasic} response accumulation and login handling.
    """
    def testGreeting(self):
        """
        The first response received from the server is captured as the
        greeting rather than matched to a queued command.
        """
        # The first response is captured as a greeting.
        ftpClient = ftp.FTPClientBasic()
        ftpClient.lineReceived('220 Imaginary FTP.')
        self.assertEqual(['220 Imaginary FTP.'], ftpClient.greeting)

    def testResponseWithNoMessage(self):
        """
        A response consisting of a code and a trailing space but no message
        text is still a complete, valid response.
        """
        # Responses with no message are still valid, i.e. three digits followed
        # by a space is complete response.
        ftpClient = ftp.FTPClientBasic()
        ftpClient.lineReceived('220 ')
        self.assertEqual(['220 '], ftpClient.greeting)

    def testMultilineResponse(self):
        """
        Lines of a multiline response are accumulated until a terminating
        line (code followed by a space) arrives; only then does the command's
        Deferred fire, with every accumulated line.
        """
        ftpClient = ftp.FTPClientBasic()
        ftpClient.transport = proto_helpers.StringTransport()
        ftpClient.lineReceived('220 Imaginary FTP.')

        # Queue (and send) a dummy command, and set up a callback to capture the
        # result
        deferred = ftpClient.queueStringCommand('BLAH')
        result = []
        deferred.addCallback(result.append)
        deferred.addErrback(self.fail)

        # Send the first line of a multiline response.
        ftpClient.lineReceived('210-First line.')
        self.assertEqual([], result)

        # Send a second line, again prefixed with "nnn-".
        ftpClient.lineReceived('123-Second line.')
        self.assertEqual([], result)

        # Send a plain line of text, no prefix.
        ftpClient.lineReceived('Just some text.')
        self.assertEqual([], result)

        # Now send a short (less than 4 chars) line.
        ftpClient.lineReceived('Hi')
        self.assertEqual([], result)

        # Now send an empty line.
        ftpClient.lineReceived('')
        self.assertEqual([], result)

        # And a line with 3 digits in it, and nothing else.
        ftpClient.lineReceived('321')
        self.assertEqual([], result)

        # Now finish it.
        ftpClient.lineReceived('210 Done.')
        self.assertEqual(
            ['210-First line.',
             '123-Second line.',
             'Just some text.',
             'Hi',
             '',
             '321',
             '210 Done.'], result[0])

    def test_noPasswordGiven(self):
        """
        Passing None as the password avoids sending the PASS command.
        """
        # Create a client, and give it a greeting.
        ftpClient = ftp.FTPClientBasic()
        ftpClient.transport = proto_helpers.StringTransport()
        ftpClient.lineReceived('220 Welcome to Imaginary FTP.')

        # Queue a login with no password
        ftpClient.queueLogin('bob', None)
        self.assertEqual('USER bob\r\n', ftpClient.transport.value())

        # Clear the test buffer, acknowledge the USER command.
        ftpClient.transport.clear()
        ftpClient.lineReceived('200 Hello bob.')

        # The client shouldn't have sent anything more (i.e. it shouldn't have
        # sent a PASS command).
        self.assertEqual('', ftpClient.transport.value())

    def test_noPasswordNeeded(self):
        """
        Receiving a 230 response to USER prevents PASS from being sent.
        """
        # Create a client, and give it a greeting.
        ftpClient = ftp.FTPClientBasic()
        ftpClient.transport = proto_helpers.StringTransport()
        ftpClient.lineReceived('220 Welcome to Imaginary FTP.')

        # Queue a login with no password
        ftpClient.queueLogin('bob', 'secret')
        self.assertEqual('USER bob\r\n', ftpClient.transport.value())

        # Clear the test buffer, acknowledge the USER command with a 230
        # response code.
        ftpClient.transport.clear()
        ftpClient.lineReceived('230 Hello bob. No password needed.')

        # The client shouldn't have sent anything more (i.e. it shouldn't have
        # sent a PASS command).
        self.assertEqual('', ftpClient.transport.value())
class PathHandling(unittest.TestCase):
    """
    Tests for L{ftp.toSegments}, which normalizes a path string relative to
    a list of current-directory segments.
    """
    def testNormalizer(self):
        # Basic absolute and relative paths from the root.
        for inp, outp in [('a', ['a']),
                          ('/a', ['a']),
                          ('/', []),
                          ('a/b/c', ['a', 'b', 'c']),
                          ('/a/b/c', ['a', 'b', 'c']),
                          ('/a/', ['a']),
                          ('a/', ['a'])]:
            self.assertEqual(ftp.toSegments([], inp), outp)

        # Relative paths resolve against the current directory ['a'];
        # a leading '/' makes the input absolute and ignores it.
        for inp, outp in [('b', ['a', 'b']),
                          ('b/', ['a', 'b']),
                          ('/b', ['b']),
                          ('/b/', ['b']),
                          ('b/c', ['a', 'b', 'c']),
                          ('b/c/', ['a', 'b', 'c']),
                          ('/b/c', ['b', 'c']),
                          ('/b/c/', ['b', 'c'])]:
            self.assertEqual(ftp.toSegments(['a'], inp), outp)

        # Repeated slashes collapse into single separators.
        for inp, outp in [('//', []),
                          ('//a', ['a']),
                          ('a//', ['a']),
                          ('a//b', ['a', 'b'])]:
            self.assertEqual(ftp.toSegments([], inp), outp)

        for inp, outp in [('//', []),
                          ('//b', ['b']),
                          ('b//c', ['a', 'b', 'c'])]:
            self.assertEqual(ftp.toSegments(['a'], inp), outp)

        # '..' removes the preceding segment.
        for inp, outp in [('..', []),
                          ('../', []),
                          ('a/..', ['x']),
                          ('/a/..', []),
                          ('/a/b/..', ['a']),
                          ('/a/b/../', ['a']),
                          ('/a/b/../c', ['a', 'c']),
                          ('/a/b/../c/', ['a', 'c']),
                          ('/a/b/../../c', ['c']),
                          ('/a/b/../../c/', ['c']),
                          ('/a/b/../../c/..', []),
                          ('/a/b/../../c/../', [])]:
            self.assertEqual(ftp.toSegments(['x'], inp), outp)

        # Any attempt to climb above the root raises InvalidPath.
        for inp in ['..', '../', 'a/../..', 'a/../../',
                    '/..', '/../', '/a/../..', '/a/../../',
                    '/a/b/../../..']:
            self.assertRaises(ftp.InvalidPath, ftp.toSegments, [], inp)

        for inp in ['../..', '../../', '../a/../..']:
            self.assertRaises(ftp.InvalidPath, ftp.toSegments, ['x'], inp)
class IsGlobbingExpressionTests(unittest.TestCase):
    """
    Tests for _isGlobbingExpression utility function.
    """
    def test_isGlobbingExpressionEmptySegments(self):
        """
        _isGlobbingExpression will return False for None, or empty
        segments.
        """
        self.assertFalse(ftp._isGlobbingExpression())
        for segments in ([], None):
            self.assertFalse(ftp._isGlobbingExpression(segments))

    def test_isGlobbingExpressionNoGlob(self):
        """
        _isGlobbingExpression will return False for plain segments.

        Also, it only checks the last segment part (filename) and will not
        check the path name.
        """
        for segments in (['ignore', 'expr'], ['*.txt', 'expr']):
            self.assertFalse(ftp._isGlobbingExpression(segments))

    def test_isGlobbingExpressionGlob(self):
        """
        _isGlobbingExpression will return True for segments which contains
        globbing characters in the last segment part (filename).
        """
        for filename in ('*.txt', '[a-b].txt', 'fil?.txt'):
            self.assertTrue(ftp._isGlobbingExpression(['ignore', filename]))
class BaseFTPRealmTests(unittest.TestCase):
    """
    Tests for L{ftp.BaseFTPRealm}, a base class to help define L{IFTPShell}
    realms with different user home directory policies.
    """
    def test_interface(self):
        """
        L{ftp.BaseFTPRealm} implements L{IRealm}.
        """
        self.assertTrue(verifyClass(IRealm, ftp.BaseFTPRealm))

    def test_getHomeDirectory(self):
        """
        L{ftp.BaseFTPRealm} calls its C{getHomeDirectory} method with the
        avatarId being requested to determine the home directory for that
        avatar.
        """
        result = filepath.FilePath(self.mktemp())
        avatars = []
        class TestRealm(ftp.BaseFTPRealm):
            def getHomeDirectory(self, avatarId):
                avatars.append(avatarId)
                return result
        realm = TestRealm(self.mktemp())
        iface, avatar, logout = realm.requestAvatar(
            "alice@example.com", None, ftp.IFTPShell)
        # The realm must have asked getHomeDirectory for exactly the
        # requested avatarId (previously collected but never verified).
        self.assertEqual(avatars, ["alice@example.com"])
        self.assertIsInstance(avatar, ftp.FTPShell)
        self.assertEqual(avatar.filesystemRoot, result)

    def test_anonymous(self):
        """
        L{ftp.BaseFTPRealm} returns an L{ftp.FTPAnonymousShell} instance for
        anonymous avatar requests.
        """
        anonymous = self.mktemp()
        realm = ftp.BaseFTPRealm(anonymous)
        iface, avatar, logout = realm.requestAvatar(
            checkers.ANONYMOUS, None, ftp.IFTPShell)
        self.assertIsInstance(avatar, ftp.FTPAnonymousShell)
        self.assertEqual(avatar.filesystemRoot, filepath.FilePath(anonymous))

    def test_notImplemented(self):
        """
        L{ftp.BaseFTPRealm.getHomeDirectory} should be overridden by a subclass
        and raises L{NotImplementedError} if it is not.
        """
        realm = ftp.BaseFTPRealm(self.mktemp())
        self.assertRaises(NotImplementedError, realm.getHomeDirectory, object())
class FTPRealmTestCase(unittest.TestCase):
    """
    Tests for L{ftp.FTPRealm}.
    """
    def test_getHomeDirectory(self):
        """
        L{ftp.FTPRealm} accepts an extra directory to its initializer and treats
        the avatarId passed to L{ftp.FTPRealm.getHomeDirectory} as a single path
        segment to construct a child of that directory.
        """
        base = '/path/to/home'
        realm = ftp.FTPRealm(self.mktemp(), base)
        home = realm.getHomeDirectory('alice@example.com')
        # The avatarId becomes a single child segment under the base.
        self.assertEqual(
            filepath.FilePath(base).child('alice@example.com'), home)

    def test_defaultHomeDirectory(self):
        """
        If no extra directory is passed to L{ftp.FTPRealm}, it uses C{"/home"}
        as the base directory containing all user home directories.
        """
        realm = ftp.FTPRealm(self.mktemp())
        home = realm.getHomeDirectory('alice@example.com')
        self.assertEqual(filepath.FilePath('/home/alice@example.com'), home)
class SystemFTPRealmTests(unittest.TestCase):
    """
    Tests for L{ftp.SystemFTPRealm}.
    """
    # Skip these tests on platforms without a POSIX accounts database.
    skip = nonPOSIXSkip

    def test_getHomeDirectory(self):
        """
        L{ftp.SystemFTPRealm.getHomeDirectory} treats the avatarId passed to it
        as a username in the underlying platform and returns that account's home
        directory.
        """
        # Try to pick a username that will have a home directory.
        user = getpass.getuser()

        # Try to find their home directory in a different way than used by the
        # implementation. Maybe this is silly and can only introduce spurious
        # failures due to system-specific configurations.
        import pwd
        expected = pwd.getpwnam(user).pw_dir

        realm = ftp.SystemFTPRealm(self.mktemp())
        home = realm.getHomeDirectory(user)
        self.assertEqual(home, filepath.FilePath(expected))

    def test_noSuchUser(self):
        """
        L{ftp.SystemFTPRealm.getHomeDirectory} raises L{UnauthorizedLogin} when
        passed a username which has no corresponding home directory in the
        system's accounts database.
        """
        # A random hex string is vanishingly unlikely to be a real account.
        # NOTE(review): str.encode('hex') is Python 2 only; under Python 3
        # this would need binascii.hexlify — confirm intended Python version.
        user = insecureRandom(4).encode('hex')
        realm = ftp.SystemFTPRealm(self.mktemp())
        self.assertRaises(UnauthorizedLogin, realm.getHomeDirectory, user)
class ErrnoToFailureTestCase(unittest.TestCase):
    """
    Tests for L{ftp.errnoToFailure} errno checking.
    """
    def _assertTranslation(self, errnoValue, exceptionType):
        """
        Assert that L{ftp.errnoToFailure} maps C{errnoValue} to a Deferred
        which fails with C{exceptionType}.

        Factored out of the individual tests, which all repeated the same
        two-line pattern.
        """
        d = ftp.errnoToFailure(errnoValue, "foo")
        return self.assertFailure(d, exceptionType)

    def test_notFound(self):
        """
        C{errno.ENOENT} should be translated to L{ftp.FileNotFoundError}.
        """
        return self._assertTranslation(errno.ENOENT, ftp.FileNotFoundError)

    def test_permissionDenied(self):
        """
        C{errno.EPERM} should be translated to L{ftp.PermissionDeniedError}.
        """
        return self._assertTranslation(errno.EPERM, ftp.PermissionDeniedError)

    def test_accessDenied(self):
        """
        C{errno.EACCES} should be translated to L{ftp.PermissionDeniedError}.
        """
        return self._assertTranslation(errno.EACCES, ftp.PermissionDeniedError)

    def test_notDirectory(self):
        """
        C{errno.ENOTDIR} should be translated to L{ftp.IsNotADirectoryError}.
        """
        return self._assertTranslation(errno.ENOTDIR, ftp.IsNotADirectoryError)

    def test_fileExists(self):
        """
        C{errno.EEXIST} should be translated to L{ftp.FileExistsError}.
        """
        return self._assertTranslation(errno.EEXIST, ftp.FileExistsError)

    def test_isDirectory(self):
        """
        C{errno.EISDIR} should be translated to L{ftp.IsADirectoryError}.
        """
        return self._assertTranslation(errno.EISDIR, ftp.IsADirectoryError)

    def test_passThrough(self):
        """
        If an unknown errno is passed to L{ftp.errnoToFailure}, it should let
        the originating exception pass through.
        """
        try:
            raise RuntimeError("bar")
        except:
            # Deliberate bare except: errnoToFailure must be called while the
            # exception is being handled so it can capture it from exc_info.
            d = ftp.errnoToFailure(-1, "foo")
            return self.assertFailure(d, RuntimeError)
class AnonymousFTPShellTestCase(unittest.TestCase):
    """
    Test anonymous shell properties.
    """
    def test_anonymousWrite(self):
        """
        Check that L{ftp.FTPAnonymousShell} returns an error when trying to
        open it in write mode.
        """
        shell = ftp.FTPAnonymousShell('')
        d = shell.openForWriting(('foo',))
        # Anonymous users are read-only, so any write attempt is denied.
        self.assertFailure(d, ftp.PermissionDeniedError)
        return d
class IFTPShellTestsMixin:
    """
    Generic tests for the C{IFTPShell} interface.

    Concrete subclasses must provide a C{self.shell} attribute plus the four
    fixture helpers below (C{directoryExists}, C{createDirectory},
    C{fileExists}, C{createFile}).
    """
    def directoryExists(self, path):
        """
        Test if the directory exists at C{path}.

        @param path: the relative path to check.
        @type path: C{str}.

        @return: C{True} if C{path} exists and is a directory, C{False} if
            it's not the case
        @rtype: C{bool}
        """
        raise NotImplementedError()

    def createDirectory(self, path):
        """
        Create a directory in C{path}.

        @param path: the relative path of the directory to create, with one
            segment.
        @type path: C{str}
        """
        raise NotImplementedError()

    def fileExists(self, path):
        """
        Test if the file exists at C{path}.

        @param path: the relative path to check.
        @type path: C{str}.

        @return: C{True} if C{path} exists and is a file, C{False} if it's not
            the case.
        @rtype: C{bool}
        """
        raise NotImplementedError()

    def createFile(self, path, fileContent=''):
        """
        Create a file named C{path} with some content.

        @param path: the relative path of the file to create, without
            directory.
        @type path: C{str}

        @param fileContent: the content of the file.
        @type fileContent: C{str}
        """
        raise NotImplementedError()

    def test_createDirectory(self):
        """
        C{directoryExists} should report correctly about directory existence,
        and C{createDirectory} should create a directory detectable by
        C{directoryExists}.
        """
        self.assertFalse(self.directoryExists('bar'))
        self.createDirectory('bar')
        self.assertTrue(self.directoryExists('bar'))

    def test_createFile(self):
        """
        C{fileExists} should report correctly about file existence, and
        C{createFile} should create a file detectable by C{fileExists}.
        """
        self.assertFalse(self.fileExists('file.txt'))
        self.createFile('file.txt')
        self.assertTrue(self.fileExists('file.txt'))

    def test_makeDirectory(self):
        """
        Create a directory and check it ends in the filesystem.
        """
        d = self.shell.makeDirectory(('foo',))
        def cb(result):
            self.assertTrue(self.directoryExists('foo'))
        return d.addCallback(cb)

    def test_makeDirectoryError(self):
        """
        Creating a directory that already exists should fail with a
        C{ftp.FileExistsError}.
        """
        self.createDirectory('foo')
        d = self.shell.makeDirectory(('foo',))
        return self.assertFailure(d, ftp.FileExistsError)

    def test_removeDirectory(self):
        """
        Try to remove a directory and check it's removed from the filesystem.
        """
        self.createDirectory('bar')
        d = self.shell.removeDirectory(('bar',))
        def cb(result):
            self.assertFalse(self.directoryExists('bar'))
        return d.addCallback(cb)

    def test_removeDirectoryOnFile(self):
        """
        removeDirectory should not work in file and fail with a
        C{ftp.IsNotADirectoryError}.
        """
        self.createFile('file.txt')
        d = self.shell.removeDirectory(('file.txt',))
        return self.assertFailure(d, ftp.IsNotADirectoryError)

    def test_removeNotExistingDirectory(self):
        """
        Removing directory that doesn't exist should fail with a
        C{ftp.FileNotFoundError}.
        """
        d = self.shell.removeDirectory(('bar',))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_removeFile(self):
        """
        Try to remove a file and check it's removed from the filesystem.
        """
        self.createFile('file.txt')
        d = self.shell.removeFile(('file.txt',))
        def cb(res):
            self.assertFalse(self.fileExists('file.txt'))
        d.addCallback(cb)
        return d

    def test_removeFileOnDirectory(self):
        """
        removeFile should not work on directory.
        """
        self.createDirectory('ned')
        d = self.shell.removeFile(('ned',))
        return self.assertFailure(d, ftp.IsADirectoryError)

    def test_removeNotExistingFile(self):
        """
        Try to remove a non existent file, and check it raises a
        L{ftp.FileNotFoundError}.
        """
        d = self.shell.removeFile(('foo',))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_list(self):
        """
        Check the output of the list method.
        """
        self.createDirectory('ned')
        self.createFile('file.txt')
        d = self.shell.list(('.',))
        def cb(l):
            # Sort for a deterministic comparison; listing order is not
            # guaranteed by the filesystem.
            l.sort()
            self.assertEqual(l,
                [('file.txt', []), ('ned', [])])
        return d.addCallback(cb)

    def test_listWithStat(self):
        """
        Check the output of list with asked stats.
        """
        self.createDirectory('ned')
        self.createFile('file.txt')
        d = self.shell.list(('.',), ('size', 'permissions',))
        def cb(l):
            l.sort()
            self.assertEqual(len(l), 2)
            self.assertEqual(l[0][0], 'file.txt')
            self.assertEqual(l[1][0], 'ned')
            # Size and permissions are reported differently between platforms
            # so just check they are present
            self.assertEqual(len(l[0][1]), 2)
            self.assertEqual(len(l[1][1]), 2)
        return d.addCallback(cb)

    def test_listWithInvalidStat(self):
        """
        Querying an invalid stat should result to a C{AttributeError}.
        """
        self.createDirectory('ned')
        d = self.shell.list(('.',), ('size', 'whateverstat',))
        return self.assertFailure(d, AttributeError)

    def test_listFile(self):
        """
        Check the output of the list method on a file.
        """
        self.createFile('file.txt')
        d = self.shell.list(('file.txt',))
        def cb(l):
            l.sort()
            self.assertEqual(l,
                [('file.txt', [])])
        return d.addCallback(cb)

    def test_listNotExistingDirectory(self):
        """
        list on a directory that doesn't exist should fail with a
        L{ftp.FileNotFoundError}.
        """
        d = self.shell.list(('foo',))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_access(self):
        """
        Try to access a resource.
        """
        self.createDirectory('ned')
        d = self.shell.access(('ned',))
        return d

    def test_accessNotFound(self):
        """
        access should fail on a resource that doesn't exist.
        """
        d = self.shell.access(('foo',))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_openForReading(self):
        """
        Check that openForReading returns an object providing C{ftp.IReadFile}.
        """
        self.createFile('file.txt')
        d = self.shell.openForReading(('file.txt',))
        def cb(res):
            self.assertTrue(ftp.IReadFile.providedBy(res))
        d.addCallback(cb)
        return d

    def test_openForReadingNotFound(self):
        """
        openForReading should fail with a C{ftp.FileNotFoundError} on a file
        that doesn't exist.
        """
        d = self.shell.openForReading(('ned',))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_openForReadingOnDirectory(self):
        """
        openForReading should not work on directory.
        """
        self.createDirectory('ned')
        d = self.shell.openForReading(('ned',))
        return self.assertFailure(d, ftp.IsADirectoryError)

    def test_openForWriting(self):
        """
        Check that openForWriting returns an object providing C{ftp.IWriteFile}.
        """
        d = self.shell.openForWriting(('foo',))
        def cb1(res):
            self.assertTrue(ftp.IWriteFile.providedBy(res))
            # cb2 is defined below but already bound by the time cb1 runs.
            return res.receive().addCallback(cb2)
        def cb2(res):
            self.assertTrue(IConsumer.providedBy(res))
        d.addCallback(cb1)
        return d

    def test_openForWritingExistingDirectory(self):
        """
        openForWriting should not be able to open a directory that already
        exists.
        """
        self.createDirectory('ned')
        d = self.shell.openForWriting(('ned',))
        return self.assertFailure(d, ftp.IsADirectoryError)

    def test_openForWritingInNotExistingDirectory(self):
        """
        openForWring should fail with a L{ftp.FileNotFoundError} if you specify
        a file in a directory that doesn't exist.
        """
        self.createDirectory('ned')
        d = self.shell.openForWriting(('ned', 'idonotexist', 'foo'))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_statFile(self):
        """
        Check the output of the stat method on a file.
        """
        fileContent = 'wobble\n'
        self.createFile('file.txt', fileContent)
        d = self.shell.stat(('file.txt',), ('size', 'directory'))
        def cb(res):
            self.assertEqual(res[0], len(fileContent))
            self.assertFalse(res[1])
        d.addCallback(cb)
        return d

    def test_statDirectory(self):
        """
        Check the output of the stat method on a directory.
        """
        self.createDirectory('ned')
        d = self.shell.stat(('ned',), ('size', 'directory'))
        def cb(res):
            self.assertTrue(res[1])
        d.addCallback(cb)
        return d

    def test_statOwnerGroup(self):
        """
        Check the owner and groups stats.
        """
        self.createDirectory('ned')
        d = self.shell.stat(('ned',), ('owner', 'group'))
        def cb(res):
            # Owner and group values are platform-specific; only check that
            # both entries are present.
            self.assertEqual(len(res), 2)
        d.addCallback(cb)
        return d

    def test_statNotExisting(self):
        """
        stat should fail with L{ftp.FileNotFoundError} on a file that doesn't
        exist.
        """
        d = self.shell.stat(('foo',), ('size', 'directory'))
        return self.assertFailure(d, ftp.FileNotFoundError)

    def test_invalidStat(self):
        """
        Querying an invalid stat should result to a C{AttributeError}.
        """
        self.createDirectory('ned')
        d = self.shell.stat(('ned',), ('size', 'whateverstat'))
        return self.assertFailure(d, AttributeError)

    def test_rename(self):
        """
        Try to rename a directory.
        """
        self.createDirectory('ned')
        d = self.shell.rename(('ned',), ('foo',))
        def cb(res):
            self.assertTrue(self.directoryExists('foo'))
            self.assertFalse(self.directoryExists('ned'))
        return d.addCallback(cb)

    def test_renameNotExisting(self):
        """
        Renaming a directory that doesn't exist should fail with
        L{ftp.FileNotFoundError}.
        """
        d = self.shell.rename(('foo',), ('bar',))
        return self.assertFailure(d, ftp.FileNotFoundError)
class FTPShellTestCase(unittest.TestCase, IFTPShellTestsMixin):
    """
    Tests for the C{ftp.FTPShell} object.

    Provides the fixture helpers required by L{IFTPShellTestsMixin} on top of
    a temporary directory.
    """
    def setUp(self):
        """
        Create a root directory and instantiate a shell.
        """
        root = filepath.FilePath(self.mktemp())
        root.createDirectory()
        self.root = root
        self.shell = ftp.FTPShell(root)

    def directoryExists(self, path):
        """
        Test if the directory exists at C{path}.
        """
        child = self.root.child(path)
        return child.isdir()

    def createDirectory(self, path):
        """
        Create a directory in C{path}.
        """
        child = self.root.child(path)
        return child.createDirectory()

    def fileExists(self, path):
        """
        Test if the file exists at C{path}.
        """
        child = self.root.child(path)
        return child.isfile()

    def createFile(self, path, fileContent=''):
        """
        Create a file named C{path} with some content.
        """
        child = self.root.child(path)
        return child.setContent(fileContent)
class TestConsumer(object):
    """
    A simple consumer for tests. It only works with non-streaming producers.

    @ivar producer: an object providing
        L{twisted.internet.interfaces.IPullProducer}.
    """
    implements(IConsumer)

    # The currently registered pull producer; None when unregistered.
    producer = None

    def registerProducer(self, producer, streaming):
        """
        Simple register of producer, checks that no register has happened
        before.
        """
        assert self.producer is None
        self.buffer = []
        self.producer = producer
        # A pull producer won't write on its own; request the first chunk.
        self.producer.resumeProducing()

    def unregisterProducer(self):
        """
        Unregister the producer, it should be done after a register.
        """
        assert self.producer is not None
        self.producer = None

    def write(self, data):
        """
        Save the data received.
        """
        self.buffer.append(data)
        # Ask the pull producer for the next chunk.
        self.producer.resumeProducing()
class TestProducer(object):
    """
    A dumb producer.

    Holds a single payload and hands it to the consumer when L{start} is
    called; no producer registration or flow control is involved.
    """
    def __init__(self, toProduce, consumer):
        """
        @param toProduce: data to write
        @type toProduce: C{str}

        @param consumer: the consumer of data.
        @type consumer: C{IConsumer}
        """
        self.consumer = consumer
        self.toProduce = toProduce

    def start(self):
        """
        Send the data to consume.
        """
        payload = self.toProduce
        self.consumer.write(payload)
class IReadWriteTestsMixin:
    """
    Generic tests for the C{IReadFile} and C{IWriteFile} interfaces.

    Concrete subclasses must implement the three fixture factories below.
    """
    def getFileReader(self, content):
        """
        Return an object providing C{IReadFile}, ready to send data C{content}.
        """
        raise NotImplementedError()

    def getFileWriter(self):
        """
        Return an object providing C{IWriteFile}, ready to receive data.
        """
        raise NotImplementedError()

    def getFileContent(self):
        """
        Return the content of the file used.
        """
        raise NotImplementedError()

    def test_read(self):
        """
        Test L{ftp.IReadFile}: the implementation should have a send method
        returning a C{Deferred} which fires when all the data has been sent
        to the consumer, and the data should be correctly send to the consumer.
        """
        content = 'wobble\n'
        consumer = TestConsumer()
        def cbGet(reader):
            return reader.send(consumer).addCallback(cbSend)
        def cbSend(res):
            self.assertEqual("".join(consumer.buffer), content)
        return self.getFileReader(content).addCallback(cbGet)

    def test_write(self):
        """
        Test L{ftp.IWriteFile}: the implementation should have a receive
        method returning a C{Deferred} which fires with a consumer ready to
        receive data to be written. It should also have a close() method that
        returns a Deferred.
        """
        content = 'elbbow\n'
        def cbGet(writer):
            return writer.receive().addCallback(cbReceive, writer)
        def cbReceive(consumer, writer):
            producer = TestProducer(content, consumer)
            # Register a dummy streaming producer so the consumer doesn't try
            # to pull; TestProducer pushes the data itself via start().
            consumer.registerProducer(None, True)
            producer.start()
            consumer.unregisterProducer()
            return writer.close().addCallback(cbClose)
        def cbClose(ignored):
            self.assertEqual(self.getFileContent(), content)
        return self.getFileWriter().addCallback(cbGet)
class FTPReadWriteTestCase(unittest.TestCase, IReadWriteTestsMixin):
    """
    Tests for C{ftp._FileReader} and C{ftp._FileWriter}, the objects returned
    by the shell in C{openForReading}/C{openForWriting}.
    """
    def setUp(self):
        """
        Create a temporary directory, a shell rooted in it, and the name of
        the file the fixtures operate on.
        """
        root = filepath.FilePath(self.mktemp())
        root.createDirectory()
        self.root = root
        self.shell = ftp.FTPShell(root)
        self.filename = "file.txt"

    def getFileReader(self, content):
        """
        Return a C{ftp._FileReader} instance with a file opened for reading.
        """
        target = self.root.child(self.filename)
        target.setContent(content)
        return self.shell.openForReading((self.filename,))

    def getFileWriter(self):
        """
        Return a C{ftp._FileWriter} instance with a file opened for writing.
        """
        segments = (self.filename,)
        return self.shell.openForWriting(segments)

    def getFileContent(self):
        """
        Return the content of the temporary file.
        """
        target = self.root.child(self.filename)
        return target.getContent()
class CloseTestWriter:
    """
    L{ftp.IWriteFile} stub which records whether close() has been entered.
    """
    implements(ftp.IWriteFile)

    # Set to True as soon as close() is called.
    closeStarted = False

    def receive(self):
        """
        Return a consumer writing into an in-memory buffer.
        """
        self.s = StringIO()
        fc = ftp.FileConsumer(self.s)
        return defer.succeed(fc)

    def close(self):
        """
        Mark the close as started and return the externally supplied Deferred.
        """
        self.closeStarted = True
        # NOTE(review): self.d is not set here — the test (FTPCloseTest)
        # assigns it before close() is invoked.
        return self.d
class CloseTestShell:
    """
    Minimal shell stub whose C{openForWriting} always succeeds with the
    pre-configured C{writer} attribute, ignoring the path segments.
    """
    def openForWriting(self, segs):
        """
        Succeed with the canned writer regardless of C{segs}.
        """
        writer = self.writer
        return defer.succeed(writer)
class FTPCloseTest(unittest.TestCase):
    """Tests that the server invokes IWriteFile.close"""

    def test_write(self):
        """Confirm that FTP uploads (i.e. ftp_STOR) correctly call and wait
        upon the IWriteFile object's close() method"""
        # Wire up an FTP protocol instance with stub shell/writer so we can
        # observe exactly when close() is entered and when STOR completes.
        f = ftp.FTP()
        f.workingDirectory = ["root"]
        f.shell = CloseTestShell()
        f.shell.writer = CloseTestWriter()
        f.shell.writer.d = defer.Deferred()
        f.factory = ftp.FTPFactory()
        f.factory.timeOut = None
        f.makeConnection(StringIO())

        di = ftp.DTP()
        di.factory = ftp.DTPFactory(f)
        f.dtpInstance = di
        di.makeConnection(None)

        stor_done = []
        d = f.ftp_STOR("path")
        d.addCallback(stor_done.append)
        # the writer is still receiving data
        self.assertFalse(f.shell.writer.closeStarted, "close() called early")
        di.dataReceived("some data here")
        self.assertFalse(f.shell.writer.closeStarted, "close() called early")
        di.connectionLost("reason is ignored")
        # now we should be waiting in close()
        self.assertTrue(f.shell.writer.closeStarted, "close() not called")
        self.assertFalse(stor_done)
        # Firing the writer's Deferred lets close() — and hence STOR — finish.
        f.shell.writer.d.callback("allow close() to finish")
        self.assertTrue(stor_done)

        return d  # just in case an errback occurred
class FTPResponseCodeTests(unittest.TestCase):
    """
    Tests relating directly to response codes.
    """
    def test_unique(self):
        """
        All of the response code globals (for example C{RESTART_MARKER_REPLY} or
        C{USR_NAME_OK_NEED_PASS}) have unique values and are present in the
        C{RESPONSE} dictionary.
        """
        allValues = set(ftp.RESPONSE)
        seenValues = set()
        # Every all-uppercase module-level string in the ftp module is
        # treated as a response code constant.
        for key, value in vars(ftp).items():
            if isinstance(value, str) and key.isupper():
                self.assertIn(
                    value, allValues,
                    "Code %r with value %r missing from RESPONSE dict" % (
                        key, value))
                self.assertNotIn(
                    value, seenValues,
                    "Duplicate code %r with value %r" % (key, value))
                seenValues.add(value)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import json
from tests import base
from girder.constants import ROOT_DIR
from girder.models.collection import Collection
from girder.models.folder import Folder
from girder.models.item import Item
from girder.models.upload import Upload
from girder.models.user import User
def setUpModule():
    # Enable the plugin under test before the server starts so that its
    # event bindings are registered for the whole module.
    base.enabledPlugins.append('download_statistics')
    base.startServer()
def tearDownModule():
    # Shut down the test server started in setUpModule.
    base.stopServer()
class DownloadStatisticsTestCase(base.TestCase):
    def setUp(self):
        base.TestCase.setUp(self)

        # Create admin user
        admin = {'email': 'admin@email.com',
                 'login': 'adminLogin',
                 'firstName': 'adminFirst',
                 'lastName': 'adminLast',
                 'password': 'adminPassword',
                 'admin': True}
        self.admin = User().createUser(**admin)

        # Directory containing the fixture files uploaded by the tests.
        self.filesDir = os.path.join(ROOT_DIR, 'plugins', 'download_statistics',
                                     'plugin_tests', 'files')
def _downloadFolder(self, folderId):
# Download folder through REST api
path = '/folder/%s/download' % str(folderId)
resp = self.request(path, isJson=False)
self.assertStatusOk(resp)
# Iterate through generator to trigger download events
for data in resp.body:
data
def _downloadItem(self, itemId):
# Download item through REST api
path = '/item/%s/download' % str(itemId)
resp = self.request(path, isJson=False)
self.assertStatusOk(resp)
# Iterate through generator to trigger download events
for data in resp.body:
data
def _downloadFile(self, fileId):
# Download file through REST api
path = '/file/%s/download' % str(fileId)
resp = self.request(path, isJson=False)
self.assertStatusOk(resp)
# Iterate through generator to trigger download events
for data in resp.body:
data
    def _checkDownloadsCount(self, fileId, started, requested, completed):
        """
        Fetch the file document via the REST API and assert that its recorded
        download statistics match the expected counters.
        """
        # Downloads file info and asserts download statistics are accurate
        path = '/file/%s' % str(fileId)
        resp = self.request(path, isJson=True)
        self.assertStatusOk(resp)
        data = resp.json

        # The generator is never iterated as to not trigger additional events
        self.assertEqual(data['downloadStatistics']['started'], started)
        self.assertEqual(data['downloadStatistics']['requested'], requested)
        self.assertEqual(data['downloadStatistics']['completed'], completed)
    def _downloadFileInTwoChunks(self, fileId):
        """
        Download a file over two ranged requests.
        """
        # Adds 1 to downloads started, 2 to requested, and 1 to completed
        # txt1.txt and txt2.txt each have a filesize of 5
        path = '/file/%s/download' % str(fileId)
        params = {
            'offset': 0,
            'endByte': 3
        }
        resp = self.request(path, method='GET', isJson=False, params=params)
        # Iterate through generator to trigger download events
        for data in resp.body:
            data

        # NOTE(review): endByte 6 exceeds the stated 5-byte file size;
        # presumably the server clamps it to EOF — confirm.
        params['offset'] = 3
        params['endByte'] = 6
        resp = self.request(path, method='GET', isJson=False, params=params)
        # Iterate through generator to trigger download events
        for data in resp.body:
            data
def _downloadPartialFile(self, fileId):
# Adds 1 to downloads started and 4 to downloads requested
# txt1.txt and txt2.txt each have a filesize of 5
path = '/file/%s/download' % str(fileId)
for i in range(1, 5):
params = {
'offset': i-1,
'endByte': i
}
resp = self.request(path, method='GET', isJson=False, params=params)
# Iterate through generator to trigger download events
for data in resp.body:
data
    def testDownload(self):
        """End-to-end check that every download path updates the counters.

        Builds a public collection/folder/item with two small text files,
        downloads them through each REST route (item, file, folder,
        ranged requests, whole collection, mime-filtered collection) and
        asserts the accumulated started/requested/completed statistics.
        """
        collection = Collection().createCollection('collection1', public=True)
        folder = Folder().createFolder(collection, 'folder1', parentType='collection', public=True)
        item = Item().createItem('item1', self.admin, folder)
        # Path to test files
        file1Path = os.path.join(self.filesDir, 'txt1.txt')
        file2Path = os.path.join(self.filesDir, 'txt2.txt')
        # Upload files to item; file2 gets a jpeg mime type so the
        # mime-filtered collection download below only matches it.
        with open(file1Path, 'rb') as fp:
            file1 = Upload().uploadFromFile(
                fp, os.path.getsize(file1Path), 'txt1.txt', parentType='item',
                parent=item, user=self.admin)
        with open(file2Path, 'rb') as fp:
            file2 = Upload().uploadFromFile(
                fp, os.path.getsize(file2Path), 'txt2.txt', mimeType='image/jpeg',
                parentType='item', parent=item, user=self.admin)
        # Download item and its files several times and ensure downloads are recorded
        # Each file is downloaded 10 times
        for n in range(0, 5):
            self._downloadItem(item['_id'])
            self._downloadFile(file1['_id'])
            self._downloadFile(file2['_id'])
        # Download each file 1 time by downloading parent folder
        self._downloadFolder(folder['_id'])
        # Download each file over 2 requests
        self._downloadFileInTwoChunks(file1['_id'])
        self._downloadFileInTwoChunks(file2['_id'])
        # Download each file partially, adding 1 to start and 4 to requested
        self._downloadPartialFile(file1['_id'])
        self._downloadPartialFile(file2['_id'])
        # Download entire collection
        # Each file is downloaded 1 additional time
        path = '/collection/%s/download' % collection['_id']
        resp = self.request(path, user=self.admin, isJson=False)
        # Iterate through generator to trigger download events
        for data in resp.body:
            data
        # Download collection filtered by mime type
        # file2 is downloaded one additional time
        path = '/collection/%s/download' % collection['_id']
        resp = self.request(path, user=self.admin, isJson=False, method='GET',
                            params={
                                'id': collection['_id'],
                                'mimeFilter': json.dumps(['image/jpeg'])
                            })
        # iterate through generator to trigger download events
        for data in resp.body:
            data
        self._checkDownloadsCount(file1['_id'], 14, 18, 13)
        self._checkDownloadsCount(file2['_id'], 15, 19, 14)
| apache-2.0 |
chromium/chromium | third_party/libxml/src/check-xml-test-suite.py | 13 | 9634 | #!/usr/bin/python
import sys
import time
import os
import string
sys.path.insert(0, "python")
import libxml2
test_nr = 0
test_succeed = 0
test_failed = 0
test_error = 0
#
# the testsuite description
#
CONF="xml-test-suite/xmlconf/xmlconf.xml"
LOG="check-xml-test-suite.log"
log = open(LOG, "w")
#
# Error and warning handlers
#
error_nr = 0
error_msg = ''
def errorHandler(ctx, str):
    """libxml2 error callback: count errors and accumulate their text.

    NOTE(review): the parameter shadows the builtin ``str``; left
    untouched to keep the code byte-identical.
    """
    global error_nr
    global error_msg
    error_nr = error_nr + 1
    # Cap the accumulated message at ~300 chars; each new output line is
    # prefixed with " >>" so the log is readable.
    if len(error_msg) < 300:
        if len(error_msg) == 0 or error_msg[-1] == '\n':
            error_msg = error_msg + " >>" + str
        else:
            error_msg = error_msg + str
libxml2.registerErrorHandler(errorHandler, None)
#warning_nr = 0
#warning = ''
#def warningHandler(ctx, str):
# global warning_nr
# global warning
#
# warning_nr = warning_nr + 1
# warning = warning + str
#
#libxml2.registerWarningHandler(warningHandler, None)
#
# Used to load the XML testsuite description
#
def loadNoentDoc(filename):
    """Parse an XML file with entity substitution enabled.

    Used to load the testsuite description itself.  Returns the parsed
    document, or None if the file could not be parsed or is not
    well formed.
    """
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return None
    ctxt.replaceEntities(1)
    ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    if ctxt.wellFormed() != 1:
        # Bug fix: doc may be None here (ctxt.doc() raised); the old code
        # unconditionally called doc.freeDoc() and crashed in that case.
        if doc != None:
            doc.freeDoc()
        return None
    return doc
#
# The conformance testing routines
#
def testNotWf(filename, id):
    """Run a 'not well-formed' test with no entity substitution.

    Returns 1 when the parser correctly rejects the document, 0 when it
    wrongly accepts it, -1 if no parser context could be created.
    """
    global error_nr
    global error_msg
    global log
    error_nr = 0
    error_msg = ''
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return -1
    ret = ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    if doc != None:
        doc.freeDoc()
    # The document must have failed to parse AND be flagged not-well-formed.
    if ret == 0 or ctxt.wellFormed() != 0:
        print "%s: error: Well Formedness error not detected" % (id)
        log.write("%s: error: Well Formedness error not detected\n" % (id))
        return 0
    return 1
def testNotWfEnt(filename, id):
    """Run a 'not well-formed' test with entity substitution enabled.

    Same contract as testNotWf(): 1 = parser correctly rejected the
    document, 0 = wrongly accepted, -1 = no parser context.
    """
    global error_nr
    global error_msg
    global log
    error_nr = 0
    error_msg = ''
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return -1
    # Substitute general entities so errors inside entities are exercised.
    ctxt.replaceEntities(1)
    ret = ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    if doc != None:
        doc.freeDoc()
    if ret == 0 or ctxt.wellFormed() != 0:
        print "%s: error: Well Formedness error not detected" % (id)
        log.write("%s: error: Well Formedness error not detected\n" % (id))
        return 0
    return 1
def testNotWfEntDtd(filename, id):
    """Run a 'not well-formed' test with entities AND external DTD loading.

    Same contract as testNotWf(): 1 = parser correctly rejected the
    document, 0 = wrongly accepted, -1 = no parser context.
    """
    global error_nr
    global error_msg
    global log
    error_nr = 0
    error_msg = ''
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return -1
    # Substitute entities and load the external subset so parameter-entity
    # and DTD-level errors are exercised too.
    ctxt.replaceEntities(1)
    ctxt.loadSubset(1)
    ret = ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    if doc != None:
        doc.freeDoc()
    if ret == 0 or ctxt.wellFormed() != 0:
        print "%s: error: Well Formedness error not detected" % (id)
        log.write("%s: error: Well Formedness error not detected\n" % (id))
        return 0
    return 1
def testWfEntDtd(filename, id):
    """Run a 'well-formed' test with entities and external DTD loading.

    Returns 1 when the document parses cleanly, 2 (warning) when it
    parses but still produced error output, 0 when parsing wrongly
    failed, -1 if no parser context could be created.
    """
    global error_nr
    global error_msg
    global log
    error_nr = 0
    error_msg = ''
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return -1
    ctxt.replaceEntities(1)
    ctxt.loadSubset(1)
    ret = ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    if doc == None or ret != 0 or ctxt.wellFormed() == 0:
        print "%s: error: wrongly failed to parse the document" % (id)
        log.write("%s: error: wrongly failed to parse the document\n" % (id))
        if doc != None:
            doc.freeDoc()
        return 0
    if error_nr != 0:
        print "%s: warning: WF document generated an error msg" % (id)
        # NOTE(review): printed severity says "warning" but the logged
        # line says "error" — looks like a copy/paste slip.
        log.write("%s: error: WF document generated an error msg\n" % (id))
        doc.freeDoc()
        return 2
    doc.freeDoc()
    return 1
def testError(filename, id):
global error_nr
global error_msg
global log
error_nr = 0
error_msg = ''
ctxt = libxml2.createFileParserCtxt(filename)
if ctxt == None:
return -1
ctxt.replaceEntities(1)
ctxt.loadSubset(1)
ret = ctxt.parseDocument()
try:
doc = ctxt.doc()
except:
doc = None
if doc != None:
doc.freeDoc()
if ctxt.wellFormed() == 0:
print "%s: warning: failed to parse the document but accepted" % (id)
log.write("%s: warning: failed to parse the document but accepte\n" % (id))
return 2
if error_nr != 0:
print "%s: warning: WF document generated an error msg" % (id)
log.write("%s: error: WF document generated an error msg\n" % (id))
return 2
return 1
def testInvalid(filename, id):
    """Run an 'invalid' test: the validator must reject the document.

    Returns 1 when invalidity is detected and reported, 2 (warning) when
    detected but no error message was emitted, 0 on failure, -1 if no
    parser context could be created.
    """
    global error_nr
    global error_msg
    global log
    error_nr = 0
    error_msg = ''
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return -1
    # Enable DTD validation for this parse.
    ctxt.validate(1)
    ret = ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    valid = ctxt.isValid()
    if doc == None:
        print "%s: error: wrongly failed to parse the document" % (id)
        log.write("%s: error: wrongly failed to parse the document\n" % (id))
        return 0
    if valid == 1:
        print "%s: error: Validity error not detected" % (id)
        log.write("%s: error: Validity error not detected\n" % (id))
        doc.freeDoc()
        return 0
    if error_nr == 0:
        print "%s: warning: Validity error not reported" % (id)
        log.write("%s: warning: Validity error not reported\n" % (id))
        doc.freeDoc()
        return 2
    doc.freeDoc()
    return 1
def testValid(filename, id):
    """Run a 'valid' test: the validator must accept the document.

    Returns 1 on success, 2 (warning) when the valid document still
    produced error output, 0 on failure, -1 if no parser context.
    """
    global error_nr
    global error_msg
    error_nr = 0
    error_msg = ''
    ctxt = libxml2.createFileParserCtxt(filename)
    if ctxt == None:
        return -1
    ctxt.validate(1)
    ctxt.parseDocument()
    try:
        doc = ctxt.doc()
    except:
        doc = None
    valid = ctxt.isValid()
    if doc == None:
        print "%s: error: wrongly failed to parse the document" % (id)
        log.write("%s: error: wrongly failed to parse the document\n" % (id))
        return 0
    if valid != 1:
        print "%s: error: Validity check failed" % (id)
        log.write("%s: error: Validity check failed\n" % (id))
        doc.freeDoc()
        return 0
    # NOTE(review): "valid != 1" below is redundant — the previous branch
    # already returned in that case; only error_nr matters here.
    if error_nr != 0 or valid != 1:
        print "%s: warning: valid document reported an error" % (id)
        log.write("%s: warning: valid document reported an error\n" % (id))
        doc.freeDoc()
        return 2
    doc.freeDoc()
    return 1
def runTest(test):
global test_nr
global test_succeed
global test_failed
global error_msg
global log
uri = test.prop('URI')
id = test.prop('ID')
if uri == None:
print "Test without ID:", uri
return -1
if id == None:
print "Test without URI:", id
return -1
base = test.getBase(None)
URI = libxml2.buildURI(uri, base)
if os.access(URI, os.R_OK) == 0:
print "Test %s missing: base %s uri %s" % (URI, base, uri)
return -1
type = test.prop('TYPE')
if type == None:
print "Test %s missing TYPE" % (id)
return -1
extra = None
if type == "invalid":
res = testInvalid(URI, id)
elif type == "valid":
res = testValid(URI, id)
elif type == "not-wf":
extra = test.prop('ENTITIES')
# print URI
#if extra == None:
# res = testNotWfEntDtd(URI, id)
#elif extra == 'none':
# res = testNotWf(URI, id)
#elif extra == 'general':
# res = testNotWfEnt(URI, id)
#elif extra == 'both' or extra == 'parameter':
res = testNotWfEntDtd(URI, id)
#else:
# print "Unknown value %s for an ENTITIES test value" % (extra)
# return -1
elif type == "error":
res = testError(URI, id)
else:
# TODO skipped for now
return -1
test_nr = test_nr + 1
if res > 0:
test_succeed = test_succeed + 1
elif res == 0:
test_failed = test_failed + 1
elif res < 0:
test_error = test_error + 1
# Log the ontext
if res != 1:
log.write(" File: %s\n" % (URI))
content = string.strip(test.content)
while content[-1] == '\n':
content = content[0:-1]
if extra != None:
log.write(" %s:%s:%s\n" % (type, extra, content))
else:
log.write(" %s:%s\n\n" % (type, content))
if error_msg != '':
log.write(" ----\n%s ----\n" % (error_msg))
error_msg = ''
log.write("\n")
return 0
def runTestCases(case):
    """Recursively run every <TEST> under a <TESTCASES> element.

    Prints a banner for each profile except the (very numerous) IBM
    production-coverage suites.
    """
    profile = case.prop('PROFILE')
    if profile != None and \
       string.find(profile, "IBM XML Conformance Test Suite - Production") < 0:
        print "=>", profile
    test = case.children
    while test != None:
        if test.name == 'TEST':
            runTest(test)
        if test.name == 'TESTCASES':
            # TESTCASES elements can nest arbitrarily deep.
            runTestCases(test)
        test = test.next
# Load the testsuite description (with entities substituted) and sanity
# check its root element before running anything.
conf = loadNoentDoc(CONF)
if conf == None:
    print "Unable to load %s" % CONF
    sys.exit(1)
testsuite = conf.getRootElement()
if testsuite.name != 'TESTSUITE':
    print "Expecting TESTSUITE root element: aborting"
    sys.exit(1)
profile = testsuite.prop('PROFILE')
if profile != None:
    print profile
start = time.time()
# Walk the top-level TESTCASES elements, reporting the per-suite delta
# of the global counters after each one.
case = testsuite.children
while case != None:
    if case.name == 'TESTCASES':
        old_test_nr = test_nr
        old_test_succeed = test_succeed
        old_test_failed = test_failed
        old_test_error = test_error
        runTestCases(case)
        print " Ran %d tests: %d succeeded, %d failed and %d generated an error" % (
	      test_nr - old_test_nr, test_succeed - old_test_succeed,
	      test_failed - old_test_failed, test_error - old_test_error)
    case = case.next
conf.freeDoc()
log.close()
# Final summary over the whole run.
print "Ran %d tests: %d succeeded, %d failed and %d generated an error in %.2f s." % (
      test_nr, test_succeed, test_failed, test_error, time.time() - start)
| bsd-3-clause |
oliver-sanders/cylc | cylc/flow/parsec/jinja2support.py | 1 | 9667 | # THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2019 NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""cylc support for the Jinja2 template processor
Importing code should catch ImportError in case Jinja2 is not installed.
"""
import importlib
import os
import pkgutil
import re
import sys
import traceback
from glob import glob
from jinja2 import (
BaseLoader,
ChoiceLoader,
Environment,
FileSystemLoader,
StrictUndefined,
TemplateNotFound,
TemplateSyntaxError)
from cylc.flow import LOG
from cylc.flow.parsec.exceptions import Jinja2Error
TRACEBACK_LINENO = re.compile(r'(\s+)?File "<template>", line (\d+)')
CONTEXT_LINES = 3
class PyModuleLoader(BaseLoader):
    """Load python module as Jinja2 template.

    This loader piggybacks on the jinja import mechanism and returns an
    empty template that exports the module's namespace: importing
    '__python__.some.module' in a template makes that module's names
    available as template variables.
    """
    # no source access for this loader
    has_source_access = False
    def __init__(self, prefix='__python__'):
        # Cache of already-built templates, keyed by module name.
        self._templates = {}
        # prefix that can be used to avoid name collisions with template files
        self._python_namespace_prefix = prefix + '.'
    # pylint: disable-msg=redefined-builtin
    def load(self, environment, name, globals=None):
        """Imports Python module and returns it as Jinja2 template.

        NOTE(review): the ``globals`` argument is part of the BaseLoader
        signature but is ignored here.
        """
        if name.startswith(self._python_namespace_prefix):
            name = name[len(self._python_namespace_prefix):]
        # Return the cached template if this module was imported before.
        try:
            return self._templates[name]
        except KeyError:
            pass
        try:
            mdict = __import__(name, fromlist=['*']).__dict__
        except ImportError:
            raise TemplateNotFound(name)
        # inject module dict into the context of an empty template
        def root_render_func(context, *args, **kwargs):
            """Template render function."""
            # The unreachable yield turns this into a generator, which is
            # what Jinja2 expects a render function to be.
            if False:
                yield None  # to make it a generator
            context.vars.update(mdict)
            context.exported_vars.update(mdict)
        templ = environment.from_string('')
        templ.root_render_func = root_render_func
        self._templates[name] = templ
        return templ
def raise_helper(message, error_type='Error'):
    """Jinja2 global that aborts template rendering with an exception."""
    # TODO - this more nicely
    detail = 'Jinja2 %s: %s' % (error_type, message)
    raise Exception(detail)
def assert_helper(logical, message):
    """Jinja2 global implementing assert(expr, msg) inside templates."""
    if logical:
        # Empty string so the expression leaves no text in the output.
        return ''
    raise_helper(message, 'Assertation Error')
def _load_jinja2_extensions():
"""
Load modules under the cylc.jinja package namespace.
Filters provided by third-party packages (i.e. user created packages) will
also be included if correctly put in the cylc.jinja.filters namespace.
Global variables are expected to be found in cylc.jinja.globals,
and jinja tests in cylc.jinja.tests.
The dictionary returned contains the full module name (e.g.
cylc.jinja.filters.pad), and the second value is the module
object (same object as in __import__("module_name")__).
:return: jinja2 filter modules
:rtype: dict[string, object]
"""
jinja2_extensions = {}
for module_name in [
"cylc.flow.jinja.filters",
"cylc.flow.jinja.globals",
"cylc.flow.jinja.tests"
]:
try:
module = importlib.import_module(module_name)
jinja2_filters_modules = pkgutil.iter_modules(
module.__path__, f"{module.__name__}.")
if jinja2_filters_modules:
namespace = module_name.split(".")[-1]
jinja2_extensions[namespace] = {
name.split(".")[-1]: importlib.import_module(name)
for finder, name, ispkg in jinja2_filters_modules
}
except ModuleNotFoundError:
# Nothing to do, we may start without any filters/globals/tests
pass
return jinja2_extensions
def jinja2environment(dir_=None):
    """Set up and return Jinja2 environment.

    Args:
        dir_: suite definition directory used as the template search
            root; defaults to the current working directory.

    Side effects: may insert suite-local filter directories into
    sys.path, and exposes os.environ plus the raise/assert helpers as
    template globals.
    """
    if dir_ is None:
        dir_ = os.getcwd()
    # Ignore bandit false positive: B701:jinja2_autoescape_false
    # This env is not used to render content that is vulnerable to XSS.
    env = Environment( # nosec
        loader=ChoiceLoader([FileSystemLoader(dir_), PyModuleLoader()]),
        undefined=StrictUndefined,
        extensions=['jinja2.ext.do'])
    # Load Jinja2 filters using setuptools
    for scope, extensions in _load_jinja2_extensions().items():
        # scope is "filters"/"globals"/"tests"; each maps onto the
        # corresponding Environment attribute dict.
        for fname, module in extensions.items():
            getattr(env, scope)[fname] = getattr(module, fname)
    # Load any custom Jinja2 filters, tests or globals in the suite
    # definition directory
    # Example: a filter to pad integer values some fill character:
    # |(file SUITE_DEFINITION_DIRECTORY/Jinja2/foo.py)
    # | #!/usr/bin/env python3
    # | def foo( value, length, fillchar ):
    # | return str(value).rjust( int(length), str(fillchar) )
    for namespace in ['filters', 'tests', 'globals']:
        nspdir = 'Jinja2' + namespace.capitalize()
        # Suite-local dir first, then the user's ~/.cylc fallback.
        fdirs = [
            os.path.join(dir_, nspdir),
            os.path.join(os.environ['HOME'], '.cylc', nspdir)
        ]
        for fdir in fdirs:
            if os.path.isdir(fdir):
                sys.path.insert(1, os.path.abspath(fdir))
                # Each foo.py must define a callable named foo.
                for name in glob(os.path.join(fdir, '*.py')):
                    fname = os.path.splitext(os.path.basename(name))[0]
                    # TODO - EXCEPTION HANDLING FOR LOADING CUSTOM FILTERS
                    module = __import__(fname)
                    envnsp = getattr(env, namespace)
                    envnsp[fname] = getattr(module, fname)
    # Import SUITE HOST USER ENVIRONMENT into template:
    # (usage e.g.: {{environ['HOME']}}).
    env.globals['environ'] = os.environ
    env.globals['raise'] = raise_helper
    env.globals['assert'] = assert_helper
    return env
def get_error_location():
    """Extract the template line number from the end of the current traceback.

    Returns:
        int: The line number, or None if no template frame was found.
    """
    # Scan from the deepest frame upwards: the last '<template>' entry is
    # the one closest to the actual failure.
    frames = traceback.format_exc().splitlines()
    for frame_line in reversed(frames):
        match = TRACEBACK_LINENO.match(frame_line)
        if match:
            return int(match.group(2))
    return None
def jinja2process(flines, dir_, template_vars=None):
    """Pass configure file through Jinja2 processor.

    Args:
        flines: the file as a list of lines; line 0 (the '#!jinja2'
            shebang) is excluded from the template.
        dir_: suite definition directory, used as template search root.
        template_vars: mapping of template variables, may be None/empty.

    Raises:
        Jinja2Error: wrapping any Jinja2 or rendering failure, with a
            few lines of source context attached where available.
    """
    # Load file lines into a template, excluding '#!jinja2' so that
    # '#!cylc-x.y.z' rises to the top. Callers should handle jinja2
    # TemplateSyntaxerror and TemplateError.
    if template_vars:
        LOG.debug(
            'Setting Jinja2 template variables:\n%s',
            '\n'.join(
                ['+ %s=%s' % item for item in sorted(template_vars.items())]))
    # Jinja2 render method requires a dictionary as argument (not None):
    if not template_vars:
        template_vars = {}
    # CALLERS SHOULD HANDLE JINJA2 TEMPLATESYNTAXERROR AND TEMPLATEERROR
    # AND TYPEERROR (e.g. for not using "|int" filter on number inputs.
    # Convert unicode to plain str, ToDo - still needed for parsec?)
    try:
        env = jinja2environment(dir_)
        template = env.from_string('\n'.join(flines[1:]))
        lines = str(template.render(template_vars)).splitlines()
    except TemplateSyntaxError as exc:
        filename = None
        # extract source lines
        # NOTE(review): if neither branch below runs (exc.lineno falsy),
        # 'lines' is unbound and the 'if lines:' check would raise
        # NameError — worth confirming whether that case can occur.
        if exc.lineno and exc.source and not exc.filename:
            # error in suite.rc or cylc include file
            lines = exc.source.splitlines()
        elif exc.lineno and exc.filename:
            # error in jinja2 include file
            filename = os.path.relpath(exc.filename, dir_)
            with open(exc.filename, 'r') as include_file:
                include_file.seek(max(exc.lineno - CONTEXT_LINES, 0), 0)
                lines = []
                for _ in range(CONTEXT_LINES):
                    lines.append(include_file.readline().splitlines()[0])
        if lines:
            # extract context lines from source lines
            lines = lines[max(exc.lineno - CONTEXT_LINES, 0):exc.lineno]
        raise Jinja2Error(exc, lines=lines, filename=filename)
    except Exception as exc:
        lineno = get_error_location()
        lines = None
        if lineno:
            lineno += 1 # shebang line ignored by jinja2
            lines = flines[max(lineno - CONTEXT_LINES, 0):lineno]
        raise Jinja2Error(exc, lines=lines)
    suiterc = []
    for line in lines:
        # Jinja2 leaves blank lines where source lines contain
        # only Jinja2 code; this matters if line continuation
        # markers are involved, so we remove blank lines here.
        if not line.strip():
            continue
        # restoring newlines here is only necessary for display by
        # the cylc view command:
        # ##suiterc.append(line + '\n')
        suiterc.append(line)
    return suiterc
| gpl-3.0 |
izonder/intellij-community | python/lib/Lib/gopherlib.py | 87 | 5709 | """Gopher protocol client interface."""
__all__ = ["send_selector","send_query"]
import warnings
warnings.warn("the gopherlib module is deprecated", DeprecationWarning,
stacklevel=2)
# Default selector, host and port
DEF_SELECTOR = '1/'
DEF_HOST = 'gopher.micro.umn.edu'
DEF_PORT = 70
# Recognized file types (single-character gopher item type codes)
A_TEXT = '0'
A_MENU = '1'
A_CSO = '2'
A_ERROR = '3'
A_MACBINHEX = '4'
A_PCBINHEX = '5'
A_UUENCODED = '6'
A_INDEX = '7'
A_TELNET = '8'
A_BINARY = '9'
A_DUPLICATE = '+'
A_SOUND = 's'
A_EVENT = 'e'
A_CALENDAR = 'c'
A_HTML = 'h'
A_TN3270 = 'T'
A_MIME = 'M'
A_IMAGE = 'I'
A_WHOIS = 'w'
A_QUERY = 'q'
A_GIF = 'g'
# NOTE(review): A_HTML is defined twice with the same value, and A_WWW
# below reuses A_WHOIS's code 'w'; type_to_name() builds its map by value,
# so for duplicated codes whichever A_* name is seen last in dir() order
# wins.
A_HTML = 'h' # HTML file
A_WWW = 'w' # WWW address
A_PLUS_IMAGE = ':'
A_PLUS_MOVIE = ';'
A_PLUS_SOUND = '<'
# Snapshot of module names, used by type_to_name() to find the A_* codes;
# the reverse map is filled in lazily on first use.
_names = dir()
_type_to_name_map = {}
def type_to_name(gtype):
    """Map all file types to strings; unknown types become TYPE='x'."""
    global _type_to_name_map
    # Lazily build the reverse map {code char: name} from the module's
    # A_* constants captured in _names at import time.
    if _type_to_name_map=={}:
        for name in _names:
            if name[:2] == 'A_':
                _type_to_name_map[eval(name)] = name[2:]
    if gtype in _type_to_name_map:
        return _type_to_name_map[gtype]
    return 'TYPE=%r' % (gtype,)
# Names for characters and strings
CRLF = '\r\n'
TAB = '\t'
def send_selector(selector, host, port = 0):
    """Send a selector to a given host and port, return a file with the reply.

    The host may carry an explicit port as "host:port"; otherwise
    DEF_PORT is used.  A string port is converted to an int.
    """
    import socket
    if not port:
        # Allow "host:port" notation.
        i = host.find(':')
        if i >= 0:
            host, port = host[:i], int(host[i+1:])
    if not port:
        port = DEF_PORT
    elif isinstance(port, str):
        # Idiom fix: isinstance() instead of type(port) == type('').
        port = int(port)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    # Selectors are CRLF-terminated; half-close the sending side so the
    # server sees EOF and streams back the reply.
    s.sendall(selector + CRLF)
    s.shutdown(1)
    return s.makefile('rb')
def send_query(selector, query, host, port = 0):
    """Send a search selector plus a query string; return the reply file."""
    # Gopher search requests are "<selector>\t<query>".
    return send_selector('%s\t%s' % (selector, query), host, port)
def path_to_selector(path):
    """Take a path as returned by urlparse; return the gopher selector."""
    # "/" is the root selector; otherwise drop the leading slash and the
    # one-character gopher data type identifier.
    return "/" if path == "/" else path[2:]
def path_to_datatype_name(path):
    """Map a urlparse path to a human-readable gopher type string.

    See section 3.4 of RFC 1738 for details.
    """
    if path == "/":
        # The root path carries no type char; "INDEX" is likely but unknown.
        return "TYPE='unknown'"
    return type_to_name(path[1])
# The following functions interpret the data returned by the gopher
# server according to the expected type, e.g. textfile or directory
def get_directory(f):
    """Get a directory in the form of a list of entries.

    Each entry is [gtype, display, selector, host, port, plus-flag];
    malformed lines are reported to stdout and skipped.
    """
    entries = []
    while 1:
        line = f.readline()
        if not line:
            print '(Unexpected EOF from server)'
            break
        # Strip the trailing CRLF (or lone CR/LF).
        if line[-2:] == CRLF:
            line = line[:-2]
        elif line[-1:] in CRLF:
            line = line[:-1]
        # A single '.' terminates the listing.
        if line == '.':
            break
        if not line:
            print '(Empty line from server)'
            continue
        # First char is the item type; the rest is tab-separated fields.
        gtype = line[0]
        parts = line[1:].split(TAB)
        if len(parts) < 4:
            print '(Bad line from server: %r)' % (line,)
            continue
        if len(parts) > 4:
            # A lone '+' fifth field is the gopher+ marker; anything else
            # is unexpected extra data.
            if parts[4:] != ['+']:
                print '(Extra info from server:',
                print parts[4:], ')'
        else:
            parts.append('')
        parts.insert(0, gtype)
        entries.append(parts)
    return entries
def get_textfile(f):
    """Return a text file as a list of lines, trailing CRLF stripped."""
    collected = []
    get_alt_textfile(f, collected.append)
    return collected
def get_alt_textfile(f, func):
    """Get a text file and pass each line to a function, with trailing CRLF stripped."""
    while 1:
        line = f.readline()
        if not line:
            print '(Unexpected EOF from server)'
            break
        # Strip the trailing CRLF (or lone CR/LF).
        if line[-2:] == CRLF:
            line = line[:-2]
        elif line[-1:] in CRLF:
            line = line[:-1]
        # A single '.' terminates the file.
        if line == '.':
            break
        # Lines starting with '..' are dot-escaped; undo the escaping.
        if line[:2] == '..':
            line = line[1:]
        func(line)
def get_binary(f):
    """Read a binary file and return its entire contents as one block."""
    return f.read()
def get_alt_binary(f, func, blocksize):
    """Read a binary file in blocksize chunks, passing each chunk to func."""
    while True:
        chunk = f.read(blocksize)
        if not chunk:
            break
        func(chunk)
def test():
    """Trivial test program.

    Usage: gopherlib.py [host [type-or-selector [selector [query]]]];
    fetches one gopher item and prints it according to its type.
    """
    import sys
    import getopt
    opts, args = getopt.getopt(sys.argv[1:], '')
    selector = DEF_SELECTOR
    type = selector[0]
    host = DEF_HOST
    # Positional arguments are consumed left to right.
    if args:
        host = args[0]
        args = args[1:]
    if args:
        type = args[0]
        args = args[1:]
        # A multi-char second arg is both the type (first char) and
        # the selector.
        if len(type) > 1:
            type, selector = type[0], type
        else:
            selector = ''
    if args:
        selector = args[0]
        args = args[1:]
    query = ''
    if args:
        query = args[0]
        args = args[1:]
    # Index searches need the query appended; everything else is a
    # plain selector fetch.
    if type == A_INDEX:
        f = send_query(selector, query, host)
    else:
        f = send_selector(selector, host)
    if type == A_TEXT:
        lines = get_textfile(f)
        for item in lines: print item
    elif type in (A_MENU, A_INDEX):
        entries = get_directory(f)
        for item in entries: print item
    else:
        data = get_binary(f)
        print 'binary data:', len(data), 'bytes:', repr(data[:100])[:40]
# Run the test when run as script
if __name__ == '__main__':
test()
| apache-2.0 |
cycomachead/info290 | lab3/q7.py | 1 | 2502 | from __future__ import print_function
from sklearn import cluster, metrics
from numpy import recfromcsv
import numpy as np
#from file_utils import reviewers
import csv
### utility functions
def na_rm(data):
    """Drop every row of a 2-D array that contains a NaN or an infinity."""
    finite_rows = data[~np.isnan(data).any(axis=1)]
    return finite_rows[~np.isinf(finite_rows).any(axis=1)]
def returnNaNs(data):
    """Return a list of the NaN entries found in a 1-D iterable."""
    return [value for value in data if np.isnan(value)]
# Load the pipe-delimited reviewer survey and extract the question
# columns used for clustering into a plain 2-D float array.
D = recfromcsv("../yelp_reviewers.txt", delimiter='|')
D7 = np.array(D[['q8', 'q9', 'q10', 'q11', 'q12', 'q13',
                 'q18_group2', 'q18_group3', 'q18_group5', 'q18_group6',
                 'q18_group7', 'q18_group11', 'q18_group13', 'q18_group14',
                 'q18_group15', 'q18_group16_a', 'q18_group16_b',
                 'q18_group16_c', 'q18_group16_d', 'q18_group16_e',
                 'q18_group16_f', 'q18_group16_g', 'q18_group16_h']].tolist())
def get_clustering(n, data):
    """Fit a KMeans model with n clusters to data and return the fit."""
    return cluster.KMeans(n_clusters=n).fit(data)
def pctNaN(col):
    """Return the fraction (0.0-1.0) of entries in col that are NaN.

    Bug fix: under Python 2 (this file only imports print_function, not
    division) the original int/int division truncated every partial
    fraction to 0, so the 50% threshold in preprocess() never fired.
    An empty column is treated as having no NaNs rather than dividing
    by zero.
    """
    if len(col) == 0:
        return 0.0
    return float(len(returnNaNs(col))) / len(col)
def preprocess(data):
    """Drop mostly-missing columns, then drop remaining bad rows.

    Columns where more than 50% of entries are NaN are removed; any row
    still containing a NaN or infinity is then discarded via na_rm().
    (Cleanup: removed the dead ``realCol`` counter and renamed ``row``,
    which actually held a column.)
    """
    i = 0
    while i < data.shape[1]:
        column = data[:, i]
        if pctNaN(column) > 0.50:
            # Third argument 1 tells np.delete to remove a column.
            data = np.delete(data, i, 1)
        else:
            i += 1
    return na_rm(data)
def question7b(data):
    """Write within-cluster variance (KMeans inertia) for k=2..8.

    Output goes to 'q7b.feature' as CSV; failures for a given k are
    reported to stdout and skipped.
    """
    with open('q7b.feature', 'w+') as f:
        file_writer = csv.writer(f)
        file_writer.writerow(['num_clusters', 'sum_win_var_clust'])
        for i in range(2, 9):
            try:
                clustering = get_clustering(i, data)
                # inertia_ is the sum of squared distances to centroids.
                file_writer.writerow([i, clustering.inertia_])
            except Exception as e:
                print(str(i) + " clusters had a problem:")
                print(e)
def question7a(data):
    """Write silhouette coefficients for k=2..8 clusterings to 'q7a.feature'.

    Bug fix: the original referenced ``cluster_fits`` and ``silhouettes``
    which were never defined anywhere, so every iteration raised
    NameError (silently swallowed by the except clause) and no data rows
    were ever written.  They are now local dicts, preserving the
    apparent intent of keeping the fits around per k.
    """
    cluster_fits = {}
    silhouettes = {}
    with open('q7a.feature', 'w+') as f:
        file_writer = csv.writer(f)
        file_writer.writerow(['num_clusters', 'silhouette_coeff'])
        for i in range(2, 9):
            try:
                clustering = get_clustering(i, data)
                cluster_fits[i] = clustering
                # Subsample to keep the pairwise-distance cost bounded.
                m = metrics.silhouette_score(data, clustering.labels_,
                                             metric='euclidean',
                                             sample_size=10000)
                silhouettes[i] = m
                file_writer.writerow([i, m])
            except Exception as e:
                print(str(i) + " clusters had a problem:")
                print(e)
# Clean the feature matrix, then generate both deliverable files.
D7 = preprocess(D7)
question7a(D7)
question7b(D7)
| bsd-2-clause |
quanvm009/codev7 | openerp/addons/hr_attendance/res_config.py | 434 | 1406 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class hr_attendance_config_settings(osv.osv_memory):
    """HR settings extension adding the attendance-tracking toggle."""
    _inherit = 'hr.config.settings'
    _columns = {
        # Checkbox on the HR settings form; enabling it puts all users in
        # the base attendance group via implied_group.
        'group_hr_attendance': fields.boolean('Track attendances for all employees',
            implied_group='base.group_hr_attendance',
            help="Allocates attendance group to all users."),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
johnfraney/carrot | migrations/0001_initial.py | 1 | 1059 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the QueuedTask table for carrot."""
    # First migration of the app, so nothing to depend on.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='QueuedTask',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('app', models.CharField(max_length=255)),
                # Serialized task payload.
                ('data', models.CharField(max_length=5000)),
                ('user_id', models.IntegerField(null=True, blank=True)),
                ('status', models.CharField(max_length=255, null=True, blank=True)),
                ('completed', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_completed', models.DateTimeField(null=True, blank=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| gpl-2.0 |
pridemusvaire/youtube-dl | youtube_dl/extractor/svt.py | 113 | 3985 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
)
class SVTBaseIE(InfoExtractor):
    """Shared extraction logic for SVT's JSON video endpoints."""
    def _extract_video(self, url, video_id):
        """Fetch the JSON description at url and build an info dict."""
        info = self._download_json(url, video_id)
        title = info['context']['title']
        thumbnail = info['context'].get('thumbnailImage')
        video_info = info['video']
        formats = []
        # Each video reference is one delivery method (HLS, HDS, direct).
        for vr in video_info['videoReferences']:
            vurl = vr['url']
            ext = determine_ext(vurl)
            if ext == 'm3u8':
                formats.extend(self._extract_m3u8_formats(
                    vurl, video_id,
                    ext='mp4', entry_protocol='m3u8_native',
                    m3u8_id=vr.get('playerType')))
            elif ext == 'f4m':
                # hdcore param is required for Adobe HDS manifests.
                formats.extend(self._extract_f4m_formats(
                    vurl + '?hdcore=3.3.0', video_id,
                    f4m_id=vr.get('playerType')))
            else:
                formats.append({
                    'format_id': vr.get('playerType'),
                    'url': vurl,
                })
        self._sort_formats(formats)
        duration = video_info.get('materialLength')
        age_limit = 18 if video_info.get('inappropriateForChildren') else 0
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': thumbnail,
            'duration': duration,
            'age_limit': age_limit,
        }
class SVTIE(SVTBaseIE):
    """Extractor for svt.se widget embeds (wd?widgetId=...&articleId=...)."""
    _VALID_URL = r'https?://(?:www\.)?svt\.se/wd\?(?:.*?&)?widgetId=(?P<widget_id>\d+)&.*?\barticleId=(?P<id>\d+)'
    # NOTE(review): the "§ionId" below looks like "&sectionId" mangled by
    # an HTML-entity decoding pass — confirm against upstream before use.
    _TEST = {
        'url': 'http://www.svt.se/wd?widgetId=23991§ionId=541&articleId=2900353&type=embed&contextSectionId=123&autostart=false',
        'md5': '9648197555fc1b49e3dc22db4af51d46',
        'info_dict': {
            'id': '2900353',
            'ext': 'flv',
            'title': 'Här trycker Jagr till Giroux (under SVT-intervjun)',
            'duration': 27,
            'age_limit': 0,
        },
    }
    @staticmethod
    def _extract_url(webpage):
        """Return the first embedded SVT widget URL found in webpage, if any."""
        mobj = re.search(
            r'(?:<iframe src|href)="(?P<url>%s[^"]*)"' % SVTIE._VALID_URL, webpage)
        if mobj:
            return mobj.group('url')
    def _real_extract(self, url):
        # Rebuild the widget URL in JSON output mode and delegate to the
        # shared base-class extraction.
        mobj = re.match(self._VALID_URL, url)
        widget_id = mobj.group('widget_id')
        article_id = mobj.group('id')
        return self._extract_video(
            'http://www.svt.se/wd?widgetId=%s&articleId=%s&format=json&type=embed&output=json' % (widget_id, article_id),
            article_id)
class SVTPlayIE(SVTBaseIE):
    """Extractor for svtplay.se and oppetarkiv.se video pages."""
    IE_DESC = 'SVT Play and Öppet arkiv'
    _VALID_URL = r'https?://(?:www\.)?(?P<host>svtplay|oppetarkiv)\.se/video/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://www.svtplay.se/video/2609989/sm-veckan/sm-veckan-rally-final-sasong-1-sm-veckan-rally-final',
        'md5': 'ade3def0643fa1c40587a422f98edfd9',
        'info_dict': {
            'id': '2609989',
            'ext': 'flv',
            'title': 'SM veckan vinter, Örebro - Rally, final',
            'duration': 4500,
            'thumbnail': 're:^https?://.*[\.-]jpg$',
            'age_limit': 0,
        },
    }, {
        'url': 'http://www.oppetarkiv.se/video/1058509/rederiet-sasong-1-avsnitt-1-av-318',
        'md5': 'c3101a17ce9634f4c1f9800f0746c187',
        'info_dict': {
            'id': '1058509',
            'ext': 'flv',
            'title': 'Farlig kryssning',
            'duration': 2566,
            'thumbnail': 're:^https?://.*[\.-]jpg$',
            'age_limit': 0,
        },
        'skip': 'Only works from Sweden',
    }]
    def _real_extract(self, url):
        # Both hosts expose the same JSON endpoint; delegate to the base.
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        host = mobj.group('host')
        return self._extract_video(
            'http://www.%s.se/video/%s?output=json' % (host, video_id),
            video_id)
| unlicense |
ihsanudin/odoo | addons/stock_dropshipping/__openerp__.py | 260 | 2037 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 OpenERP S.A. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest: evaluated by the module loader as a dict
# literal, not imported as Python code.
{
    'name': 'Drop Shipping',
    'version': '1.0',
    'category': 'Warehouse Management',
    'summary': 'Drop Shipping',
    'description': """
Manage drop shipping orders
===========================
This module adds a pre-configured Drop Shipping picking type
as well as a procurement route that allow configuring Drop
Shipping products and orders.
When drop shipping is used the goods are directly transferred
from suppliers to customers (direct delivery) without
going through the retailer's warehouse. In this case no
internal transfer document is needed.
""",
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/warehouse',
    'depends': ['purchase', 'sale_stock'],
    'data': ['stock_dropshipping.xml'],
    # YAML scenario tests run by the legacy test framework.
    'test': [
        'test/cancellation_propagated.yml',
        'test/crossdock.yml',
        'test/dropship.yml',
        'test/procurementexception.yml',
        'test/lifo_price.yml'
    ],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mlufei/depot_tools | third_party/gsutil/gslib/addlhelp/metadata.py | 51 | 8027 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HelpProvider
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
_detailed_help_text = ("""
<B>OVERVIEW OF METADATA</B>
Objects can have associated metadata, which control aspects of how
GET requests are handled, including Content-Type, Cache-Control,
Content-Disposition, and Content-Encoding (discussed in more detail in
the subsections below). In addition, you can set custom metadata that
can be used by applications (e.g., tagging that particular objects possess
some property).
There are two ways to set metadata on objects:
- at upload time you can specify one or more headers to associate with
objects, using the gsutil -h option. For example, the following command
would cause gsutil to set the Content-Type and Cache-Control for each
of the files being uploaded:
gsutil -h "Content-Type:text/html" -h "Cache-Control:public, max-age=3600" cp -r images gs://bucket/images
Note that -h is an option on the gsutil command, not the cp sub-command.
- You can set or remove metadata fields from already uploaded objects using
the gsutil setmeta command. See "gsutil help setmeta".
More details about specific pieces of metadata are discussed below.
<B>CONTENT TYPE</B>
The most commonly set metadata is Content-Type (also known as MIME type),
which allows browsers to render the object properly.
gsutil sets the Content-Type
automatically at upload time, based on each filename extension. For
example, uploading files with names ending in .txt will set Content-Type
to text/plain. If you're running gsutil on Linux or MacOS and would prefer
to have content type set based on naming plus content examination, see the
use_magicfile configuration variable in the gsutil/boto configuration file
(See also "gsutil help config"). In general, using use_magicfile is more
robust and configurable, but is not available on Windows.
If you specify a -h header when uploading content (like the example gsutil
command given in the previous section), it overrides the Content-Type that
would have been set based on filename extension or content. This can be
useful if the Content-Type detection algorithm doesn't work as desired
for some of your files.
You can also completely suppress content type detection in gsutil, by
specifying an empty string on the Content-Type header:
gsutil -h 'Content-Type:' cp -r images gs://bucket/images
In this case, the Google Cloud Storage service will attempt to detect
the content type. In general this approach will work better than using
filename extension-based content detection in gsutil, because the list of
filename extensions is kept more current in the server-side content detection
system than in the Python library upon which gsutil content type detection
depends. (For example, at the time of writing this, the filename extension
".webp" was recognized by the server-side content detection system, but
not by gsutil.)
<B>CACHE-CONTROL</B>
Another commonly set piece of metadata is Cache-Control, which allows
you to control whether and for how long browser and Internet caches are
allowed to cache your objects. Cache-Control only applies to objects with
a public-read ACL. Non-public data are not cacheable.
Here's an example of uploading an object set to allow caching:
gsutil -h "Cache-Control:public,max-age=3600" cp -a public-read -r html gs://bucket/html
This command would upload all files in the html directory (and subdirectories)
and make them publicly readable and cacheable, with cache expiration of
one hour.
Note that if you allow caching, at download time you may see older versions
of objects after uploading a newer replacement object. Note also that because
objects can be cached at various places on the Internet there is no way to
force a cached object to expire globally (unlike the way you can force your
browser to refresh its cache).
<B>CONTENT-ENCODING</B>
You could specify Content-Encoding to indicate that an object is compressed,
using a command like:
gsutil -h "Content-Encoding:gzip" cp *.gz gs://bucket/compressed
Note that Google Cloud Storage does not compress or decompress objects. If
you use this header to specify a compression type or compression algorithm
(for example, deflate), Google Cloud Storage preserves the header but does
not compress or decompress the object. Instead, you need to ensure that
the files have been compressed using the specified Content-Encoding before
using gsutil to upload them.
For compressible content, using Content-Encoding:gzip saves network and
storage costs, and improves content serving performance (since most browsers
are able to decompress objects served this way).
Note also that gsutil provides an easy way to cause content to be compressed
and stored with Content-Encoding:gzip: see the -z option in "gsutil help cp".
<B>CONTENT-DISPOSITION</B>
You can set Content-Disposition on your objects, to specify presentation
information about the data being transmitted. Here's an example:
gsutil -h 'Content-Disposition:attachment; filename=filename.ext' \\
cp -r attachments gs://bucket/attachments
Setting the Content-Disposition allows you to control presentation style
of the content, for example determining whether an attachment should be
automatically displayed vs should require some form of action from the user to
open it. See http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1
for more details about the meaning of Content-Disposition.
<B>CUSTOM METADATA</B>
You can add your own custom metadata (e.g,. for use by your application)
to an object by setting a header that starts with "x-goog-meta", for example:
gsutil -h x-goog-meta-reviewer:jane cp mycode.java gs://bucket/reviews
You can add multiple differently named custom metadata fields to each object.
<B>SETTABLE FIELDS; FIELD VALUES</B>
You can't set some metadata fields, such as ETag and Content-Length. The
fields you can set are:
- Cache-Control
- Content-Disposition
- Content-Encoding
- Content-Language
- Content-MD5
- Content-Type
- Any field starting with X-GOOG-META- (i.e., custom metadata).
Header names are case-insensitive.
X-GOOG-META- fields can have data set to arbitrary Unicode values. All
other fields must have ASCII values.
<B>VIEWING CURRENTLY SET METADATA</B>
You can see what metadata is currently set on an object by using:
gsutil ls -L gs://the_bucket/the_object
""")
class CommandOptions(HelpProvider):
  """Additional help about object metadata."""

  # help_spec follows the HelpProvider contract: the keys below are the
  # field constants imported from gslib.help_provider at the top of this file.
  help_spec = {
    # Name of command or auxiliary help info for which this help applies.
    HELP_NAME : 'metadata',
    # List of help name aliases under which "gsutil help" also finds this topic.
    HELP_NAME_ALIASES : ['cache-control', 'caching', 'content type',
                         'mime type', 'mime', 'type'],
    # Type of help: auxiliary topic, not attached to a specific command.
    HELP_TYPE : HelpType.ADDITIONAL_HELP,
    # One line summary of this help, shown in the help topic listing.
    HELP_ONE_LINE_SUMMARY : 'Working with object metadata',
    # The full help text (module-level constant defined above).
    HELP_TEXT : _detailed_help_text,
  }
| bsd-3-clause |
scholer/cadnano2.5 | cadnano/views/propertyview/abstractproppartitem.py | 2 | 1440 | # -*- coding: utf-8 -*-
from typing import Any
from cadnano.views.abstractitems import AbstractPartItem
from .cnpropertyitem import CNPropertyItem
from cadnano.cntypes import (
PartT
)
class AbstractPropertyPartSetItem(CNPropertyItem, AbstractPartItem):
    """Property-view item backed by a set of model parts.

    Receives part-level model signals and mirrors them onto the property
    tree item: property changes update the displayed value, selection
    changes update the item's selected state, and part removal tears down
    the signal controllers.
    """

    def __init__(self, **kwargs):
        """Forward all keyword arguments (model part, parent item, key, ...)
        unchanged to the CNPropertyItem initializer.
        """
        super().__init__(**kwargs)

    ### SLOTS ###
    def partRemovedSlot(self, part: PartT):
        """Disconnect all signal controllers when *part* is removed.

        Args:
            part: model object that emitted the signal.
        """
        for ctrl in self._controller_list:
            ctrl.disconnectSignals()
        self._controller_list = []

    def partPropertyChangedSlot(self, part: PartT, key: str, new_value: Any):
        """Refresh the displayed value for *key* if the change concerns
        one of the parts this item tracks.

        Args:
            part: the model part that changed.
            key: name of the changed property.
            new_value: the property's new value.
        """
        if part not in self.part_set:
            return
        self.setValue(key, new_value)

    def partSelectedChangedSlot(self, part: PartT, is_selected: bool):
        """Mirror the model part's selection state onto this item.

        Args:
            part: the model part whose selection changed.
            is_selected: whether the part is now selected.
        """
        self.setSelected(is_selected)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.