text
stringlengths 4
1.02M
| meta
dict |
|---|---|
"""DocstringChecker is used to check python doc string's style."""
import astroid
from pylint.checkers import BaseChecker
from pylint.interfaces import IAstroidChecker
from collections import defaultdict
import re
def register(linter):
    """Register this module's checker with pylint.

    Called by pylint when the module is loaded as a plugin.

    Args:
        linter (pylint.lint.PyLinter): the linter to attach DocstringChecker to.
    """
    linter.register_checker(DocstringChecker(linter))
class Docstring:
    """Docstring holds the section contents parsed from a doc string."""

    def __init__(self):
        # Section name -> list of stripped content lines.
        self.d = defaultdict(list)
        self.clear()

    def clear(self):
        """Reset every known section and the parsed argument map."""
        for name in ('Args', 'Examples', 'Returns', 'Raises'):
            self.d[name] = []
        # Argument name -> "(type)" string extracted from the Args section.
        self.args = {}

    def get_level(self, string, indent=' '):
        """Return how many leading `indent` units prefix `string`."""
        depth = 0
        unit = len(indent)
        while string.startswith(indent):
            string = string[unit:]
            depth += 1
        return depth

    def parse(self, doc):
        """Split `doc` into Args/Returns/Raises/Examples sections.

        Args:
            doc (string): is the astroid node doc string.

        Returns:
            True if doc is parsed successfully.
        """
        self.clear()
        section, base_level = "others", -1
        for raw_line in doc.splitlines():
            content = raw_line.strip()
            if not content:
                continue
            level = self.get_level(raw_line)
            for header in ("Args:", "Returns:", "Raises:", "Examples:"):
                if content.startswith(header):
                    section, base_level = header[:-1], level
                    break
            else:
                if level > base_level:
                    # Still inside the current section's indented body.
                    self.d[section].append(content)
                else:
                    # Dedented past the section: back to free-form text.
                    section, base_level = "others", -1
                    self.d[section].append(content)
        self._arg_with_type()
        return True

    def get_returns(self):
        """Return the lines collected under the `Returns` section."""
        return self.d['Returns']

    def get_raises(self):
        """Return the lines collected under the `Raises` section."""
        return self.d['Raises']

    def get_examples(self):
        """Return the lines collected under the `Examples` section."""
        return self.d['Examples']

    def _arg_with_type(self):
        """Populate self.args from `name (type):` patterns in Args lines."""
        for line in self.d['Args']:
            match = re.search(r'([A-Za-z0-9_-]+)\s{0,4}(\(.+\))\s{0,4}:', line)
            if match:
                self.args[match.group(1)] = match.group(2)
        return self.args
class DocstringChecker(BaseChecker):
    """DocstringChecker is a pylint checker that enforces docstring style.

    Short (< 40 chars) docstrings must fit on one line and end with a
    period; longer functions must document all arguments and, when they
    return or raise, carry `Returns` / `Raises` sections, using 4-space
    indentation throughout.
    """

    __implements__ = (IAstroidChecker,)

    POSITIONAL_MESSAGE_ID = 'str-used-on-positional-format-argument'
    KEYWORD_MESSAGE_ID = 'str-used-on-keyword-format-argument'

    name = 'doc-string-checker'
    symbol = "doc-string"
    priority = -1
    msgs = {
        'W9001': (
            'One line doc string on > 1 lines',
            symbol + "-one-line",
            'Used when a short doc string is on multiple lines',
        ),
        'W9002': (
            'Doc string does not end with "." period',
            symbol + "-end-with",
            'Used when a doc string does not end with a period',
        ),
        'W9003': (
            'All args with their types must be mentioned in doc string %s',
            symbol + "-with-all-args",
            'Used when not all arguments are in the doc string ',
        ),
        'W9005': (
            'Missing docstring or docstring is too short',
            symbol + "-missing",
            'Add docstring longer >=10',
        ),
        'W9006': (
            'Docstring indent error, use 4 space for indent',
            symbol + "-indent-error",
            'Use 4 space for indent',
        ),
        'W9007': (
            'You should add `Returns` in comments',
            symbol + "-with-returns",
            'There should be a `Returns` section in comments',
        ),
        'W9008': (
            'You should add `Raises` section in comments',
            symbol + "-with-raises",
            'There should be a `Raises` section in comments',
        ),
    }
    options = ()

    def visit_functiondef(self, node):
        """Check a function/method node's docstring style.

        Args:
            node (astroid.node): The visiting node.

        Returns:
            True if the node is exempt from the section checks.
        """
        self.check_doc_string(node)
        # Short functions (<= 10 lines) are exempt from the section checks.
        if node.tolineno - node.fromlineno <= 10:
            return True
        if not node.doc:
            return True
        doc = Docstring()
        doc.parse(node.doc)
        self.all_args_in_doc(node, doc)
        self.with_returns(node, doc)
        self.with_raises(node, doc)

    def visit_module(self, node):
        """Check a module node's docstring style."""
        self.check_doc_string(node)

    def visit_classdef(self, node):
        """Check a class node's docstring style."""
        self.check_doc_string(node)

    def check_doc_string(self, node):
        """Run the basic docstring checks shared by all node types."""
        self.missing_doc_string(node)
        self.one_line(node)
        self.has_period(node)
        self.indent_style(node)

    def missing_doc_string(self, node):
        """Emit W9005 when a long public node lacks a usable docstring.

        Args:
            node (astroid.node): The visiting node.

        Returns:
            True if no message was emitted, otherwise False.
        """
        # startswith("_") also covers dunder ("__") names, so a single
        # check suffices.
        if node.name.startswith("_"):
            return True
        if node.tolineno - node.fromlineno <= 10:
            return True
        if node.doc is None or len(node.doc) < 10:
            self.add_message('W9005', node=node, line=node.fromlineno)
            return False
        return True

    # FIXME(gongwb): give the docstring line-no
    def indent_style(self, node, indent=4):
        """indent_style checks docstring's indent style.

        Args:
            node (astroid.node): The visiting node.
            indent (int): The expected indent unit, in spaces.

        Returns:
            True if successful otherwise False.
        """
        if node.doc is None:
            return True
        lines = node.doc.splitlines()
        # Skip the first line: it follows the opening quotes and carries no
        # leading indentation.  (BUG FIX: the original loop `continue`d
        # before ever incrementing its counter, so every line was skipped
        # and this check was a no-op.)
        for line in lines[1:]:
            cur_indent = len(line) - len(line.lstrip())
            if cur_indent % indent != 0:
                self.add_message('W9006', node=node, line=node.fromlineno)
                return False
        return True

    def one_line(self, node):
        """one_line checks if a short docstring (len < 40) is on one line.

        Args:
            node (astroid.node): The node visiting.

        Returns:
            True if successful otherwise False.
        """
        doc = node.doc
        if doc is None:
            return True
        if len(doc) > 40:
            return True
        # Equivalent to the original "sum of find() == -3" trick: a short
        # docstring must contain no line breaks at all.
        if '\n' not in doc and '\r' not in doc:
            return True
        self.add_message('W9001', node=node, line=node.fromlineno)
        return False

    def has_period(self, node):
        """has_period checks if a one-line docstring ends with '.'.

        Args:
            node (astroid.node): the node is visiting.

        Returns:
            True if successful otherwise False.
        """
        if node.doc is None:
            return True
        if len(node.doc.splitlines()) > 1:
            return True
        if not node.doc.strip().endswith('.'):
            self.add_message('W9002', node=node, line=node.fromlineno)
            return False
        return True

    def with_raises(self, node, doc):
        """with_raises checks that raising functions document a `Raises` section.

        Only `raise` statements at the top level of the body are considered;
        raises nested inside conditionals or loops are not detected.

        Args:
            node (astroid.node): the node is visiting.
            doc (Docstring): Docstring object.

        Returns:
            True if successful otherwise False.
        """
        if not any(isinstance(stmt, astroid.Raise) for stmt in node.body):
            return True
        if len(doc.get_raises()) == 0:
            self.add_message('W9008', node=node, line=node.fromlineno)
            return False
        return True

    def with_returns(self, node, doc):
        """with_returns checks that returning functions document a `Returns` section.

        Only `return` statements at the top level of the body are considered.

        Args:
            node (astroid.node): the node is visiting.
            doc (Docstring): Docstring object.

        Returns:
            True if successful otherwise False.
        """
        # Private / dunder functions are exempt.
        if node.name.startswith("_"):
            return True
        if not any(isinstance(stmt, astroid.Return) for stmt in node.body):
            return True
        if len(doc.get_returns()) == 0:
            self.add_message('W9007', node=node, line=node.fromlineno)
            return False
        return True

    def all_args_in_doc(self, node, doc):
        """all_args_in_doc checks if all arguments are mentioned in the doc.

        Args:
            node (astroid.node): the node is visiting.
            doc (Docstring): Docstring object.

        Returns:
            True if successful otherwise False.
        """
        # Private / dunder functions are exempt.
        if node.name.startswith("_"):
            return True
        args = [
            arg.name for arg in node.args.get_children()
            if isinstance(arg, astroid.AssignName) and arg.name != "self"
        ]
        if not args:
            return True
        parsed_args = doc.args
        args_not_documented = set(args) - set(parsed_args)
        if not parsed_args:
            # Nothing documented at all: report every missing argument.
            self.add_message(
                'W9003',
                node=node,
                line=node.fromlineno,
                args=list(args_not_documented),
            )
            return False
        for name in args:
            if name not in parsed_args:
                self.add_message(
                    'W9003',
                    node=node,
                    line=node.fromlineno,
                    args=[name],
                )
                return False
        return True
|
{
"content_hash": "4c008303a9a20133f6fe70d1e3f0b5fb",
"timestamp": "",
"source": "github",
"line_count": 357,
"max_line_length": 79,
"avg_line_length": 28.11764705882353,
"alnum_prop": 0.511655708308428,
"repo_name": "luotao1/Paddle",
"id": "8deeff77348f476e7e5d29216ae0de298beba212",
"size": "10650",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tools/codestyle/docstring_checker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
}
|
"""Test the collection module."""
import json
import os
import re
import sys
sys.path[0:0] = [""]
from bson.py3compat import iteritems
from pymongo.command_cursor import CommandCursor
from pymongo.cursor import Cursor
from pymongo.results import _WriteResult
from test import unittest, client_context, IntegrationTest
# Location of the JSON CRUD test specifications: the 'crud' directory
# that sits beside this file.
_TEST_PATH = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), 'crud')
class TestAllScenarios(IntegrationTest):
    """Empty container; scenario test methods are attached dynamically."""
    pass
def camel_to_snake(camel):
    """Convert a CamelCase identifier to snake_case."""
    # First pass: split "Xyz"-shaped words off whatever precedes them.
    partially_converted = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel)
    # Second pass: split remaining lower/digit -> upper boundaries, lowercase all.
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', partially_converted).lower()
def check_result(expected_result, result):
    """Compare an operation's result against the scenario's expected result.

    Args:
        expected_result: the 'result' value from the scenario outcome
            (a list for cursors, a dict of properties for write results).
        result: the value returned by the pymongo collection method.

    Returns:
        True when the result matches the expectation.
    """
    if isinstance(result, Cursor) or isinstance(result, CommandCursor):
        return list(result) == expected_result
    elif isinstance(result, _WriteResult):
        # BUG FIX: the original returned inside the loop, so only the first
        # expected property was ever compared; now every property must match.
        for prop_name in expected_result:
            prop = camel_to_snake(prop_name)
            if getattr(result, prop) != expected_result[prop_name]:
                return False
        return True
    else:
        if not expected_result:
            # An empty/absent expectation means the operation returns None.
            return result is None
        else:
            return result == expected_result
def create_test(scenario_def, test, ignore_result):
    """Build a test method for one CRUD scenario.

    Args:
        scenario_def: parsed JSON scenario document; its 'data' key seeds
            the test collection.
        test: one entry of scenario_def['tests'] describing the operation,
            its arguments, and the expected outcome.
        ignore_result: skip the operation-result assertion (used for the
            $out aggregation test, whose return value is not comparable).

    Returns:
        A function suitable for attaching to TestAllScenarios.
    """
    def run_scenario(self):
        # Load data.
        assert scenario_def['data'], "tests must have non-empty data"
        self.db.test.drop()
        self.db.test.insert_many(scenario_def['data'])

        # Convert command from CamelCase to pymongo.collection method.
        operation = camel_to_snake(test['operation']['name'])
        cmd = getattr(self.db.test, operation)

        # Convert arguments to snake_case and handle special cases.
        arguments = test['operation']['arguments']
        for arg_name in list(arguments):
            c2s = camel_to_snake(arg_name)
            # PyMongo accepts sort as list of tuples. Asserting len=1
            # because ordering dicts from JSON in 2.6 is unwieldy.
            if arg_name == "sort":
                sort_dict = arguments[arg_name]
                assert len(sort_dict) == 1, 'test can only have 1 sort key'
                arguments[arg_name] = list(iteritems(sort_dict))
            # Named "key" instead not fieldName.
            if arg_name == "fieldName":
                arguments["key"] = arguments.pop(arg_name)
            # Aggregate uses "batchSize", while find uses batch_size.
            elif arg_name == "batchSize" and operation == "aggregate":
                continue
            # Requires boolean returnDocument.
            elif arg_name == "returnDocument":
                # BUG FIX: pop the CamelCase key so it is not also passed
                # to cmd() alongside the snake_case one.
                arguments[c2s] = arguments.pop(arg_name) == "After"
            else:
                arguments[c2s] = arguments.pop(arg_name)

        result = cmd(**arguments)

        # Assert result is expected, excluding the $out aggregation test.
        # BUG FIX: check_result's return value was previously discarded,
        # so a result mismatch could never fail the test.
        if not ignore_result:
            self.assertTrue(
                check_result(test['outcome'].get('result'), result))

        # Assert final state is expected.
        expected_c = test['outcome'].get('collection')
        if expected_c is not None:
            expected_name = expected_c.get('name')
            if expected_name is not None:
                db_coll = self.db[expected_name]
            else:
                db_coll = self.db.test
            self.assertEqual(list(db_coll.find()), expected_c['data'])

    return run_scenario
def create_tests():
    """Walk _TEST_PATH, build a test per scenario, attach to TestAllScenarios."""
    for dirpath, _, filenames in os.walk(_TEST_PATH):
        suite = os.path.split(dirpath)[-1]
        for filename in filenames:
            with open(os.path.join(dirpath, filename)) as scenario_stream:
                scenario_def = json.load(scenario_stream)
            test_type = os.path.splitext(filename)[0]
            # Construct one test method per scenario entry.
            for test in scenario_def['tests']:
                desc = test['description']
                # Special case tests that require specific server versions.
                needs_2_6 = (
                    "without an id specified" in desc or
                    ("FindOneAndReplace" in desc and "with upsert" in desc))
                if needs_2_6:
                    new_test = client_context.require_version_min(2, 6, 0)(
                        create_test(scenario_def, test, False))
                elif desc == "Aggregate with $out":
                    new_test = client_context.require_version_min(2, 6, 0)(
                        create_test(scenario_def, test, True))
                else:
                    new_test = create_test(scenario_def, test, False)
                new_test.__name__ = 'test_%s_%s_%s' % (
                    suite,
                    test_type,
                    str(desc.replace(" ", "_")))
                setattr(TestAllScenarios, new_test.__name__, new_test)
# Build and register all scenario tests at import time so test runners
# can discover them as ordinary methods of TestAllScenarios.
create_tests()
if __name__ == "__main__":
    unittest.main()
|
{
"content_hash": "a3c721300ec9de685d09ea45d0073231",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 79,
"avg_line_length": 36.903703703703705,
"alnum_prop": 0.5590124448012846,
"repo_name": "raghuvam/fusepy",
"id": "ecffe4339b41cbe56c2cbd7ac2ce5ec951ad5702",
"size": "5556",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/test_crud.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "169202"
},
{
"name": "Python",
"bytes": "1614533"
},
{
"name": "Shell",
"bytes": "2144"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import datetime
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import *
from django.http import QueryDict
from django.template import Template, Context
from django.test import TestCase
from django.test.utils import str_prefix
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.safestring import mark_safe
class Person(Form):
    """Three-field form used as the main fixture throughout these tests."""
    first_name = CharField()
    last_name = CharField()
    birthday = DateField()
class PersonNew(Form):
    """Like Person, but first_name declares an explicit widget "id" attribute."""
    first_name = CharField(widget=TextInput(attrs={'id': 'first_name_id'}))
    last_name = CharField()
    birthday = DateField()
class FormsTestCase(TestCase):
# A Form is a collection of Fields. It knows how to validate a set of data and it
# knows how to render itself in a couple of default ways (e.g., an HTML table).
# You can pass it data in __init__(), as a dictionary.
    def test_form(self):
        """Binding valid data validates it and renders values into widgets."""
        # Pass a dictionary to a Form's __init__().
        p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'})
        self.assertTrue(p.is_bound)
        self.assertEqual(p.errors, {})
        self.assertTrue(p.is_valid())
        self.assertHTMLEqual(p.errors.as_ul(), '')
        self.assertEqual(p.errors.as_text(), '')
        self.assertEqual(p.cleaned_data["first_name"], 'John')
        self.assertEqual(p.cleaned_data["last_name"], 'Lennon')
        self.assertEqual(p.cleaned_data["birthday"], datetime.date(1940, 10, 9))
        self.assertHTMLEqual(str(p['first_name']), '<input type="text" name="first_name" value="John" id="id_first_name" />')
        self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="last_name" value="Lennon" id="id_last_name" />')
        self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />')
        # Indexing with an unknown field name raises KeyError.
        try:
            p['nonexistentfield']
            self.fail('Attempts to access non-existent fields should fail.')
        except KeyError:
            pass
        form_output = []
        for boundfield in p:
            form_output.append(str(boundfield))
        self.assertHTMLEqual('\n'.join(form_output), """<input type="text" name="first_name" value="John" id="id_first_name" />
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />""")
        form_output = []
        for boundfield in p:
            form_output.append([boundfield.label, boundfield.data])
        self.assertEqual(form_output, [
            ['First name', 'John'],
            ['Last name', 'Lennon'],
            ['Birthday', '1940-10-9']
        ])
        self.assertHTMLEqual(str(p), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="Lennon" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>""")
    def test_empty_dict(self):
        """Binding {} is valid input: every required field reports an error."""
        # Empty dictionaries are valid, too.
        p = Person({})
        self.assertTrue(p.is_bound)
        self.assertEqual(p.errors['first_name'], ['This field is required.'])
        self.assertEqual(p.errors['last_name'], ['This field is required.'])
        self.assertEqual(p.errors['birthday'], ['This field is required.'])
        self.assertFalse(p.is_valid())
        self.assertEqual(p.cleaned_data, {})
        self.assertHTMLEqual(str(p), """<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
        self.assertHTMLEqual(p.as_table(), """<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>""")
    def test_unbound_form(self):
        """An unbound form has empty errors, fails is_valid(), has no cleaned_data."""
        # If you don't pass any values to the Form's __init__(), or if you pass None,
        # the Form will be considered unbound and won't do any validation. Form.errors
        # will be an empty dictionary *but* Form.is_valid() will return False.
        p = Person()
        self.assertFalse(p.is_bound)
        self.assertEqual(p.errors, {})
        self.assertFalse(p.is_valid())
        try:
            p.cleaned_data
            self.fail('Attempts to access cleaned_data when validation fails should fail.')
        except AttributeError:
            pass
        self.assertHTMLEqual(str(p), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
        self.assertHTMLEqual(p.as_table(), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>""")
    def test_unicode_values(self):
        """Non-ASCII field values render correctly; error output is well-formed."""
        # Unicode values are handled properly.
        p = Person({'first_name': 'John', 'last_name': '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111', 'birthday': '1940-10-9'})
        self.assertHTMLEqual(p.as_table(), '<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>\n<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></td></tr>\n<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>')
        self.assertHTMLEqual(p.as_ul(), '<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></li>\n<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></li>\n<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></li>')
        self.assertHTMLEqual(p.as_p(), '<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></p>\n<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></p>\n<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></p>')
        # Partially-bound data: missing fields report errors in several formats.
        p = Person({'last_name': 'Lennon'})
        self.assertEqual(p.errors['first_name'], ['This field is required.'])
        self.assertEqual(p.errors['birthday'], ['This field is required.'])
        self.assertFalse(p.is_valid())
        self.assertHTMLEqual(p.errors.as_ul(), '<ul class="errorlist"><li>first_name<ul class="errorlist"><li>This field is required.</li></ul></li><li>birthday<ul class="errorlist"><li>This field is required.</li></ul></li></ul>')
        self.assertEqual(p.errors.as_text(), """* first_name
* This field is required.
* birthday
* This field is required.""")
        self.assertEqual(p.cleaned_data, {'last_name': 'Lennon'})
        self.assertEqual(p['first_name'].errors, ['This field is required.'])
        self.assertHTMLEqual(p['first_name'].errors.as_ul(), '<ul class="errorlist"><li>This field is required.</li></ul>')
        self.assertEqual(p['first_name'].errors.as_text(), '* This field is required.')
        # Unbound forms render with no value attributes at all.
        p = Person()
        self.assertHTMLEqual(str(p['first_name']), '<input type="text" name="first_name" id="id_first_name" />')
        self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="last_name" id="id_last_name" />')
        self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="birthday" id="id_birthday" />')
    def test_cleaned_data_only_fields(self):
        """cleaned_data contains only declared fields; extra data keys are dropped."""
        # cleaned_data will always *only* contain a key for fields defined in the
        # Form, even if you pass extra data when you define the Form. In this
        # example, we pass a bunch of extra fields to the form constructor,
        # but cleaned_data contains only the form's fields.
        data = {'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9', 'extra1': 'hello', 'extra2': 'hello'}
        p = Person(data)
        self.assertTrue(p.is_valid())
        self.assertEqual(p.cleaned_data['first_name'], 'John')
        self.assertEqual(p.cleaned_data['last_name'], 'Lennon')
        self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
    def test_optional_data(self):
        """Optional missing fields appear in cleaned_data as '' (CharField) or None (DateField)."""
        # cleaned_data will include a key and value for *all* fields defined in the Form,
        # even if the Form's data didn't include a value for fields that are not
        # required. In this example, the data dictionary doesn't include a value for the
        # "nick_name" field, but cleaned_data includes it. For CharFields, it's set to the
        # empty string.
        class OptionalPersonForm(Form):
            first_name = CharField()
            last_name = CharField()
            nick_name = CharField(required=False)
        data = {'first_name': 'John', 'last_name': 'Lennon'}
        f = OptionalPersonForm(data)
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['nick_name'], '')
        self.assertEqual(f.cleaned_data['first_name'], 'John')
        self.assertEqual(f.cleaned_data['last_name'], 'Lennon')
        # For DateFields, it's set to None.
        class OptionalPersonForm(Form):
            first_name = CharField()
            last_name = CharField()
            birth_date = DateField(required=False)
        data = {'first_name': 'John', 'last_name': 'Lennon'}
        f = OptionalPersonForm(data)
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['birth_date'], None)
        self.assertEqual(f.cleaned_data['first_name'], 'John')
        self.assertEqual(f.cleaned_data['last_name'], 'Lennon')
    def test_auto_id(self):
        """auto_id containing '%s' formats each widget id and produces <label> tags."""
        # "auto_id" tells the Form to add an "id" attribute to each form element.
        # If it's a string that contains '%s', Django will use that as a format string
        # into which the field's name will be inserted. It will also put a <label> around
        # the human-readable labels for a field.
        p = Person(auto_id='%s_id')
        self.assertHTMLEqual(p.as_table(), """<tr><th><label for="first_name_id">First name:</label></th><td><input type="text" name="first_name" id="first_name_id" /></td></tr>
<tr><th><label for="last_name_id">Last name:</label></th><td><input type="text" name="last_name" id="last_name_id" /></td></tr>
<tr><th><label for="birthday_id">Birthday:</label></th><td><input type="text" name="birthday" id="birthday_id" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></li>
<li><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></li>
<li><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<p><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></p>
<p><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></p>
<p><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></p>""")
    def test_auto_id_true(self):
        """auto_id=True (no '%s') uses the bare field name as the id."""
        # If auto_id is any True value whose str() does not contain '%s', the "id"
        # attribute will be the name of the field.
        p = Person(auto_id=True)
        self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name">First name:</label> <input type="text" name="first_name" id="first_name" /></li>
<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>""")
    def test_auto_id_false(self):
        """auto_id=False suppresses id attributes and <label> tags."""
        # If auto_id is any False value, an "id" attribute won't be output unless it
        # was manually entered.
        p = Person(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>""")
    def test_id_on_field(self):
        """A widget-declared id is kept (and labelled) even when auto_id is False."""
        # In this example, auto_id is False, but the "id" attribute for the "first_name"
        # field is given. Also note that field gets a <label>, while the others don't.
        p = PersonNew(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>""")
    def test_auto_id_on_form_and_field(self):
        """A widget-declared id takes precedence over auto_id=True."""
        # If the "id" attribute is specified in the Form and auto_id is True, the "id"
        # attribute in the Form gets precedence.
        p = PersonNew(auto_id=True)
        self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>""")
    def test_various_boolean_values(self):
        """BooleanField checkboxes render checked for truthy data, unchecked for falsy."""
        class SignupForm(Form):
            email = EmailField()
            get_spam = BooleanField()
        f = SignupForm(auto_id=False)
        self.assertHTMLEqual(str(f['email']), '<input type="text" name="email" />')
        self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" />')
        f = SignupForm({'email': 'test@example.com', 'get_spam': True}, auto_id=False)
        self.assertHTMLEqual(str(f['email']), '<input type="text" name="email" value="test@example.com" />')
        self.assertHTMLEqual(str(f['get_spam']), '<input checked="checked" type="checkbox" name="get_spam" />')
        # 'True' or 'true' should be rendered without a value attribute
        f = SignupForm({'email': 'test@example.com', 'get_spam': 'True'}, auto_id=False)
        self.assertHTMLEqual(str(f['get_spam']), '<input checked="checked" type="checkbox" name="get_spam" />')
        f = SignupForm({'email': 'test@example.com', 'get_spam': 'true'}, auto_id=False)
        self.assertHTMLEqual(str(f['get_spam']), '<input checked="checked" type="checkbox" name="get_spam" />')
        # A value of 'False' or 'false' should be rendered unchecked
        f = SignupForm({'email': 'test@example.com', 'get_spam': 'False'}, auto_id=False)
        self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" />')
        f = SignupForm({'email': 'test@example.com', 'get_spam': 'false'}, auto_id=False)
        self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" />')
    def test_widget_output(self):
        """Custom widgets (class or instance) and the as_text/as_textarea/as_hidden shortcuts."""
        # Any Field can have a Widget class passed to its constructor:
        class ContactForm(Form):
            subject = CharField()
            message = CharField(widget=Textarea)
        f = ContactForm(auto_id=False)
        self.assertHTMLEqual(str(f['subject']), '<input type="text" name="subject" />')
        self.assertHTMLEqual(str(f['message']), '<textarea name="message" rows="10" cols="40"></textarea>')
        # as_textarea(), as_text() and as_hidden() are shortcuts for changing the output
        # widget type:
        self.assertHTMLEqual(f['subject'].as_textarea(), '<textarea name="subject" rows="10" cols="40"></textarea>')
        self.assertHTMLEqual(f['message'].as_text(), '<input type="text" name="message" />')
        self.assertHTMLEqual(f['message'].as_hidden(), '<input type="hidden" name="message" />')
        # The 'widget' parameter to a Field can also be an instance:
        class ContactForm(Form):
            subject = CharField()
            message = CharField(widget=Textarea(attrs={'rows': 80, 'cols': 20}))
        f = ContactForm(auto_id=False)
        self.assertHTMLEqual(str(f['message']), '<textarea name="message" rows="80" cols="20"></textarea>')
        # Instance-level attrs are *not* carried over to as_textarea(), as_text() and
        # as_hidden():
        self.assertHTMLEqual(f['message'].as_text(), '<input type="text" name="message" />')
        f = ContactForm({'subject': 'Hello', 'message': 'I love you.'}, auto_id=False)
        self.assertHTMLEqual(f['subject'].as_textarea(), '<textarea rows="10" cols="40" name="subject">Hello</textarea>')
        self.assertHTMLEqual(f['message'].as_text(), '<input type="text" name="message" value="I love you." />')
        self.assertHTMLEqual(f['message'].as_hidden(), '<input type="hidden" name="message" value="I love you." />')
    def test_forms_with_choices(self):
        """Render ChoiceField as a <select> in several configurations:
        unbound vs. bound data, an empty-string choice, widget attrs,
        a widget carrying its own (ignored) choices, and choices assigned
        after form construction.
        """
        # For a form with a <select>, use ChoiceField:
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
        f = FrameworkForm(auto_id=False)
        # Unbound form: no <option> is selected.
        self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
        # Bound data marks the matching <option> as selected.
        f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>""")
        # A subtlety: If one of the choices' value is the empty string and the form is
        # unbound, then the <option> for the empty-string choice will get selected="selected".
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField(choices=[('', '------'), ('P', 'Python'), ('J', 'Java')])
        f = FrameworkForm(auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="" selected="selected">------</option>
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
        # You can specify widget attributes in the Widget constructor.
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(attrs={'class': 'foo'}))
        f = FrameworkForm(auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
        f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>""")
        # When passing a custom widget instance to ChoiceField, note that setting
        # 'choices' on the widget is meaningless. The widget will use the choices
        # defined on the Field, not the ones defined on the Widget.
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(choices=[('R', 'Ruby'), ('P', 'Perl')], attrs={'class': 'foo'}))
        f = FrameworkForm(auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
        f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>""")
        # You can set a ChoiceField's choices after the fact.
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField()
        f = FrameworkForm(auto_id=False)
        # With no choices defined yet, the <select> renders empty.
        self.assertHTMLEqual(str(f['language']), """<select name="language">
</select>""")
        f.fields['language'].choices = [('P', 'Python'), ('J', 'Java')]
        self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
    def test_forms_with_radio(self):
        """Render ChoiceField with the RadioSelect widget: standalone output,
        the as_table()/as_ul()/as_p() layouts, and per-button ID generation
        under auto_id.
        """
        # Add widget=RadioSelect to use that widget with a ChoiceField.
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=RadioSelect)
        f = FrameworkForm(auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul>""")
        self.assertHTMLEqual(f.as_table(), """<tr><th>Name:</th><td><input type="text" name="name" /></td></tr>
<tr><th>Language:</th><td><ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul></td></tr>""")
        self.assertHTMLEqual(f.as_ul(), """<li>Name: <input type="text" name="name" /></li>
<li>Language: <ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul></li>""")
        # Regarding auto_id and <label>, RadioSelect is a special case. Each radio button
        # gets a distinct ID, formed by appending an underscore plus the button's
        # zero-based index.
        f = FrameworkForm(auto_id='id_%s')
        self.assertHTMLEqual(str(f['language']), """<ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul>""")
        # When RadioSelect is used with auto_id, and the whole form is printed using
        # either as_table() or as_ul(), the label for the RadioSelect will point to the
        # ID of the *first* radio button.
        self.assertHTMLEqual(f.as_table(), """<tr><th><label for="id_name">Name:</label></th><td><input type="text" name="name" id="id_name" /></td></tr>
<tr><th><label for="id_language_0">Language:</label></th><td><ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></td></tr>""")
        self.assertHTMLEqual(f.as_ul(), """<li><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
<li><label for="id_language_0">Language:</label> <ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></li>""")
        self.assertHTMLEqual(f.as_p(), """<p><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
<p><label for="id_language_0">Language:</label> <ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></p>""")
def test_form_with_iterable_boundfield(self):
class BeatleForm(Form):
name = ChoiceField(choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')], widget=RadioSelect)
f = BeatleForm(auto_id=False)
self.assertHTMLEqual('\n'.join([str(bf) for bf in f['name']]), """<label><input type="radio" name="name" value="john" /> John</label>
<label><input type="radio" name="name" value="paul" /> Paul</label>
<label><input type="radio" name="name" value="george" /> George</label>
<label><input type="radio" name="name" value="ringo" /> Ringo</label>""")
self.assertHTMLEqual('\n'.join(['<div>%s</div>' % bf for bf in f['name']]), """<div><label><input type="radio" name="name" value="john" /> John</label></div>
<div><label><input type="radio" name="name" value="paul" /> Paul</label></div>
<div><label><input type="radio" name="name" value="george" /> George</label></div>
<div><label><input type="radio" name="name" value="ringo" /> Ringo</label></div>""")
def test_form_with_noniterable_boundfield(self):
# You can iterate over any BoundField, not just those with widget=RadioSelect.
class BeatleForm(Form):
name = CharField()
f = BeatleForm(auto_id=False)
self.assertHTMLEqual('\n'.join([str(bf) for bf in f['name']]), '<input type="text" name="name" />')
    def test_forms_with_multiple_choice(self):
        """Render MultipleChoiceField as <select multiple="multiple">; its
        bound data is required to be a list of values.
        """
        # MultipleChoiceField is a special case, as its data is required to be a list:
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField()
        f = SongForm(auto_id=False)
        # No choices defined: the multi-select renders empty.
        self.assertHTMLEqual(str(f['composers']), """<select multiple="multiple" name="composers">
</select>""")
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
        f = SongForm(auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<select multiple="multiple" name="composers">
<option value="J">John Lennon</option>
<option value="P">Paul McCartney</option>
</select>""")
        # Bound list values are rendered as selected options.
        f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
        self.assertHTMLEqual(str(f['name']), '<input type="text" name="name" value="Yesterday" />')
        self.assertHTMLEqual(str(f['composers']), """<select multiple="multiple" name="composers">
<option value="J">John Lennon</option>
<option value="P" selected="selected">Paul McCartney</option>
</select>""")
    def test_hidden_data(self):
        """as_hidden() for multi-value fields: MultipleChoiceField and
        SplitDateTimeField each render one hidden <input> per sub-value.
        """
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
        # MultipleChoiceField rendered as_hidden() is a special case. Because it can
        # have multiple values, its as_hidden() renders multiple <input type="hidden">
        # tags.
        f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
        self.assertHTMLEqual(f['composers'].as_hidden(), '<input type="hidden" name="composers" value="P" />')
        f = SongForm({'name': 'From Me To You', 'composers': ['P', 'J']}, auto_id=False)
        self.assertHTMLEqual(f['composers'].as_hidden(), """<input type="hidden" name="composers" value="P" />
<input type="hidden" name="composers" value="J" />""")
        # DateTimeField rendered as_hidden() is special too
        class MessageForm(Form):
            when = SplitDateTimeField()
        # SplitDateTimeField binds two sub-inputs: when_0 (date), when_1 (time).
        f = MessageForm({'when_0': '1992-01-01', 'when_1': '01:01'})
        self.assertTrue(f.is_valid())
        self.assertHTMLEqual(str(f['when']), '<input type="text" name="when_0" value="1992-01-01" id="id_when_0" /><input type="text" name="when_1" value="01:01" id="id_when_1" />')
        self.assertHTMLEqual(f['when'].as_hidden(), '<input type="hidden" name="when_0" value="1992-01-01" id="id_when_0" /><input type="hidden" name="when_1" value="01:01" id="id_when_1" />')
    def test_mulitple_choice_checkbox(self):
        """Render MultipleChoiceField with the CheckboxSelectMultiple widget
        for zero, one, and two checked values.

        NOTE(review): "mulitple" in the method name is a typo for "multiple";
        left unchanged so existing external test selectors keep matching.
        """
        # MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
        f = SongForm(auto_id=False)
        # Unbound: nothing checked.
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>""")
        # One value bound: that checkbox is checked.
        f = SongForm({'composers': ['J']}, auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>""")
        # Both values bound: both checkboxes are checked.
        f = SongForm({'composers': ['J', 'P']}, auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input checked="checked" type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>""")
def test_checkbox_auto_id(self):
# Regarding auto_id, CheckboxSelectMultiple is a special case. Each checkbox
# gets a distinct ID, formed by appending an underscore plus the checkbox's
# zero-based index.
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
f = SongForm(auto_id='%s_id')
self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label for="composers_id_0"><input type="checkbox" name="composers" value="J" id="composers_id_0" /> John Lennon</label></li>
<li><label for="composers_id_1"><input type="checkbox" name="composers" value="P" id="composers_id_1" /> Paul McCartney</label></li>
</ul>""")
def test_multiple_choice_list_data(self):
# Data for a MultipleChoiceField should be a list. QueryDict, MultiValueDict and
# MergeDict (when created as a merge of MultiValueDicts) conveniently work with
# this.
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
data = {'name': 'Yesterday', 'composers': ['J', 'P']}
f = SongForm(data)
self.assertEqual(f.errors, {})
data = QueryDict('name=Yesterday&composers=J&composers=P')
f = SongForm(data)
self.assertEqual(f.errors, {})
data = MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P']))
f = SongForm(data)
self.assertEqual(f.errors, {})
data = MergeDict(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])))
f = SongForm(data)
self.assertEqual(f.errors, {})
    def test_multiple_hidden(self):
        """MultipleHiddenInput renders one hidden <input> per value; also
        checks that CheckboxSelectMultiple requires and cleans list data.
        """
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
        # The MultipleHiddenInput widget renders multiple values as hidden fields.
        class SongFormHidden(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=MultipleHiddenInput)
        f = SongFormHidden(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])), auto_id=False)
        self.assertHTMLEqual(f.as_ul(), """<li>Name: <input type="text" name="name" value="Yesterday" /><input type="hidden" name="composers" value="J" />
<input type="hidden" name="composers" value="P" /></li>""")
        # When using CheckboxSelectMultiple, the framework expects a list of input and
        # returns a list of input.
        f = SongForm({'name': 'Yesterday'}, auto_id=False)
        self.assertEqual(f.errors['composers'], ['This field is required.'])
        f = SongForm({'name': 'Yesterday', 'composers': ['J']}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['composers'], ['J'])
        self.assertEqual(f.cleaned_data['name'], 'Yesterday')
        f = SongForm({'name': 'Yesterday', 'composers': ['J', 'P']}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['composers'], ['J', 'P'])
        self.assertEqual(f.cleaned_data['name'], 'Yesterday')
    def test_escaping(self):
        """Validation error text is HTML-escaped on output, except where the
        label or error message was wrapped in mark_safe().
        """
        # Validation errors are HTML-escaped when output as HTML.
        class EscapingForm(Form):
            special_name = CharField(label="<em>Special</em> Field")
            special_safe_name = CharField(label=mark_safe("<em>Special</em> Field"))
            def clean_special_name(self):
                # Always fails, echoing the submitted value into the message.
                raise ValidationError("Something's wrong with '%s'" % self.cleaned_data['special_name'])
            def clean_special_safe_name(self):
                # Always fails, with a message explicitly marked safe.
                raise ValidationError(mark_safe("'<b>%s</b>' is a safe string" % self.cleaned_data['special_safe_name']))
        f = EscapingForm({'special_name': "Nothing to escape", 'special_safe_name': "Nothing to escape"}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), """<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>Something's wrong with 'Nothing to escape'</li></ul><input type="text" name="special_name" value="Nothing to escape" /></td></tr>
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>'<b>Nothing to escape</b>' is a safe string</li></ul><input type="text" name="special_safe_name" value="Nothing to escape" /></td></tr>""")
        f = EscapingForm({
            'special_name': "Should escape < & > and <script>alert('xss')</script>",
            'special_safe_name': "<i>Do not escape</i>"
        }, auto_id=False)
        self.assertHTMLEqual(f.as_table(), """<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>Something's wrong with 'Should escape < & > and <script>alert('xss')</script>'</li></ul><input type="text" name="special_name" value="Should escape < & > and <script>alert('xss')</script>" /></td></tr>
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>'<b><i>Do not escape</i></b>' is a safe string</li></ul><input type="text" name="special_safe_name" value="<i>Do not escape</i>" /></td></tr>""")
    def test_validating_multiple_fields(self):
        """Cross-field validation two ways: via clean_XXX() (the error is
        attached to field XXX) and via Form.clean() (the error is attached
        to the special '__all__' key and rendered at the top of the form).
        """
        # There are a couple of ways to do multiple-field validation. If you want the
        # validation message to be associated with a particular field, implement the
        # clean_XXX() method on the Form, where XXX is the field name. As in
        # Field.clean(), the clean_XXX() method should return the cleaned value. In the
        # clean_XXX() method, you have access to self.cleaned_data, which is a dictionary
        # of all the data that has been cleaned *so far*, in order by the fields,
        # including the current field (e.g., the field XXX if you're in clean_XXX()).
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean_password2(self):
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError('Please make sure your passwords match.')
                return self.cleaned_data['password2']
        f = UserRegistration(auto_id=False)
        self.assertEqual(f.errors, {})
        f = UserRegistration({}, auto_id=False)
        self.assertEqual(f.errors['username'], ['This field is required.'])
        self.assertEqual(f.errors['password1'], ['This field is required.'])
        self.assertEqual(f.errors['password2'], ['This field is required.'])
        # Mismatched passwords: the error lands on password2 specifically.
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
        self.assertEqual(f.errors['password2'], ['Please make sure your passwords match.'])
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['username'], 'adrian')
        self.assertEqual(f.cleaned_data['password1'], 'foo')
        self.assertEqual(f.cleaned_data['password2'], 'foo')
        # Another way of doing multiple-field validation is by implementing the
        # Form's clean() method. If you do this, any ValidationError raised by that
        # method will not be associated with a particular field; it will have a
        # special-case association with the field named '__all__'.
        # Note that in Form.clean(), you have access to self.cleaned_data, a dictionary of
        # all the fields/values that have *not* raised a ValidationError. Also note
        # Form.clean() is required to return a dictionary of all clean data.
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean(self):
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError('Please make sure your passwords match.')
                return self.cleaned_data
        f = UserRegistration(auto_id=False)
        self.assertEqual(f.errors, {})
        f = UserRegistration({}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), """<tr><th>Username:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password2" /></td></tr>""")
        self.assertEqual(f.errors['username'], ['This field is required.'])
        self.assertEqual(f.errors['password1'], ['This field is required.'])
        self.assertEqual(f.errors['password2'], ['This field is required.'])
        # Mismatched passwords via Form.clean(): the error keys on '__all__'
        # and is rendered before any field row.
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
        self.assertEqual(f.errors['__all__'], ['Please make sure your passwords match.'])
        self.assertHTMLEqual(f.as_table(), """<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><input type="text" name="username" value="adrian" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>""")
        self.assertHTMLEqual(f.as_ul(), """<li><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></li>
<li>Username: <input type="text" name="username" value="adrian" maxlength="10" /></li>
<li>Password1: <input type="password" name="password1" /></li>
<li>Password2: <input type="password" name="password2" /></li>""")
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['username'], 'adrian')
        self.assertEqual(f.cleaned_data['password1'], 'foo')
        self.assertEqual(f.cleaned_data['password2'], 'foo')
def test_dynamic_construction(self):
# It's possible to construct a Form dynamically by adding to the self.fields
# dictionary in __init__(). Don't forget to call Form.__init__() within the
# subclass' __init__().
class Person(Form):
first_name = CharField()
last_name = CharField()
def __init__(self, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
self.fields['birthday'] = DateField()
p = Person(auto_id=False)
self.assertHTMLEqual(p.as_table(), """<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" /></td></tr>""")
# Instances of a dynamic Form do not persist fields from one Form instance to
# the next.
class MyForm(Form):
def __init__(self, data=None, auto_id=False, field_list=[]):
Form.__init__(self, data, auto_id=auto_id)
for field in field_list:
self.fields[field[0]] = field[1]
field_list = [('field1', CharField()), ('field2', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>""")
field_list = [('field3', CharField()), ('field4', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>""")
class MyForm(Form):
default_field_1 = CharField()
default_field_2 = CharField()
def __init__(self, data=None, auto_id=False, field_list=[]):
Form.__init__(self, data, auto_id=auto_id)
for field in field_list:
self.fields[field[0]] = field[1]
field_list = [('field1', CharField()), ('field2', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>""")
field_list = [('field3', CharField()), ('field4', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>""")
# Similarly, changes to field attributes do not persist from one Form instance
# to the next.
class Person(Form):
first_name = CharField(required=False)
last_name = CharField(required=False)
def __init__(self, names_required=False, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
if names_required:
self.fields['first_name'].required = True
self.fields['first_name'].widget.attrs['class'] = 'required'
self.fields['last_name'].required = True
self.fields['last_name'].widget.attrs['class'] = 'required'
f = Person(names_required=False)
self.assertEqual(f['first_name'].field.required, f['last_name'].field.required, (False, False))
self.assertEqual(f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs, ({}, {}))
f = Person(names_required=True)
self.assertEqual(f['first_name'].field.required, f['last_name'].field.required, (True, True))
self.assertEqual(f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs, ({'class': 'required'}, {'class': 'required'}))
f = Person(names_required=False)
self.assertEqual(f['first_name'].field.required, f['last_name'].field.required, (False, False))
self.assertEqual(f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs, ({}, {}))
class Person(Form):
first_name = CharField(max_length=30)
last_name = CharField(max_length=30)
def __init__(self, name_max_length=None, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
if name_max_length:
self.fields['first_name'].max_length = name_max_length
self.fields['last_name'].max_length = name_max_length
f = Person(name_max_length=None)
self.assertEqual(f['first_name'].field.max_length, f['last_name'].field.max_length, (30, 30))
f = Person(name_max_length=20)
self.assertEqual(f['first_name'].field.max_length, f['last_name'].field.max_length, (20, 20))
f = Person(name_max_length=None)
self.assertEqual(f['first_name'].field.max_length, f['last_name'].field.max_length, (30, 30))
# Similarly, choices do not persist from one Form instance to the next.
# Refs #15127.
class Person(Form):
first_name = CharField(required=False)
last_name = CharField(required=False)
gender = ChoiceField(choices=(('f', 'Female'), ('m', 'Male')))
def __init__(self, allow_unspec_gender=False, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
if allow_unspec_gender:
self.fields['gender'].choices += (('u', 'Unspecified'),)
f = Person()
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male')])
f = Person(allow_unspec_gender=True)
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male'), ('u', 'Unspecified')])
f = Person()
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male')])
def test_validators_independence(self):
""" Test that we are able to modify a form field validators list without polluting
other forms """
from django.core.validators import MaxValueValidator
class MyForm(Form):
myfield = CharField(max_length=25)
f1 = MyForm()
f2 = MyForm()
f1.fields['myfield'].validators[0] = MaxValueValidator(12)
self.assertFalse(f1.fields['myfield'].validators[0] == f2.fields['myfield'].validators[0])
    def test_hidden_widget(self):
        """HiddenInput fields render without a label or separate row (the
        hidden input is appended to the last visible row), hidden-field
        errors are shown at the top with a "(Hidden field ...)" prefix, and
        an all-hidden form renders as bare hidden inputs.
        """
        # HiddenInput widgets are displayed differently in the as_table(), as_ul())
        # and as_p() output of a Form -- their verbose names are not displayed, and a
        # separate row is not displayed. They're displayed in the last row of the
        # form, directly after that row's form element.
        class Person(Form):
            first_name = CharField()
            last_name = CharField()
            hidden_text = CharField(widget=HiddenInput)
            birthday = DateField()
        p = Person(auto_id=False)
        self.assertHTMLEqual(p.as_table(), """<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<p>First name: <input type="text" name="first_name" /></p>
<p>Last name: <input type="text" name="last_name" /></p>
<p>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></p>""")
        # With auto_id set, a HiddenInput still gets an ID, but it doesn't get a label.
        p = Person(auto_id='id_%s')
        self.assertHTMLEqual(p.as_table(), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></p>""")
        # If a field with a HiddenInput has errors, the as_table() and as_ul() output
        # will include the error message(s) with the text "(Hidden field [fieldname]) "
        # prepended. This message is displayed at the top of the output, regardless of
        # its field's order in the form.
        p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'}, auto_id=False)
        self.assertHTMLEqual(p.as_table(), """<tr><td colspan="2"><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></td></tr>
<tr><th>First name:</th><td><input type="text" name="first_name" value="John" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" value="Lennon" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></li>
<li>First name: <input type="text" name="first_name" value="John" /></li>
<li>Last name: <input type="text" name="last_name" value="Lennon" /></li>
<li>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul>
<p>First name: <input type="text" name="first_name" value="John" /></p>
<p>Last name: <input type="text" name="last_name" value="Lennon" /></p>
<p>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></p>""")
        # A corner case: It's possible for a form to have only HiddenInputs.
        class TestForm(Form):
            foo = CharField(widget=HiddenInput)
            bar = CharField(widget=HiddenInput)
        p = TestForm(auto_id=False)
        self.assertHTMLEqual(p.as_table(), '<input type="hidden" name="foo" /><input type="hidden" name="bar" />')
        self.assertHTMLEqual(p.as_ul(), '<input type="hidden" name="foo" /><input type="hidden" name="bar" />')
        self.assertHTMLEqual(p.as_p(), '<input type="hidden" name="foo" /><input type="hidden" name="bar" />')
    def test_field_order(self):
        """Fields render in declaration order, including past ten fields
        (i.e. ordering is positional, not alphabetical on the name).
        """
        # A Form's fields are displayed in the same order in which they were defined.
        class TestForm(Form):
            field1 = CharField()
            field2 = CharField()
            field3 = CharField()
            field4 = CharField()
            field5 = CharField()
            field6 = CharField()
            field7 = CharField()
            field8 = CharField()
            field9 = CharField()
            field10 = CharField()
            field11 = CharField()
            field12 = CharField()
            field13 = CharField()
            field14 = CharField()
        p = TestForm(auto_id=False)
        self.assertHTMLEqual(p.as_table(), """<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
<tr><th>Field5:</th><td><input type="text" name="field5" /></td></tr>
<tr><th>Field6:</th><td><input type="text" name="field6" /></td></tr>
<tr><th>Field7:</th><td><input type="text" name="field7" /></td></tr>
<tr><th>Field8:</th><td><input type="text" name="field8" /></td></tr>
<tr><th>Field9:</th><td><input type="text" name="field9" /></td></tr>
<tr><th>Field10:</th><td><input type="text" name="field10" /></td></tr>
<tr><th>Field11:</th><td><input type="text" name="field11" /></td></tr>
<tr><th>Field12:</th><td><input type="text" name="field12" /></td></tr>
<tr><th>Field13:</th><td><input type="text" name="field13" /></td></tr>
<tr><th>Field14:</th><td><input type="text" name="field14" /></td></tr>""")
    def test_form_html_attributes(self):
        """CharField.max_length propagates to the widget as a "maxlength"
        HTML attribute for TextInput/PasswordInput, overriding any
        "maxlength" given in the widget's attrs.
        """
        # Some Field classes have an effect on the HTML attributes of their associated
        # Widget. If you set max_length in a CharField and its associated widget is
        # either a TextInput or PasswordInput, then the widget's rendered HTML will
        # include the "maxlength" attribute.
        class UserRegistration(Form):
            username = CharField(max_length=10)   # uses TextInput by default
            password = CharField(max_length=10, widget=PasswordInput)
            realname = CharField(max_length=10, widget=TextInput) # redundantly define widget, just to test
            address = CharField()                 # no max_length defined here
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" maxlength="10" /></li>
<li>Realname: <input type="text" name="realname" maxlength="10" /></li>
<li>Address: <input type="text" name="address" /></li>""")
        # If you specify a custom "attrs" that includes the "maxlength" attribute,
        # the Field's max_length attribute will override whatever "maxlength" you specify
        # in "attrs".
        class UserRegistration(Form):
            username = CharField(max_length=10, widget=TextInput(attrs={'maxlength': 20}))
            password = CharField(max_length=10, widget=PasswordInput)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" maxlength="10" /></li>""")
    def test_specifying_labels(self):
        """The ``label`` argument controls the rendered label text: explicit
        labels, punctuation-aware colon suffixing, Unicode labels, empty labels
        (no label rendered), and ``label=None`` (auto-generated from the field
        name) are all exercised."""
        # You can specify the label for a field by using the 'label' argument to a Field
        # class. If you don't specify 'label', Django will use the field name with
        # underscores converted to spaces, and the initial letter capitalized.
        class UserRegistration(Form):
            username = CharField(max_length=10, label='Your username')
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput, label='Password (again)')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Your username: <input type="text" name="username" maxlength="10" /></li>
<li>Password1: <input type="password" name="password1" /></li>
<li>Password (again): <input type="password" name="password2" /></li>""")
        # Labels for as_* methods will only end in a colon if they don't end in other
        # punctuation already.
        class Questions(Form):
            q1 = CharField(label='The first question')
            q2 = CharField(label='What is your name?')
            q3 = CharField(label='The answer to life is:')
            q4 = CharField(label='Answer this question!')
            q5 = CharField(label='The last question. Period.')
        self.assertHTMLEqual(Questions(auto_id=False).as_p(), """<p>The first question: <input type="text" name="q1" /></p>
<p>What is your name? <input type="text" name="q2" /></p>
<p>The answer to life is: <input type="text" name="q3" /></p>
<p>Answer this question! <input type="text" name="q4" /></p>
<p>The last question. Period. <input type="text" name="q5" /></p>""")
        self.assertHTMLEqual(Questions().as_p(), """<p><label for="id_q1">The first question:</label> <input type="text" name="q1" id="id_q1" /></p>
<p><label for="id_q2">What is your name?</label> <input type="text" name="q2" id="id_q2" /></p>
<p><label for="id_q3">The answer to life is:</label> <input type="text" name="q3" id="id_q3" /></p>
<p><label for="id_q4">Answer this question!</label> <input type="text" name="q4" id="id_q4" /></p>
<p><label for="id_q5">The last question. Period.</label> <input type="text" name="q5" id="id_q5" /></p>""")
        # A label can be a Unicode object or a bytestring with special characters.
        class UserRegistration(Form):
            username = CharField(max_length=10, label='ŠĐĆŽćžšđ')
            password = CharField(widget=PasswordInput, label='\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), '<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="text" name="username" maxlength="10" /></li>\n<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="password" name="password" /></li>')
        # If a label is set to the empty string for a field, that field won't get a label.
        class UserRegistration(Form):
            username = CharField(max_length=10, label='')
            password = CharField(widget=PasswordInput)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li> <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration(auto_id='id_%s')
        self.assertHTMLEqual(p.as_ul(), """<li> <input id="id_username" type="text" name="username" maxlength="10" /></li>
<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>""")
        # If label is None, Django will auto-create the label from the field name. This
        # is default behavior.
        class UserRegistration(Form):
            username = CharField(max_length=10, label=None)
            password = CharField(widget=PasswordInput)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration(auto_id='id_%s')
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_username">Username:</label> <input id="id_username" type="text" name="username" maxlength="10" /></li>
<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>""")
    def test_label_suffix(self):
        """The Form ``label_suffix`` argument replaces the default colon; an
        explicit suffix is appended even when the label ends in punctuation
        that would normally suppress the default colon -- except '?' here,
        where the label already ends with the same character."""
        # You can specify the 'label_suffix' argument to a Form class to modify the
        # punctuation symbol used at the end of a label. By default, the colon (:) is
        # used, and is only appended to the label if the label doesn't already end with a
        # punctuation symbol: ., !, ? or :. If you specify a different suffix, it will
        # be appended regardless of the last character of the label.
        class FavoriteForm(Form):
            color = CharField(label='Favorite color?')
            animal = CharField(label='Favorite animal')
        f = FavoriteForm(auto_id=False)
        self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal: <input type="text" name="animal" /></li>""")
        f = FavoriteForm(auto_id=False, label_suffix='?')
        self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal? <input type="text" name="animal" /></li>""")
        f = FavoriteForm(auto_id=False, label_suffix='')
        self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal <input type="text" name="animal" /></li>""")
        # A non-ASCII suffix (here a rightwards arrow) works too.
        f = FavoriteForm(auto_id=False, label_suffix='\u2192')
        self.assertHTMLEqual(f.as_ul(), '<li>Favorite color? <input type="text" name="color" /></li>\n<li>Favorite animal\u2192 <input type="text" name="animal" /></li>')
    def test_initial_data(self):
        """Field-level ``initial`` is rendered only on unbound forms; it is
        never used as a fallback value when the form is bound."""
        # You can specify initial data for a field by using the 'initial' argument to a
        # Field class. This initial data is displayed when a Form is rendered with *no*
        # data. It is not displayed when a Form is rendered with any data (including an
        # empty dictionary). Also, the initial value is *not* used if data for a
        # particular required field isn't provided.
        class UserRegistration(Form):
            username = CharField(max_length=10, initial='django')
            password = CharField(widget=PasswordInput)
        # Here, we're not submitting any data, so the initial value will be displayed.
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        # Here, we're submitting data, so the initial value will *not* be displayed.
        p = UserRegistration({}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': ''}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': 'foo'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        # An 'initial' value is *not* used as a fallback if data is not provided. In this
        # example, we don't provide a value for 'username', and the form raises a
        # validation error rather than using the initial value for 'username'.
        p = UserRegistration({'password': 'secret'})
        self.assertEqual(p.errors['username'], ['This field is required.'])
        self.assertFalse(p.is_valid())
    def test_dynamic_initial_data(self):
        """The ``initial`` dict passed to the Form constructor is rendered on
        unbound forms, is ignored when data is submitted, and takes precedence
        over field-level ``initial``."""
        # The previous technique dealt with "hard-coded" initial data, but it's also
        # possible to specify initial data after you've already created the Form class
        # (i.e., at runtime). Use the 'initial' parameter to the Form constructor. This
        # should be a dictionary containing initial values for one or more fields in the
        # form, keyed by field name.
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password = CharField(widget=PasswordInput)
        # Here, we're not submitting any data, so the initial value will be displayed.
        p = UserRegistration(initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration(initial={'username': 'stephane'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        # The 'initial' parameter is meaningless if you pass data.
        p = UserRegistration({}, initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': ''}, initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': 'foo'}, initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        # A dynamic 'initial' value is *not* used as a fallback if data is not provided.
        # In this example, we don't provide a value for 'username', and the form raises a
        # validation error rather than using the initial value for 'username'.
        p = UserRegistration({'password': 'secret'}, initial={'username': 'django'})
        self.assertEqual(p.errors['username'], ['This field is required.'])
        self.assertFalse(p.is_valid())
        # If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
        # then the latter will get precedence.
        class UserRegistration(Form):
            username = CharField(max_length=10, initial='django')
            password = CharField(widget=PasswordInput)
        p = UserRegistration(initial={'username': 'babik'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="babik" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
    def test_callable_initial_data(self):
        """Callables may be used as ``initial`` values, both in the constructor
        dict and at the field level; they are called to produce the rendered
        value, follow the same bound/unbound rules as plain values, and the
        constructor dict still wins over field-level initial."""
        # The previous technique dealt with raw values as initial data, but it's also
        # possible to specify callable data.
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password = CharField(widget=PasswordInput)
            options = MultipleChoiceField(choices=[('f','foo'),('b','bar'),('w','whiz')])
        # We need to define functions that get called later.
        def initial_django():
            return 'django'
        def initial_stephane():
            return 'stephane'
        def initial_options():
            return ['f','b']
        def initial_other_options():
            return ['b','w']
        # Here, we're not submitting any data, so the initial value will be displayed.
        p = UserRegistration(initial={'username': initial_django, 'options': initial_options}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>""")
        # The 'initial' parameter is meaningless if you pass data.
        p = UserRegistration({}, initial={'username': initial_django, 'options': initial_options}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>""")
        p = UserRegistration({'username': ''}, initial={'username': initial_django}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>""")
        p = UserRegistration({'username': 'foo', 'options':['f','b']}, initial={'username': initial_django}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>""")
        # A callable 'initial' value is *not* used as a fallback if data is not provided.
        # In this example, we don't provide a value for 'username', and the form raises a
        # validation error rather than using the initial value for 'username'.
        p = UserRegistration({'password': 'secret'}, initial={'username': initial_django, 'options': initial_options})
        self.assertEqual(p.errors['username'], ['This field is required.'])
        self.assertFalse(p.is_valid())
        # If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
        # then the latter will get precedence.
        class UserRegistration(Form):
            username = CharField(max_length=10, initial=initial_django)
            password = CharField(widget=PasswordInput)
            options = MultipleChoiceField(choices=[('f','foo'),('b','bar'),('w','whiz')], initial=initial_other_options)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w" selected="selected">whiz</option>
</select></li>""")
        p = UserRegistration(initial={'username': initial_stephane, 'options': initial_options}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>""")
def test_boundfield_values(self):
# It's possible to get to the value which would be used for rendering
# the widget for a field by using the BoundField's value method.
class UserRegistration(Form):
username = CharField(max_length=10, initial='djangonaut')
password = CharField(widget=PasswordInput)
unbound = UserRegistration()
bound = UserRegistration({'password': 'foo'})
self.assertEqual(bound['username'].value(), None)
self.assertEqual(unbound['username'].value(), 'djangonaut')
self.assertEqual(bound['password'].value(), 'foo')
self.assertEqual(unbound['password'].value(), None)
    def test_help_text(self):
        """``help_text`` renders in a helptext span for visible fields in every
        output mode, whether or not the form is bound, and is omitted for
        hidden fields."""
        # You can specify descriptive text for a field by using the 'help_text' argument.
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text='e.g., user@example.com')
            password = CharField(widget=PasswordInput, help_text='Choose wisely.')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></li>
<li>Password: <input type="password" name="password" /> <span class="helptext">Choose wisely.</span></li>""")
        self.assertHTMLEqual(p.as_p(), """<p>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></p>
<p>Password: <input type="password" name="password" /> <span class="helptext">Choose wisely.</span></p>""")
        self.assertHTMLEqual(p.as_table(), """<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /><br /><span class="helptext">e.g., user@example.com</span></td></tr>
<tr><th>Password:</th><td><input type="password" name="password" /><br /><span class="helptext">Choose wisely.</span></td></tr>""")
        # The help text is displayed whether or not data is provided for the form.
        p = UserRegistration({'username': 'foo'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /> <span class="helptext">Choose wisely.</span></li>""")
        # help_text is not displayed for hidden fields. It can be used for documentation
        # purposes, though.
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text='e.g., user@example.com')
            password = CharField(widget=PasswordInput)
            next = CharField(widget=HiddenInput, initial='/', help_text='Redirect destination')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></li>
<li>Password: <input type="password" name="password" /><input type="hidden" name="next" value="/" /></li>""")
        # Help text can include arbitrary Unicode characters.
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text='ŠĐĆŽćžšđ')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), '<li>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</span></li>')
    def test_subclassing_forms(self):
        """Form subclasses inherit parent fields; with multiple inheritance the
        fields are gathered in the order the parent classes are listed."""
        # You can subclass a Form to add fields. The resulting form subclass will have
        # all of the fields of the parent Form, plus whichever fields you define in the
        # subclass.
        class Person(Form):
            first_name = CharField()
            last_name = CharField()
            birthday = DateField()
        class Musician(Person):
            instrument = CharField()
        p = Person(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>""")
        m = Musician(auto_id=False)
        self.assertHTMLEqual(m.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
<li>Instrument: <input type="text" name="instrument" /></li>""")
        # Yes, you can subclass multiple forms. The fields are added in the order in
        # which the parent classes are listed.
        class Person(Form):
            first_name = CharField()
            last_name = CharField()
            birthday = DateField()
        class Instrument(Form):
            instrument = CharField()
        class Beatle(Person, Instrument):
            haircut_type = CharField()
        b = Beatle(auto_id=False)
        self.assertHTMLEqual(b.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
<li>Instrument: <input type="text" name="instrument" /></li>
<li>Haircut type: <input type="text" name="haircut_type" /></li>""")
def test_forms_with_prefixes(self):
# Sometimes it's necessary to have multiple forms display on the same HTML page,
# or multiple copies of the same form. We can accomplish this with form prefixes.
# Pass the keyword argument 'prefix' to the Form constructor to use this feature.
# This value will be prepended to each HTML form field name. One way to think
# about this is "namespaces for HTML forms". Notice that in the data argument,
# each field's key has the prefix, in this case 'person1', prepended to the
# actual field name.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
data = {
'person1-first_name': 'John',
'person1-last_name': 'Lennon',
'person1-birthday': '1940-10-9'
}
p = Person(data, prefix='person1')
self.assertHTMLEqual(p.as_ul(), """<li><label for="id_person1-first_name">First name:</label> <input type="text" name="person1-first_name" value="John" id="id_person1-first_name" /></li>
<li><label for="id_person1-last_name">Last name:</label> <input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" /></li>
<li><label for="id_person1-birthday">Birthday:</label> <input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" /></li>""")
self.assertHTMLEqual(str(p['first_name']), '<input type="text" name="person1-first_name" value="John" id="id_person1-first_name" />')
self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" />')
self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" />')
self.assertEqual(p.errors, {})
self.assertTrue(p.is_valid())
self.assertEqual(p.cleaned_data['first_name'], 'John')
self.assertEqual(p.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
# Let's try submitting some bad data to make sure form.errors and field.errors
# work as expected.
data = {
'person1-first_name': '',
'person1-last_name': '',
'person1-birthday': ''
}
p = Person(data, prefix='person1')
self.assertEqual(p.errors['first_name'], ['This field is required.'])
self.assertEqual(p.errors['last_name'], ['This field is required.'])
self.assertEqual(p.errors['birthday'], ['This field is required.'])
self.assertEqual(p['first_name'].errors, ['This field is required.'])
try:
p['person1-first_name'].errors
self.fail('Attempts to access non-existent fields should fail.')
except KeyError:
pass
# In this example, the data doesn't have a prefix, but the form requires it, so
# the form doesn't "see" the fields.
data = {
'first_name': 'John',
'last_name': 'Lennon',
'birthday': '1940-10-9'
}
p = Person(data, prefix='person1')
self.assertEqual(p.errors['first_name'], ['This field is required.'])
self.assertEqual(p.errors['last_name'], ['This field is required.'])
self.assertEqual(p.errors['birthday'], ['This field is required.'])
# With prefixes, a single data dictionary can hold data for multiple instances
# of the same form.
data = {
'person1-first_name': 'John',
'person1-last_name': 'Lennon',
'person1-birthday': '1940-10-9',
'person2-first_name': 'Jim',
'person2-last_name': 'Morrison',
'person2-birthday': '1943-12-8'
}
p1 = Person(data, prefix='person1')
self.assertTrue(p1.is_valid())
self.assertEqual(p1.cleaned_data['first_name'], 'John')
self.assertEqual(p1.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p1.cleaned_data['birthday'], datetime.date(1940, 10, 9))
p2 = Person(data, prefix='person2')
self.assertTrue(p2.is_valid())
self.assertEqual(p2.cleaned_data['first_name'], 'Jim')
self.assertEqual(p2.cleaned_data['last_name'], 'Morrison')
self.assertEqual(p2.cleaned_data['birthday'], datetime.date(1943, 12, 8))
# By default, forms append a hyphen between the prefix and the field name, but a
# form can alter that behavior by implementing the add_prefix() method. This
# method takes a field name and returns the prefixed field, according to
# self.prefix.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
def add_prefix(self, field_name):
return self.prefix and '%s-prefix-%s' % (self.prefix, field_name) or field_name
p = Person(prefix='foo')
self.assertHTMLEqual(p.as_ul(), """<li><label for="id_foo-prefix-first_name">First name:</label> <input type="text" name="foo-prefix-first_name" id="id_foo-prefix-first_name" /></li>
<li><label for="id_foo-prefix-last_name">Last name:</label> <input type="text" name="foo-prefix-last_name" id="id_foo-prefix-last_name" /></li>
<li><label for="id_foo-prefix-birthday">Birthday:</label> <input type="text" name="foo-prefix-birthday" id="id_foo-prefix-birthday" /></li>""")
data = {
'foo-prefix-first_name': 'John',
'foo-prefix-last_name': 'Lennon',
'foo-prefix-birthday': '1940-10-9'
}
p = Person(data, prefix='foo')
self.assertTrue(p.is_valid())
self.assertEqual(p.cleaned_data['first_name'], 'John')
self.assertEqual(p.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
    def test_forms_with_null_boolean(self):
        """NullBooleanField's select widget maps data values ('1'/'2'/'3' and
        True/False) onto the Unknown/Yes/No options transparently."""
        # NullBooleanField is a bit of a special case because its presentation (widget)
        # is different than its data. This is handled transparently, though.
        class Person(Form):
            name = CharField()
            is_cool = NullBooleanField()
        # No value submitted: "Unknown" is selected by default.
        p = Person({'name': 'Joe'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>""")
        p = Person({'name': 'Joe', 'is_cool': '1'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>""")
        p = Person({'name': 'Joe', 'is_cool': '2'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
        p = Person({'name': 'Joe', 'is_cool': '3'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
        # Python booleans select the matching Yes/No option.
        p = Person({'name': 'Joe', 'is_cool': True}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
        p = Person({'name': 'Joe', 'is_cool': False}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
    def test_forms_with_file_fields(self):
        """FileField validation draws on the ``files`` argument: missing,
        empty, and non-file values produce the matching errors; real uploads
        (including non-ASCII filenames/content) validate cleanly."""
        # FileFields are a special case because they take their data from the request.FILES,
        # not request.POST.
        class FileForm(Form):
            file1 = FileField()
        f = FileForm(auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>')
        f = FileForm(data={}, files={}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="file" name="file1" /></td></tr>')
        # A zero-length upload is rejected as empty.
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', b'')}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><ul class="errorlist"><li>The submitted file is empty.</li></ul><input type="file" name="file1" /></td></tr>')
        # A non-file value in 'files' is rejected outright.
        f = FileForm(data={}, files={'file1': 'something that is not a file'}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><ul class="errorlist"><li>No file was submitted. Check the encoding type on the form.</li></ul><input type="file" name="file1" /></td></tr>')
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', b'some content')}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>')
        self.assertTrue(f.is_valid())
        # Non-ASCII filename and content are accepted as well.
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह'.encode('utf-8'))}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>')
    def test_basic_processing_in_view(self):
        """Simulates the canonical GET/POST view pattern: empty form on GET,
        redisplayed form with errors on invalid POST, success string on valid
        POST."""
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean(self):
                # Cross-field validation: both passwords, if present, must match.
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError('Please make sure your passwords match.')
                return self.cleaned_data
        def my_function(method, post_data):
            # Minimal stand-in for a Django view: bind on POST, else unbound.
            if method == 'POST':
                form = UserRegistration(post_data, auto_id=False)
            else:
                form = UserRegistration(auto_id=False)
            if form.is_valid():
                return 'VALID: %r' % form.cleaned_data
            t = Template('<form action="" method="post">\n<table>\n{{ form }}\n</table>\n<input type="submit" />\n</form>')
            return t.render(Context({'form': form}))
        # Case 1: GET (an empty form, with no errors).
        self.assertHTMLEqual(my_function('GET', {}), """<form action="" method="post">
<table>
<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>
</table>
<input type="submit" />
</form>""")
        # Case 2: POST with erroneous data (a redisplayed form, with errors).
        self.assertHTMLEqual(my_function('POST', {'username': 'this-is-a-long-username', 'password1': 'foo', 'password2': 'bar'}), """<form action="" method="post">
<table>
<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><ul class="errorlist"><li>Ensure this value has at most 10 characters (it has 23).</li></ul><input type="text" name="username" value="this-is-a-long-username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>
</table>
<input type="submit" />
</form>""")
        # Case 3: POST with valid data (the success message).
        self.assertHTMLEqual(my_function('POST', {'username': 'adrian', 'password1': 'secret', 'password2': 'secret'}), str_prefix("VALID: {'username': %(_)s'adrian', 'password1': %(_)s'secret', 'password2': %(_)s'secret'}"))
    def test_templates_with_forms(self):
        """Render Form instances inside Django templates.

        Exercises "dot" access to individual fields ({{ form.field }}),
        field errors, label / label_tag / help_text accessors, and
        {{ form.non_field_errors }} for errors raised by Form.clean().
        """
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text="Good luck picking a username that doesn't already exist.")
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean(self):
                # Cross-field check: both passwords, when present, must match.
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError('Please make sure your passwords match.')
                return self.cleaned_data
        # You have full flexibility in displaying form fields in a template. Just pass a
        # Form instance to the template, and use "dot" access to refer to individual
        # fields. Note, however, that this flexibility comes with the responsibility of
        # displaying all the errors, including any that might not be associated with a
        # particular field.
        t = Template('''<form action="">
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p><label>Your username: <input type="text" name="username" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration({'username': 'django'}, auto_id=False)})), """<form action="">
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password: <input type="password" name="password1" /></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        # Use form.[field].label to output a field's label. You can specify the label for
        # a field by using the 'label' argument to a Field class. If you don't specify
        # 'label', Django will use the field name with underscores converted to spaces,
        # and the initial letter capitalized.
        t = Template('''<form action="">
<p><label>{{ form.username.label }}: {{ form.username }}</label></p>
<p><label>{{ form.password1.label }}: {{ form.password1 }}</label></p>
<p><label>{{ form.password2.label }}: {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p><label>Username: <input type="text" name="username" maxlength="10" /></label></p>
<p><label>Password1: <input type="password" name="password1" /></label></p>
<p><label>Password2: <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        # Use form.[field].label_tag to output a field's label with a <label> tag
        # wrapped around it, but *only* if the given field has an "id" attribute.
        # Recall from above that passing the "auto_id" argument to a Form gives each
        # field an "id" attribute.
        t = Template('''<form action="">
<p>{{ form.username.label_tag }}: {{ form.username }}</p>
<p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
<p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p>Username: <input type="text" name="username" maxlength="10" /></p>
<p>Password1: <input type="password" name="password1" /></p>
<p>Password2: <input type="password" name="password2" /></p>
<input type="submit" />
</form>""")
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id='id_%s')})), """<form action="">
<p><label for="id_username">Username</label>: <input id="id_username" type="text" name="username" maxlength="10" /></p>
<p><label for="id_password1">Password1</label>: <input type="password" name="password1" id="id_password1" /></p>
<p><label for="id_password2">Password2</label>: <input type="password" name="password2" id="id_password2" /></p>
<input type="submit" />
</form>""")
        # Use form.[field].help_text to output a field's help text. If the given field
        # does not have help text, nothing will be output.
        t = Template('''<form action="">
<p>{{ form.username.label_tag }}: {{ form.username }}<br />{{ form.username.help_text }}</p>
<p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
<p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p>Username: <input type="text" name="username" maxlength="10" /><br />Good luck picking a username that doesn't already exist.</p>
<p>Password1: <input type="password" name="password1" /></p>
<p>Password2: <input type="password" name="password2" /></p>
<input type="submit" />
</form>""")
        self.assertEqual(Template('{{ form.password1.help_text }}').render(Context({'form': UserRegistration(auto_id=False)})), '')
        # The label_tag() method takes an optional attrs argument: a dictionary of HTML
        # attributes to add to the <label> tag.
        f = UserRegistration(auto_id='id_%s')
        form_output = []
        for bf in f:
            form_output.append(bf.label_tag(attrs={'class': 'pretty'}))
        expected_form_output = [
            '<label for="id_username" class="pretty">Username</label>',
            '<label for="id_password1" class="pretty">Password1</label>',
            '<label for="id_password2" class="pretty">Password2</label>',
        ]
        self.assertEqual(len(form_output), len(expected_form_output))
        for i in range(len(form_output)):
            self.assertHTMLEqual(form_output[i], expected_form_output[i])
        # To display the errors that aren't associated with a particular field -- e.g.,
        # the errors caused by Form.clean() -- use {{ form.non_field_errors }} in the
        # template. If used on its own, it is displayed as a <ul> (or an empty string, if
        # the list of errors is empty). You can also use it in {% if %} statements.
        t = Template('''<form action="">
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)})), """<form action="">
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        t = Template('''<form action="">
{{ form.non_field_errors }}
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)})), """<form action="">
<ul class="errorlist"><li>Please make sure your passwords match.</li></ul>
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
    def test_empty_permitted(self):
        """Forms with empty_permitted=True validate when left entirely empty."""
        # Sometimes (pretty much in formsets) we want to allow a form to pass validation
        # if it is completely empty. We can accomplish this by using the empty_permitted
        # argument to a form constructor.
        class SongForm(Form):
            artist = CharField()
            name = CharField()
        # First let's show what happens if empty_permitted=False (the default):
        # NOTE(review): the data dict uses key 'song', but the form field is
        # 'name' -- so 'name' is always missing from the data. Presumably
        # intentional (keeps 'name' unfilled); verify against upstream tests.
        data = {'artist': '', 'song': ''}
        form = SongForm(data, empty_permitted=False)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': ['This field is required.'], 'artist': ['This field is required.']})
        self.assertEqual(form.cleaned_data, {})
        # Now let's show what happens when empty_permitted=True and the form is empty.
        form = SongForm(data, empty_permitted=True)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.errors, {})
        self.assertEqual(form.cleaned_data, {})
        # But if we fill in data for one of the fields, the form is no longer empty and
        # the whole thing must pass validation.
        data = {'artist': 'The Doors', 'song': ''}
        form = SongForm(data, empty_permitted=False)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': ['This field is required.']})
        self.assertEqual(form.cleaned_data, {'artist': 'The Doors'})
        # If a field is not given in the data then None is returned for its data. Lets
        # make sure that when checking for empty_permitted that None is treated
        # accordingly.
        data = {'artist': None, 'song': ''}
        form = SongForm(data, empty_permitted=True)
        self.assertTrue(form.is_valid())
        # However, we *really* need to be sure we are checking for None as any data in
        # initial that returns False on a boolean call needs to be treated literally.
        class PriceForm(Form):
            amount = FloatField()
            qty = IntegerField()
        data = {'amount': '0.0', 'qty': ''}
        form = PriceForm(data, initial={'amount': 0.0}, empty_permitted=True)
        self.assertTrue(form.is_valid())
def test_extracting_hidden_and_visible(self):
class SongForm(Form):
token = CharField(widget=HiddenInput)
artist = CharField()
name = CharField()
form = SongForm()
self.assertEqual([f.name for f in form.hidden_fields()], ['token'])
self.assertEqual([f.name for f in form.visible_fields()], ['artist', 'name'])
    def test_hidden_initial_gets_id(self):
        """The show_hidden_initial companion input gets an 'initial-' prefixed id."""
        class MyForm(Form):
            field1 = CharField(max_length=50, show_hidden_initial=True)
        self.assertHTMLEqual(MyForm().as_table(), '<tr><th><label for="id_field1">Field1:</label></th><td><input id="id_field1" type="text" name="field1" maxlength="50" /><input type="hidden" name="initial-field1" id="initial-id_field1" /></td></tr>')
    def test_error_html_required_html_classes(self):
        """error_css_class/required_css_class are applied by as_ul/as_p/as_table."""
        class Person(Form):
            name = CharField()
            is_cool = NullBooleanField()
            email = EmailField(required=False)
            age = IntegerField()
        # Bind with empty data so required fields produce errors.
        p = Person({})
        p.error_css_class = 'error'
        p.required_css_class = 'required'
        self.assertHTMLEqual(p.as_ul(), """<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
<li class="required"><label for="id_is_cool">Is cool:</label> <select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></li>
<li><label for="id_email">Email:</label> <input type="text" name="email" id="id_email" /></li>
<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
<p class="required"><label for="id_is_cool">Is cool:</label> <select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></p>
<p><label for="id_email">Email:</label> <input type="text" name="email" id="id_email" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></p>""")
        self.assertHTMLEqual(p.as_table(), """<tr class="required error"><th><label for="id_name">Name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="name" id="id_name" /></td></tr>
<tr class="required"><th><label for="id_is_cool">Is cool:</label></th><td><select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></td></tr>
<tr><th><label for="id_email">Email:</label></th><td><input type="text" name="email" id="id_email" /></td></tr>
<tr class="required error"><th><label for="id_age">Age:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="age" id="id_age" /></td></tr>""")
def test_label_split_datetime_not_displayed(self):
class EventForm(Form):
happened_at = SplitDateTimeField(widget=widgets.SplitHiddenDateTimeWidget)
form = EventForm()
self.assertHTMLEqual(form.as_ul(), '<input type="hidden" name="happened_at_0" id="id_happened_at_0" /><input type="hidden" name="happened_at_1" id="id_happened_at_1" />')
    def test_multivalue_field_validation(self):
        """Validators and per-subfield limits both apply to a MultiValueField."""
        def bad_names(value):
            # Validator runs against the *compressed* value ("first last").
            if value == 'bad value':
                raise ValidationError('bad value not allowed')
        class NameField(MultiValueField):
            # NOTE(review): the 'fields' parameter is accepted but immediately
            # overwritten below -- deliberate here, the field always uses the
            # two CharFields.
            def __init__(self, fields=(), *args, **kwargs):
                fields = (CharField(label='First name', max_length=10),
                          CharField(label='Last name', max_length=10))
                super(NameField, self).__init__(fields=fields, *args, **kwargs)
            def compress(self, data_list):
                return ' '.join(data_list)
        class NameForm(Form):
            name = NameField(validators=[bad_names])
        form = NameForm(data={'name' : ['bad', 'value']})
        form.full_clean()
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': ['bad value not allowed']})
        # Each subfield's max_length is enforced independently.
        form = NameForm(data={'name' : ['should be overly', 'long for the field names']})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': ['Ensure this value has at most 10 characters (it has 16).',
                                                'Ensure this value has at most 10 characters (it has 24).']})
        form = NameForm(data={'name' : ['fname', 'lname']})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'name' : 'fname lname'})
|
{
"content_hash": "8087d54c51de48a79ff8132fd73eb805",
"timestamp": "",
"source": "github",
"line_count": 1795,
"max_line_length": 519,
"avg_line_length": 60.68356545961003,
"alnum_prop": 0.6239132630110074,
"repo_name": "pygeek/django",
"id": "a8a28ba8060e834bcf28875f12afabe607986b12",
"size": "109051",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tests/regressiontests/forms/tests/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils import timezone
class List(models.Model):
    """A named, colored list whose save lifecycle is traced by signal hooks."""
    name = models.CharField(max_length=200)
    color = models.CharField(max_length=10)
    # NOTE(review): Django treats auto_now_add=True and default= as mutually
    # exclusive options on a DateTimeField (system check fields.W161) --
    # confirm which behavior is actually intended here.
    created = models.DateTimeField(auto_now_add=True, default=timezone.now)
    def do_first_thing(self, action):
        # Python 2 print statement; emits a trace line for the given action.
        print 'List is {} first thing'.format(action)
    def do_second_thing(self, action):
        print 'List is {} second thing'.format(action)
    def do_third_thing(self, action):
        print 'List is {} third thing'.format(action)
@receiver(pre_save, sender=List)
def list_pre_save_hook(sender, instance, **kwargs):
    """Run the instance's three trace actions, in order, before a List saves."""
    for hook in (instance.do_first_thing,
                 instance.do_second_thing,
                 instance.do_third_thing):
        hook('pre_save')
@receiver(post_save, sender=List)
def list_post_save_hook(sender, instance, **kwargs):
    """Run the instance's three trace actions, in order, after a List saves."""
    for method_name in ('do_first_thing', 'do_second_thing', 'do_third_thing'):
        getattr(instance, method_name)('post_save')
|
{
"content_hash": "330b7b98220c0230793c5695f2af6640",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 72,
"avg_line_length": 31.125,
"alnum_prop": 0.7449799196787149,
"repo_name": "gregwym/djangoProjectStructure",
"id": "71cab9a90371a98acf4d8545b6d67bc4f4779f00",
"size": "996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "todo/models/List.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7953"
}
],
"symlink_target": ""
}
|
"""
@title: El Coladero
@description: Aplicación web para detectar y corregir vulnerabilidades
@author: Enrique Martín Martín
@email: emartinm@ucm.es
"""
from bottle import run, template, get, post, request
import sqlite3
@get('/show_all_questions')
def show_all_questions():
    """List every question, newest first, rendered with messages.html."""
    conn = sqlite3.connect("database.db")
    try:
        cur = conn.cursor()
        # Static query -- no user-supplied input involved.
        query = """SELECT author,title,time,tags,id
                   FROM Questions
                   ORDER BY time DESC"""
        cur.execute(query)
        res = list(cur.fetchall())
    finally:
        # Close even when the query raises, so connections are not leaked.
        conn.close()
    return template('messages.html', questions=res)
@post('/insert_question')
def insert_question():
    """Insert a new question built from the submitted form fields."""
    author = request.forms['author']
    title = request.forms['title']
    tags = request.forms['tags']
    body = request.forms['body']
    conn = sqlite3.connect("database.db")
    try:
        cur = conn.cursor()
        # Parameterized query: the previous str.format() + executescript()
        # version interpolated raw form fields, allowing SQL injection.
        qbody = """INSERT INTO Questions(author, title, tags, body, time)
                   VALUES (?,?,?,?,CURRENT_TIMESTAMP)"""
        cur.execute(qbody, (author, title, tags, body))
        conn.commit()
    finally:
        conn.close()
    return "Pregunta insertada con exito"
@get('/show_question')
def show_question():
    """Show one question plus its replies, looked up by the 'id' query param."""
    ident = request.query['id']
    conn = sqlite3.connect("database.db")
    try:
        cur = conn.cursor()
        # Parameterized queries: the previous str.format() interpolation of
        # 'ident' allowed SQL injection through the id query parameter.
        qbody1 = """SELECT author,title,time,tags,body
                    FROM Questions
                    WHERE id=?"""
        qbody2 = """SELECT author,time,body
                    FROM Replies
                    WHERE question_id=?"""
        cur.execute(qbody1, (ident,))
        question = cur.fetchone()
        cur.execute(qbody2, (ident,))
        replies = list(cur.fetchall())
    finally:
        conn.close()
    return template("message_detail.html", q=question, replies=replies, ident=ident)
@post('/insert_reply')
def insert_reply():
    """Insert a reply to an existing question from the submitted form fields."""
    author = request.forms['author']
    body = request.forms['body']
    question_id = request.forms['question_id']
    conn = sqlite3.connect('database.db')
    try:
        cur = conn.cursor()
        # Parameterized query: the previous str.format() interpolation of the
        # form fields allowed SQL injection.
        qbody = """INSERT INTO Replies(author,body,time,question_id)
                   VALUES (?, ?, CURRENT_TIMESTAMP, ?)"""
        cur.execute(qbody, (author, body, question_id))
        conn.commit()
    finally:
        conn.close()
    return "Contestación insertada con éxito"
@get('/search_question')
def search_question():
    """List questions whose tags contain the given tag, newest first."""
    tag = request.query['tag']
    conn = sqlite3.connect('database.db')
    try:
        cur = conn.cursor()
        # Parameterized LIKE: the previous str.format() interpolation of 'tag'
        # allowed SQL injection; the wildcard padding moves into the bound
        # value instead of the SQL text. (Debug prints of the raw query were
        # removed along with the interpolation.)
        qbody = """SELECT author,title,time,tags
                   FROM Questions
                   WHERE tags LIKE ?
                   ORDER BY time DESC"""
        cur.execute(qbody, ('%' + tag + '%',))
        res = list(cur.fetchall())
    finally:
        conn.close()
    return template('messages_search.html', questions=res, tag=tag)
if __name__ == "__main__":
    # Development server only: bottle's built-in server with debug tracebacks.
    run(host='localhost',port=8080,debug=True)
|
{
"content_hash": "bb42607533d48fd66cb9c504e86a5a47",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 84,
"avg_line_length": 29.525252525252526,
"alnum_prop": 0.6130687649674992,
"repo_name": "ferreiro/Python-course",
"id": "fb65b89a230e503fc381492388451a188982ff1b",
"size": "2952",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "4_Security/2_DelegateLogin/w_partA/coladero.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "79476"
},
{
"name": "HTML",
"bytes": "4912"
},
{
"name": "JavaScript",
"bytes": "51913"
},
{
"name": "PLpgSQL",
"bytes": "1069"
},
{
"name": "Python",
"bytes": "109348"
},
{
"name": "Shell",
"bytes": "72"
},
{
"name": "Smarty",
"bytes": "51439"
}
],
"symlink_target": ""
}
|
'''
Testing RemindmeRepository model.
Objectives:
- ensure a consistent interface is maintained
'''
import unittest
from remindme.Remindme import Remindme
from remindme.RemindmeRepository import RemindmeRepository
class Test_RemindmeRepository_Model(unittest.TestCase):
    '''Tests against the RemindmeRepository model
    (remindme.models.RemindmeRepository).'''
    def setUp(self):
        """Create a repository backed by a test DB file and seed three remindmes."""
        # NOTE(review): no tearDown removes this file, so state can leak
        # between runs if the repository persists to disk -- verify.
        self.db_file = "./test/_test.RemindmeRepository.db"
        self.repository = RemindmeRepository(self.db_file)
        self.data = [
            { "title": "my title is awesome", "content": "and so is my content" },
            { "title": "boring but real title", "content": "get some content for me" },
            { "title": "pizza lover", "content": "some good pizza for me" }
        ]
        for d in self.data:
            self.repository.create_remindme(d["title"], d["content"])
    def test_constructor_restores_remindmes(self):
        """A repository opened on an existing DB file restores its remindmes."""
        num_remindmes = self.repository.count()
        same_repo = RemindmeRepository(self.db_file)
        num_restored_remindmes = same_repo.count()
        self.assertEqual(num_remindmes, num_restored_remindmes,
            '''RemindmeRepository#__init__ fails to restores remindmes''')
    def test_insert_remindme(self):
        """insert_remindme returns True and the remindme becomes findable by title."""
        title = "gotcha boy"
        content = "pink stars are awesome"
        remindme = Remindme(title, content, self.repository)
        status = self.repository.insert_remindme(remindme)
        self.assertTrue(status,
            '''RemindmeRepository#insert_remindme(remindme) failed to insert
            remindme''')
        found_remindme = self.repository.find_by_title(title)
        self.assertEqual(content, found_remindme.get_content(),
            '''RemindmeRepository#insert_remindme(remindme) failed to insert
            remindme''')
    def test_insert_remindmes_with_same_title(self):
        """A second insert with a duplicate title returns False."""
        title = "same old title"
        remindme_1 = Remindme(title, "some content", self.repository)
        remindme_2 = Remindme(title, "other content", self.repository)
        status = self.repository.insert_remindme(remindme_1)
        self.assertTrue(status,
            '''Unexpected error with RemindmeRepository#insert_remindme''')
        status = self.repository.insert_remindme(remindme_2)
        self.assertFalse(status,
            '''RemindmeRepository#insert_remindme fails to return False when
            a remindme fails to be inserted''')
    def test_create_remindme(self):
        """create_remindme returns a Remindme instance on success."""
        title = "some good title, boo"
        content = "got some content for you!"
        remindme = self.repository.create_remindme(title, content)
        self.assertTrue(isinstance(remindme, Remindme),
            '''RemindmeRepository#create_remindme does NOT
            return an instance of Remindme''')
    def test_create_remindmes_with_same_title(self):
        """create_remindme with a duplicate title returns a falsy value."""
        title = "some random content for everyone"
        content = "got some content for you!"
        remindme = self.repository.create_remindme(title, content)
        self.assertTrue(isinstance(remindme, Remindme),
            '''Unexpected error with RemindmeRepository#create_remindme''')
        remindme = self.repository.create_remindme(title, "soem other content")
        self.assertFalse(remindme,
            '''RemindmeRepository#create_remindme fails to return False when a
            remindme with same title is being created''')
    def test_remove_remindme(self):
        """remove_remindme removes the remindme and returns True, then False on repeat."""
        remindme_to_remove = self.repository.get_remindmes()[0]
        status = self.repository.remove_remindme(remindme_to_remove)
        self.assertTrue(status,
            '''RemindmeRepository#remove_remindme fails to return True when
            removing a remindme''')
        remindmes = self.repository.get_remindmes()
        # Despite its name, this flag is True when the remindme is ABSENT
        # (zero matching titles remain after removal).
        found_remindme = (len([r for r in remindmes
                               if r.get_title() == remindme_to_remove.get_title()]) == 0)
        self.assertTrue(found_remindme,
            '''RemindmeRepository#remove_remindme fails to remove remindme''')
        status = self.repository.remove_remindme(remindme_to_remove)
        self.assertFalse(status,
            '''RemindmeRepository#remove_remindme fails to return False when
            removing an already removed remindme''')
    def test_remove_remindmes(self):
        """remove_remindmes empties the repository and returns True."""
        remindmes = self.repository.get_remindmes()
        status = self.repository.remove_remindmes()
        num_remindmes = self.repository.count()
        self.assertEqual(0, num_remindmes,
            '''RemindmeRepository#remove_remindmes fails to remove all
            remindmes''')
        self.assertTrue(status,
            '''RemindmeRepository#remove_remindmes fails to return True
            when removing remindes''')
        # Re-insert so later assertions in this test run see seeded data.
        for r in remindmes:
            self.repository.insert_remindme(r)
    def test_find(self):
        """find(predicate) returns exactly the remindmes matching the lambda."""
        title = "SoME CoMlex! Title tHaT MuST be UniqUE"
        remindme = Remindme(title, "some damn content", self.repository)
        self.repository.insert_remindme(remindme)
        found_remindmes = self.repository.find(lambda r: r.get_title() == title)
        self.assertEqual(1, len(found_remindmes),
            '''RemindmeRepository#find fails to get remindme qualifying a lambda''')
        self.assertEqual(remindme, found_remindmes[0],
            '''RemindmeRepository#find fails to get the remindme''')
|
{
"content_hash": "fa8d5590adf97fad4071993af37aaa6a",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 87,
"avg_line_length": 45.9051724137931,
"alnum_prop": 0.6570892018779343,
"repo_name": "GochoMugo/remindme",
"id": "75cc4d2767db5ee977b01b02f7755cace92570dd",
"size": "5325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_RemindmeRepository.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "808"
},
{
"name": "Python",
"bytes": "44402"
}
],
"symlink_target": ""
}
|
"""The tests for the hassio component."""
import os
import pytest
from homeassistant.auth.const import GROUP_ID_ADMIN
from homeassistant.components import frontend
from homeassistant.components.hassio import STORAGE_KEY
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"}
@pytest.fixture(autouse=True)
def mock_all(aioclient_mock):
    """Mock all setup requests."""
    # Table-driven registration: (HTTP method, URL, JSON payload) per endpoint.
    endpoints = (
        ("post", "http://127.0.0.1/homeassistant/options", {"result": "ok"}),
        ("get", "http://127.0.0.1/supervisor/ping", {"result": "ok"}),
        ("post", "http://127.0.0.1/supervisor/options", {"result": "ok"}),
        (
            "get",
            "http://127.0.0.1/info",
            {
                "result": "ok",
                "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
            },
        ),
        (
            "get",
            "http://127.0.0.1/host/info",
            {
                "result": "ok",
                "data": {
                    "result": "ok",
                    "data": {
                        "chassis": "vm",
                        "operating_system": "Debian GNU/Linux 10 (buster)",
                        "kernel": "4.19.0-6-amd64",
                    },
                },
            },
        ),
        (
            "get",
            "http://127.0.0.1/core/info",
            {"result": "ok", "data": {"version_latest": "1.0.0"}},
        ),
        (
            "get",
            "http://127.0.0.1/ingress/panels",
            {"result": "ok", "data": {"panels": {}}},
        ),
    )
    for method, url, payload in endpoints:
        getattr(aioclient_mock, method)(url, json=payload)
async def test_setup_api_ping(hass, aioclient_mock):
    """Test setup with API ping."""
    with patch.dict(os.environ, MOCK_ENVIRON):
        assert await async_setup_component(hass, "hassio", {})
    # All mocked supervisor endpoints are hit exactly once during setup.
    assert aioclient_mock.call_count == 7
    assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0"
    assert hass.components.hassio.is_hassio()
async def test_setup_api_panel(hass, aioclient_mock):
    """Test that setup registers the Supervisor panel in the frontend."""
    assert await async_setup_component(hass, "frontend", {})
    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(hass, "hassio", {})
        assert result
    # The hassio integration registers a custom admin-only frontend panel.
    panels = hass.data[frontend.DATA_PANELS]
    assert panels.get("hassio").to_response() == {
        "component_name": "custom",
        "icon": "hass:home-assistant",
        "title": "Supervisor",
        "url_path": "hassio",
        "require_admin": True,
        "config": {
            "_panel_custom": {
                "embed_iframe": True,
                "js_url": "/api/hassio/app/entrypoint.js",
                "name": "hassio-main",
                "trust_external": False,
            }
        },
    }
async def test_setup_api_push_api_data(hass, aioclient_mock):
    """Test setup with API push."""
    config = {"http": {"server_port": 9999}, "hassio": {}}
    with patch.dict(os.environ, MOCK_ENVIRON):
        assert await async_setup_component(hass, "hassio", config)
    assert aioclient_mock.call_count == 7
    # The second mocked call carries the pushed API options.
    pushed = aioclient_mock.mock_calls[1][2]
    assert not pushed["ssl"]
    assert pushed["port"] == 9999
    assert pushed["watchdog"]
async def test_setup_api_push_api_data_server_host(hass, aioclient_mock):
    """Test setup with API push with active server host."""
    config = {
        "http": {"server_port": 9999, "server_host": "127.0.0.1"},
        "hassio": {},
    }
    with patch.dict(os.environ, MOCK_ENVIRON):
        assert await async_setup_component(hass, "hassio", config)
    assert aioclient_mock.call_count == 7
    # With an explicit server_host, the watchdog is not enabled.
    pushed = aioclient_mock.mock_calls[1][2]
    assert not pushed["ssl"]
    assert pushed["port"] == 9999
    assert not pushed["watchdog"]
async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage):
    """Test setup with API push default data."""
    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
        assert result
    assert aioclient_mock.call_count == 7
    assert not aioclient_mock.mock_calls[1][2]["ssl"]
    # Default port is used when no http config is given.
    assert aioclient_mock.mock_calls[1][2]["port"] == 8123
    refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"]
    # The generated system user id is persisted in storage.
    hassio_user = await hass.auth.async_get_user(
        hass_storage[STORAGE_KEY]["data"]["hassio_user"]
    )
    assert hassio_user is not None
    assert hassio_user.system_generated
    assert len(hassio_user.groups) == 1
    assert hassio_user.groups[0].id == GROUP_ID_ADMIN
    # for/else: the else arm only fires when no token matched the pushed one.
    for token in hassio_user.refresh_tokens.values():
        if token.token == refresh_token:
            break
    else:
        assert False, "refresh token not found"
async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage):
    """Test setup promotes an existing non-admin Hass.io user to admin."""
    # Create user without admin
    user = await hass.auth.async_create_system_user("Hass.io")
    assert not user.is_admin
    await hass.auth.async_create_refresh_token(user)
    hass_storage[STORAGE_KEY] = {
        "data": {"hassio_user": user.id},
        "key": STORAGE_KEY,
        "version": 1,
    }
    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
        assert result
    # Setup must have added the admin group to the pre-existing user.
    assert user.is_admin
async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage):
    """Test setup reuses an existing Hass.io user and its refresh token."""
    user = await hass.auth.async_create_system_user("Hass.io test")
    token = await hass.auth.async_create_refresh_token(user)
    hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}}
    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
        assert result
    assert aioclient_mock.call_count == 7
    assert not aioclient_mock.mock_calls[1][2]["ssl"]
    assert aioclient_mock.mock_calls[1][2]["port"] == 8123
    # The pre-existing user's token is pushed, not a newly generated one.
    assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token
async def test_setup_core_push_timezone(hass, aioclient_mock):
    """Test timezone is pushed to the supervisor on setup and on config update."""
    hass.config.time_zone = "testzone"
    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(hass, "hassio", {"hassio": {}})
        assert result
    assert aioclient_mock.call_count == 7
    assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone"
    # Changing the core config re-pushes the new timezone.
    await hass.config.async_update(time_zone="America/New_York")
    await hass.async_block_till_done()
    assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York"
async def test_setup_hassio_no_additional_data(hass, aioclient_mock):
    """Test setup sends the supervisor token header with no extra config."""
    # The second patch.dict overrides HASSIO_TOKEN from MOCK_ENVIRON.
    with patch.dict(os.environ, MOCK_ENVIRON), patch.dict(
        os.environ, {"HASSIO_TOKEN": "123456"}
    ):
        result = await async_setup_component(hass, "hassio", {"hassio": {}})
        assert result
    assert aioclient_mock.call_count == 7
    assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
async def test_fail_setup_without_environ_var(hass):
    """Fail setup if no environ variable set."""
    # Clear the environment entirely so HASSIO/HASSIO_TOKEN are absent.
    with patch.dict(os.environ, {}, clear=True):
        assert not await async_setup_component(hass, "hassio", {})
async def test_warn_when_cannot_connect(hass, caplog):
    """Warn (but still set up) when the supervisor cannot be reached."""
    with patch.dict(os.environ, MOCK_ENVIRON), patch(
        "homeassistant.components.hassio.HassIO.is_connected",
        return_value=None,
    ):
        result = await async_setup_component(hass, "hassio", {})
        assert result
    # Setup succeeds; the connection problem is only logged.
    assert hass.components.hassio.is_hassio()
    assert "Not connected with Hass.io / system too busy!" in caplog.text
async def test_service_register(hassio_env, hass):
    """Check if service will be setup."""
    assert await async_setup_component(hass, "hassio", {})
    # One registration check per exposed service. The original listed
    # "host_reboot" twice; the duplicate is removed by looping over a
    # de-duplicated tuple of service names.
    for service in (
        "addon_start",
        "addon_stop",
        "addon_restart",
        "addon_stdin",
        "host_shutdown",
        "host_reboot",
        "snapshot_full",
        "snapshot_partial",
        "restore_full",
        "restore_partial",
    ):
        assert hass.services.has_service("hassio", service)
async def test_service_calls(hassio_env, hass, aioclient_mock):
    """Call service and check the API calls behind that.

    Each hassio service must translate into the matching supervisor REST
    endpoint with the service data forwarded as the JSON payload.
    """
    assert await async_setup_component(hass, "hassio", {})
    # Register a mock endpoint for every supervisor API the services hit.
    aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"})
    aioclient_mock.post(
        "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"}
    )
    aioclient_mock.post(
        "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"}
    )
    # Add-on services: the counts below include the calls made during setup.
    await hass.services.async_call("hassio", "addon_start", {"addon": "test"})
    await hass.services.async_call("hassio", "addon_stop", {"addon": "test"})
    await hass.services.async_call("hassio", "addon_restart", {"addon": "test"})
    await hass.services.async_call(
        "hassio", "addon_stdin", {"addon": "test", "input": "test"}
    )
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 7
    # mock_calls[-1][2] is the request payload; stdin forwards the raw input.
    assert aioclient_mock.mock_calls[-1][2] == "test"
    # Host services.
    await hass.services.async_call("hassio", "host_shutdown", {})
    await hass.services.async_call("hassio", "host_reboot", {})
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 9
    # Snapshot services; the partial payload must pass through unchanged.
    await hass.services.async_call("hassio", "snapshot_full", {})
    await hass.services.async_call(
        "hassio",
        "snapshot_partial",
        {"addons": ["test"], "folders": ["ssl"], "password": "123456"},
    )
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 11
    assert aioclient_mock.mock_calls[-1][2] == {
        "addons": ["test"],
        "folders": ["ssl"],
        "password": "123456",
    }
    # Restore services; note the "snapshot" key selects the URL and is not
    # part of the forwarded payload.
    await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"})
    await hass.services.async_call(
        "hassio",
        "restore_partial",
        {
            "snapshot": "test",
            "homeassistant": False,
            "addons": ["test"],
            "folders": ["ssl"],
            "password": "123456",
        },
    )
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 13
    assert aioclient_mock.mock_calls[-1][2] == {
        "addons": ["test"],
        "folders": ["ssl"],
        "homeassistant": False,
        "password": "123456",
    }
async def test_service_calls_core(hassio_env, hass, aioclient_mock):
    """Call core service and check the API calls behind that."""
    assert await async_setup_component(hass, "hassio", {})
    aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"})
    # "stop" must be proxied to the supervisor API (count includes setup calls).
    await hass.services.async_call("homeassistant", "stop")
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 4
    # "check_config" is handled locally: the call count must NOT change.
    await hass.services.async_call("homeassistant", "check_config")
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 4
    # "restart" validates the config first, then hits the supervisor API.
    with patch(
        "homeassistant.config.async_check_ha_config_file", return_value=None
    ) as mock_check_config:
        await hass.services.async_call("homeassistant", "restart")
        await hass.async_block_till_done()
        assert mock_check_config.called
    assert aioclient_mock.call_count == 5
|
{
"content_hash": "9a03f61dd5b89217c8178b2f4fae8b02",
"timestamp": "",
"source": "github",
"line_count": 340,
"max_line_length": 88,
"avg_line_length": 37.23529411764706,
"alnum_prop": 0.6225908372827804,
"repo_name": "tchellomello/home-assistant",
"id": "56792295fec3ebaafb2d3ba9e74d7906019771e7",
"size": "12660",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/hassio/test_init.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "26713364"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
}
|
import unittest
import paddle
from paddle.distributed.fleet import auto
from paddle.fluid import program_guard
from paddle.fluid.backward import append_backward
paddle.enable_static()
def make_program_dp2():
    """Build a static program with input x sharded over a 2-rank mesh.

    Returns:
        (main_program, start_program, tmp_0): the main/startup programs and
        the p-norm output used as the loss for backward.
    """
    main_program = paddle.fluid.Program()
    start_program = paddle.fluid.Program()
    with paddle.static.program_guard(main_program, start_program):
        x = paddle.static.data(name='x', shape=[4, 5, 6], dtype='float32')
        x.stop_gradient = False
        # Shard dim 0 of x along mesh axis "x" (ranks 0 and 1).
        auto.shard_tensor(
            x, auto.ProcessMesh([0, 1], dim_names=["x"]), ["x", None, None]
        )
        tmp_0 = paddle.norm(x, p=2)
    return main_program, start_program, tmp_0
def make_program_serial():
    """Build the same p-norm program as make_program_dp2, but unsharded.

    Uses a single-rank mesh with no dimension mapped, so the program stays
    serial; used as the baseline in TestDistPNorm.test_dist_pnorm_serial.

    Returns:
        (main_program, start_program, tmp_0): programs and the loss tensor.
    """
    main_program = paddle.fluid.Program()
    start_program = paddle.fluid.Program()
    with paddle.static.program_guard(main_program, start_program):
        x = paddle.static.data(name='x', shape=[4, 5, 6], dtype='float32')
        x.stop_gradient = False
        # Single-rank mesh, no sharded dims -> fully replicated tensor.
        auto.shard_tensor(
            x, auto.ProcessMesh([0], dim_names=["x"]), [None, None, None]
        )
        tmp_0 = paddle.norm(x, p=2)
    return main_program, start_program, tmp_0
def parallelizer(program_func, rank):
    """Run the auto-parallel pipeline on a program for one rank.

    Completes forward/backward distributed annotations, appends backward
    ops, then partitions the program for the given rank.

    Args:
        program_func: zero-arg callable returning
            (main_program, start_program, loss).
        rank (int): rank to partition the program for.

    Returns:
        (dist_main_prog, dist_context): the partitioned main program and
        the distributed context holding per-op distributed attributes.
    """
    # Imported lazily to keep module import light (mirrors upstream style).
    from paddle.distributed.auto_parallel.completion import Completer
    from paddle.distributed.auto_parallel.dist_context import DistributedContext
    from paddle.distributed.auto_parallel.partitioner import Partitioner
    main_program, start_program, loss = program_func()
    dist_context = DistributedContext()
    completer = Completer(dist_context)
    completer.complete_forward_annotation(main_program)
    dist_context.block_state.parse_forward_blocks(main_program)
    # Backward ops must be appended under the same program guard.
    with program_guard(main_program, start_program):
        params_grads = append_backward(
            loss, distop_context=dist_context.dist_op_context
        )
    completer.complete_backward_annotation(main_program)
    dist_context.block_state.parse_backward_blocks(main_program)
    partitioner = Partitioner(dist_context, rank)
    dist_main_prog, _, _ = partitioner.partition(
        main_program, start_program, []
    )
    return dist_main_prog, dist_context
class TestDistPNorm(unittest.TestCase):
    """Check distributed attributes produced for p_norm under auto-parallel."""

    def test_dist_pnorm_dp2(self):
        """Data-parallel (2 ranks): p_norm requires an allgather + slice."""
        for rank in range(2):
            dist_main_prog, dist_context = parallelizer(make_program_dp2, rank)
            ops = dist_main_prog.global_block().ops
            op_types = []
            for op in ops:
                op_types.append(op.type)
                op_dist_attr = dist_context.get_op_dist_attr_for_program(op)
                if op.type == "p_norm":
                    assert op_dist_attr.impl_type == "p_norm"
                # p_norm and its grad operate on fully replicated tensors
                # (dims_mapping of all -1).
                if op.type in ["p_norm", "p_norm_grad"]:
                    for input_attr in op_dist_attr.inputs_dist_attrs.values():
                        assert set(input_attr.dims_mapping) == set([-1])
                    for output_attr in op_dist_attr.outputs_dist_attrs.values():
                        assert set(output_attr.dims_mapping) == set([-1])
                # allgather: input sharded on dim 0, output replicated.
                if op.type == 'c_allgather':
                    for input_attr in op_dist_attr.inputs_dist_attrs.values():
                        assert input_attr.dims_mapping[0] == 0
                        assert set(input_attr.dims_mapping[1:]) == set([-1])
                    for output_attr in op_dist_attr.outputs_dist_attrs.values():
                        assert set(output_attr.dims_mapping) == set([-1])
                # slice (backward re-shard): replicated in, sharded dim 0 out.
                if op.type == 'slice':
                    for input_attr in op_dist_attr.inputs_dist_attrs.values():
                        assert set(input_attr.dims_mapping) == set([-1])
                    for output_attr in op_dist_attr.outputs_dist_attrs.values():
                        assert output_attr.dims_mapping[0] == 0
                        assert set(output_attr.dims_mapping[1:]) == set([-1])
            # Exact op sequence after partitioning.
            assert op_types == [
                "c_allgather",
                "p_norm",
                "fill_constant",
                "p_norm_grad",
                "slice",
            ]

    def test_dist_pnorm_serial(self):
        """Serial mesh: every op falls back to the default impl."""
        dist_main_prog, dist_context = parallelizer(make_program_serial, 0)
        ops = dist_main_prog.global_block().ops
        for op in ops:
            op_dist_attr = dist_context.get_op_dist_attr_for_program(op)
            assert op_dist_attr.impl_type == "default"
# Standard unittest entry point.
if __name__ == "__main__":
    unittest.main()
|
{
"content_hash": "3af65d73706294923f240fc684bcbf33",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 80,
"avg_line_length": 40.09090909090909,
"alnum_prop": 0.5913832199546485,
"repo_name": "PaddlePaddle/Paddle",
"id": "969bab0a69f194cc5c8a7d7e5a6a21c54f46a3bc",
"size": "5021",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/auto_parallel/test_dist_pnorm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
}
|
"""
Plexer
-----
Plexer is a simple lexer (tokenizer) for Python.
License: MIT
How to...?
````````````
-----
Print all #include statements in a C/C++ file?
-----
from plexer import TYPE, tokenize_lines
lines = tokenize_lines('#include "foo.h"\nint val = 42;\n')
for line in lines:
first_token = line[0]
if first_token['value'] == '#include':
# print the line.
print ''.join([token['value'] for token in line])
-----
Setup?
-----
$ easy_install Plexer
Links
`````
* `website <http://github.com/shawnpresser/plexer/>`_
"""
# Bootstrap setuptools via the bundled distribute_setup shim before importing
# setup() from it.
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup
def run_tests():
    """Load and return the Plexer test suite for setuptools' test runner.

    Adds the local ``tests`` directory to ``sys.path`` so the suite module
    can be imported regardless of the current working directory.

    Returns:
        The suite object built by ``plexer_tests.suite()``.
    """
    # PEP 8: one import per line (was `import os, sys`).
    import os
    import sys
    sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
    from plexer_tests import suite
    return suite()
# Package metadata; `test_suite` points back at run_tests() defined above.
setup(
    name='Plexer',
    version='1.0.0',
    url='http://github.com/shawnpresser/plexer/',
    license='MIT',
    author='Shawn Presser',
    author_email='shawnpresser@gmail.com',
    description='A simple lexer to tokenize text (for example, a C file)',
    keywords='lexer tokenize tokenization parser text',
    py_modules=['distribute_setup','plexer'],
    platforms='any',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    test_suite='__main__.run_tests'
)
|
{
"content_hash": "59dfec1f11ad7ab7369d6008d517a4c3",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 74,
"avg_line_length": 22.87323943661972,
"alnum_prop": 0.6120689655172413,
"repo_name": "shawnpresser/plexer",
"id": "1fab9372315ea3a27ca6dacaa096ddbcba080513",
"size": "1624",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35441"
}
],
"symlink_target": ""
}
|
import logging
import config
import time
from multiprocessing import Process, Queue
from detect import *
from process import _runner
def process_queue():
    """Process inter-process messages.

    Process the inter-process :py:class:`multiprocessing.Queue`
    objects, which receive messages from the spawned processes for
    logging events and function returns.

    Args:
        None

    Note:
        Warnings are emitted to log if an invalid message is received.
    """
    # Get main process logger
    logger = logging.getLogger()
    # Loop through queues...
    for dummymp_queue in config.dummymp_queues:
        # Make sure there's something to fetch from the queue!
        if dummymp_queue.empty():
            continue
        # Make a request to get the queue, with a timeout to ensure
        # no blocking (or long waiting)
        qout = dummymp_queue.get(timeout=0.001)
        # Messages must be lists; anything else indicates a bug in the
        # spawned process. (isinstance replaces the old `type(...) !=`
        # comparison.)
        if not isinstance(qout, list):
            # Lazy %-args: the message is only formatted if emitted.
            logger.warning(
                "WARNING: Received invalid message from process! "
                "This may be a bug! Message: %s", str(qout))
            continue
        # Check the message type IDs!
        # Format: [ [ DUMMYMP_MSG_TYPE_ID, SYSTEM_PID, INTERNAL_ID ], DATA... ]
        if qout[0][0] == config.DUMMYMP_LOG_ID:
            # Append PID info text, then emit the modified log record.
            qout[1].msg = ("[PID %i] " % qout[0][1]) + qout[1].msg
            logger.handle(qout[1])
        elif qout[0][0] == config.DUMMYMP_RET_ID:
            # Store return into return dictionary, keyed by internal ID.
            config.dummymp_rets[qout[0][2]] = qout[1]
        else:
            logger.warning(
                "WARNING: Received invalid message from process! "
                "(Invalid message type ID!) This may be a bug! Message: %s",
                str(qout))
def process_process():
    """Process the execution queue and inter-process messages.

    Process the execution queue by starting processes in said queue,
    handle processes that have completed, and process inter-process
    messages via :py:func:`process_queue()`.

    (In plain English: start the queued processes, check processes to
    see if they are done running, and grab any inter-process messages
    sent from the spawned process.)

    Args:
        None

    Returns:
        bool: A boolean indicating whether the execution queue has
        completed or not. Returns True if it has completed, False if it
        has not. This return value can be used in a while loop to block
        until processes have completed. (This is somewhat similar to
        multiprocessing's join().)
    """
    nproc = 0
    # Loop through processes via index! (An index is used because entries
    # are removed mid-iteration; nproc is decremented after each removal.)
    while nproc < len(config.dummymp_procs):
        dummymp_proc = config.dummymp_procs[nproc]
        # Check if process is complete! (In this case, ensure that
        # the process is not in a start queue and it isn't alive
        # anymore!)
        if (not dummymp_proc in config.dummymp_start_procs) and (not dummymp_proc.is_alive()):
            # Run process_queue() to fetch the remaining queue items
            # from the process.
            process_queue()
            # Remove the queue and process
            pi = config.dummymp_procs.index(dummymp_proc)
            # Make sure to close the queue!
            config.dummymp_queues[pi].close()
            config.dummymp_queues.pop(pi)
            config.dummymp_procs.pop(pi)
            logging.debug("Process complete!")
            # Add to the completed count and remove from running count...
            config.total_completed += 1
            config.total_running -= 1
            # Make any callbacks, if necessary.
            if config.PROCESS_END_CALLBACK:
                config.PROCESS_END_CALLBACK(config.total_completed, config.total_running, config.total_procs)
            # Deincrement index counter, since we just removed a process
            # from the list.
            nproc -= 1
        # Increment
        nproc += 1
    # Fetch available CPUs (capacity minus what we are already running).
    avail_cpus = getCPUAvail() - config.total_running
    # Check if we need to update CPU avail
    # NOTE(review): new processes are only spawned when needUpdateCPUAvail()
    # is falsy; the name suggests the opposite polarity — confirm the
    # intended semantics against detect.py.
    if not needUpdateCPUAvail():
        nproc = 0
        # Loop through process execution queue
        while nproc < len(config.dummymp_start_procs):
            dummymp_proc_entry = config.dummymp_start_procs[nproc]
            # Check to make sure we can meet max_processes limit
            # (0 means no limit set)
            if (config.max_processes == 0) or (config.total_running < config.max_processes):
                # If there's no available CPUs, check to make sure that a
                # process isn't already running, and that the mode set is
                # not GENEROUS.
                if ((avail_cpus == 0) and (config.total_running == 0) and (config.DUMMYMP_MODE != config.DUMMYMP_GENEROUS)):
                    # Force a single process to run!
                    avail_cpus += 1
                    logging.debug("Not in generous mode, so forcing one task to run.")
                # Check if we have any available (or "available") CPUs!
                if avail_cpus > 0:
                    logging.debug("%i CPUs available, spawning process!" % avail_cpus)
                    # Deincrement counter
                    avail_cpus -= 1
                    # Setup Queue
                    # We create the Queue and Process here so that we can
                    # prevent the error from opening too many Queue objects
                    # in multiprocessing.Pipe:
                    #   IOError: handle out of range in select()
                    # Bug: http://bugs.python.org/issue10527
                    q = Queue()
                    # Extract internal PID, function, final_args, and
                    # final_kwargs
                    int_pid = dummymp_proc_entry[0]
                    func = dummymp_proc_entry[1]
                    final_args = dummymp_proc_entry[2]
                    final_kwargs = dummymp_proc_entry[3]
                    # Now add some arguments to the front:
                    # Function to actually run
                    final_args.insert(0, func)
                    # Queue
                    final_args.insert(0, q)
                    # Process ID
                    final_args.insert(0, int_pid)
                    # Create Process object
                    p = Process(target = _runner, args = final_args, kwargs = final_kwargs)
                    # Save it
                    config.dummymp_queues.append(q)
                    config.dummymp_procs.append(p)
                    # Start the process...
                    p.start()
                    # ...and remove it from the starting queue.
                    config.dummymp_start_procs.remove(dummymp_proc_entry)
                    # Increment running counter...
                    config.total_running += 1
                    # Make any callbacks, if necessary.
                    if config.PROCESS_START_CALLBACK:
                        config.PROCESS_START_CALLBACK(config.total_completed, config.total_running, config.total_procs)
                    # Deincrement index counter, since we just removed a process
                    # from the start queue list.
                    nproc -= 1
            else:
                logging.debug("Max processes limit of %i reached, waiting for process to terminate." % config.max_processes)
            # Increment
            nproc += 1
    # Check to see if we are done!
    if len(config.dummymp_procs) == 0:
        logging.debug("All processes complete, returning True.")
        return True
    return False
def process_until_done():
    """Block until every queued process has finished running.

    Repeatedly drives the task manager: process_process() starts queued
    work and reaps finished processes, returning True once nothing is
    left. Between polls we drain inter-process messages and sleep briefly
    to avoid busy-waiting. (This is somewhat similar to
    multiprocessing's join().)

    Args:
        None
    """
    while True:
        if process_process():
            break
        process_queue()
        time.sleep(0.001)
|
{
"content_hash": "af2f8cfb21335b1ee5dcd45e02affbd6",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 151,
"avg_line_length": 40.91509433962264,
"alnum_prop": 0.5438090846207055,
"repo_name": "alberthdev/pyradmon",
"id": "8042cc7e7488a31a11a3aadf5eed6b8e5964c07f",
"size": "9459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyradmon/dummymp/taskmgr.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FORTRAN",
"bytes": "87668"
},
{
"name": "Makefile",
"bytes": "8312"
},
{
"name": "PHP",
"bytes": "18113"
},
{
"name": "Python",
"bytes": "534688"
},
{
"name": "Shell",
"bytes": "11333"
}
],
"symlink_target": ""
}
|
# Sphinx build configuration for the CodeIgniter user guide.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.ifconfig', 'sphinxcontrib.phpdomain']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CodeIgniter'
copyright = u'2014 - 2016, British Columbia Institute of Technology'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.0.7-dev'
# The full version, including alpha/beta/rc tags.
release = '3.0.7-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :php:func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. php:function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
highlight_language = 'ci'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# Specifying a few options; just a starting point & we can play with it.
html_theme_options = {
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["./_themes"]
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'images/ci-icon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CodeIgniterdoc'
html_copy_source = False
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'CodeIgniter.tex', u'CodeIgniter Documentation',
   u'British Columbia Institute of Technology', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'codeigniter', u'CodeIgniter Documentation',
     [u'British Columbia Institute of Technology'], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'CodeIgniter'
epub_author = u'British Columbia Institute of Technology'
epub_publisher = u'British Columbia Institute of Technology'
epub_copyright = u'2014 - 2016, British Columbia Institute of Technology'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
{
"content_hash": "9c4a7e7556fad21dea4c608b66f231b4",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 80,
"avg_line_length": 32.528,
"alnum_prop": 0.7089276930644368,
"repo_name": "Gloobywerts/Newspaper",
"id": "26f854d85af3947ec1da796a77587192d339bbb7",
"size": "8554",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "user_guide_src/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "240"
},
{
"name": "CSS",
"bytes": "2486"
},
{
"name": "HTML",
"bytes": "27046"
},
{
"name": "JavaScript",
"bytes": "6398"
},
{
"name": "Makefile",
"bytes": "4614"
},
{
"name": "PHP",
"bytes": "1987869"
},
{
"name": "Python",
"bytes": "11585"
}
],
"symlink_target": ""
}
|
import os,sys,re,socket,datetime,time
# When True, existFile() unconditionally reports "missing", so every run
# re-downloads (overwrites) the file.
OVERWRITE = True
# define source and target: local CMS store -> remote xrootd endpoint
SOURCE_SERVER = "t3serv017.mit.edu"
TARGET_SERVER = "xrootd18.cmsaf.mit.edu"
#---
SOURCE = "/data/submit/cms"
#SOURCE = "root://%s/"%(SOURCE_SERVER)
TARGET = "root://%s/"%(TARGET_SERVER)
#---------------------------------------------------------------------------------------------------
# H E L P E R S
#---------------------------------------------------------------------------------------------------
def showSetup(status):
    """Print a banner describing who/where/when this script is running.

    status == 'start' prints the full banner, 'end' prints the end
    timestamp, and any other value prints the current time tagged with
    the status value. Always returns None.
    """
    now = str(datetime.datetime.now())
    if status == 'start':
        print("\n=-=-=-= Show who and where we are =-=-=-=\n")
        print(f" Script: {os.path.basename(__file__)}")
        print(f" Arguments: {' '.join(sys.argv[1:])}")
        print(" ")
        print(f" user executing: {os.getenv('USER', 'unknown user')}")
        print(f" running on : {socket.gethostname()}")
        print(f" running in : {os.getcwd()}")
        print(f" start time : {now}")
    elif status == 'end':
        print(f" end time : {now}")
    else:
        print(f" now time : {now} ({str(status)})")
    print(" ")
    return
def exeCmd(cmd, debug=0):
    """Run a shell command via os.system, optionally echoing activity.

    debug > 0 prints the command; debug > 1 adds banner lines around the
    execution. Returns the raw os.system() exit status.
    """
    verbose = debug > 0
    banners = debug > 1
    if banners:
        print(' =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=')
    if verbose:
        print(f' =Execute: {cmd}')
    status = os.system(cmd)
    if banners:
        print(' =E=N=D=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n')
    return status
def extractLfn(fullFile, debug=0):
    """Return the LFN portion of a path: everything from '/store' onward.

    Raises ValueError (via str.index) when '/store' does not occur in the
    input, matching the original behavior.
    """
    if debug > 1:
        print(f" File: {fullFile}")
    start = fullFile.index('/store')
    # (historical tweak for files from Qier: replace "wangz" with "paus")
    return fullFile[start:]
def downloadFile(file_xrootd, lfn, debug=0):
    """Copy one LFN from SOURCE to TARGET with xrdcp; return the exit code.

    file_xrootd is accepted for interface compatibility but not used in
    the command itself.
    """
    # NOTE: peer-to-peer copy ("xrdcp -T first") previously failed with a
    # redirector limit reached, hence the plain --path form.
    cmd = f"xrdcp --path {SOURCE}{lfn} {TARGET}{lfn}"
    print(f"CMD: {cmd}")
    rc = exeCmd(cmd, debug)
    if rc != 0:
        print(" download FAILED with %d (%s)."%(rc,lfn))
    else:
        print(" download worked (%s)."%(lfn))
    return rc
def removeRemainder(lfn, debug=0):
    """Delete a partially-downloaded file on the target xrootd server.

    Returns the exit status of the `xrdfs rm` command (0 on success).
    """
    rc = exeCmd(f"xrdfs {TARGET_SERVER} rm {lfn}", debug)
    if rc != 0:
        print(" removing remainder FAILED (rc=%s): %s."%(rc,lfn))
    else:
        print(" removed remainder: %s%s."%(TARGET,lfn))
    return rc
def existFile(lfn,debug=0):
    """Check whether the LFN already exists on the target server.

    Returns 0 if the file is listed (exists), nonzero if it must be
    downloaded. While probing, also makes sure the target directory
    exists, creating it if needed.
    """
    # check if file exists already
    if OVERWRITE: # force overwrite: pretend the file is missing
        return 1
    cmd = "xrdfs %s ls %s >& /dev/null"%(TARGET_SERVER,lfn)
    # cmd = "ls -l %s%s >& /dev/null"%(TARGET,lfn)
    rc = exeCmd(cmd,debug)
    if rc == 0:
        print(" file listed successfully: %s."%(lfn))
    else:
        print(" file listing FAILED (rc=%s) so we need to download: %s."%(rc,lfn))
        # Parent directory of the LFN on the target.
        dir = "/".join(lfn.split("/")[:-1])
        print("DIR: %s%s"%(TARGET,dir))
        # NOTE(review): `ls`/`mkdir` against a root:// URL via the local
        # shell looks unlikely to work — presumably `xrdfs ... ls/mkdir`
        # was intended; confirm before relying on the directory check.
        cmd = "ls -l %s%s >& /dev/null"%(TARGET,dir)
        tmprc = exeCmd(cmd,debug)
        if tmprc == 0:
            print(" directory exists: %s."%(lfn))
        else:
            cmd = "mkdir -p %s%s >& /dev/null"%(TARGET,dir)
            tmprc = exeCmd(cmd,debug)
            print(" directory created (RC=%d): %s."%(int(tmprc),lfn))
    return rc
#---------------------------------------------------------------------------------------------------
#  M A I N
#---------------------------------------------------------------------------------------------------
debug = 2
# make announcement
showSetup('start')
# make sure we have at least one parameter
if len(sys.argv)<2:
    print('\n ERROR - Missing file name as parameter.\n')
    # BUG FIX: `showExit(1)` was undefined anywhere in this file and would
    # raise NameError; exit with a nonzero status instead.
    sys.exit(1)
# read command line parameters
fullFile = " ".join(sys.argv[1:])
# make sure to trim the input file if needed (want to go back to lfn = /store/...)
lfn = extractLfn(fullFile,debug)
# refresh and show the grid certificate used for the xrootd transfer
exeCmd("voms-proxy-init --valid 168:00 -voms cms",debug)
exeCmd("voms-proxy-info -all",debug)
# does the file exist already? (existFile returns 0 when it is present)
rc = existFile(lfn,debug)
if rc == 0:
    print("\n Our work is done, file exists already.\nEXIT\n")
    showSetup(rc)
    sys.exit(rc)
# download the file to local; on failure clean up any partial remainder
rc = downloadFile(fullFile,lfn,debug)
if rc != 0:
    print("\n File download failed. EXIT!\n Cleanup potential remainders.")
    removeRemainder(lfn,debug)
    showSetup(rc)
    sys.exit(rc)
# make announcement
showSetup('end')
sys.exit(0)
|
{
"content_hash": "7235e9318cae62047900863f2f31ef47",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 100,
"avg_line_length": 30.316129032258065,
"alnum_prop": 0.5218131517344116,
"repo_name": "cpausmit/FiBS",
"id": "dcacda001148129c45fb8111d59eebe14cc6e073",
"size": "5111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "task/download.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61935"
},
{
"name": "Shell",
"bytes": "12641"
}
],
"symlink_target": ""
}
|
"""
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""
from pyjamas import DOM
from pyjamas.ui.FocusWidget import FocusWidget
from pyjamas.ui.Widget import Widget
from pyjamas.ui import Focus
from pyjamas.Canvas.Color import Color
from pyjamas.Canvas.LinearGradientImplDefault import LinearGradientImplDefault
from pyjamas.Canvas.RadialGradientImplDefault import RadialGradientImplDefault
from pyjamas.Canvas.GWTCanvasImplDefault import GWTCanvasImplDefault
from pyjamas.Canvas.LinearGradientImplIE6 import LinearGradientImplIE6
from pyjamas.Canvas.RadialGradientImplIE6 import RadialGradientImplIE6
from pyjamas.Canvas.GWTCanvasImplIE6 import GWTCanvasImplIE6
"""*
* 2D Graphics API. API mimicks functionality found in the Javascript canvas API
* (see <a href="http:#developer.mozilla.org/en/docs/Canvas_tutorial">canvas
* tutorial</a>).
*
* <p>
* Performance may scale differently for IE than for browsers with a native
* canvas self.implementation. Sub-pixel precision is supported where possible.
* </p>
"""
class GWTCanvas(FocusWidget):
    """2D graphics widget mimicking the Javascript canvas element API.

    The concrete drawing backend is chosen by ``getCanvasImpl()``
    (deferred binding): the default targets the HTML5 CANVAS DOM
    element; in the case of IE it should be VML.

    Performance may scale differently for IE than for browsers with a
    native canvas implementation.  Sub-pixel precision is supported
    where possible.
    """

    def __init__(self, coordX=300, coordY=150, pixelX=300, pixelY=150,
            **kwargs):
        """Create a GWTCanvas element.

        Different coordinate spaces and pixel spaces will cause aliased
        scaling.  Use ``scale(x, y)`` and consistent coordinate and
        pixel spaces for better results.

        :param coordX: the size of the coordinate space in the x direction
        :param coordY: the size of the coordinate space in the y direction
        :param pixelX: the CSS width in pixels of the canvas element
        :param pixelY: the CSS height in pixels of the canvas element
        """
        # Impl instance.  Compiler should statify all the methods, so we
        # do not end up with duplicate code for each canvas instance.
        self.impl = self.getCanvasImpl()
        self.coordHeight = 0
        self.coordWidth = 0
        focusable = Focus.createFocusable()
        self.canvas = self.impl.createElement()
        DOM.appendChild(focusable, self.canvas)
        FocusWidget.__init__(self, focusable, **kwargs)
        self.setPixelWidth(pixelX)
        self.setPixelHeight(pixelY)
        self.setCoordSize(coordX, coordY)

    def getCanvasElement(self):
        """Return the DOM element created by the drawing implementation."""
        return self.canvas

    def getCanvasImpl(self):
        """Return the drawing implementation (deferred-binding hook)."""
        return GWTCanvasImplDefault()

    def setWidth(self, width):
        """Set the widget width; delegates to setPixelWidth."""
        self.setPixelWidth(width)

    def setHeight(self, height):
        """Set the widget height; delegates to setPixelHeight."""
        self.setPixelHeight(height)

    def arc(self, x, y, radius, startAngle, endAngle, antiClockwise):
        """Draw an arc.

        If the context has a non-empty path, then the method must add a
        straight line from the last point in the path to the start point
        of the arc.

        :param x: center X coordinate
        :param y: center Y coordinate
        :param radius: radius of drawn arc
        :param startAngle: angle measured from positive X axis to start of arc CW
        :param endAngle: angle measured from positive X axis to end of arc CW
        :param antiClockwise: direction that the arc line is drawn
        """
        self.impl.arc(x, y, radius, startAngle, endAngle, antiClockwise)

    def beginPath(self):
        """Erase the current path and prepare it for a new path."""
        self.impl.beginPath()

    def clear(self):
        """Clear the entire canvas."""
        # We use local references instead of looking up the attributes
        # on the DOM element.
        self.impl.clear(self.coordWidth, self.coordHeight)

    def closePath(self):
        """Close the current path.

        "Closing" simply means that a line is drawn from the last
        element in the path back to the first.
        """
        self.impl.closePath()

    def createLinearGradient(self, x0, y0, x1, y1):
        """Create a LinearGradient object for use as a fill or stroke style.

        :param x0: x coord of start point of gradient
        :param y0: y coord of start point of gradient
        :param x1: x coord of end point of gradient
        :param y1: y coord of end point of gradient
        :return: the CanvasGradient
        """
        return LinearGradientImplDefault(x0, y0, x1, y1, self.getCanvasElement())

    def createRadialGradient(self, x0, y0, r0, x1, y1, r1):
        """Create a RadialGradient object for use as a fill or stroke style.

        :param x0: x coord of origin of start circle
        :param y0: y coord of origin of start circle
        :param r0: radius of start circle
        :param x1: x coord of origin of end circle
        :param y1: y coord of origin of end circle
        :param r1: radius of the end circle
        :return: the CanvasGradient
        """
        return RadialGradientImplDefault(x0, y0, r0, x1, y1, r1,
                                         self.getCanvasElement())

    def cubicCurveTo(self, cp1x, cp1y, cp2x, cp2y, x, y):
        """Add a cubic Bezier curve segment to the current path.

        Does nothing if the context's path is empty.  Otherwise, it
        connects the last point in the path to the given point (x, y)
        using a cubic Bezier curve with control points (cp1x, cp1y) and
        (cp2x, cp2y), then adds the point (x, y) to the path.

        Corresponds to ``bezierCurveTo(cp1x, cp1y, cp2x, cp2y, x, y)``
        in the canvas element Javascript API.

        :param cp1x: x coord of first control point
        :param cp1y: y coord of first control point
        :param cp2x: x coord of second control point
        :param cp2y: y coord of second control point
        :param x: x coord of end point
        :param y: y coord of end point
        """
        self.impl.cubicCurveTo(cp1x, cp1y, cp2x, cp2y, x, y)

    def drawImage(self, img, *args):
        """Draw an input image at a given position on the canvas.

        Resizes the image according to the specified width and height
        and samples from the specified sourceY and sourceX.  We
        recommend that the pixel and coordinate spaces be the same to
        provide consistent positioning and scaling results.

        Accepted call shapes (dispatched on ``len(args)``):

        * ``drawImage(img, offsetX, offsetY)`` -- draw at a point.
        * ``drawImage(img, offsetX, offsetY, width, height)`` -- draw the
          whole source image scaled into the given destination rectangle.
        * ``drawImage(img, sourceX, sourceY, sourceWidth, sourceHeight,
          destX, destY, destWidth, destHeight)`` -- draw a sub-rectangle
          of the source into a destination rectangle.

        :param img: the image to be drawn (a Widget or a DOM element)

        NOTE(review): any other argument count is silently ignored.
        """
        # A Widget exposes its size via accessors; a raw DOM element via
        # its offset* attributes.
        if isinstance(img, Widget):
            img_width = img.getWidth()
            img_height = img.getHeight()
        else:
            img_width = DOM.getIntAttribute(img, "offsetWidth")
            img_height = DOM.getIntAttribute(img, "offsetHeight")
        if len(args) == 8:
            self.impl.drawImage(img, args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7])
        elif len(args) == 4:
            # Whole source image scaled to the destination rectangle.
            sourceX = 0
            sourceY = 0
            sourceWidth = img_width
            sourceHeight = img_height
            destX = args[0]
            destY = args[1]
            destWidth = args[2]
            destHeight = args[3]
            self.impl.drawImage(img, sourceX, sourceY,
                                sourceWidth, sourceHeight,
                                destX, destY, destWidth, destHeight)
        elif len(args) == 2:
            self.impl.drawImage(img, args[0], args[1])

    def fill(self):
        """Fill the current path according to the current fillstyle."""
        self.impl.fill()

    def fillRect(self, startX, startY, width, height):
        """Fill a rectangle according to the current fillstyle.

        :param startX: x coord of the top left corner in the destination space
        :param startY: y coord of the top left corner in the destination space
        :param width: destination width of image
        :param height: destination height of image
        """
        self.impl.fillRect(startX, startY, width, height)

    def fillText(self, text, startX, startY, maxWidth=None):
        """Place text at the specified start coords, per the current fillstyle.

        :param text: the text to draw
        :param startX: x coord of the top left corner in the destination space
        :param startY: y coord of the top left corner in the destination space
        :param maxWidth: maximum width of text (optional)
        """
        self.impl.fillText(text, startX, startY, maxWidth)

    def getCoordHeight(self):
        """Return the coordinate-space height of the canvas."""
        return self.coordHeight

    def getCoordWidth(self):
        """Return the coordinate-space width of the canvas."""
        return self.coordWidth

    def getGlobalAlpha(self):
        """Return the global alpha; see setGlobalAlpha for details."""
        return self.impl.getGlobalAlpha()

    def getGlobalCompositeOperation(self):
        """Return the compositing mode; see setGlobalCompositeOperation."""
        return self.impl.getGlobalCompositeOperation()

    def getLineCap(self):
        """Return the line cap style; see setLineCap for details."""
        return self.impl.getLineCap()

    def getLineJoin(self):
        """Return the line join style; see setLineJoin for details."""
        return self.impl.getLineJoin()

    def getLineWidth(self):
        """Return the line width; see setLineWidth for details."""
        return self.impl.getLineWidth()

    def getMiterLimit(self):
        """Return the miter limit; see setMiterLimit for details."""
        return self.impl.getMiterLimit()

    def lineTo(self, x, y):
        """Add a line from the last point in the current path to (x, y).

        :param x: x coord of point
        :param y: y coord of point
        """
        self.impl.lineTo(x, y)

    def moveTo(self, x, y):
        """Make the last point in the current path be (x, y).

        :param x: x coord of point
        :param y: y coord of point
        """
        self.impl.moveTo(x, y)

    def quadraticCurveTo(self, cpx, cpy, x, y):
        """Add a quadratic Bezier curve segment to the current path.

        Does nothing if the context has an empty path.  Otherwise it
        connects the last point in the path to the given point (x, y)
        using a quadratic Bezier curve with control point (cpx, cpy),
        and then adds the given point (x, y) to the path.

        :param cpx: x coord of the control point
        :param cpy: y coord of the control point
        :param x: x coord of the point
        :param y: y coord of the point
        """
        self.impl.quadraticCurveTo(cpx, cpy, x, y)

    def rect(self, startX, startY, width, height):
        """Add a rectangle to the current path, and close the path.

        :param startX: x coord of the top left corner of the rectangle
        :param startY: y coord of the top left corner of the rectangle
        :param width: the width of the rectangle
        :param height: the height of the rectangle
        """
        self.impl.rect(startX, startY, width, height)

    def resize(self, width, height):
        """Resize with consistent coordinate and screen pixel spaces.

        Equivalent to doing::

            canvas.setCoordSize(width, height)
            canvas.setPixelHeight(height)
            canvas.setPixelWidth(width)

        :param width: new width
        :param height: new height
        """
        self.setCoordSize(width, height)
        self.setPixelHeight(height)
        self.setPixelWidth(width)

    def restoreContext(self):
        """Restore the last saved context from the context stack."""
        self.impl.restoreContext()

    def rotate(self, angle):
        """Add a rotation of the specified angle to the current transform.

        :param angle: the angle to rotate by, in radians
        """
        self.impl.rotate(angle)

    def saveContext(self):
        """Save the current context to the context stack."""
        self.impl.saveContext()

    def scale(self, x, y):
        """Add a scale transformation to the current transformation matrix.

        :param x: ratio that we must scale in the X direction
        :param y: ratio that we must scale in the Y direction
        """
        self.impl.scale(x, y)

    def setBackgroundColor(self, color):
        """Set the background color of the canvas element.

        :param color: the background color
        """
        self.impl.setBackgroundColor(self.getCanvasElement(), str(color))

    def setCoordHeight(self, height):
        """Set the coordinate height of the canvas.

        This will erase the canvas contents!

        :param height: the size of the y component of the coordinate space
        """
        self.impl.setCoordHeight(self.getCanvasElement(), height)
        self.coordHeight = height

    def setCoordSize(self, width, height):
        """Set the coordinate space of the canvas.

        This will erase the canvas contents!

        :param width: the size of the x component of the coordinate space
        :param height: the size of the y component of the coordinate space
        """
        self.setCoordWidth(width)
        self.setCoordHeight(height)

    def setCoordWidth(self, width):
        """Set the coordinate width of the canvas.

        This will erase the canvas contents!

        :param width: the size of the x component of the coordinate space
        """
        self.impl.setCoordWidth(self.getCanvasElement(), width)
        self.coordWidth = width

    def setFillStyle(self, grad):
        """Set the current fill style to the specified color gradient.

        :param grad: a CanvasGradient
        """
        self.impl.setFillStyle(grad)

    def setGlobalAlpha(self, alpha):
        """Set the global transparency to the specified alpha.

        :param alpha: alpha value
        """
        self.impl.setGlobalAlpha(alpha)

    def setFont(self, font):
        """Set the canvas font attribute.

        :param font: CSS font
        """
        self.impl.setFont(font)

    def setGlobalCompositeOperation(self, globalCompositeOperation):
        """Set how the canvas is displayed relative to background content.

        The string identifies the desired compositing mode.  If you do
        not set this value explicitly, the canvas uses the
        ``GWTCanvas.SOURCE_OVER`` compositing mode.  The valid
        compositing operators are ``GWTCanvas.SOURCE_OVER`` and
        ``GWTCanvas.DESTINATION_OVER``.

        :param globalCompositeOperation: compositing mode string
        """
        self.impl.setGlobalCompositeOperation(globalCompositeOperation)

    def setLineCap(self, lineCap):
        """Set the end style used when drawing a line.

        Specify the string ``GWTCanvas.BUTT`` for a flat edge that is
        perpendicular to the line itself, ``GWTCanvas.ROUND`` for round
        endpoints, or ``GWTCanvas.SQUARE`` for square endpoints.  If you
        do not set this value explicitly, the canvas uses the
        ``GWTCanvas.BUTT`` line cap style.

        :param lineCap: line cap style string
        """
        self.impl.setLineCap(lineCap)

    def setLineJoin(self, lineJoin):
        """Set the join style between lines.

        Specify the string ``GWTCanvas.ROUND`` for round joins,
        ``GWTCanvas.BEVEL`` for beveled joins, or ``GWTCanvas.MITER``
        for miter joins.  If you do not set this value explicitly, the
        canvas uses the ``GWTCanvas.MITER`` line join style.

        :param lineJoin: line join style string
        """
        self.impl.setLineJoin(lineJoin)

    def setLineWidth(self, width):
        """Set the current context's line width.

        Line width is the thickness of a stroked line.

        :param width: the width of the canvas
        """
        self.impl.setLineWidth(width)

    def setMiterLimit(self, miterLimit):
        """Set the miter limit.

        You use this property to specify how the canvas draws the
        juncture between connected line segments.  If the line join is
        set to ``GWTCanvas.MITER``, the canvas uses the miter limit to
        determine whether the lines should be joined with a bevel
        instead of a miter.  The canvas divides the length of the miter
        by the line width.  If the result is greater than the miter
        limit, the style is converted to a bevel.

        :param miterLimit: the miter limit value
        """
        self.impl.setMiterLimit(miterLimit)

    def setPixelHeight(self, height):
        """Set the CSS height of the canvas in pixels.

        :param height: the height of the canvas in pixels
        """
        FocusWidget.setHeight(self, str(height) + "px")
        self.impl.setPixelHeight(self.getCanvasElement(), height)

    def setPixelWidth(self, width):
        """Set the CSS width in pixels for the canvas.

        :param width: width of the canvas in pixels
        """
        FocusWidget.setWidth(self, str(width) + "px")
        self.impl.setPixelWidth(self.getCanvasElement(), width)

    def setStrokeStyle(self, grad):
        """Set the current stroke style to the specified color gradient.

        :param grad: a CanvasGradient
        """
        self.impl.setStrokeStyle(grad)

    def stroke(self):
        """Stroke the current path according to the current stroke style."""
        self.impl.stroke()

    def strokeRect(self, startX, startY, width, height):
        """Stroke a rectangle defined by the supplied arguments.

        :param startX: x coord of the top left corner
        :param startY: y coord of the top left corner
        :param width: width of the rectangle
        :param height: height of the rectangle
        """
        self.impl.strokeRect(startX, startY, width, height)

    def transform(self, m11, m12, m21, m22, dx, dy):
        """Multiply the current transformation matrix by the input matrix.

        The input matrix is described by::

            m11  m21  dx
            m12  m22  dy
            0    0    1

        :param m11: top left cell of 2x2 rotation matrix
        :param m12: top right cell of 2x2 rotation matrix
        :param m21: bottom left cell of 2x2 rotation matrix
        :param m22: bottom right cell of 2x2 rotation matrix
        :param dx: translation in X direction
        :param dy: translation in Y direction
        """
        self.impl.transform(m11, m12, m21, m22, dx, dy)

    def translate(self, x, y):
        """Apply a translation (linear shift) by x horizontally, y vertically.

        :param x: amount to shift in the x direction
        :param y: amount to shift in the y direction
        """
        self.impl.translate(x, y)
|
{
"content_hash": "07f8af7b06162a25456fd2bcd5c3afc8",
"timestamp": "",
"source": "github",
"line_count": 682,
"max_line_length": 108,
"avg_line_length": 31.513196480938415,
"alnum_prop": 0.6314442583286805,
"repo_name": "minghuascode/pyj",
"id": "bcd4317f9cba91cba96711243f1be702078dc05d",
"size": "21492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "library/pyjamas/Canvas/GWTCanvas.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "107608"
},
{
"name": "JavaScript",
"bytes": "116371"
},
{
"name": "PHP",
"bytes": "5473"
},
{
"name": "Python",
"bytes": "7572605"
},
{
"name": "Shell",
"bytes": "24231"
}
],
"symlink_target": ""
}
|
from personal_classifier import PersonalClassifier
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from sklearn import svm
class SupportVectorMachine(PersonalClassifier):
    """Personal classifier backed by a scikit-learn support vector machine.

    The training data and labels are stored by the PersonalClassifier
    base class; train() fits an SVC on them and registers the fitted
    estimator via set_classifier().
    """

    def __init__(self, data_set, labels):
        """Store the data set and labels via the base class.

        Args:
            data_set: feature matrix used for fitting.
            labels: target labels, one per sample.
        """
        super(SupportVectorMachine, self).__init__(data_set, labels)

    def train(self, kernel='linear', gamma=10):
        """Fit an SVC on the stored data and register it as the classifier.

        Args:
            kernel (str): SVC kernel name; defaults to 'linear'.
            gamma: kernel coefficient passed to SVC; defaults to 10,
                the previously hard-coded value.  NOTE(review): gamma
                has no effect with the 'linear' kernel — it only
                applies to 'rbf', 'poly' and 'sigmoid'.
        """
        x = self.data_set
        y = self.labels
        clf = svm.SVC(kernel=kernel, gamma=gamma)
        clf.fit(x, y)
        self.set_classifier(clf)
|
{
"content_hash": "59b9ae193d0894c67d5f7608980152b6",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 68,
"avg_line_length": 25.95,
"alnum_prop": 0.6859344894026975,
"repo_name": "BavoGoosens/Gaiter",
"id": "113d12de5daef1c894be473a17f2fc77dce17d42",
"size": "519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "personal_classifier/support_vector_machine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64227"
},
{
"name": "Shell",
"bytes": "700"
}
],
"symlink_target": ""
}
|
from unittest import mock
from django.utils import timezone
from freezegun import freeze_time
from rest_framework import test
from waldur_mastermind.common.utils import parse_datetime
from waldur_mastermind.invoices import models as invoices_models
from waldur_mastermind.marketplace import callbacks
from waldur_mastermind.marketplace import models as marketplace_models
from waldur_mastermind.marketplace.signals import resource_limit_update_succeeded
from waldur_mastermind.marketplace.tests import factories as marketplace_factories
from waldur_mastermind.marketplace_openstack import (
CORES_TYPE,
RAM_TYPE,
STORAGE_MODE_DYNAMIC,
STORAGE_MODE_FIXED,
STORAGE_TYPE,
)
from waldur_openstack.openstack_base.tests.fixtures import OpenStackFixture
from .. import TENANT_TYPE
@freeze_time('2019-09-10')
class BaseTenantInvoiceTest(test.APITransactionTestCase):
    """Shared fixtures and helpers for tenant invoicing tests.

    Time is frozen at 2019-09-10 so invoice periods are deterministic.
    """

    def setUp(self):
        # Offering with three limit-billed components: RAM, cores, storage.
        # RAM/storage limits use a factor of 1024 (presumably MB) —
        # TODO confirm against the component definitions.
        self.offering = marketplace_factories.OfferingFactory(type=TENANT_TYPE)
        self.limits = {
            RAM_TYPE: 1 * 1024,
            CORES_TYPE: 2,
            STORAGE_TYPE: 3 * 1024,
        }
        self.prices = {
            RAM_TYPE: 10,
            CORES_TYPE: 100,
            STORAGE_TYPE: 1,
        }
        for ct in [RAM_TYPE, CORES_TYPE, STORAGE_TYPE]:
            marketplace_factories.OfferingComponentFactory(
                offering=self.offering,
                type=ct,
                billing_type=marketplace_models.OfferingComponent.BillingTypes.LIMIT,
            )

    def create_plan(self, prices, unit=marketplace_models.Plan.Units.PER_DAY):
        """Create a plan for self.offering with one priced component per key in *prices*."""
        plan = marketplace_factories.PlanFactory(offering=self.offering, unit=unit)
        for ct in prices.keys():
            marketplace_factories.PlanComponentFactory(
                plan=plan,
                component=self.offering.components.get(type=ct),
                price=prices[ct],
            )
        return plan

    def create_resource(
        self, prices, limits, unit=marketplace_models.Plan.Units.PER_DAY
    ) -> marketplace_models.Resource:
        """Create a resource on a fresh plan and fire the creation-succeeded callback."""
        plan = self.create_plan(prices, unit)
        resource = marketplace_factories.ResourceFactory(
            offering=self.offering,
            plan=plan,
            limits=limits,
            state=marketplace_models.Resource.States.CREATING,
        )
        callbacks.resource_creation_succeeded(resource)
        return resource

    def update_resource_limits(self, resource, new_limits):
        """Emit resource_limit_update_succeeded with an UPDATE order item carrying *new_limits*."""
        order = marketplace_factories.OrderFactory(
            project=resource.project, state=marketplace_models.Order.States.EXECUTING,
        )
        order_item = marketplace_factories.OrderItemFactory(
            order=order,
            offering=self.offering,
            resource=resource,
            type=marketplace_models.OrderItem.Types.UPDATE,
            state=marketplace_models.OrderItem.States.EXECUTING,
            limits=new_limits,
        )
        resource_limit_update_succeeded.send(
            sender=resource.__class__, order_item=order_item
        )

    def delete_resource(self, resource):
        """Fire the deletion-succeeded callback for *resource*."""
        callbacks.resource_deletion_succeeded(resource)
class TenantInvoiceTest(BaseTenantInvoiceTest):
    """Invoice items track resource creation, limit updates and deletion."""

    def test_when_resource_is_created_invoice_is_updated(self):
        """One invoice item per limit-billed component (RAM, cores, storage)."""
        resource = self.create_resource(self.prices, self.limits)
        invoice_items = invoices_models.InvoiceItem.objects.filter(resource=resource)
        self.assertEqual(invoice_items.count(), 3)

    def test_when_resource_limits_are_updated_invoice_items_are_updated(self):
        """A limit update keeps one invoice item per component (no duplicates)."""
        new_limits = {
            RAM_TYPE: 10 * 1024,
            CORES_TYPE: 20,
            STORAGE_TYPE: 30 * 1024,
        }
        with freeze_time('2017-01-01'):
            resource = self.create_resource(self.prices, self.limits)
        with freeze_time('2017-01-10'):
            self.update_resource_limits(resource, new_limits)
            invoice_items = invoices_models.InvoiceItem.objects.filter(resource=resource)
            self.assertEqual(invoice_items.count(), 3)

    def test_when_resource_is_deleted_invoice_is_updated(self):
        """Deletion stamps the invoice item end date with the deletion day."""
        resource = self.create_resource(self.prices, self.limits)
        with freeze_time('2019-09-18'):
            resource.set_state_terminating()
            resource.save()
            self.delete_resource(resource)
            invoice_item = invoices_models.InvoiceItem.objects.filter(
                resource=resource
            ).last()
            self.assertEqual(invoice_item.end.day, 18)

    def test_resource_limit_period_is_updated_when_resource_is_terminated(self):
        """Termination closes the last resource_limit_period at the current time."""
        resource = self.create_resource(self.prices, self.limits)
        with freeze_time('2019-09-18'):
            resource.set_state_terminating()
            resource.save()
            resource.set_state_terminated()
            resource.save()
        # NOTE(review): timezone.now() here is back under the class-level
        # freeze (2019-09-10) once the inner freeze_time block has exited.
        invoice_item = invoices_models.InvoiceItem.objects.filter(
            resource=resource
        ).last()
        self.assertEqual(
            parse_datetime(
                invoice_item.details['resource_limit_periods'][-1]['end']
            ),
            timezone.now(),
        )
class StorageModeInvoiceTest(BaseTenantInvoiceTest):
    """Switching an offering's storage_mode re-imports resource limits from tenant quotas."""

    def setUp(self):
        # Arrange
        super(StorageModeInvoiceTest, self).setUp()
        fixture = OpenStackFixture()
        tenant = fixture.openstack_tenant
        # Extra limit-billed component for dynamic (per-volume-type) storage.
        offering_component = marketplace_models.OfferingComponent.objects.create(
            offering=self.offering,
            type='gigabytes_gpfs',
            billing_type=marketplace_models.OfferingComponent.BillingTypes.LIMIT,
        )
        plan = self.create_plan(self.prices)
        marketplace_models.PlanComponent.objects.create(
            component=offering_component, plan=plan, price=10,
        )
        self.resource = marketplace_factories.ResourceFactory(
            offering=self.offering,
            plan=plan,
            limits=self.limits,
            state=marketplace_models.Resource.States.CREATING,
        )
        callbacks.resource_creation_succeeded(self.resource)
        # Link the marketplace resource to the OpenStack tenant so limits
        # can be imported from the tenant's quotas.
        self.resource.scope = tenant
        self.resource.save()
        tenant.set_quota_limit('vcpu', 6)
        tenant.set_quota_limit('ram', 10 * 1024)
        tenant.set_quota_usage('storage', 30 * 1024)
        tenant.set_quota_usage('gigabytes_gpfs', 100 * 1024)

    def test_when_storage_mode_is_switched_to_dynamic_limits_are_updated(self):
        """Dynamic mode drops 'storage' and uses per-volume-type quotas instead."""
        # Act
        with freeze_time('2019-09-20'):
            self.offering.plugin_options['storage_mode'] = STORAGE_MODE_DYNAMIC
            self.offering.save()
        # Assert
        self.resource.refresh_from_db()
        self.assertEqual(self.resource.limits.get('cores'), 6)
        self.assertEqual(self.resource.limits.get('ram'), 10 * 1024)
        self.assertEqual(self.resource.limits.get('storage'), None)
        self.assertEqual(self.resource.limits.get('gigabytes_gpfs'), 100 * 1024)
        invoice_item = invoices_models.InvoiceItem.objects.filter(
            resource=self.resource, details__offering_component_type='gigabytes_gpfs'
        ).get()
        last_period = invoice_item.details['resource_limit_periods'][-1]
        self.assertEqual(last_period['quantity'], 100 * 1024)

    def test_when_storage_mode_is_switched_to_fixed_limits_are_updated(self):
        """Fixed mode restores 'storage' and drops the per-volume-type limit."""
        # Act
        with freeze_time('2019-09-20'):
            self.offering.plugin_options['storage_mode'] = STORAGE_MODE_FIXED
            self.offering.save()
        # Assert
        self.resource.refresh_from_db()
        self.assertEqual(self.resource.limits.get('cores'), 6)
        self.assertEqual(self.resource.limits.get('ram'), 10 * 1024)
        self.assertEqual(self.resource.limits.get('storage'), 30 * 1024)
        self.assertEqual(self.resource.limits.get('gigabytes_gpfs'), None)
        invoice_item = invoices_models.InvoiceItem.objects.filter(
            resource=self.resource
        ).last()
        last_period = invoice_item.details['resource_limit_periods'][-1]
        self.assertEqual(last_period['quantity'], 30)

    @mock.patch(
        'waldur_mastermind.marketplace_openstack.utils.import_limits_when_storage_mode_is_switched'
    )
    def test_when_storage_mode_is_not_switched_limits_are_not_updated(
        self, mocked_utils
    ):
        """Changing an unrelated plugin option must not trigger a limits re-import."""
        # Act
        with freeze_time('2019-09-20'):
            self.offering.plugin_options['FOO'] = 'BAR'
            self.offering.save()
        # Assert
        self.assertEqual(mocked_utils.call_count, 0)
|
{
"content_hash": "84c1898d3283ba0336a79c8d6fdf4456",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 99,
"avg_line_length": 38.70454545454545,
"alnum_prop": 0.6414562536699941,
"repo_name": "opennode/waldur-mastermind",
"id": "7fcbc5467b9124e3372a5d5562db7a63053bf8a6",
"size": "8515",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/waldur_mastermind/marketplace_openstack/tests/test_invoice.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4429"
},
{
"name": "Dockerfile",
"bytes": "6258"
},
{
"name": "HTML",
"bytes": "42329"
},
{
"name": "JavaScript",
"bytes": "729"
},
{
"name": "Python",
"bytes": "5520019"
},
{
"name": "Shell",
"bytes": "15429"
}
],
"symlink_target": ""
}
|
"""The test for switch device automation."""
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.switch import DOMAIN
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_time_changed,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
    """Return an empty, loaded, registry."""
    # Fresh mock device registry bound to this test's hass instance.
    return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
    """Return an empty, loaded, registry."""
    # Fresh mock entity registry bound to this test's hass instance.
    return mock_registry(hass)
@pytest.fixture
def calls(hass):
    """Track calls to a mock service."""
    # Registers 'test.automation' and returns the list that accumulates
    # every service call made to it.
    return async_mock_service(hass, "test", "automation")
async def test_get_triggers(hass, device_reg, entity_reg):
    """Test we get the expected triggers from a switch."""
    entry = MockConfigEntry(domain="test", data={})
    entry.add_to_hass(hass)
    device = device_reg.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device.id)
    # One trigger descriptor per supported trigger type, in this order.
    expected = [
        {
            "platform": "device",
            "domain": DOMAIN,
            "type": trigger_type,
            "device_id": device.id,
            "entity_id": f"{DOMAIN}.test_5678",
        }
        for trigger_type in ("turned_off", "turned_on")
    ]
    triggers = await async_get_device_automations(hass, "trigger", device.id)
    assert triggers == expected
async def test_get_trigger_capabilities(hass, device_reg, entity_reg):
    """Test we get the expected capabilities from a switch trigger."""
    entry = MockConfigEntry(domain="test", data={})
    entry.add_to_hass(hass)
    device = device_reg.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device.id)
    # Every switch trigger supports exactly one extra field: optional "for".
    expected = {
        "extra_fields": [
            {"name": "for", "optional": True, "type": "positive_time_period_dict"}
        ]
    }
    for trigger in await async_get_device_automations(hass, "trigger", device.id):
        capabilities = await async_get_device_automation_capabilities(
            hass, "trigger", trigger
        )
        assert capabilities == expected
async def test_if_fires_on_state_change(hass, calls):
    """Test for turn_on and turn_off triggers firing."""
    platform = getattr(hass.components, f"test.{DOMAIN}")
    platform.init()
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
    ent1, ent2, ent3 = platform.ENTITIES
    # Two automations on ent1: one for each trigger type.  The action
    # renders every trigger variable into a single "some" string.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": ent1.entity_id,
                        "type": "turned_on",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "turn_on {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                },
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": ent1.entity_id,
                        "type": "turned_off",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "turn_off {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                },
            ]
        },
    )
    await hass.async_block_till_done()
    # Initial state is ON; no automation has fired yet.
    assert hass.states.get(ent1.entity_id).state == STATE_ON
    assert len(calls) == 0
    # ON -> OFF fires the turned_off automation.
    hass.states.async_set(ent1.entity_id, STATE_OFF)
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["some"] == "turn_off device - {} - on - off - None".format(
        ent1.entity_id
    )
    # OFF -> ON fires the turned_on automation.
    hass.states.async_set(ent1.entity_id, STATE_ON)
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data["some"] == "turn_on device - {} - off - on - None".format(
        ent1.entity_id
    )
async def test_if_fires_on_state_change_with_for(hass, calls):
    """Test for triggers firing with delay."""
    platform = getattr(hass.components, f"test.{DOMAIN}")
    platform.init()
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
    ent1, ent2, ent3 = platform.ENTITIES
    # turned_off trigger with a 5-second "for" delay on ent1.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": ent1.entity_id,
                        "type": "turned_off",
                        "for": {"seconds": 5},
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "turn_off {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                }
            ]
        },
    )
    await hass.async_block_till_done()
    assert hass.states.get(ent1.entity_id).state == STATE_ON
    assert len(calls) == 0
    # State flips to OFF but the trigger must NOT fire before the delay.
    hass.states.async_set(ent1.entity_id, STATE_OFF)
    await hass.async_block_till_done()
    assert len(calls) == 0
    # Advance past the 5-second "for" window; now the trigger fires.
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
    await hass.async_block_till_done()
    assert len(calls) == 1
    await hass.async_block_till_done()
    assert calls[0].data["some"] == "turn_off device - {} - on - off - 0:00:05".format(
        ent1.entity_id
    )
|
{
"content_hash": "b52b91a1c8772fcccff5aabfe3de73d8",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 87,
"avg_line_length": 34.76724137931034,
"alnum_prop": 0.4764443342424994,
"repo_name": "Teagan42/home-assistant",
"id": "73d12d0a7296e6ac04be8db4e926f406b3fb6059",
"size": "8066",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/switch/test_device_trigger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19774313"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import print_function, unicode_literals, absolute_import
from collections import namedtuple
import json
import logging
import os
import os.path
import stat
import sys
import warnings
import getpass
from functools import wraps
from contextlib import contextmanager
from types import GeneratorType
from osbs.build.build_requestv2 import (
BaseBuildRequest,
BuildRequestV2,
SourceBuildRequest,
)
from osbs.build.user_params import (
load_user_params_from_json,
BuildUserParams,
SourceContainerUserParams
)
from osbs.build.plugins_configuration import (
PluginsConfiguration,
SourceContainerPluginsConfiguration,
)
from osbs.build.build_response import BuildResponse
from osbs.build.pod_response import PodResponse
from osbs.build.config_map_response import ConfigMapResponse
from osbs.constants import (BUILD_RUNNING_STATES, WORKER_OUTER_TEMPLATE,
WORKER_INNER_TEMPLATE, WORKER_CUSTOMIZE_CONF,
ORCHESTRATOR_OUTER_TEMPLATE, ORCHESTRATOR_INNER_TEMPLATE,
ORCHESTRATOR_CUSTOMIZE_CONF, BUILD_TYPE_WORKER,
BUILD_TYPE_ORCHESTRATOR, BUILD_FINISHED_STATES,
DEFAULT_ARRANGEMENT_VERSION, REACTOR_CONFIG_ARRANGEMENT_VERSION,
FILTER_KEY, RELEASE_LABEL_FORMAT, VERSION_LABEL_FORBIDDEN_CHARS,
ORCHESTRATOR_SOURCES_OUTER_TEMPLATE,
USER_PARAMS_KIND_IMAGE_BUILDS,
USER_PARAMS_KIND_SOURCE_CONTAINER_BUILDS,
)
from osbs.core import Openshift
from osbs.exceptions import (OsbsException, OsbsValidationException, OsbsResponseException,
OsbsOrchestratorNotEnabled)
from osbs.utils.labels import Labels
# import utils in this way, so that we can mock standalone functions with flexmock
from osbs import utils
from osbs.utils import (retry_on_conflict, graceful_chain_get, RegistryURI, ImageName,
stringify_values)
from six.moves import http_client, input
# Decorator for API methods.
def osbsapi(func):
    """Decorate an OSBS API method so every failure surfaces as OsbsException.

    OsbsException instances pass through unchanged, flexmock errors are
    re-raised so test suites can see them, and any other exception is
    wrapped in OsbsException with the original traceback preserved.
    A now-unsupported 'namespace' keyword argument is stripped with a warning.
    """
    @wraps(func)
    def wrapped(*args, **kwargs):
        if kwargs.pop("namespace", None):
            warnings.warn("OSBS.%s: the 'namespace' argument is no longer supported" %
                          func.__name__)
        try:
            return func(*args, **kwargs)
        except OsbsException:
            # Already the public exception type; re-raise unchanged.
            raise
        except Exception as ex:
            # Propagate flexmock errors immediately (used in test cases).
            if getattr(ex, '__module__', None) == 'flexmock':
                raise
            # Wrap everything else. Python 3 chains exceptions implicitly;
            # for Python 2 we keep the traceback explicitly.
            raise OsbsException(cause=ex, traceback=sys.exc_info()[2])
    return wrapped
# Sentinel for parameters the caller must supply explicitly; allows None to
# remain a legal user-provided value (see _do_create_prod_build).
_REQUIRED_PARAM = object()
logger = logging.getLogger(__name__)
# (platform, line) pair for one build log line -- presumably consumed by the
# orchestrator log readers further down the file; confirm with callers.
LogEntry = namedtuple('LogEntry', ['platform', 'line'])
def validate_arrangement_version(arrangement_version):
    """Check that the given arrangement version is still supported.

    :param int|None arrangement_version: version to be validated; None is
        accepted and skipped
    :raises ValueError: when the version is no longer supported
    """
    unsupported = arrangement_version is not None and arrangement_version <= 5
    if unsupported:
        raise ValueError('arrangement_version <= 5 is no longer supported')
class OSBS(object):
    """
    Client object for the OSBS build service API.

    Note: all API methods return osbs.http.Response object. This is, due to historical
    reasons, untrue for list_builds and get_user, which return list of BuildResponse objects
    and dict respectively.
    """
    # Labels that identify a build config's git source; all three must agree
    # for two build configs to be considered the same (see
    # _get_existing_build_config / _verify_labels_match).
    _GIT_LABEL_KEYS = ('git-repo-name', 'git-branch', 'git-full-repo')
    # Reduced label set used as a fallback match, filtered additionally by
    # FILTER_KEY -- presumably for build configs created before
    # 'git-full-repo' existed; confirm against project history.
    _OLD_LABEL_KEYS = ('git-repo-name', 'git-branch')
    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """Create the OSBS client.

        :param openshift_configuration: configuration object providing get_*
            accessors for OpenShift connection and authentication settings
        :param build_configuration: configuration object used when rendering
            build requests
        """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        # Low-level OpenShift/Kubernetes client; all connection and auth
        # settings come from the openshift configuration object.
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            k8s_api_url=self.os_conf.get_k8s_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            client_cert=self.os_conf.get_client_cert(),
                            client_key=self.os_conf.get_client_key(),
                            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                            kerberos_principal=self.os_conf.get_kerberos_principal(),
                            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl(),
                            token=self.os_conf.get_oauth2_token(),
                            namespace=self.os_conf.get_namespace())
        # Lazily-initialized state; not set up here.
        self._bm = None
@osbsapi
def list_builds(self, field_selector=None, koji_task_id=None, running=None,
labels=None):
"""
List builds with matching fields
:param field_selector: str, field selector for Builds
:param koji_task_id: str, only list builds for Koji Task ID
:return: BuildResponse list
"""
if running:
running_fs = ",".join(["status!={status}".format(status=status.capitalize())
for status in BUILD_FINISHED_STATES])
if not field_selector:
field_selector = running_fs
else:
field_selector = ','.join([field_selector, running_fs])
response = self.os.list_builds(field_selector=field_selector,
koji_task_id=koji_task_id, labels=labels)
serialized_response = response.json()
build_list = []
for build in serialized_response["items"]:
build_list.append(BuildResponse(build, self))
return build_list
def watch_builds(self, field_selector=None):
kwargs = {}
if field_selector is not None:
kwargs['fieldSelector'] = field_selector
for changetype, obj in self.os.watch_resource("builds", **kwargs):
yield changetype, obj
@osbsapi
def get_build(self, build_id):
response = self.os.get_build(build_id)
build_response = BuildResponse(response.json(), self)
return build_response
@osbsapi
def cancel_build(self, build_id):
response = self.os.cancel_build(build_id)
build_response = BuildResponse(response.json(), self)
return build_response
@osbsapi
def get_pod_for_build(self, build_id):
"""
:return: PodResponse object for pod relating to the build
"""
pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
serialized_response = pods.json()
pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
if not pod_list:
raise OsbsException("No pod for build")
elif len(pod_list) != 1:
raise OsbsException("Only one pod expected but %d returned" % len(pod_list))
return pod_list[0]
def _set_build_request_resource_limits(self, build_request):
"""Apply configured resource limits to build_request"""
assert isinstance(build_request, BaseBuildRequest)
cpu_limit = self.build_conf.get_cpu_limit()
memory_limit = self.build_conf.get_memory_limit()
storage_limit = self.build_conf.get_storage_limit()
if any(
limit is not None
for limit in (cpu_limit, memory_limit, storage_limit)
):
build_request.set_resource_limits(cpu=cpu_limit,
memory=memory_limit,
storage=storage_limit)
    @osbsapi
    def get_build_request(self, inner_template=None,
                          outer_template=None, customize_conf=None,
                          arrangement_version=DEFAULT_ARRANGEMENT_VERSION,
                          user_params=None, repo_info=None, **kwargs
                          ):
        """
        return instance of BuildRequestV2

        :param inner_template: str, name of inner template for BuildRequest
            (accepted for backward compatibility; not forwarded to BuildRequestV2)
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :param arrangement_version: int, value of the arrangement version
        :param user_params: BuildUserParams, user parameters for the build
        :param repo_info: RepoInfo, git repo data for the build
        :param kwargs: ignored; absorbed for backward compatibility
        :return: instance of BuildRequestV2
        :raises ValueError: when arrangement_version is no longer supported
        """
        validate_arrangement_version(arrangement_version)
        build_request = BuildRequestV2(
            build_json_store=self.os_conf.get_build_json_store(),
            osbs_api=self,
            outer_template=outer_template,
            customize_conf=customize_conf,
            user_params=user_params,
            repo_info=repo_info,
        )
        # Apply any configured cpu/memory/storage limits.
        self._set_build_request_resource_limits(build_request)
        return build_request
    @osbsapi
    def get_source_container_build_request(
            self, outer_template=None,
            arrangement_version=DEFAULT_ARRANGEMENT_VERSION,
            user_params=None,
    ):
        """
        return instance of SourceBuildRequest

        :param str outer_template: name of outer template for SourceBuildRequest
        :param int arrangement_version: arrangement version to validate
        :param user_params: SourceContainerUserParams for the build
        :return: instance of SourceBuildRequest
        :raises ValueError: when arrangement_version is no longer supported
        """
        validate_arrangement_version(arrangement_version)
        build_request = SourceBuildRequest(
            osbs_api=self,
            outer_template=outer_template,
            user_params=user_params,
        )
        # Apply any configured cpu/memory/storage limits.
        self._set_build_request_resource_limits(build_request)
        return build_request
@osbsapi
def create_build_from_buildrequest(self, build_request):
"""
render provided build_request and submit build from it
:param build_request: instance of build.build_request.BuildRequest
:return: instance of build.build_response.BuildResponse
"""
build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
build = build_request.render()
response = self.os.create_build(json.dumps(build))
build_response = BuildResponse(response.json(), self)
return build_response
def _get_running_builds_for_build_config(self, build_config_id):
all_builds_for_bc = self.os.list_builds(build_config_id=build_config_id).json()['items']
running = []
for b in all_builds_for_bc:
br = BuildResponse(b, self)
if br.is_pending() or br.is_running():
running.append(br)
return running
def _get_not_cancelled_builds_for_koji_task(self, koji_task_id):
all_builds_for_task = self.os.list_builds(koji_task_id=koji_task_id).json()['items']
not_cancelled = []
for b in all_builds_for_task:
br = BuildResponse(b, self)
build_labels = br.get_labels()
if not br.is_cancelled() and build_labels['is_autorebuild'] == "false":
not_cancelled.append(br)
return not_cancelled
def _verify_labels_match(self, new_build_config, existing_build_config):
new_labels = new_build_config['metadata']['labels']
existing_labels = existing_build_config['metadata']['labels']
for key in self._GIT_LABEL_KEYS:
new_label_value = new_labels.get(key)
existing_label_value = existing_labels.get(key)
if (existing_label_value and existing_label_value != new_label_value):
msg = (
'Git labels collide with existing build config "%s". '
'Existing labels: %r, '
'New labels: %r ') % (
existing_build_config['metadata']['name'],
existing_labels,
new_labels)
raise OsbsValidationException(msg)
    def _get_existing_build_config(self, build_config):
        """
        Uses the given build config to find an existing matching build config.

        Build configs are a match if:
        - metadata.labels.git-repo-name AND metadata.labels.git-branch AND
          metadata.labels.git-full-repo are equal
        OR
        - metadata.labels.git-repo-name AND metadata.labels.git-branch are equal AND
          metadata.spec.source.git.uri are equal
        OR
        - metadata.name are equal

        :param build_config: dict, rendered build config to match against
        :return: dict, the matching build config, or None when none is found
        """
        bc_labels = build_config['metadata']['labels']
        git_labels = {
            "label_selectors": [(key, bc_labels[key]) for key in self._GIT_LABEL_KEYS]
        }
        old_labels_kwargs = {
            "label_selectors": [(key, bc_labels[key]) for key in self._OLD_LABEL_KEYS],
            "filter_key": FILTER_KEY,
            "filter_value": graceful_chain_get(build_config, *FILTER_KEY.split('.'))
        }
        name = {
            "build_config_id": build_config['metadata']['name']
        }
        # Try the strictest match first and fall back to progressively
        # weaker queries; the first successful lookup wins.
        queries = (
            (self.os.get_build_config_by_labels, git_labels),
            (self.os.get_build_config_by_labels_filtered, old_labels_kwargs),
            (self.os.get_build_config, name),
        )
        existing_bc = None
        for func, kwargs in queries:
            try:
                existing_bc = func(**kwargs)
                # build config found
                break
            except OsbsException as exc:
                # doesn't exist
                logger.info('Build config NOT found via %s: %s',
                            func.__name__, str(exc))
                continue
        return existing_bc
def _verify_running_builds(self, build_config_name):
running_builds = self._get_running_builds_for_build_config(build_config_name)
rb_len = len(running_builds)
if rb_len > 0:
# report the number of simeltamous builds to detect build spam or runaway processes
builds = ', '.join(['%s: %s' % (b.get_build_name(), b.status) for b in running_builds])
logger.info("Multiple builds for %s running: %s", build_config_name, builds)
    def _create_scratch_build(self, build_request):
        # Scratch builds never create/update a BuildConfig; submit directly.
        return self._create_build_directly(build_request)
    def _create_isolated_build(self, build_request):
        # Isolated builds are also submitted directly, but must be unique per
        # repo/branch/isolated-release to prevent duplicate concurrent builds.
        return self._create_build_directly(build_request,
                                           unique=('git-repo-name', 'git-branch',
                                                   'isolated', 'isolated-release'))
    def _create_build_directly(self, build_request, unique=None):
        """Render build_request and create a Build (not a BuildConfig) from it.

        :param build_request: build request to render and submit
        :param unique: iterable of label names; when given, refuse to submit
            if a running build exists with the same values for those labels
        :return: BuildResponse for the newly created build
        :raises RuntimeError: when a matching build is already running
        """
        logger.debug(build_request)
        build_json = build_request.render()
        build_json['kind'] = 'Build'
        build_json['spec']['serviceAccount'] = 'builder'
        builder_img = build_json['spec']['strategy']['customStrategy']['from']
        # NOTE(review): build_kind and kind hold the same value; kept as-is.
        build_name = builder_img['name']
        build_kind = builder_img['kind']
        kind = builder_img['kind']
        if kind == 'ImageStreamTag':
            # Only BuildConfigs get to specify an ImageStreamTag. When
            # creating Builds directly we need to specify a
            # DockerImage.
            response = self.get_image_stream_tag(builder_img['name'])
            ref = response.json()['image']['dockerImageReference']
            builder_img['kind'] = 'DockerImage'
            builder_img['name'] = ref
        # Preserve the original builder image reference in an annotation so
        # it is not lost by the rewrite above.
        build_json['metadata'].setdefault('annotations', {})
        build_json['metadata']['annotations']['from'] = json.dumps({
            'kind': build_kind,
            'name': build_name})
        if unique:
            # Refuse to start when an equivalent build is already running.
            unique_labels = {}
            for u in unique:
                unique_labels[u] = build_json['metadata']['labels'][u]
            running_builds = self.list_builds(running=True, labels=unique_labels)
            if running_builds:
                raise RuntimeError('Matching build(s) already running: {}'
                                   .format(', '.join(x.get_build_name() for x in running_builds)))
        return BuildResponse(self.os.create_build(build_json).json(), self)
    @retry_on_conflict
    def _get_or_create_imagestream(self, imagestream_name, build_request):
        """Fetch imagestream_name, creating it when it does not exist (404).

        Also resolves the repository string for the build's base image,
        qualifying it with the source registry when the image carries no
        registry, and determines whether the registry is insecure.

        :return: tuple (image stream response, docker image repository str,
            insecure bool)
        :raises RuntimeError: when the base image names a registry that is
            neither the source registry nor a configured pull registry
        """
        insecure = False
        source_registry_uri = RegistryURI(build_request.source_registry['url']).docker_uri
        source_registry_insecure = build_request.source_registry.get('insecure', False)
        docker_image_repo = ImageName.parse(build_request.base_image)
        if not docker_image_repo.registry:
            # No registry in the image name: default to the source registry.
            docker_image_repo.registry = source_registry_uri
            insecure = source_registry_insecure
        else:
            if docker_image_repo.registry == source_registry_uri:
                insecure = source_registry_insecure
            else:
                # Explicit registry: it must be one of the configured pull
                # registries, otherwise refuse the build.
                allowed_registry = False
                if build_request.pull_registries:
                    for pull_reg in build_request.pull_registries:
                        if docker_image_repo.registry == RegistryURI(pull_reg['url']).docker_uri:
                            insecure = pull_reg.get('insecure', False)
                            allowed_registry = True
                            break
                if not allowed_registry:
                    raise RuntimeError('Not allowed explicitly specified registry: {}'.
                                       format(docker_image_repo.registry))
        # enclose only for source registry
        if docker_image_repo.registry == source_registry_uri and build_request.organization:
            docker_image_repo.enclose(build_request.organization)
        try:
            imagestream = self.get_image_stream(imagestream_name)
        except OsbsResponseException as x:
            if x.status_code != 404:
                raise
            # 404: the image stream does not exist yet -- create it.
            logger.info('Creating ImageStream %s for %s', imagestream_name, docker_image_repo)
            imagestream = self.create_image_stream(imagestream_name)
        return imagestream, docker_image_repo.to_str(), insecure
    def _get_image_stream_info_for_build_request(self, build_request):
        """Return image stream details for the base image of build_request.

        When the build request has no ImageStreamTag trigger, nothing is
        fetched and all values are None.

        :return: tuple (image stream dict or None, image stream tag name or
            None, docker image repository str or None, insecure bool or None)
        """
        image_stream_json = None
        image_stream_tag_name = None
        docker_image_repo = None
        insecure = None
        if build_request.has_ist_trigger():
            # trigger_imagestreamtag has the form "<stream>:<tag>".
            image_stream_tag_id = build_request.trigger_imagestreamtag
            image_stream_id, image_stream_tag_name = image_stream_tag_id.split(':')
            image_stream, docker_image_repo, insecure =\
                self._get_or_create_imagestream(image_stream_id, build_request)
            image_stream_json = image_stream.json()
        return image_stream_json, image_stream_tag_name, docker_image_repo, insecure
    @retry_on_conflict
    def _update_build_config_when_exist(self, build_json):
        """Merge build_json into the existing matching build config and save it.

        :param build_json: dict, freshly rendered build config
        :return: dict, the updated existing build config
        :raises OsbsValidationException: when git labels collide with the
            existing build config
        """
        existing_bc = self._get_existing_build_config(build_json)
        self._verify_labels_match(build_json, existing_bc)
        # Existing build config may have a different name if matched by
        # git-repo-name and git-branch labels. Continue using existing
        # build config name.
        build_config_name = existing_bc['metadata']['name']
        logger.debug('existing build config name to be used "%s"',
                     build_config_name)
        self._verify_running_builds(build_config_name)
        # Remove nodeSelector, will be set from build_json for worker build
        old_nodeselector = existing_bc['spec'].pop('nodeSelector', None)
        logger.debug("removing build config's nodeSelector %s", old_nodeselector)
        # Remove koji_task_id
        koji_task_id = utils.graceful_chain_get(existing_bc, 'metadata', 'labels',
                                                'koji-task-id')
        if koji_task_id is not None:
            logger.debug("removing koji-task-id %r", koji_task_id)
            utils.graceful_chain_del(existing_bc, 'metadata', 'labels', 'koji-task-id')
        utils.buildconfig_update(existing_bc, build_json)
        # Reset name change that may have occurred during
        # update above, since renaming is not supported.
        existing_bc['metadata']['name'] = build_config_name
        logger.debug('build config for %s already exists, updating...',
                     build_config_name)
        self.os.update_build_config(build_config_name, json.dumps(existing_bc))
        return existing_bc
@retry_on_conflict
def _update_build_config_with_triggers(self, build_json, triggers, is_autorebuild=False):
existing_bc = self._get_existing_build_config(build_json)
existing_bc['spec']['triggers'] = triggers
build_config_name = existing_bc['metadata']['name']
existing_bc['metadata']['labels']['is_autorebuild'] = "true" if is_autorebuild else "false"
self.os.update_build_config(build_config_name, json.dumps(existing_bc))
return existing_bc
    def _create_build_config_and_build(self, build_request):
        """Create or update the BuildConfig for build_request and start a build.

        When the build config carries ImageStreamTag triggers, the image
        stream objects are configured first and the build is expected to be
        instantiated by the trigger; otherwise the build is started
        explicitly.

        :param build_request: build request to render and submit
        :return: BuildResponse for the new build, or None when the build was
            skipped (build_request.skip_build)
        :raises OsbsException: when the triggering base image does not exist
        """
        build_json = build_request.render()
        build_config_name = build_json['metadata']['name']
        logger.debug('build config to be named "%s"', build_config_name)
        original_bc = self._get_existing_build_config(build_json)
        image_stream, image_stream_tag_name, docker_image_repo, insecure = \
            self._get_image_stream_info_for_build_request(build_request)
        # Remove triggers in BuildConfig to avoid accidental
        # auto instance of Build. If defined, triggers will
        # be added to BuildConfig after ImageStreamTag object
        # is properly configured.
        triggers = build_json['spec'].pop('triggers', [])
        if original_bc:
            build_config_name = original_bc['metadata']['name']
            existing_bc = self._update_build_config_when_exist(build_json)
        else:
            logger.debug("build config for %s doesn't exist, creating...",
                         build_config_name)
            existing_bc = self.os.create_build_config(json.dumps(build_json)).json()
        tag_id = None
        if image_stream:
            # Make sure the ImageStreamTag the trigger points at exists and
            # is scheduled for imports.
            changed_ist = self.ensure_image_stream_tag(image_stream,
                                                       image_stream_tag_name,
                                                       docker_image_repo,
                                                       scheduled=True,
                                                       insecure=insecure)
            logger.debug('Changed parent ImageStreamTag? %s', changed_ist)
            tag_id = '{}:{}'.format(image_stream['metadata']['name'], image_stream_tag_name)
        original_trigger = original_bc['spec']['triggers'] if original_bc else []
        if original_trigger:
            # Drop the recorded last trigger image so comparison below only
            # looks at the trigger definition itself.
            original_trigger[0]['imageChange'].pop('lastTriggeredImageID', None)
        if triggers or original_trigger:
            if triggers == original_trigger:
                logger.info("Trigger didn't change")
            else:
                logger.info("Trigger changed from : %s to %s", original_trigger, triggers)
        if triggers:
            is_autorebuild = False
            if build_request.skip_build and tag_id:
                imstreamtag = None
                try:
                    imstreamtag = self.get_image_stream_tag_with_retry(tag_id).json()
                except OsbsResponseException as exc:
                    if exc.status_code == http_client.NOT_FOUND:
                        logger.info("Imagestream tag doesn't exist yet: %s", tag_id)
                    else:
                        raise
                # when imagestream tag doesn't exist yet, we will just add the trigger,
                # without setting lastTriggeredImageID
                if imstreamtag:
                    triggers[0]['imageChange']['lastTriggeredImageID'] =\
                        imstreamtag['image']['dockerImageReference']
                    is_autorebuild = True
            if build_request.triggered_after_koji_task is not None:
                is_autorebuild = True
            existing_bc = self._update_build_config_with_triggers(build_json, triggers,
                                                                  is_autorebuild)
        if build_request.skip_build:
            logger.info('Build skipped')
            return
        if image_stream and triggers:
            # verify that imagestreamtag exists (if it doesn't non-existent image was provided)
            # because setting up autorebuilds with non-existent image is allowed, so users
            # may prepare their images for next build which will trigger autorebuilds
            # but they run build manually it will be waiting for new BC instance which
            # won't ever appear, because imagestreamtag doesn't exist yet
            try:
                self.get_image_stream_tag_with_retry(tag_id).json()
            except OsbsResponseException as exc:
                if exc.status_code == http_client.NOT_FOUND:
                    logger.info("Imagestream tag doesn't exist yet: %s", tag_id)
                    raise OsbsException('Provided base image does not exist: '
                                        '{}'.format(docker_image_repo))
                else:
                    raise
            # Triggered path: wait for the trigger to instantiate the build.
            prev_version = existing_bc['status']['lastVersion']
            build_id = self.os.wait_for_new_build_config_instance(
                build_config_name, prev_version)
            build = BuildResponse(self.os.get_build(build_id).json(), self)
        else:
            # No triggers: start the build explicitly.
            response = self.os.start_build(build_config_name)
            build = BuildResponse(response.json(), self)
        return build
    def _check_labels(self, repo_info):
        """Validate required Dockerfile labels and return their values.

        :param repo_info: repository info object exposing parsed Dockerfile
            labels via .labels
        :return: dict mapping label type to value for the name, component
            and version labels
        :raises OsbsValidationException: when a required label is missing or
            a label value is malformed
        """
        labels = repo_info.labels
        required_missing = False
        req_labels = {}
        # required labels which needs to have explicit value (not from env variable)
        explicit_labels = [Labels.LABEL_TYPE_NAME,
                           Labels.LABEL_TYPE_COMPONENT]
        # version label isn't used here, but is required label in Dockerfile
        # and is used and required for atomic reactor
        # if we don't catch error here, it will fail in atomic reactor later
        for label in [Labels.LABEL_TYPE_NAME,
                      Labels.LABEL_TYPE_COMPONENT,
                      Labels.LABEL_TYPE_VERSION]:
            try:
                _, req_labels[label] = labels.get_name_and_value(label)
                if label in explicit_labels and not req_labels[label]:
                    required_missing = True
                    logger.error("required label doesn't have explicit value in Dockerfile : %s",
                                 labels.get_name(label))
            except KeyError:
                required_missing = True
                logger.error("required label missing from Dockerfile : %s",
                             labels.get_name(label))
        try:
            _, release_value = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
            if release_value and not RELEASE_LABEL_FORMAT.match(release_value):
                logger.error("release label '%s' doesn't match regex : %s", release_value,
                             RELEASE_LABEL_FORMAT.pattern)
                raise OsbsValidationException("release label doesn't have proper format")
        except KeyError:
            # Release label is optional; only validated when present.
            pass
        try:
            _, version_value = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        # version doesn't exist
        except KeyError:
            pass
        else:
            # Version label must not contain any forbidden characters.
            if version_value:
                wrong_chars = \
                    [denied for denied in VERSION_LABEL_FORBIDDEN_CHARS if denied in version_value]
                if wrong_chars:
                    msg = "version label '{}' contains not allowed chars : '{}'".\
                        format(version_value, wrong_chars)
                    logger.error(msg)
                    raise OsbsValidationException(msg)
        if required_missing:
            raise OsbsValidationException("required label missing from Dockerfile")
        # Verify the name label meets requirements.
        # It is made up of slash-separated name components.
        #
        # When pulling an image, the first component of the name
        # pulled is interpreted as a registry name if it contains a
        # '.' character, and otherwise the configured registries are
        # queried in turn.
        #
        # Due to this, a name with '.' in its initial component will
        # be awkward to pull from a registry because the registry name
        # will have to be explicitly supplied, e.g. "docker pull
        # foo.bar/baz" will fail because the "foo.bar" registry cannot
        # be contacted.
        #
        # Avoid this awkwardness by forbidding '.' in the initial
        # component of the image name.
        name_components = req_labels[Labels.LABEL_TYPE_NAME].split('/', 1)
        if '.' in name_components[0]:
            raise OsbsValidationException("initial image name component "
                                          "must not contain '.'")
        return req_labels
# Gives flexmock something to mock
def get_user_params(self, component=None, req_labels=None, **kwargs):
req_labels = req_labels or {}
user_component = component or req_labels[Labels.LABEL_TYPE_COMPONENT]
return BuildUserParams.make_params(build_json_dir=self.os_conf.get_build_json_store(),
build_conf=self.build_conf,
component=user_component,
name_label=req_labels[Labels.LABEL_TYPE_NAME],
**kwargs)
    def _do_create_prod_build(self,
                              git_uri=_REQUIRED_PARAM, git_ref=_REQUIRED_PARAM,
                              git_branch=_REQUIRED_PARAM,
                              inner_template=None,
                              outer_template=None,
                              customize_conf=None,
                              build_type=None,
                              component=None,
                              flatpak=None,
                              git_commit_depth=None,
                              isolated=None,
                              koji_task_id=None,
                              target=None,
                              operator_csv_modifications_url=None,
                              **kwargs):
        """Validate inputs, build user params and submit a production build.

        :param git_uri: str, required; git repository URI
        :param git_ref: str, required; git commit reference
        :param git_branch: str, required; git branch name
        :return: BuildResponse for the submitted (or already running) build,
            or None when the build was skipped
        :raises OsbsException: on missing parameters, flatpak/container.yaml
            mismatch, or multiple existing builds for the koji task
        """
        # git_uri/git_ref/git_branch use the _REQUIRED_PARAM sentinel so
        # that an explicit None can be told apart from "not supplied".
        required_params = {"git_uri": git_uri, "git_ref": git_ref, "git_branch": git_branch}
        missing_params = []
        for param_name, param_arg in required_params.items():
            if param_arg is _REQUIRED_PARAM:
                missing_params.append(param_name)
        if missing_params:
            raise OsbsException('required parameter {} missing'.format(", ".join(missing_params)))
        if flatpak:
            if isolated:
                # Flatpak builds from a particular stream autogenerate the release
                # as <module_version>.<n>; it doesn't make sense to make a fix
                # from specific one of these autogenerated version. What an isolated
                # fix for module requires will have to be determined from experience.
                raise ValueError("Flatpak build cannot be isolated")
        if not git_branch:
            raise OsbsValidationException("required argument 'git_branch' can't be None")
        repo_info = utils.get_repo_info(git_uri, git_ref, git_branch=git_branch,
                                        depth=git_commit_depth)
        # The flatpak flag and the repository's container.yaml must agree.
        if flatpak and not repo_info.configuration.is_flatpak:
            raise OsbsException(
                "Flatpak build, "
                "but repository doesn't have a container.yaml with a flatpak: section")
        if not flatpak and repo_info.configuration.is_flatpak:
            raise OsbsException(
                "Not a flatpak build, "
                "but repository has a container.yaml with a flatpak: section")
        if operator_csv_modifications_url and not isolated:
            raise OsbsException('Only isolated build can update operator CSV metadata')
        req_labels = self._check_labels(repo_info)
        user_params = self.get_user_params(
            base_image=repo_info.base_image,
            build_type=build_type,
            component=component,
            flatpak=flatpak,
            isolated=isolated,
            koji_target=target,
            koji_task_id=koji_task_id,
            req_labels=req_labels,
            repo_info=repo_info,
            operator_csv_modifications_url=operator_csv_modifications_url,
            **kwargs)
        build_request = self.get_build_request(inner_template=inner_template,
                                               outer_template=outer_template,
                                               customize_conf=customize_conf,
                                               user_params=user_params,
                                               repo_info=repo_info)
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        builds_for_koji_task = []
        if koji_task_id and build_type == BUILD_TYPE_ORCHESTRATOR:
            # try to find build for koji_task which isn't canceled and use that one
            builds_for_koji_task = self._get_not_cancelled_builds_for_koji_task(koji_task_id)
        builds_count = len(builds_for_koji_task)
        if builds_count == 1:
            # Reuse the single existing build instead of creating a new one.
            logger.info("found running build for koji task: %s",
                        builds_for_koji_task[0].get_build_name())
            response =\
                BuildResponse(self.os.get_build(builds_for_koji_task[0].get_build_name()).json(),
                              self)
        elif builds_count > 1:
            raise OsbsException("Multiple builds %s for koji task id %s" %
                                (builds_count, koji_task_id))
        elif build_request.scratch:
            logger.info("creating scratch build")
            response = self._create_scratch_build(build_request)
        elif build_request.isolated:
            logger.info("creating isolated build")
            response = self._create_isolated_build(build_request)
        else:
            logger.info("creating build from build_config")
            response = self._create_build_config_and_build(build_request)
        # when build is skipped
        if response is None:
            return
        logger.debug(response.json)
        return response
    @osbsapi
    def create_build(self, **kwargs):
        """
        take input args, create build request and submit the build

        :param kwargs: keyword args for build (see _do_create_prod_build)
        :return: BuildResponse for the submitted build, or None when the
            build was skipped
        """
        return self._do_create_prod_build(**kwargs)
    @osbsapi
    def create_source_container_build(self,
                                      outer_template=None,
                                      arrangement_version=None,
                                      component=None,
                                      koji_task_id=None,
                                      target=None,
                                      **kwargs):
        """
        Take input args, create build request and submit the source image build

        :param outer_template: str, name of outer template; defaults to the
            orchestrator sources template
        :param arrangement_version: int|None, arrangement version to use
        :param component: str, required; component name for the build
        :param koji_task_id: str, reuse a non-cancelled build of this task
        :param target: str, koji target name
        :return: BuildResponse for the submitted (or already running) build
        :raises OsbsValidationException: when component is empty
        :raises OsbsException: when multiple builds exist for the koji task
        """
        build_json_store = self.os_conf.get_build_json_store()
        user_params = SourceContainerUserParams.make_params(
            build_json_dir=build_json_store,
            arrangement_version=arrangement_version,
            build_conf=self.build_conf,
            component=component,
            koji_target=target,
            koji_task_id=koji_task_id,
            **kwargs
        )
        build_request = self.get_source_container_build_request(
            outer_template=outer_template or ORCHESTRATOR_SOURCES_OUTER_TEMPLATE,
            user_params=user_params
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        # NOTE(review): component is validated only after the user params
        # and build request above have been constructed.
        error_messages = []
        if not component:
            error_messages.append("required argument 'component' can't be empty")
        if error_messages:
            raise OsbsValidationException(", ".join(error_messages))
        builds_for_koji_task = []
        if koji_task_id:
            # try to find build for koji_task which isn't canceled and use that one
            builds_for_koji_task = self._get_not_cancelled_builds_for_koji_task(koji_task_id)
        builds_count = len(builds_for_koji_task)
        if builds_count == 1:
            logger.info("found running build for koji task: %s",
                        builds_for_koji_task[0].get_build_name())
            response =\
                BuildResponse(self.os.get_build(builds_for_koji_task[0].get_build_name()).json(),
                              self)
        elif builds_count > 1:
            raise OsbsException("Multiple builds %s for koji task id %s" %
                                (builds_count, koji_task_id))
        else:
            logger.info("creating source container image build")
            response = self._create_build_directly(build_request)
        logger.debug(response.json)
        return response
    @osbsapi
    def create_worker_build(self, **kwargs):
        """
        Create a worker build

        Pass through method to create_prod_build with the following
        modifications:
        - platform param is required
        - release param is required
        - arrangement_version param is required, which is used to
          select which worker_inner:n.json template to use
        - inner template set to worker_inner:n.json if not set
        - outer template set to worker.json if not set
        - customize configuration set to worker_customize.json if not set

        :return: BuildResponse instance
        :raises ValueError: when a required param is missing or a
            'platforms' param was supplied
        """
        missing = set()
        for required in ('platform', 'release', 'arrangement_version'):
            if not kwargs.get(required):
                missing.add(required)
        if missing:
            raise ValueError("Worker build missing required parameters: %s" %
                             missing)
        if kwargs.get('platforms'):
            raise ValueError("Worker build called with unwanted platforms param")
        arrangement_version = kwargs['arrangement_version']
        # Fill in the worker templates unless the caller overrode them.
        kwargs.setdefault('inner_template', WORKER_INNER_TEMPLATE.format(
            arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)
        kwargs['build_type'] = BUILD_TYPE_WORKER
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            # A missing inner template file means the arrangement version
            # has no corresponding template.
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException("worker invalid arrangement_version %s" %
                                              arrangement_version)
            raise
    @osbsapi
    def create_orchestrator_build(self, **kwargs):
        """
        Create an orchestrator build

        Pass through method to create_prod_build with the following
        modifications:
        - platforms param is required
        - arrangement_version param may be used to select which
          orchestrator_inner:n.json template to use
        - inner template set to orchestrator_inner:n.json if not set
        - outer template set to orchestrator.json if not set
        - customize configuration set to orchestrator_customize.json if not set

        :return: BuildResponse instance
        :raises OsbsOrchestratorNotEnabled: when can_orchestrate is disabled
        :raises ValueError: on unwanted or missing parameters
        """
        if not self.can_orchestrate():
            raise OsbsOrchestratorNotEnabled("can't create orchestrate build "
                                             "when can_orchestrate isn't enabled")
        # 'platform' (singular) belongs to worker builds only.
        extra = [x for x in ('platform',) if kwargs.get(x)]
        if extra:
            raise ValueError("Orchestrator build called with unwanted parameters: %s" %
                             extra)
        arrangement_version = kwargs.setdefault('arrangement_version',
                                                self.build_conf.get_arrangement_version())
        if arrangement_version < REACTOR_CONFIG_ARRANGEMENT_VERSION and not kwargs.get('platforms'):
            raise ValueError('Orchestrator build requires platforms param')
        # Fill in the orchestrator templates unless the caller overrode them.
        kwargs.setdefault('inner_template', ORCHESTRATOR_INNER_TEMPLATE.format(
            arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)
        kwargs['build_type'] = BUILD_TYPE_ORCHESTRATOR
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            # A missing inner template file means the arrangement version
            # has no corresponding template.
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException("orchestrator invalid arrangement_version %s" %
                                              arrangement_version)
            raise
def _decode_build_logs_generator(self, logs):
for line in logs:
line = line.decode("utf-8").rstrip()
yield line
    @osbsapi
    def get_build_logs(self, build_id, follow=False, build_json=None, wait_if_missing=False,
                       decode=False):
        """
        provide logs from build

        NOTE: Since atomic-reactor 1.6.25, logs are always in UTF-8, so if
        asked to decode, we assume that is the encoding in use. Otherwise, we
        return the bytes exactly as they came from the container.

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :param decode: bool, whether or not to decode logs as utf-8
        :return: None, bytes, or iterable of bytes
        """
        logs = self.os.logs(build_id, follow=follow, build_json=build_json,
                            wait_if_missing=wait_if_missing)
        # Streaming (follow) responses come back as a generator; decode
        # lazily line by line in that case.
        if decode and isinstance(logs, GeneratorType):
            return self._decode_build_logs_generator(logs)
        # str or None returned from self.os.logs()
        if decode and logs is not None:
            logs = logs.decode("utf-8").rstrip()
        return logs
@staticmethod
def _parse_build_log_entry(entry):
items = entry.split()
if len(items) < 4:
# This is not a valid build log entry
return (None, entry)
platform = items[2]
if not platform.startswith("platform:"):
# Line logged without using the appropriate LoggerAdapter
return (None, entry)
platform = platform.split(":", 1)[1]
if platform == "-":
return (None, entry) # proper orchestrator build log entry
# Anything else should be a worker build log entry, so we strip off
# the leading 8 wrapping orchestrator log fields:
# <date> <time> <platform> - <name> - <level> -
plen = sum(len(items[i]) + 1 # include trailing space
for i in range(8))
line = entry[plen:]
# if the 3rd field is "platform:-", we strip it out
items = line.split()
if len(items) > 2 and items[2] == "platform:-":
plen = sum(len(items[i]) + 1 # include trailing space
for i in range(3))
line = "%s %s %s" % (items[0], items[1], line[plen:])
return (platform, line)
@osbsapi
def get_orchestrator_build_logs(self, build_id, follow=False, wait_if_missing=False):
"""
provide logs from orchestrator build
:param build_id: str
:param follow: bool, fetch logs as they come?
:param wait_if_missing: bool, if build doesn't exist, wait
:return: generator yielding objects with attributes 'platform' and 'line'
"""
logs = self.get_build_logs(build_id=build_id, follow=follow,
wait_if_missing=wait_if_missing, decode=True)
if logs is None:
return
if isinstance(logs, GeneratorType):
for entries in logs:
for entry in entries.splitlines():
yield LogEntry(*self._parse_build_log_entry(entry))
else:
for entry in logs.splitlines():
yield LogEntry(*self._parse_build_log_entry(entry))
@osbsapi
def wait_for_build_to_finish(self, build_id):
response = self.os.wait_for_build_to_finish(build_id)
build_response = BuildResponse(response, self)
return build_response
@osbsapi
def wait_for_build_to_get_scheduled(self, build_id):
response = self.os.wait_for_build_to_get_scheduled(build_id)
build_response = BuildResponse(response, self)
return build_response
@osbsapi
def update_labels_on_build(self, build_id, labels):
response = self.os.update_labels_on_build(build_id, labels)
return response
@osbsapi
def set_labels_on_build(self, build_id, labels):
response = self.os.set_labels_on_build(build_id, labels)
return response
@osbsapi
def update_labels_on_build_config(self, build_config_id, labels):
response = self.os.update_labels_on_build_config(build_config_id, labels)
return response
@osbsapi
def set_labels_on_build_config(self, build_config_id, labels):
response = self.os.set_labels_on_build_config(build_config_id, labels)
return response
@osbsapi
def update_annotations_on_build(self, build_id, annotations):
# annotations support only string, make sure it's string
# or json serializable object
annotations = stringify_values(annotations)
return self.os.update_annotations_on_build(build_id, annotations)
@osbsapi
def set_annotations_on_build(self, build_id, annotations):
# annotations support only string, make sure it's string
# or json serializable object
annotations = stringify_values(annotations)
return self.os.set_annotations_on_build(build_id, annotations)
@osbsapi
def import_image_tags(self, name, tags, repository, insecure=False):
"""Import image tags from specified container repository.
:param name: str, name of ImageStream object
:param tags: iterable, tags to be imported
:param repository: str, remote location of container image
in the format <registry>/<repository>
:param insecure: bool, indicates whenever registry is secure
:return: bool, whether tags were imported
"""
stream_import_file = os.path.join(self.os_conf.get_build_json_store(),
'image_stream_import.json')
with open(stream_import_file) as f:
stream_import = json.load(f)
return self.os.import_image_tags(name, stream_import, tags,
repository, insecure)
@osbsapi
def get_token(self):
if self.os.use_kerberos:
return self.os.get_oauth_token()
else:
if self.os.token:
return self.os.token
raise OsbsValidationException("no token stored for %s" % self.os_conf.conf_section)
    @osbsapi
    def login(self, token=None, username=None, password=None):
        """
        Acquire and persist an OAuth token for the configured instance.

        When no token is supplied, username/password are collected
        (interactively if necessary) and exchanged for an OAuth token.
        The token is validated against the API and then written to the
        per-instance token file with owner-only permissions.

        :param token: str, existing OAuth token; skips the username/password
            exchange when given
        :param username: str, login name; prompted for when omitted
        :param password: str, password; prompted for without echo when omitted
        :raises OsbsValidationException: when kerberos auth is configured, or
            when the server rejects the token as unauthorized
        """
        if self.os.use_kerberos:
            raise OsbsValidationException("can't use login when using kerberos")
        if not token:
            if username:
                self.os.username = username
            else:
                self.os.username = input("Username: ")
            if password:
                self.os.password = password
            else:
                self.os.password = getpass.getpass()
            self.os.use_auth = True
            token = self.os.get_oauth_token()
        self.os.token = token
        try:
            # Round-trip to the API to confirm the token actually works
            self.os.get_user()
        except OsbsResponseException as ex:
            if ex.status_code == http_client.UNAUTHORIZED:
                raise OsbsValidationException("token is not valid")
            raise
        token_file = utils.get_instance_token_file_name(self.os_conf.conf_section)
        token_file_dir = os.path.dirname(token_file)
        if not os.path.exists(token_file_dir):
            os.makedirs(token_file_dir)
        # Inspired by http://stackoverflow.com/a/15015748/5998718
        # For security, remove file with potentially elevated mode
        if os.path.exists(token_file):
            os.remove(token_file)
        # Open file descriptor
        # O_EXCL + mode 0600 guarantees a fresh file readable only by owner
        fdesc = os.open(token_file,
                        os.O_WRONLY | os.O_CREAT | os.O_EXCL,
                        stat.S_IRUSR | stat.S_IWUSR)
        with os.fdopen(fdesc, 'w') as f:
            f.write(token + '\n')
@osbsapi
def get_user(self, username="~"):
return self.os.get_user(username).json()
@osbsapi
def get_serviceaccount_tokens(self, username="~"):
return self.os.get_serviceaccount_tokens(username)
@osbsapi
def get_image_stream_tag(self, tag_id):
return self.os.get_image_stream_tag(tag_id)
@osbsapi
def get_image_stream_tag_with_retry(self, tag_id):
return self.os.get_image_stream_tag_with_retry(tag_id)
    @osbsapi
    def ensure_image_stream_tag(self, stream, tag_name, docker_image_repo, scheduled=False,
                                insecure=False):
        """Ensures the tag is monitored in ImageStream

        :param stream: dict, ImageStream object
        :param tag_name: str, name of tag to check, without name of
                              ImageStream as prefix
        :param docker_image_repo: str, full name of repository
        :param scheduled: bool, if True, importPolicy.scheduled will be
                                set to True in ImageStreamTag
        :param insecure: bool, whether the registry should be treated as
                               insecure (forwarded to the openshift client)
        :return: bool, whether or not modifications were performed
        """
        # The tag template ships with the build json store; the openshift
        # layer fills it in.
        img_stream_tag_file = os.path.join(self.os_conf.get_build_json_store(),
                                           'image_stream_tag.json')
        with open(img_stream_tag_file) as f:
            tag_template = json.load(f)
        return self.os.ensure_image_stream_tag(stream, tag_name, tag_template,
                                               docker_image_repo, scheduled,
                                               insecure=insecure)
@osbsapi
def get_image_stream(self, stream_id):
return self.os.get_image_stream(stream_id)
@osbsapi
def create_image_stream(self, name):
"""
Create an ImageStream object
Raises exception on error
:param name: str, name of ImageStream
:return: response
"""
img_stream_file = os.path.join(self.os_conf.get_build_json_store(), 'image_stream.json')
with open(img_stream_file) as f:
stream = json.load(f)
stream['metadata']['name'] = name
stream['metadata'].setdefault('annotations', {})
return self.os.create_image_stream(json.dumps(stream))
def _load_quota_json(self, quota_name=None):
quota_file = os.path.join(self.os_conf.get_build_json_store(),
'pause_quota.json')
with open(quota_file) as fp:
quota_json = json.load(fp)
if quota_name:
quota_json['metadata']['name'] = quota_name
return quota_json['metadata']['name'], quota_json
@osbsapi
def pause_builds(self, quota_name=None):
# First, set quota so 0 pods are allowed to be running
quota_name, quota_json = self._load_quota_json(quota_name)
self.os.create_resource_quota(quota_name, quota_json)
# Now wait for running builds to finish
while True:
field_selector = ','.join(['status=%s' % status.capitalize()
for status in BUILD_RUNNING_STATES])
builds = self.list_builds(field_selector)
# Double check builds are actually in running state.
running_builds = [build for build in builds if build.is_running()]
if not running_builds:
break
name = running_builds[0].get_build_name()
logger.info("waiting for build to finish: %s", name)
self.wait_for_build_to_finish(name)
@osbsapi
def resume_builds(self, quota_name=None):
quota_name, _ = self._load_quota_json(quota_name)
self.os.delete_resource_quota(quota_name)
    # implements subset of OpenShift's export logic in pkg/cmd/cli/cmd/exporter.go
    @staticmethod
    def _prepare_resource(resource):
        # Strip the server-assigned resourceVersion so the resource can be
        # re-created on restore; graceful_chain_del presumably tolerates a
        # missing key — confirm against its definition.
        utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')
@osbsapi
def dump_resource(self, resource_type):
return self.os.dump_resource(resource_type).json()
@osbsapi
def restore_resource(self, resource_type, resources, continue_on_error=False):
nfailed = 0
for r in resources["items"]:
name = utils.graceful_chain_get(r, 'metadata', 'name') or '(no name)'
logger.debug("restoring %s/%s", resource_type, name)
try:
self._prepare_resource(r)
self.os.restore_resource(resource_type, r)
except Exception:
if continue_on_error:
logger.exception("failed to restore %s/%s", resource_type, name)
nfailed += 1
else:
raise
if continue_on_error:
ntotal = len(resources["items"])
logger.info("restored %s/%s %s", ntotal - nfailed, ntotal, resource_type)
@osbsapi
def list_resource_quotas(self):
return self.os.list_resource_quotas().json()
@osbsapi
def get_resource_quota(self, quota_name):
return self.os.get_resource_quota(quota_name).json()
@osbsapi
def can_orchestrate(self):
return self.build_conf.get_can_orchestrate()
@osbsapi
def create_config_map(self, name, data):
"""
Create an ConfigMap object on the server
Raises exception on error
:param name: str, name of configMap
:param data: dict, dictionary of data to be stored
:returns: ConfigMapResponse containing the ConfigMap with name and data
"""
config_data_file = os.path.join(self.os_conf.get_build_json_store(), 'config_map.json')
with open(config_data_file) as f:
config_data = json.load(f)
config_data['metadata']['name'] = name
data_dict = {}
for key, value in data.items():
data_dict[key] = json.dumps(value)
config_data['data'] = data_dict
response = self.os.create_config_map(config_data)
config_map_response = ConfigMapResponse(response.json())
return config_map_response
@osbsapi
def get_config_map(self, name):
"""
Get a ConfigMap object from the server
Raises exception on error
:param name: str, name of configMap to get from the server
:returns: ConfigMapResponse containing the ConfigMap with the requested name
"""
response = self.os.get_config_map(name)
config_map_response = ConfigMapResponse(response.json())
return config_map_response
    @osbsapi
    def delete_config_map(self, name):
        """
        Delete a ConfigMap object from the server

        Raises exception on error

        :param name: str, name of configMap to delete from the server
        :returns: None
        """
        self.os.delete_config_map(name)
@contextmanager
def retries_disabled(self):
"""
Context manager to disable retries on requests
:returns: OSBS object
"""
self.os.retries_enabled = False
yield
self.os.retries_enabled = True
@osbsapi
def render_plugins_configuration(self, user_params_json):
user_params = load_user_params_from_json(user_params_json)
if user_params.KIND == USER_PARAMS_KIND_IMAGE_BUILDS:
return PluginsConfiguration(user_params).render()
elif user_params.KIND == USER_PARAMS_KIND_SOURCE_CONTAINER_BUILDS:
return SourceContainerPluginsConfiguration(user_params).render()
else:
raise RuntimeError(
"Unexpected user params kind: {}".format(user_params.KIND)
)
|
{
"content_hash": "abbc72e39e386a98eeec8e37056cdb16",
"timestamp": "",
"source": "github",
"line_count": 1398,
"max_line_length": 100,
"avg_line_length": 41.31974248927039,
"alnum_prop": 0.5845061888686921,
"repo_name": "projectatomic/osbs-client",
"id": "a98320ac80c1a243f647b53e9bb1bc1ecd728c3c",
"size": "57765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "osbs/api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "1379"
},
{
"name": "Python",
"bytes": "932661"
},
{
"name": "Shell",
"bytes": "5034"
}
],
"symlink_target": ""
}
|
"""Sparse Dtype"""
from __future__ import annotations
import re
from typing import (
TYPE_CHECKING,
Any,
)
import warnings
import numpy as np
from pandas._typing import (
Dtype,
DtypeObj,
type_t,
)
from pandas.errors import PerformanceWarning
from pandas.core.dtypes.base import (
ExtensionDtype,
register_extension_dtype,
)
from pandas.core.dtypes.cast import astype_nansafe
from pandas.core.dtypes.common import (
is_bool_dtype,
is_object_dtype,
is_scalar,
is_string_dtype,
pandas_dtype,
)
from pandas.core.dtypes.missing import (
isna,
na_value_for_dtype,
)
if TYPE_CHECKING:
from pandas.core.arrays.sparse.array import SparseArray
@register_extension_dtype
class SparseDtype(ExtensionDtype):
    """
    Dtype for data stored in :class:`SparseArray`.

    This dtype implements the pandas ExtensionDtype interface.

    Parameters
    ----------
    dtype : str, ExtensionDtype, numpy.dtype, type, default numpy.float64
        The dtype of the underlying array storing the non-fill value values.
    fill_value : scalar, optional
        The scalar value not stored in the SparseArray. By default, this
        depends on `dtype`.

        =========== ==========
        dtype       na_value
        =========== ==========
        float       ``np.nan``
        int         ``0``
        bool        ``False``
        datetime64  ``pd.NaT``
        timedelta64 ``pd.NaT``
        =========== ==========

        The default value may be overridden by specifying a `fill_value`.

    Attributes
    ----------
    None

    Methods
    -------
    None
    """
    # We include `_is_na_fill_value` in the metadata to avoid hash collisions
    # between SparseDtype(float, 0.0) and SparseDtype(float, nan).
    # Without is_na_fill_value in the comparison, those would be equal since
    # hash(nan) is (sometimes?) 0.
    _metadata = ("_dtype", "_fill_value", "_is_na_fill_value")
    def __init__(self, dtype: Dtype = np.float64, fill_value: Any = None):
        # Copying another SparseDtype: reuse its subtype and (unless
        # explicitly overridden) its fill value.
        if isinstance(dtype, type(self)):
            if fill_value is None:
                fill_value = dtype.fill_value
            dtype = dtype.subtype
        dtype = pandas_dtype(dtype)
        # String data is stored in an object-dtype array.
        if is_string_dtype(dtype):
            dtype = np.dtype("object")
        if fill_value is None:
            fill_value = na_value_for_dtype(dtype)
        if not is_scalar(fill_value):
            raise ValueError(f"fill_value must be a scalar. Got {fill_value} instead")
        self._dtype = dtype
        self._fill_value = fill_value
    def __hash__(self):
        # Python3 doesn't inherit __hash__ when a base class overrides
        # __eq__, so we explicitly do it here.
        return super().__hash__()
    def __eq__(self, other: Any) -> bool:
        # We have to override __eq__ to handle NA values in _metadata.
        # The base class does simple == checks, which fail for NA.
        if isinstance(other, str):
            try:
                other = self.construct_from_string(other)
            except TypeError:
                return False
        if isinstance(other, type(self)):
            subtype = self.subtype == other.subtype
            if self._is_na_fill_value:
                # this case is complicated by two things:
                # SparseDtype(float, float(nan)) == SparseDtype(float, np.nan)
                # SparseDtype(float, np.nan) != SparseDtype(float, pd.NaT)
                # i.e. we want to treat any floating-point NaN as equal, but
                # not a floating-point NaN and a datetime NaT.
                # NOTE(review): `and` binds tighter than `or`, so this parses
                # as (other._is_na_fill_value and isinstance(self_fv,
                # type(other_fv))) or isinstance(other_fv, type(self_fv)).
                # When other's fill value is NOT NA the second isinstance
                # alone can still report equality — confirm this is intended,
                # since the _metadata comment above implies it should not be.
                fill_value = (
                    other._is_na_fill_value
                    and isinstance(self.fill_value, type(other.fill_value))
                    or isinstance(other.fill_value, type(self.fill_value))
                )
            else:
                fill_value = self.fill_value == other.fill_value
            return subtype and fill_value
        return False
    @property
    def fill_value(self):
        """
        The fill value of the array.

        Converting the SparseArray to a dense ndarray will fill the
        array with this value.

        .. warning::

           It's possible to end up with a SparseArray that has ``fill_value``
           values in ``sp_values``. This can occur, for example, when setting
           ``SparseArray.fill_value`` directly.
        """
        return self._fill_value
    @property
    def _is_na_fill_value(self) -> bool:
        # True when the fill value is a missing-value marker (nan/NaT/None).
        return isna(self.fill_value)
    @property
    def _is_numeric(self) -> bool:
        # Everything except object subtype counts as numeric here.
        return not is_object_dtype(self.subtype)
    @property
    def _is_boolean(self) -> bool:
        return is_bool_dtype(self.subtype)
    @property
    def kind(self):
        """
        The sparse kind. Either 'integer', or 'block'.

        NOTE(review): the code returns ``self.subtype.kind`` (the numpy kind
        character of the subtype, e.g. 'f'/'i'), which does not match this
        historical wording — confirm which is intended.
        """
        return self.subtype.kind
    @property
    def type(self):
        # Scalar type object of the subtype (mirrors numpy.dtype.type).
        return self.subtype.type
    @property
    def subtype(self):
        # The dtype of the non-fill values stored in the underlying array.
        return self._dtype
    @property
    def name(self):
        # e.g. "Sparse[float64, nan]"; round-trips through
        # construct_from_string for default fill values.
        return f"Sparse[{self.subtype.name}, {repr(self.fill_value)}]"
    def __repr__(self) -> str:
        return self.name
    @classmethod
    def construct_array_type(cls) -> type_t[SparseArray]:
        """
        Return the array type associated with this dtype.

        Returns
        -------
        type
        """
        from pandas.core.arrays.sparse.array import SparseArray
        return SparseArray
    @classmethod
    def construct_from_string(cls, string: str) -> SparseDtype:
        """
        Construct a SparseDtype from a string form.

        Parameters
        ----------
        string : str
            Can take the following forms.

            ================ ============================
            string           dtype
            ================ ============================
            'int'            SparseDtype[np.int64, 0]
            'Sparse'         SparseDtype[np.float64, nan]
            'Sparse[int]'    SparseDtype[np.int64, 0]
            'Sparse[int, 0]' SparseDtype[np.int64, 0]
            ================ ============================

            It is not possible to specify non-default fill values
            with a string. An argument like ``'Sparse[int, 1]'``
            will raise a ``TypeError`` because the default fill value
            for integers is 0.

        Returns
        -------
        SparseDtype

        Raises
        ------
        TypeError
            When the string is not a valid Sparse spec, or spells out a
            non-default fill value.
        """
        if not isinstance(string, str):
            raise TypeError(
                f"'construct_from_string' expects a string, got {type(string)}"
            )
        msg = f"Cannot construct a 'SparseDtype' from '{string}'"
        if string.startswith("Sparse"):
            try:
                sub_type, has_fill_value = cls._parse_subtype(string)
            except ValueError as err:
                raise TypeError(msg) from err
            else:
                result = SparseDtype(sub_type)
                msg = (
                    f"Cannot construct a 'SparseDtype' from '{string}'.\n\nIt "
                    "looks like the fill_value in the string is not "
                    "the default for the dtype. Non-default fill_values "
                    "are not supported. Use the 'SparseDtype()' "
                    "constructor instead."
                )
                # Round-trip check: a spelled-out fill value must match the
                # default repr produced for the subtype.
                if has_fill_value and str(result) != string:
                    raise TypeError(msg)
                return result
        else:
            raise TypeError(msg)
    @staticmethod
    def _parse_subtype(dtype: str) -> tuple[str, bool]:
        """
        Parse a string to get the subtype

        Parameters
        ----------
        dtype : str
            A string like

            * Sparse[subtype]
            * Sparse[subtype, fill_value]

        Returns
        -------
        subtype : str
        has_fill_value : bool
            Whether a fill value was spelled out in the string.

        Raises
        ------
        ValueError
            When the subtype cannot be extracted.
        """
        xpr = re.compile(r"Sparse\[(?P<subtype>[^,]*)(, )?(?P<fill_value>.*?)?\]$")
        m = xpr.match(dtype)
        has_fill_value = False
        if m:
            subtype = m.groupdict()["subtype"]
            has_fill_value = bool(m.groupdict()["fill_value"])
        elif dtype == "Sparse":
            # Bare "Sparse" defaults to float64
            subtype = "float64"
        else:
            raise ValueError(f"Cannot parse {dtype}")
        return subtype, has_fill_value
    @classmethod
    def is_dtype(cls, dtype: object) -> bool:
        # Accept SparseDtype instances, "Sparse[...]" strings, the bare
        # "Sparse" string, and objects exposing a compatible .dtype.
        dtype = getattr(dtype, "dtype", dtype)
        if isinstance(dtype, str) and dtype.startswith("Sparse"):
            sub_type, _ = cls._parse_subtype(dtype)
            dtype = np.dtype(sub_type)
        elif isinstance(dtype, cls):
            return True
        return isinstance(dtype, np.dtype) or dtype == "Sparse"
    def update_dtype(self, dtype):
        """
        Convert the SparseDtype to a new dtype.

        This takes care of converting the ``fill_value``.

        Parameters
        ----------
        dtype : Union[str, numpy.dtype, SparseDtype]
            The new dtype to use.

            * For a SparseDtype, it is simply returned
            * For a NumPy dtype (or str), the current fill value
              is converted to the new dtype, and a SparseDtype
              with `dtype` and the new fill value is returned.

        Returns
        -------
        SparseDtype
            A new SparseDtype with the correct `dtype` and fill value
            for that `dtype`.

        Raises
        ------
        ValueError
            When the current fill value cannot be converted to the
            new `dtype` (e.g. trying to convert ``np.nan`` to an
            integer dtype).

        Examples
        --------
        >>> SparseDtype(int, 0).update_dtype(float)
        Sparse[float64, 0.0]

        >>> SparseDtype(int, 1).update_dtype(SparseDtype(float, np.nan))
        Sparse[float64, nan]
        """
        cls = type(self)
        dtype = pandas_dtype(dtype)
        if not isinstance(dtype, cls):
            if not isinstance(dtype, np.dtype):
                raise TypeError("sparse arrays of extension dtypes not supported")
            # Cast the current fill value to the target dtype; raises when
            # the value cannot be represented (e.g. nan -> int).
            fill_value = astype_nansafe(np.array(self.fill_value), dtype).item()
            dtype = cls(dtype, fill_value=fill_value)
        return dtype
    @property
    def _subtype_with_str(self):
        """
        Whether the SparseDtype's subtype should be considered ``str``.

        Typically, pandas will store string data in an object-dtype array.
        When converting values to a dtype, e.g. in ``.astype``, we need to
        be more specific, we need the actual underlying type.

        Returns
        -------
        >>> SparseDtype(int, 1)._subtype_with_str
        dtype('int64')

        >>> SparseDtype(object, 1)._subtype_with_str
        dtype('O')

        >>> dtype = SparseDtype(str, '')

        >>> dtype.subtype
        dtype('O')

        >>> dtype._subtype_with_str
        <class 'str'>
        """
        if isinstance(self.fill_value, str):
            return type(self.fill_value)
        return self.subtype
    def _get_common_dtype(self, dtypes: list[DtypeObj]) -> DtypeObj | None:
        # TODO for now only handle SparseDtypes and numpy dtypes => extend
        # with other compatible extension dtypes
        if any(
            isinstance(x, ExtensionDtype) and not isinstance(x, SparseDtype)
            for x in dtypes
        ):
            return None
        fill_values = [x.fill_value for x in dtypes if isinstance(x, SparseDtype)]
        fill_value = fill_values[0]
        # np.nan isn't a singleton, so we may end up with multiple
        # NaNs here, so we ignore the all NA case too.
        if not (len(set(fill_values)) == 1 or isna(fill_values).all()):
            warnings.warn(
                "Concatenating sparse arrays with multiple fill "
                f"values: '{fill_values}'. Picking the first and "
                "converting the rest.",
                PerformanceWarning,
                # NOTE(review): hard-coded stacklevel assumes a fixed call
                # depth from the user's frame — confirm.
                stacklevel=6,
            )
        # Resolve the common numpy subtype, then re-wrap as sparse.
        np_dtypes = [x.subtype if isinstance(x, SparseDtype) else x for x in dtypes]
        return SparseDtype(np.find_common_type(np_dtypes, []), fill_value=fill_value)
|
{
"content_hash": "5accdf4e8a645ae0e4eeda3d53e7a028",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 86,
"avg_line_length": 30.71212121212121,
"alnum_prop": 0.5473606314750863,
"repo_name": "jorisvandenbossche/pandas",
"id": "915e13bc3bbb2042b5ecf1b99deee5d05b4be8cb",
"size": "12162",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pandas/core/arrays/sparse/dtype.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "127"
},
{
"name": "C",
"bytes": "360342"
},
{
"name": "CSS",
"bytes": "1438"
},
{
"name": "Cython",
"bytes": "1083849"
},
{
"name": "Dockerfile",
"bytes": "1690"
},
{
"name": "HTML",
"bytes": "456275"
},
{
"name": "Makefile",
"bytes": "507"
},
{
"name": "Python",
"bytes": "17541583"
},
{
"name": "Shell",
"bytes": "10719"
},
{
"name": "Smarty",
"bytes": "7820"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
import dbdriver
# Read the long description up front: the original passed
# open('README.md').read() inline, leaking the file handle and relying on the
# locale's default encoding. A context manager with explicit UTF-8 fixes both.
with open('README.md', encoding='utf-8') as readme:
    long_description = readme.read()

setup(
    name='dbdriver',
    version=dbdriver.__version__,
    packages=find_packages(),
    author="Asteroide",
    author_email="asteroide__AT__domtombox.net",
    description="An API server",
    long_description=long_description,
    # install_requires= ,
    include_package_data=True,
    url='https://github.com/asteroide/immo_spider',
    classifiers=[
        "Programming Language :: Python",
        "Development Status :: 1 - Planning",
        "License :: OSI Approved",
        "Natural Language :: French",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.5",
        "Topic :: Internet :: WWW/HTTP :: Indexing/Search",
        "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)"
    ],
    entry_points={
        'console_scripts': [
            'spider_db = dbdriver.controller:main',
        ],
    },
)
|
{
"content_hash": "bd6d3be4e5c940c6bffb13a5b56d7608",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 82,
"avg_line_length": 22.6046511627907,
"alnum_prop": 0.6018518518518519,
"repo_name": "asteroide/immo_spider",
"id": "106761af3334e28572592bdbb6c32dd472de641a",
"size": "972",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/dbdriver/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1344"
},
{
"name": "HTML",
"bytes": "3640"
},
{
"name": "JavaScript",
"bytes": "7172"
},
{
"name": "Python",
"bytes": "45960"
},
{
"name": "Shell",
"bytes": "3193"
}
],
"symlink_target": ""
}
|
import threading
import os
from collections import OrderedDict
from .pgcompleter import PGCompleter
class CompletionRefresher:
    """Rebuilds the auto-completion object in a background thread."""

    # Registry of named refresh callbacks, populated by the @refresher
    # decorator below; insertion order is preserved deliberately.
    refreshers = OrderedDict()

    def __init__(self):
        self._completer_thread = None
        self._restart_refresh = threading.Event()

    def refresh(self, executor, special, callbacks, history=None, settings=None):
        """
        Creates a PGCompleter object and populates it with the relevant
        completion suggestions in a background thread.

        executor - PGExecute object, used to extract the credentials to connect
                   to the database.
        special - PGSpecial object used for creating a new completion object.
        settings - dict of settings for completer object
        callbacks - A function or a list of functions to call after the thread
                    has completed the refresh. The newly created completion
                    object will be passed in as an argument to each callback.
        """
        if executor.is_virtual_database():
            # do nothing
            return [(None, None, None, "Auto-completion refresh can't be started.")]
        if self.is_refreshing():
            self._restart_refresh.set()
            return [(None, None, None, "Auto-completion refresh restarted.")]
        else:
            self._completer_thread = threading.Thread(
                target=self._bg_refresh,
                args=(executor, special, callbacks, history, settings),
                name="completion_refresh",
            )
            # Thread.setDaemon() is deprecated (since Python 3.10); assign the
            # daemon attribute directly instead.
            self._completer_thread.daemon = True
            self._completer_thread.start()
            return [
                (None, None, None, "Auto-completion refresh started in the background.")
            ]

    def is_refreshing(self):
        """Return truthy when a refresh thread exists and is still alive."""
        return self._completer_thread and self._completer_thread.is_alive()

    def _bg_refresh(self, pgexecute, special, callbacks, history=None, settings=None):
        """Thread body: run every registered refresher, then invoke callbacks.

        Restarts from the first refresher whenever _restart_refresh is set.
        """
        settings = settings or {}
        completer = PGCompleter(
            smart_completion=True, pgspecial=special, settings=settings
        )
        if settings.get("single_connection"):
            executor = pgexecute
        else:
            # Create a new pgexecute method to populate the completions.
            executor = pgexecute.copy()
        # If callbacks is a single function then push it into a list.
        if callable(callbacks):
            callbacks = [callbacks]
        while True:
            for refresher in self.refreshers.values():
                refresher(completer, executor)
                if self._restart_refresh.is_set():
                    self._restart_refresh.clear()
                    break
            else:
                # Break out of while loop if the for loop finishes naturally
                # without hitting the break statement.
                break
            # Start over the refresh from the beginning if the for loop hit the
            # break statement.
            continue
        # Load history into pgcompleter so it can learn user preferences
        n_recent = 100
        if history:
            for recent in history.get_strings()[-n_recent:]:
                completer.extend_query_history(recent, is_init=True)
        for callback in callbacks:
            callback(completer)
        if not settings.get("single_connection") and executor.conn:
            # close connection established with pgexecute.copy()
            executor.conn.close()
def refresher(name, refreshers=CompletionRefresher.refreshers):
    """Register the decorated function under *name* in *refreshers*.

    The function is returned unchanged so it can still be called directly;
    by default it lands in CompletionRefresher.refreshers.
    """
    def register(func):
        refreshers[name] = func
        return func
    return register
@refresher("schemata")
def refresh_schemata(completer, executor):
    """Refresh the search path and the list of schemata."""
    search_path = executor.search_path()
    completer.set_search_path(search_path)
    completer.extend_schemata(executor.schemata())
@refresher("tables")
def refresh_tables(completer, executor):
    """Refresh tables, their columns, and foreign keys."""
    tables = executor.tables()
    completer.extend_relations(tables, kind="tables")
    table_columns = executor.table_columns()
    completer.extend_columns(table_columns, kind="tables")
    completer.extend_foreignkeys(executor.foreignkeys())
@refresher("views")
def refresh_views(completer, executor):
    """Refresh views and their columns."""
    views = executor.views()
    completer.extend_relations(views, kind="views")
    view_columns = executor.view_columns()
    completer.extend_columns(view_columns, kind="views")
@refresher("types")
def refresh_types(completer, executor):
    """Refresh the known data types."""
    datatypes = executor.datatypes()
    completer.extend_datatypes(datatypes)
@refresher("databases")
def refresh_databases(completer, executor):
    """Refresh the list of database names."""
    names = executor.databases()
    completer.extend_database_names(names)
@refresher("casing")
def refresh_casing(completer, executor):
    """Load the casing preferences file into the completer, generating it
    from the database first when configured to and the file is absent."""
    casing_file = completer.casing_file
    if not casing_file:
        return
    if completer.generate_casing_file and not os.path.isfile(casing_file):
        with open(casing_file, "w") as out:
            out.write("\n".join(executor.casing()))
    if os.path.isfile(casing_file):
        with open(casing_file) as src:
            completer.extend_casing([entry.strip() for entry in src])
@refresher("functions")
def refresh_functions(completer, executor):
    """Refresh the known functions."""
    functions = executor.functions()
    completer.extend_functions(functions)
|
{
"content_hash": "910226622debea17054dc698132dd29a",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 88,
"avg_line_length": 34.42483660130719,
"alnum_prop": 0.6394531991646099,
"repo_name": "dbcli/pgcli",
"id": "1039d51599f40b02bdf84cb3282330a09aa71225",
"size": "5267",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pgcli/completion_refresher.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "72"
},
{
"name": "Gherkin",
"bytes": "6498"
},
{
"name": "Python",
"bytes": "443552"
},
{
"name": "Shell",
"bytes": "1944"
}
],
"symlink_target": ""
}
|
from math import pi as PI
from module import Klass
def class_decorator(cls):
    """Attach a __call__ that prints 'Cabbage!' to *cls*, returning the class."""
    def _call(self):
        print('Cabbage!')
    cls.__call__ = _call
    return cls
@class_decorator
class Class(Klass):
    # Sample class exercising decorators, properties, numeric literals and
    # `nonlocal` — this file is a syntax-highlighting fixture.
    @property
    def property(self):
        # Deliberately shadows the builtin name `property`.
        temp, ellipsis = self._property
        return {temp} if temp%0x12f2 else set()
    @property.setter
    def property(self, value):
        # `property` on the decorator line is the property object defined just
        # above, so `.setter` attaches this function as its setter.
        try:
            temp = value//0o123
        except TypeError:
            temp = 1.
        def do_something():
            nonlocal temp
            return temp, ...
        self._property = do_something()
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        print("I'm", "alive!", sep='\n')
def func(self: 'Class', domain: [0b00, PI], opt: bool=True) -> None:
    """Sample function exercising annotations and raw-string literals."""
    # Raw strings keep the bracketed regex fragments intact; the locals are
    # intentionally unused (highlighting fixture).
    bracket_open = r'[[]'
    bracket_close = R'[]]'
    non_word = r'[^a-zA-Z_]'
if __name__ == '__main__':
    # Exercise the sample class when the file is run directly.
    c = Class()
    c.func(.12)
    c.property = 0b1011101110
|
{
"content_hash": "164b0db4087716413b548ab2816dedd3",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 68,
"avg_line_length": 21.128205128205128,
"alnum_prop": 0.6104368932038835,
"repo_name": "hrsetyono/theme_pacific",
"id": "6a00bf7a74c744b7b4909d1f31418eec4c28033c",
"size": "846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sample/python3.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
from hanlp.datasets.parsing.loaders._ctb_utils import make_ctb
# Root URL of the CTB8 (LDC2013T21) archive; the '#data/' fragment selects
# the data/ subdirectory inside the downloaded tarball.
_CTB8_HOME = 'https://wakespace.lib.wfu.edu/bitstream/handle/10339/39379/LDC2013T21.tgz#data/'
# --- Chinese word segmentation splits ---
CTB8_CWS_TRAIN = _CTB8_HOME + 'tasks/cws/train.txt'
'''Training set for ctb8 Chinese word segmentation.'''
CTB8_CWS_DEV = _CTB8_HOME + 'tasks/cws/dev.txt'
'''Dev set for ctb8 Chinese word segmentation.'''
CTB8_CWS_TEST = _CTB8_HOME + 'tasks/cws/test.txt'
'''Test set for ctb8 Chinese word segmentation.'''
# --- part-of-speech tagging splits ---
CTB8_POS_TRAIN = _CTB8_HOME + 'tasks/pos/train.tsv'
'''Training set for ctb8 PoS tagging.'''
CTB8_POS_DEV = _CTB8_HOME + 'tasks/pos/dev.tsv'
'''Dev set for ctb8 PoS tagging.'''
CTB8_POS_TEST = _CTB8_HOME + 'tasks/pos/test.tsv'
'''Test set for ctb8 PoS tagging.'''
# --- constituency parsing splits (with empty categories) ---
CTB8_BRACKET_LINE_TRAIN = _CTB8_HOME + 'tasks/par/train.txt'
'''Training set for ctb8 constituency parsing with empty categories.'''
CTB8_BRACKET_LINE_DEV = _CTB8_HOME + 'tasks/par/dev.txt'
'''Dev set for ctb8 constituency parsing with empty categories.'''
CTB8_BRACKET_LINE_TEST = _CTB8_HOME + 'tasks/par/test.txt'
'''Test set for ctb8 constituency parsing with empty categories.'''
# --- constituency parsing splits (empty categories removed) ---
CTB8_BRACKET_LINE_NOEC_TRAIN = _CTB8_HOME + 'tasks/par/train.noempty.txt'
'''Training set for ctb8 constituency parsing without empty categories.'''
CTB8_BRACKET_LINE_NOEC_DEV = _CTB8_HOME + 'tasks/par/dev.noempty.txt'
'''Dev set for ctb8 constituency parsing without empty categories.'''
CTB8_BRACKET_LINE_NOEC_TEST = _CTB8_HOME + 'tasks/par/test.noempty.txt'
'''Test set for ctb8 constituency parsing without empty categories.'''
# --- dependency parsing splits (Stanford Dependencies 3.3.0) ---
CTB8_SD330_TRAIN = _CTB8_HOME + 'tasks/dep/train.conllx'
'''Training set for ctb8 in Stanford Dependencies 3.3.0 standard.'''
CTB8_SD330_DEV = _CTB8_HOME + 'tasks/dep/dev.conllx'
'''Dev set for ctb8 in Stanford Dependencies 3.3.0 standard.'''
CTB8_SD330_TEST = _CTB8_HOME + 'tasks/dep/test.conllx'
'''Test set for ctb8 in Stanford Dependencies 3.3.0 standard.'''
# Runs at import time: prepares the per-task CTB8 files (see _ctb_utils).
make_ctb(_CTB8_HOME)
|
{
"content_hash": "005a5ef4a7ac0f6ba530a72b0a03eba6",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 94,
"avg_line_length": 48.375,
"alnum_prop": 0.7266149870801034,
"repo_name": "hankcs/HanLP",
"id": "048e7b9241254f4e70ceab23cf50f3ede096972b",
"size": "2001",
"binary": false,
"copies": "1",
"ref": "refs/heads/doc-zh",
"path": "hanlp/datasets/parsing/ctb8.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "40933"
},
{
"name": "Jupyter Notebook",
"bytes": "566269"
},
{
"name": "Python",
"bytes": "2196905"
}
],
"symlink_target": ""
}
|
"""Writes a build_config file.
The build_config file for a target is a json file containing information about
how to build that target based on the target's dependencies. This includes
things like: the javac classpath, the list of android resources dependencies,
etc. It also includes the information needed to create the build_config for
other targets that depend on that one.
Android build scripts should not refer to the build_config directly, and the
build specification should instead pass information in using the special
file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
of values in a json dict in a file and looks like this:
--python-arg=@FileArg(build_config_path:javac:classpath)
Note: If paths to input files are passed in this way, it is important that:
1. inputs/deps of the action ensure that the files are available the first
time the action runs.
2. Either (a) or (b)
a. inputs/deps ensure that the action runs whenever one of the files changes
b. the files are added to the action's depfile
"""
import optparse
import os
import sys
import xml.dom.minidom
from util import build_utils
import write_ordered_libraries
class AndroidManifest(object):
  """Parsed AndroidManifest.xml exposing package and instrumentation info."""

  def __init__(self, path):
    self.path = path
    document = xml.dom.minidom.parse(path)
    manifest_nodes = document.getElementsByTagName('manifest')
    assert len(manifest_nodes) == 1
    self.manifest = manifest_nodes[0]

  def GetInstrumentation(self):
    """Returns the single <instrumentation> element, or None if absent."""
    elements = self.manifest.getElementsByTagName('instrumentation')
    if not elements:
      return None
    if len(elements) > 1:
      raise Exception(
          'More than one <instrumentation> element found in %s' % self.path)
    return elements[0]

  def CheckInstrumentation(self, expected_package):
    """Verifies the instrumentation targetPackage equals expected_package."""
    instrumentation = self.GetInstrumentation()
    if not instrumentation:
      raise Exception('No <instrumentation> elements found in %s' % self.path)
    instrumented_package = instrumentation.getAttributeNS(
        'http://schemas.android.com/apk/res/android', 'targetPackage')
    if instrumented_package != expected_package:
      raise Exception(
          'Wrong instrumented package. Expected %s, got %s'
          % (expected_package, instrumented_package))

  def GetPackageName(self):
    """Returns the manifest's package attribute."""
    return self.manifest.getAttribute('package')
# Cache of parsed build_config 'deps_info' sections, keyed by file path.
dep_config_cache = {}
def GetDepConfig(path):
  """Returns the (cached) 'deps_info' dict from the build_config at |path|."""
  # Idiom fix: `path not in` instead of `not path in`.
  if path not in dep_config_cache:
    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
  return dep_config_cache[path]
def DepsOfType(wanted_type, configs):
  """Returns the subset of dependency configs whose 'type' matches."""
  return [config for config in configs if config['type'] == wanted_type]
def GetAllDepsConfigsInOrder(deps_config_paths):
  """Returns all transitive dep config paths, topologically sorted."""
  def _DirectDeps(path):
    return set(GetDepConfig(path)['deps_configs'])
  return build_utils.GetSortedTransitiveDependencies(
      deps_config_paths, _DirectDeps)
class Deps(object):
  """Dependency info loaded from the build_config files of a target's deps.

  Holds both the direct deps' configs and the configs of the full transitive
  closure (in topological order).
  """
  def __init__(self, direct_deps_config_paths):
    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
        direct_deps_config_paths)
    self.direct_deps_configs = [
        GetDepConfig(p) for p in direct_deps_config_paths]
    self.all_deps_configs = [
        GetDepConfig(p) for p in self.all_deps_config_paths]

  def All(self, wanted_type=None):
    """Returns all transitive dep configs, optionally filtered by type.

    Bug fix: this previously tested `type is None` — the builtin `type`,
    which is never None — so All() with no argument filtered by
    wanted_type=None and always returned [].
    """
    if wanted_type is None:
      return self.all_deps_configs
    return DepsOfType(wanted_type, self.all_deps_configs)

  def Direct(self, wanted_type=None):
    """Returns direct dep configs, optionally filtered by type."""
    if wanted_type is None:
      return self.direct_deps_configs
    return DepsOfType(wanted_type, self.direct_deps_configs)

  def AllConfigPaths(self):
    """Returns all transitive dep config paths (topological order)."""
    return self.all_deps_config_paths
def main(argv):
  """Parses options, assembles the build_config dict and writes it as JSON.

  The config always contains a 'deps_info' section; additional sections
  ('javac', 'java', 'resources', 'final_dex', 'dist_jar', 'native') are
  added depending on --type.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--build-config', help='Path to build_config output.')
  parser.add_option(
      '--type',
      help='Type of this target (e.g. android_library).')
  parser.add_option(
      '--possible-deps-configs',
      help='List of paths for dependency\'s build_config files. Some '
      'dependencies may not write build_config files. Missing build_config '
      'files are handled differently based on the type of this target.')
  # android_resources options
  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
  parser.add_option('--r-text', help='Path to target\'s R.txt file.')
  parser.add_option('--package-name',
      help='Java package name for these resources.')
  parser.add_option('--android-manifest', help='Path to android manifest.')
  # java library options
  parser.add_option('--jar-path', help='Path to target\'s jar output.')
  parser.add_option('--supports-android', action='store_true',
      help='Whether this library supports running on the Android platform.')
  parser.add_option('--requires-android', action='store_true',
      help='Whether this library requires running on the Android platform.')
  parser.add_option('--bypass-platform-checks', action='store_true',
      help='Bypass checks for support/require Android platform.')
  # android library options
  parser.add_option('--dex-path', help='Path to target\'s dex output.')
  # native library options
  parser.add_option('--native-libs', help='List of top-level native libs.')
  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
  parser.add_option('--tested-apk-config',
      help='Path to the build config of the tested apk (for an instrumentation '
      'test apk).')
  options, args = parser.parse_args(argv)
  if args:
    parser.error('No positional arguments should be given.')
  # Validate the target type and the options that type requires.
  if not options.type in [
      'java_library', 'android_resources', 'android_apk', 'deps_dex']:
    raise Exception('Unknown type: <%s>' % options.type)
  required_options = ['build_config'] + {
      'java_library': ['jar_path'],
      'android_resources': ['resources_zip'],
      'android_apk': ['jar_path', 'dex_path', 'resources_zip'],
      'deps_dex': ['dex_path']
      }[options.type]
  if options.native_libs:
    required_options.append('readelf_path')
  build_utils.CheckOptions(options, parser, required_options)
  if options.type == 'java_library':
    if options.supports_android and not options.dex_path:
      raise Exception('java_library that supports Android requires a dex path.')
    if options.requires_android and not options.supports_android:
      raise Exception(
          '--supports-android is required when using --requires-android')
  # Resolve dependency build_config paths; only apks may reference deps that
  # did not write a build_config file ("unknown" deps).
  possible_deps_config_paths = build_utils.ParseGypList(
      options.possible_deps_configs)
  allow_unknown_deps = options.type == 'android_apk'
  unknown_deps = [
      c for c in possible_deps_config_paths if not os.path.exists(c)]
  if unknown_deps and not allow_unknown_deps:
    raise Exception('Unknown deps: ' + str(unknown_deps))
  direct_deps_config_paths = [
      c for c in possible_deps_config_paths if not c in unknown_deps]
  deps = Deps(direct_deps_config_paths)
  direct_library_deps = deps.Direct('java_library')
  all_library_deps = deps.All('java_library')
  direct_resources_deps = deps.Direct('android_resources')
  all_resources_deps = deps.All('android_resources')
  # Resources should be ordered with the highest-level dependency first so that
  # overrides are done correctly.
  all_resources_deps.reverse()
  if options.type == 'android_apk' and options.tested_apk_config:
    # An instrumentation test apk shares resources with the apk under test;
    # exclude those from this target's resource deps.
    tested_apk_deps = Deps([options.tested_apk_config])
    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
    all_resources_deps = [
        d for d in all_resources_deps if not d in tested_apk_resources_deps]
  # Initialize some common config.
  config = {
    'deps_info': {
      'name': os.path.basename(options.build_config),
      'path': options.build_config,
      'type': options.type,
      'deps_configs': direct_deps_config_paths,
    }
  }
  deps_info = config['deps_info']
  if options.type == 'java_library' and not options.bypass_platform_checks:
    deps_info['requires_android'] = options.requires_android
    deps_info['supports_android'] = options.supports_android
    # Fail when a dep's platform requirements are incompatible with ours.
    deps_require_android = (all_resources_deps +
        [d['name'] for d in all_library_deps if d['requires_android']])
    deps_not_support_android = (
        [d['name'] for d in all_library_deps if not d['supports_android']])
    if deps_require_android and not options.requires_android:
      raise Exception('Some deps require building for the Android platform: ' +
          str(deps_require_android))
    if deps_not_support_android and options.supports_android:
      raise Exception('Not all deps support the Android platform: ' +
          str(deps_not_support_android))
  if options.type in ['java_library', 'android_apk']:
    # Direct deps form the javac classpath; the transitive closure forms the
    # full (runtime) classpath.
    javac_classpath = [c['jar_path'] for c in direct_library_deps]
    java_full_classpath = [c['jar_path'] for c in all_library_deps]
    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
    deps_info['jar_path'] = options.jar_path
    if options.type == 'android_apk' or options.supports_android:
      deps_info['dex_path'] = options.dex_path
    config['javac'] = {
      'classpath': javac_classpath,
    }
    config['java'] = {
      'full_classpath': java_full_classpath
    }
  if options.type == 'java_library':
    # Only resources might have srcjars (normal srcjar targets are listed in
    # srcjar_deps). A resource's srcjar contains the R.java file for those
    # resources, and (like Android's default build system) we allow a library to
    # refer to the resources in any of its dependents.
    config['javac']['srcjars'] = [
        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
  if options.type == 'android_apk':
    # Apks will get their resources srcjar explicitly passed to the java step.
    config['javac']['srcjars'] = []
  if options.type == 'android_resources':
    deps_info['resources_zip'] = options.resources_zip
    if options.srcjar:
      deps_info['srcjar'] = options.srcjar
    if options.android_manifest:
      manifest = AndroidManifest(options.android_manifest)
      deps_info['package_name'] = manifest.GetPackageName()
    if options.package_name:
      # An explicit --package-name overrides the manifest's package.
      deps_info['package_name'] = options.package_name
    if options.r_text:
      deps_info['r_text'] = options.r_text
  if options.type == 'android_resources' or options.type == 'android_apk':
    config['resources'] = {}
    config['resources']['dependency_zips'] = [
        c['resources_zip'] for c in all_resources_deps]
    config['resources']['extra_package_names'] = []
    config['resources']['extra_r_text_files'] = []
  if options.type == 'android_apk':
    config['resources']['extra_package_names'] = [
        c['package_name'] for c in all_resources_deps if 'package_name' in c]
    config['resources']['extra_r_text_files'] = [
        c['r_text'] for c in all_resources_deps if 'r_text' in c]
  if options.type in ['android_apk', 'deps_dex']:
    deps_dex_files = [c['dex_path'] for c in all_library_deps]
    # An instrumentation test apk should exclude the dex files that are in the apk
    # under test.
    if options.type == 'android_apk' and options.tested_apk_config:
      tested_apk_deps = Deps([options.tested_apk_config])
      tested_apk_library_deps = tested_apk_deps.All('java_library')
      tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
      deps_dex_files = [
          p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
      tested_apk_config = GetDepConfig(options.tested_apk_config)
      expected_tested_package = tested_apk_config['package_name']
      AndroidManifest(options.android_manifest).CheckInstrumentation(
          expected_tested_package)
  # Dependencies for the final dex file of an apk or a 'deps_dex'.
  if options.type in ['android_apk', 'deps_dex']:
    config['final_dex'] = {}
    dex_config = config['final_dex']
    # TODO(cjhopman): proguard version
    dex_config['dependency_dex_files'] = deps_dex_files
  if options.type == 'android_apk':
    config['dist_jar'] = {
      'dependency_jars': [
        c['jar_path'] for c in all_library_deps
      ]
    }
    manifest = AndroidManifest(options.android_manifest)
    deps_info['package_name'] = manifest.GetPackageName()
    if not options.tested_apk_config and manifest.GetInstrumentation():
      # This must then have instrumentation only for itself.
      manifest.CheckInstrumentation(manifest.GetPackageName())
  library_paths = []
  java_libraries_list = []
  if options.native_libs:
    libraries = build_utils.ParseGypList(options.native_libs)
    if libraries:
      libraries_dir = os.path.dirname(libraries[0])
      write_ordered_libraries.SetReadelfPath(options.readelf_path)
      write_ordered_libraries.SetLibraryDirs([libraries_dir])
      all_native_library_deps = (
          write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
              libraries))
      # Create a java literal array with the "base" library names:
      # e.g. libfoo.so -> foo
      java_libraries_list = '{%s}' % ','.join(
          ['"%s"' % s[3:-3] for s in all_native_library_deps])
      # NOTE(review): Python 2 `map` — returns a list here, not an iterator.
      library_paths = map(
          write_ordered_libraries.FullLibraryPath, all_native_library_deps)
    config['native'] = {
      'libraries': library_paths,
      'java_libraries_list': java_libraries_list
    }
  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        deps.AllConfigPaths() + build_utils.GetPythonDependencies())
if __name__ == '__main__':
  # Exit with main's return code so build failures propagate to the caller.
  sys.exit(main(sys.argv[1:]))
|
{
"content_hash": "306d755964b4de2fa5d4e894ff6faa51",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 80,
"avg_line_length": 38.90571428571429,
"alnum_prop": 0.6868620107218918,
"repo_name": "chinmaygarde/mojo",
"id": "8507a95d0b982808d5ca08eb12a6a86aa0a55d73",
"size": "13804",
"binary": false,
"copies": "10",
"ref": "refs/heads/ios",
"path": "build/android/gyp/write_build_config.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1880713"
},
{
"name": "C++",
"bytes": "35838874"
},
{
"name": "Dart",
"bytes": "969667"
},
{
"name": "Go",
"bytes": "186519"
},
{
"name": "Groff",
"bytes": "29030"
},
{
"name": "HTML",
"bytes": "41854"
},
{
"name": "Java",
"bytes": "1274683"
},
{
"name": "JavaScript",
"bytes": "208100"
},
{
"name": "Makefile",
"bytes": "402"
},
{
"name": "Objective-C",
"bytes": "75638"
},
{
"name": "Objective-C++",
"bytes": "408801"
},
{
"name": "Protocol Buffer",
"bytes": "1048"
},
{
"name": "Python",
"bytes": "5645880"
},
{
"name": "Shell",
"bytes": "148167"
},
{
"name": "Yacc",
"bytes": "31141"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
}
|
"""
yappi.py
Yet Another Python Profiler
Sumer Cip 2014
"""
import os
import sys
import _yappi
import pickle
import threading
try:
from thread import get_ident # Python 2
except ImportError:
from threading import get_ident # Python 3
class YappiError(Exception): pass
# Public API of the yappi module.
__all__ = ['start', 'stop', 'get_func_stats', 'get_thread_stats', 'clear_stats', 'is_running',
    'get_clock_time', 'get_clock_type', 'set_clock_type', 'get_clock_info', 'get_mem_usage']
LINESEP = os.linesep
# Number of spaces written between output columns.
COLUMN_GAP = 2
# Pickle protocol used when saving stats in YSTAT format.
YPICKLE_PROTOCOL = 2
# Default column titles for function and thread stat tables.
COLUMNS_FUNCSTATS = ["name", "ncall", "ttot", "tsub", "tavg"]
COLUMNS_THREADSTATS = ["name", "id", "tid", "ttot", "scnt"]
# Map sort-key names (long and short forms) to the tuple index they sort by.
SORT_TYPES_FUNCSTATS = {"name":0, "callcount":3, "totaltime":6, "subtime":7, "avgtime":10,
    "ncall":3, "ttot":6, "tsub":7, "tavg":10}
SORT_TYPES_CHILDFUNCSTATS = {"name":10, "callcount":1, "totaltime":3, "subtime":4, "avgtime":5,
    "ncall":1, "ttot":3, "tsub":4, "tavg":5}
SORT_TYPES_THREADSTATS = {"name":0, "id":1, "tid":2, "totaltime":3, "schedcount":4,
    "ttot":3, "scnt":4}
SORT_ORDERS = {"ascending":0, "asc":0, "descending":1, "desc":1}
DEFAULT_SORT_TYPE = "totaltime"
DEFAULT_SORT_ORDER = "desc"
CLOCK_TYPES = {"WALL":0, "CPU":1}
def _validate_sorttype(sort_type, list):
sort_type = sort_type.lower()
if sort_type not in list:
raise YappiError("Invalid SortType parameter: '%s'" % (sort_type))
return sort_type
def _validate_sortorder(sort_order):
    """Lower-case *sort_order* and validate it against SORT_ORDERS."""
    normalized = sort_order.lower()
    if normalized not in SORT_ORDERS:
        raise YappiError("Invalid SortOrder parameter: '%s'" % (normalized))
    return normalized
def _validate_columns(name, list):
name = name.lower()
if name not in list:
raise YappiError("Invalid Column name: '%s'" % (name))
def _ctx_name_callback():
"""
We don't use threading.current_thread() becuase it will deadlock if
called when profiling threading._active_limbo_lock.acquire().
See: #Issue48.
"""
try:
current_thread = threading._active[get_ident()]
return current_thread.__class__.__name__
except KeyError:
# Threads may not be registered yet in first few profile callbacks.
return None
def _profile_thread_callback(frame, event, arg):
    """
    _profile_thread_callback will only be called once per-thread. _yappi will
    detect the new thread and change the profilefunc param of the ThreadState
    structure. This is an internal function, please don't mess with it.
    """
    # Forward the profiling event straight to the C extension.
    _yappi._profile_event(frame, event, arg)
def _fft(x, COL_SIZE=8):
"""
function to prettify time columns in stats.
"""
_rprecision = 6
while(_rprecision > 0):
_fmt = "%0." + "%d" % (_rprecision) + "f"
s = _fmt % (x)
if len(s) <= COL_SIZE:
break
_rprecision -= 1
return s
def _func_fullname(builtin, module, lineno, name):
if builtin:
return "%s.%s" % (module, name)
else:
return "%s:%d %s" % (module, lineno, name)
"""
Converts our internal yappi's YFuncStats (YSTAT type) to PSTAT. So there are
some differences between the statistics parameters. The PSTAT format is as follows:
PSTAT expects a dict entry of the following form:
stats[("mod_name", line_no, "func_name")] = \
( total_call_count, actual_call_count, total_time, cumulative_time,
{
("mod_name", line_no, "func_name") :
(total_call_count, --> total count caller called the callee
actual_call_count, --> total count caller called the callee - (recursive calls)
total_time, --> total time caller spent _only_ for this function (not further subcalls)
cumulative_time) --> total time caller spent for this function
} --> callers dict
)
Note that in PSTAT the total time spent in the function is called as cumulative_time and
the time spent _only_ in the function as total_time. From Yappi's perspective, this means:
total_time (inline time) = tsub
cumulative_time (total time) = ttot
Other than that we hold called functions in a profile entry as named 'children'. On the
other hand, PSTAT expects to have a dict of callers of the function. So we also need to
convert children to callers dict.
From Python Docs:
'''
With cProfile, each caller is preceded by three numbers:
the number of times this specific call was made, and the total
and cumulative times spent in the current function while it was
invoked by this specific caller.
'''
That means we only need to assign ChildFuncStat's ttot/tsub values to the caller
properly. Docs indicate that when b() is called by a() pstat holds the total time
of b() when called by a, just like yappi.
PSTAT only expects to have the above dict to be saved.
"""
def convert2pstats(stats):
    """
    Converts the internal stat type of yappi (which is returned by a call to
    YFuncStats.get()) as pstats object.
    """
    # Fix: the docstring above previously appeared *after* the import below,
    # so Python did not treat it as the function's docstring.
    from collections import defaultdict
    if not isinstance(stats, YFuncStats):
        raise YappiError("Source stats must be derived from YFuncStats.")
    import pstats

    class _PStatHolder:
        # Minimal object satisfying pstats.Stats: it only needs a `stats`
        # dict and a no-op create_stats().
        def __init__(self, d):
            self.stats = d
        def create_stats(self):
            pass

    def pstat_id(fs):
        return (fs.module, fs.lineno, fs.name)

    # convert yappi's callee ('children') relations to the callers dict
    # that the PSTAT format expects
    _callers = defaultdict(dict)
    for fs in stats:
        for ct in fs.children:
            _callers[ct][pstat_id(fs)] = (ct.ncall, ct.nactualcall, ct.tsub, ct.ttot)
    # populate the pstat dict.
    _pdict = {}
    for fs in stats:
        _pdict[pstat_id(fs)] = (fs.ncall, fs.nactualcall, fs.tsub, fs.ttot, _callers[fs],)
    return pstats.Stats(_PStatHolder(_pdict))
def profile(clock_type="cpu", profile_builtins=False, return_callback=None):
    """
    A profile decorator that can be used to profile a single call.

    We need to clear_stats() on entry/exit of the function unfortunately.
    As yappi is a per-interpreter resource, we cannot simply resume profiling
    session upon exit of the function, that is because we _may_ simply change
    start() params which may differ from the paused session that may cause
    unstable results. So, if you use a decorator, then global profiling may
    return bogus results or no results at all.
    """
    def _profile_dec(func):
        def wrapper(*args, **kwargs):
            # start a fresh profiling session on the outermost call only
            if func._rec_level == 0:
                clear_stats()
                set_clock_type(clock_type)
                start(profile_builtins, profile_threads=False)
            func._rec_level += 1
            try:
                return func(*args, **kwargs)
            finally:
                func._rec_level -= 1
                # only show profile information when recursion level of the
                # function becomes 0. Otherwise, we are in the middle of a
                # recursive call tree and not finished yet.
                if func._rec_level == 0:
                    try:
                        stop()
                        if return_callback is None:
                            sys.stdout.write(LINESEP)
                            sys.stdout.write("Executed in %s %s clock seconds" %
                                (_fft(get_thread_stats()[0].ttot), clock_type.upper()))
                            sys.stdout.write(LINESEP)
                            get_func_stats().print_all()
                        else:
                            return_callback(func, get_func_stats())
                    finally:
                        clear_stats()
        # per-function recursion depth counter used to detect the outermost call
        func._rec_level = 0
        return wrapper
    return _profile_dec
class StatString(object):
    """
    Pads or trims a single profiler-output column to a fixed width,
    marking truncated text with '..'.
    """
    _TRAIL_DOT = ".."
    _LEFT = 1
    _RIGHT = 2

    def __init__(self, s):
        self._s = str(s)

    def _trim(self, length, direction):
        """Trim to *length* keeping the left or right end, or pad with spaces."""
        if len(self._s) <= length:
            # short enough: pad on the right up to the requested width
            return self._s + (" " * (length - len(self._s)))
        if direction == self._LEFT:
            # keep the tail, replace the leading characters with '..'
            self._s = self._s[-length:]
            return self._TRAIL_DOT + self._s[len(self._TRAIL_DOT):]
        elif direction == self._RIGHT:
            # keep the head, replace the trailing characters with '..'
            self._s = self._s[:length]
            return self._s[:-len(self._TRAIL_DOT)] + self._TRAIL_DOT
        return self._s + (" " * (length - len(self._s)))

    def ltrim(self, length):
        """Fit into *length* columns, truncating on the left."""
        return self._trim(length, self._LEFT)

    def rtrim(self, length):
        """Fit into *length* columns, truncating on the right."""
        return self._trim(length, self._RIGHT)
class YStat(dict):
    """
    Class to hold a profile result line in a dict object, which all items can
    also be accessed as instance attributes where their attribute name is the
    given key. Mimicked NamedTuples.
    """
    # subclasses map attribute name -> index into the raw values tuple
    _KEYS = {}

    def __init__(self, values):
        super(YStat, self).__init__()
        for attr_name, value_index in self._KEYS.items():
            setattr(self, attr_name, values[value_index])

    def __setattr__(self, name, value):
        # keep the dict entry (keyed by column index) in sync with the attribute
        self[self._KEYS[name]] = value
        super(YStat, self).__setattr__(name, value)
class YFuncStat(YStat):
    """
    Class holding information for function stats.
    """
    # column layout of the raw tuple received from _yappi for a function entry
    _KEYS = {'name':0, 'module':1, 'lineno':2, 'ncall':3, 'nactualcall':4, 'builtin':5, 'ttot':6, 'tsub':7, 'index':8, 'children':9, 'ctx_id':10, 'tavg':11, 'full_name':12}
    def __eq__(self, other):
        # equality (and __hash__ below) is based solely on full_name
        if other is None:
            return False
        return self.full_name == other.full_name
    def __ne__(self, other):
        return not self == other
    def __add__(self, other):
        """Merge *other*'s counters into this stat in place; returns self."""
        # do not merge if merging the same instance
        if self is other:
            return self
        self.ncall += other.ncall
        self.nactualcall += other.nactualcall
        self.ttot += other.ttot
        self.tsub += other.tsub
        self.tavg = self.ttot / self.ncall
        for other_child_stat in other.children:
            # all children point to a valid entry, and we shall have merged previous entries by here.
            self.children.append(other_child_stat)
        return self
    def __hash__(self):
        return hash(self.full_name)
    def is_recursive(self):
        # we have a known bug where call_leave not called for some thread functions(run() especially)
        # in that case ncalls will be updated in call_enter, however nactualcall will not. This is for
        # checking that case.
        if self.nactualcall == 0:
            return False
        return self.ncall != self.nactualcall
    def strip_dirs(self):
        """Reduce module to its basename and rebuild full_name; returns self."""
        self.module = os.path.basename(self.module)
        self.full_name = _func_fullname(self.builtin, self.module, self.lineno,
            self.name)
        return self
    def _print(self, out, columns):
        """Write one formatted row to *out*; columns is {position: (title, width)}."""
        for x in sorted(columns.keys()):
            title, size = columns[x]
            if title == "name":
                out.write(StatString(self.full_name).ltrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "ncall":
                # recursive entries render as 'ncall/nactualcall'
                if self.is_recursive():
                    out.write(StatString("%d/%d" % (self.ncall,
                        self.nactualcall)).rtrim(size))
                else:
                    out.write(StatString(self.ncall).rtrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "tsub":
                out.write(StatString(_fft(self.tsub, size)).rtrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "ttot":
                out.write(StatString(_fft(self.ttot, size)).rtrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "tavg":
                out.write(StatString(_fft(self.tavg, size)).rtrim(size))
        out.write(LINESEP)
class YChildFuncStat(YFuncStat):
    """
    Class holding information for children function stats.
    """
    # column layout of the raw child tuple (differs from YFuncStat's layout)
    _KEYS = {'index':0, 'ncall':1, 'nactualcall':2, 'ttot':3, 'tsub':4, 'tavg':5, 'builtin':6, 'full_name':7, 'module':8, 'lineno':9, 'name':10}

    def __add__(self, other):
        """Merge *other*'s counters into this child stat in place; returns self."""
        if other is None:
            return self
        for counter in ('nactualcall', 'ncall', 'ttot', 'tsub'):
            setattr(self, counter, getattr(self, counter) + getattr(other, counter))
        self.tavg = self.ttot / self.ncall
        return self
class YThreadStat(YStat):
    """
    Class holding information for thread stats.
    """
    # column layout of the raw tuple received from _yappi for a thread entry
    _KEYS = {'name':0, 'id':1, 'tid':2, 'ttot':3,'sched_count':4,}
    def __eq__(self, other):
        # equality (and __hash__ below) is based solely on the `id` field
        if other is None:
            return False
        return self.id == other.id
    def __ne__(self, other):
        return not self == other
    def __hash__(self, *args, **kwargs):
        return hash(self.id)
    def _print(self, out, columns):
        """Write one formatted row to *out*; columns is {position: (title, width)}."""
        for x in sorted(columns.keys()):
            title, size = columns[x]
            if title == "name":
                out.write(StatString(self.name).ltrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "id":
                out.write(StatString(self.id).rtrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "tid":
                out.write(StatString(self.tid).rtrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "ttot":
                out.write(StatString(_fft(self.ttot, size)).rtrim(size))
                out.write(" " * COLUMN_GAP)
            elif title == "scnt":
                out.write(StatString(self.sched_count).rtrim(size))
        out.write(LINESEP)
class YStats(object):
    """
    Main Stats class where we collect the information from _yappi and apply
    the user filters.
    """
    def __init__(self):
        self._clock_type = None
        # the same stats are kept both as an ordered list and as a dict
        # (keyed by the stat itself) for O(1) duplicate lookup in append()
        self._as_dict = {}
        self._as_list = []

    def get(self):
        """Record the current clock type and apply the default sort; returns self."""
        self._clock_type = _yappi.get_clock_type()
        self.sort(DEFAULT_SORT_TYPE, DEFAULT_SORT_ORDER)
        return self

    def sort(self, sort_type, sort_order):
        """Sort in place by the given column index and order; returns self."""
        descending = (sort_order == SORT_ORDERS["desc"])
        self._as_list.sort(key=lambda stat: stat[sort_type], reverse=descending)
        return self

    def clear(self):
        self._as_dict.clear()
        del self._as_list[:]

    def empty(self):
        return not self._as_list

    def __getitem__(self, key):
        try:
            return self._as_list[key]
        except IndexError:
            return None

    def count(self, item):
        return self._as_list.count(item)

    def __iter__(self):
        return iter(self._as_list)

    def __len__(self):
        return len(self._as_list)

    def pop(self):
        last_item = self._as_list.pop()
        del self._as_dict[last_item]
        return last_item

    def append(self, item):
        # increment/update the stat if we already have it
        existing = self._as_dict.get(item)
        if existing:
            existing += item
        else:
            self._as_list.append(item)
            self._as_dict[item] = item

    def _print_header(self, out, columns):
        """Write the title row; columns is {position: (title, width)}."""
        for position in sorted(columns.keys()):
            title, size = columns[position]
            if len(title) > size:
                raise YappiError("Column title exceeds available length[%s:%d]" % \
                    (title, size))
            out.write(title)
            out.write(" " * (COLUMN_GAP + size - len(title)))
        out.write(LINESEP)

    def _debug_check_sanity(self):
        """
        Check for basic sanity errors in stats. e.g: Check for duplicate stats.
        """
        return all(self.count(stat) == 1 for stat in self)
class YStatsIndexable(YStats):
    """YStats that can additionally be looked up by stat index or full name."""

    def __init__(self):
        super(YStatsIndexable, self).__init__()
        # secondary index: maps both item.index and item.full_name to the item
        self._additional_indexing = {}

    def clear(self):
        super(YStatsIndexable, self).clear()
        self._additional_indexing.clear()

    def pop(self):
        item = super(YStatsIndexable, self).pop()
        # drop both secondary keys for the removed stat
        for secondary_key in (item.index, item.full_name):
            self._additional_indexing.pop(secondary_key, None)
        return item

    def append(self, item):
        super(YStatsIndexable, self).append(item)
        # setdefault so that we don't replace them if they're already there.
        for secondary_key in (item.index, item.full_name):
            self._additional_indexing.setdefault(secondary_key, item)

    def __getitem__(self, key):
        if isinstance(key, (int, str)):
            # search by item.index or by item.full_name
            return self._additional_indexing.get(key, None)
        if isinstance(key, (YFuncStat, YChildFuncStat)):
            return self._additional_indexing.get(key.index, None)
        return super(YStatsIndexable, self).__getitem__(key)
class YChildFuncStats(YStatsIndexable):
    """Collection of YChildFuncStat entries for one function's callees."""

    def sort(self, sort_type, sort_order="desc"):
        """Sort by a named column ('ttot', 'ncall', ...); returns self."""
        valid_type = _validate_sorttype(sort_type, SORT_TYPES_CHILDFUNCSTATS)
        valid_order = _validate_sortorder(sort_order)
        return super(YChildFuncStats, self).sort(
            SORT_TYPES_CHILDFUNCSTATS[valid_type], SORT_ORDERS[valid_order])

    def print_all(self, out=sys.stdout, columns= {0:("name",36), 1:("ncall", 5),
                  2:("tsub", 8), 3: ("ttot", 8), 4:("tavg",8)}):
        """
        Prints all of the child function profiler results to a given file. (stdout by default)
        """
        if self.empty() or len(columns) == 0:
            return
        for _, column_spec in columns.items():
            _validate_columns(column_spec[0], COLUMNS_FUNCSTATS)
        out.write(LINESEP)
        self._print_header(out, columns)
        for stat in self:
            stat._print(out, columns)

    def strip_dirs(self):
        """Strip directory prefixes from every contained stat; returns self."""
        for stat in self:
            stat.strip_dirs()
        return self
class YFuncStats(YStatsIndexable):
    """Collection of per-function profile stats; can merge stats loaded from files."""
    # highest stat index seen; used to assign unique indexes when merging
    _idx_max = 0
    _sort_type = None
    _sort_order = None
    # formats accepted by load/save paths (see _add_from_* / _save_as_* methods)
    _SUPPORTED_LOAD_FORMATS = ['YSTAT']
    _SUPPORTED_SAVE_FORMATS = ['YSTAT', 'CALLGRIND', 'PSTAT']
    # NOTE(review): mutable default argument `files=[]` — safe only if add()
    # never mutates its argument; confirm before relying on it.
    def __init__(self, files=[]):
        super(YFuncStats, self).__init__()
        self.add(files)
def strip_dirs(self):
for stat in self:
stat.strip_dirs()
stat.children.strip_dirs()
return self
    def get(self, filter=None):
        """
        Refresh this collection from the live _yappi profiler state.

        Profiling is paused during enumeration and resumed in the `finally`
        block even if enumeration fails. *filter* is an attribute->value dict
        applied in _enumerator.
        """
        _yappi._pause()
        self.clear()
        try:
            self._filter = filter
            _yappi.enum_func_stats(self._enumerator)
            self._filter = None
            # convert the children info from tuple to YChildFuncStat
            for stat in self:
                _childs = YChildFuncStats()
                for child_tpl in stat.children:
                    rstat = self[child_tpl[0]]
                    # sometimes the profile results do not contain the child entry
                    # because of filtering or timing (call_leave called but call_enter
                    # is not); this ensures each child index points to a valid stat.
                    if rstat is None:
                        continue
                    tavg = rstat.ttot / rstat.ncall
                    cfstat = YChildFuncStat(child_tpl+(tavg, rstat.builtin, rstat.full_name, rstat.module,
                        rstat.lineno, rstat.name,))
                    _childs.append(cfstat)
                stat.children = _childs
            result = super(YFuncStats, self).get()
        finally:
            _yappi._resume()
        return result
def _enumerator(self, stat_entry):
fname, fmodule, flineno, fncall, fnactualcall, fbuiltin, fttot, ftsub, \
findex, fchildren, fctxid = stat_entry
# builtin function?
ffull_name = _func_fullname(bool(fbuiltin), fmodule, flineno, fname)
ftavg = fttot / fncall
fstat = YFuncStat(stat_entry + (ftavg, ffull_name))
# do not show profile stats of yappi itself.
if os.path.basename(fstat.module) == "yappi.py" or fstat.module == "_yappi":
return
fstat.builtin = bool(fstat.builtin)
if self._filter:
for k,v in self._filter.items():
if getattr(fstat, k) != v:
return
self.append(fstat)
# hold the max idx number for merging new entries(for making the merging entries indexes unique)
if self._idx_max < fstat.index:
self._idx_max = fstat.index
def _add_from_YSTAT(self, file):
try:
saved_stats, saved_clock_type = pickle.load(file)
except:
raise YappiError("Unable to load the saved profile information from %s." % (file.name))
# check if we really have some stats to be merged?
if not self.empty():
if self._clock_type != saved_clock_type and self._clock_type is not None:
raise YappiError("Clock type mismatch between current and saved profiler sessions.[%s,%s]" % \
(self._clock_type, saved_clock_type))
self._clock_type = saved_clock_type
# add 'not present' previous entries with unique indexes
for saved_stat in saved_stats:
if saved_stat not in self:
self._idx_max += 1
saved_stat.index = self._idx_max
self.append(saved_stat)
# fix children's index values
for saved_stat in saved_stats:
for saved_child_stat in saved_stat.children:
# we know for sure child's index is pointing to a valid stat in saved_stats
# so as saved_stat is already in sync. (in above loop), we can safely assume
# that we shall point to a valid stat in current_stats with the child's full_name
saved_child_stat.index = self[saved_child_stat.full_name].index
# merge stats
for saved_stat in saved_stats:
saved_stat_in_curr = self[saved_stat.full_name]
saved_stat_in_curr += saved_stat
def _save_as_YSTAT(self, path):
with open(path, "wb") as f:
pickle.dump((self, self._clock_type), f, YPICKLE_PROTOCOL)
def _save_as_PSTAT(self, path):
"""
Save the profiling information as PSTAT.
"""
_stats = convert2pstats(self)
_stats.dump_stats(path)
def _save_as_CALLGRIND(self, path):
"""
Writes all the function stats in a callgrind-style format to the given
file. (stdout by default)
"""
header = """version: 1\ncreator: %s\npid: %d\ncmd: %s\npart: 1\n\nevents: Ticks""" % \
('yappi', os.getpid(), ' '.join(sys.argv))
lines = [header]
# add function definitions
file_ids = ['']
func_ids = ['']
for func_stat in self:
file_ids += [ 'fl=(%d) %s' % (func_stat.index, func_stat.module) ]
func_ids += [ 'fn=(%d) %s %s:%s' % (func_stat.index, func_stat.name, func_stat.module, func_stat.lineno) ]
lines += file_ids + func_ids
# add stats for each function we have a record of
for func_stat in self:
func_stats = [ '',
'fl=(%d)' % func_stat.index,
'fn=(%d)' % func_stat.index]
func_stats += [ '%s %s' % (func_stat.lineno, int(func_stat.tsub * 1e6)) ]
# children functions stats
for child in func_stat.children:
func_stats += [ 'cfl=(%d)' % child.index,
'cfn=(%d)' % child.index,
'calls=%d 0' % child.ncall,
'0 %d' % int(child.ttot * 1e6)
]
lines += func_stats
with open(path, "w") as f:
f.write('\n'.join(lines))
def add(self, files, type="ystat"):
type = type.upper()
if type not in self._SUPPORTED_LOAD_FORMATS:
raise NotImplementedError('Loading from (%s) format is not possible currently.')
if isinstance(files, str):
files = [files, ]
for fd in files:
with open(fd, "rb") as f:
add_func = getattr(self, "_add_from_%s" % (type))
add_func(file=f)
return self.sort(DEFAULT_SORT_TYPE, DEFAULT_SORT_ORDER)
def save(self, path, type="ystat"):
type = type.upper()
if type not in self._SUPPORTED_SAVE_FORMATS:
raise NotImplementedError('Saving in "%s" format is not possible currently.' % (type))
save_func = getattr(self, "_save_as_%s" % (type))
save_func(path=path)
def print_all(self, out=sys.stdout, columns={0:("name",36), 1:("ncall", 5),
2:("tsub", 8), 3:("ttot", 8), 4:("tavg",8)}):
"""
Prints all of the function profiler results to a given file. (stdout by default)
"""
if self.empty():
return
for _, col in columns.items():
_validate_columns(col[0], COLUMNS_FUNCSTATS)
out.write(LINESEP)
out.write("Clock type: %s" % (self._clock_type.upper()))
out.write(LINESEP)
out.write("Ordered by: %s, %s" % (self._sort_type, self._sort_order))
out.write(LINESEP)
out.write(LINESEP)
self._print_header(out, columns)
for stat in self:
stat._print(out, columns)
def sort(self, sort_type, sort_order="desc"):
sort_type = _validate_sorttype(sort_type, SORT_TYPES_FUNCSTATS)
sort_order = _validate_sortorder(sort_order)
self._sort_type = sort_type
self._sort_order = sort_order
return super(YFuncStats, self).sort(SORT_TYPES_FUNCSTATS[sort_type], SORT_ORDERS[sort_order])
def debug_print(self):
if self.empty():
return
console = sys.stdout
CHILD_STATS_LEFT_MARGIN = 5
for stat in self:
console.write("index: %d" % stat.index)
console.write(LINESEP)
console.write("full_name: %s" % stat.full_name)
console.write(LINESEP)
console.write("ncall: %d/%d" % (stat.ncall, stat.nactualcall))
console.write(LINESEP)
console.write("ttot: %s" % _fft(stat.ttot))
console.write(LINESEP)
console.write("tsub: %s" % _fft(stat.tsub))
console.write(LINESEP)
console.write("children: ")
console.write(LINESEP)
for child_stat in stat.children:
console.write(LINESEP)
console.write(" " * CHILD_STATS_LEFT_MARGIN)
console.write("index: %d" % child_stat.index)
console.write(LINESEP)
console.write(" " * CHILD_STATS_LEFT_MARGIN)
console.write("child_full_name: %s" % child_stat.full_name)
console.write(LINESEP)
console.write(" " * CHILD_STATS_LEFT_MARGIN)
console.write("ncall: %d/%d" % (child_stat.ncall, child_stat.nactualcall))
console.write(LINESEP)
console.write(" " * CHILD_STATS_LEFT_MARGIN)
console.write("ttot: %s" % _fft(child_stat.ttot))
console.write(LINESEP)
console.write(" " * CHILD_STATS_LEFT_MARGIN)
console.write("tsub: %s" % _fft(child_stat.tsub))
console.write(LINESEP)
console.write(LINESEP)
class YThreadStats(YStats):
    """Collection of per-thread profiler results (YThreadStat entries)."""
    def get(self):
        """Refresh this collection from the running profiler and return it."""
        _yappi._pause()
        self.clear()
        try:
            _yappi.enum_thread_stats(self._enumerator)
            snapshot = super(YThreadStats, self).get()
        finally:
            _yappi._resume()
        return snapshot
    def _enumerator(self, stat_entry):
        """Callback for _yappi.enum_thread_stats: wrap and store one entry."""
        self.append(YThreadStat(stat_entry))
    def sort(self, sort_type, sort_order="desc"):
        """Sort in place by a thread-stat column name; delegates to the base class."""
        valid_type = _validate_sorttype(sort_type, SORT_TYPES_THREADSTATS)
        valid_order = _validate_sortorder(sort_order)
        return super(YThreadStats, self).sort(
            SORT_TYPES_THREADSTATS[valid_type], SORT_ORDERS[valid_order])
    def print_all(self, out=sys.stdout, columns={0:("name",13), 1:("id", 5),
                                                 2:("tid", 15), 3:("ttot", 8), 4:("scnt", 10)}):
        """
        Prints all of the thread profiler results to a given file. (stdout by default)
        """
        if not self.empty():
            for _, column_spec in columns.items():
                _validate_columns(column_spec[0], COLUMNS_THREADSTATS)
            out.write(LINESEP)
            self._print_header(out, columns)
            for thread_stat in self:
                thread_stat._print(out, columns)
    def strip_dirs(self):
        pass  # do nothing: thread stats carry no module paths to strip
def is_running():
    """
    Returns true if the profiler is running, false otherwise.

    Thin wrapper over the C extension; the result is coerced to a real bool.
    """
    return bool(_yappi.is_running())
def start(builtins=False, profile_threads=True):
    """
    Start profiler.

    Args:
        builtins (bool): also profile built-in functions when True.
        profile_threads (bool): when True, install a thread profile hook so
            threads started after this call are profiled too. The hook must be
            installed before _yappi.start() so no thread is missed.
    """
    if profile_threads:
        threading.setprofile(_profile_thread_callback)
    _yappi.start(builtins, profile_threads)
def get_func_stats(filter=None):
    """
    Gets the function profiler results with given filters and returns an iterable.
    """
    # multiple invocation pause/resume is allowed: YFuncStats.get() below
    # pauses/resumes again internally.
    _yappi._pause()
    try:
        func_stats = YFuncStats().get(filter=filter)
    finally:
        _yappi._resume()
    return func_stats
def get_thread_stats():
    """
    Gets the thread profiler results with given filters and returns an iterable.
    """
    # Nested pause/resume is allowed; YThreadStats.get() pauses again internally.
    _yappi._pause()
    try:
        thread_stats = YThreadStats().get()
    finally:
        _yappi._resume()
    return thread_stats
def stop():
    """
    Stop profiler.

    Also uninstalls the thread profile hook set by start(profile_threads=True).
    """
    _yappi.stop()
    threading.setprofile(None)
def clear_stats():
    """
    Clears all of the profile results.

    The profiler is paused while clearing so an in-flight enumeration cannot
    observe a half-cleared state.
    """
    _yappi._pause()
    try:
        _yappi.clear_stats()
    finally:
        _yappi._resume()
def get_clock_time():
    """
    Returns the current clock time with regard to current clock type.

    Thin wrapper over the C extension.
    """
    return _yappi.get_clock_time()
def get_clock_type():
    """
    Returns the underlying clock type.

    Thin wrapper over the C extension.
    """
    return _yappi.get_clock_type()
def get_clock_info():
    """
    Returns a dict containing the OS API used for timing, the precision of the
    underlying clock.

    Thin wrapper over the C extension.
    """
    return _yappi.get_clock_info()
def set_clock_type(type):
    """
    Sets the internal clock type for timing. Profiler shall not have any previous stats.
    Otherwise an exception is thrown.
    """
    clock_key = type.upper()
    if clock_key not in CLOCK_TYPES:
        raise YappiError("Invalid clock type:%s" % (clock_key))
    _yappi.set_clock_type(CLOCK_TYPES[clock_key])
def shift_context_time(context_id, amount):
    """
    Adjust a context's start time, and the time of all functions currently on
    the context's stack. 'amount' is in the same units as get_clock_type(). A
    negative 'amount' increases the 'ttot' statistic for this context and all
    functions on the stack, and a positive 'amount' decreases 'ttot'.
    """
    # Thin wrapper over the C extension.
    _yappi.shift_context_time(context_id, amount)
def get_mem_usage():
    """
    Returns the internal memory usage of the profiler itself.

    Thin wrapper over the C extension.
    """
    return _yappi.get_mem_usage()
def set_context_id_callback(callback):
    """
    Use a number other than thread_id to determine the current context.
    The callback must take no arguments and return an integer. For example:
    >>> import greenlet, yappi
    >>> yappi.set_context_id_callback(lambda: id(greenlet.getcurrent()))
    """
    return _yappi.set_context_id_callback(callback)
def set_context_name_callback(callback):
    """
    Set the callback to retrieve current context's name.
    The callback must take no arguments and return a string. For example:
    >>> import greenlet, yappi
    >>> yappi.set_context_name_callback(
    ...     lambda: greenlet.getcurrent().__class__.__name__)
    If the callback cannot return the name at this time but may be able to
    return it later, it should return None.

    Passing None restores the module's default callback.
    """
    effective_callback = _ctx_name_callback if callback is None else callback
    return _yappi.set_context_name_callback(effective_callback)
# Install the default context-name callback at import time so context names
# resolve without any explicit user setup.
set_context_name_callback(None)
def main():
    """Command-line entry point: profile a script and print or save the results.

    Usage mirrors cProfile: options for yappi come first, everything after the
    script filename is passed to the profiled script untouched.
    """
    from optparse import OptionParser
    usage = "yappi.py [-b] [-o output_file] [-f output_format] [-s] [scriptfile] args ..."
    parser = OptionParser(usage=usage)
    # Stop option parsing at the first positional arg so the profiled script's
    # own flags are not consumed by this parser.
    parser.allow_interspersed_args = False
    parser.add_option("-b", "--builtins",
                      action="store_true", dest="profile_builtins", default=False,
                      help="Profiles builtin functions when set. [default: False]")
    parser.add_option("-o", "--output-file", metavar="output_file",
                      help="Write stats to output_file.")
    parser.add_option("-f", "--output-format", default="pstat",
                      choices=("pstat", "callgrind", "ystat"),
                      metavar="output_format", help="Write stats in the specified"
                      "format (\"pstat\", \"callgrind\" or \"ystat\", default is "
                      "\"pstat\").")
    parser.add_option("-s", "--single_thread",
                      action="store_true", dest="profile_single_thread", default=False,
                      help="Profiles only the thread that calls start(). [default: False]")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    (options, args) = parser.parse_args()
    # Make the profiled script see itself as the program: argv[0] is the script.
    sys.argv[:] = args
    if (len(sys.argv) > 0):
        sys.path.insert(0, os.path.dirname(sys.argv[0]))
        start(options.profile_builtins, not options.profile_single_thread)
        try:
            # Execute the target script in the caller's frame globals/locals so
            # it behaves as if run directly (same trick cProfile uses).
            if sys.version_info >= (3, 0):
                exec(compile(open(sys.argv[0]).read(), sys.argv[0], 'exec'),
                     sys._getframe(1).f_globals, sys._getframe(1).f_locals)
            else:
                execfile(sys.argv[0], sys._getframe(1).f_globals, sys._getframe(1).f_locals)
        finally:
            stop()
        if options.output_file:
            stats = get_func_stats()
            stats.save(options.output_file, options.output_format)
        else:
            # we will currently use default params for these
            get_func_stats().print_all()
            get_thread_stats().print_all()
    else:
        parser.print_usage()
if __name__ == "__main__":
    main()
|
{
"content_hash": "46b245015c51fde46c709ad87c2f6a50",
"timestamp": "",
"source": "github",
"line_count": 988,
"max_line_length": 172,
"avg_line_length": 34.75506072874494,
"alnum_prop": 0.5729512493447493,
"repo_name": "yousong/yappi",
"id": "6176a23df84fcd3de5037b552435c65dba5ae95f",
"size": "34338",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yappi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "55808"
},
{
"name": "Python",
"bytes": "97109"
}
],
"symlink_target": ""
}
|
import logging
import logging.config
import threading
import time
import math
from ambercommon.common import runtime
import os
from amberdriver.tools import config
import collision_avoidance_logic as logic
__author__ = 'paoolo'

# Configure logging and load driver parameters from the ini file that sits
# next to this module.
pwd = os.path.dirname(os.path.abspath(__file__))
logging.config.fileConfig('%s/collision_avoidance.ini' % pwd)
config.add_config_ini('%s/collision_avoidance.ini' % pwd)

LOGGER_NAME = 'CollisionAvoidance'

# Tunables read once at import time. Units are not stated in this file —
# presumably millimeters for distances and the roboclaw's native speed units
# for speeds; TODO confirm against the config and the roboclaw driver.
ROBO_WIDTH = float(config.ROBO_WIDTH)
MAX_SPEED = float(config.MAX_SPEED)
MAX_ROTATING_SPEED = float(config.MAX_ROTATING_SPEED)
SOFT_LIMIT = float(config.SOFT_LIMIT)
HARD_LIMIT = float(config.HARD_LIMIT)
SCANNER_DIST_OFFSET = float(config.SCANNER_DIST_OFFSET)
ANGLE_RANGE = float(config.ANGLE_RANGE)
DISTANCE_ALPHA = float(config.DISTANCE_ALPHA)
RODEO_SWAP_ALPHA = float(config.RODEO_SWAP_ALPHA)
def bound_sleep_interval(value, min_value=0.2, max_value=2.0):
    """Clamp value into the closed interval [min_value, max_value].

    Bugfix: the original chained-conditional form returned min_value when
    value was exactly equal to max_value (both `min < value < max` and
    `value > max` were False), producing a 0.2s sleep where 2.0s was meant.
    A plain clamp handles the boundaries correctly.
    """
    return min(max(value, min_value), max_value)
class CollisionAvoidance(object):
def __init__(self, roboclaw_proxy, hokuyo_proxy):
self.__roboclaw_proxy = roboclaw_proxy
self.__hokuyo_proxy = hokuyo_proxy
self.__scan = []
self.__scan_timestamp = 0.0
self.__scanning_lock = threading.Condition()
self.__driving_speed = (0, 0, 0, 0)
self.__driving_speed_timestamp = 0.0
self.__driving_lock = threading.Condition()
self.__is_active = True
self.__wait_for_data_lock = threading.Condition()
self.__logger = logging.getLogger(LOGGER_NAME)
runtime.add_shutdown_hook(self.terminate)
def set_speed(self, front_left, front_right, rear_left, rear_right):
try:
self.__driving_lock.acquire()
self.__driving_speed = front_left, front_right, rear_left, rear_right
self.__driving_speed_timestamp = time.time()
self.__notify()
finally:
self.__driving_lock.release()
def stop(self):
self.set_speed(0, 0, 0, 0)
def get_scan(self):
try:
self.__scanning_lock.acquire()
return self.__scan
finally:
self.__scanning_lock.release()
def scanning_loop(self):
sleep_interval = 0.2
last_scan_timestamp = 0.0
while self.__is_active:
scan = self.__hokuyo_proxy.get_single_scan()
scan.wait_available(sleep_interval * 1.1)
if scan.is_available():
try:
self.__scanning_lock.acquire()
self.__scan = scan.get_points()
self.__scan_timestamp = scan.get_timestamp()
current_scan_timestamp = scan.get_timestamp()
self.__notify()
finally:
self.__scanning_lock.release()
scan_interval = current_scan_timestamp - last_scan_timestamp
last_scan_timestamp = current_scan_timestamp
if scan_interval < 2.0:
sleep_interval += 0.5 * (scan_interval - sleep_interval)
sleep_interval = bound_sleep_interval(sleep_interval)
time.sleep(sleep_interval)
def driving_loop(self):
wait_timeout = 0.2
last_scan_timestamp = 0.0
last_command_timestamp = 0.0
last_left, last_right = 0.0, 0.0
while self.__is_active:
self.__wait(wait_timeout * 1.1)
try:
self.__driving_lock.acquire()
front_left, front_right, rear_left, rear_right = self.__driving_speed
current_command_timestamp = self.__driving_speed_timestamp
finally:
self.__driving_lock.release()
try:
self.__scanning_lock.acquire()
scan = self.__scan
current_scan_timestamp = self.__scan_timestamp
finally:
self.__scanning_lock.release()
if current_scan_timestamp > last_scan_timestamp or current_command_timestamp > last_command_timestamp:
left = sum([front_left, rear_left]) / 2.0
right = sum([front_right, rear_right]) / 2.0
left, right = CollisionAvoidance.rodeo_swap(left, right, scan)
left, right = CollisionAvoidance.limit_due_to_reverse_direction(left, right)
left, right = CollisionAvoidance.limit_due_to_distance(left, right, scan)
left, right = CollisionAvoidance.low_pass_filter(left, right)
left, right = CollisionAvoidance.limit_to_max_speed(left, right)
else:
left, right = last_left, last_right
current_timestamp = time.time()
trust_level = CollisionAvoidance.scan_trust(current_scan_timestamp, current_timestamp) * \
CollisionAvoidance.command_trust(current_command_timestamp, current_timestamp)
left *= trust_level
right *= trust_level
left, right = int(left), int(right)
self.__roboclaw_proxy.send_motors_command(left, right, left, right)
last_left, last_right = left, right
command_interval = current_command_timestamp - last_command_timestamp
last_command_timestamp = current_command_timestamp
scan_interval = current_scan_timestamp - last_scan_timestamp
last_scan_timestamp = current_command_timestamp
min_interval = min(command_interval, scan_interval)
if min_interval < 2.0:
wait_timeout += 0.5 * (min_interval - wait_timeout)
wait_timeout = bound_sleep_interval(wait_timeout)
def terminate(self):
self.stop()
self.__is_active = False
@staticmethod
def limit_due_to_distance(left, right, scan):
if left > 0 or right > 0:
current_angle = logic.get_angle(left, right, ROBO_WIDTH)
current_speed = logic.get_speed(left, right)
if scan is not None:
min_distance, _ = logic.get_min_distance(scan, current_angle,
SCANNER_DIST_OFFSET, ANGLE_RANGE)
if min_distance is not None:
soft_limit = logic.get_soft_limit(current_speed, MAX_SPEED,
SOFT_LIMIT * 1.3, HARD_LIMIT * 1.3, DISTANCE_ALPHA)
if HARD_LIMIT * 1.3 < min_distance < soft_limit:
max_speed = logic.get_max_speed(min_distance, soft_limit, HARD_LIMIT * 1.3, MAX_SPEED)
if current_speed > max_speed:
left, right = CollisionAvoidance.__calculate_new_left_right(left, right,
max_speed, current_speed)
elif min_distance <= HARD_LIMIT * 1.3:
left, right = 0, 0
else:
print 'distance: no scan!'
left, right = 0.0, 0.0
return left, right
@staticmethod
def __calculate_new_left_right(left, right, max_speed, current_speed):
if current_speed > 0:
divide = max_speed / current_speed
return left * divide, right * divide
else:
return left, right
@staticmethod
def limit_to_max_speed(left, right):
left = CollisionAvoidance.__limit_to_max_speed(left)
right = CollisionAvoidance.__limit_to_max_speed(right)
return left, right
@staticmethod
def __limit_to_max_speed(value):
max_speed = MAX_SPEED
return max_speed if value > max_speed \
else -max_speed if value < -max_speed \
else value
@staticmethod
def limit_due_to_reverse_direction(left, right):
max_speed = MAX_SPEED
if (left + right) / 2.0 < 0:
if left < 0 and right < 0:
left = left if left > -max_speed else -max_speed
right = right if right > -max_speed else -max_speed
elif left < 0 < right:
right = right if right < max_speed else max_speed
left = -right
elif left > 0 > right:
left = left if left < max_speed else max_speed
right = -left
return left, right
@staticmethod
def rodeo_swap(left, right, scan):
current_angle = logic.get_angle(left, right, ROBO_WIDTH)
current_speed = logic.get_speed(left, right)
min_distance, min_distance_angle = logic.get_min_distance(scan, current_angle,
SCANNER_DIST_OFFSET, ANGLE_RANGE)
if min_distance is not None:
soft_limit = logic.get_soft_limit(current_speed, MAX_SPEED,
SOFT_LIMIT, HARD_LIMIT, RODEO_SWAP_ALPHA)
if min_distance < soft_limit:
if min_distance_angle < current_angle:
if left > 0:
left = left if left < MAX_ROTATING_SPEED else MAX_ROTATING_SPEED
right = -left
else:
if right > 0:
_t = left
left = right
right = _t
else:
if right > 0:
right = right if right < MAX_ROTATING_SPEED else MAX_ROTATING_SPEED
left = -right
else:
if left > 0:
_t = right
right = left
left = _t
elif min_distance < soft_limit * 0.4:
left = -left
right = -right
return left, right
@staticmethod
def low_pass_filter(left, right):
# TODO implement low pass filter
return left, right
@staticmethod
def scan_trust(scan_timestamp, current_timestamp):
val = scan_timestamp / 1000.0 - current_timestamp
return math.pow(4.0 / 3.0, val)
@staticmethod
def command_trust(command_timestamp, current_timestamp):
val = command_timestamp - current_timestamp
return math.pow(4.0 / 3.0, val)
def __notify(self):
self.__wait_for_data_lock.acquire()
try:
self.__wait_for_data_lock.notify_all()
finally:
self.__wait_for_data_lock.release()
def __wait(self, wait_timeout):
self.__wait_for_data_lock.acquire()
try:
self.__wait_for_data_lock.wait(wait_timeout)
finally:
self.__wait_for_data_lock.release()
|
{
"content_hash": "c37bc3e3f0083d53dca194f4ef205078",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 114,
"avg_line_length": 35.554098360655736,
"alnum_prop": 0.5462928808557728,
"repo_name": "showmen15/testEEE",
"id": "d61e13990144ca28bf4884a86628c23c9cb00f98",
"size": "10844",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/amberdriver/collision_avoidance/collision_avoidance.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Protocol Buffer",
"bytes": "3753"
},
{
"name": "Python",
"bytes": "107069"
},
{
"name": "Shell",
"bytes": "5309"
}
],
"symlink_target": ""
}
|
import math, random
from panda3d.core import Vec3, Quat, GeomVertexFormat, TextureStage, GeoMipTerrain, PNMImage
from terrain.bakery.bakery import Tile, parseFile, loadTex
from terrain import collisionUtil
import meshManager
class GroundFactory(meshManager.MeshFactory):
    """Mesh factory that builds ground geometry for terrain tiles using
    Panda3D's GeoMipTerrain, texturing it from baked render maps and a
    texture list file."""
    def __init__(self,path,heightScale,shader=None,skipTextures=False):
        """path: directory holding texList.txt and a textures/ subdir.
        heightScale: vertical scale applied to the heightfield.
        shader: optional shader applied to every generated tile root.
        skipTextures: when True, the Tex2D section of texList.txt is ignored.
        """
        self.shader=shader
        meshManager.MeshFactory.__init__(self)
        # LOD -> index returned by the geom-requirements collection.
        self.dataIndex={}
        self.heightScale=heightScale
        d=parseFile(path+'/texList.txt')
        # map name -> TextureStage for stages fed from baked render maps.
        self.mapTexStages={}
        # role (e.g. "height") -> render-map name, from the 'Special' section.
        self.specialMaps={}
        for m in d['Special']:
            s=m.split('\t')
            self.specialMaps[s[1]]=s[0]
        # List of non map texture stages, and their sizes
        # (TexStage,Size)
        self.texList=[]
        if not skipTextures:
            if "Tex2D" in d:
                # sort order doubles as a unique per-stage suffix.
                sort=0;
                for m in d["Tex2D"]:
                    sort+=1
                    s=m.split()
                    name=s[0]
                    texStage=TextureStage(name+'stage'+str(sort))
                    texStage.setSort(sort)
                    source=s[1]
#                     def setTexModes(modeText):
#                         combineMode=[]
#                         for t in modeText:
#                             if t[:1]=='M':
#                                 texStage.setMode(getRenderMapType(t))
#                             elif t[:1]=='C':
#                                 combineMode.append(getCombineMode(t))
#                             elif t=='Save':
#                                 texStage.setSavedResult(True)
#                             else:
#                                 print "Illegal mode info for "+name
#                         if len(combineMode)>0:
#                             texStage.setCombineRgb(*combineMode)
#                         if len(modeText)==0:
#                             texStage.setMode(TextureStage.MModulate)
                    if source=='file':
#                         setTexModes(s[3:])
                        tex=loadTex(path+"/textures/"+name)
#                         self.terrainNode.setTexture(texStage,tex)
#                         self.terrainNode.setShaderInput('tex2D_'+name,tex)
                        self.texList.append((texStage,float(s[2]),tex,name))
                    elif source=='map':
#                         setTexModes(s[2:])
                        self.mapTexStages[s[0]]=texStage
#
#                     else:
#                         print 'Invalid source for '+name+' int Tex2D'
        # Single LOD covering all distances.
        self.LOD=meshManager.LOD(float('inf'),0)
    def getLODs(self):
        """Return the LOD levels this factory provides (always exactly one)."""
        return [self.LOD]
    def regesterGeomRequirements(self,LOD,collection):
        """Register vertex-format requirements for the given LOD with the collection.

        (Name kept as-is — callers use this spelling.)
        """
        # for now, going to use our own node, so we don't have any special requirements
        assert LOD==self.LOD
        requirements=meshManager.GeomRequirements(
            geomVertexFormat=GeomVertexFormat.getV3n3t2()
            )
        self.dataIndex[LOD]=collection.add(requirements)
    def makeBlock(self,drawResourcesFactories,x,y,x1,y1,tileCenter,collision):
        """Build and return the NodePath of one terrain block for a tile.

        collision: parent node for generated collision geometry, or falsy to skip.
        The (x, y, x1, y1) extent arguments are unused here — the block always
        covers the factory's whole tile; presumably kept for interface parity
        with other factories — TODO confirm.
        """
        drawResourcesFactory=drawResourcesFactories[self.LOD]
        tile=drawResourcesFactory.getTile()
        # NOTE(review): 'resources' is fetched but never used in this method.
        resources=drawResourcesFactory.getDrawResources(self.dataIndex[self.LOD])
        # Set up the GeoMipTerrain
        terrain = GeoMipTerrain("TerrainTile")
        heightTex=tile.bakedTile.renderMaps[self.specialMaps["height"]].tex
        heightTexSize=heightTex.getXSize()
        pnmImage=PNMImage()
        #heightTex.makeRamImage() # Makes it run without having ran image in advance, but it all ends up flat.
        heightTex.store(pnmImage)
        terrain.setHeightfield(pnmImage)
        # Set terrain properties
        terrain.setBruteforce(True)
        # Store the root NodePath for convenience
        root = terrain.getRoot()
        # Position the block relative to the tile-center origin.
        root.setPos(tile.bakedTile.x-tileCenter.getX(),tile.bakedTile.y-tileCenter.getY(),0)
        # Apply file-based textures with world-space-consistent scale/offset.
        for t in self.texList:
            texScale=1.0/(t[1])
            root.setTexture(t[0],t[2])
            root.setShaderInput('tex2D_'+t[3],t[2])
            root.setTexScale(t[0],texScale*tile.tileScale)
            root.setTexOffset(t[0],(tile.getX() % t[1])*texScale,(tile.getY() % t[1])*texScale)
        # Apply baked render-map textures.
        for t in self.mapTexStages:
            tex=tile.bakedTile.renderMaps[t].tex
            root.setTexture(self.mapTexStages[t],tex)
            # Here we apply a transform to the textures so centers of the edge pixels fall on the edges of the tile
            # Normally the edges of the edge pixels would fall on the edges of the tiles.
            # The benefits of this should be visible, though they have not been verified successfully yet.
            # In fact, these transforms appear to not do anything.
            # This is troubling, but the problem they are supposed to fix is currently invisible as well.
            #size=tex.getXSize()
            #margin=bakery.texMargin(size)
            #tile.setTexOffset(t,-margin,-margin)
            #tile.setTexScale(t,float(size+margin*2)/size)
        root.setShaderInput("offset",tile.bakedTile.x,tile.bakedTile.y,0.0,0.0)
        root.setShaderInput("scale",tile.bakedTile.scale)
        # Heightfield pixels span tileScale world units across (size-1) quads.
        xyScale=float(tile.tileScale)/(heightTexSize-1)
        root.setScale(xyScale,xyScale,self.heightScale)
        if self.shader: root.setShader(self.shader)
        # Generate it.
        terrain.generate()
        #root.flattenLight()
        if collision:
            col=collisionUtil.rebuildGeomNodesToColPolys(root,collision)
            col.setCollideMask(collisionUtil.groundMask)
            col.reparentTo(collision)
        return root
    def draw(self,drawResourcesFactories,x,y,x1,y1,tileCenter,collision):
        """Build a terrain block and attach it via the LOD's draw resources."""
        drawResourcesFactory=drawResourcesFactories[self.LOD]
        resources=drawResourcesFactory.getDrawResources(self.dataIndex[self.LOD])
        resources.attachNode(self.makeBlock(drawResourcesFactories,x,y,x1,y1,tileCenter,collision))
|
{
"content_hash": "8b9e2eac609633fea6c11be6560f8ed2",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 115,
"avg_line_length": 38.77710843373494,
"alnum_prop": 0.5403138107814199,
"repo_name": "Craig-Macomber/Panda3D-Terrain-System",
"id": "efe696bd217d2a127680b8b2a30b746d06c444df",
"size": "6437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meshManager/groundFactory.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "115383"
}
],
"symlink_target": ""
}
|
"""
Created on Sun Sep 10 14:37:53 2017
@author: dmare
"""
import classification
|
{
"content_hash": "f40d6fa83e01513f81891efe725ce4dc",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 35,
"avg_line_length": 14.333333333333334,
"alnum_prop": 0.6744186046511628,
"repo_name": "ceroytres/cat_nets",
"id": "7f9a88c5a268ba6c3587d9bdaa9028754b326c7d",
"size": "111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cat_nets/cost_functions/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9073"
},
{
"name": "Shell",
"bytes": "257"
}
],
"symlink_target": ""
}
|
from urllib import parse as urlparse
from oslo_utils import strutils
import webob
from nova.api.openstack.api_version_request \
import MAX_PROXY_API_SUPPORT_VERSION
from nova.api.openstack.api_version_request \
import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION
from nova.api.openstack.compute.schemas import quota_sets
from nova.api.openstack import identity
from nova.api.openstack import wsgi
from nova.api import validation
import nova.conf
from nova import exception
from nova.i18n import _
from nova import objects
from nova.policies import quota_sets as qs_policies
from nova import quota
CONF = nova.conf.CONF
QUOTAS = quota.QUOTAS

# Quota resources hidden from API responses starting at microversion 2.36
# (the network-proxy resources).
FILTERED_QUOTAS_2_36 = ["fixed_ips", "floating_ips",
                        "security_group_rules", "security_groups"]

# Microversion 2.57 additionally hides the file-injection (personality)
# quotas; it extends the 2.36 list via a copy so 2.36 stays unchanged.
FILTERED_QUOTAS_2_57 = list(FILTERED_QUOTAS_2_36)
FILTERED_QUOTAS_2_57.extend(['injected_files', 'injected_file_content_bytes',
                             'injected_file_path_bytes'])
class QuotaSetsController(wsgi.Controller):
def _format_quota_set(self, project_id, quota_set, filtered_quotas):
"""Convert the quota object to a result dict."""
if project_id:
result = dict(id=str(project_id))
else:
result = {}
for resource in QUOTAS.resources:
if (resource not in filtered_quotas and
resource in quota_set):
result[resource] = quota_set[resource]
return dict(quota_set=result)
def _validate_quota_limit(self, resource, limit, minimum, maximum):
def conv_inf(value):
return float("inf") if value == -1 else value
if conv_inf(limit) < conv_inf(minimum):
msg = (_("Quota limit %(limit)s for %(resource)s must "
"be greater than or equal to already used and "
"reserved %(minimum)s.") %
{'limit': limit, 'resource': resource, 'minimum': minimum})
raise webob.exc.HTTPBadRequest(explanation=msg)
if conv_inf(limit) > conv_inf(maximum):
msg = (_("Quota limit %(limit)s for %(resource)s must be "
"less than or equal to %(maximum)s.") %
{'limit': limit, 'resource': resource, 'maximum': maximum})
raise webob.exc.HTTPBadRequest(explanation=msg)
def _get_quotas(self, context, id, user_id=None, usages=False):
if user_id:
values = QUOTAS.get_user_quotas(context, id, user_id,
usages=usages)
else:
values = QUOTAS.get_project_quotas(context, id, usages=usages)
if usages:
# NOTE(melwitt): For the detailed quota view with usages, the API
# returns a response in the format:
# {
# "quota_set": {
# "cores": {
# "in_use": 0,
# "limit": 20,
# "reserved": 0
# },
# ...
# We've re-architected quotas to eliminate reservations, so we no
# longer have a 'reserved' key returned from get_*_quotas, so set
# it here to satisfy the REST API response contract.
reserved = QUOTAS.get_reserved()
for v in values.values():
v['reserved'] = reserved
return values
else:
return {k: v['limit'] for k, v in values.items()}
@wsgi.Controller.api_version("2.1", MAX_PROXY_API_SUPPORT_VERSION)
@wsgi.expected_errors(400)
def show(self, req, id):
return self._show(req, id, [])
@wsgi.Controller.api_version( # noqa
MIN_WITHOUT_PROXY_API_SUPPORT_VERSION, '2.56')
@wsgi.expected_errors(400)
def show(self, req, id): # noqa
return self._show(req, id, FILTERED_QUOTAS_2_36)
@wsgi.Controller.api_version('2.57') # noqa
@wsgi.expected_errors(400)
def show(self, req, id): # noqa
return self._show(req, id, FILTERED_QUOTAS_2_57)
@validation.query_schema(quota_sets.query_schema_275, '2.75')
@validation.query_schema(quota_sets.query_schema, '2.0', '2.74')
def _show(self, req, id, filtered_quotas):
context = req.environ['nova.context']
context.can(qs_policies.POLICY_ROOT % 'show', {'project_id': id})
identity.verify_project_id(context, id)
params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
user_id = params.get('user_id', [None])[0]
return self._format_quota_set(id,
self._get_quotas(context, id, user_id=user_id),
filtered_quotas=filtered_quotas)
@wsgi.Controller.api_version("2.1", MAX_PROXY_API_SUPPORT_VERSION)
@wsgi.expected_errors(400)
def detail(self, req, id):
return self._detail(req, id, [])
@wsgi.Controller.api_version( # noqa
MIN_WITHOUT_PROXY_API_SUPPORT_VERSION, '2.56')
@wsgi.expected_errors(400)
def detail(self, req, id): # noqa
return self._detail(req, id, FILTERED_QUOTAS_2_36)
@wsgi.Controller.api_version('2.57') # noqa
@wsgi.expected_errors(400)
def detail(self, req, id): # noqa
return self._detail(req, id, FILTERED_QUOTAS_2_57)
@validation.query_schema(quota_sets.query_schema_275, '2.75')
@validation.query_schema(quota_sets.query_schema, '2.0', '2.74')
def _detail(self, req, id, filtered_quotas):
context = req.environ['nova.context']
context.can(qs_policies.POLICY_ROOT % 'detail', {'project_id': id})
identity.verify_project_id(context, id)
user_id = req.GET.get('user_id', None)
return self._format_quota_set(
id,
self._get_quotas(context, id, user_id=user_id, usages=True),
filtered_quotas=filtered_quotas)
@wsgi.Controller.api_version("2.1", MAX_PROXY_API_SUPPORT_VERSION)
@wsgi.expected_errors(400)
@validation.schema(quota_sets.update)
def update(self, req, id, body):
return self._update(req, id, body, [])
@wsgi.Controller.api_version( # noqa
MIN_WITHOUT_PROXY_API_SUPPORT_VERSION, '2.56')
@wsgi.expected_errors(400)
@validation.schema(quota_sets.update_v236)
def update(self, req, id, body): # noqa
return self._update(req, id, body, FILTERED_QUOTAS_2_36)
@wsgi.Controller.api_version('2.57') # noqa
@wsgi.expected_errors(400)
@validation.schema(quota_sets.update_v257)
def update(self, req, id, body): # noqa
return self._update(req, id, body, FILTERED_QUOTAS_2_57)
    @validation.query_schema(quota_sets.query_schema_275, '2.75')
    @validation.query_schema(quota_sets.query_schema, '2.0', '2.74')
    def _update(self, req, id, body, filtered_quotas):
        """Shared implementation behind the versioned ``update`` handlers.

        Validates every requested limit first, then persists them all, so
        a bad value never leaves the quota set partially updated.
        """
        context = req.environ['nova.context']
        context.can(qs_policies.POLICY_ROOT % 'update', {'project_id': id})
        identity.verify_project_id(context, id)
        project_id = id
        params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
        # parse_qs yields lists; take the first user_id value, if any.
        user_id = params.get('user_id', [None])[0]
        quota_set = body['quota_set']
        # NOTE(stephenfin): network quotas were only used by nova-network and
        # therefore should be explicitly rejected
        if 'networks' in quota_set:
            raise webob.exc.HTTPBadRequest(
                explanation=_('The networks quota has been removed'))
        # 'force' lets an admin skip the min/max validation in pass #1.
        force_update = strutils.bool_from_string(quota_set.get('force',
                                                 'False'))
        settable_quotas = QUOTAS.get_settable_quotas(context, project_id,
                                                     user_id=user_id)
        # NOTE(dims): Pass #1 - In this loop for quota_set.items(), we validate
        # min/max values and bail out if any of the items in the set is bad.
        valid_quotas = {}
        for key, value in body['quota_set'].items():
            # Skip the 'force' flag itself and empty values (0 is a real
            # limit and must not be skipped).
            if key == 'force' or (not value and value != 0):
                continue
            # validate whether already used and reserved exceeds the new
            # quota, this check will be ignored if admin want to force
            # update
            value = int(value)
            if not force_update:
                minimum = settable_quotas[key]['minimum']
                maximum = settable_quotas[key]['maximum']
                self._validate_quota_limit(key, value, minimum, maximum)
            valid_quotas[key] = value
        # NOTE(dims): Pass #2 - At this point we know that all the
        # values are correct and we can iterate and update them all in one
        # shot without having to worry about rolling back etc as we have done
        # the validation up front in the loop above.
        for key, value in valid_quotas.items():
            try:
                objects.Quotas.create_limit(context, project_id,
                                            key, value, user_id=user_id)
            except exception.QuotaExists:
                objects.Quotas.update_limit(context, project_id,
                                            key, value, user_id=user_id)
        # Note(gmann): Removed 'id' from update's response to make it same
        # as V2. If needed it can be added with microversion.
        return self._format_quota_set(
            None,
            self._get_quotas(context, id, user_id=user_id),
            filtered_quotas=filtered_quotas)
    @wsgi.Controller.api_version("2.0", MAX_PROXY_API_SUPPORT_VERSION)
    @wsgi.expected_errors(400)
    def defaults(self, req, id):
        """Return default quotas for microversions 2.0 up to the proxy max."""
        return self._defaults(req, id, [])
    @wsgi.Controller.api_version(  # noqa
        MIN_WITHOUT_PROXY_API_SUPPORT_VERSION, '2.56')
    @wsgi.expected_errors(400)
    def defaults(self, req, id):  # noqa
        """Return default quotas with the 2.36 filtered-quota set applied."""
        return self._defaults(req, id, FILTERED_QUOTAS_2_36)
    @wsgi.Controller.api_version('2.57')  # noqa
    @wsgi.expected_errors(400)
    def defaults(self, req, id):  # noqa
        """Return default quotas with the 2.57 filtered-quota set applied."""
        return self._defaults(req, id, FILTERED_QUOTAS_2_57)
def _defaults(self, req, id, filtered_quotas):
context = req.environ['nova.context']
context.can(qs_policies.POLICY_ROOT % 'defaults', {'project_id': id})
identity.verify_project_id(context, id)
values = QUOTAS.get_defaults(context)
return self._format_quota_set(id, values,
filtered_quotas=filtered_quotas)
# TODO(oomichi): Here should be 204(No Content) instead of 202 by v2.1
# +microversions because the resource quota-set has been deleted completely
# when returning a response.
@wsgi.expected_errors(())
@validation.query_schema(quota_sets.query_schema_275, '2.75')
@validation.query_schema(quota_sets.query_schema, '2.0', '2.74')
@wsgi.response(202)
def delete(self, req, id):
context = req.environ['nova.context']
context.can(qs_policies.POLICY_ROOT % 'delete', {'project_id': id})
params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
user_id = params.get('user_id', [None])[0]
if user_id:
objects.Quotas.destroy_all_by_project_and_user(
context, id, user_id)
else:
objects.Quotas.destroy_all_by_project(context, id)
|
{
"content_hash": "81e44cf2d3394f0203bbf51ec60c6edd",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 79,
"avg_line_length": 41.87313432835821,
"alnum_prop": 0.597932632329353,
"repo_name": "klmitch/nova",
"id": "d955e1b1560199317f3d0bcb20007889752d7ced",
"size": "11858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/quota_sets.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "851"
},
{
"name": "HTML",
"bytes": "1386"
},
{
"name": "PHP",
"bytes": "44222"
},
{
"name": "Python",
"bytes": "22328409"
},
{
"name": "Shell",
"bytes": "29138"
},
{
"name": "Smarty",
"bytes": "405441"
}
],
"symlink_target": ""
}
|
"""
This module allows importing AbstractBaseUser even when django.contrib.auth is
not in INSTALLED_APPS.
"""
import unicodedata
from django.contrib.auth import password_validation
from django.contrib.auth.hashers import (
check_password, is_password_usable, make_password,
)
from django.db import models
from django.utils.crypto import get_random_string, salted_hmac
from django.utils.translation import gettext_lazy as _
class BaseUserManager(models.Manager):
    """Manager providing email normalization, random passwords and
    natural-key lookup for user models.
    """

    @classmethod
    def normalize_email(cls, email):
        """
        Normalize the email address by lowercasing the domain part of it.
        """
        email = email or ''
        local_part, at_sign, domain = email.strip().rpartition('@')
        # No '@' present -> leave the original value untouched.
        if at_sign:
            email = local_part + at_sign + domain.lower()
        return email

    def make_random_password(self, length=10,
                             allowed_chars='abcdefghjkmnpqrstuvwxyz'
                                           'ABCDEFGHJKLMNPQRSTUVWXYZ'
                                           '23456789'):
        """
        Generate a random password with the given length and given
        allowed_chars. The default value of allowed_chars does not have "I" or
        "O" or letters and digits that look similar -- just to avoid confusion.
        """
        return get_random_string(length, allowed_chars)

    def get_by_natural_key(self, username):
        """Look a user up by the field named in ``USERNAME_FIELD``."""
        return self.get(**{self.model.USERNAME_FIELD: username})
class AbstractBaseUser(models.Model):
    """
    Minimal abstract base for user models: password storage/verification,
    last-login tracking and session auth hashing. Concrete subclasses must
    define ``USERNAME_FIELD`` (and usually the field it names).
    """
    password = models.CharField(_('password'), max_length=128)
    last_login = models.DateTimeField(_('last login'), blank=True, null=True)
    # Plain class attribute; subclasses may replace it with a real field.
    is_active = True
    REQUIRED_FIELDS = []
    # Stores the raw password if set_password() is called so that it can
    # be passed to password_changed() after the model is saved.
    _password = None
    class Meta:
        abstract = True
    def __str__(self):
        return self.get_username()
    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        if self._password is not None:
            # Notify password validators of the change, then drop the
            # plaintext so it does not linger on the instance.
            password_validation.password_changed(self._password, self)
            self._password = None
    def get_username(self):
        """Return the username for this User."""
        return getattr(self, self.USERNAME_FIELD)
    def clean(self):
        # Unicode-normalize the username (NFKC) during model validation.
        setattr(self, self.USERNAME_FIELD, self.normalize_username(self.get_username()))
    def natural_key(self):
        return (self.get_username(),)
    @property
    def is_anonymous(self):
        """
        Always return False. This is a way of comparing User objects to
        anonymous users.
        """
        return False
    @property
    def is_authenticated(self):
        """
        Always return True. This is a way to tell if the user has been
        authenticated in templates.
        """
        return True
    def set_password(self, raw_password):
        # Hash and store; keep the raw value so save() can report the change.
        self.password = make_password(raw_password)
        self._password = raw_password
    def check_password(self, raw_password):
        """
        Return a boolean of whether the raw_password was correct. Handles
        hashing formats behind the scenes.
        """
        def setter(raw_password):
            # Called by check_password() to transparently re-hash with the
            # preferred algorithm when the stored hash is outdated.
            self.set_password(raw_password)
            # Password hash upgrades shouldn't be considered password changes.
            self._password = None
            self.save(update_fields=["password"])
        return check_password(raw_password, self.password, setter)
    def set_unusable_password(self):
        # Set a value that will never be a valid hash
        self.password = make_password(None)
    def has_usable_password(self):
        """
        Return False if set_unusable_password() has been called for this user.
        """
        return is_password_usable(self.password)
    def _legacy_get_session_auth_hash(self):
        # RemovedInDjango40Warning: pre-Django 3.1 hashes will be invalid.
        key_salt = 'django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash'
        return salted_hmac(key_salt, self.password, algorithm='sha1').hexdigest()
    def get_session_auth_hash(self):
        """
        Return an HMAC of the password field.
        """
        key_salt = "django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash"
        return salted_hmac(key_salt, self.password, algorithm='sha256').hexdigest()
    @classmethod
    def get_email_field_name(cls):
        # Default to 'email' when the subclass declares no EMAIL_FIELD.
        try:
            return cls.EMAIL_FIELD
        except AttributeError:
            return 'email'
    @classmethod
    def normalize_username(cls, username):
        return unicodedata.normalize('NFKC', username) if isinstance(username, str) else username
|
{
"content_hash": "b2049f6b250301afa652886f03bba1fe",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 97,
"avg_line_length": 33.05555555555556,
"alnum_prop": 0.6273109243697479,
"repo_name": "theo-l/django",
"id": "bb51cfbcc9a93fcc96c8362855e9ccac75bb77e7",
"size": "4760",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "django/contrib/auth/base_user.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "54515"
},
{
"name": "HTML",
"bytes": "172728"
},
{
"name": "JavaScript",
"bytes": "247742"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11279991"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
from django.views.generic.list import ListView
from django.views.generic.edit import UpdateView, DeleteView
from django.views.generic import FormView
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.exceptions import PermissionDenied
from core.spaces import url_names as urln
from core.spaces.models import Space, Document
from core.spaces.forms import SpaceForm, DocForm
class AddDocument(FormView):
    """
    Upload a new document and attach it to the current space.
    :permissions required: admin_space, mod_space
    :rtype: Object
    :context: form, get_place
    """
    form_class = DocForm
    template_name = 'spaces/document_form.html'

    def dispatch(self, request, *args, **kwargs):
        # Only space admins and moderators may upload documents.
        space = get_object_or_404(Space, url=kwargs['space_url'])
        allowed = (request.user.has_perm('admin_space', space) or
                   request.user.has_perm('mod_space', space))
        if not allowed:
            raise PermissionDenied
        return super(AddDocument, self).dispatch(request, *args, **kwargs)

    def get_success_url(self):
        return reverse(urln.SPACE_INDEX,
                       kwargs={'space_url': self.kwargs['space_url']})

    def form_valid(self, form):
        # Fill in the fields the form does not carry before saving.
        self.space = get_object_or_404(Space, url=self.kwargs['space_url'])
        document = form.save(commit=False)
        document.space = self.space
        document.author = self.request.user
        document.save()
        return super(AddDocument, self).form_valid(form)

    def get_context_data(self, **kwargs):
        context = super(AddDocument, self).get_context_data(**kwargs)
        context['get_place'] = get_object_or_404(
            Space, url=self.kwargs['space_url'])
        return context
class EditDocument(UpdateView):
    """
    Returns a DocForm filled with the current document data.
    :permissions required: admin_space, mod_space
    :rtype: HTML Form
    :context: doc, get_place
    """
    model = Document
    template_name = 'spaces/document_form.html'

    def dispatch(self, request, *args, **kwargs):
        # Only space admins and moderators may edit documents.
        space = get_object_or_404(Space, url=kwargs['space_url'])
        if (request.user.has_perm('admin_space', space) or
                request.user.has_perm('mod_space', space)):
            return super(EditDocument, self).dispatch(request, *args, **kwargs)
        else:
            raise PermissionDenied

    def get_success_url(self):
        space = self.kwargs['space_url']
        return reverse(urln.SPACE_INDEX, kwargs={'space_url': space})

    def get_object(self):
        return get_object_or_404(Document, pk=self.kwargs['doc_id'])

    def get_context_data(self, **kwargs):
        context = super(EditDocument, self).get_context_data(**kwargs)
        space = get_object_or_404(Space, url=self.kwargs['space_url'])
        context['get_place'] = space
        # BUG FIX: the original referenced has_space_permission() and
        # has_all_permissions(), neither of which is imported in this module,
        # so rendering raised NameError. Use the same per-space permission
        # checks the dispatch() guard already relies on.
        user = self.request.user
        context['user_is_admin'] = (user.has_perm('admin_space', space) or
                                    user.has_perm('mod_space', space))
        return context
class DeleteDocument(DeleteView):
    """
    Returns a confirmation page before deleting the current document.
    :permissions required: admin_space, mod_space
    :rtype: Confirmation
    :context: get_place
    """
    def dispatch(self, request, *args, **kwargs):
        space = get_object_or_404(Space, url=kwargs['space_url'])
        # Fetch purely to 404 early when the document does not exist.
        get_object_or_404(Document, pk=kwargs['doc_id'])
        # BUG FIX: the 'mod_space' check was missing the ``space`` argument,
        # so it tested a global permission rather than the per-space one used
        # by the equivalent guards in AddDocument and EditDocument.
        if (request.user.has_perm('admin_space', space) or
                request.user.has_perm('mod_space', space)):
            return super(DeleteDocument, self).dispatch(request, *args,
                                                        **kwargs)
        else:
            raise PermissionDenied

    def get_object(self):
        return get_object_or_404(Document, pk=self.kwargs['doc_id'])

    def get_success_url(self):
        space = self.kwargs['space_url']
        # Now we delete the file for real. It's not the best place, but here
        # we know that our user gave confirmation.
        f = get_object_or_404(Document, pk=self.kwargs['doc_id'])
        f.delete()
        return reverse(urln.SPACE_INDEX, kwargs={'space_url': space})

    def get_context_data(self, **kwargs):
        context = super(DeleteDocument, self).get_context_data(**kwargs)
        context['get_place'] = get_object_or_404(Space,
                                                 url=self.kwargs['space_url'])
        return context
class ListDocs(ListView):
    """
    Returns a list of documents attached to the current space.
    :rtype: Object list
    :context: object_list, get_place
    """
    paginate_by = 25
    context_object_name = 'document_list'

    def dispatch(self, request, *args, **kwargs):
        # Any user with view access to the space may list its documents.
        space = get_object_or_404(Space, url=kwargs['space_url'])
        if not request.user.has_perm('view_space', space):
            raise PermissionDenied
        return super(ListDocs, self).dispatch(request, *args, **kwargs)

    def get_queryset(self):
        space = get_object_or_404(Space, url=self.kwargs['space_url'])
        return (Document.objects.all()
                .filter(space=space.id)
                .order_by('pub_date'))

    def get_context_data(self, **kwargs):
        context = super(ListDocs, self).get_context_data(**kwargs)
        context['get_place'] = get_object_or_404(
            Space, url=self.kwargs['space_url'])
        return context
|
{
"content_hash": "70dec90a35ca2a9e612820d5606b649c",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 81,
"avg_line_length": 34.4625,
"alnum_prop": 0.6420021762785637,
"repo_name": "cidadania/ecidadania-ng",
"id": "80f38d26fe326d58fce131cb6986622e662b7cdb",
"size": "6176",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/apps/spaces/views/documents.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "41262"
},
{
"name": "HTML",
"bytes": "85966"
},
{
"name": "JavaScript",
"bytes": "3818"
},
{
"name": "Python",
"bytes": "148480"
},
{
"name": "Ruby",
"bytes": "946"
}
],
"symlink_target": ""
}
|
import logging
from singleton import Singleton
class ColorizedStreamHandler(logging.StreamHandler):
    """StreamHandler that wraps each record in ANSI color codes chosen by
    the record's level, but only when the stream is a real terminal.
    """

    # ANSI foreground color numbers (added to 30 in the escape sequence).
    color_map = {
        'red': 1,
        'green': 2,
        'yellow': 3,
        'white': 7,
    }

    # level -> (foreground color name, bold?)
    level_map = {
        logging.DEBUG: ('white', True),
        logging.INFO: ('green', False),
        logging.WARNING: ('yellow', False),
        logging.ERROR: ('red', False),
        logging.CRITICAL: ('red', True),
    }

    csi = '\x1b['
    reset = '\x1b[0m'

    def emit(self, record):
        message = logging.StreamHandler.format(self, record)
        stream = self.stream
        # BUG FIX: the original tested only the *presence* of an isatty
        # attribute and never called it, so redirected output (files and
        # pipes, whose isatty() returns False) still received escape codes.
        # Colorize only for genuine terminals.
        isatty = getattr(stream, 'isatty', None)
        if isatty and isatty():
            stream.write(self.colorize(message, record))
        else:
            stream.write(message)
        stream.write(getattr(self, 'terminator', '\n'))
        self.flush()

    def colorize(self, message, record):
        """Return *message* wrapped in the escape codes for record's level;
        unknown levels are returned unchanged.
        """
        if record.levelno in self.level_map:
            fg, bold = self.level_map[record.levelno]
            params = []
            if fg in self.color_map:
                params.append(str(self.color_map[fg] + 30))
            if bold:
                params.append('1')
            if params:
                message = ''.join((self.csi, ';'.join(params), 'm',
                                   message, self.reset))
        return message
class SnixLogger:
    """A Colorized Logger that can be used across Snix. This is a singleton."""
    # NOTE(review): Python 2 metaclass syntax -- it has no effect on
    # Python 3, so __init__ may run on every construction; the handler
    # guard below keeps repeated construction harmless either way.
    __metaclass__ = Singleton

    def __init__(self):
        self.logger = logging.getLogger("SNIX")
        self.logger.setLevel(logging.INFO)
        # BUG FIX: logging.getLogger() returns a shared logger, so adding a
        # handler on every construction produced duplicate log lines when
        # the singleton machinery did not kick in (as on Python 3). Attach
        # the handler only once.
        if not self.logger.handlers:
            formatter = logging.Formatter(
                "%(asctime)s-%(levelname)s-%(message)s",
                "%m/%d/%Y %I:%M:%S %p")
            handler = ColorizedStreamHandler()
            handler.setFormatter(formatter)
            self.logger.addHandler(handler)

    @staticmethod
    def logger():
        # The instance attribute ``logger`` (a logging.Logger) shadows this
        # staticmethod on instances, so this returns the Logger object.
        return SnixLogger().logger
# TODO : Convert this into a unit test.
if __name__ == "__main__":
    # Smoke test: emit one message at each severity. The DEBUG message is
    # filtered out because the logger level is set to INFO.
    logger = SnixLogger.logger()
    logger.info("hello")
    logger.debug("blah")
    logger.error("oops")
    logger.critical("gah")
|
{
"content_hash": "14d0857c9455f6c93b3400d0990b3d76",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 102,
"avg_line_length": 28.549295774647888,
"alnum_prop": 0.5663542180562408,
"repo_name": "yaise/snix",
"id": "3ac5fbde492d2bb25e23fa5ab180590cc17e9614",
"size": "2049",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snixLogger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32342"
}
],
"symlink_target": ""
}
|
"""
Legend of a chart.
"""
from __future__ import absolute_import, print_function, unicode_literals
from ..enum.chart import XL_LEGEND_POSITION
from ..text.text import Font
from ..util import lazyproperty
class Legend(object):
    """
    The legend of a chart. A chart has at most one legend.
    """

    def __init__(self, legend_elm):
        super(Legend, self).__init__()
        self._element = legend_elm

    @lazyproperty
    def font(self):
        """
        |Font| object providing access to this legend's text properties,
        such as bold, italic, etc.
        """
        return Font(self._element.defRPr)

    @property
    def horz_offset(self):
        """
        Offset of the legend's x position from its default, as a float
        between -1.0 and 1.0 representing a fraction of the chart width.
        Negative values move the legend left, positive values right.
        |None| if no setting is specified.
        """
        return self._element.horz_offset

    @horz_offset.setter
    def horz_offset(self, value):
        self._element.horz_offset = value

    @property
    def include_in_layout(self):
        """|True| if legend should be located inside plot area.

        Read/write boolean. When |True| the legend is placed inside the
        plot area, often superimposed on the chart itself. Assigning |None|
        removes any `c:overlay` element, which is interpreted the same as
        |True|; assigning an explicit boolean is recommended.
        """
        overlay = self._element.overlay
        return True if overlay is None else overlay.val

    @include_in_layout.setter
    def include_in_layout(self, value):
        if value is None:
            self._element._remove_overlay()
        else:
            self._element.get_or_add_overlay().val = bool(value)

    @property
    def position(self):
        """
        Read/write :ref:`XlLegendPosition` enumeration value specifying the
        general region of the chart in which to place the legend.
        """
        legendPos = self._element.legendPos
        return (XL_LEGEND_POSITION.RIGHT if legendPos is None
                else legendPos.val)

    @position.setter
    def position(self, position):
        self._element.get_or_add_legendPos().val = position
|
{
"content_hash": "0398d20bb65df89965cc9480995a304d",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 77,
"avg_line_length": 31.825,
"alnum_prop": 0.631578947368421,
"repo_name": "ryfeus/lambda-packs",
"id": "fe13bf32165bd664874dd368bebfa51e2531d510",
"size": "2565",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Pdf_docx_pptx_xlsx_epub_png/source/pptx/chart/legend.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
}
|
"""
Tests for group API.
"""
from unittest import mock
import ddt
from cinder import context
from cinder import exception
import cinder.group
from cinder import objects
from cinder.objects import fields
from cinder.policies import group_snapshots as g_snap_policies
from cinder import quota
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_volume
from cinder.tests.unit import test
from cinder.tests.unit import utils
GROUP_QUOTAS = quota.GROUP_QUOTAS
@ddt.ddt
class GroupAPITestCase(test.TestCase):
"""Test Case for group API."""
    def setUp(self):
        """Create the group API plus admin and non-admin request contexts."""
        super(GroupAPITestCase, self).setUp()
        self.group_api = cinder.group.API()
        self.ctxt = context.RequestContext(fake.USER_ID, fake.PROJECT_ID,
                                           auth_token=True,
                                           is_admin=True)
        self.user_ctxt = context.RequestContext(
            fake.USER_ID, fake.PROJECT_ID, auth_token=True)
    @mock.patch('cinder.objects.Group.get_by_id')
    def test_get(self, mock_group_get):
        """get() returns the Group looked up by id."""
        fake_group = {'name': 'fake_group'}
        mock_group_get.return_value = fake_group
        grp = self.group_api.get(self.ctxt, fake.GROUP_ID)
        self.assertEqual(fake_group, grp)
    @ddt.data(True, False)
    @mock.patch('cinder.objects.GroupList.get_all')
    @mock.patch('cinder.objects.GroupList.get_all_by_project')
    def test_get_all(self, is_admin, mock_get_all_by_project,
                     mock_get_all):
        """Admin with all_tenants sees all groups; others only their own."""
        self.group_api.LOG = mock.Mock()
        fake_groups = ['fake_group1', 'fake_group2']
        fake_groups_by_project = ['fake_group1']
        mock_get_all.return_value = fake_groups
        mock_get_all_by_project.return_value = fake_groups_by_project
        if is_admin:
            grps = self.group_api.get_all(self.ctxt,
                                          filters={'all_tenants': True})
            self.assertEqual(fake_groups, grps)
        else:
            grps = self.group_api.get_all(self.user_ctxt)
            self.assertEqual(fake_groups_by_project, grps)
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.delete_group')
    @mock.patch('cinder.db.volume_get_all_by_generic_group')
    @mock.patch('cinder.db.volumes_update')
    @mock.patch('cinder.group.api.API._cast_create_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.objects.Group')
    @mock.patch('cinder.db.group_type_get')
    @mock.patch('cinder.db.volume_types_get_by_name_or_id')
    def test_create_delete(self, mock_volume_types_get,
                           mock_group_type_get, mock_group,
                           mock_update_quota, mock_cast_create_group,
                           mock_volumes_update, mock_volume_get_all,
                           mock_rpc_delete_group):
        """Full round trip: create a group, then delete it with its volumes."""
        mock_volume_types_get.return_value = [{'id': fake.VOLUME_TYPE_ID}]
        mock_group_type_get.return_value = {'id': fake.GROUP_TYPE_ID}
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[fake.VOLUME_TYPE_ID],
                                 availability_zone='nova', host=None,
                                 name=name, description=description,
                                 status=fields.GroupStatus.CREATING)
        mock_group.return_value = grp
        ret_group = self.group_api.create(self.ctxt, name, description,
                                          fake.GROUP_TYPE_ID,
                                          [fake.VOLUME_TYPE_ID],
                                          availability_zone='nova')
        self.assertEqual(grp.obj_to_primitive(), ret_group.obj_to_primitive())
        # Make the group deletable before calling delete().
        ret_group.host = "test_host@fakedrv#fakepool"
        ret_group.status = fields.GroupStatus.AVAILABLE
        ret_group.assert_not_frozen = mock.Mock(return_value=True)
        ret_group.group_snapshots = []
        self.group_api.delete(self.ctxt, ret_group, delete_volumes=True)
        mock_volume_get_all.assert_called_once_with(mock.ANY, ret_group.id)
        mock_volumes_update.assert_called_once_with(self.ctxt, [])
        mock_rpc_delete_group.assert_called_once_with(self.ctxt, ret_group)
    @mock.patch('cinder.group.api.API._cast_create_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.objects.Group')
    @mock.patch('cinder.db.group_type_get_by_name')
    @mock.patch('cinder.db.volume_types_get_by_name_or_id')
    def test_create_with_group_name(self, mock_volume_types_get,
                                    mock_group_type_get, mock_group,
                                    mock_update_quota, mock_cast_create_group):
        """create() resolves a group type given by name rather than id."""
        mock_volume_types_get.return_value = [{'id': fake.VOLUME_TYPE_ID}]
        mock_group_type_get.return_value = {'id': fake.GROUP_TYPE_ID}
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[fake.VOLUME_TYPE_ID],
                                 availability_zone='nova', host=None,
                                 name=name, description=description,
                                 status=fields.GroupStatus.CREATING)
        mock_group.return_value = grp
        ret_group = self.group_api.create(self.ctxt, name, description,
                                          "fake-grouptype-name",
                                          [fake.VOLUME_TYPE_ID],
                                          availability_zone='nova')
        self.assertEqual(grp.obj_to_primitive(), ret_group.obj_to_primitive())
        mock_group_type_get.assert_called_once_with(self.ctxt,
                                                    "fake-grouptype-name")
    @mock.patch('cinder.group.api.API._cast_create_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.db.group_type_get')
    @mock.patch('cinder.db.group_type_get_by_name')
    @mock.patch('cinder.db.volume_types_get_by_name_or_id')
    def test_create_with_uuid_format_group_type_name(
            self, mock_volume_types_get, mock_group_type_get_by_name,
            mock_group_type_get, mock_update_quota, mock_cast_create_group):
        """A UUID-shaped group-type *name* falls back to a by-name lookup."""
        uuid_format_type_name = fake.UUID1
        mock_volume_types_get.return_value = [{'id': fake.VOLUME_TYPE_ID}]
        # The id lookup fails first, forcing the by-name fallback.
        mock_group_type_get.side_effect = exception.GroupTypeNotFound(
            group_type_id=uuid_format_type_name)
        mock_group_type_get_by_name.return_value = {'id': fake.GROUP_TYPE_ID}
        ret_group = self.group_api.create(self.ctxt, "test_group", '',
                                          uuid_format_type_name,
                                          [fake.VOLUME_TYPE_ID],
                                          availability_zone='nova')
        self.assertEqual(ret_group["group_type_id"],
                         fake.GROUP_TYPE_ID)
    @mock.patch('cinder.group.api.API._cast_create_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.db.group_type_get_by_name')
    @mock.patch('cinder.db.sqlalchemy.api._volume_type_get')
    @mock.patch('cinder.db.sqlalchemy.api._volume_type_get_by_name')
    def test_create_with_uuid_format_volume_type_name(
            self, mock_vol_t_get_by_name, mock_vol_types_get_by_id,
            mock_group_type_get, mock_update_quota, mock_cast_create_group):
        """A UUID-shaped volume-type *name* falls back to a by-name lookup."""
        uuid_format_name = fake.UUID1
        mock_group_type_get.return_value = {'id': fake.GROUP_TYPE_ID}
        volume_type = {'id': fake.VOLUME_TYPE_ID, 'name': uuid_format_name}
        # The id lookup fails first, forcing the by-name fallback.
        mock_vol_types_get_by_id.side_effect = exception.VolumeTypeNotFound(
            volume_type_id=uuid_format_name)
        mock_vol_t_get_by_name.return_value = volume_type
        group = self.group_api.create(self.ctxt, "test_group",
                                      "this is a test group",
                                      "fake-grouptype-name",
                                      [uuid_format_name],
                                      availability_zone='nova')
        self.assertEqual(group["volume_type_ids"],
                         [volume_type['id']])
    @mock.patch('cinder.group.api.API._cast_create_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.db.group_type_get_by_name')
    @mock.patch('cinder.db.volume_types_get_by_name_or_id')
    def test_create_with_multi_types(self, mock_volume_types_get,
                                     mock_group_type_get,
                                     mock_update_quota,
                                     mock_cast_create_group):
        """create() accepts multiple volume-type names and resolves them all."""
        volume_types = [{'id': fake.VOLUME_TYPE_ID},
                        {'id': fake.VOLUME_TYPE2_ID}]
        mock_volume_types_get.return_value = volume_types
        mock_group_type_get.return_value = {'id': fake.GROUP_TYPE_ID}
        volume_type_names = ['fake-volume-type1', 'fake-volume-type2']
        name = "test_group"
        description = "this is a test group"
        group = self.group_api.create(self.ctxt, name, description,
                                      "fake-grouptype-name",
                                      volume_type_names,
                                      availability_zone='nova')
        self.assertEqual(group["volume_type_ids"],
                         [t['id'] for t in volume_types])
        self.assertEqual(group["group_type_id"], fake.GROUP_TYPE_ID)
        mock_group_type_get.assert_called_once_with(self.ctxt,
                                                    "fake-grouptype-name")
        mock_volume_types_get.assert_called_once_with(mock.ANY,
                                                      volume_type_names)
    @mock.patch('oslo_utils.timeutils.utcnow')
    @mock.patch('cinder.objects.Group')
    def test_reset_status(self, mock_group, mock_time_util):
        """reset_status() updates status and updated_at, then saves."""
        mock_time_util.return_value = "time_now"
        self.group_api.reset_status(self.ctxt, mock_group,
                                    fields.GroupStatus.AVAILABLE)
        update_field = {'updated_at': "time_now",
                        'status': fields.GroupStatus.AVAILABLE}
        mock_group.update.assert_called_once_with(update_field)
        mock_group.save.assert_called_once_with()
    @mock.patch.object(GROUP_QUOTAS, "reserve")
    @mock.patch('cinder.objects.Group')
    @mock.patch('cinder.db.group_type_get_by_name')
    @mock.patch('cinder.db.volume_types_get_by_name_or_id')
    def test_create_group_failed_update_quota(self,
                                              mock_volume_types_get,
                                              mock_group_type_get, mock_group,
                                              mock_group_quota_reserve):
        """An OverQuota during reservation surfaces as GroupLimitExceeded."""
        mock_volume_types_get.return_value = [{'id': fake.VOLUME_TYPE_ID}]
        mock_group_type_get.return_value = {'id': fake.GROUP_TYPE_ID}
        fake_overs = ['groups']
        fake_quotas = {'groups': 1}
        fake_usages = {'groups': {'reserved': 0, 'in_use': 1}}
        mock_group_quota_reserve.side_effect = exception.OverQuota(
            overs=fake_overs,
            quotas=fake_quotas,
            usages=fake_usages)
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[fake.VOLUME_TYPE_ID],
                                 availability_zone='nova', host=None,
                                 name=name, description=description,
                                 status=fields.GroupStatus.CREATING)
        mock_group.return_value = grp
        self.assertRaises(exception.GroupLimitExceeded,
                          self.group_api.create,
                          self.ctxt, name, description,
                          "fake-grouptype-name",
                          [fake.VOLUME_TYPE_ID],
                          availability_zone='nova')
    @mock.patch('cinder.objects.Group')
    @mock.patch('cinder.db.volume_get')
    def test__validate_add_volumes(self, mock_volume_get, mock_group):
        """Adding a volume that fails validation raises InvalidVolume."""
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[fake.VOLUME_TYPE_ID],
                                 availability_zone='nova', host=None,
                                 name="name", description="description",
                                 status=fields.GroupStatus.CREATING)
        mock_group.return_value = grp
        fake_volume_obj = fake_volume.fake_volume_obj(self.ctxt)
        mock_volume_get.return_value = fake_volume_obj
        self.assertRaises(exception.InvalidVolume,
                          self.group_api._validate_add_volumes, self.ctxt,
                          [], ['123456789'], grp)
    @ddt.data(['test_host@fakedrv#fakepool', 'test_host@fakedrv#fakepool'],
              ['test_host@fakedrv#fakepool', 'test_host2@fakedrv#fakepool'])
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.update_group')
    @mock.patch('cinder.db.volume_get_all_by_generic_group')
    @mock.patch('cinder.group.api.API._cast_create_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.objects.Group')
    @mock.patch('cinder.db.group_type_get')
    @mock.patch('cinder.db.volume_types_get_by_name_or_id')
    def test_update(self, hosts, mock_volume_types_get,
                    mock_group_type_get, mock_group,
                    mock_update_quota, mock_cast_create_group,
                    mock_volume_get_all, mock_rpc_update_group):
        """update() adds/removes volumes, for same-host and cross-host cases
        (the ddt data supplies the group host and the volume host)."""
        vol_type_dict = {'id': fake.VOLUME_TYPE_ID,
                         'name': 'fake_volume_type'}
        vol_type = objects.VolumeType(self.ctxt, **vol_type_dict)
        mock_volume_types_get.return_value = [{'id': fake.VOLUME_TYPE_ID}]
        mock_group_type_get.return_value = {'id': fake.GROUP_TYPE_ID}
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[fake.VOLUME_TYPE_ID],
                                 availability_zone='nova', host=None,
                                 name=name, description=description,
                                 status=fields.GroupStatus.CREATING)
        mock_group.return_value = grp
        ret_group = self.group_api.create(self.ctxt, name, description,
                                          fake.GROUP_TYPE_ID,
                                          [fake.VOLUME_TYPE_ID],
                                          availability_zone='nova')
        self.assertEqual(grp.obj_to_primitive(), ret_group.obj_to_primitive())
        ret_group.volume_types = [vol_type]
        ret_group.host = hosts[0]
        # set resource_backend directly because ret_group
        # is instance of MagicMock
        ret_group.resource_backend = 'fake-cluster'
        ret_group.status = fields.GroupStatus.AVAILABLE
        ret_group.id = fake.GROUP_ID
        # vol1 is not yet in the group (will be added); vol2 is (removed).
        vol1 = utils.create_volume(
            self.ctxt, host=hosts[1],
            availability_zone=ret_group.availability_zone,
            volume_type_id=fake.VOLUME_TYPE_ID,
            cluster_name='fake-cluster')
        vol2 = utils.create_volume(
            self.ctxt, host=hosts[1],
            availability_zone=ret_group.availability_zone,
            volume_type_id=fake.VOLUME_TYPE_ID,
            group_id=fake.GROUP_ID,
            cluster_name='fake-cluster')
        vol2_dict = {
            'id': vol2.id,
            'group_id': fake.GROUP_ID,
            'volume_type_id': fake.VOLUME_TYPE_ID,
            'availability_zone': ret_group.availability_zone,
            'host': hosts[1],
            'status': 'available',
        }
        mock_volume_get_all.return_value = [vol2_dict]
        new_name = "new_group_name"
        new_desc = "this is a new group"
        self.group_api.update(self.ctxt, ret_group, new_name, new_desc,
                              vol1.id, vol2.id)
        mock_volume_get_all.assert_called_once_with(mock.ANY, ret_group.id)
        mock_rpc_update_group.assert_called_once_with(self.ctxt, ret_group,
                                                      add_volumes=vol1.id,
                                                      remove_volumes=vol2.id)
    @mock.patch('cinder.objects.GroupSnapshot.get_by_id')
    @mock.patch('cinder.context.RequestContext.authorize')
    def test_get_group_snapshot(self, mock_authorize, mock_group_snap):
        """get_group_snapshot returns the looked-up snapshot and checks policy.

        Verifies that the value from GroupSnapshot.get_by_id is passed
        through unchanged and that the GET policy is authorized against it.
        """
        fake_group_snap = 'fake_group_snap'
        mock_group_snap.return_value = fake_group_snap
        grp_snap = self.group_api.get_group_snapshot(
            self.ctxt, fake.GROUP_SNAPSHOT_ID)
        self.assertEqual(fake_group_snap, grp_snap)
        # Policy must be enforced against the snapshot being returned.
        mock_authorize.assert_called_once_with(
            g_snap_policies.GET_POLICY,
            target_obj=fake_group_snap)
    @ddt.data(True, False)
    @mock.patch('cinder.objects.GroupSnapshotList.get_all')
    @mock.patch('cinder.objects.GroupSnapshotList.get_all_by_project')
    def test_get_all_group_snapshots(self, is_admin,
                                     mock_get_all_by_project,
                                     mock_get_all):
        """Admin with all_tenants sees every snapshot; others only their own.

        Parameterized by ``is_admin``: the admin path must call get_all,
        the non-admin path get_all_by_project.
        """
        fake_group_snaps = ['fake_group_snap1', 'fake_group_snap2']
        fake_group_snaps_by_project = ['fake_group_snap1']
        mock_get_all.return_value = fake_group_snaps
        mock_get_all_by_project.return_value = fake_group_snaps_by_project
        if is_admin:
            grp_snaps = self.group_api.get_all_group_snapshots(
                self.ctxt, filters={'all_tenants': True})
            self.assertEqual(fake_group_snaps, grp_snaps)
        else:
            grp_snaps = self.group_api.get_all_group_snapshots(
                self.user_ctxt)
            self.assertEqual(fake_group_snaps_by_project, grp_snaps)
    @mock.patch('cinder.objects.GroupSnapshot')
    def test_update_group_snapshot(self, mock_group_snap):
        """update_group_snapshot applies the update dict and saves the object."""
        grp_snap_update = {"name": "new_name",
                           "description": "This is a new description"}
        self.group_api.update_group_snapshot(self.ctxt, mock_group_snap,
                                             grp_snap_update)
        mock_group_snap.update.assert_called_once_with(grp_snap_update)
        mock_group_snap.save.assert_called_once_with()
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.delete_group_snapshot')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.create_group_snapshot')
    @mock.patch('cinder.volume.api.API.create_snapshots_in_db')
    @mock.patch('cinder.objects.Group')
    @mock.patch('cinder.objects.GroupSnapshot')
    @mock.patch('cinder.objects.SnapshotList.get_all_for_group_snapshot')
    def test_create_delete_group_snapshot(self,
                                          mock_snap_get_all,
                                          mock_group_snap, mock_group,
                                          mock_create_in_db,
                                          mock_create_api, mock_delete_api):
        """Round-trips a group snapshot: create it, then delete it.

        Checks the GroupSnapshot constructor options, the DB snapshot
        creation call, and the create/delete RPC fan-out.
        """
        name = "fake_name"
        description = "fake description"
        mock_group.id = fake.GROUP_ID
        mock_group.group_type_id = fake.GROUP_TYPE_ID
        mock_group.assert_not_frozen = mock.Mock(return_value=True)
        mock_group.volumes = []
        ret_group_snap = self.group_api.create_group_snapshot(
            self.ctxt, mock_group, name, description)
        # NOTE(review): this return_value is configured after
        # create_group_snapshot has already run -- confirm the ordering
        # is intentional (a default MagicMock was returned during the call).
        mock_snap_get_all.return_value = []
        options = {'group_id': fake.GROUP_ID,
                   'user_id': self.ctxt.user_id,
                   'project_id': self.ctxt.project_id,
                   'status': "creating",
                   'name': name,
                   'description': description,
                   'group_type_id': fake.GROUP_TYPE_ID}
        mock_group_snap.assert_called_once_with(self.ctxt, **options)
        ret_group_snap.create.assert_called_once_with()
        mock_create_in_db.assert_called_once_with(self.ctxt, [],
                                                  ret_group_snap.name,
                                                  ret_group_snap.description,
                                                  None,
                                                  ret_group_snap.id)
        mock_create_api.assert_called_once_with(self.ctxt, ret_group_snap)
        ret_group_snap.assert_not_frozen = mock.Mock(return_value=True)
        self.group_api.delete_group_snapshot(self.ctxt, ret_group_snap)
        mock_delete_api.assert_called_once_with(mock.ANY, ret_group_snap)
    @mock.patch('cinder.volume.api.API.delete')
    @mock.patch('cinder.objects.VolumeType.get_by_name_or_id')
    @mock.patch('cinder.db.group_volume_type_mapping_create')
    @mock.patch('cinder.volume.api.API.create')
    @mock.patch('cinder.objects.GroupSnapshot.get_by_id')
    @mock.patch('cinder.objects.SnapshotList.get_all_for_group_snapshot')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.create_group_from_src')
    @mock.patch('cinder.objects.VolumeList.get_all_by_generic_group')
    def test_create_group_from_snap_volume_failed(
            self, mock_volume_get_all,
            mock_rpc_create_group_from_src,
            mock_snap_get_all, mock_group_snap_get,
            mock_volume_api_create,
            mock_mapping_create,
            mock_get_volume_type, mock_volume_delete):
        """Volume-create failure during group-from-snapshot rolls back cleanly.

        When volume API create raises, the exception must propagate, the
        create_group_from_src RPC must never be sent, and the already-tracked
        volume must be deleted.
        """
        # Force the failure under test.
        mock_volume_api_create.side_effect = [exception.CinderException]
        vol_type = fake_volume.fake_volume_type_obj(
            self.ctxt,
            id=fake.VOLUME_TYPE_ID,
            name='fake_volume_type')
        mock_get_volume_type.return_value = vol_type
        grp_snap = utils.create_group_snapshot(
            self.ctxt, fake.GROUP_ID,
            group_type_id=fake.GROUP_TYPE_ID,
            status=fields.GroupStatus.CREATING)
        mock_group_snap_get.return_value = grp_snap
        vol1 = utils.create_volume(
            self.ctxt,
            availability_zone='nova',
            volume_type_id=vol_type['id'],
            group_id=fake.GROUP_ID)
        snap = utils.create_snapshot(self.ctxt, vol1.id,
                                     volume_type_id=vol_type['id'],
                                     status=fields.GroupStatus.CREATING)
        mock_snap_get_all.return_value = [snap]
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[vol_type['id']],
                                 availability_zone='nova',
                                 name=name, description=description,
                                 group_snapshot_id=grp_snap.id,
                                 status=fields.GroupStatus.CREATING)
        vol2 = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=vol_type['id'],
            group_id=grp.id,
            snapshot_id=snap.id)
        mock_volume_get_all.return_value = [vol2]
        self.assertRaises(
            exception.CinderException,
            self.group_api._create_group_from_group_snapshot,
            self.ctxt, grp, grp_snap.id)
        mock_volume_api_create.assert_called_once_with(
            self.ctxt, 1, None, None,
            availability_zone=grp.availability_zone,
            group_snapshot=grp_snap,
            group=grp,
            snapshot=snap,
            volume_type=vol_type)
        # Rollback expectations: no RPC cast, orphaned volume removed.
        mock_rpc_create_group_from_src.assert_not_called()
        mock_volume_delete.assert_called_once_with(self.ctxt, vol2)
        vol2.destroy()
        grp.destroy()
        snap.destroy()
        vol1.destroy()
        grp_snap.destroy()
    @mock.patch('cinder.group.api.API._update_volumes_host')
    @mock.patch('cinder.objects.VolumeType.get_by_name_or_id')
    @mock.patch('cinder.db.group_volume_type_mapping_create')
    @mock.patch('cinder.volume.api.API.create')
    @mock.patch('cinder.objects.GroupSnapshot.get_by_id')
    @mock.patch('cinder.objects.SnapshotList.get_all_for_group_snapshot')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.create_group_from_src')
    @mock.patch('cinder.objects.VolumeList.get_all_by_generic_group')
    def test_create_group_from_snap(self, mock_volume_get_all,
                                    mock_rpc_create_group_from_src,
                                    mock_snap_get_all, mock_group_snap_get,
                                    mock_volume_api_create,
                                    mock_mapping_create,
                                    mock_get_volume_type,
                                    mock_update_volumes_host):
        """Happy path of _create_group_from_group_snapshot.

        One volume is created per snapshot, the create_group_from_src RPC
        is cast, and the new group's volumes get their host updated.
        """
        vol_type = fake_volume.fake_volume_type_obj(
            self.ctxt,
            id=fake.VOLUME_TYPE_ID,
            name='fake_volume_type')
        mock_get_volume_type.return_value = vol_type
        grp_snap = utils.create_group_snapshot(
            self.ctxt, fake.GROUP_ID,
            group_type_id=fake.GROUP_TYPE_ID,
            status=fields.GroupStatus.CREATING)
        mock_group_snap_get.return_value = grp_snap
        vol1 = utils.create_volume(
            self.ctxt,
            availability_zone='nova',
            volume_type_id=vol_type['id'],
            group_id=fake.GROUP_ID)
        snap = utils.create_snapshot(self.ctxt, vol1.id,
                                     volume_type_id=vol_type['id'],
                                     status=fields.GroupStatus.CREATING)
        mock_snap_get_all.return_value = [snap]
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[vol_type['id']],
                                 availability_zone='nova',
                                 name=name, description=description,
                                 group_snapshot_id=grp_snap.id,
                                 status=fields.GroupStatus.CREATING)
        vol2 = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=vol_type['id'],
            group_id=grp.id,
            snapshot_id=snap.id)
        mock_volume_get_all.return_value = [vol2]
        self.group_api._create_group_from_group_snapshot(self.ctxt, grp,
                                                         grp_snap.id)
        mock_volume_api_create.assert_called_once_with(
            self.ctxt, 1, None, None,
            availability_zone=grp.availability_zone,
            group_snapshot=grp_snap,
            group=grp,
            snapshot=snap,
            volume_type=vol_type)
        mock_rpc_create_group_from_src.assert_called_once_with(
            self.ctxt, grp, grp_snap)
        mock_update_volumes_host.assert_called_once_with(
            self.ctxt, grp
        )
        vol2.destroy()
        grp.destroy()
        snap.destroy()
        vol1.destroy()
        grp_snap.destroy()
    @mock.patch('cinder.group.api.API._update_volumes_host')
    @mock.patch('cinder.objects.VolumeType.get_by_name_or_id')
    @mock.patch('cinder.db.group_volume_type_mapping_create')
    @mock.patch('cinder.volume.api.API.create')
    @mock.patch('cinder.objects.Group.get_by_id')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.create_group_from_src')
    @mock.patch('cinder.objects.VolumeList.get_all_by_generic_group')
    def test_create_group_from_group(self, mock_volume_get_all,
                                     mock_rpc_create_group_from_src,
                                     mock_group_get,
                                     mock_volume_api_create,
                                     mock_mapping_create,
                                     mock_get_volume_type,
                                     mock_update_volumes_host):
        """Happy path of _create_group_from_source_group (clone a group).

        Each source volume spawns a clone in the new group; the RPC is cast
        with (new group, no snapshot, source group) and hosts are updated.
        """
        vol_type = fake_volume.fake_volume_type_obj(
            self.ctxt,
            id=fake.VOLUME_TYPE_ID,
            name='fake_volume_type')
        mock_get_volume_type.return_value = vol_type
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[vol_type['id']],
                                 availability_zone='nova',
                                 status=fields.GroupStatus.CREATING)
        mock_group_get.return_value = grp
        vol = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=fake.VOLUME_TYPE_ID,
            group_id=grp.id)
        mock_volume_get_all.return_value = [vol]
        grp2 = utils.create_group(self.ctxt,
                                  group_type_id=fake.GROUP_TYPE_ID,
                                  volume_type_ids=[vol_type['id']],
                                  availability_zone='nova',
                                  source_group_id=grp.id,
                                  status=fields.GroupStatus.CREATING)
        vol2 = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=vol_type['id'],
            group_id=grp2.id,
            source_volid=vol.id)
        self.group_api._create_group_from_source_group(self.ctxt, grp2,
                                                       grp.id)
        mock_volume_api_create.assert_called_once_with(
            self.ctxt, 1, None, None,
            availability_zone=grp.availability_zone,
            source_group=grp,
            group=grp2,
            source_volume=vol,
            volume_type=vol_type)
        mock_rpc_create_group_from_src.assert_called_once_with(
            self.ctxt, grp2, None, grp)
        mock_update_volumes_host.assert_called_once_with(
            self.ctxt, grp2
        )
        vol2.destroy()
        grp2.destroy()
        vol.destroy()
        grp.destroy()
    @mock.patch('cinder.volume.api.API.delete')
    @mock.patch('cinder.objects.VolumeType.get_by_name_or_id')
    @mock.patch('cinder.db.group_volume_type_mapping_create')
    @mock.patch('cinder.volume.api.API.create')
    @mock.patch('cinder.objects.Group.get_by_id')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.create_group_from_src')
    @mock.patch('cinder.objects.VolumeList.get_all_by_generic_group')
    def test_create_group_from_group_create_volume_failed(
            self, mock_volume_get_all, mock_rpc_create_group_from_src,
            mock_group_get, mock_volume_api_create, mock_mapping_create,
            mock_get_volume_type, mock_volume_delete):
        """Second volume-create failure during group clone rolls back.

        The first create succeeds and the second raises; the exception must
        propagate, the RPC must not be cast, and the successfully created
        volume (vol1, returned by the second get_all call) must be deleted.
        """
        vol_type = fake_volume.fake_volume_type_obj(
            self.ctxt,
            id=fake.VOLUME_TYPE_ID,
            name='fake_volume_type')
        mock_get_volume_type.return_value = vol_type
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[vol_type['id']],
                                 availability_zone='nova',
                                 status=fields.GroupStatus.CREATING)
        mock_group_get.return_value = grp
        vol1 = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=fake.VOLUME_TYPE_ID,
            group_id=grp.id)
        vol2 = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=fake.VOLUME_TYPE_ID,
            group_id=grp.id)
        # First call lists the source volumes; second lists what was created.
        mock_volume_get_all.side_effect = [[vol1, vol2], [vol1]]
        grp2 = utils.create_group(self.ctxt,
                                  group_type_id=fake.GROUP_TYPE_ID,
                                  volume_type_ids=[vol_type['id']],
                                  availability_zone='nova',
                                  source_group_id=grp.id,
                                  status=fields.GroupStatus.CREATING)
        mock_volume_api_create.side_effect = [None, exception.CinderException]
        self.assertRaises(
            exception.CinderException,
            self.group_api._create_group_from_source_group,
            self.ctxt, grp2, grp.id)
        mock_rpc_create_group_from_src.assert_not_called()
        mock_volume_delete.assert_called_once_with(self.ctxt, vol1)
        grp2.destroy()
        vol2.destroy()
        vol1.destroy()
        grp.destroy()
    @mock.patch('cinder.group.api.API._create_group_from_group_snapshot')
    @mock.patch('cinder.group.api.API._create_group_from_source_group')
    @mock.patch('cinder.group.api.API.update_quota')
    @mock.patch('cinder.objects.GroupSnapshot.get_by_id')
    @mock.patch('cinder.objects.SnapshotList.get_all_for_group_snapshot')
    @mock.patch('cinder.scheduler.rpcapi.SchedulerAPI.validate_host_capacity')
    def test_create_from_src(self, mock_validate_host, mock_snap_get_all,
                             mock_group_snap_get, mock_update_quota,
                             mock_create_from_group,
                             mock_create_from_snap):
        """create_from_src with a group_snapshot_id delegates to the
        snapshot-based creation helper and returns the new group."""
        name = "test_group"
        description = "this is a test group"
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[fake.VOLUME_TYPE_ID],
                                 availability_zone='nova',
                                 name=name, description=description,
                                 status=fields.GroupStatus.AVAILABLE,)
        vol1 = utils.create_volume(
            self.ctxt,
            availability_zone='nova',
            volume_type_id=fake.VOLUME_TYPE_ID,
            group_id=grp.id)
        snap = utils.create_snapshot(self.ctxt, vol1.id,
                                     volume_type_id=fake.VOLUME_TYPE_ID,
                                     status=fields.SnapshotStatus.AVAILABLE)
        mock_snap_get_all.return_value = [snap]
        # NOTE(review): `return_host` looks like a typo for `return_value`;
        # the default MagicMock return is truthy, so the test still passes --
        # confirm the intent before changing it.
        mock_validate_host.return_host = True
        grp_snap = utils.create_group_snapshot(
            self.ctxt, grp.id,
            group_type_id=fake.GROUP_TYPE_ID,
            status=fields.GroupStatus.AVAILABLE)
        mock_group_snap_get.return_value = grp_snap
        grp2 = utils.create_group(self.ctxt,
                                  group_type_id=fake.GROUP_TYPE_ID,
                                  volume_type_ids=[fake.VOLUME_TYPE_ID],
                                  availability_zone='nova',
                                  name=name, description=description,
                                  status=fields.GroupStatus.CREATING,
                                  group_snapshot_id=grp_snap.id)
        with mock.patch('cinder.objects.Group') as mock_group:
            mock_group.return_value = grp2
            with mock.patch('cinder.objects.group.Group.create'):
                ret_group = self.group_api.create_from_src(
                    self.ctxt, name, description,
                    group_snapshot_id=grp_snap.id,
                    source_group_id=None)
                self.assertEqual(grp2.obj_to_primitive(),
                                 ret_group.obj_to_primitive())
                mock_create_from_snap.assert_called_once_with(
                    self.ctxt, grp2, grp_snap.id)
        snap.destroy()
        grp_snap.destroy()
        vol1.destroy()
        grp.destroy()
        grp2.destroy()
    @mock.patch('oslo_utils.timeutils.utcnow')
    @mock.patch('cinder.objects.GroupSnapshot')
    def test_reset_group_snapshot_status(self,
                                         mock_group_snapshot,
                                         mock_time_util):
        """reset_group_snapshot_status stamps updated_at and the new status,
        then saves the snapshot object."""
        mock_time_util.return_value = "time_now"
        self.group_api.reset_group_snapshot_status(
            self.ctxt, mock_group_snapshot, fields.GroupSnapshotStatus.ERROR)
        update_field = {'updated_at': "time_now",
                        'status': fields.GroupSnapshotStatus.ERROR}
        mock_group_snapshot.update.assert_called_once_with(update_field)
        mock_group_snapshot.save.assert_called_once_with()
    @mock.patch('cinder.scheduler.rpcapi.SchedulerAPI.validate_host_capacity')
    def test_create_group_from_src_frozen(self, mock_validate_host):
        """create_from_src is rejected when the source group's service is
        frozen."""
        service = utils.create_service(self.ctxt, {'frozen': True})
        group = utils.create_group(self.ctxt, host=service.host,
                                   group_type_id='gt')
        mock_validate_host.return_value = True
        group_api = cinder.group.api.API()
        self.assertRaises(exception.InvalidInput,
                          group_api.create_from_src,
                          self.ctxt, 'group', 'desc',
                          group_snapshot_id=None, source_group_id=group.id)
    @mock.patch('cinder.objects.volume.Volume.host',
                new_callable=mock.PropertyMock)
    @mock.patch('cinder.objects.volume.Volume.cluster_name',
                new_callable=mock.PropertyMock)
    @mock.patch('cinder.objects.VolumeList.get_all_by_generic_group')
    def test_update_volumes_host(self, mock_volume_get_all, mock_cluster_name,
                                 mock_host):
        """_update_volumes_host copies the group's host/cluster_name onto each
        member volume and saves it."""
        vol_type = utils.create_volume_type(self.ctxt, name='test_vol_type')
        grp = utils.create_group(self.ctxt, group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[vol_type['id']],
                                 availability_zone='nova',
                                 status=fields.GroupStatus.CREATING,
                                 cluster_name='fake_cluster')
        vol1 = utils.create_volume(
            self.ctxt,
            availability_zone=grp.availability_zone,
            volume_type_id=fake.VOLUME_TYPE_ID,
            group_id=grp.id)
        mock_volume = mock.Mock()
        mock_volume_get_all.return_value = [mock_volume]
        group_api = cinder.group.api.API()
        # Context is unused by the property copies, so None is acceptable here.
        group_api._update_volumes_host(None, grp)
        mock_cluster_name.assert_called()
        mock_host.assert_called()
        self.assertEqual(grp.host, mock_volume.host)
        self.assertEqual(grp.cluster_name, mock_volume.cluster_name)
        mock_volume.save.assert_called_once_with()
        vol1.destroy()
        grp.destroy()
    def test_delete_group_frozen(self):
        """Deleting a group on a frozen service raises InvalidInput."""
        service = utils.create_service(self.ctxt, {'frozen': True})
        group = utils.create_group(self.ctxt, host=service.host,
                                   group_type_id='gt')
        group_api = cinder.group.api.API()
        self.assertRaises(exception.InvalidInput,
                          group_api.delete, self.ctxt, group)
    def test_create_group_snapshot_frozen(self):
        """Snapshotting a group on a frozen service raises InvalidInput."""
        service = utils.create_service(self.ctxt, {'frozen': True})
        group = utils.create_group(self.ctxt, host=service.host,
                                   group_type_id='gt')
        group_api = cinder.group.api.API()
        self.assertRaises(exception.InvalidInput,
                          group_api.create_group_snapshot,
                          self.ctxt, group, 'group_snapshot', 'desc')
    def test_delete_group_snapshot_frozen(self):
        """Deleting a group snapshot on a frozen service raises
        InvalidInput."""
        service = utils.create_service(self.ctxt, {'frozen': True})
        group = utils.create_group(self.ctxt, host=service.host,
                                   group_type_id='gt')
        gsnap = utils.create_group_snapshot(self.ctxt, group.id)
        group_api = cinder.group.api.API()
        self.assertRaises(exception.InvalidInput,
                          group_api.delete_group_snapshot,
                          self.ctxt, gsnap)
    @mock.patch('cinder.volume.volume_types.get_volume_type_qos_specs',
                return_value={'qos_specs': {}})
    @mock.patch('cinder.scheduler.rpcapi.SchedulerAPI.create_group')
    def test_cast_create_group(self,
                               mock_create_group,
                               mock_get_volume_type_qos_specs):
        """_cast_create_group enriches both the per-volume request specs and
        the group spec with volume_properties/qos_specs before casting the
        scheduler create_group RPC."""
        vol_type = utils.create_volume_type(self.ctxt, name='test_vol_type')
        encryption_key_id = mock.sentinel.encryption_key_id
        description = mock.sentinel.description
        name = mock.sentinel.name
        req_spec = {'volume_type': vol_type,
                    'encryption_key_id': encryption_key_id,
                    'description': description,
                    'name': name}
        grp_name = "test_group"
        grp_description = "this is a test group"
        grp_spec = {'name': grp_name,
                    'description': grp_description}
        grp = utils.create_group(self.ctxt,
                                 group_type_id=fake.GROUP_TYPE_ID,
                                 volume_type_ids=[vol_type.id],
                                 availability_zone='nova')
        grp_filter_properties = mock.sentinel.group_filter_properties
        filter_properties_list = mock.sentinel.filter_properties_list
        self.group_api._cast_create_group(self.ctxt,
                                          grp,
                                          grp_spec,
                                          [req_spec],
                                          grp_filter_properties,
                                          filter_properties_list)
        mock_get_volume_type_qos_specs.assert_called_once_with(vol_type.id)
        # Expected enrichment of the per-volume request spec.
        exp_vol_properties = {
            'size': 0,
            'user_id': self.ctxt.user_id,
            'project_id': self.ctxt.project_id,
            'status': 'creating',
            'attach_status': 'detached',
            'encryption_key_id': encryption_key_id,
            'display_description': description,
            'display_name': name,
            'volume_type_id': vol_type.id,
            'group_type_id': grp.group_type_id,
            'availability_zone': grp.availability_zone
        }
        exp_req_spec = {
            'volume_type': vol_type,
            'encryption_key_id': encryption_key_id,
            'description': description,
            'name': name,
            'volume_properties': exp_vol_properties,
            'qos_specs': None
        }
        # Expected enrichment of the group spec.
        exp_grp_properties = {
            'size': 0,
            'user_id': self.ctxt.user_id,
            'project_id': self.ctxt.project_id,
            'status': 'creating',
            'display_description': grp_description,
            'display_name': grp_name,
            'group_type_id': grp.group_type_id,
        }
        exp_grp_spec = {
            'name': grp_name,
            'description': grp_description,
            'volume_properties': exp_grp_properties,
            'qos_specs': None
        }
        mock_create_group.assert_called_once_with(
            self.ctxt,
            grp,
            group_spec=exp_grp_spec,
            request_spec_list=[exp_req_spec],
            group_filter_properties=grp_filter_properties,
            filter_properties_list=filter_properties_list)
|
{
"content_hash": "e5fed07f1055ef654be67ca75c268f77",
"timestamp": "",
"source": "github",
"line_count": 930,
"max_line_length": 79,
"avg_line_length": 46.2247311827957,
"alnum_prop": 0.5517225336714043,
"repo_name": "openstack/cinder",
"id": "10dbd4f3414222b0e4cb11bc701f0dfc97c28f35",
"size": "43625",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/group/test_groups_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "259"
},
{
"name": "Mako",
"bytes": "976"
},
{
"name": "Python",
"bytes": "25078349"
},
{
"name": "Shell",
"bytes": "6456"
},
{
"name": "Smarty",
"bytes": "67595"
}
],
"symlink_target": ""
}
|
import sys
import os
import time
import random
import tempfile
import argparse
import Menu
class PopQ(object):
    """Terminal pop-quiz: loads question files and runs interactive rounds.

    Questions are (question, answer) pairs read from files given on the
    command line with ``-q``; a resource file tracks last-use time.
    """

    # Fallback location for the resource file when -r is not given.
    resource_prefix = '/tmp/'
    temp_file = "popresource.txt"

    def _create_nonexisting_resourcefile(self, resource_file_path):
        """Touch ``resource_file_path`` if it does not exist yet.

        BUG FIX: the file was previously opened in the default read mode,
        which raises FileNotFoundError for a missing file; append mode
        creates it instead.
        """
        if not os.path.isfile(resource_file_path):
            open(resource_file_path, 'a').close()

    def _configure_popq(self, args):
        """Store parsed CLI options: question files and resource-file path."""
        self.question_files = args.question_files
        if args.resource_file:
            self.resource_file_path = args.resource_file
        else:
            self.resource_file_path = self.resource_prefix + self.temp_file

    def reset(self, num_correct):
        """Reset round bookkeeping; ``num_correct`` is questions per round."""
        self._correct = num_correct
        self.questions = list()
        self.checklist = list()
        self.fname = '/'.join([tempfile.gettempdir(), self.temp_file])  # Touch a file for first use
        self.n_questions = 0
        self.total = 1

    def read_questions(self, question_files):
        """Load and concatenate questions from ``question_files``.

        BUG FIX: the parameter was previously ignored in favor of
        ``self.question_files``; it is honored now (callers already passed
        the same value, so behavior is unchanged).

        Returns a list of (question, answer) pairs.
        """
        questions = []
        for filepath in question_files:
            with open(filepath) as fp:
                print("Opening: " + filepath)
                fp_code = fp.read()
                if fp_code:
                    # SECURITY NOTE: eval() executes arbitrary code from the
                    # question file -- only load trusted files.
                    content = eval(fp_code)
                    print("Loaded :", len(content), " questions")
                    questions += content
                else:
                    print("Error while importing %s" % filepath)
        try:
            time.ctime(os.path.getmtime(self.fname))
        except OSError:
            # BUG FIX: `file(...)` is a Python 2 builtin removed in Python 3;
            # `open(...)` performs the same touch before os.utime.
            with open(self.fname, 'a'):
                os.utime(self.fname, None)
            print("Tempfile stored in %s" % self.fname)
        lasttime = time.ctime(os.path.getmtime(self.fname))
        print("last modified: %s" % lasttime)
        return questions

    def __init__(self, num_correct=10):
        """Parse CLI args, prepare the resource file, and load questions."""
        self.reset(num_correct)
        parser = argparse.ArgumentParser(description='A terminal pop-question memory test')
        parser.add_argument('-q', '--question_files', nargs='+', required=True, type=str)
        parser.add_argument('-r', '--resource_file', type=str)
        self._configure_popq(parser.parse_args())
        print("Resource file: " + self.resource_file_path)
        self._create_nonexisting_resourcefile(self.resource_file_path)
        self.questions = self.read_questions(self.question_files)
        self.n_questions = len(self.questions)

    def new_questionnaire(self):
        """Start a new questionnaire round"""
        self.correct = min(self._correct, self.n_questions)
        self.total = self.correct
        for _ in range(0, min(self._correct, self.n_questions)):
            try:
                # Draw an index that has not been asked yet this session.
                rand_num = int(random.uniform(0, len(self.questions)))
                while rand_num in self.checklist:
                    rand_num = int(random.uniform(0, len(self.questions)))
                self.checklist.append(rand_num)
                randq = self.questions[rand_num]
                print(randq[0])
                ans = input("> ")
                if ans.lower() != randq[1].lower():
                    print("The answer is: %s" % randq[1])
                    self.correct -= 1
            except (KeyboardInterrupt, EOFError):
                # User aborted the round: clear the screen and quit.
                os.system("clear")
                exit(1)

    def __str__(self):
        """Presented score string from latest round"""
        # ROBUSTNESS: avoid ZeroDivisionError when no questions were loaded.
        if not self.total:
            return "Score: 0.00% [0/0]"
        return "Score: %1.2f%s [%d/%d]" % (float(self.correct) / self.total * 100.,
                                           '%', self.correct, self.total)

    def print_score(self):
        """Print the score from the latest round"""
        os.system("clear")
        print(self.__str__())
if __name__ == "__main__":
    # Run one interactive questionnaire round and report the score.
    Q = PopQ()
    Q.new_questionnaire()
    Q.print_score()
|
{
"content_hash": "8072fd3f39d3e1816d72b24cb8e57a95",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 99,
"avg_line_length": 32.80869565217391,
"alnum_prop": 0.5534057778955738,
"repo_name": "ulvgard/PopQ",
"id": "154ea613bce78464656f9dc3184c5850ec9236d8",
"size": "3796",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4065"
}
],
"symlink_target": ""
}
|
from webev.response import Response
class View(object):
    """Base class-based view that dispatches a WSGI request to a handler
    method named after the HTTP verb (``get``, ``post``, ...)."""

    # HTTP verbs a subclass may implement as same-named methods.
    http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace']

    def __init__(self, **kwargs):
        """Attach every keyword argument as an instance attribute."""
        for name in kwargs:
            setattr(self, name, kwargs[name])

    def dispatch(self, request, start_response, **kwargs):
        """Yield the response body produced by the verb-matching handler."""
        verb = request['REQUEST_METHOD'].lower()
        if verb not in self.http_method_names:
            raise AttributeError("Method not allowed")
        handler = getattr(self, verb)
        response = Response(**kwargs)
        start_response(response.status, response.headers)
        yield str(handler(request))
|
{
"content_hash": "702a4245a4c6774b2ccb99fe0a2c84aa",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 93,
"avg_line_length": 35.1578947368421,
"alnum_prop": 0.6107784431137725,
"repo_name": "Korkki/web.ev",
"id": "fbb25644fbaf9aa08ae28df555b71895d8beb4c1",
"size": "668",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "webev/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "122"
},
{
"name": "Python",
"bytes": "4658"
}
],
"symlink_target": ""
}
|
import getopt
import io
import pdb
import sys
from atom import TRUE, FALSE
from env import Environment
from error import Error
from fun import Function, Lambda, Closure
from lisp import Lisp
from reader import Reader
NAME = 'eLisp'
VERSION = 'v0.1'
PROMPT = '>>> '       # base REPL prompt
DEPTH_MARK = '.'      # repeated in the prompt to show paren-nesting depth
class Elisp(Lisp):
    """Interactive eLisp interpreter: wires core functions into a global
    environment and provides a REPL plus file processing."""

    def __init__(self):
        # I/O streams are attributes so process_files can swap stdin.
        self.stdin = sys.stdin
        self.stdout = sys.stdout
        self.stderr = sys.stderr

        self.debug = False
        self.verbose = True
        self.core = True      # load core.el at startup
        self.closure = True   # lambdas outside the global env become closures

        self.reader = Reader()
        self.env = Environment()

        self.init()

    def init(self):
        """Bind built-in functions, special forms and meta-bindings into the
        global environment."""
        # core functions
        self.env.set('eq', Function(self.eq))
        self.env.set('quote', Function(self.quote))
        self.env.set('car', Function(self.car))
        self.env.set('cdr', Function(self.cdr))
        self.env.set('cons', Function(self.cons))
        self.env.set('atom', Function(self.atom))
        self.env.set('cond', Function(self.cond))

        # utility functions
        self.env.set('print', Function(self.println))

        # special forms
        self.env.set('lambda', Function(self.lambda_fun))
        self.env.set('label', Function(self.label))

        # meta-elements
        self.env.set('__elisp__', self)
        self.env.set('__global__', self.env)

    def lambda_fun(self, env, args):
        """Build a Closure (captures env) outside the global environment when
        closures are enabled; otherwise a plain Lambda."""
        if self.env != env.get('__global__') and self.closure:
            return Closure(env, args[0], args[1:])
        else:
            return Lambda(args[0], args[1:])

    def usage(self):
        """Print banner plus a one-line usage string."""
        self.print_banner()
        print ('%s <options> [elisp files]\n' % NAME.lower())

    def print_banner(self):
        """Print the interpreter name/version banner."""
        print ('The %s programming shell %s' % (NAME, VERSION))
        print ('	Type `help` for more information\n')

    def print_help(self):
        """Print the REPL's built-in command summary."""
        print ('Help for eLisp %s' % VERSION)
        print ('	Type `help` for more information')
        print ('	Type `env` to see the bindings in current environment')
        print ('	Type `load` followed by one or more filenames to load source files')
        print ('	Type `quit` to exit the interpreter')

    def push(self, env=None):
        """Push a new (optionally pre-populated) scope onto the environment."""
        if env:
            self.env = self.env.push(env)
        else:
            self.env = self.env.push()

    def pop(self):
        """Pop the innermost environment scope."""
        self.env = self.env.pop()

    def repl(self):
        """Read-eval-print loop; handles the quit/help/load/env commands."""
        while True:
            # Read a full (paren-balanced) command
            source = self.get_complete_command()

            # check for any REPL directives
            try:
                if source in ['quit']:
                    break
                elif source in ['help']:
                    self.print_help()
                elif source.startswith('load'):
                    files = source.split(' ')[1:]
                    self.process_files(files)
                elif source in ['env']:
                    print (self.env)
                else:
                    self.process(source)
            except AttributeError:
                print ('Could not process command: ', source)
                return

    def process(self, source):
        """Evaluate every s-expression found in ``source``, printing results
        when verbose."""
        sexpr = self.reader.get_sexpr(source)

        while sexpr:
            result = None
            try:
                result = self.eval(sexpr)
            except Error as err:
                print (err)

            if self.verbose:
                self.stdout.write('    %s\n' % result)

            sexpr = self.reader.get_sexpr()

    def eval(self, sexpr):
        """Evaluate a single s-expression in the current environment; bad
        values evaluate to FALSE."""
        try:
            return sexpr.eval(self.env)
        except ValueError as err:
            print (err)
            return FALSE

    def get_complete_command(self, line="", depth=0):
        """Keep reading lines (recursively) until parens balance; the prompt
        reflects environment level and nesting depth."""
        if line != '':
            line = line + ' '

        if self.env.level != 0:
            prompt = PROMPT + '%i%s ' % (self.env.level, DEPTH_MARK * (depth + 1))
        else:
            if depth == 0:
                prompt = PROMPT
            else:
                prompt = PROMPT + "%s " % (DEPTH_MARK * (depth + 1))

        line = line + self.read_line(prompt)

        # Cheap way to detect all parens are closed.
        balance = 0
        for c in line:
            if c == '(':
                balance += 1
            elif c == ')':
                balance -= 1
        if balance > 0:
            return self.get_complete_command(line, depth + 1)
        elif balance < 0:
            raise ValueError('Invalid paren pattern')
        else:
            return line

    def read_line(self, prompt):
        """Read one line from the current stdin; returns 'EOF' at end of
        input, with the trailing newline stripped otherwise."""
        if prompt and self.verbose:
            self.stdout.write('%s' % prompt)
            self.stdout.flush()

        line = self.stdin.readline()

        if len(line) == 0:
            return 'EOF'

        if line [-1] == '\n':
            line = line[:-1]

        return line

    def process_files(self, files):
        """Batch-evaluate each file by temporarily redirecting stdin to it;
        verbosity is suppressed for the duration."""
        self.verbose = False

        for filename in files:
            infile = open(filename, 'r')
            self.stdin = infile

            source = self.get_complete_command()
            while source not in ['EOF']:
                self.process(source)
                source = self.get_complete_command()

            infile.close()
        self.stdin = sys.stdin

        self.verbose = True
if __name__ == '__main__':
    # CLI entry point: parse options, optionally preload core.el and any
    # source files, then drop into the REPL.
    elisp = Elisp()

    try:
        opts, files = getopt.getopt(sys.argv[1:],
                                    'hd', ['help', 'debug', 'no-core', 'no-closures'])
    except getopt.GetoptError as err:
        print (str(err))
        elisp.usage()
        sys.exit(1)

    for opt, arg in opts:
        if opt in ('--help', '-h'):
            elisp.usage()
            sys.exit(1)
        elif opt in ('--debug', '-d'):
            # NOTE(review): historically this toggled `verbose` (already True
            # by default) and `elisp.debug` is never set -- confirm whether
            # the flag should enable debug instead.
            elisp.verbose = True
        elif opt in ('--no-core',):
            # BUG FIX: `opt in ('--no-core')` was a substring test against a
            # plain string (the parentheses did not make a tuple); the
            # one-element tuple restores the intended membership test.
            elisp.core = False
        elif opt in ('--no-closures',):
            # BUG FIX: same one-element-tuple fix as --no-core above.
            elisp.closure = False
        else:
            print ('unknown option ' + opt)

    if elisp.core:
        elisp.process_files(['core.el'])
    if len(files) > 0:
        elisp.process_files(files)

    elisp.print_banner()
    elisp.repl()
|
{
"content_hash": "b66af589cfd8effea785e36c267e830d",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 88,
"avg_line_length": 28.206572769953052,
"alnum_prop": 0.49816910785619173,
"repo_name": "ASMlover/study",
"id": "286cd7d83bb31d330f0df7928f17d12d9cf40fd1",
"size": "7404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compiler/eLisp2/eLisp/elisp.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "3055440"
},
{
"name": "Batchfile",
"bytes": "4662"
},
{
"name": "Brainfuck",
"bytes": "571"
},
{
"name": "C",
"bytes": "13569580"
},
{
"name": "C#",
"bytes": "3959"
},
{
"name": "C++",
"bytes": "14741264"
},
{
"name": "CMake",
"bytes": "543917"
},
{
"name": "CSS",
"bytes": "11505"
},
{
"name": "Common Lisp",
"bytes": "114"
},
{
"name": "Emacs Lisp",
"bytes": "6042"
},
{
"name": "Go",
"bytes": "105203"
},
{
"name": "Groovy",
"bytes": "2907"
},
{
"name": "HTML",
"bytes": "911945"
},
{
"name": "Lex",
"bytes": "9370"
},
{
"name": "Lua",
"bytes": "32829"
},
{
"name": "Makefile",
"bytes": "1000611"
},
{
"name": "NASL",
"bytes": "3609"
},
{
"name": "NewLisp",
"bytes": "5805"
},
{
"name": "Perl",
"bytes": "594"
},
{
"name": "Python",
"bytes": "2752752"
},
{
"name": "SWIG",
"bytes": "91"
},
{
"name": "Shell",
"bytes": "9993"
},
{
"name": "Vim script",
"bytes": "92204"
},
{
"name": "Yacc",
"bytes": "6278"
}
],
"symlink_target": ""
}
|
"""
Utility module containing methods commonly used throughout the Python framework.
"""
# NOTE we still import these from __future__ here because many machines still running
# python 2.X need to use this file (for example the plugin installer)
from __future__ import division, print_function, absolute_import
# *************************** NOTE FOR DEVELOPERS ***************************
# Do not import numpy or scipy or other libraries that are not *
# built into python. Otherwise the import can fail, and since utils *
# are used by --library-report, this can cause diagnostic messages to fail. *
# ***************************************************************************
import bisect
import sys
import os
import glob
import errno
import shutil
import inspect
import subprocess
import platform
from importlib import import_module
# import numpy # DO NOT import! See note above.
# import six # DO NOT import! see note above.
from difflib import SequenceMatcher
class Object(object):
  """
    Minimal base class kept available for simple custom inheritance.
  """
  pass
#custom errors
class NoMoreSamplesNeeded(GeneratorExit):
  """
    Custom RAVEN error (a GeneratorExit subclass) available for use in the
    framework.
  """
  pass
class byPass(object):
  """
    Dummy stand-in class emulating the "dataObject" interface
    (a ``name`` attribute plus a no-op ``resetData``).
  """
  def __init__(self):
    # Mirrors the dataObject attribute that callers read.
    self.name = ""

  def resetData(self):
    """
      Dummy method that emulates the "dataObject" resetData method.
      @ In, None
      @ Out, None
    """
    pass
class StringPartialFormatDict(dict):
  """
    dict subclass that enables partially formatting a template string:
    missing keys are re-emitted as '{key}' placeholders instead of raising.
    See https://stackoverflow.com/questions/17215400/python-format-string-unused-named-arguments
    Use as '{a} {b} {a}'.format_map(StringPartialFormatDict(a='one')) -> 'one {b} one'
  """
  def __missing__(self, key):
    """
      Turns missing keys back into formatting entries. May not work for any
      formats like {b:1.3e}.
      @ In, key, str, formatting string key (the friend between the braces)
      @ Out, key, str, re-formatted string
    """
    return ''.join(['{', key, '}'])
def partialFormat(msg, info):
  """
    Automates the partial formatting of a string (msg) with a format dictionary (info).
    Example: '{a} {b} {c}'.partialFormat({b:'two'}) -> '{a} two {c}'
    Note formatting is lost or may cause errors; that is,
    Example: '{a:3s} {b:2d} {c:3s}'.partialFormat({b=2}) -> '{a} 2 {c}'
    @ In, msg, string, string to partially format
    @ In, info, dict, keywords to apply
    @ Out, partialFormat, string, partially-formatted string (unmatched keys kept as placeholders)
  """
  return msg.format_map(StringPartialFormatDict(**info))
# ID separator that should be used cross the code when combined ids need to be assembled.
# For example, when the "EnsembleModel" creates new ``prefix`` ids for sub-models
__idSeparator = "++"
def identifyIfExternalModelExists(caller, moduleIn, workingDir):
  """
    Method to check if a external module exists and in case return the module that needs to be loaded with
    the correct path
    @ In, caller, object, the RAVEN caller (i.e. self); must provide raiseAWarning/raiseAnError
    @ In, moduleIn, string, module read from the XML file
    @ In, workingDir, string, the path of the working directory
    @ Out, (moduleToLoad, fileName), tuple, a tuple containing the module to load (that should be used in method importFromPath) and the filename (no path)
  """
  # strip a trailing ".py" so the remainder can be treated as a module path
  if moduleIn.endswith('.py'):
    moduleToLoadString = moduleIn[:-3]
  else:
    moduleToLoadString = moduleIn
  # first preference: resolve the module relative to the working directory
  workingDirModule = os.path.abspath(os.path.join(workingDir,moduleToLoadString))
  if os.path.exists(workingDirModule+".py"):
    moduleToLoadString = workingDirModule
    path, filename = os.path.split(workingDirModule)
    # make the module's directory importable
    os.sys.path.append(os.path.abspath(path))
  else:
    # fall back to interpreting the input as its own (possibly absolute) path
    path, filename = os.path.split(moduleToLoadString)
    if (path != ''):
      abspath = os.path.abspath(path)
      # expand '~' to the user home directory if present
      if '~' in abspath:
        abspath = os.path.expanduser(abspath)
      if os.path.exists(abspath):
        # module found outside workingDir: warn that relative paths are preferred
        caller.raiseAWarning('file '+moduleToLoadString+' should be relative to working directory. Working directory: '+workingDir+' Module expected at '+abspath)
        os.sys.path.append(abspath)
      else:
        caller.raiseAnError(IOError,'The path provided for the' + caller.type + ' named '+ caller.name +' does not exist!!! Got: ' + abspath + ' and ' + workingDirModule)
  return moduleToLoadString, filename
def checkIfUnknowElementsinList(referenceList,listToTest):
  """
    Method to check if a list contains elements not contained in another
    @ In, referenceList, list, reference list
    @ In, listToTest, list, list to test
    @ Out, unknownElements, list, list of elements of 'listToTest' not contained in 'referenceList'
  """
  unknownElements = [entry for entry in listToTest if entry not in referenceList]
  return unknownElements
def checkIfPathAreAccessedByAnotherProgram(pathname, timelapse = 10.0):
  """
    Method to check if a path (file or directory) was modified very recently,
    which is used here as a heuristic for "currently used by another program".
    Probably there is a better way.
    @ In, pathname, string, the full path to check
    @ In, timelapse, float, optional, tolerance (seconds) on the modification time
    @ Out, boolReturn, bool, True if it is used by another program, False otherwise
  """
  import stat
  import time
  mode = os.stat(pathname).st_mode
  if not (stat.S_ISREG(mode) or stat.S_ISDIR(mode)):
    raise Exception(UreturnPrintTag('UTILITIES')+': ' +UreturnPrintPostTag('ERROR') + '-> path '+pathname+ ' is neither a file nor a dir!')
  secondsSinceModification = abs(os.stat(pathname).st_mtime - time.time())
  boolReturn = secondsSinceModification < timelapse
  return boolReturn
def checkIfLockedRavenFileIsPresent(pathName,fileName="ravenLockedKey.raven"):
  """
    Method to check if a path (directory) contains a hidden raven lock file;
    if absent, the (empty) lock file is created.
    @ In, pathName, string, string containing the path
    @ In, fileName, string, optional, string containing the file name
    @ Out, filePresent, bool, True if it is present, False otherwise
  """
  lockFile = os.path.join(pathName, fileName)
  filePresent = os.path.isfile(lockFile)
  if not filePresent:
    # create the empty lock file; a context manager closes the handle
    # immediately instead of leaking it until garbage collection
    with open(lockFile, 'w'):
      pass
  return filePresent
def removeFile(pathAndFileName):
  """
    Method to remove a file, silently doing nothing if it does not exist
    @ In, pathAndFileName, string, string containing the path and filename
    @ Out, None
  """
  fileExists = os.path.isfile(pathAndFileName)
  if fileExists:
    os.remove(pathAndFileName)
def removeDir(strPath):
"""
Method to remove a directory.
@ In, strPath, string, path to directory to remove
@ Out, None
"""
path = os.path.abspath(os.path.expanduser(strPath))
shutil.rmtree(path, onerror=_removeDirErrorHandler)
def _removeDirErrorHandler(func, path, excinfo):
"""
Handles errors arising from using shutil.rmtree
Argument descriptions from shutil documentation
@ In, func, is the function which raised the exception; it depends on the platform and
implementation
@ In, path, will be the path name passed to function
@ In, excinfo, will be the exception information returned by sys.exc_info()
@ Out, None
"""
print('utils.removeDir WARNING: unable to remove {path} using {func}, ' +
'raising the following exception: {excinfo}. Continuing ...'
.format(path=path, func=func, excinfo=excinfo))
def returnImportModuleString(obj,moduleOnly=False):
  """
    Method to return a list of strings that represent the
    modules on which the 'obj' depends on. It already implements
    the 'import' statement or the 'from x import y'
    @ In, obj, instance, the object that needs to be inquired
    @ In, moduleOnly, bool, optional, get the modules only (True) or also the function dependencies(False)
    @ Out, mods, list, list of string containing the modules
  """
  mods = []
  for key, value in dict(inspect.getmembers(obj)).items():
    # filter members: modules only, or modules plus bound methods
    if moduleOnly:
      if not inspect.ismodule(value):
        continue
    else:
      if not (inspect.ismodule(value) or inspect.ismethod(value)):
        continue
    if key != value.__name__:
      # the member is bound under a name different from its own __name__
      if value.__name__.split(".")[-1] != key:
        # aliased import, e.g. "import numpy as np"
        mods.append(str('import ' + value.__name__ + ' as '+ key))
      else:
        # submodule imported by its own name, e.g. "from os import path"
        mods.append(str('from ' + '.'.join(value.__name__.split(".")[:-1]) + ' import '+ key))
    else:
      # name matches: a plain "import key" suffices
      mods.append(str(key))
  return mods
def getPrintTagLenght():
  """
    Method to return the length of the strings used for Screen output
    @ In, None,
    @ Out, tagLenght, int, the default tag length
  """
  return 25

def UreturnPrintTag(intag):
  """
    Pads/truncates a string to exactly getPrintTagLenght() characters
    (generally used for pre tag)
    @ In, intag, string, string that needs to be formatted
    @ Out, returnString, string, the formatted string
  """
  width = getPrintTagLenght()
  returnString = intag.ljust(width)[:width]
  return returnString

def UreturnPrintPostTag(intag):
  """
    Pads/truncates a string to exactly getPrintTagLenght() - 15 characters
    (generally used for post tag)
    @ In, intag, string, string that needs to be formatted
    @ Out, returnString, string, the formatted string
  """
  width = getPrintTagLenght() - 15
  returnString = intag.ljust(width)[:width]
  return returnString
def convertMultipleToBytes(sizeString):
  """
    Convert multiple (e.g. Mbytes, Gbytes, Kbytes) in bytes
    International system type (e.g., 1 Mb = 10^6)
    @ In, sizeString, string, string that needs to be converted in bytes
    @ Out, convertMultipleToBytes, integer, the number of bytes
  """
  # normalize the case so that e.g. 'Mb', 'MB' and 'mb' are all accepted, as
  # advertised by the error message below (previously only lowercase matched)
  sizeString = sizeString.lower()
  multipliers = {'gb': 10**9, 'mb': 10**6, 'kb': 10**3}
  for suffix, factor in multipliers.items():
    if suffix in sizeString:
      return int(sizeString.replace(suffix, "")) * factor
  try:
    # no recognized suffix: interpret the whole string as a byte count
    return int(sizeString)
  except ValueError:
    raise IOError(UreturnPrintTag('UTILITIES')+': ' +UreturnPrintPostTag('ERROR') + '-> can not understand how to convert expression '+str(sizeString)+' to number of bytes. Accepted Mb,Gb,Kb (no case sentive)!')
# I don't think there's a reason to make this an enum, but it could be done.
trueThingsFull = ('True', 'Yes', '1')
trueThings = tuple(x[0].lower() for x in trueThingsFull)
def stringIsTrue(s):
  """
    Determines if provided entity corresponds to a truth statement
    @ In, s, string or castable, entity to check
    @ Out, stringIsTrue, bool, True if string is recognized by RAVEN as evaluating to True
  """
  # everything in python can be cast to a string
  candidate = str(s).strip().lower()
  return candidate.startswith(trueThings)
# I don't think there's a reason to make this an enum, but it could be done.
falseThingsFull = ('False', 'No', '0')
falseThings = tuple(x[0].lower() for x in falseThingsFull)
def stringIsFalse(s):
  """
    Determines if provided entity corresponds to a falsehood statement
    @ In, s, string or castable, entity to check
    @ Out, stringIsFalse, bool, False if string is recognized by RAVEN as evaluating to False
  """
  # everything in python can be cast to a string
  candidate = str(s).strip().lower()
  return candidate.startswith(falseThings)
boolThingsFull = tuple(list(trueThingsFull)+list(falseThingsFull))
def stringsThatMeanSilent():
  """
    Return list of strings that indicate a verbosity of the lowest level (just errors). You linguists add what you wish
    @ In, None
    @ Out, listOfStrings, list, list of strings that mean Silent in RAVEN
  """
  listOfStrings = ['0', 'silent', 'false', 'f', 'n', 'no', 'none']
  return listOfStrings
def stringsThatMeanPartiallyVerbose():
  """
    Return list of strings that indicate a verbosity of the medium level (errors and warnings). You linguists add what you wish.
    @ In, None
    @ Out, listOfStrings, list, list of strings that mean Quiet in RAVEN
  """
  listOfStrings = ['1', 'quiet', 'some']
  return listOfStrings
def stringsThatMeanVerbose():
  """
    Return list of strings that indicate full verbosity (errors, warnings, messages). You linguists add what you wish.
    @ In, None
    @ Out, listOfStrings, list, list of strings that mean Full Verbosity in RAVEN
  """
  listOfStrings = ['2', 'loud', 'true', 't', 'y', 'yes', 'all']
  return listOfStrings
def interpretBoolean(inArg):
  """
    Utility method to convert an inArg into a boolean.
    The inArg can be a boolean, an integer, or a string
    @ In, inArg, object, object to convert
    @ Out, interpretedObject, bool, the interpreted boolean
  """
  # bool must be tested before int, since bool is a subclass of int
  if isinstance(inArg, bool):
    return inArg
  # NOTE: this branch previously compared type(inArg).__name__ against the
  # string "integer", which can never match (the real name is "int"), so all
  # integer inputs erroneously fell through to the "type unknown" error below.
  elif isinstance(inArg, int):
    return inArg != 0
  elif isinstance(inArg, (str, bytes)):
    if stringIsTrue(inArg):
      return True
    elif stringIsFalse(inArg):
      return False
    else:
      raise Exception(UreturnPrintTag('UTILITIES')+': ' +UreturnPrintPostTag("ERROR") + '-> can not convert string to boolean in method interpretBoolean!!!!')
  else:
    raise Exception(UreturnPrintTag('UTILITIES')+': ' +UreturnPrintPostTag("ERROR") + '-> type unknown in method interpretBoolean. Got' + type(inArg).__name__)
def isClose(f1, f2, relTolerance=1e-14, absTolerance=0.0):
  """
    Method to compare two floats
    @ In, f1, float, first float
    @ In, f2, float, second float
    @ In, relTolerance, float, optional, relative tolerance
    @ In, absTolerance, float, optional, absolute tolerance
    @ Out, isClose, bool, is it close enough?
  """
  scale = max(abs(f1), abs(f2))
  threshold = max(relTolerance * scale, absTolerance)
  return abs(f1 - f2) <= threshold
def compare(s1,s2,relTolerance = 1e-14):
  """
    Method aimed to compare two strings. This method tries to convert the 2
    strings in float and uses an integer representation to compare them.
    In case the conversion is not possible (string or only one of the strings is
    convertable), the method compares strings as they are.
    @ In, s1, string, first string to be compared
    @ In, s2, string, second string to be compared
    @ In, relTolerance, float, optional, relative tolerance
    @ Out, response, bool, the boolean response (True if s1==s2, False otherwise)
  """
  # attempt float conversion of both operands (None when not convertible)
  w1, w2 = floatConversion(s1), floatConversion(s2)
  if type(w1) == type(w2) and type(w1) != float:
    # neither converted: plain equality on the original values
    return s1 == s2
  elif type(w1) == type(w2) and type(w1) == float:
    # both converted: tolerant float comparison
    from . import mathUtils
    return mathUtils.compareFloats(w1,w2,relTolerance)
  elif type(w1) != type(w2) and type(w1) in [float,int] and type(w2) in [float,int]:
    # mixed numeric types: normalize both to float and recompare
    w1, w2 = float(w1), float(w2)
    return compare(w1,w2)
  else:
    # only one operand converted: fall back to direct comparison
    return (w1 == w2)
def intConversion (s):
  """
    Method aimed to cast a string as integer. If the conversion is not possible,
    it returns None
    @ In, s, string, string to be converted
    @ Out, response, int or None, the casted value
  """
  try:
    response = int(s)
  except (ValueError, TypeError):
    response = None
  return response
def floatConversion (s):
  """
    Method aimed to cast a string as float. If the conversion is not possible,
    it returns None
    @ In, s, string, string to be converted
    @ Out, response, float or None, the casted value
  """
  try:
    response = float(s)
  except (ValueError, TypeError):
    response = None
  return response
def partialEval(s):
  """
    Method aimed to evaluate a string as float or integer.
    If neither a float nor an integer can be cast, return
    the un-cast string
    @ In, s, string, string to be converted
    @ Out, response, float or int or string, the casted value
  """
  # prefer the int interpretation, then float, then give the input back
  for converter in (intConversion, floatConversion):
    converted = converter(s)
    if converted is not None:
      return converted
  return s
def toString(s):
  """
    Method aimed to convert an item into type str
    @ In, s, string or bytes, item to be converted
    @ Out, response, string, the casted value
  """
  # exact type check (not isinstance) preserves historical behavior
  if type(s) is str:
    return s
  return s.decode()
def toBytes(s):
  """
    Method aimed to convert a string into type bytes
    @ In, s, string, string to be converted
    @ Out, response, bytes, the casted value
  """
  # exact type check (not isinstance) preserves historical behavior
  if type(s) is str:
    return s.encode()
  if type(s).__name__ in ('unicode', 'str', 'bytes'):
    return bytes(s)
  # non-string inputs pass through unchanged
  return s
def toBytesIterative(s):
  """
    Method aimed to convert all the string-compatible content of
    an object (dict, list, or string) into type bytes (recursively calls toBytes(s))
    @ In, s, object, object whose content needs to be converteded
    @ Out, response, object, a copy of the object in which the string-compatible content has been converted
  """
  if type(s) is list:
    return [toBytes(element) for element in s]
  if type(s) is dict:
    # empty dicts are mapped to None (historical behavior)
    if not s:
      return None
    return {toBytes(key): toBytesIterative(value) for key, value in s.items()}
  return toBytes(s)
def toStrish(s):
  """
    Method aimed to convert an object into a str-like value
    @ In, s, object, item to be converted
    @ Out, response, str or bytes, the casted value
  """
  # str and bytes pass through untouched; everything else is stringified
  if type(s) is str or type(s) is bytes:
    return s
  return str(s)
def keyIn(dictionary,key):
  """
    Method that returns the key (or its toBytes version) if present in the dictionary, else None.
    Use like
      inKey = keyIn(adict,key)
      if inKey is not None:
        foo = adict[inKey]
      else:
        pass #not found
    @ In, dictionary, dict, the dictionary whose key needs to be returned
    @ In, key, str, the key to look up
    @ Out, response, str or bytes or None, the key (converted in bytes if needed), or None if absent
  """
  if key in dictionary:
    return key
  binKey = toBytes(key)
  return binKey if binKey in dictionary else None
def first(c):
  """
    Method to return the first element of a collection;
    for a list this is equivalent to c[0], but this also
    works for things that are views
    @ In, c, collection, the collection
    @ Out, response, item, the first item in the collection
  """
  iterator = iter(c)
  return next(iterator)
def importFromPath(filename, printImporting = True):
  """
    Method to import a module from a given file path
    @ In, filename, str, the full path of the module to import
    @ In, printImporting, bool, optional, True if information about the importing needs to be printed out
    @ Out, importedModule, module, the imported module
  """
  if printImporting:
    print('( ) '+UreturnPrintTag('UTILS') + ': '+UreturnPrintPostTag('Message')+ ' -> importing module '+ filename)
  # NOTE: the deprecated "imp" module (removed in Python 3.12) was replaced with
  # importlib; behavior (module execution + sys.modules registration) is preserved
  import importlib.util
  import os.path
  try:
    (path, name) = os.path.split(filename)
    (name, ext) = os.path.splitext(name)
    # allow callers to pass the path with or without the ".py" extension
    sourceFile = filename if ext else filename + '.py'
    spec = importlib.util.spec_from_file_location(name, sourceFile)
    if spec is None:
      raise ImportError('could not create an import spec for "'+sourceFile+'"')
    importedModule = importlib.util.module_from_spec(spec)
    sys.modules[name] = importedModule
    spec.loader.exec_module(importedModule)
    # make the module's directory visible to spawned processes as well
    pythonPath = os.environ.get("PYTHONPATH","")
    absPath = os.path.abspath(path)
    if absPath not in pythonPath:
      os.environ['PYTHONPATH'] = pythonPath+ os.pathsep + absPath
  except Exception as ae:
    raise Exception('( ) '+ UreturnPrintTag('UTILS') + ': '+UreturnPrintPostTag('ERROR')+ '-> importing module '+ filename + ' at '+path+os.sep+name+' failed with error '+str(ae))
  return importedModule
def getRelativeSortedListEntry(sortedList,value,tol=1e-15):
  """
    !!WARNING!! This method expects "sortedList" to already be a sorted list of float values!
    There are faster methods if they are not floats, and this will NOT work at all on unsorted lists.
    - Looks for a (close enough) match to "value" in "sortedList" using binomial search. If found,
    returns the index and value of the matching entry. If not found, adds a new entry to the sortedList
    and returns the new index with the original value.
    It is recommended that this method be used to add ALL entries into the sorted list to keep it sorted.
    @ In, sortedList, list, list of __sorted__ float values
    @ In, value, float, value to search for match
    @ In, tol, float, optional, relative tolerance used when comparing floats for a match
    @ Out, sortedList, list, possibly modified by still ordered list of floats
    @ Out, match_index, int, index of match in sortedList
    @ Out, match, float, matching float
  """
  from .mathUtils import compareFloats #necessary to prevent errors at module load
  # bisect gives the insertion point that keeps the list sorted
  index = bisect.bisect_left(sortedList,value)
  match_index = None
  match = None
  #if "value" is smallest value in list...
  if index == 0:
    if len(sortedList)>0:
      #check if current first matches
      if compareFloats(sortedList[0], value, tol=tol):
        match = sortedList[0]
        match_index = index
  #if "value" is largest value in list...
  elif index > len(sortedList)-1:
    #check if current last matches
    if compareFloats(sortedList[-1], value, tol=tol):
      match = sortedList[-1]
      match_index = len(sortedList)-1
  #if "value" is in the middle...
  else:
    #check both neighbors (left and right) for a match
    for idx in [index-1, index]:
      if compareFloats(sortedList[idx], value, tol=tol):
        match = sortedList[idx]
        match_index = idx
  #if no match found, add it
  if match is None:
    sortedList.insert(index,value)
    match = value
    match_index = index
  return sortedList,match_index,match
# def metaclass_insert__getstate__(self):
# """
# Overwrite state (for pickle-ing)
# we do not pickle the HDF5 (C++) instance
# but only the info to re-load it
# """
# # capture what is normally pickled
# state = self.__dict__.copy()
# # we pop the database instance and close it
# state.pop("database")
# self.database.closeDatabaseW()
# # what we return here will be stored in the pickle
# return state
#
# def metaclass_insert__setstate__(self, newstate):
# self.__dict__.update(newstate)
# self.exist = True
def metaclass_insert(metaclass,*baseClasses):
  """
    This allows a metaclass to be inserted as a base class.
    Metaclasses substitute in as a type(name,bases,namespace) function,
    and can be anywhere in the hierarchy. This instantiates the
    metaclass so it can be used as a base class.
    Example use:
      class Foo(metaclass_insert(Metaclass)):
    This function is based on the method used in Benjamin Peterson's six.py
    @ In, metaclass, abc, the metaclass
    @ In, baseClasses, args*, base classes
    @ Out, metaclass, class, the new metaclass
  """
  return metaclass("NewMiddleClass", baseClasses, {})
class abstractstatic(staticmethod):
  """
    Descriptor that makes an abstract static method, e.g.:
      import abc
      class A(metaclass_insert(abc.ABCMeta)):
        @abstractstatic
        def test():
          pass
      class B(A):
        @staticmethod
        def test():
          return 5
  """
  __isabstractmethod__ = True
  def __init__(self, function):
    """
      Constructor
      @ In, function, pointer, the function to 'abstract'
      @ Out, None
    """
    super().__init__(function)
    # flag the wrapped function too, so ABCMeta sees it as abstract
    function.__isabstractmethod__ = True
def find_crow(framework_dir):
  """
    Make sure that the crow path is in the python path. If not, add the path.
    @ In, framework_dir, string, the absolute path of the framework
    @ Out, None
  """
  try:
    # if crow is already importable there is nothing to do
    import crow_modules.distribution1D
    return
  except:
    ravenDir = os.path.dirname(framework_dir)
    #Add the module directory to the search path.
    crowDirs = [os.path.join(ravenDir,"crow"),
                os.path.join(os.path.dirname(ravenDir),"crow"),
                ravenDir]
    if "CROW_DIR" in os.environ:
      # an explicitly-configured crow location takes precedence
      crowDirs.insert(0,os.path.join(os.environ["CROW_DIR"]))
    #Check for editable install
    if len(glob.glob(os.path.join(ravenDir, "src", "crow_modules", "_randomENG*"))) > 0:
      sys.path.append(os.path.join(ravenDir, "src"))
      return
    # look for a compiled "install" directory in each candidate location
    for crowDir in crowDirs:
      pmoduleDir = os.path.join(crowDir,"install")
      if os.path.exists(pmoduleDir):
        sys.path.append(pmoduleDir)
        # we add it in pythonpath too
        os.environ['PYTHONPATH'] = os.environ.get("PYTHONPATH","") + os.pathsep + pmoduleDir
        return
    # nothing importable found: report the most helpful error possible
    for crowDir in crowDirs:
      if os.path.exists(os.path.join(crowDir,"tests")):
        raise IOError(UreturnPrintTag('UTILS') + ': '+UreturnPrintPostTag('ERROR')+ ' -> Crow was found in '+crowDir+' but does not seem to be compiled')
    raise IOError(UreturnPrintTag('UTILS') + ': '+UreturnPrintPostTag('ERROR')+ ' -> Crow has not been found. It location is supposed to be one of '+str(crowDirs)+'. Has RAVEN been built?')
def add_path(absolutepath):
  """
    Method to add a path to the PYTHON PATH
    @ In, absolutepath, string, the absolute path to be added
    @ Out, None
  """
  if not os.path.exists(absolutepath):
    raise IOError(UreturnPrintTag('UTILS') + ': '+UreturnPrintPostTag('ERROR')+ ' -> "'+absolutepath+ '" directory has not been found!')
  sys.path.append(absolutepath)
  # mirror the addition into the PYTHONPATH environment variable too
  updatedPath = os.environ.get("PYTHONPATH","") + os.pathsep + absolutepath
  # some OS's have a limit of 2**15 characters for environment variables
  if len(updatedPath) >= 32000:
    print("WARNING: excessive length PYTHONPATH:'"+str(updatedPath)+"'")
  os.environ['PYTHONPATH'] = updatedPath
def add_path_recursively(absoluteInitialPath):
  """
    Method to recursively add the given path and all of its subdirectories to the python path
    @ In, absoluteInitialPath, string, the absolute path to add
    @ Out, None
  """
  for directory, _, _ in os.walk(absoluteInitialPath):
    add_path(directory)
def findCrowModule(name):
  """
    Method to find one of the crow module (e.g. distribution1D, interpolationNDpy, randomENG, etc.) and return it.
    @ In, name, str, the name of the module
    @ Out, module, instance, the instance of module of "name"
  """
  availableCrowModules = ['distribution1D','interpolationND','randomENG']
  # assert
  assert(name in availableCrowModules)
  # find the module
  try:
    # preferred location: the compiled "crow_modules" package
    module = import_module("crow_modules.{}".format(name))
  except (ImportError, ModuleNotFoundError) as ie:
    # a failure other than a plainly missing module is a real error: re-raise
    if not str(ie).startswith("No module named"):
      print('sys.path:', sys.path)
      raise ie
    # otherwise fall back to a top-level module of the same name
    module = import_module("{}".format(name))
  return module
def getPythonCommand():
  """
    Method to get the preferred python command.
    @ In, None
    @ Out, pythonCommand, str, the name of the command to use.
  """
  # On Windows the interpreter is conventionally invoked as "python";
  # elsewhere use the running interpreter directly. Note that run_tests and
  # raven_framework already fold PYTHON_COMMAND into sys.executable.
  if os.name == "nt":
    return "python"
  return sys.executable
def printCsv(csv,*args):
  """
    Writes the values contained in args to the csv file, comma-separated,
    terminating the line
    @ In, csv, File instance, an open file object to which we will be writing
    @ In, args, list, an arbitrary collection of values to write to the file
    @ Out, None
  """
  print(*args, sep=',', file=csv)
def printCsvPart(csv,*args):
  """
    Writes the values contained in args to the csv file, comma-separated,
    appending a trailing comma instead of a newline so more data can be
    written to the same line.
    @ In, csv, File instance, an open file object to which we will be writing
    @ In, args, list, an arbitrary collection of values to write to the file
    @ Out, None
  """
  print(*args, sep=',', end=',', file=csv)
def tryParse(text):
  """
    A convenience function for attempting to parse a string as a number
    (first as an integer, then as a float if the value has a decimal, and
    finally falling back to returning the string unchanged when conversion
    fails).
    @ In, text, string, the text we are trying to parse
    @ Out, value, int/float/string/bool, the possibly converted value
  """
  ## FIXME is there anything that is a float that will raise an
  ## exception for int?
  ## Yes, inf and nan do not convert well to int, but would you
  ## ever have these in an input file? - dpm 6/8/16
  try:
    return int(text)
  except TypeError:
    ## a tag that exists but has no internal text is most likely boolean
    return True
  except ValueError:
    pass
  try:
    return float(text)
  except ValueError:
    return text
def makeDir(dirName):
  """
    Function that will attempt to create a directory. If the path already
    exists as a directory this returns silently with no error; any other
    failure (including the path existing as a regular file) raises the
    underlying OSError.
    @ In, dirName, string, specifying the new directory to be created
    @ Out, None
  """
  # exist_ok swallows only the already-exists-as-a-directory case, which is
  # exactly the errno.EEXIST + isdir() check this function used to perform
  os.makedirs(dirName, exist_ok=True)
class pickleSafeSubprocessPopen(subprocess.Popen):
  """
    Subclass of subprocess.Popen used internally to prevent _handle member from being pickled. On
    Windows, _handle contains an operating system reference that throws an exception when deep copied.
  """
  # Only define these methods on Windows to override deep copy/pickle (member may not exist on other
  # platforms.
  if platform.system() == 'Windows':
    def __getstate__(self):
      """
        Returns a dictionary of the object state for pickling/deep copying. Omits member '_handle',
        which cannot be deep copied when non-None.
        @ In, None
        @ Out, result, dict, the get state dict
      """
      # copy so the live instance keeps its _handle
      result = self.__dict__.copy()
      del result['_handle']
      return result
    def __setstate__(self, d):
      """
        Used to load an object dictionary when unpickling. Since member '_handle' could not be
        deep copied, load it back as value None.
        @ In, d, dict, previously stored namespace to restore
        @ Out, None
      """
      self.__dict__ = d
      self._handle = None
def removeDuplicates(objectList):
  """
    Method to efficiently remove duplicates from a list while maintaining
    the order of first appearance.
    @ In, objectList, list, list from which to remove duplicates
    @ Out, uniqueObjectList, list, list with unique values ordered by their
      first appearance in objectList
  """
  ## dict preserves insertion order (guaranteed since Python 3.7), so its keys
  ## yield the unique entries in first-appearance order in a single C-level pass
  uniqueObjectList = list(dict.fromkeys(objectList))
  return uniqueObjectList
def typeMatch(var,varTypeStr):
  """
    This method checks whether a variable matches a given datatype name
    @ In, var, python datatype, the first variable to compare
    @ In, varTypeStr, string, the type that this variable should have
    @ Out, match, bool, does the datatype match?
  """
  typeName = type(var).__name__
  fullName = type(var).__module__ + "." + typeName
  if varTypeStr in (typeName, fullName):
    return True
  # otherwise accept when the shorter name is a prefix of the longer one
  # (i.e. the two type names share the same root)
  shorter, longer = sorted((typeName, varTypeStr), key=len)
  return longer.startswith(shorter)
def isASubset(setToTest,pileList):
  """
    Check if setToTest is an ordered subset of pileList in O(n)
    @ In, setToTest, list, set that needs to be tested
    @ In, pileList, list, pile of sets
    @ Out, isASubset, bool, True if setToTest is a subset
  """
  if len(setToTest) > len(pileList):
    return False
  searchFrom = 0
  for element in setToTest:
    try:
      # each element must appear after the previous one's position
      searchFrom = pileList.index(element, searchFrom) + 1
    except ValueError:
      return False
  return True
def filterAllSubSets(listOfLists):
  """
    Given list of listOfLists, yield only the lists that are not an ordered
    subset of some other list in the collection
    @ In, listOfLists, list of lists, all lists to check
    @ Out, setToTest, iterator, iterator over the list without subsets
  """
  for candidate in listOfLists:
    isContained = any(isASubset(candidate, other)
                      for other in listOfLists if candidate is not other)
    if not isContained:
      yield candidate
def mergeDictionaries(*dictArgs):
  """
    Given any number of dicts, shallow copy and merge into a new dict;
    overlapping keys between any two inputs raise an error.
    Adapted from: http://stackoverflow.com/questions/38987/how-to-merge-two-python-dictionaries-in-a-single-expression
    @ In, dictArgs, dict, a list of dictionaries to merge
    @ Out, mergedDict, dict, merged dictionary including keys from everything in dictArgs.
  """
  mergedDict = {}
  for dictionary in dictArgs:
    overlap = set(dictionary).intersection(mergedDict)
    if overlap:
      raise IOError(UreturnPrintTag('UTILS') + ': '+UreturnPrintPostTag('ERROR')+ ' -> mergeDictionaries: the dictionaries being merged have the following overlapping keys: ' + ', '.join(overlap))
    mergedDict.update(dictionary)
  return mergedDict
def mergeSequences(seq1,seq2):
  """
    Merge two sequences (lists) into one, preserving the ordering of both
    (taken from "http://stackoverflow.com")
    e.g. ['A', 'B', 'C', 'D', 'E', 'H', 'I'] and
         ['A', 'B', 'E', 'F', 'G', 'H', 'J', 'K'] become
         ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K']
    @ In, seq1, list, the first sequence to be merged
    @ In, seq2, list, the second sequence to be merged
    @ Out, merged, list, the merged list of elements
  """
  matcher = SequenceMatcher(a=seq1, b=seq2)
  merged = []
  for op, start1, end1, start2, end2 in matcher.get_opcodes():
    # 'equal'/'delete' ranges are taken from seq1; 'insert' ranges come only
    # from seq2; 'replace' ranges differ in each sequence, so take both
    if op in ('equal', 'delete'):
      merged.extend(seq1[start1:end1])
    elif op == 'insert':
      merged.extend(seq2[start2:end2])
    elif op == 'replace':
      merged.extend(seq1[start1:end1])
      merged.extend(seq2[start2:end2])
  return merged
def checkTypeRecursively(inObject):
  """
    This method checks the type of the innermost object in inObject.
    If inObject is an iterable, this returns the type of its first element
    @ In, inObject, object, a python object
    @ Out, returnType, str, the type name of the inner object
  """
  returnType = type(inObject).__name__
  try:
    # descend into the first element; non-iterables and empty iterables
    # raise here and keep the outer type name
    firstElement = next(iter(inObject))
    returnType = checkTypeRecursively(firstElement)
  except Exception:
    pass
  return returnType
def returnIdSeparator():
  """
    This method is aimed to return the ID separator that should be used across the code when
    combined ids need to be assembled. For example, when the "EnsembleModel" creates new
    ``prefix`` ids for sub-models
    @ In, None
    @ Out, __idSeparator, string, the id separator
  """
  # returns the module-level constant (defined above as "++")
  return __idSeparator
def getAllSubclasses(cls):
  """
    Recursively collect all of the classes that are a subclass of cls
    @ In, cls, class, the class whose subclasses should be retrieved
    @ Out, getAllSubclasses, list, class objects for each direct or indirect subclass of cls
  """
  direct = cls.__subclasses__()
  indirect = []
  for sub in direct:
    indirect.extend(getAllSubclasses(sub))
  return direct + indirect
def displayAvailable():
  """
    The return variable for backend default setting of whether a display is
    available or not. For instance, if we are running on the HPC without an
    X11 instance, then we don't have the ability to display the plot, only
    to save it to a file.
    @ In, None
    @ Out, display, bool, True if platform is Windows or the environment
      variable 'DISPLAY' is set, otherwise False
  """
  if platform.system() == 'Windows':
    return True
  # on other platforms a non-empty DISPLAY variable signals an X11 display
  return bool(os.getenv('DISPLAY'))
def which(cmd):
  """
    Emulate the which method in shutil.
    Return the path to an executable which would be run if the given cmd was called.
    If no cmd would be called, return None.
    @ In, cmd, str, the exe to check
    @ Out, which, str, the full path or None if not found
  """
  def _access_check(fn):
    """
      Just check if the path is executable
      @ In, fn, string, the file to check
      @ Out, _access_check, bool, if accessable or not?
    """
    return (os.path.exists(fn) and os.access(fn, os.X_OK) and not os.path.isdir(fn))
  # if cmd already contains a directory component, check it directly without
  # searching PATH
  if os.path.dirname(cmd):
    if _access_check(cmd):
      return cmd
    return None
  path = os.environ.get("PATH", os.defpath)
  if not path:
    return None
  path = path.split(os.pathsep)
  if sys.platform == "win32":
    # on Windows the current directory is implicitly searched first
    if not os.curdir in path:
      path.insert(0, os.curdir)
    # PATHEXT lists executable extensions; if cmd already carries one of
    # them, test it as-is, otherwise try every extension in order
    pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
    if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
      files = [cmd]
    else:
      files = [cmd + ext for ext in pathext]
  else:
    files = [cmd]
  # walk PATH once, skipping directories already visited (case-normalized)
  seen = set()
  for dir in path:
    normdir = os.path.normcase(dir)
    if not normdir in seen:
      seen.add(normdir)
      for thefile in files:
        name = os.path.join(dir, thefile)
        if _access_check(name):
          return name
  return None
|
{
"content_hash": "2d6ff15f67c762d22c80e6806792fea2",
"timestamp": "",
"source": "github",
"line_count": 1040,
"max_line_length": 214,
"avg_line_length": 36.50865384615385,
"alnum_prop": 0.6708103979562274,
"repo_name": "joshua-cogliati-inl/raven",
"id": "c4b6416044d4dfa11396f6bef7a8fd6351fd7a17",
"size": "38558",
"binary": false,
"copies": "2",
"ref": "refs/heads/devel",
"path": "ravenframework/utils/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1556080"
},
{
"name": "Batchfile",
"bytes": "1095"
},
{
"name": "C",
"bytes": "148504"
},
{
"name": "C++",
"bytes": "48279546"
},
{
"name": "CMake",
"bytes": "9998"
},
{
"name": "Jupyter Notebook",
"bytes": "84202"
},
{
"name": "MATLAB",
"bytes": "202335"
},
{
"name": "Makefile",
"bytes": "2399"
},
{
"name": "Perl",
"bytes": "1297"
},
{
"name": "Python",
"bytes": "6952659"
},
{
"name": "R",
"bytes": "67"
},
{
"name": "SWIG",
"bytes": "8574"
},
{
"name": "Shell",
"bytes": "124279"
},
{
"name": "TeX",
"bytes": "479725"
}
],
"symlink_target": ""
}
|
import binascii
import logging.handlers
import mimetypes
import os
import platform
import random
import re
import socket
import string
import subprocess
import sys
import base58
import requests
import simplejson as json
from urllib2 import urlopen
from appdirs import user_data_dir
from datetime import datetime
from decimal import Decimal
from twisted.web import server
from twisted.internet import defer, threads, error, reactor, task
from twisted.internet.task import LoopingCall
from txjsonrpc import jsonrpclib
from txjsonrpc.web import jsonrpc
from txjsonrpc.web.jsonrpc import Handler
from lbrynet import __version__ as lbrynet_version
from lbryum.version import LBRYUM_VERSION as lbryum_version
from lbrynet import analytics
from lbrynet.core.PaymentRateManager import PaymentRateManager
from lbrynet.core.server.BlobAvailabilityHandler import BlobAvailabilityHandlerFactory
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.core.Error import UnknownNameError, InsufficientFundsError, InvalidNameError
from lbrynet.lbryfile.StreamDescriptor import EncryptedFileStreamType
from lbrynet.lbryfile.client.EncryptedFileDownloader import EncryptedFileSaverFactory, EncryptedFileOpenerFactory
from lbrynet.lbryfile.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.lbrynet_daemon.UIManager import UIManager
from lbrynet.lbrynet_daemon.Downloader import GetStream
from lbrynet.lbrynet_daemon.Publisher import Publisher
from lbrynet.lbrynet_daemon.ExchangeRateManager import ExchangeRateManager
from lbrynet.lbrynet_daemon.Lighthouse import LighthouseClient
from lbrynet.metadata.Metadata import Metadata, verify_name_characters
from lbrynet.core import log_support
from lbrynet.core import utils
from lbrynet.core.utils import generate_id
from lbrynet.lbrynet_console.Settings import Settings
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, \
KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, DEFAULT_WALLET, \
DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME, DEFAULT_UI_BRANCH, \
LOG_POST_URL, LOG_FILE_NAME, REFLECTOR_SERVERS, SEARCH_SERVERS
from lbrynet.conf import DEFAULT_SD_DOWNLOAD_TIMEOUT
from lbrynet.conf import DEFAULT_TIMEOUT
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob, BlobStreamDescriptorReader
from lbrynet.core.Session import Session
from lbrynet.core.PTCWallet import PTCWallet
from lbrynet.core.Wallet import LBRYcrdWallet, LBRYumWallet
from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager
from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager, TempEncryptedFileMetadataManager
from lbrynet import reflector
# TODO: this code snippet is everywhere. Make it go away
# Pick the platform-appropriate log directory and make sure it exists.
if sys.platform != "darwin":
    log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
    log_dir = user_data_dir("LBRY")
if not os.path.isdir(log_dir):
    os.mkdir(log_dir)
lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME)
log = logging.getLogger(__name__)
# Length of the log file at import time; log uploads can seek past this to
# send only content produced by the current run (see Daemon._upload_log).
if os.path.isfile(lbrynet_log):
    with open(lbrynet_log, 'r') as f:
        PREVIOUS_NET_LOG = len(f.read())
else:
    PREVIOUS_NET_LOG = 0
# Daemon startup stage codes (index into STARTUP_STAGES below)
INITIALIZING_CODE = 'initializing'
LOADING_DB_CODE = 'loading_db'
LOADING_WALLET_CODE = 'loading_wallet'
LOADING_FILE_MANAGER_CODE = 'loading_file_manager'
LOADING_SERVER_CODE = 'loading_server'
STARTED_CODE = 'started'
WAITING_FOR_FIRST_RUN_CREDITS = 'waiting_for_credits'
# (code, human-readable message) pairs, in startup order
STARTUP_STAGES = [
    (INITIALIZING_CODE, 'Initializing...'),
    (LOADING_DB_CODE, 'Loading databases...'),
    (LOADING_WALLET_CODE, 'Catching up with the blockchain... %s'),
    (LOADING_FILE_MANAGER_CODE, 'Setting up file manager'),
    (LOADING_SERVER_CODE, 'Starting lbrynet'),
    (STARTED_CODE, 'Started lbrynet'),
    (WAITING_FOR_FIRST_RUN_CREDITS, 'Waiting for first run credits...')
]
# Stream download lifecycle codes and their messages
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing...'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]
# Connectivity-problem codes reported by Daemon._check_connection_problems
CONNECT_CODE_VERSION_CHECK = 'version_check'
CONNECT_CODE_NETWORK = 'network_connection'
CONNECT_CODE_WALLET = 'wallet_catchup_lag'
CONNECTION_PROBLEM_CODES = [
    (CONNECT_CODE_VERSION_CHECK, "There was a problem checking for updates on github"),
    (CONNECT_CODE_NETWORK, "Your internet connection appears to have been interrupted"),
    (CONNECT_CODE_WALLET, "Synchronization with the blockchain is lagging... if this continues try restarting LBRY")
]
# API methods that may be called before startup has finished (see Daemon.render)
ALLOWED_DURING_STARTUP = ['is_running', 'is_first_run',
                          'get_time_behind_blockchain', 'stop',
                          'daemon_status', 'get_start_notice',
                          'version', 'get_search_servers']
# HTTP status codes
BAD_REQUEST = 400
NOT_FOUND = 404
OK_CODE = 200
# TODO add login credentials in a conf file
# TODO alert if your copy of a lbry file is out of date with the name record
# Host used to probe general internet connectivity (see _check_network_connection)
REMOTE_SERVER = "www.google.com"
class Parameters(object):
    """Simple attribute bag: every keyword argument becomes an instance attribute."""
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)
class Daemon(jsonrpc.JSONRPC):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """
    # twisted.web: this resource handles requests itself (no child lookup)
    isLeaf = True
    def __init__(self, root, wallet_type=None):
        """
        Build the daemon: load/migrate configuration, resolve the platform
        specific directories, and construct (but do not start) the session
        components and periodic checkers.

        root        : resource root handed to the UIManager
        wallet_type : optional wallet backend name from the command line;
                      when None the default wallet type is used
        """
        jsonrpc.JSONRPC.__init__(self)
        # make sure _shutdown runs when the reactor stops
        reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
        self.startup_status = STARTUP_STAGES[0]
        self.startup_message = None
        self.announced_startup = False
        self.connected_to_internet = True
        self.connection_problem = None
        self.query_handlers = {}
        self.git_lbrynet_version = None
        self.git_lbryum_version = None
        self.ui_version = None
        self.ip = None
        # TODO: this is confusing to set here, and then to be reset below.
        self.wallet_type = wallet_type
        self.first_run = None
        self.log_file = lbrynet_log
        self.current_db_revision = 1
        self.run_server = True
        self.session = None
        self.exchange_rate_manager = ExchangeRateManager()
        self.lighthouse_client = LighthouseClient()
        self.waiting_on = {}
        self.streams = {}
        # name -> txid of claims waiting for blockchain confirmation
        self.pending_claims = {}
        self.known_dht_nodes = KNOWN_DHT_NODES
        self.first_run_after_update = False
        self.uploaded_temp_files = []
        self._session_id = base58.b58encode(generate_id())
        # Resolve per-platform download and database directories.
        if os.name == "nt":
            from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
            default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
            self.db_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrynet")
            try:
                os.makedirs(self.db_dir)
            except OSError:
                # already existing is fine; anything else is fatal
                if not os.path.isdir(self.db_dir):
                    raise
        elif sys.platform == "darwin":
            default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
            self.db_dir = user_data_dir("LBRY")
        else:
            default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
            self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
        try:
            if not os.path.isdir(default_download_directory):
                os.mkdir(default_download_directory)
        except:
            log.info("Couldn't make download directory, using home")
            default_download_directory = os.path.expanduser("~")
        # One-time migration from the old .json config to the .yml config.
        old_conf_path = os.path.join(self.db_dir, 'daemon_settings.json')
        self.daemon_conf = os.path.join(self.db_dir, 'daemon_settings.yml')
        if os.path.isfile(old_conf_path):
            log.info("Migrating .json config file to .yml")
            tmp_settings = utils.load_settings(old_conf_path)
            utils.save_settings(self.daemon_conf, tmp_settings)
            try:
                os.remove(old_conf_path)
                log.info("Cleaned up old config file")
            except:
                log.warning("Failed to remove old config file")
        # Defaults for every supported setting; the saved config is merged
        # against this below (unknown keys dropped, missing keys added).
        self.default_settings = {
            'run_on_startup': False,
            'data_rate': MIN_BLOB_DATA_PAYMENT_RATE,
            'max_key_fee': DEFAULT_MAX_KEY_FEE,
            'download_directory': default_download_directory,
            'max_upload': 0.0,
            'max_download': 0.0,
            'upload_log': True,
            'search_timeout': DEFAULT_SEARCH_TIMEOUT,
            'download_timeout': DEFAULT_TIMEOUT,
            'max_search_results': DEFAULT_MAX_SEARCH_RESULTS,
            'wallet_type': DEFAULT_WALLET,
            'delete_blobs_on_remove': True,
            'peer_port': 3333,
            'dht_node_port': 4444,
            'reflector_port': 5566,
            'use_upnp': True,
            'start_lbrycrdd': True,
            'requested_first_run_credits': False,
            'run_reflector_server': False,
            'cache_time': DEFAULT_CACHE_TIME,
            'startup_scripts': [],
            'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}
        }
        if os.path.isfile(self.daemon_conf):
            loaded_settings = utils.load_settings(self.daemon_conf)
            missing_settings = {}
            removed_settings = {}
            for k in self.default_settings.keys():
                if k not in loaded_settings.keys():
                    missing_settings[k] = self.default_settings[k]
            for k in loaded_settings.keys():
                if not k in self.default_settings.keys():
                    log.info("Removing unused setting: " + k + " with value: " + str(loaded_settings[k]))
                    removed_settings[k] = loaded_settings[k]
                    del loaded_settings[k]
            for k in missing_settings.keys():
                log.info("Adding missing setting: " + k + " with default value: " + str(missing_settings[k]))
                loaded_settings[k] = missing_settings[k]
            # a command-line wallet type overrides the saved one
            if loaded_settings['wallet_type'] != self.wallet_type and self.wallet_type:
                loaded_settings['wallet_type'] = self.wallet_type
            if missing_settings or removed_settings:
                log.info("Updated and loaded lbrynet-daemon configuration")
            else:
                log.info("Loaded lbrynet-daemon configuration")
            self.session_settings = loaded_settings
        else:
            missing_settings = self.default_settings
            log.info("Writing default settings : " + json.dumps(self.default_settings) + " --> " + str(self.daemon_conf))
            self.session_settings = self.default_settings
        # Detect a version change since last run (used for upgrade handling).
        if 'last_version' in missing_settings.keys():
            self.session_settings['last_version'] = None
        if self.session_settings['last_version'] != self.default_settings['last_version']:
            self.session_settings['last_version'] = self.default_settings['last_version']
            self.first_run_after_update = True
            log.info("First run after update")
            log.info("lbrynet %s --> %s" % (self.session_settings['last_version']['lbrynet'], self.default_settings['last_version']['lbrynet']))
            log.info("lbryum %s --> %s" % (self.session_settings['last_version']['lbryum'], self.default_settings['last_version']['lbryum']))
            if "0.4.5" == self.default_settings['last_version']['lbrynet']:
                log.info("Lowering name cache time")
                self.session_settings['cache_time'] = DEFAULT_CACHE_TIME
        utils.save_settings(self.daemon_conf, self.session_settings)
        # Mirror the merged settings onto plain attributes for convenience.
        self.run_on_startup = self.session_settings['run_on_startup']
        self.data_rate = self.session_settings['data_rate']
        self.max_key_fee = self.session_settings['max_key_fee']
        self.download_directory = self.session_settings['download_directory']
        self.max_upload = self.session_settings['max_upload']
        self.max_download = self.session_settings['max_download']
        self.upload_log = self.session_settings['upload_log']
        self.search_timeout = self.session_settings['search_timeout']
        self.download_timeout = self.session_settings['download_timeout']
        self.max_search_results = self.session_settings['max_search_results']
        self.run_reflector_server = self.session_settings['run_reflector_server']
        ####
        #
        # Ignore the saved wallet type. Some users will have their wallet type
        # saved as lbrycrd and we want wallets to be lbryum unless explicitly
        # set on the command line to be lbrycrd.
        #
        # if self.session_settings['wallet_type'] in WALLET_TYPES and not wallet_type:
        #     self.wallet_type = self.session_settings['wallet_type']
        #     log.info("Using wallet type %s from config" % self.wallet_type)
        # else:
        #     self.wallet_type = wallet_type
        #     self.session_settings['wallet_type'] = wallet_type
        #     log.info("Using wallet type %s specified from command line" % self.wallet_type)
        #
        # Instead, if wallet is not set on the command line, default to the default wallet
        #
        if wallet_type:
            log.info("Using wallet type %s specified from command line", wallet_type)
            self.wallet_type = wallet_type
        else:
            log.info("Using the default wallet type %s", DEFAULT_WALLET)
            self.wallet_type = DEFAULT_WALLET
        #
        ####
        self.delete_blobs_on_remove = self.session_settings['delete_blobs_on_remove']
        self.peer_port = self.session_settings['peer_port']
        self.reflector_port = self.session_settings['reflector_port']
        self.dht_node_port = self.session_settings['dht_node_port']
        self.use_upnp = self.session_settings['use_upnp']
        self.start_lbrycrdd = self.session_settings['start_lbrycrdd']
        self.requested_first_run_credits = self.session_settings['requested_first_run_credits']
        self.cache_time = self.session_settings['cache_time']
        self.startup_scripts = self.session_settings['startup_scripts']
        # Load the cached name->claim info, if any was persisted.
        if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")):
            f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "r")
            self.name_cache = json.loads(f.read())
            f.close()
            log.info("Loaded claim info cache")
        else:
            self.name_cache = {}
        # Resolve the lbrycrdd binary and wallet directory per platform.
        if os.name == "nt":
            from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
            self.lbrycrdd_path = "lbrycrdd.exe"
            if self.wallet_type == "lbrycrd":
                self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd")
            else:
                self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbryum")
        elif sys.platform == "darwin":
            # NOTE(review): get_darwin_lbrycrdd_path is defined elsewhere in this file
            self.lbrycrdd_path = get_darwin_lbrycrdd_path()
            if self.wallet_type == "lbrycrd":
                self.wallet_dir = user_data_dir("lbrycrd")
            else:
                self.wallet_dir = user_data_dir("LBRY")
        else:
            self.lbrycrdd_path = "lbrycrdd"
            if self.wallet_type == "lbrycrd":
                self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
            else:
                self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbryum")
        # Record the lbrycrdd path for external tools (non-Windows only).
        if os.name != 'nt':
            lbrycrdd_path_conf = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf")
            if not os.path.isfile(lbrycrdd_path_conf):
                f = open(lbrycrdd_path_conf, "w")
                f.write(str(self.lbrycrdd_path))
                f.close()
        self.created_data_dir = False
        if not os.path.exists(self.db_dir):
            os.mkdir(self.db_dir)
            self.created_data_dir = True
        self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
        self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
        self.autofetcher_conf = os.path.join(self.wallet_dir, "autofetcher.conf")
        self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
        self.wallet_user = None
        self.wallet_password = None
        # Periodic tasks; started in setup()/_load_analytics_api, stopped in _shutdown.
        self.internet_connection_checker = LoopingCall(self._check_network_connection)
        self.version_checker = LoopingCall(self._check_remote_versions)
        self.connection_problem_checker = LoopingCall(self._check_connection_problems)
        self.pending_claim_checker = LoopingCall(self._check_pending_claims)
        self.send_heartbeat = LoopingCall(self._send_heartbeat)
        # self.lbrynet_connection_checker = LoopingCall(self._check_lbrynet_connection)
        self.sd_identifier = StreamDescriptorIdentifier()
        self.stream_info_manager = TempEncryptedFileMetadataManager()
        self.settings = Settings(self.db_dir)
        self.lbry_ui_manager = UIManager(root)
        self.blob_request_payment_rate_manager = None
        self.lbry_file_metadata_manager = None
        self.lbry_file_manager = None
        # Generate an lbrycrd.conf with random credentials on first use.
        if self.wallet_type == "lbrycrd":
            if os.path.isfile(self.lbrycrd_conf):
                log.info("Using lbrycrd.conf found at " + self.lbrycrd_conf)
            else:
                log.info("No lbrycrd.conf found at " + self.lbrycrd_conf + ". Generating now...")
                password = "".join(random.SystemRandom().choice(string.ascii_letters + string.digits + "_") for i in range(20))
                with open(self.lbrycrd_conf, 'w') as f:
                    f.write("rpcuser=rpcuser\n")
                    f.write("rpcpassword=" + password)
                log.info("Done writing lbrycrd.conf")
    def _responseFailed(self, err, call):
        """
        Errback wired to request.notifyFinish() in render(): logs the failure's
        traceback at debug level. `call` (the in-flight deferred) is unused here.
        """
        log.debug(err.getTraceback())
    def render(self, request):
        """
        Handle an incoming JSON-RPC request: check origin/referer, parse the
        payload, dispatch to the named method, and write the serialized result
        asynchronously. Returns server.NOT_DONE_YET on the dispatch path.
        """
        # only allow calls from the local UI
        origin = request.getHeader("Origin")
        referer = request.getHeader("Referer")
        if origin not in [None, 'http://localhost:5279']:
            log.warning("Attempted api call from %s", origin)
            return server.failure
        if referer is not None and not referer.startswith('http://localhost:5279/'):
            log.warning("Attempted api call from %s", referer)
            return server.failure
        request.content.seek(0, 0)
        # Unmarshal the JSON-RPC data.
        content = request.content.read()
        parsed = jsonrpclib.loads(content)
        functionPath = parsed.get("method")
        args = parsed.get('params')
        #TODO convert args to correct types if possible
        id = parsed.get('id')
        version = parsed.get('jsonrpc')
        # infer the protocol version from the request fields
        if version:
            version = int(float(version))
        elif id and not version:
            version = jsonrpclib.VERSION_1
        else:
            version = jsonrpclib.VERSION_PRE1
        # XXX this all needs to be re-worked to support logic for multiple
        # versions...
        # before startup finishes only a whitelisted subset of methods may run
        if not self.announced_startup:
            if functionPath not in ALLOWED_DURING_STARTUP:
                return server.failure
        # miner control is a lbrycrd-only feature
        if self.wallet_type == "lbryum" and functionPath in ['set_miner', 'get_miner_status']:
            return server.failure
        try:
            function = self._getFunction(functionPath)
        except jsonrpclib.Fault, f:
            self._cbRender(f, request, id, version)
        else:
            request.setHeader("Access-Control-Allow-Origin", "localhost")
            request.setHeader("content-type", "text/json")
            # [{}] is the convention for "no arguments"
            if args == [{}]:
                d = defer.maybeDeferred(function)
            else:
                d = defer.maybeDeferred(function, *args)
            # cancel the response if the connection is broken
            notify_finish = request.notifyFinish()
            notify_finish.addErrback(self._responseFailed, d)
            d.addErrback(self._ebRender, id)
            d.addCallback(self._cbRender, request, id, version)
            d.addErrback(notify_finish.errback)
            return server.NOT_DONE_YET
def _cbRender(self, result, request, id, version):
def default_decimal(obj):
if isinstance(obj, Decimal):
return float(obj)
if isinstance(result, Handler):
result = result.result
if isinstance(result, dict):
result = result['result']
if version == jsonrpclib.VERSION_PRE1:
if not isinstance(result, jsonrpclib.Fault):
result = (result,)
# Convert the result (python) to JSON-RPC
try:
s = jsonrpclib.dumps(result, version=version, default=default_decimal)
except:
f = jsonrpclib.Fault(self.FAILURE, "can't serialize output")
s = jsonrpclib.dumps(f, version=version)
request.setHeader("content-length", str(len(s)))
request.write(s)
request.finish()
def _ebRender(self, failure, id):
if isinstance(failure.value, jsonrpclib.Fault):
return failure.value
log.error(failure)
return jsonrpclib.Fault(self.FAILURE, "error")
    def setup(self, branch=DEFAULT_UI_BRANCH, user_specified=False, branch_specified=False, host_ui=True):
        """
        Start the daemon: kick off the periodic checkers, then run the full
        startup chain (UI, data dir, db migration, settings, session, stream
        identifier, file manager, query handlers, server) and finally announce
        startup. Returns an already-fired deferred; the chain itself runs
        asynchronously.
        """
        def _log_starting_vals():
            log.info("Starting balance: " + str(self.session.wallet.wallet_balance))
            return defer.succeed(None)
        def _announce_startup():
            def _wait_for_credits():
                # keep polling once a second until the first-run credits arrive
                if float(self.session.wallet.wallet_balance) == 0.0:
                    self.startup_status = STARTUP_STAGES[6]
                    return reactor.callLater(1, _wait_for_credits)
                else:
                    return _announce()
            def _announce():
                self.announced_startup = True
                self.startup_status = STARTUP_STAGES[5]
                log.info("Started lbrynet-daemon")
                if len(self.startup_scripts):
                    log.info("Scheduling scripts")
                    reactor.callLater(3, self._run_scripts)
            # upload the startup log first (full log on first run), then announce
            if self.first_run:
                d = self._upload_log(log_type="first_run")
            elif self.upload_log:
                d = self._upload_log(exclude_previous=True, log_type="start")
            else:
                d = defer.succeed(None)
            d.addCallback(lambda _: _announce())
            return d
        log.info("Starting lbrynet-daemon")
        # periodic maintenance tasks
        self.internet_connection_checker.start(3600)
        self.version_checker.start(3600 * 12)
        self.connection_problem_checker.start(1)
        self.exchange_rate_manager.start()
        if host_ui:
            self.lbry_ui_manager.update_checker.start(1800, now=False)
        d = defer.Deferred()
        if host_ui:
            d.addCallback(lambda _: self.lbry_ui_manager.setup(branch=branch,
                                                               user_specified=user_specified,
                                                               branch_specified=branch_specified))
        d.addCallback(lambda _: self._initial_setup())
        d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory))
        d.addCallback(lambda _: self._check_db_migration())
        d.addCallback(lambda _: self._get_settings())
        d.addCallback(lambda _: self._set_events())
        d.addCallback(lambda _: self._get_session())
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
        d.addCallback(lambda _: self._setup_stream_identifier())
        d.addCallback(lambda _: self._setup_lbry_file_manager())
        d.addCallback(lambda _: self._setup_lbry_file_opener())
        d.addCallback(lambda _: self._setup_query_handlers())
        d.addCallback(lambda _: self._setup_server())
        d.addCallback(lambda _: _log_starting_vals())
        d.addCallback(lambda _: _announce_startup())
        d.addCallback(lambda _: self._load_analytics_api())
        # TODO: handle errors here
        d.callback(None)
        return defer.succeed(None)
    def _load_analytics_api(self):
        """Load the analytics API client and start the 60-second heartbeat loop."""
        self.analytics_api = analytics.Api.load()
        self.send_heartbeat.start(60)
def _send_heartbeat(self):
heartbeat = self._events.heartbeat()
self.analytics_api.track(heartbeat)
def _send_download_started(self, name, stream_info=None):
event = self._events.download_started(name, stream_info)
self.analytics_api.track(event)
def _get_platform(self):
r = {
"processor": platform.processor(),
"python_version": platform.python_version(),
"platform": platform.platform(),
"os_release": platform.release(),
"os_system": platform.system(),
"lbrynet_version": lbrynet_version,
"lbryum_version": lbryum_version,
"ui_version": self.lbry_ui_manager.loaded_git_version,
}
if not self.ip:
try:
r['ip'] = json.load(urlopen('http://jsonip.com'))['ip']
self.ip = r['ip']
except:
r['ip'] = "Could not determine"
return r
def _initial_setup(self):
def _log_platform():
log.info("Platform: %s", json.dumps(self._get_platform()))
return defer.succeed(None)
d = _log_platform()
return d
    def _set_events(self):
        """Build the analytics Events helper from platform context and session ids."""
        context = analytics.make_context(self._get_platform(), self.wallet_type)
        # NOTE(review): self.lbryid is set outside the visible __init__ — confirm
        self._events = analytics.Events(context, base58.b58encode(self.lbryid), self._session_id)
def _check_network_connection(self):
try:
host = socket.gethostbyname(REMOTE_SERVER)
s = socket.create_connection((host, 80), 2)
self.connected_to_internet = True
except:
log.info("Internet connection not working")
self.connected_to_internet = False
def _check_lbrynet_connection(self):
def _log_success():
log.info("lbrynet connectivity test passed")
def _log_failure():
log.info("lbrynet connectivity test failed")
wonderfullife_sh = "6f3af0fa3924be98a54766aa2715d22c6c1509c3f7fa32566df4899a41f3530a9f97b2ecb817fa1dcbf1b30553aefaa7"
d = download_sd_blob(self.session, wonderfullife_sh, self.session.base_payment_rate_manager)
d.addCallbacks(lambda _: _log_success, lambda _: _log_failure)
def _check_remote_versions(self):
def _get_lbryum_version():
try:
r = urlopen("https://raw.githubusercontent.com/lbryio/lbryum/master/lib/version.py").read().split('\n')
version = next(line.split("=")[1].split("#")[0].replace(" ", "")
for line in r if "LBRYUM_VERSION" in line)
version = version.replace("'", "")
log.info(
"remote lbryum %s > local lbryum %s = %s",
version, lbryum_version,
utils.version_is_greater_than(version, lbryum_version)
)
self.git_lbryum_version = version
return defer.succeed(None)
except:
log.info("Failed to get lbryum version from git")
self.git_lbryum_version = None
return defer.fail(None)
def _get_lbrynet_version():
try:
version = get_lbrynet_version_from_github()
log.info(
"remote lbrynet %s > local lbrynet %s = %s",
version, lbrynet_version,
utils.version_is_greater_than(version, lbrynet_version)
)
self.git_lbrynet_version = version
return defer.succeed(None)
except:
log.info("Failed to get lbrynet version from git")
self.git_lbrynet_version = None
return defer.fail(None)
d = _get_lbrynet_version()
d.addCallback(lambda _: _get_lbryum_version())
def _check_connection_problems(self):
if not self.git_lbrynet_version or not self.git_lbryum_version:
self.connection_problem = CONNECTION_PROBLEM_CODES[0]
elif self.startup_status[0] == 'loading_wallet':
if self.session.wallet.is_lagging:
self.connection_problem = CONNECTION_PROBLEM_CODES[2]
else:
self.connection_problem = None
if not self.connected_to_internet:
self.connection_problem = CONNECTION_PROBLEM_CODES[1]
def _add_to_pending_claims(self, name, txid):
log.info("Adding lbry://%s to pending claims, txid %s" % (name, txid))
self.pending_claims[name] = txid
return txid
def _check_pending_claims(self):
# TODO: this was blatantly copied from jsonrpc_start_lbry_file. Be DRY.
def _start_file(f):
d = self.lbry_file_manager.toggle_lbry_file_running(f)
d.addCallback(lambda _: self.lighthouse_client.announce_sd(f.sd_hash))
return defer.succeed("Started LBRY file")
def _get_and_start_file(name):
d = defer.succeed(self.pending_claims.pop(name))
d.addCallback(lambda _: self._get_lbry_file("name", name, return_json=False))
d.addCallback(lambda l: _start_file(l) if l.stopped else "LBRY file was already running")
def re_add_to_pending_claims(name):
txid = self.pending_claims.pop(name)
self._add_to_pending_claims(name, txid)
def _process_lbry_file(name, lbry_file):
# lbry_file is an instance of ManagedEncryptedFileDownloader or None
# TODO: check for sd_hash in addition to txid
ready_to_start = (
lbry_file and
self.pending_claims[name] == lbry_file.txid
)
if ready_to_start:
_get_and_start_file(name)
else:
re_add_to_pending_claims(name)
for name in self.pending_claims:
log.info("Checking if new claim for lbry://%s is confirmed" % name)
d = self._resolve_name(name, force_refresh=True)
d.addCallback(lambda _: self._get_lbry_file_by_uri(name))
d.addCallbacks(
lambda lbry_file: _process_lbry_file(name, lbry_file),
lambda _: re_add_to_pending_claims(name)
)
def _start_server(self):
if self.peer_port is not None:
server_factory = ServerProtocolFactory(self.session.rate_limiter,
self.query_handlers,
self.session.peer_manager)
try:
self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory)
except error.CannotListenError as e:
import traceback
log.error("Couldn't bind to port %d. %s", self.peer_port, traceback.format_exc())
raise ValueError("%s lbrynet may already be running on your computer.", str(e))
return defer.succeed(True)
def _start_reflector(self):
if self.run_reflector_server:
log.info("Starting reflector server")
if self.reflector_port is not None:
reflector_factory = reflector.ServerFactory(
self.session.peer_manager,
self.session.blob_manager
)
try:
self.reflector_server_port = reactor.listenTCP(self.reflector_port, reflector_factory)
log.info('Started reflector on port %s', self.reflector_port)
except error.CannotListenError as e:
log.exception("Couldn't bind reflector to port %d", self.reflector_port)
raise ValueError("{} lbrynet may already be running on your computer.".format(e))
return defer.succeed(True)
def _stop_reflector(self):
if self.run_reflector_server:
log.info("Stopping reflector server")
try:
if self.reflector_server_port is not None:
self.reflector_server_port, p = None, self.reflector_server_port
return defer.maybeDeferred(p.stopListening)
except AttributeError:
return defer.succeed(True)
return defer.succeed(True)
def _stop_server(self):
try:
if self.lbry_server_port is not None:
self.lbry_server_port, p = None, self.lbry_server_port
return defer.maybeDeferred(p.stopListening)
else:
return defer.succeed(True)
except AttributeError:
return defer.succeed(True)
def _setup_server(self):
def restore_running_status(running):
if running is True:
d = self._start_server()
d.addCallback(lambda _: self._start_reflector())
return defer.succeed(True)
self.startup_status = STARTUP_STAGES[4]
dl = self.settings.get_server_running_status()
dl.addCallback(restore_running_status)
return dl
    def _setup_query_handlers(self):
        """
        Build the peer-protocol query handler factories (blob availability,
        wallet info, and — once the server payment rate is loaded — blob
        requests) and register them. Returns a deferred.
        """
        handlers = [
            # CryptBlobInfoQueryHandlerFactory(self.lbry_file_metadata_manager, self.session.wallet,
            #                                  self._server_payment_rate_manager),
            BlobAvailabilityHandlerFactory(self.session.blob_manager),
            # BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
            #                           self._server_payment_rate_manager),
            self.session.wallet.get_wallet_info_query_handler_factory(),
        ]
        def get_blob_request_handler_factory(rate):
            # deferred callback: appends to `handlers` before they are registered
            self.blob_request_payment_rate_manager = PaymentRateManager(
                self.session.base_payment_rate_manager, rate
            )
            handlers.append(BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
                                                      self.blob_request_payment_rate_manager))
        d1 = self.settings.get_server_data_payment_rate()
        d1.addCallback(get_blob_request_handler_factory)
        dl = defer.DeferredList([d1])
        dl.addCallback(lambda _: self._add_query_handlers(handlers))
        return dl
def _add_query_handlers(self, query_handlers):
def _set_query_handlers(statuses):
from future_builtins import zip
for handler, (success, status) in zip(query_handlers, statuses):
if success is True:
self.query_handlers[handler] = status
ds = []
for handler in query_handlers:
ds.append(self.settings.get_query_handler_status(handler.get_primary_query_identifier()))
dl = defer.DeferredList(ds)
dl.addCallback(_set_query_handlers)
return dl
def _upload_log(self, log_type=None, exclude_previous=False, force=False):
if self.upload_log or force:
for lm, lp in [('lbrynet', lbrynet_log)]:
if os.path.isfile(lp):
if exclude_previous:
f = open(lp, "r")
f.seek(PREVIOUS_NET_LOG)
log_contents = f.read()
f.close()
else:
f = open(lp, "r")
log_contents = f.read()
f.close()
params = {
'date': datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
'hash': base58.b58encode(self.lbryid)[:20],
'sys': platform.system(),
'type': "%s-%s" % (lm, log_type) if log_type else lm,
'log': log_contents
}
requests.post(LOG_POST_URL, params)
return defer.succeed(None)
else:
return defer.succeed(None)
def _clean_up_temp_files(self):
for path in self.uploaded_temp_files:
try:
os.remove(path)
except OSError:
pass
def _shutdown(self):
log.info("Closing lbrynet session")
log.info("Status at time of shutdown: " + self.startup_status[0])
if self.internet_connection_checker.running:
self.internet_connection_checker.stop()
if self.version_checker.running:
self.version_checker.stop()
if self.connection_problem_checker.running:
self.connection_problem_checker.stop()
if self.lbry_ui_manager.update_checker.running:
self.lbry_ui_manager.update_checker.stop()
if self.pending_claim_checker.running:
self.pending_claim_checker.stop()
if self.send_heartbeat.running:
self.send_heartbeat.stop()
self._clean_up_temp_files()
d = self._upload_log(log_type="close", exclude_previous=False if self.first_run else True)
d.addCallback(lambda _: self._stop_server())
d.addCallback(lambda _: self._stop_reflector())
d.addErrback(lambda err: True)
d.addCallback(lambda _: self.lbry_file_manager.stop())
d.addErrback(lambda err: True)
if self.session is not None:
d.addCallback(lambda _: self.session.shut_down())
d.addErrback(lambda err: True)
return d
def _update_settings(self, settings):
for k in settings.keys():
if k == 'run_on_startup':
if type(settings['run_on_startup']) is bool:
self.session_settings['run_on_startup'] = settings['run_on_startup']
else:
return defer.fail()
elif k == 'data_rate':
if type(settings['data_rate']) is float:
self.session_settings['data_rate'] = settings['data_rate']
elif type(settings['data_rate']) is int:
self.session_settings['data_rate'] = float(settings['data_rate'])
else:
return defer.fail()
elif k == 'max_key_fee':
if type(settings['max_key_fee']) is float:
self.session_settings['max_key_fee'] = settings['max_key_fee']
elif type(settings['max_key_fee']) is int:
self.session_settings['max_key_fee'] = float(settings['max_key_fee'])
else:
return defer.fail()
elif k == 'download_directory':
if type(settings['download_directory']) is unicode:
if os.path.isdir(settings['download_directory']):
self.session_settings['download_directory'] = settings['download_directory']
else:
pass
else:
return defer.fail()
elif k == 'max_upload':
if type(settings['max_upload']) is float:
self.session_settings['max_upload'] = settings['max_upload']
elif type(settings['max_upload']) is int:
self.session_settings['max_upload'] = float(settings['max_upload'])
else:
return defer.fail()
elif k == 'max_download':
if type(settings['max_download']) is float:
self.session_settings['max_download'] = settings['max_download']
if type(settings['max_download']) is int:
self.session_settings['max_download'] = float(settings['max_download'])
else:
return defer.fail()
elif k == 'upload_log':
if type(settings['upload_log']) is bool:
self.session_settings['upload_log'] = settings['upload_log']
else:
return defer.fail()
elif k == 'download_timeout':
if type(settings['download_timeout']) is int:
self.session_settings['download_timeout'] = settings['download_timeout']
elif type(settings['download_timeout']) is float:
self.session_settings['download_timeout'] = int(settings['download_timeout'])
else:
return defer.fail()
elif k == 'search_timeout':
if type(settings['search_timeout']) is float:
self.session_settings['search_timeout'] = settings['search_timeout']
elif type(settings['search_timeout']) is int:
self.session_settings['search_timeout'] = float(settings['search_timeout'])
else:
return defer.fail()
elif k == 'cache_time':
if type(settings['cache_time']) is int:
self.session_settings['cache_time'] = settings['cache_time']
elif type(settings['cache_time']) is float:
self.session_settings['cache_time'] = int(settings['cache_time'])
else:
return defer.fail()
self.run_on_startup = self.session_settings['run_on_startup']
self.data_rate = self.session_settings['data_rate']
self.max_key_fee = self.session_settings['max_key_fee']
self.download_directory = self.session_settings['download_directory']
self.max_upload = self.session_settings['max_upload']
self.max_download = self.session_settings['max_download']
self.upload_log = self.session_settings['upload_log']
self.download_timeout = self.session_settings['download_timeout']
self.search_timeout = self.session_settings['search_timeout']
self.cache_time = self.session_settings['cache_time']
utils.save_settings(self.daemon_conf, self.session_settings)
return defer.succeed(True)
def _setup_data_directory(self):
self.startup_status = STARTUP_STAGES[1]
log.info("Loading databases...")
if self.created_data_dir:
db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w')
db_revision.write(str(self.current_db_revision))
db_revision.close()
log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision")))
if not os.path.exists(self.blobfile_dir):
os.mkdir(self.blobfile_dir)
log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
def _check_db_migration(self):
old_revision = 1
db_revision_file = os.path.join(self.db_dir, "db_revision")
if os.path.exists(db_revision_file):
old_revision = int(open(db_revision_file).read().strip())
if old_revision < self.current_db_revision:
from lbrynet.db_migrator import dbmigrator
log.info("Upgrading your databases...")
d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision)
def print_success(old_dirs):
success_string = "Finished upgrading the databases. It is now safe to delete the"
success_string += " following directories, if you feel like it. It won't make any"
success_string += " difference.\nAnyway here they are: "
for i, old_dir in enumerate(old_dirs):
success_string += old_dir
if i + 1 < len(old_dir):
success_string += ", "
log.info(success_string)
d.addCallback(print_success)
return d
return defer.succeed(True)
    def _get_settings(self):
        """Start the settings store, load (or create) the LBRY id, and then
        configure the remote log handler with it.

        Returns:
            a deferred that fires when setup is done
        """
        d = self.settings.start()
        d.addCallback(lambda _: self.settings.get_lbryid())
        d.addCallback(self._set_lbryid)
        d.addCallback(lambda _: self._modify_loggly_formatter())
        return d
def _set_lbryid(self, lbryid):
if lbryid is None:
return self._make_lbryid()
else:
log.info("LBRY ID: " + base58.b58encode(lbryid))
self.lbryid = lbryid
    def _make_lbryid(self):
        """Generate a fresh LBRY id, remember it on the daemon, and persist it.

        Returns:
            a deferred that fires once the id has been saved
        """
        self.lbryid = generate_id()
        log.info("Generated new LBRY ID: " + base58.b58encode(self.lbryid))
        d = self.settings.save_lbryid(self.lbryid)
        return d
    def _modify_loggly_formatter(self):
        """Tag remote (loggly) log records with this node's id and session id."""
        log_support.configure_loggly_handler(
            lbry_id=base58.b58encode(self.lbryid),
            session_id=self._session_id
        )
    def _setup_lbry_file_manager(self):
        """Set up the file metadata manager and then the EncryptedFileManager.

        Returns:
            a deferred that fires when the file manager is ready
        """
        self.startup_status = STARTUP_STAGES[3]
        self.lbry_file_metadata_manager = DBEncryptedFileMetadataManager(self.db_dir)
        d = self.lbry_file_metadata_manager.setup()
        def set_lbry_file_manager():
            # runs only after the metadata manager finished its own setup
            self.lbry_file_manager = EncryptedFileManager(self.session,
                                                          self.lbry_file_metadata_manager,
                                                          self.sd_identifier,
                                                          download_directory=self.download_directory)
            return self.lbry_file_manager.setup()
        d.addCallback(lambda _: set_lbry_file_manager())
        return d
    def _get_session(self):
        """Create and set up the lbrynet Session.

        Fetches the default data payment rate and constructs the configured
        wallet in parallel, then builds the Session from both results.

        Returns:
            a deferred that fires when session.setup() completes
        """
        def get_default_data_rate():
            # fall back to the minimum rate when none has been stored
            d = self.settings.get_default_data_payment_rate()
            d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else
                                        MIN_BLOB_DATA_PAYMENT_RATE})
            return d
        def get_wallet():
            # pick the wallet implementation based on the configured type
            if self.wallet_type == "lbrycrd":
                log.info("Using lbrycrd wallet")
                d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf,
                                                lbrycrdd_path=self.lbrycrdd_path))
            elif self.wallet_type == "lbryum":
                log.info("Using lbryum wallet")
                d = defer.succeed(LBRYumWallet(self.db_dir))
            elif self.wallet_type == "ptc":
                log.info("Using PTC wallet")
                d = defer.succeed(PTCWallet(self.db_dir))
            else:
                # TODO: should fail here. Can't switch to lbrycrd because the wallet_dir, conf and path won't be set
                log.info("Requested unknown wallet '%s', using default lbryum", self.wallet_type)
                d = defer.succeed(LBRYumWallet(self.db_dir))
            d.addCallback(lambda wallet: {"wallet": wallet})
            return d
        d1 = get_default_data_rate()
        d2 = get_wallet()
        def combine_results(results):
            # merge the {rate} and {wallet} dicts produced above
            r = {}
            for success, result in results:
                if success is True:
                    r.update(result)
            return r
        def create_session(results):
            self.session = Session(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid,
                                   blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port,
                                   known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port,
                                   use_upnp=self.use_upnp, wallet=results['wallet'])
            self.startup_status = STARTUP_STAGES[2]
        dl = defer.DeferredList([d1, d2], fireOnOneErrback=True)
        dl.addCallback(combine_results)
        dl.addCallback(create_session)
        dl.addCallback(lambda _: self.session.setup())
        return dl
    def _setup_stream_identifier(self):
        """Register the save-to-disk and open-in-place downloader factories
        for the encrypted file stream type.

        Returns:
            a deferred that fires with None
        """
        file_saver_factory = EncryptedFileSaverFactory(self.session.peer_finder, self.session.rate_limiter,
                                                       self.session.blob_manager, self.stream_info_manager,
                                                       self.session.wallet, self.download_directory)
        self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)
        file_opener_factory = EncryptedFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                         self.session.blob_manager, self.stream_info_manager,
                                                         self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_opener_factory)
        return defer.succeed(None)
    def _setup_lbry_file_opener(self):
        """Register the open-in-place downloader factory for the encrypted
        file stream type.

        Returns:
            a deferred that fires with True
        """
        downloader_factory = EncryptedFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                        self.session.blob_manager, self.stream_info_manager,
                                                        self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, downloader_factory)
        return defer.succeed(True)
    def _download_sd_blob(self, sd_hash, timeout=DEFAULT_SD_DOWNLOAD_TIMEOUT):
        """Download and parse a stream descriptor blob, with a timeout.

        Args:
            sd_hash: hash of the stream descriptor blob to fetch
            timeout: seconds before the returned deferred errbacks
        Returns:
            a deferred firing with the parsed sd blob info, or errbacking
            with "sd timeout" if the download takes too long
        """
        def cb(result):
            # deliver the result only if the timeout has not already fired
            if not r.called:
                r.callback(result)
        def eb():
            # raise the timeout only if the download has not already finished
            if not r.called:
                r.errback(Exception("sd timeout"))
        r = defer.Deferred(None)
        reactor.callLater(timeout, eb)
        d = download_sd_blob(self.session, sd_hash, PaymentRateManager(self.session.base_payment_rate_manager))
        d.addCallback(BlobStreamDescriptorReader)
        d.addCallback(lambda blob: blob.get_info())
        d.addCallback(cb)
        return r
    def _download_name(self, name, timeout=DEFAULT_TIMEOUT, download_directory=None,
                       file_name=None, stream_info=None, wait_for_write=True):
        """
        Add a lbry file to the file manager, start the download, and return the new lbry file.
        If it already exists in the file manager, return the existing lbry file

        Args:
            name: lbry uri to download (without the lbry:// prefix)
            timeout: seconds before the attempt is abandoned
            download_directory: where to save the file (daemon default if None)
            file_name: optional override for the saved file's name
            stream_info: optional already-resolved stream info; when given,
                name resolution is skipped entirely
            wait_for_write: passed through to _DownloadNameHelper - TODO
                confirm it delays the result until data hits disk
        Returns:
            a deferred firing with the (new or existing) lbry file
        """
        self._send_download_started(name)
        helper = _DownloadNameHelper(
            self, name, timeout, download_directory, file_name, wait_for_write)
        if not stream_info:
            # mark the name as in-flight so concurrent gets are refused
            self.waiting_on[name] = True
            d = self._resolve_name(name)
        else:
            d = defer.succeed(stream_info)
        d.addCallback(helper._setup_stream)
        d.addCallback(helper.wait_or_get_stream)
        if not stream_info:
            d.addCallback(helper._remove_from_wait)
        return d
    def add_stream(self, name, timeout, download_directory, file_name, stream_info):
        """Makes, adds and starts a stream

        Args:
            name: lbry uri of the stream
            timeout: seconds before the download attempt is abandoned
            download_directory: directory the file is saved into
            file_name: optional file name override
            stream_info: resolved stream info used to start the download
        Returns:
            the deferred returned by GetStream.start
        """
        self.streams[name] = GetStream(self.sd_identifier,
                                       self.session,
                                       self.session.wallet,
                                       self.lbry_file_manager,
                                       self.exchange_rate_manager,
                                       max_key_fee=self.max_key_fee,
                                       data_rate=self.data_rate,
                                       timeout=timeout,
                                       download_directory=download_directory,
                                       file_name=file_name)
        d = self.streams[name].start(stream_info, name)
        return d
def _get_long_count_timestamp(self):
return int((datetime.utcnow() - (datetime(year=2012, month=12, day=21))).total_seconds())
def _update_claim_cache(self):
f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "w")
f.write(json.dumps(self.name_cache))
f.close()
return defer.succeed(True)
    def _resolve_name(self, name, force_refresh=False):
        """Resolves a name. Checks the cache first before going out to the blockchain.

        Args:
            name: the lbry://<name> to resolve (passed without the prefix)
            force_refresh: if True, always go out to the blockchain to resolve.
        Returns:
            a deferred firing with the resolved stream info
        Raises:
            ValueError: if `name` still carries the lbry:// prefix
        """
        if name.startswith('lbry://'):
            raise ValueError('name {} should not start with lbry://'.format(name))
        helper = _ResolveNameHelper(self, name, force_refresh)
        return helper.get_deferred()
    def _delete_lbry_file(self, lbry_file, delete_file=True):
        """Remove a lbry file from the manager, delete its blob data, drop
        its stream metadata when unreferenced, and optionally delete the
        downloaded file from disk.

        Args:
            lbry_file: the managed lbry file to delete
            delete_file: if True, also remove the file from download_directory
        Returns:
            a deferred that fires when deletion has finished
        """
        d = self.lbry_file_manager.delete_lbry_file(lbry_file)
        def finish_deletion(lbry_file):
            d = lbry_file.delete_data()
            d.addCallback(lambda _: _delete_stream_data(lbry_file))
            return d
        def _delete_stream_data(lbry_file):
            s_h = lbry_file.stream_hash
            d = self.lbry_file_manager.get_count_for_stream_hash(s_h)
            # TODO: could possibly be a timing issue here
            # only drop the stream metadata when no other file references it
            d.addCallback(lambda c: self.stream_info_manager.delete_stream(s_h) if c == 0 else True)
            if delete_file:
                d.addCallback(lambda _: os.remove(os.path.join(self.download_directory, lbry_file.file_name)) if
                          os.path.isfile(os.path.join(self.download_directory, lbry_file.file_name)) else defer.succeed(None))
            return d
        d.addCallback(lambda _: finish_deletion(lbry_file))
        d.addCallback(lambda _: log.info("Delete lbry file"))
        return d
    def _get_est_cost(self, name):
        """Estimate the total cost (data cost + key fee) of downloading `name`.

        Resolves the claim, downloads its sd blob to learn the stream size,
        prices the data, then adds the claim's key fee. A timer cancels the
        estimate after self.search_timeout seconds, falling back to just the
        key fee.

        Args:
            name: lbry uri (without prefix)
        Returns:
            a deferred firing with the estimated cost
        """
        def _check_est(d, name):
            # runs on the timeout timer: cancel the estimate if it has not
            # produced a float result yet
            try:
                if isinstance(d.result, float):
                    log.info("Cost est for lbry://" + name + ": " + str(d.result) + "LBC")
                    return defer.succeed(None)
            except AttributeError:
                pass
            log.info("Timeout estimating cost for lbry://" + name + ", using key fee")
            d.cancel()
            return defer.succeed(None)
        def _add_key_fee(data_cost):
            d = self._resolve_name(name)
            d.addCallback(lambda info: self.exchange_rate_manager.to_lbc(info.get('fee', None)))
            d.addCallback(lambda fee: data_cost if fee is None else data_cost + fee.amount)
            return d
        d = self._resolve_name(name)
        d.addCallback(lambda info: info['sources']['lbry_sd_hash'])
        d.addCallback(lambda sd_hash: download_sd_blob(self.session, sd_hash,
                                                       self.blob_request_payment_rate_manager))
        d.addCallback(self.sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(lambda metadata: metadata.validator.info_to_show())
        # stream_size scaled by 1e6 then priced at data_rate - presumably
        # bytes -> MB at LBC/MB; confirm against data_rate's unit
        d.addCallback(lambda info: int(dict(info)['stream_size']) / 1000000 * self.data_rate)
        d.addCallbacks(_add_key_fee, lambda _: _add_key_fee(0.0))
        reactor.callLater(self.search_timeout, _check_est, d, name)
        return d
def _get_lbry_file_by_uri(self, name):
def _get_file(stream_info):
sd = stream_info['sources']['lbry_sd_hash']
for l in self.lbry_file_manager.lbry_files:
if l.sd_hash == sd:
return defer.succeed(l)
return defer.succeed(None)
d = self._resolve_name(name)
d.addCallback(_get_file)
return d
def _get_lbry_file_by_sd_hash(self, sd_hash):
for l in self.lbry_file_manager.lbry_files:
if l.sd_hash == sd_hash:
return defer.succeed(l)
return defer.succeed(None)
def _get_lbry_file_by_file_name(self, file_name):
for l in self.lbry_file_manager.lbry_files:
if l.file_name == file_name:
return defer.succeed(l)
return defer.succeed(None)
    def _get_lbry_file(self, search_by, val, return_json=True):
        """Look up a managed lbry file by name, sd_hash, or file_name.

        Args:
            search_by: one of "name", "sd_hash", "file_name"
            val: the value to search for
            return_json: if True, fire with a status dict describing the
                file; otherwise fire with the lbry file object itself
        Returns:
            a deferred. NOTE(review): if search_by is not one of the three
            supported keys, `d` is never bound and this raises NameError.
        """
        def _log_get_lbry_file(f):
            if f and val:
                log.info("Found LBRY file for " + search_by + ": " + val)
            elif val:
                log.info("Did not find LBRY file for " + search_by + ": " + val)
            return f
        def _get_json_for_return(f):
            def _get_file_status(file_status):
                message = STREAM_STAGES[2][1] % (file_status.name, file_status.num_completed, file_status.num_known, file_status.running_status)
                return defer.succeed(message)
            def _generate_reply(size):
                if f.key:
                    key = binascii.b2a_hex(f.key)
                else:
                    key = None
                # measure how much of the file is already written to disk
                # (py2 `file` builtin; False when the file does not exist)
                if os.path.isfile(os.path.join(self.download_directory, f.file_name)):
                    written_file = file(os.path.join(self.download_directory, f.file_name))
                    written_file.seek(0, os.SEEK_END)
                    written_bytes = written_file.tell()
                    written_file.close()
                else:
                    written_bytes = False
                # a live download status is only available for name lookups
                if search_by == "name":
                    if val in self.streams.keys():
                        status = self.streams[val].code
                    elif f in self.lbry_file_manager.lbry_files:
                        # if f.stopped:
                        #     status = STREAM_STAGES[3]
                        # else:
                        status = STREAM_STAGES[2]
                    else:
                        status = [False, False]
                else:
                    status = [False, False]
                if status[0] == DOWNLOAD_RUNNING_CODE:
                    # running download: fetch the live status message first
                    d = f.status()
                    d.addCallback(_get_file_status)
                    d.addCallback(lambda message: {'completed': f.completed, 'file_name': f.file_name,
                                                   'download_directory': f.download_directory,
                                                   'download_path': os.path.join(f.download_directory, f.file_name),
                                                   'mime_type': mimetypes.guess_type(os.path.join(f.download_directory, f.file_name))[0],
                                                   'key': key,
                                                   'points_paid': f.points_paid, 'stopped': f.stopped,
                                                   'stream_hash': f.stream_hash,
                                                   'stream_name': f.stream_name,
                                                   'suggested_file_name': f.suggested_file_name,
                                                   'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash,
                                                   'lbry_uri': f.uri, 'txid': f.txid, 'claim_id': f.claim_id,
                                                   'total_bytes': size,
                                                   'written_bytes': written_bytes, 'code': status[0],
                                                   'message': message})
                else:
                    d = defer.succeed({'completed': f.completed, 'file_name': f.file_name, 'key': key,
                                       'download_directory': f.download_directory,
                                       'download_path': os.path.join(f.download_directory, f.file_name),
                                       'mime_type': mimetypes.guess_type(os.path.join(f.download_directory, f.file_name))[0],
                                       'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
                                       'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
                                       'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash, 'total_bytes': size,
                                       'written_bytes': written_bytes, 'lbry_uri': f.uri, 'txid': f.txid, 'claim_id': f.claim_id,
                                       'code': status[0], 'message': status[1]})
                return d
            def _add_metadata(message):
                def _add_to_dict(metadata):
                    message['metadata'] = metadata
                    return defer.succeed(message)
                # files with a txid can resolve their claim metadata; a
                # failed resolve is reported as pending confirmation
                if f.txid:
                    d = self._resolve_name(f.uri)
                    d.addCallbacks(_add_to_dict, lambda _: _add_to_dict("Pending confirmation"))
                else:
                    d = defer.succeed(message)
                return d
            if f:
                d = f.get_total_bytes()
                d.addCallback(_generate_reply)
                d.addCallback(_add_metadata)
                return d
            else:
                return False
        if search_by == "name":
            d = self._get_lbry_file_by_uri(val)
        elif search_by == "sd_hash":
            d = self._get_lbry_file_by_sd_hash(val)
        elif search_by == "file_name":
            d = self._get_lbry_file_by_file_name(val)
        # d.addCallback(_log_get_lbry_file)
        if return_json:
            d.addCallback(_get_json_for_return)
        return d
def _get_lbry_files(self):
d = defer.DeferredList([self._get_lbry_file('sd_hash', l.sd_hash) for l in self.lbry_file_manager.lbry_files])
return d
def _reflect(self, lbry_file):
if not lbry_file:
return defer.fail(Exception("no lbry file given to reflect"))
stream_hash = lbry_file.stream_hash
if stream_hash is None:
return defer.fail(Exception("no stream hash"))
log.info("Reflecting stream: %s" % stream_hash)
reflector_server = random.choice(REFLECTOR_SERVERS)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Start reflector client")
factory = reflector.ClientFactory(
self.session.blob_manager,
self.lbry_file_manager.stream_info_manager,
stream_hash
)
d = reactor.resolve(reflector_address)
d.addCallback(lambda ip: reactor.connectTCP(ip, reflector_port, factory))
d.addCallback(lambda _: factory.finished_deferred)
return d
def _reflect_blobs(self, blob_hashes):
if not blob_hashes:
return defer.fail(Exception("no lbry file given to reflect"))
log.info("Reflecting %i blobs" % len(blob_hashes))
reflector_server = random.choice(REFLECTOR_SERVERS)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Start reflector client")
factory = reflector.BlobClientFactory(
self.session.blob_manager,
blob_hashes
)
d = reactor.resolve(reflector_address)
d.addCallback(lambda ip: reactor.connectTCP(ip, reflector_port, factory))
d.addCallback(lambda _: factory.finished_deferred)
return d
    def _log_to_slack(self, msg):
        """Post a short status message, prefixed with the platform and a
        truncated node id, to a slack webhook.

        NOTE(review): the webhook URL is hard-coded in source; anyone with
        the source can post to (or flood) this channel.
        """
        URL = "https://hooks.slack.com/services/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"
        msg = platform.platform() + ": " + base58.b58encode(self.lbryid)[:20] + ", " + msg
        requests.post(URL, json.dumps({"text": msg}))
        return defer.succeed(None)
def _run_scripts(self):
if len([k for k in self.startup_scripts if 'run_once' in k.keys()]):
log.info("Removing one time startup scripts")
remaining_scripts = [s for s in self.startup_scripts if 'run_once' not in s.keys()]
startup_scripts = self.startup_scripts
self.startup_scripts = self.session_settings['startup_scripts'] = remaining_scripts
utils.save_settings(self.daemon_conf, self.session_settings)
for script in startup_scripts:
if script['script_name'] == 'migrateto025':
log.info("Running migrator to 0.2.5")
from lbrynet.lbrynet_daemon.daemon_scripts.migrateto025 import run as run_migrate
run_migrate(self)
if script['script_name'] == 'Autofetcher':
log.info("Starting autofetcher script")
from lbrynet.lbrynet_daemon.daemon_scripts.Autofetcher import run as run_autofetcher
run_autofetcher(self)
return defer.succeed(None)
    def _search(self, search):
        """Forward a search query to the lighthouse search service."""
        return self.lighthouse_client.search(search)
def _render_response(self, result, code):
return defer.succeed({'result': result, 'code': code})
def jsonrpc_is_running(self):
"""
Check if lbrynet daemon is running
Args:
None
Returns: true if daemon completed startup, otherwise false
"""
log.info("is_running: " + str(self.announced_startup))
if self.announced_startup:
return self._render_response(True, OK_CODE)
else:
return self._render_response(False, OK_CODE)
    def jsonrpc_daemon_status(self):
        """
        Get lbrynet daemon status information

        Args:
            None
        Returns:
            'message': startup status message
            'code': status_code
            'progress': progress, only used in loading_wallet
            'is_lagging': flag set to indicate lag, if set message will contain relevant message
        """
        r = {'code': self.startup_status[0], 'message': self.startup_status[1],
             'progress': None, 'is_lagging': None, 'problem_code': None}
        # a connection problem overrides the regular startup status
        if self.connection_problem:
            r['problem_code'] = self.connection_problem[0]
            r['message'] = self.connection_problem[1]
            r['is_lagging'] = True
        elif self.startup_status[0] == LOADING_WALLET_CODE:
            # lbryum reports catch-up progress; otherwise show a generic
            # catching-up message with zero progress
            if self.wallet_type == 'lbryum':
                if self.session.wallet.blocks_behind_alert != 0:
                    r['message'] = r['message'] % (str(self.session.wallet.blocks_behind_alert) + " blocks behind")
                    r['progress'] = self.session.wallet.catchup_progress
                else:
                    r['message'] = "Catching up with the blockchain"
                    r['progress'] = 0
            else:
                r['message'] = "Catching up with the blockchain"
                r['progress'] = 0
        log.info("daemon status: " + str(r))
        return self._render_response(r, OK_CODE)
def jsonrpc_is_first_run(self):
"""
Check if this is the first time lbrynet daemon has been run
Args:
None
Returns:
True if first run, otherwise False
"""
log.info("Check if is first run")
try:
d = self.session.wallet.is_first_run()
except:
d = defer.fail(None)
d.addCallbacks(lambda r: self._render_response(r, OK_CODE), lambda _: self._render_response(None, OK_CODE))
return d
def jsonrpc_get_start_notice(self):
"""
Get special message to be displayed at startup
Args:
None
Returns:
Startup message, such as first run notification
"""
log.info("Get startup notice")
if self.first_run and not self.session.wallet.wallet_balance:
return self._render_response(self.startup_message, OK_CODE)
elif self.first_run:
return self._render_response(None, OK_CODE)
else:
self._render_response(self.startup_message, OK_CODE)
    def jsonrpc_version(self):
        """
        Get lbry version information

        Args:
            None
        Returns:
            'platform': platform string
            'os_release': os release string
            'os_system': os name
            'lbrynet_version': installed lbrynet version
            'lbryum_version': installed lbryum version
            'ui_version': commit hash of ui version being used
            'remote_lbrynet': most recent lbrynet version available from github
            'remote_lbryum': most recent lbryum version available from github
            'lbrynet_update_available': bool, remote version is newer
            'lbryum_update_available': bool, remote version is newer
        """
        platform_info = self._get_platform()
        msg = {
            'platform': platform_info['platform'],
            'os_release': platform_info['os_release'],
            'os_system': platform_info['os_system'],
            'lbrynet_version': lbrynet_version,
            'lbryum_version': lbryum_version,
            'ui_version': self.ui_version,
            'remote_lbrynet': self.git_lbrynet_version,
            'remote_lbryum': self.git_lbryum_version,
            'lbrynet_update_available': utils.version_is_greater_than(self.git_lbrynet_version, lbrynet_version),
            'lbryum_update_available': utils.version_is_greater_than(self.git_lbryum_version, lbryum_version),
        }
        log.info("Get version info: " + json.dumps(msg))
        return self._render_response(msg, OK_CODE)
    def jsonrpc_get_settings(self):
        """
        Get lbrynet daemon settings

        Args:
            None
        Returns:
            'run_on_startup': bool,
            'data_rate': float,
            'max_key_fee': float,
            'download_directory': string,
            'max_upload': float, 0.0 for unlimited
            'max_download': float, 0.0 for unlimited
            'upload_log': bool,
            'search_timeout': float,
            'download_timeout': int
            'max_search_results': int,
            'wallet_type': string,
            'delete_blobs_on_remove': bool,
            'peer_port': int,
            'dht_node_port': int,
            'use_upnp': bool,
            'start_lbrycrdd': bool,
        """
        log.info("Get daemon settings")
        # returns the live session_settings dict as-is
        return self._render_response(self.session_settings, OK_CODE)
    def jsonrpc_set_settings(self, p):
        """
        Set lbrynet daemon settings

        Args:
            'run_on_startup': bool,
            'data_rate': float,
            'max_key_fee': float,
            'download_directory': string,
            'max_upload': float, 0.0 for unlimited
            'max_download': float, 0.0 for unlimited
            'upload_log': bool,
            'download_timeout': int
        Returns:
            settings dict
        """
        def _log_settings_change():
            log.info("Set daemon settings to " + json.dumps(self.session_settings))
        d = self._update_settings(p)
        # validation failures are logged rather than propagated; the current
        # settings are returned either way
        d.addErrback(lambda err: log.info(err.getTraceback()))
        d.addCallback(lambda _: _log_settings_change())
        d.addCallback(lambda _: self._render_response(self.session_settings, OK_CODE))
        return d
    def jsonrpc_help(self, p=None):
        """
        Function to retrieve docstring for API function

        Args:
            optional 'function': function to retrieve documentation for
            optional 'callable_during_start': flag (note: the code checks
                this key, not 'callable_during_startup')
        Returns:
            if given a function, returns given documentation
            if given callable_during_start flag, returns list of functions callable during the startup sequence
            if no params are given, returns the list of callable functions
        """
        if not p:
            return self._render_response(self._listFunctions(), OK_CODE)
        elif 'callable_during_start' in p.keys():
            return self._render_response(ALLOWED_DURING_STARTUP, OK_CODE)
        elif 'function' in p.keys():
            # serve the requested function's docstring as its help text
            func_path = p['function']
            function = self._getFunction(func_path)
            return self._render_response(function.__doc__, OK_CODE)
        else:
            return self._render_response(self.jsonrpc_help.__doc__, OK_CODE)
def jsonrpc_get_balance(self):
"""
Get balance
Args:
None
Returns:
balance, float
"""
log.info("Get balance")
return self._render_response(float(self.session.wallet.wallet_balance), OK_CODE)
    def jsonrpc_stop(self):
        """
        Stop lbrynet-daemon

        Args:
            None
        Returns:
            shutdown message
        """
        def _disp_shutdown():
            log.info("Shutting down lbrynet daemon")
        d = self._shutdown()
        d.addCallback(lambda _: _disp_shutdown())
        # stop the reactor on the next tick so the response can still be sent
        d.addCallback(lambda _: reactor.callLater(0.0, reactor.stop))
        # respond immediately; shutdown continues in the background
        return self._render_response("Shutting down", OK_CODE)
def jsonrpc_get_lbry_files(self):
"""
Get LBRY files
Args:
None
Returns:
List of lbry files:
'completed': bool
'file_name': string
'key': hex string
'points_paid': float
'stopped': bool
'stream_hash': base 58 string
'stream_name': string
'suggested_file_name': string
'upload_allowed': bool
'sd_hash': string
"""
d = self._get_lbry_files()
d.addCallback(lambda r: [d[1] for d in r])
d.addCallback(lambda r: self._render_response(r, OK_CODE) if len(r) else self._render_response(False, OK_CODE))
return d
    def jsonrpc_get_lbry_file(self, p):
        """
        Get lbry file

        Args:
            'name': get file by lbry uri,
            'sd_hash': get file by the hash in the name claim,
            'file_name': get file by its name in the downloads folder,
        Returns:
            'completed': bool
            'file_name': string
            'key': hex string
            'points_paid': float
            'stopped': bool
            'stream_hash': base 58 string
            'stream_name': string
            'suggested_file_name': string
            'upload_allowed': bool
            'sd_hash': string
        """
        # py2: p.keys()[0] - only the first key is examined, so exactly one
        # search parameter is expected
        if p.keys()[0] in ['name', 'sd_hash', 'file_name']:
            search_type = p.keys()[0]
            d = self._get_lbry_file(search_type, p[search_type])
        else:
            d = defer.fail()
        d.addCallback(lambda r: self._render_response(r, OK_CODE))
        return d
def jsonrpc_resolve_name(self, p):
"""
Resolve stream info from a LBRY uri
Args:
'name': name to look up, string, do not include lbry:// prefix
Returns:
metadata from name claim
"""
force = p.get('force', False)
if 'name' in p:
name = p['name']
else:
return self._render_response(None, BAD_REQUEST)
d = self._resolve_name(name, force_refresh=force)
d.addCallbacks(lambda info: self._render_response(info, OK_CODE), lambda _: server.failure)
return d
def jsonrpc_get_my_claim(self, p):
"""
Return existing claim for a given name
Args:
'name': name to look up
Returns:
claim info, False if no such claim exists
"""
name = p['name']
d = self.session.wallet.get_my_claim(name)
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d
def jsonrpc_get_claim_info(self, p):
"""
Resolve claim info from a LBRY uri
Args:
'name': name to look up, string, do not include lbry:// prefix
Returns:
txid, amount, value, n, height
"""
def _convert_amount_to_float(r):
if not r:
return False
else:
r['amount'] = float(r['amount']) / 10**8
return r
name = p['name']
txid = p.get('txid', None)
d = self.session.wallet.get_claim_info(name, txid)
d.addCallback(_convert_amount_to_float)
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d
def _process_get_parameters(self, p):
"""Extract info from input parameters and fill in default values for `get` call."""
# TODO: this process can be abstracted s.t. each method
# can spec what parameters it expects and how to set default values
timeout = p.get('timeout', self.download_timeout)
download_directory = p.get('download_directory', self.download_directory)
file_name = p.get('file_name')
stream_info = p.get('stream_info')
sd_hash = get_sd_hash(stream_info)
wait_for_write = p.get('wait_for_write', True)
name = p.get('name')
return Parameters(
timeout=timeout,
download_directory=download_directory,
file_name=file_name,
stream_info=stream_info,
sd_hash=sd_hash,
wait_for_write=wait_for_write,
name=name
)
    def jsonrpc_get(self, p):
        """Download stream from a LBRY uri.

        Args:
            'name': name to download, string
            'download_directory': optional, path to directory where file will be saved, string
            'file_name': optional, a user specified name for the downloaded file
            'stream_info': optional, specified stream info overrides name
            'timeout': optional
            'wait_for_write': optional, defaults to True
        Returns:
            'stream_hash': hex string
            'path': path of download
        """
        params = self._process_get_parameters(p)
        if not params.name:
            return server.failure
        # refuse a second request for a name that is still being fetched
        if params.name in self.waiting_on:
            return server.failure
        d = self._download_name(name=params.name,
                                timeout=params.timeout,
                                download_directory=params.download_directory,
                                stream_info=params.stream_info,
                                file_name=params.file_name,
                                wait_for_write=params.wait_for_write)
        # TODO: downloading can timeout. Not sure what to do when that happens
        d.addCallbacks(get_output_callback(params), lambda err: str(err))
        d.addCallback(lambda message: self._render_response(message, OK_CODE))
        return d
    def jsonrpc_stop_lbry_file(self, p):
        """
        Stop lbry file

        Args:
            'name': stop file by lbry uri,
            'sd_hash': stop file by the hash in the name claim,
            'file_name': stop file by its name in the downloads folder,
        Returns:
            confirmation message
        """
        def _stop_file(f):
            # toggle flips a running file to stopped
            d = self.lbry_file_manager.toggle_lbry_file_running(f)
            d.addCallback(lambda _: "Stopped LBRY file")
            return d
        # NOTE(review): when the first key is not a supported search type,
        # this falls through and implicitly returns None
        if p.keys()[0] in ['name', 'sd_hash', 'file_name']:
            search_type = p.keys()[0]
            d = self._get_lbry_file(search_type, p[search_type], return_json=False)
            d.addCallback(lambda l: _stop_file(l) if not l.stopped else "LBRY file wasn't running")
            d.addCallback(lambda r: self._render_response(r, OK_CODE))
            return d
    def jsonrpc_start_lbry_file(self, p):
        """
        Start lbry file

        Args:
            'name': start file by lbry uri,
            'sd_hash': start file by the hash in the name claim,
            'file_name': start file by its name in the downloads folder,
        Returns:
            confirmation message
        """
        def _start_file(f):
            # NOTE(review): the toggle deferred is dropped; success is
            # reported immediately without waiting for the toggle to finish
            d = self.lbry_file_manager.toggle_lbry_file_running(f)
            return defer.succeed("Started LBRY file")
        if p.keys()[0] in ['name', 'sd_hash', 'file_name']:
            search_type = p.keys()[0]
            d = self._get_lbry_file(search_type, p[search_type], return_json=False)
            d.addCallback(lambda l: _start_file(l) if l.stopped else "LBRY file was already running")
            d.addCallback(lambda r: self._render_response(r, OK_CODE))
            return d
    def jsonrpc_get_est_cost(self, p):
        """
        Get estimated cost for a lbry uri

        Args:
            'name': lbry uri
            optional 'force': if True, compute the estimate from the stream
                data itself instead of the search index
        Returns:
            estimated cost
        """
        name = p['name']
        force = p.get('force', False)
        if force:
            # full estimate: downloads the sd blob and prices the stream
            d = self._get_est_cost(name)
        else:
            # cheap estimate: first cost reported by the search service
            d = self._search(name)
            d.addCallback(lambda r: [i['cost'] for i in r][0])
        d.addCallback(lambda r: self._render_response(r, OK_CODE))
        return d
    def jsonrpc_search_nametrie(self, p):
        """
        Search the nametrie for claims

        Args:
            'search': search query, string
        Returns:
            List of search results
        """
        # TODO: change this function to "search"
        if 'search' in p.keys():
            search = p['search']
        else:
            return self._render_response(None, BAD_REQUEST)
        # TODO: have ui accept the actual outputs
        def _clean(n):
            # flatten each result's claim value and attach cost estimate,
            # thumbnail (with a default) and name for the ui
            t = []
            for i in n:
                td = {k: i['value'][k] for k in i['value']}
                td['cost_est'] = float(i['cost'])
                td['thumbnail'] = i['value'].get('thumbnail', "img/Free-speech-flag.svg")
                td['name'] = i['name']
                t.append(td)
            return t
        log.info('Search: %s' % search)
        d = self._search(search)
        d.addCallback(_clean)
        d.addCallback(lambda results: self._render_response(results, OK_CODE))
        return d
    def jsonrpc_delete_lbry_file(self, p):
        """
        Delete a lbry file

        Args:
            'file_name': downloaded file name, string
            optional 'delete_target_file': also delete the file on disk
                (defaults to True); 'name' and 'sd_hash' are accepted as
                alternative search keys
        Returns:
            confirmation message
        """
        if 'delete_target_file' in p.keys():
            delete_file = p['delete_target_file']
        else:
            delete_file = True
        def _delete_file(f):
            file_name = f.file_name
            d = self._delete_lbry_file(f, delete_file=delete_file)
            # NOTE(review): message lacks a space before the file name
            d.addCallback(lambda _: "Deleted LBRY file" + file_name)
            return d
        # NOTE(review): when no search key is supplied, `d` below is unbound
        # and the addCallback line raises NameError
        if 'name' in p.keys() or 'sd_hash' in p.keys() or 'file_name' in p.keys():
            search_type = [k for k in p.keys() if k != 'delete_target_file'][0]
            d = self._get_lbry_file(search_type, p[search_type], return_json=False)
            d.addCallback(lambda l: _delete_file(l) if l else False)
        d.addCallback(lambda r: self._render_response(r, OK_CODE))
        return d
def jsonrpc_publish(self, p):
    """
    Make a new name claim and publish associated data to lbrynet

    Args:
        'name': name to be claimed, string
        'file_path': path to file to be associated with name, string
        'bid': amount of credits to commit in this claim, float
        'metadata': metadata dictionary
        optional 'fee'
    Returns:
        Claim txid
    """
    def _set_address(address, currency, m):
        # Record a freshly generated key-fee address in the metadata dict.
        log.info("Generated new address for key fee: " + str(address))
        m['fee'][currency]['address'] = address
        return m

    def _reflect_if_possible(sd_hash, txid):
        # Reflect the published stream, then pass the txid through unchanged.
        d = self._get_lbry_file('sd_hash', sd_hash, return_json=False)
        d.addCallback(self._reflect)
        d.addCallback(lambda _: txid)
        return d

    name = p['name']
    log.info("Publish: ")
    log.info(p)
    try:
        verify_name_characters(name)
    except AssertionError:
        log.error("Bad name")
        return defer.fail(InvalidNameError("Bad name"))
    bid = p['bid']
    # If the metadata validates, the sources (sd hash) already exist and no
    # new lbry file has to be created; otherwise publish a new file from
    # 'file_path'.
    try:
        metadata = Metadata(p['metadata'])
        make_lbry_file = False
        sd_hash = metadata['sources']['lbry_sd_hash']
    except AssertionError:
        make_lbry_file = True
        sd_hash = None
        metadata = p['metadata']
        file_path = p['file_path']
    if not self.pending_claim_checker.running:
        self.pending_claim_checker.start(30)
    # Force-refresh the current claim state; errors are swallowed because the
    # name may not have been claimed yet.
    d = self._resolve_name(name, force_refresh=True)
    d.addErrback(lambda _: None)
    if 'fee' in p:
        metadata['fee'] = p['fee']
        assert len(metadata['fee']) == 1, "Too many fees"
        # Exactly one fee currency (asserted above), so the late-binding 'c'
        # captured by the lambda below is safe.
        for c in metadata['fee']:
            if 'address' not in metadata['fee'][c]:
                d.addCallback(lambda _: self.session.wallet.get_new_address())
                d.addCallback(lambda addr: _set_address(addr, c, metadata))
        # NOTE(review): when the fee already carries an address, no callback
        # re-injects `metadata` into the chain, so the next callback's `meta`
        # argument is the _resolve_name result rather than the metadata dict —
        # confirm this is intended.
    else:
        d.addCallback(lambda _: metadata)
    if make_lbry_file:
        pub = Publisher(self.session, self.lbry_file_manager, self.session.wallet)
        d.addCallback(lambda meta: pub.start(name, file_path, bid, meta))
    else:
        d.addCallback(lambda meta: self.session.wallet.claim_name(name, bid, meta))
    if sd_hash:
        d.addCallback(lambda txid: _reflect_if_possible(sd_hash, txid))
    d.addCallback(lambda txid: self._add_to_pending_claims(name, txid))
    d.addCallback(lambda r: self._render_response(r, OK_CODE))
    return d
def jsonrpc_abandon_claim(self, p):
    """
    Abandon a name and reclaim credits from the claim

    Args:
        'txid': txid of claim, string
    Return:
        txid
    """
    if 'txid' not in p:
        return server.failure
    txid = p['txid']

    def _log_and_render(result):
        log.info("Abandoned name claim tx " + str(result))
        return self._render_response(result, OK_CODE)

    d = defer.Deferred()
    d.addCallback(lambda _: self.session.wallet.abandon_name(txid))
    d.addCallback(_log_and_render)
    d.callback(None)
    return d
def jsonrpc_abandon_name(self, p):
    """
    DEPRECATED, use abandon_claim

    Args:
        'txid': txid of claim, string
    Return:
        txid
    """
    # kept for backwards compatibility; simply delegates to abandon_claim
    return self.jsonrpc_abandon_claim(p)
def jsonrpc_support_claim(self, p):
    """
    Support a name claim

    Args:
        'name': name
        'claim_id': claim id of claim to support
        'amount': amount to support by
    Return:
        txid
    """
    d = self.session.wallet.support_claim(p['name'], p['claim_id'], p['amount'])
    d.addCallback(lambda txid: self._render_response(txid, OK_CODE))
    return d
def jsonrpc_get_name_claims(self):
    """
    Get my name claims

    Args:
        None
    Returns
        list of name claims
    """
    def _decimals_to_floats(claims):
        # Decimal values are not JSON serializable; convert them in place.
        for claim in claims:
            for key in claim.keys():
                if isinstance(claim[key], Decimal):
                    claim[key] = float(claim[key])
        return defer.succeed(claims)

    d = self.session.wallet.get_name_claims()
    d.addCallback(_decimals_to_floats)
    d.addCallback(lambda claims: self._render_response(claims, OK_CODE))
    return d
def jsonrpc_get_claims_for_name(self, p):
    """
    Get claims for a name

    Args:
        'name': name
    Returns
        list of name claims
    """
    d = self.session.wallet.get_claims_for_name(p['name'])
    d.addCallback(lambda claims: self._render_response(claims, OK_CODE))
    return d
def jsonrpc_get_transaction_history(self):
    """
    Get transaction history

    Args:
        None
    Returns:
        list of transactions
    """
    d = self.session.wallet.get_history()
    d.addCallback(lambda transactions: self._render_response(transactions, OK_CODE))
    return d
def jsonrpc_get_transaction(self, p):
    """
    Get a decoded transaction from a txid

    Args:
        txid: txid hex string
    Returns:
        JSON formatted transaction
    """
    d = self.session.wallet.get_tx_json(p['txid'])
    d.addCallback(lambda tx: self._render_response(tx, OK_CODE))
    return d
def jsonrpc_address_is_mine(self, p):
    """
    Checks if an address is associated with the current wallet.

    Args:
        address: string
    Returns:
        is_mine: bool
    """
    d = self.session.wallet.address_is_mine(p['address'])
    d.addCallback(lambda result: self._render_response(result, OK_CODE))
    return d
def jsonrpc_get_public_key_from_wallet(self, p):
    """
    Get public key from wallet address

    Args:
        wallet: wallet address, base58
    Returns:
        public key
    """
    wallet = p['wallet']
    d = self.session.wallet.get_pub_keys(wallet)
    d.addCallback(lambda r: self._render_response(r, OK_CODE))
    # bug fix: the deferred was previously never returned, so callers
    # received None instead of the rendered response
    return d
def jsonrpc_get_time_behind_blockchain(self):
    """
    Get number of blocks behind the blockchain

    Args:
        None
    Returns:
        number of blocks behind blockchain, int
    """
    def _get_time_behind():
        try:
            local_height = self.session.wallet.network.get_local_height()
            remote_height = self.session.wallet.network.get_server_height()
            return defer.succeed(remote_height - local_height)
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # not swallowed; defer.fail() still wraps the active exception
        except Exception:
            return defer.fail()

    d = _get_time_behind()
    d.addCallback(lambda r: self._render_response(r, OK_CODE))
    return d
def jsonrpc_get_new_address(self):
    """
    Generate a new wallet address

    Args:
        None
    Returns:
        new wallet address, base 58 string
    """
    def _log_address(address):
        log.info("Got new wallet address: " + address)
        return defer.succeed(address)

    d = self.session.wallet.get_new_address()
    d.addCallback(_log_address)
    d.addCallback(lambda address: self._render_response(address, OK_CODE))
    return d
def jsonrpc_send_amount_to_address(self, p):
    """
    Send credits to an address

    Args:
        amount: the amount to send
        address: the address of the recipient
    Returns:
        True if payment successfully scheduled
    """
    if 'amount' not in p or 'address' not in p:
        return server.failure
    amount = p['amount']
    address = p['address']
    # reserve the points first; a None reservation means the wallet balance
    # cannot cover the payment
    reserved_points = self.session.wallet.reserve_points(address, amount)
    if reserved_points is None:
        return defer.fail(InsufficientFundsError())
    d = self.session.wallet.send_points_to_address(reserved_points, amount)
    d.addCallback(lambda _: self._render_response(True, OK_CODE))
    return d
def jsonrpc_get_best_blockhash(self):
    """
    Get hash of most recent block

    Args:
        None
    Returns:
        Hash of most recent block
    """
    d = self.session.wallet.get_best_blockhash()
    d.addCallback(lambda blockhash: self._render_response(blockhash, OK_CODE))
    return d
def jsonrpc_get_block(self, p):
    """
    Get contents of a block

    Args:
        blockhash: hash of the block to look up
        height: alternative lookup by block height
    Returns:
        requested block
    """
    if 'blockhash' in p:
        d = self.session.wallet.get_block(p['blockhash'])
    elif 'height' in p:
        # resolve the hash from the height first, then fetch the block
        d = self.session.wallet.get_block_info(p['height'])
        d.addCallback(lambda blockhash: self.session.wallet.get_block(blockhash))
    else:
        return server.failure
    d.addCallback(lambda block: self._render_response(block, OK_CODE))
    return d
def jsonrpc_get_claims_for_tx(self, p):
    """
    Get claims for tx

    Args:
        txid: txid of a name claim transaction
    Returns:
        any claims contained in the requested tx
    """
    if 'txid' not in p:
        return server.failure
    d = self.session.wallet.get_claims_from_tx(p['txid'])
    d.addCallback(lambda claims: self._render_response(claims, OK_CODE))
    return d
def jsonrpc_download_descriptor(self, p):
    """
    Download and return a sd blob

    Args:
        sd_hash
        timeout: optional, seconds to wait (defaults to DEFAULT_SD_DOWNLOAD_TIMEOUT)
    Returns
        sd blob, dict
    """
    timeout = p.get('timeout', DEFAULT_SD_DOWNLOAD_TIMEOUT)
    d = self._download_sd_blob(p['sd_hash'], timeout)
    # on failure render False rather than an error so the caller always gets
    # an OK_CODE response
    d.addCallbacks(
        lambda blob: self._render_response(blob, OK_CODE),
        lambda _: self._render_response(False, OK_CODE))
    return d
def jsonrpc_get_nametrie(self):
    """
    Get the nametrie

    Args:
        None
    Returns:
        Name claim trie
    """
    d = self.session.wallet.get_nametrie()
    # drop trie entries that carry no txid
    d.addCallback(lambda trie: [entry for entry in trie if 'txid' in entry])
    d.addCallback(lambda trie: self._render_response(trie, OK_CODE))
    return d
def jsonrpc_set_miner(self, p):
    """
    Start or stop the miner; only available when lbrycrd is set as the wallet.

    Args:
        run: True/False
    Returns:
        miner status, True/False
    """
    if p['run']:
        d = self.session.wallet.start_miner()
    else:
        d = self.session.wallet.stop_miner()
    # report the resulting miner status rather than the start/stop result
    d.addCallback(lambda _: self.session.wallet.get_miner_status())
    d.addCallback(lambda status: self._render_response(status, OK_CODE))
    return d
def jsonrpc_get_miner_status(self):
    """
    Get status of miner

    Args:
        None
    Returns:
        True/False
    """
    d = self.session.wallet.get_miner_status()
    d.addCallback(lambda status: self._render_response(status, OK_CODE))
    return d
def jsonrpc_log(self, p):
    """
    Log message

    Args:
        'message': message to be logged
    Returns:
        True
    """
    log.info("API client log request: %s" % p['message'])
    return self._render_response(True, OK_CODE)
def jsonrpc_upload_log(self, p=None):
    """
    Upload log

    Args, optional:
        'name_prefix': prefix to indicate what is requesting the log upload
        'exclude_previous': true/false, whether or not to exclude previous sessions from upload, defaults on true
        'message': optional message logged and sent to slack with the upload
        'force': true/false, force the upload, defaults false
    Returns:
        True
    """
    force = False
    if p:
        if 'name_prefix' in p:
            log_type = p['name_prefix'] + '_api'
        elif 'log_type' in p:
            log_type = p['log_type'] + '_api'
        else:
            log_type = None
        exclude_previous = p.get('exclude_previous', True)
        if 'message' in p:
            log.info("Upload log message: " + str(p['message']))
        force = p.get('force', False)
    else:
        # bug fix: this branch previously left `force` unbound, raising a
        # NameError whenever upload_log was called without parameters
        log_type = "api"
        exclude_previous = True
    d = self._upload_log(log_type=log_type, exclude_previous=exclude_previous, force=force)
    # bug fix: guard for the p=None case, which previously crashed with
    # AttributeError on p.keys()
    if p and 'message' in p:
        d.addCallback(lambda _: self._log_to_slack(p['message']))
    d.addCallback(lambda _: self._render_response(True, OK_CODE))
    return d
def jsonrpc_configure_ui(self, p):
    """
    Configure the UI being hosted

    Args, optional:
        'branch': a branch name on lbryio/lbry-web-ui
        'path': path to a ui folder
        'check_requirements': optional bool, defaults True
    """
    check_require = p.get('check_requirements', True)
    # a user-specified path wins over a branch; otherwise use the defaults
    if 'path' in p:
        d = self.lbry_ui_manager.setup(user_specified=p['path'], check_requirements=check_require)
    elif 'branch' in p:
        d = self.lbry_ui_manager.setup(branch=p['branch'], check_requirements=check_require)
    else:
        d = self.lbry_ui_manager.setup(check_requirements=check_require)
    d.addCallback(lambda r: self._render_response(r, OK_CODE))
    return d
def jsonrpc_reveal(self, p):
    """
    Reveal a file or directory in file browser

    Args:
        'path': path to be selected in file browser
    Returns:
        True, opens file browser
    """
    path = p['path']
    if sys.platform == "darwin":
        args = ['open', '-R', path]
    else:
        # No easy way to reveal specific files on Linux, so just open the containing directory
        args = ['xdg-open', os.path.dirname(path)]
    d = threads.deferToThread(subprocess.Popen, args)
    d.addCallback(lambda _: self._render_response(True, OK_CODE))
    return d
def jsonrpc_get_peers_for_hash(self, p):
    """
    Get peers for blob hash

    Args:
        'blob_hash': blob hash
    Returns:
        List of contacts
    """
    d = self.session.peer_finder.find_peers_for_blob(p['blob_hash'])
    d.addCallback(lambda peers: [[peer.host, peer.port, peer.is_available()] for peer in peers])
    d.addCallback(lambda peers: self._render_response(peers, OK_CODE))
    return d
def jsonrpc_announce_all_blobs_to_dht(self):
    """
    Announce all blobs to the dht

    Args:
        None
    Returns:
        confirmation string
    """
    d = self.session.blob_manager.immediate_announce_all_blobs()
    d.addCallback(lambda _: self._render_response("Announced", OK_CODE))
    return d
def jsonrpc_reflect(self, p):
    """
    Reflect a stream

    Args:
        sd_hash: sd_hash of lbry file
    Returns:
        True or traceback
    """
    d = self._get_lbry_file('sd_hash', p['sd_hash'], return_json=False)
    d.addCallback(self._reflect)
    # on failure, render the traceback text instead of True
    d.addCallbacks(
        lambda _: self._render_response(True, OK_CODE),
        lambda err: self._render_response(err.getTraceback(), OK_CODE))
    return d
def jsonrpc_get_blob_hashes(self):
    """
    Returns all blob hashes

    Args:
        None
    Returns:
        list of blob hashes
    """
    d = self.session.blob_manager.get_all_verified_blobs()
    d.addCallback(lambda hashes: self._render_response(hashes, OK_CODE))
    return d
def jsonrpc_reflect_all_blobs(self):
    """
    Reflects all saved blobs

    Args:
        None
    Returns:
        True
    """
    d = self.session.blob_manager.get_all_verified_blobs()
    d.addCallback(self._reflect_blobs)
    d.addCallback(lambda result: self._render_response(result, OK_CODE))
    return d
def jsonrpc_get_search_servers(self):
    """
    Get list of lighthouse servers

    Args:
        None
    Returns:
        List of address:port
    """
    return self._render_response(SEARCH_SERVERS, OK_CODE)
def get_lbrynet_version_from_github():
    """Return the latest released version from github.

    Raises an Exception when the latest release is marked as a pre-release.
    """
    url = 'https://api.github.com/repos/lbryio/lbry/releases/latest'
    release = requests.get(url).json()
    tag = release['tag_name']
    # githubs documentation claims this should never happen, but we'll check just in case
    if release['prerelease']:
        raise Exception('Release {} is a pre-release'.format(tag))
    return get_version_from_tag(tag)
def get_version_from_tag(tag):
    """Extract the dotted version number from a release tag like 'v1.2.3'.

    Args:
        tag: release tag string, expected to start with 'v' followed by
             digits and dots
    Returns:
        the version string without the leading 'v'
    Raises:
        Exception: if the tag does not match the expected format
    """
    # raw string fixes the invalid '\d' escape in the plain string literal
    match = re.match(r'v([\d.]+)', tag)
    if match:
        return match.group(1)
    else:
        raise Exception('Failed to parse version from tag {}'.format(tag))
def get_sd_hash(stream_info):
    """Return the stream's sd hash.

    Returns None for empty metadata; falls back to the legacy top-level
    'stream_hash' key when ['sources']['lbry_sd_hash'] is absent.
    """
    if not stream_info:
        return None
    try:
        sd_hash = stream_info['sources']['lbry_sd_hash']
    except KeyError:
        sd_hash = stream_info.get('stream_hash')
    return sd_hash
def get_output_callback(params):
    """Build a callback that maps a finished lbry file to its stream hash
    and download path.

    Args:
        params: object with sd_hash, stream_info and download_directory attributes
    Returns:
        a one-argument callable taking the lbry file object
    """
    def callback(lbry_file):
        # prefer the hash from params when stream info was supplied up front
        stream_hash = params.sd_hash if params.stream_info else lbry_file.sd_hash
        return {
            'stream_hash': stream_hash,
            'path': os.path.join(params.download_directory, lbry_file.file_name)
        }
    return callback
def get_darwin_lbrycrdd_path():
    """Return the lbrycrdd path from the OS X app bundle, falling back to
    the default relative path when the bundle is unavailable."""
    # use the path from the bundle if its available.
    default = "./lbrycrdd"
    try:
        import Foundation
    except ImportError:
        log.warning('Foundation module not installed, falling back to default lbrycrdd path')
        return default
    try:
        bundle = Foundation.NSBundle.mainBundle()
        return bundle.pathForResource_ofType_('lbrycrdd', None)
    except Exception:
        log.exception('Failed to get path from bundle, falling back to default')
        return default
class _DownloadNameHelper(object):
    """Helper that resolves a name to a downloaded lbry file, either by
    waiting on an existing download or by starting a new stream."""

    def __init__(self, daemon, name, timeout=DEFAULT_TIMEOUT, download_directory=None,
                 file_name=None, wait_for_write=True):
        self.daemon = daemon
        self.name = name
        self.timeout = timeout
        # fall back to the daemon's directory when none is given or it is invalid
        if not download_directory or not os.path.isdir(download_directory):
            self.download_directory = daemon.download_directory
        else:
            self.download_directory = download_directory
        self.file_name = file_name
        self.wait_for_write = wait_for_write

    def _setup_stream(self, stream_info):
        # Look up any existing lbry file for the stream's sd hash.
        stream_hash = get_sd_hash(stream_info)
        d = self.daemon._get_lbry_file_by_sd_hash(stream_hash)
        d.addCallback(self._prepend_stream_info, stream_info)
        return d

    def _prepend_stream_info(self, lbry_file, stream_info):
        # Only reuse the lbry file if its output file still exists on disk.
        if lbry_file:
            if os.path.isfile(os.path.join(self.download_directory, lbry_file.file_name)):
                return defer.succeed((stream_info, lbry_file))
        return defer.succeed((stream_info, None))

    def wait_or_get_stream(self, args):
        stream_info, lbry_file = args
        if lbry_file:
            log.debug('Wait on lbry_file')
            return self._wait_on_lbry_file(lbry_file)
        else:
            log.debug('No lbry_file, need to get stream')
            return self._get_stream(stream_info)

    def _get_stream(self, stream_info):
        d = self.daemon.add_stream(
            self.name, self.timeout, self.download_directory, self.file_name, stream_info)

        def _handle_timeout(args):
            was_successful, _, _ = args
            if not was_successful:
                log.warning("lbry://%s timed out, removing from streams", self.name)
                del self.daemon.streams[self.name]

        d.addCallback(_handle_timeout)
        if self.wait_for_write:
            d.addCallback(lambda _: self._wait_for_write())

        def _get_stream_for_return():
            stream = self.daemon.streams.get(self.name, None)
            if stream:
                return stream.downloader
            else:
                self._remove_from_wait("Timed out")
                return defer.fail(Exception("Timed out"))

        d.addCallback(lambda _: _get_stream_for_return())
        return d

    def _wait_for_write(self):
        d = defer.succeed(None)
        if not self.has_downloader_wrote():
            # bug fix: reactor.callLater returned an IDelayedCall immediately,
            # so the chain never actually waited for the retry; deferLater
            # chains the recursive wait into the callback chain (matching the
            # pattern used by _wait_on_lbry_file below)
            d.addCallback(lambda _: task.deferLater(reactor, 1, self._wait_for_write))
        return d

    def has_downloader_wrote(self):
        """Return a truthy byte count once the downloader has written data,
        False otherwise."""
        stream = self.daemon.streams.get(self.name, False)
        downloader = stream.downloader if stream else False
        if not downloader:
            return False
        return self.get_written_bytes(downloader.file_name)

    def _wait_on_lbry_file(self, f):
        written_bytes = self.get_written_bytes(f.file_name)
        if written_bytes:
            return defer.succeed(self._disp_file(f))
        return task.deferLater(reactor, 1, self._wait_on_lbry_file, f)

    def get_written_bytes(self, file_name):
        """Returns the number of bytes written to `file_name`.

        Returns False if there were issues reading `file_name`.
        """
        written_bytes = False
        try:
            file_path = os.path.join(self.download_directory, file_name)
            if os.path.isfile(file_path):
                # context manager replaces the py2-only file() builtin and
                # guarantees the handle is closed even on error
                with open(file_path) as written_file:
                    written_file.seek(0, os.SEEK_END)
                    written_bytes = written_file.tell()
        except Exception:
            # bug fix: the original assigned to a misspelled variable here
            # ('writen_bytes'), so any failure raised NameError on return
            written_bytes = False
        return written_bytes

    def _disp_file(self, f):
        file_path = os.path.join(self.download_directory, f.file_name)
        log.info("Already downloaded: %s --> %s", f.sd_hash, file_path)
        return f

    def _remove_from_wait(self, r):
        # Drop this name from the daemon's waiting set, passing the result through.
        if self.name in self.daemon.waiting_on:
            del self.daemon.waiting_on[self.name]
        return r
class _ResolveNameHelper(object):
def __init__(self, daemon, name, force_refresh):
self.daemon = daemon
self.name = name
self.force_refresh = force_refresh
def get_deferred(self):
if self.need_fresh_stream():
log.info("Resolving stream info for lbry://%s", self.name)
d = self.wallet.get_stream_info_for_name(self.name)
d.addCallbacks(self._cache_stream_info, lambda _: defer.fail(UnknownNameError))
else:
log.debug("Returning cached stream info for lbry://%s", self.name)
d = defer.succeed(self.name_data['claim_metadata'])
return d
@property
def name_data(self):
return self.daemon.name_cache[self.name]
@property
def wallet(self):
return self.daemon.session.wallet
def now(self):
return self.daemon._get_long_count_timestamp()
def _add_txid(self, txid):
self.name_data['txid'] = txid
return defer.succeed(None)
def _cache_stream_info(self, stream_info):
self.daemon.name_cache[self.name] = {
'claim_metadata': stream_info,
'timestamp': self.now()
}
d = self.wallet.get_txid_for_name(self.name)
d.addCallback(self._add_txid)
d.addCallback(lambda _: self.daemon._update_claim_cache())
d.addCallback(lambda _: self.name_data['claim_metadata'])
return d
def need_fresh_stream(self):
return self.force_refresh or not self.is_in_cache() or self.is_cached_name_expired()
def is_in_cache(self):
return self.name in self.daemon.name_cache
def is_cached_name_expired(self):
time_in_cache = self.now() - self.name_data['timestamp']
return time_in_cache >= self.daemon.cache_time
|
{
"content_hash": "2882ca960743585848496a1d0e6a618a",
"timestamp": "",
"source": "github",
"line_count": 2802,
"max_line_length": 144,
"avg_line_length": 38.258386866523914,
"alnum_prop": 0.5637873134328358,
"repo_name": "DaveA50/lbry",
"id": "05a654b3a3119aa5671cc791a3f026ebcabf2af8",
"size": "107200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lbrynet/lbrynet_daemon/Daemon.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "3242"
},
{
"name": "Python",
"bytes": "1167874"
},
{
"name": "Shell",
"bytes": "15744"
}
],
"symlink_target": ""
}
|
import bdd_grpc_util
import endorser_util
import bdd_test_util
import bootstrap_util
import orderer_util
import time
class ChannelCreationInfo:
    """Holds the information needed to construct a Config TX for orderer
    broadcast when creating a new channel."""

    def __init__(self, channelId, channelCreationPolicyName, signedConfigEnvelope):
        # channel identifier, the policy governing its creation, and the
        # signed config envelope to be broadcast
        self.channelId = channelId
        self.channelCreationPolicyName = channelCreationPolicyName
        self.signedConfigEnvelope = signedConfigEnvelope
@given(u'the orderer network has organizations')
def step_impl(context):
    # Register each organization from the step table and attach it to the
    # orderer network.
    assert 'table' in context, "Expected table of orderer organizations"
    directory = bootstrap_util.getDirectory(context)
    for row in context.table.rows:
        org = directory.getOrganization(row['Organization'], shouldCreate = True)
        org.addToNetwork(bootstrap_util.Network.Orderer)

@given(u'user requests role of orderer admin by creating a key and csr for orderer and acquires signed certificate from organization')
def step_impl(context):
    # Each table row names a (User, Orderer, Organization) triple to register.
    assert 'table' in context, "Expected table with triplet of User/Orderer/Organization"
    directory = bootstrap_util.getDirectory(context)
    for row in context.table.rows:
        directory.registerOrdererAdminTuple(row['User'], row['Orderer'], row['Organization'])

@given(u'user requests role for peer by creating a key and csr for peer and acquires signed certificate from organization')
def step_impl(context):
    # NOTE(review): peer tuples are registered through registerOrdererAdminTuple —
    # presumably the same registration path is shared for peers; confirm intended.
    assert 'table' in context, "Expected table with triplet of User/Peer/Organization"
    directory = bootstrap_util.getDirectory(context)
    for row in context.table.rows:
        directory.registerOrdererAdminTuple(row['User'], row['Peer'], row['Organization'])

@given(u'the peer network has organizations')
def step_impl(context):
    # Register each organization from the step table and attach it to the
    # peer network.
    assert 'table' in context, "Expected table of peer network organizations"
    directory = bootstrap_util.getDirectory(context)
    for row in context.table.rows:
        org = directory.getOrganization(row['Organization'], shouldCreate = True)
        org.addToNetwork(bootstrap_util.Network.Peer)

@given(u'a ordererBootstrapAdmin is identified and given access to all public certificates and orderer node info')
def step_impl(context):
    directory = bootstrap_util.getDirectory(context)
    assert len(directory.ordererAdminTuples) > 0, "No orderer admin tuples defined!!!"
    # Simply create the user
    bootstrap_util.getOrdererBootstrapAdmin(context, shouldCreate=True)

@given(u'the ordererBootstrapAdmin creates the genesis block for chain "{ordererSystemChainIdName}" for network config policy "{networkConfigPolicy}" and consensus "{consensusType}" using chain creators policies')
def step_impl(context, ordererSystemChainIdName, networkConfigPolicy, consensusType):
    # Resolve the chain id alias, gather the named signed config items from
    # the table, then build the genesis block and wire up the composition
    # callbacks for orderers and peers.
    ordererBootstrapAdmin = bootstrap_util.getOrdererBootstrapAdmin(context)
    ordererSystemChainIdGUUID = ordererBootstrapAdmin.tags[ordererSystemChainIdName]
    # Now collect the named signed config items
    signedConfigItems =[]
    for row in context.table.rows:
        signedConfigItemName = row['SignedConfigItemsName']
        signedConfigItems += ordererBootstrapAdmin.tags[signedConfigItemName]
    # Concatenate signedConfigItems
    # Construct block
    (genesisBlock,envelope) = bootstrap_util.createGenesisBlock(context, ordererSystemChainIdGUUID, consensusType, signedConfigItems=signedConfigItems)
    bootstrap_util.OrdererGensisBlockCompositionCallback(context, genesisBlock)
    bootstrap_util.PeerCompositionCallback(context)

@given(u'the orderer admins inspect and approve the genesis block for chain "{chainId}"')
def step_impl(context, chainId):
    # Intentional no-op: approval is implicit in the BDD scenario.
    pass

@given(u'the orderer admins use the genesis block for chain "{chainId}" to configure orderers')
def step_impl(context, chainId):
    # Intentional no-op placeholder.
    pass
    #raise NotImplementedError(u'STEP: Given the orderer admins use the genesis block for chain "testchainid" to configure orderers')

@given(u'the ordererBootstrapAdmin generates a GUUID to identify the orderer system chain and refer to it by name as "{ordererSystemChainId}"')
def step_impl(context, ordererSystemChainId):
    directory = bootstrap_util.getDirectory(context)
    ordererBootstrapAdmin = bootstrap_util.getOrdererBootstrapAdmin(context)
    # Tag the admin with a fresh UUID under the supplied alias.
    ordererBootstrapAdmin.tags[ordererSystemChainId] = bootstrap_util.GetUUID()

@given(u'the ordererBootstrapAdmin creates a chain creators policy "{chainCreatePolicyName}" (network name) for peer orgs who wish to form a network using orderer system chain "{ordererSystemChainId}"')
def step_impl(context, chainCreatePolicyName, ordererSystemChainId):
    directory = bootstrap_util.getDirectory(context)
    ordererBootstrapAdmin = bootstrap_util.getOrdererBootstrapAdmin(context)
    ordererSystemChainIdGuuid = ordererBootstrapAdmin.tags[ordererSystemChainId]
    # Collect the orgs from the table
    orgNames = [row['Organization'] for row in context.table.rows]
    bootstrap_util.addOrdererBootstrapAdminOrgReferences(context, chainCreatePolicyName, orgNames)
    chainCreatorsOrgsPolicySignedConfigItem = \
        bootstrap_util.createChainCreatorsPolicy(context=context, chainCreatePolicyName=chainCreatePolicyName, chaindId=ordererSystemChainIdGuuid, orgNames=orgNames)
    ordererBootstrapAdmin.tags[chainCreatePolicyName] = [chainCreatorsOrgsPolicySignedConfigItem]

@given(u'the ordererBootstrapAdmin runs the channel template tool to create the orderer configuration template "{templateName}" for application developers using orderer "{ordererComposeService}"')
def step_impl(context, templateName, ordererComposeService):
    # Template tool not wired in yet; placeholder step.
    pass

@given(u'the ordererBootstrapAdmin distributes orderer configuration template "template1" and chain creation policy name "chainCreatePolicy1"')
def step_impl(context):
    # Distribution is implicit in the scenario; no-op.
    pass
@given(u'the user "{userName}" creates a peer template "{templateName}" with chaincode deployment policy using chain creation policy name "{chainCreatePolicyName}" and peer organizations')
def step_impl(context, userName, templateName, chainCreatePolicyName):
    ' At the moment, only really defining MSP Config Items (NOT SIGNED)'
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName)
    # Tag the user with the organizations that make up the template.
    user.tags[templateName] = [directory.getOrganization(row['Organization']) for row in context.table.rows]

@given(u'the user "{userName}" creates a signedConfigEnvelope "{createChannelSignedConfigEnvelope}"')
def step_impl(context, userName, createChannelSignedConfigEnvelope):
    # Builds the signed configuration envelope for channel creation from the
    # first table row (ChannelID / Chain Creation Policy Name / Template / Anchors)
    # and stores it on the user under the given alias.
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName)
    ordererBootstrapAdmin = bootstrap_util.getOrdererBootstrapAdmin(context)
    channelID = context.table.rows[0]["ChannelID"]
    chainCreationPolicyName = context.table.rows[0]["Chain Creation Policy Name"]
    templateName = context.table.rows[0]["Template"]
    # Loop through templates referenced orgs
    mspOrgNames = [org.name for org in user.tags[templateName]]
    signedMspConfigItems = bootstrap_util.getSignedMSPConfigItems(context=context, chainId=channelID, orgNames=mspOrgNames)
    # Add the anchors signed config Items
    anchorSignedConfigItemsName = context.table.rows[0]["Anchors"]
    signedAnchorsConfigItems = user.tags[anchorSignedConfigItemsName]
    # Intermediate step until template tool is ready
    signedConfigItems = bootstrap_util.createSignedConfigItems(context, channelID, "solo", signedConfigItems=signedMspConfigItems + signedAnchorsConfigItems)
    #NOTE: Considered passing signing key for appDeveloper, but decided that the peer org signatures they need to collect subsequently should be proper way
    signedConfigEnvelope = bootstrap_util.signInitialChainConfig(signedConfigItems=signedConfigItems, chainId=channelID, chainCreationPolicyName=chainCreationPolicyName)
    user.tags[createChannelSignedConfigEnvelope] = ChannelCreationInfo(channelID, chainCreationPolicyName, signedConfigEnvelope)
    # Construct TX Config Envelope, broadcast, expect success, and then connect to deliver to revtrieve block.
    # Make sure the blockdata exactly the TxConfigEnvelope I submitted.
    # txConfigEnvelope = bootstrap_util.createConfigTxEnvelope(chainId=channelID, signedConfigEnvelope=signedConfigEnvelope)
@given(u'the following application developers are defined for peer organizations and each saves their cert as alias')
def step_impl(context):
    assert 'table' in context, "Expected table with triplet of Developer/ChainCreationPolicyName/Organization"
    directory = bootstrap_util.getDirectory(context)
    for row in context.table.rows:
        userName = row['Developer']
        nodeAdminNamedTuple = directory.registerOrdererAdminTuple(userName, row['ChainCreationPolicyName'], row['Organization'])
        user = directory.getUser(userName)
        # Remember the admin tuple under the requested alias for later cert lookup.
        user.tags[row['AliasSavedUnder']] = nodeAdminNamedTuple

@given(u'the user "{userName}" collects signatures for signedConfigEnvelope "{createChannelSignedConfigEnvelopeName}" from peer orgs')
def step_impl(context, userName, createChannelSignedConfigEnvelopeName):
    assert 'table' in context, "Expected table of peer organizations"
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName=userName)
    # Get the ChannelCreationInfo object that holds the signedConfigEnvelope
    channelCreationInfo = user.tags[createChannelSignedConfigEnvelopeName]
    signedConfigEnvelope = channelCreationInfo.signedConfigEnvelope
    for row in context.table.rows:
        org = directory.getOrganization(row['Organization'])
        assert bootstrap_util.Network.Peer in org.networks, "Organization '{0}' not in Peer network".format(org.name)
        # Each peer org signs the first config item of the envelope with its
        # self-signed cert.
        bootstrap_util.BootstrapHelper.addSignatureToSignedConfigItem(signedConfigEnvelope.Items[0], (org, org.getSelfSignedCert()))
    # print("Signatures for signedConfigEnvelope:\n {0}\n".format(signedConfigEnvelope.Items[0]))

@given(u'the user "{userName}" creates config Tx "{configTxName}" using signedConfigEnvelope "{createChannelSignedConfigEnvelopeName}"')
def step_impl(context, userName, configTxName, createChannelSignedConfigEnvelopeName):
    # Wrap the collected envelope in a config TX envelope and tag the user with it.
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName=userName)
    channelCreationInfo = user.tags[createChannelSignedConfigEnvelopeName]
    user.tags[configTxName] = bootstrap_util.createConfigTxEnvelope(channelCreationInfo.channelId, channelCreationInfo.signedConfigEnvelope)

@given(u'the user "{userName}" broadcasts config Tx "{configTxName}" to orderer "{orderer}" to create channel "{channelId}"')
def step_impl(context, userName, configTxName, orderer, channelId):
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName=userName)
    configTxEnvelope = user.tags[configTxName]
    bootstrap_util.broadcastCreateChannelConfigTx(context=context, composeService=orderer, chainId=channelId, user=user, configTxEnvelope=configTxEnvelope)
@when(u'user "{userName}" connects to deliver function on orderer "{composeService}"')
def step_impl(context, userName, composeService):
directory = bootstrap_util.getDirectory(context)
user = directory.getUser(userName=userName)
user.connectToDeliverFunction(context, composeService)
@when(u'user "{userName}" sends deliver a seek request on orderer "{composeService}" with properties')
def step_impl(context, userName, composeService):
directory = bootstrap_util.getDirectory(context)
user = directory.getUser(userName=userName)
row = context.table.rows[0]
chainID = row['ChainId']
start, end, = orderer_util.convertSeek(row['Start']), orderer_util.convertSeek(row['End'])
streamHelper = user.getDelivererStreamHelper(context, composeService)
streamHelper.seekToRange(chainID=chainID, start = start, end = end)
@then(u'user "{userName}" should get a delivery "{deliveryName}" from "{composeService}" of "{expectedBlocks}" blocks with "{numMsgsToBroadcast}" messages within "{batchTimeout}" seconds')
def step_impl(context, userName, deliveryName, composeService, expectedBlocks, numMsgsToBroadcast, batchTimeout):
directory = bootstrap_util.getDirectory(context)
user = directory.getUser(userName=userName)
streamHelper = user.getDelivererStreamHelper(context, composeService)
blocks = streamHelper.getBlocks()
# Verify block count
assert len(blocks) == int(expectedBlocks), "Expected {0} blocks, received {1}".format(expectedBlocks, len(blocks))
user.tags[deliveryName] = blocks
@when(u'user "{userName}" using cert alias "{certAlias}" requests to join channel using genesis block "{genisisBlockName}" on peers with result "{joinChannelResult}"')
def step_impl(context, userName, certAlias, genisisBlockName, joinChannelResult):
timeout = 10
directory = bootstrap_util.getDirectory(context)
user = directory.getUser(userName)
# Find the cert using the cert tuple information saved for the user under certAlias
signersCert = directory.findCertForNodeAdminTuple(user.tags[certAlias])
# Retrieve the genesis block from the returned value of deliver (Will be list with first block as genesis block)
genesisBlock = user.tags[genisisBlockName][0]
ccSpec = endorser_util.getChaincodeSpec("GOLANG", "", "cscc", ["JoinChain", genesisBlock.SerializeToString()])
proposal = endorser_util.createInvokeProposalForBDD(ccSpec=ccSpec, chainID="",signersCert=signersCert, Mspid="DEFAULT", type="CONFIGURATION_TRANSACTION")
signedProposal = endorser_util.signProposal(proposal=proposal, entity=user, signersCert=signersCert)
# Send proposal to each specified endorser, waiting 'timeout' seconds for response/error
endorsers = [row['Peer'] for row in context.table.rows]
proposalResponseFutures = [endorserStub.ProcessProposal.future(signedProposal, int(timeout)) for endorserStub in endorser_util.getEndorserStubs(context, endorsers)]
resultsDict = dict(zip(endorsers, [respFuture.result() for respFuture in proposalResponseFutures]))
user.tags[joinChannelResult] = resultsDict
@given(u'the ordererBoostrapAdmin creates MSP Configuration Items "{mspConfigItemsName}" for orderer system chain "{ordererSystemChainIdName}" for every MSP referenced by the policies')
def step_impl(context, ordererSystemChainIdName, mspConfigItemsName):
    """Build signed MSP config items for every MSP named by the table's policies."""
    assert 'table' in context, "Expected table of policy names"
    # NOTE(review): return value unused; presumably kept for its side effect of
    # ensuring the directory exists in the context -- confirm before removing.
    directory = bootstrap_util.getDirectory(context)
    ordererBootstrapAdmin = bootstrap_util.getOrdererBootstrapAdmin(context)
    chainId = ordererBootstrapAdmin.tags[ordererSystemChainIdName]
    policyNames = [row['PolicyName'] for row in context.table.rows]
    ordererBootstrapAdmin.tags[mspConfigItemsName] = bootstrap_util.getMspConfigItemsForPolicyNames(
        context, chainId=chainId, policyNames=policyNames)
@given(u'the ordererBoostrapAdmin creates the chain creation policy names "{chainCreationPolicyNames}" signedConfigurationItem for orderer system chain "{ordererSystemChainIdName}" with policies')
def step_impl(context, chainCreationPolicyNames, ordererSystemChainIdName):
    """Create the chain-creation-policy-names config item for the system chain."""
    ordererBootstrapAdmin = bootstrap_util.getOrdererBootstrapAdmin(context)
    chainId = ordererBootstrapAdmin.tags[ordererSystemChainIdName]
    policyNames = [row['PolicyName'] for row in context.table.rows]
    # NOTE(review): 'chaindId' (sic) matches the helper's keyword parameter
    # spelling -- do not "fix" it here without changing the helper too.
    configItem = bootstrap_util.createChainCreationPolicyNames(
        context, chainCreationPolicyNames=policyNames, chaindId=chainId)
    ordererBootstrapAdmin.tags[chainCreationPolicyNames] = [configItem]
@then(u'user "{userName}" expects result code for "{proposalResponseName}" of "{proposalResponseResultCode}" from peers')
def step_impl(context, userName, proposalResponseName, proposalResponseResultCode):
    """Verify every peer's saved proposal response carries the expected payload.

    Fails the step if any peer returned a payload other than
    proposalResponseResultCode, printing each offending response.
    """
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName=userName)
    # composeService -> ProposalResponse, as stored by the earlier invoke step.
    peerToProposalResponseDict = user.tags[proposalResponseName]
    unexpectedResponses = [(composeService, proposalResponse)
                           for composeService, proposalResponse in peerToProposalResponseDict.items()
                           if proposalResponse.response.payload != proposalResponseResultCode]
    # Bug fix: the original printed the comprehension's loop variable (a
    # NameError on Python 3, and at best only the last response on Python 2)
    # and never failed the step even though this is a @then assertion.
    for composeService, proposalResponse in unexpectedResponses:
        print("Unexpected ProposalResponse from {0}: \n{1}\n".format(composeService, proposalResponse))
    assert not unexpectedResponses, "Expected result code {0} from all peers, unexpected responses from: {1}".format(
        proposalResponseResultCode, [composeService for composeService, _ in unexpectedResponses])
@given(u'the user "{userName}" creates an peer anchor set "{anchorSetName}" for channel "{channelName}" for orgs')
def step_impl(context, userName, anchorSetName, channelName):
    """Create signed anchor-peer config items for the orgs listed in the table."""
    directory = bootstrap_util.getDirectory(context)
    user = directory.getUser(userName=userName)
    # One admin tuple per (User, Peer, Organization) row in the step table.
    nodeAdminTuples = []
    for row in context.table.rows:
        nodeAdminTuples.append(directory.findNodeAdminTuple(row['User'], row['Peer'], row['Organization']))
    user.tags[anchorSetName] = bootstrap_util.getSignedAnchorConfigItems(
        context=context, chainId=channelName, nodeAdminTuples=nodeAdminTuples)
|
{
"content_hash": "94f3b5c89452df051947d45d0c534eb6",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 213,
"avg_line_length": 61.17454545454545,
"alnum_prop": 0.7889199310467812,
"repo_name": "masterDev1985/fabric",
"id": "d1b0e767857e65708ee14770cebc8f39bfb43a09",
"size": "17421",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bddtests/steps/bootstrap_impl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "110633"
},
{
"name": "Go",
"bytes": "2918014"
},
{
"name": "Java",
"bytes": "70137"
},
{
"name": "Makefile",
"bytes": "13630"
},
{
"name": "Protocol Buffer",
"bytes": "78675"
},
{
"name": "Python",
"bytes": "166502"
},
{
"name": "Ruby",
"bytes": "3255"
},
{
"name": "Shell",
"bytes": "42783"
}
],
"symlink_target": ""
}
|
"""
Compatibility Support for Python 2.6 and earlier
"""
import sys
try:
from urllib.parse import splittag
except ImportError:
from urllib import splittag
def strip_fragment(url):
    """
    In `Python 8280 <http://bugs.python.org/issue8280>`_, Python 2.7 and
    later was patched to disregard the fragment when making URL requests.
    Do the same for Python 2.6 and earlier.
    """
    # splittag separates "base#fragment"; only the base is kept.
    base, _fragment = splittag(url)
    return base
if sys.version_info >= (2, 7):
    # Python 2.7+ already ignores the fragment itself (see issue 8280), so the
    # helper becomes a no-op.  Use a def rather than a lambda assignment
    # (PEP 8 E731) so the function keeps a useful __name__ for debugging.
    def strip_fragment(url):
        """No-op: this Python already disregards the URL fragment."""
        return url
|
{
"content_hash": "bc13d4aea2d675dd09ee43879b29e821",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 73,
"avg_line_length": 22.304347826086957,
"alnum_prop": 0.6803118908382066,
"repo_name": "letouriste001/SmartForest_2.0",
"id": "7c60c90ea598b81dba73660a9a2484bfa47d5674",
"size": "513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python3.4Smartforest/lib/python3.4/site-packages/setuptools/py26compat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "68689"
},
{
"name": "Python",
"bytes": "6055383"
},
{
"name": "Shell",
"bytes": "3294"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the repository app (Django auto-generated migration)."""

    dependencies = [
        # release.VariantArch (FK target below) is created by release.0001_initial.
        ('release', '0001_initial'),
    ]

    operations = [
        # Lookup table: broad category of the repo contents.
        migrations.CreateModel(
            name='ContentCategory',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=50)),
                ('description', models.CharField(max_length=200)),
            ],
        ),
        # Lookup table: format of the repo contents.
        migrations.CreateModel(
            name='ContentFormat',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=50)),
                ('description', models.CharField(max_length=200)),
            ],
        ),
        # The central model; FKs to the lookup tables created above.
        migrations.CreateModel(
            name='Repo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('shadow', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=2000, db_index=True)),
                ('product_id', models.PositiveIntegerField(null=True, blank=True)),
                ('content_category', models.ForeignKey(to='repository.ContentCategory')),
                ('content_format', models.ForeignKey(to='repository.ContentFormat')),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='RepoFamily',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=50)),
                ('description', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Service',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=50)),
                ('description', models.CharField(max_length=200)),
            ],
        ),
        # The remaining Repo FKs are added only after their target models exist.
        migrations.AddField(
            model_name='repo',
            name='repo_family',
            field=models.ForeignKey(to='repository.RepoFamily'),
        ),
        migrations.AddField(
            model_name='repo',
            name='service',
            field=models.ForeignKey(to='repository.Service'),
        ),
        migrations.AddField(
            model_name='repo',
            name='variant_arch',
            # PROTECT: a VariantArch cannot be deleted while repos reference it.
            field=models.ForeignKey(related_name='repos', on_delete=django.db.models.deletion.PROTECT, to='release.VariantArch'),
        ),
        # A repo must be unique across this full attribute tuple.
        migrations.AlterUniqueTogether(
            name='repo',
            unique_together=set([('variant_arch', 'service', 'repo_family', 'content_format', 'content_category', 'name', 'shadow')]),
        ),
    ]
|
{
"content_hash": "fb6ec5baa86356a40958c34501540853",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 134,
"avg_line_length": 40.20253164556962,
"alnum_prop": 0.547544080604534,
"repo_name": "puiterwijk/product-definition-center",
"id": "b786c31310715a53e488494c543e8650977b3ff6",
"size": "3309",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "pdc/apps/repository/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1553"
},
{
"name": "Groff",
"bytes": "1766"
},
{
"name": "HTML",
"bytes": "48323"
},
{
"name": "JavaScript",
"bytes": "6629"
},
{
"name": "Makefile",
"bytes": "2677"
},
{
"name": "Python",
"bytes": "1213188"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import *
# Register your models here.
class LevelAdmin(admin.ModelAdmin):
    # Changelist shows only the level name.
    list_display = ('name',)

admin.site.register(Level, LevelAdmin)
# class EnDefinitionInline(admin.StackedInline):
# model = EnDefinition
# fields = ('pos', 'defn',)
# show_change_link = True
class PronunciationInline(admin.StackedInline):
    # Edit a word's pronunciations (US and UK) inline on the Word admin page.
    model = Pronunciation
    fields = ('us', 'uk')
class WordAdmin(admin.ModelAdmin):
    # Changelist shows and searches the word text and its Chinese definition.
    list_display = ('content', 'cn_definition')
    search_fields = ('content', 'cn_definition')
    inlines = (PronunciationInline,)

admin.site.register(Word, WordAdmin)
|
{
"content_hash": "5d6e6e28577daefff0587aa26efd4f6a",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 48,
"avg_line_length": 24.576923076923077,
"alnum_prop": 0.701095461658842,
"repo_name": "HideMode/ShanBay",
"id": "0d88b8e2ebe0347799375c3d373e12aff3965919",
"size": "639",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "word/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "287039"
},
{
"name": "HTML",
"bytes": "68662"
},
{
"name": "JavaScript",
"bytes": "1097995"
},
{
"name": "Python",
"bytes": "37924"
}
],
"symlink_target": ""
}
|
"""
XX. Model inheritance
Model inheritance exists in two varieties:
- abstract base classes which are a way of specifying common
information inherited by the subclasses. They don't exist as a separate
model.
- non-abstract base classes (the default), which are models in their own
right with their own database tables and everything. Their subclasses
have references back to them, created automatically.
Both styles are demonstrated here.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
#
# Abstract base classes
#
@python_2_unicode_compatible
class CommonInfo(models.Model):
    # Abstract base: shared fields for Worker/Student; creates no table.
    name = models.CharField(max_length=50)
    age = models.PositiveIntegerField()

    class Meta:
        abstract = True
        ordering = ['name']

    def __str__(self):
        # Prefix with the concrete subclass name, e.g. "Worker Fred".
        return '%s %s' % (self.__class__.__name__, self.name)

class Worker(CommonInfo):
    job = models.CharField(max_length=50)

class Student(CommonInfo):
    school_class = models.CharField(max_length=10)

    # NOTE: deliberately empty Meta (not inheriting CommonInfo.Meta) --
    # presumably this fixture exercises dropping the inherited options.
    class Meta:
        pass

class StudentWorker(Student, Worker):
    # Multiple inheritance through two subclasses of the same abstract base.
    pass
#
# Abstract base classes with related models
#
class Post(models.Model):
    title = models.CharField(max_length=50)

@python_2_unicode_compatible
class Attachment(models.Model):
    # '%(class)s' expands per concrete subclass, giving Post distinct reverse
    # accessors: attached_comment_set and attached_link_set.
    post = models.ForeignKey(Post, related_name='attached_%(class)s_set')
    content = models.TextField()

    class Meta:
        abstract = True

    def __str__(self):
        return self.content

class Comment(Attachment):
    is_spam = models.BooleanField(default=False)

class Link(Attachment):
    url = models.URLField()
#
# Multi-table inheritance
#
@python_2_unicode_compatible
class Chef(models.Model):
    name = models.CharField(max_length=50)

    def __str__(self):
        return "%s the chef" % self.name

@python_2_unicode_compatible
class Place(models.Model):
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)

    def __str__(self):
        return "%s the place" % self.name

class Rating(models.Model):
    # Abstract mixin contributing a nullable rating and default ordering.
    rating = models.IntegerField(null=True, blank=True)

    class Meta:
        abstract = True
        ordering = ['-rating']

@python_2_unicode_compatible
class Restaurant(Place, Rating):
    # Multi-table child of the concrete Place, plus the abstract Rating mixin.
    serves_hot_dogs = models.BooleanField(default=False)
    serves_pizza = models.BooleanField(default=False)
    chef = models.ForeignKey(Chef, null=True, blank=True)

    class Meta(Rating.Meta):
        # Extends Rating.Meta (keeping its ordering) while renaming the table.
        db_table = 'my_restaurant'

    def __str__(self):
        return "%s the restaurant" % self.name

@python_2_unicode_compatible
class ItalianRestaurant(Restaurant):
    # Second level of multi-table inheritance.
    serves_gnocchi = models.BooleanField(default=False)

    def __str__(self):
        return "%s the italian restaurant" % self.name

@python_2_unicode_compatible
class Supplier(Place):
    customers = models.ManyToManyField(Restaurant, related_name='provider')

    def __str__(self):
        return "%s the supplier" % self.name

@python_2_unicode_compatible
class ParkingLot(Place):
    # An explicit link to the parent (we can control the attribute name).
    parent = models.OneToOneField(Place, primary_key=True, parent_link=True)
    main_site = models.ForeignKey(Place, related_name='lot')

    def __str__(self):
        return "%s the parking lot" % self.name
#
# Abstract base classes with related models where the sub-class has the
# same name in a different app and inherits from the same abstract base
# class.
# NOTE: The actual API tests for the following classes are in
# model_inheritance_same_model_name/models.py - They are defined
# here in order to have the name conflict between apps
#
class Title(models.Model):
    title = models.CharField(max_length=50)

class NamedURL(models.Model):
    # '%(app_label)s' disambiguates same-named subclasses in other apps
    # (see the name-conflict note in the section header above).
    title = models.ForeignKey(Title, related_name='attached_%(app_label)s_%(class)s_set')
    url = models.URLField()

    class Meta:
        abstract = True

@python_2_unicode_compatible
class Copy(NamedURL):
    content = models.TextField()

    def __str__(self):
        return self.content

class Mixin(object):
    # Plain (non-model) mixin; cooperates with Model.__init__ via super().
    def __init__(self):
        self.other_attr = 1
        super(Mixin, self).__init__()

class MixinModel(models.Model, Mixin):
    pass

class Base(models.Model):
    titles = models.ManyToManyField(Title)

class SubBase(Base):
    # Child that declares its own explicit primary key.
    sub_id = models.IntegerField(primary_key=True)
|
{
"content_hash": "f99aa00a870af7e77444a7bac94c751f",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 89,
"avg_line_length": 23.046875,
"alnum_prop": 0.6858757062146893,
"repo_name": "rogerhu/django",
"id": "7f5702da59be245b8b7b720429bd96efcf867d59",
"size": "4425",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tests/model_inheritance/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "51177"
},
{
"name": "JavaScript",
"bytes": "102290"
},
{
"name": "Python",
"bytes": "9186415"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
}
|
# Django moved url/patterns/include out of 'defaults' in 1.4; fall back
# to the old location on older releases.
try:
    from django.conf.urls import url, patterns, include
except ImportError:
    from django.conf.urls.defaults import *

# render() appeared in Django 1.3; older releases spell it direct_to_template.
try:
    from django.shortcuts import render
except ImportError:
    from django.views.generic.simple import direct_to_template as render

urlpatterns = patterns('',
    url(r'^yandex/', include('yandex_maps.urls')),
    # Minimal root page so the test project has an index URL.
    url(r'^$', lambda request: render(request, 'index.html'), name='index')
)
|
{
"content_hash": "a368ffacf0443a65fb478f37c8fa477e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 75,
"avg_line_length": 30.071428571428573,
"alnum_prop": 0.7149643705463183,
"repo_name": "kmike/yandex-maps",
"id": "ec377b8a3f6c4b2ccb9dccd0f4b5ad63925e1dcf",
"size": "421",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "yandex_maps_tests/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18897"
}
],
"symlink_target": ""
}
|
import numpy as np
from sklearn_pmml.convert.test.jpmml_test import JPMMLClassificationTest, JPMMLRegressionTest, TARGET_NAME, TARGET
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn_pmml.convert import TransformationContext, pmml_row, ModelMode, Schema
from sklearn_pmml.convert.features import *
from sklearn_pmml.convert.tree import DecisionTreeConverter
from sklearn_pmml import pmml
from unittest import TestCase
class TestDecisionTreeClassifierConverter(TestCase):
    """Conversion of a small DecisionTreeClassifier to PMML."""

    def setUp(self):
        np.random.seed(1)
        # Tiny OR-like dataset: label is 1 unless both features are 0.
        self.est = DecisionTreeClassifier(max_depth=2)
        self.est.fit([
            [0, 0],
            [0, 1],
            [1, 0],
            [1, 1],
        ], [0, 1, 1, 1])
        self.ctx = TransformationContext({
            Schema.INPUT: [
                IntegerNumericFeature('x1'),
                StringCategoricalFeature('x2', ['zero', 'one'])
            ],
            Schema.MODEL: [
                IntegerNumericFeature('x1'),
                StringCategoricalFeature('x2', ['zero', 'one'])
            ],
            Schema.DERIVED: [],
            Schema.OUTPUT: [
                IntegerNumericFeature('output')
            ]
        })
        self.converter = DecisionTreeConverter(
            estimator=self.est,
            context=self.ctx,
            mode=ModelMode.CLASSIFICATION
        )

    def test_transform(self):
        # Smoke-check the structure of the generated PMML tree model.
        p = self.converter.pmml()
        tm = p.TreeModel[0]
        assert tm.MiningSchema is not None, 'Missing mining schema'
        assert len(tm.MiningSchema.MiningField) == 2, 'Wrong number of mining fields'
        assert tm.Node is not None, 'Missing root node'
        assert tm.Node.recordCount == 4
        assert tm.Node.True_ is not None, 'Root condition should always be True'

    def test_transform_with_derived_field(self):
        self.est = DecisionTreeClassifier(max_depth=2)
        # Third column equals x1 AND x2; the derived feature below reproduces
        # it via a PMML MapValues inline table.
        self.est.fit([
            [0, 0, 0],
            [0, 1, 0],
            [1, 0, 0],
            [1, 1, 1],
        ], [0, 1, 1, 1])
        mapping = pmml.MapValues(dataType="double", outputColumn="output")
        mapping.append(pmml.FieldColumnPair(column="x1", field="x1"))
        mapping.append(pmml.FieldColumnPair(column="x2", field="x2"))
        it = pmml.InlineTable()
        # NOTE(review): `pd` is not among this module's visible imports;
        # presumably it arrives via a star-import -- confirm.
        mapping_df = pd.DataFrame([
            dict(x1=0, x2='zero', output=0),
            dict(x1=0, x2='one', output=0),
            dict(x1=1, x2='zero', output=0),
            dict(x1=1, x2='one', output=1),
        ])
        for idx, line in mapping_df.iterrows():
            it.append(pmml_row(**dict(line)))
        mapping.append(it)
        # NOTE(review): set_index is not in-place and its result is discarded,
        # so mapping_df keeps its default index; mapping_f's .ix lookup below
        # may depend on that. Confirm intent before assigning the result.
        mapping_df.set_index(keys=['x1', 'x2'])
        mapping_f = np.vectorize(lambda x1, x2: mapping_df.ix[x1, x2].output.values[0])
        self.ctx = TransformationContext({
            Schema.INPUT: [
                IntegerNumericFeature('x1'),
                StringCategoricalFeature('x2', ['zero', 'one'])
            ],
            Schema.DERIVED: [
                DerivedFeature(
                    feature=RealNumericFeature(name='x3'),
                    transformation=mapping,
                    function=mapping_f
                )
            ],
            Schema.MODEL: [
                IntegerNumericFeature('x1'),
                StringCategoricalFeature('x2', ['zero', 'one']),
                RealNumericFeature(name='x3')
            ],
            Schema.OUTPUT: [
                IntegerCategoricalFeature('output', ['neg', 'pos'])
            ]
        })
        self.converter = DecisionTreeConverter(
            estimator=self.est,
            context=self.ctx,
            mode=ModelMode.CLASSIFICATION
        )
        # Only asserts that PMML generation succeeds end-to-end.
        self.converter.pmml().toxml()
class TestDecisionTreeRegressorConverter(TestCase):
    """Conversion of a small DecisionTreeRegressor to PMML (mirrors the classifier test)."""

    def setUp(self):
        np.random.seed(1)
        self.est = DecisionTreeRegressor(max_depth=2)
        self.est.fit([
            [0, 0],
            [0, 1],
            [1, 0],
            [1, 1],
        ], [0, 1, 1, 1])
        self.ctx = TransformationContext({
            Schema.INPUT: [
                IntegerNumericFeature('x1'),
                StringCategoricalFeature('x2', ['zero', 'one'])
            ],
            Schema.MODEL: [
                IntegerNumericFeature('x1'),
                StringCategoricalFeature('x2', ['zero', 'one'])
            ],
            Schema.DERIVED: [],
            Schema.OUTPUT: [
                IntegerNumericFeature('output')
            ]
        })
        self.converter = DecisionTreeConverter(
            estimator=self.est,
            context=self.ctx,
            mode=ModelMode.REGRESSION
        )

    def test_transform(self):
        # Smoke-check the structure of the generated PMML tree model.
        p = self.converter.pmml()
        tm = p.TreeModel[0]
        assert tm.MiningSchema is not None, 'Missing mining schema'
        assert len(tm.MiningSchema.MiningField) == 2, 'Wrong number of mining fields'
        assert tm.Node is not None, 'Missing root node'
        assert tm.Node.recordCount == 4
        assert tm.Node.True_ is not None, 'Root condition should always be True'
class TestDecisionTreeClassificationJPMMLParity(TestCase, JPMMLClassificationTest):
    # Parity test against the JPMML reference implementation; the data setup
    # (init_data, self.ctx) presumably comes from the JPMML mixin -- confirm.
    def setUp(self):
        self.model = DecisionTreeClassifier(max_depth=2)
        self.init_data()
        self.converter = DecisionTreeConverter(
            estimator=self.model,
            context=self.ctx,
            mode=ModelMode.CLASSIFICATION
        )

    @property
    def output(self):
        # Target feature definition consumed by the JPMML test mixin.
        return IntegerCategoricalFeature(name=TARGET_NAME, value_list=TARGET)
class TestDecisionTreeRegressionJPMMLParity(TestCase, JPMMLRegressionTest):
    # Regression counterpart of the JPMML parity test above.
    def setUp(self):
        self.model = DecisionTreeRegressor()
        self.init_data()
        self.converter = DecisionTreeConverter(
            estimator=self.model,
            context=self.ctx,
            mode=ModelMode.REGRESSION
        )
|
{
"content_hash": "6a50edeb7ba2e5c8543f0b78eb362fcf",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 114,
"avg_line_length": 34.588235294117645,
"alnum_prop": 0.5593537414965987,
"repo_name": "alex-pirozhenko/sklearn-pmml",
"id": "b116850cd9a46353c5657d9b5781abbb3c652889",
"size": "5880",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sklearn_pmml/convert/test/test_decisionTreeClassifierConverter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "5707"
},
{
"name": "Python",
"bytes": "2294560"
}
],
"symlink_target": ""
}
|
from pymongo import MongoClient
from flask import Flask, request, jsonify
DB_COUNT = 32
dbs = {}
DBNAME = "replay"
COLLECTION = "data"
def _hash(hash_str):
    """Map a replay id onto a shard number in [1, DB_COUNT].

    Uses a position-weighted character sum so a given id always lands on
    the same shard.
    """
    weighted_sum = sum(ord(ch) * pos for pos, ch in enumerate(hash_str, 1))
    return (weighted_sum % DB_COUNT) + 1
def _get_collection(replay_id):
    """Return the data collection of the shard database that owns replay_id."""
    # Renamed from `id`, which shadowed the builtin of the same name.
    shard = _hash(replay_id)
    db = dbs[DBNAME + str(shard)]
    # Every shard database holds one well-known collection.
    return db[COLLECTION]
def get_replay(replay_id):
    """Fetch the stored replay document for replay_id (None when absent)."""
    return _get_collection(replay_id).find_one({"replay_id": replay_id})
def init():
    """Open one handle per shard database on the local MongoDB server."""
    client = MongoClient("localhost", 27017)
    for shard in range(1, DB_COUNT + 1):
        shard_name = DBNAME + str(shard)
        dbs[shard_name] = client[shard_name]
# Connect to MongoDB at import time so request handlers can use the shards.
init()
app = Flask(__name__)
@app.route('/')
def hello_world():
    # Trivial liveness endpoint.
    return 'Hello World!'
@app.route('/get_replay')
def web_get_replay():
    """Return the replay identified by ?replay_id=... as JSON ({} if unknown)."""
    raw_id = request.args.get("replay_id", "")
    # Encoded before lookup -- presumably stored ids are byte strings
    # (Python 2 era code); confirm before changing.
    record = get_replay(raw_id.encode("utf8"))
    if not record:
        return jsonify({})
    return jsonify(**record)
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
|
{
"content_hash": "af9237a117d533acfe2c17ff8d629e90",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 52,
"avg_line_length": 21.25,
"alnum_prop": 0.5927601809954751,
"repo_name": "spin6lock/web_replay",
"id": "25252edbcfbde3b28447eb6192904e21c3682272",
"size": "1120",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1120"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
# the following is a hack to get the baseclient to import whether we're in a
# package or not. This makes pep8 unhappy hence the annotations.
try:
# baseclient and this client are in a package
from .baseclient import BaseClient as _BaseClient # @UnusedImport
except ImportError:
# no they aren't
from baseclient import BaseClient as _BaseClient # @Reimport
class Catalog(object):
    def __init__(
            self, url=None, timeout=30 * 60, user_id=None,
            password=None, token=None, ignore_authrc=False,
            trust_all_ssl_certificates=False,
            auth_svc='https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login'):
        # url defaults to None only so the error below is explicit; it is
        # required in practice.
        if url is None:
            raise ValueError('A url is required')
        # None selects the service's default released version on every call.
        self._service_ver = None
        # All transport concerns (auth, JSON-RPC encoding) live in BaseClient.
        self._client = _BaseClient(
            url, timeout=timeout, user_id=user_id, password=password,
            token=token, ignore_authrc=ignore_authrc,
            trust_all_ssl_certificates=trust_all_ssl_certificates,
            auth_svc=auth_svc)
def version(self, context=None):
"""
Get the version of the deployed catalog service endpoint.
:returns: instance of String
"""
return self._client.call_method('Catalog.version',
[], self._service_ver, context)
def is_registered(self, params, context=None):
"""
returns true (1) if the module exists, false (2) otherwise
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
:returns: instance of type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.is_registered',
[params], self._service_ver, context)
def register_repo(self, params, context=None):
"""
allow/require developer to supply git branch/git commit tag?
if this is a new module, creates the initial registration with the authenticated user as
the sole owner, then launches a build to update the dev version of the module. You can check
the state of this build with the 'get_module_state' method passing in the git_url. If the module
already exists, then you must be an owner to reregister. That will immediately overwrite your
dev version of the module (old dev versions are not stored, but you can always reregister an old
version from the repo) and start a build.
:param params: instance of type "RegisterRepoParams" -> structure:
parameter "git_url" of String, parameter "git_commit_hash" of
String
:returns: instance of String
"""
return self._client.call_method('Catalog.register_repo',
[params], self._service_ver, context)
def push_dev_to_beta(self, params, context=None):
"""
immediately updates the beta tag to what is currently in dev, whatever is currently in beta
is discarded. Will fail if a release request is active and has not been approved/denied
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
"""
return self._client.call_method('Catalog.push_dev_to_beta',
[params], self._service_ver, context)
def request_release(self, params, context=None):
"""
requests a push from beta to release version; must be approved be a kbase Admin
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
"""
return self._client.call_method('Catalog.request_release',
[params], self._service_ver, context)
def list_requested_releases(self, context=None):
"""
:returns: instance of list of type "RequestedReleaseInfo" ->
structure: parameter "module_name" of String, parameter "git_url"
of String, parameter "git_commit_hash" of String, parameter
"git_commit_message" of String, parameter "timestamp" of Long,
parameter "owners" of list of String
"""
return self._client.call_method('Catalog.list_requested_releases',
[], self._service_ver, context)
def review_release_request(self, review, context=None):
"""
:param review: instance of type "ReleaseReview" (decision - approved
| denied review_message -) -> structure: parameter "module_name"
of String, parameter "git_url" of String, parameter "decision" of
String, parameter "review_message" of String
"""
return self._client.call_method('Catalog.review_release_request',
[review], self._service_ver, context)
def list_basic_module_info(self, params, context=None):
"""
:param params: instance of type "ListModuleParams" (Describes how to
filter repositories. include_released - optional flag indicated
modules that are released are included (default:true)
include_unreleased - optional flag indicated modules that are not
released are included (default:false) with_disabled - optional
flag indicating disabled repos should be included (default:false).
include_modules_with_no_name_set - default to 0, if set return
modules that were never registered successfully (first
registration failed, never got a module name, but there is a
git_url)) -> structure: parameter "owners" of list of String,
parameter "include_released" of type "boolean" (@range [0,1]),
parameter "include_unreleased" of type "boolean" (@range [0,1]),
parameter "include_disabled" of type "boolean" (@range [0,1]),
parameter "include_modules_with_no_name_set" of type "boolean"
(@range [0,1])
:returns: instance of list of type "BasicModuleInfo" (git_url is
always returned. Every other field may or may not exist depending
on what has been registered or if certain registrations have
failed) -> structure: parameter "module_name" of String, parameter
"git_url" of String, parameter "language" of String, parameter
"dynamic_service" of type "boolean" (@range [0,1]), parameter
"owners" of list of String, parameter "dev" of type
"VersionCommitInfo" -> structure: parameter "git_commit_hash" of
String, parameter "beta" of type "VersionCommitInfo" -> structure:
parameter "git_commit_hash" of String, parameter "release" of type
"VersionCommitInfo" -> structure: parameter "git_commit_hash" of
String, parameter "released_version_list" of list of type
"VersionCommitInfo" -> structure: parameter "git_commit_hash" of
String
"""
return self._client.call_method('Catalog.list_basic_module_info',
[params], self._service_ver, context)
def add_favorite(self, params, context=None):
"""
:param params: instance of type "FavoriteItem" (FAVORITES!!) ->
structure: parameter "module_name" of String, parameter "id" of
String
"""
return self._client.call_method('Catalog.add_favorite',
[params], self._service_ver, context)
def remove_favorite(self, params, context=None):
"""
:param params: instance of type "FavoriteItem" (FAVORITES!!) ->
structure: parameter "module_name" of String, parameter "id" of
String
"""
return self._client.call_method('Catalog.remove_favorite',
[params], self._service_ver, context)
def list_favorites(self, username, context=None):
"""
:param username: instance of String
:returns: instance of list of type "FavoriteItem" (FAVORITES!!) ->
structure: parameter "module_name" of String, parameter "id" of
String
"""
return self._client.call_method('Catalog.list_favorites',
[username], self._service_ver, context)
def list_app_favorites(self, item, context=None):
"""
:param item: instance of type "FavoriteItem" (FAVORITES!!) ->
structure: parameter "module_name" of String, parameter "id" of
String
:returns: instance of list of type "FavoriteUser" -> structure:
parameter "username" of String, parameter "timestamp" of String
"""
return self._client.call_method('Catalog.list_app_favorites',
[item], self._service_ver, context)
def list_favorite_counts(self, params, context=None):
"""
:param params: instance of type "ListFavoriteCounts" (if favorite
item is given, will return stars just for that item. If a module
name is given, will return stars for all methods in that module.
If none of those are given, then will return stars for every
method that there is info on parameters to add: list<FavoriteItem>
items;) -> structure: parameter "modules" of list of String
:returns: instance of list of type "FavoriteCount" -> structure:
parameter "module_name" of String, parameter "app_id" of String,
parameter "count" of Long
"""
return self._client.call_method('Catalog.list_favorite_counts',
[params], self._service_ver, context)
def get_module_info(self, selection, context=None):
"""
:param selection: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
:returns: instance of type "ModuleInfo" -> structure: parameter
"module_name" of String, parameter "git_url" of String, parameter
"description" of String, parameter "language" of String, parameter
"owners" of list of String, parameter "release" of type
"ModuleVersionInfo" (data_folder - optional field representing
unique module name (like <module_name> transformed to lower cases)
used for reference data purposes (see description for data_version
field). This value will be treated as part of file system path
relative to the base that comes from the config (currently base is
supposed to be "/kb/data" defined in "ref-data-base" parameter).
data_version - optional field, reflects version of data defined in
kbase.yml (see "data-version" key). In case this field is set data
folder with path "/kb/data/<data_folder>/<data_version>" should be
initialized by running docker image with "init" target from
catalog. And later when async methods are run it should be mounted
on AWE worker machine into "/data" folder inside docker container
by execution engine.) -> structure: parameter "timestamp" of Long,
parameter "registration_id" of String, parameter "version" of
String, parameter "git_commit_hash" of String, parameter
"git_commit_message" of String, parameter "dynamic_service" of
type "boolean" (@range [0,1]), parameter "narrative_method_ids" of
list of String, parameter "local_function_ids" of list of String,
parameter "docker_img_name" of String, parameter "data_folder" of
String, parameter "data_version" of String, parameter
"compilation_report" of type "CompilationReport" -> structure:
parameter "module_name" of String, parameter "sdk_version" of
String, parameter "sdk_git_commit" of String, parameter
"impl_file_path" of String, parameter "function_places" of mapping
from String to type "FunctionPlace" -> structure: parameter
"start_line" of Long, parameter "end_line" of Long, parameter
"functions" of mapping from String to type "Function" ->
structure: parameter "name" of String, parameter "comment" of
String, parameter "place" of type "FunctionPlace" -> structure:
parameter "start_line" of Long, parameter "end_line" of Long,
parameter "input" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "output" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "spec_files" of list of type "SpecFile" -> structure:
parameter "file_name" of String, parameter "content" of String,
parameter "is_main" of type "boolean" (@range [0,1]), parameter
"beta" of type "ModuleVersionInfo" (data_folder - optional field
representing unique module name (like <module_name> transformed to
lower cases) used for reference data purposes (see description for
data_version field). This value will be treated as part of file
system path relative to the base that comes from the config
(currently base is supposed to be "/kb/data" defined in
"ref-data-base" parameter). data_version - optional field,
reflects version of data defined in kbase.yml (see "data-version"
key). In case this field is set data folder with path
"/kb/data/<data_folder>/<data_version>" should be initialized by
running docker image with "init" target from catalog. And later
when async methods are run it should be mounted on AWE worker
machine into "/data" folder inside docker container by execution
engine.) -> structure: parameter "timestamp" of Long, parameter
"registration_id" of String, parameter "version" of String,
parameter "git_commit_hash" of String, parameter
"git_commit_message" of String, parameter "dynamic_service" of
type "boolean" (@range [0,1]), parameter "narrative_method_ids" of
list of String, parameter "local_function_ids" of list of String,
parameter "docker_img_name" of String, parameter "data_folder" of
String, parameter "data_version" of String, parameter
"compilation_report" of type "CompilationReport" -> structure:
parameter "module_name" of String, parameter "sdk_version" of
String, parameter "sdk_git_commit" of String, parameter
"impl_file_path" of String, parameter "function_places" of mapping
from String to type "FunctionPlace" -> structure: parameter
"start_line" of Long, parameter "end_line" of Long, parameter
"functions" of mapping from String to type "Function" ->
structure: parameter "name" of String, parameter "comment" of
String, parameter "place" of type "FunctionPlace" -> structure:
parameter "start_line" of Long, parameter "end_line" of Long,
parameter "input" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "output" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "spec_files" of list of type "SpecFile" -> structure:
parameter "file_name" of String, parameter "content" of String,
parameter "is_main" of type "boolean" (@range [0,1]), parameter
"dev" of type "ModuleVersionInfo" (data_folder - optional field
representing unique module name (like <module_name> transformed to
lower cases) used for reference data purposes (see description for
data_version field). This value will be treated as part of file
system path relative to the base that comes from the config
(currently base is supposed to be "/kb/data" defined in
"ref-data-base" parameter). data_version - optional field,
reflects version of data defined in kbase.yml (see "data-version"
key). In case this field is set data folder with path
"/kb/data/<data_folder>/<data_version>" should be initialized by
running docker image with "init" target from catalog. And later
when async methods are run it should be mounted on AWE worker
machine into "/data" folder inside docker container by execution
engine.) -> structure: parameter "timestamp" of Long, parameter
"registration_id" of String, parameter "version" of String,
parameter "git_commit_hash" of String, parameter
"git_commit_message" of String, parameter "dynamic_service" of
type "boolean" (@range [0,1]), parameter "narrative_method_ids" of
list of String, parameter "local_function_ids" of list of String,
parameter "docker_img_name" of String, parameter "data_folder" of
String, parameter "data_version" of String, parameter
"compilation_report" of type "CompilationReport" -> structure:
parameter "module_name" of String, parameter "sdk_version" of
String, parameter "sdk_git_commit" of String, parameter
"impl_file_path" of String, parameter "function_places" of mapping
from String to type "FunctionPlace" -> structure: parameter
"start_line" of Long, parameter "end_line" of Long, parameter
"functions" of mapping from String to type "Function" ->
structure: parameter "name" of String, parameter "comment" of
String, parameter "place" of type "FunctionPlace" -> structure:
parameter "start_line" of Long, parameter "end_line" of Long,
parameter "input" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "output" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "spec_files" of list of type "SpecFile" -> structure:
parameter "file_name" of String, parameter "content" of String,
parameter "is_main" of type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.get_module_info',
[selection], self._service_ver, context)
def get_version_info(self, params, context=None):
"""
DEPRECATED!!! use get_module_version
:param params: instance of type "SelectModuleVersionParams" (only
required: module_name or git_url, the rest are optional selectors
If no selectors given, returns current release version version is
one of: release | beta | dev old release versions can only be
retrieved individually by timestamp or git_commit_hash Note: this
method isn't particularly smart or effecient yet, because it pulls
the info for a particular module first, then searches in code for
matches to the relevant query. Instead, this should be performed
on the database side through queries. Will optimize when this
becomes an issue. In the future, this will be extended so that you
can retrieve version info by only timestamp, git commit, etc, but
the necessary indicies have not been setup yet. In general, we
will need to add better search capabilities) -> structure:
parameter "module_name" of String, parameter "git_url" of String,
parameter "timestamp" of Long, parameter "git_commit_hash" of
String, parameter "version" of String
:returns: instance of type "ModuleVersionInfo" (data_folder -
optional field representing unique module name (like <module_name>
transformed to lower cases) used for reference data purposes (see
description for data_version field). This value will be treated as
part of file system path relative to the base that comes from the
config (currently base is supposed to be "/kb/data" defined in
"ref-data-base" parameter). data_version - optional field,
reflects version of data defined in kbase.yml (see "data-version"
key). In case this field is set data folder with path
"/kb/data/<data_folder>/<data_version>" should be initialized by
running docker image with "init" target from catalog. And later
when async methods are run it should be mounted on AWE worker
machine into "/data" folder inside docker container by execution
engine.) -> structure: parameter "timestamp" of Long, parameter
"registration_id" of String, parameter "version" of String,
parameter "git_commit_hash" of String, parameter
"git_commit_message" of String, parameter "dynamic_service" of
type "boolean" (@range [0,1]), parameter "narrative_method_ids" of
list of String, parameter "local_function_ids" of list of String,
parameter "docker_img_name" of String, parameter "data_folder" of
String, parameter "data_version" of String, parameter
"compilation_report" of type "CompilationReport" -> structure:
parameter "module_name" of String, parameter "sdk_version" of
String, parameter "sdk_git_commit" of String, parameter
"impl_file_path" of String, parameter "function_places" of mapping
from String to type "FunctionPlace" -> structure: parameter
"start_line" of Long, parameter "end_line" of Long, parameter
"functions" of mapping from String to type "Function" ->
structure: parameter "name" of String, parameter "comment" of
String, parameter "place" of type "FunctionPlace" -> structure:
parameter "start_line" of Long, parameter "end_line" of Long,
parameter "input" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "output" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "spec_files" of list of type "SpecFile" -> structure:
parameter "file_name" of String, parameter "content" of String,
parameter "is_main" of type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.get_version_info',
[params], self._service_ver, context)
def list_released_module_versions(self, params, context=None):
"""
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
:returns: instance of list of type "ModuleVersionInfo" (data_folder -
optional field representing unique module name (like <module_name>
transformed to lower cases) used for reference data purposes (see
description for data_version field). This value will be treated as
part of file system path relative to the base that comes from the
config (currently base is supposed to be "/kb/data" defined in
"ref-data-base" parameter). data_version - optional field,
reflects version of data defined in kbase.yml (see "data-version"
key). In case this field is set data folder with path
"/kb/data/<data_folder>/<data_version>" should be initialized by
running docker image with "init" target from catalog. And later
when async methods are run it should be mounted on AWE worker
machine into "/data" folder inside docker container by execution
engine.) -> structure: parameter "timestamp" of Long, parameter
"registration_id" of String, parameter "version" of String,
parameter "git_commit_hash" of String, parameter
"git_commit_message" of String, parameter "dynamic_service" of
type "boolean" (@range [0,1]), parameter "narrative_method_ids" of
list of String, parameter "local_function_ids" of list of String,
parameter "docker_img_name" of String, parameter "data_folder" of
String, parameter "data_version" of String, parameter
"compilation_report" of type "CompilationReport" -> structure:
parameter "module_name" of String, parameter "sdk_version" of
String, parameter "sdk_git_commit" of String, parameter
"impl_file_path" of String, parameter "function_places" of mapping
from String to type "FunctionPlace" -> structure: parameter
"start_line" of Long, parameter "end_line" of Long, parameter
"functions" of mapping from String to type "Function" ->
structure: parameter "name" of String, parameter "comment" of
String, parameter "place" of type "FunctionPlace" -> structure:
parameter "start_line" of Long, parameter "end_line" of Long,
parameter "input" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "output" of list of type "Parameter" -> structure:
parameter "type" of String, parameter "comment" of String,
parameter "spec_files" of list of type "SpecFile" -> structure:
parameter "file_name" of String, parameter "content" of String,
parameter "is_main" of type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.list_released_module_versions',
[params], self._service_ver, context)
def get_module_version(self, selection, context=None):
"""
:param selection: instance of type "SelectModuleVersion" (Get a
specific module version. Requires either a module_name or git_url.
If both are provided, they both must match. If no other options
are specified, then the latest 'release' version is returned. If
the module has not been released, then the latest 'beta' or 'dev'
version is returned. You can check in the returned object if the
version has been released (see is_released) and what release tags
are pointing to this version (see release_tags). Optionally, a
'version' parameter can be provided that can be either: 1) release
tag: 'dev' | 'beta' | 'release' 2) specific semantic version of a
released version (you cannot pull dev/beta or other unreleased
versions by semantic version) - e.g. 2.0.1 3) semantic version
requirement specification, see:
https://pypi.python.org/pypi/semantic_version/ which will return
the latest released version that matches the criteria. You cannot
pull dev/beta or other unreleased versions this way. - e.g.: -
'>1.0.0' - '>=2.1.1,<3.3.0' - '!=0.2.4-alpha,<0.3.0' 4) specific
full git commit hash include_module_description - set to 1 to
include the module description in the YAML file of this version;
default is 0 include_compilation_report - set to 1 to include the
module compilation report, default is 0) -> structure: parameter
"module_name" of String, parameter "git_url" of String, parameter
"version" of String, parameter "include_module_description" of
type "boolean" (@range [0,1]), parameter
"include_compilation_report" of type "boolean" (@range [0,1])
:returns: instance of type "ModuleVersion" (module_name -
the name of the module module_description - (optionally
returned) html description in KBase YAML of this module git_url
- the git url of the source for this module released
- 1 if this version has been released, 0 otherwise release_tags
- list of strings of: 'dev', 'beta', or 'release', or empty list
this is a list because the same commit version may be the version
in multiple release states release_timestamp - time in ms
since epoch when this module was approved and moved to release,
null otherwise note that a module was released before v1.0.0, the
release timestamp may not have been recorded and will default to
the registration timestamp timestamp - time in ms
since epoch when the registration for this version was started
registration_id - id of the last registration for this
version, used for fetching registration logs and state version
- validated semantic version number as indicated in the KBase YAML
of this version semantic versions are unique among released
versions of this module git_commit_hash - the full git
commit hash of the source for this module git_commit_message -
the message attached to this git commit dynamic_service - 1
if this version is available as a web service, 0 otherwise
narrative_app_ids - list of Narrative App ids registered with
this module version local_function_ids - list of Local
Function ids registered with this module version docker_img_name
- name of the docker image for this module created on registration
data_folder - name of the data folder used
compilation_report - (optionally returned) summary of the KIDL
specification compilation) -> structure: parameter "module_name"
of String, parameter "module_description" of String, parameter
"git_url" of String, parameter "released" of type "boolean"
(@range [0,1]), parameter "release_tags" of list of String,
parameter "timestamp" of Long, parameter "registration_id" of
String, parameter "version" of String, parameter "git_commit_hash"
of String, parameter "git_commit_message" of String, parameter
"dynamic_service" of type "boolean" (@range [0,1]), parameter
"narrative_app_ids" of list of String, parameter
"local_function_ids" of list of String, parameter
"docker_img_name" of String, parameter "data_folder" of String,
parameter "data_version" of String, parameter "compilation_report"
of type "CompilationReport" -> structure: parameter "module_name"
of String, parameter "sdk_version" of String, parameter
"sdk_git_commit" of String, parameter "impl_file_path" of String,
parameter "function_places" of mapping from String to type
"FunctionPlace" -> structure: parameter "start_line" of Long,
parameter "end_line" of Long, parameter "functions" of mapping
from String to type "Function" -> structure: parameter "name" of
String, parameter "comment" of String, parameter "place" of type
"FunctionPlace" -> structure: parameter "start_line" of Long,
parameter "end_line" of Long, parameter "input" of list of type
"Parameter" -> structure: parameter "type" of String, parameter
"comment" of String, parameter "output" of list of type
"Parameter" -> structure: parameter "type" of String, parameter
"comment" of String, parameter "spec_files" of list of type
"SpecFile" -> structure: parameter "file_name" of String,
parameter "content" of String, parameter "is_main" of type
"boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.get_module_version',
[selection], self._service_ver, context)
def list_local_functions(self, params, context=None):
"""
:param params: instance of type "ListLocalFunctionParams" (Allows
various ways to filter. Release tag = dev/beta/release, default is
release module_names = only include modules in the list; if empty
or not provided then include everything) -> structure: parameter
"release_tag" of String, parameter "module_names" of list of String
:returns: instance of list of type "LocalFunctionInfo" (todo: switch
release_tag to release_tags) -> structure: parameter "module_name"
of String, parameter "function_id" of String, parameter
"git_commit_hash" of String, parameter "version" of String,
parameter "release_tag" of list of String, parameter "name" of
String, parameter "short_description" of String, parameter "tags"
of type "LocalFunctionTags" -> structure: parameter "categories"
of list of String, parameter "input" of type "IOTags" (Local
Function Listing Support) -> structure: parameter "file_types" of
list of String, parameter "kb_types" of list of String, parameter
"output" of type "IOTags" (Local Function Listing Support) ->
structure: parameter "file_types" of list of String, parameter
"kb_types" of list of String
"""
return self._client.call_method('Catalog.list_local_functions',
[params], self._service_ver, context)
def get_local_function_details(self, params, context=None):
"""
:param params: instance of type "GetLocalFunctionDetails" ->
structure: parameter "functions" of list of type
"SelectOneLocalFunction" (release_tag = dev | beta | release, if
it doesn't exist and git_commit_hash isn't set, we default to
release and will not return anything if the function is not
released) -> structure: parameter "module_name" of String,
parameter "function_id" of String, parameter "release_tag" of
String, parameter "git_commit_hash" of String
:returns: instance of list of type "LocalFunctionDetails" ->
structure: parameter "info" of type "LocalFunctionInfo" (todo:
switch release_tag to release_tags) -> structure: parameter
"module_name" of String, parameter "function_id" of String,
parameter "git_commit_hash" of String, parameter "version" of
String, parameter "release_tag" of list of String, parameter
"name" of String, parameter "short_description" of String,
parameter "tags" of type "LocalFunctionTags" -> structure:
parameter "categories" of list of String, parameter "input" of
type "IOTags" (Local Function Listing Support) -> structure:
parameter "file_types" of list of String, parameter "kb_types" of
list of String, parameter "output" of type "IOTags" (Local
Function Listing Support) -> structure: parameter "file_types" of
list of String, parameter "kb_types" of list of String, parameter
"long_description" of String
"""
return self._client.call_method('Catalog.get_local_function_details',
[params], self._service_ver, context)
def module_version_lookup(self, selection, context=None):
"""
:param selection: instance of type "ModuleVersionLookupParams"
(module_name - required for module lookup lookup - a lookup
string, if empty will get the latest released module 1) version
tag = dev | beta | release 2) semantic version match identifiier
not supported yet: 3) exact commit hash not supported yet: 4)
exact timestamp only_service_versions - 1/0, default is 1) ->
structure: parameter "module_name" of String, parameter "lookup"
of String, parameter "only_service_versions" of type "boolean"
(@range [0,1])
:returns: instance of type "BasicModuleVersionInfo" (DYNAMIC SERVICES
SUPPORT Methods) -> structure: parameter "module_name" of String,
parameter "version" of String, parameter "git_commit_hash" of
String, parameter "docker_img_name" of String
"""
return self._client.call_method('Catalog.module_version_lookup',
[selection], self._service_ver, context)
def list_service_modules(self, filter, context=None):
"""
:param filter: instance of type "ListServiceModuleParams" (tag = dev
| beta | release if tag is not set, all release versions are
returned) -> structure: parameter "tag" of String
:returns: instance of list of type "BasicModuleVersionInfo" (DYNAMIC
SERVICES SUPPORT Methods) -> structure: parameter "module_name" of
String, parameter "version" of String, parameter "git_commit_hash"
of String, parameter "docker_img_name" of String
"""
return self._client.call_method('Catalog.list_service_modules',
[filter], self._service_ver, context)
def set_registration_state(self, params, context=None):
"""
:param params: instance of type "SetRegistrationStateParams" (End
Dynamic Services Support Methods) -> structure: parameter
"module_name" of String, parameter "git_url" of String, parameter
"registration_state" of String, parameter "error_message" of String
"""
return self._client.call_method('Catalog.set_registration_state',
[params], self._service_ver, context)
def get_module_state(self, params, context=None):
"""
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
:returns: instance of type "ModuleState" (active: True | False,
release_approval: approved | denied | under_review |
not_requested, (all releases require approval) review_message:
str, (optional) registration: complete | error | (build state
status), error_message: str (optional)) -> structure: parameter
"active" of type "boolean" (@range [0,1]), parameter "released" of
type "boolean" (@range [0,1]), parameter "release_approval" of
String, parameter "review_message" of String, parameter
"registration" of String, parameter "error_message" of String
"""
return self._client.call_method('Catalog.get_module_state',
[params], self._service_ver, context)
def get_build_log(self, registration_id, context=None):
"""
:param registration_id: instance of String
:returns: instance of String
"""
return self._client.call_method('Catalog.get_build_log',
[registration_id], self._service_ver, context)
def get_parsed_build_log(self, params, context=None):
"""
given the registration_id returned from the register method, you can check the build log with this method
:param params: instance of type "GetBuildLogParams" (must specify
skip & limit, or first_n, or last_n. If none given, this gets
last 5000 lines) -> structure: parameter "registration_id" of
String, parameter "skip" of Long, parameter "limit" of Long,
parameter "first_n" of Long, parameter "last_n" of Long
:returns: instance of type "BuildLog" -> structure: parameter
"registration_id" of String, parameter "timestamp" of String,
parameter "module_name_lc" of String, parameter "git_url" of
String, parameter "error" of String, parameter "registration" of
String, parameter "log" of list of type "BuildLogLine" ->
structure: parameter "content" of String, parameter "error" of
type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.get_parsed_build_log',
[params], self._service_ver, context)
def list_builds(self, params, context=None):
"""
:param params: instance of type "ListBuildParams" (Always sorted by
time, oldest builds are last. only one of these can be set to
true: only_running - if true, only show running builds only_error
- if true, only show builds that ended in an error only_complete -
if true, only show builds that are complete skip - skip these
first n records, default 0 limit - limit result to the most recent
n records, default 1000 modules - only include builds from these
modules based on names/git_urls) -> structure: parameter
"only_runnning" of type "boolean" (@range [0,1]), parameter
"only_error" of type "boolean" (@range [0,1]), parameter
"only_complete" of type "boolean" (@range [0,1]), parameter "skip"
of Long, parameter "limit" of Long, parameter "modules" of list of
type "SelectOneModuleParams" (Describes how to find a single
module/repository. module_name - name of module defined in
kbase.yaml file; git_url - the url used to register the module) ->
structure: parameter "module_name" of String, parameter "git_url"
of String
:returns: instance of list of type "BuildInfo" -> structure:
parameter "timestamp" of String, parameter "registration_id" of
String, parameter "registration" of String, parameter
"error_message" of String, parameter "module_name_lc" of String,
parameter "git_url" of String
"""
return self._client.call_method('Catalog.list_builds',
[params], self._service_ver, context)
def delete_module(self, params, context=None):
"""
admin method to delete a module, will only work if the module has not been released
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
"""
return self._client.call_method('Catalog.delete_module',
[params], self._service_ver, context)
def migrate_module_to_new_git_url(self, params, context=None):
"""
admin method to move the git url for a module, should only be used if the exact same code has migrated to
a new URL. It should not be used as a way to change ownership, get updates from a new source, or get a new
module name for an existing git url because old versions are retained and git commits saved will no longer
be correct.
:param params: instance of type "UpdateGitUrlParams" (all fields are
required to make sure you update the right one) -> structure:
parameter "module_name" of String, parameter "current_git_url" of
String, parameter "new_git_url" of String
"""
return self._client.call_method('Catalog.migrate_module_to_new_git_url',
[params], self._service_ver, context)
def set_to_active(self, params, context=None):
"""
admin methods to turn on/off modules
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
"""
return self._client.call_method('Catalog.set_to_active',
[params], self._service_ver, context)
def set_to_inactive(self, params, context=None):
"""
:param params: instance of type "SelectOneModuleParams" (Describes
how to find a single module/repository. module_name - name of
module defined in kbase.yaml file; git_url - the url used to
register the module) -> structure: parameter "module_name" of
String, parameter "git_url" of String
"""
return self._client.call_method('Catalog.set_to_inactive',
[params], self._service_ver, context)
def is_approved_developer(self, usernames, context=None):
"""
temporary developer approval, should be moved to more mature user profile service
:param usernames: instance of list of String
:returns: instance of list of type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.is_approved_developer',
[usernames], self._service_ver, context)
def list_approved_developers(self, context=None):
"""
:returns: instance of list of String
"""
return self._client.call_method('Catalog.list_approved_developers',
[], self._service_ver, context)
def approve_developer(self, username, context=None):
"""
:param username: instance of String
"""
return self._client.call_method('Catalog.approve_developer',
[username], self._service_ver, context)
def revoke_developer(self, username, context=None):
"""
:param username: instance of String
"""
return self._client.call_method('Catalog.revoke_developer',
[username], self._service_ver, context)
def log_exec_stats(self, params, context=None):
"""
Request from Execution Engine for adding statistics about each method run. It could be done
using catalog admin credentials only.
:param params: instance of type "LogExecStatsParams" (user_id -
GlobusOnline login of invoker, app_module_name - optional module
name of registered repo (could be absent of null for old fashioned
services) where app_id comes from, app_id - optional method-spec
id without module_name prefix (could be absent or null in case
original execution was started through API call without app ID
defined), func_module_name - optional module name of registered
repo (could be absent of null for old fashioned services) where
func_name comes from, func_name - name of function in KIDL-spec
without module_name prefix, git_commit_hash - optional service
version (in case of dynamically registered repo), creation_time,
exec_start_time and finish_time - defined in seconds since Epoch
(POSIX), is_error - indicates whether execution was finished with
error or not.) -> structure: parameter "user_id" of String,
parameter "app_module_name" of String, parameter "app_id" of
String, parameter "func_module_name" of String, parameter
"func_name" of String, parameter "git_commit_hash" of String,
parameter "creation_time" of Double, parameter "exec_start_time"
of Double, parameter "finish_time" of Double, parameter "is_error"
of type "boolean" (@range [0,1]), parameter "job_id" of String
"""
return self._client.call_method('Catalog.log_exec_stats',
[params], self._service_ver, context)
def get_exec_aggr_stats(self, params, context=None):
"""
:param params: instance of type "GetExecAggrStatsParams"
(full_app_ids - list of fully qualified app IDs (including
module_name prefix followed by slash in case of dynamically
registered repo). per_week - optional flag switching results to
weekly data rather than one row per app for all time (default
value is false)) -> structure: parameter "full_app_ids" of list of
String, parameter "per_week" of type "boolean" (@range [0,1])
:returns: instance of list of type "ExecAggrStats" (full_app_id -
optional fully qualified method-spec id including module_name
prefix followed by slash in case of dynamically registered repo
(it could be absent or null in case original execution was started
through API call without app ID defined), time_range - one of
supported time ranges (currently it could be either '*' for all
time or ISO-encoded week like "2016-W01") total_queue_time -
summarized time difference between exec_start_time and
creation_time moments defined in seconds since Epoch (POSIX),
total_exec_time - summarized time difference between finish_time
and exec_start_time moments defined in seconds since Epoch
(POSIX).) -> structure: parameter "full_app_id" of String,
parameter "time_range" of String, parameter "number_of_calls" of
Long, parameter "number_of_errors" of Long, parameter
"total_queue_time" of Double, parameter "total_exec_time" of Double
"""
return self._client.call_method('Catalog.get_exec_aggr_stats',
[params], self._service_ver, context)
def get_exec_aggr_table(self, params, context=None):
"""
:param params: instance of type "ExecAggrTableParams" (Get aggregated
usage metrics; available only to Admins.) -> structure: parameter
"begin" of Long, parameter "end" of Long
:returns: instance of unspecified object
"""
return self._client.call_method('Catalog.get_exec_aggr_table',
[params], self._service_ver, context)
def get_exec_raw_stats(self, params, context=None):
"""
:param params: instance of type "GetExecRawStatsParams" (Get raw
usage metrics; available only to Admins.) -> structure: parameter
"begin" of Long, parameter "end" of Long
:returns: instance of list of unspecified object
"""
return self._client.call_method('Catalog.get_exec_raw_stats',
[params], self._service_ver, context)
def get_client_groups(self, params, context=None):
"""
@deprecated list_client_group_configs
:param params: instance of type "GetClientGroupParams" (if app_ids is
empty or null, all client groups are returned) -> structure:
:returns: instance of list of type "AppClientGroup" (app_id = full
app id; if module name is used it will be case insensitive this
will overwrite all existing client groups (it won't just push
what's on the list) If client_groups is empty or set to null, then
the client_group mapping will be removed.) -> structure: parameter
"app_id" of String, parameter "client_groups" of list of String
"""
return self._client.call_method('Catalog.get_client_groups',
[params], self._service_ver, context)
def set_client_group_config(self, config, context=None):
"""
:param config: instance of type "ClientGroupConfig" -> structure:
parameter "module_name" of String, parameter "function_name" of
String, parameter "client_groups" of list of String
"""
return self._client.call_method('Catalog.set_client_group_config',
[config], self._service_ver, context)
def remove_client_group_config(self, config, context=None):
"""
:param config: instance of type "ClientGroupConfig" -> structure:
parameter "module_name" of String, parameter "function_name" of
String, parameter "client_groups" of list of String
"""
return self._client.call_method('Catalog.remove_client_group_config',
[config], self._service_ver, context)
def list_client_group_configs(self, filter, context=None):
"""
:param filter: instance of type "ClientGroupFilter" -> structure:
parameter "module_name" of String, parameter "function_name" of
String
:returns: instance of list of type "ClientGroupConfig" -> structure:
parameter "module_name" of String, parameter "function_name" of
String, parameter "client_groups" of list of String
"""
return self._client.call_method('Catalog.list_client_group_configs',
[filter], self._service_ver, context)
def set_volume_mount(self, config, context=None):
"""
must specify all properties of the VolumeMountConfig
:param config: instance of type "VolumeMountConfig" (for a module,
function, and client group, set mount configurations) ->
structure: parameter "module_name" of String, parameter
"function_name" of String, parameter "client_group" of String,
parameter "volume_mounts" of list of type "VolumeMount" ->
structure: parameter "host_dir" of String, parameter
"container_dir" of String, parameter "read_only" of type "boolean"
(@range [0,1])
"""
return self._client.call_method('Catalog.set_volume_mount',
[config], self._service_ver, context)
def remove_volume_mount(self, config, context=None):
"""
must specify module_name, function_name, client_group and this method will delete any configured mounts
:param config: instance of type "VolumeMountConfig" (for a module,
function, and client group, set mount configurations) ->
structure: parameter "module_name" of String, parameter
"function_name" of String, parameter "client_group" of String,
parameter "volume_mounts" of list of type "VolumeMount" ->
structure: parameter "host_dir" of String, parameter
"container_dir" of String, parameter "read_only" of type "boolean"
(@range [0,1])
"""
return self._client.call_method('Catalog.remove_volume_mount',
[config], self._service_ver, context)
def list_volume_mounts(self, filter, context=None):
"""
:param filter: instance of type "VolumeMountFilter" (Parameters for
listing VolumeMountConfigs. If nothing is set, everything is
returned. Otherwise, will return everything that matches all
fields set. For instance, if only module_name is set, will return
everything for that module. If they are all set, will return the
specific module/app/client group config. Returns nothing if no
matches are found.) -> structure: parameter "module_name" of
String, parameter "function_name" of String, parameter
"client_group" of String
:returns: instance of list of type "VolumeMountConfig" (for a module,
function, and client group, set mount configurations) ->
structure: parameter "module_name" of String, parameter
"function_name" of String, parameter "client_group" of String,
parameter "volume_mounts" of list of type "VolumeMount" ->
structure: parameter "host_dir" of String, parameter
"container_dir" of String, parameter "read_only" of type "boolean"
(@range [0,1])
"""
return self._client.call_method('Catalog.list_volume_mounts',
[filter], self._service_ver, context)
def is_admin(self, username, context=None):
"""
returns true (1) if the user is an admin, false (0) otherwise.
NOTE: username is now ignored (it checks the token) but retained for back compatibility
:param username: instance of String
:returns: instance of type "boolean" (@range [0,1])
"""
return self._client.call_method('Catalog.is_admin',
[username], self._service_ver, context)
def set_secure_config_params(self, params, context=None):
"""
Only admins can use this function.
:param params: instance of type "ModifySecureConfigParamsInput" ->
structure: parameter "data" of list of type
"SecureConfigParameter" (version - optional version (commit hash,
tag or semantic one) of module, if not set then default "" value
is used which means parameter is applied to any version;
is_password - optional flag meaning to hide this parameter's value
in UI.) -> structure: parameter "module_name" of String, parameter
"version" of String, parameter "param_name" of String, parameter
"is_password" of type "boolean" (@range [0,1]), parameter
"param_value" of String
"""
return self._client.call_method('Catalog.set_secure_config_params',
[params], self._service_ver, context)
def remove_secure_config_params(self, params, context=None):
"""
Only admins can use this function.
:param params: instance of type "ModifySecureConfigParamsInput" ->
structure: parameter "data" of list of type
"SecureConfigParameter" (version - optional version (commit hash,
tag or semantic one) of module, if not set then default "" value
is used which means parameter is applied to any version;
is_password - optional flag meaning to hide this parameter's value
in UI.) -> structure: parameter "module_name" of String, parameter
"version" of String, parameter "param_name" of String, parameter
"is_password" of type "boolean" (@range [0,1]), parameter
"param_value" of String
"""
return self._client.call_method('Catalog.remove_secure_config_params',
[params], self._service_ver, context)
def get_secure_config_params(self, params, context=None):
"""
Only admins can use this function.
:param params: instance of type "GetSecureConfigParamsInput" (version
- optional version (commit hash, tag or semantic one) of module,
if not set then default "release" value is used; load_all_versions
- optional flag indicating that all parameter versions should be
loaded (version filter is not applied), default value is 0.) ->
structure: parameter "module_name" of String, parameter "version"
of String, parameter "load_all_versions" of type "boolean" (@range
[0,1])
:returns: instance of list of type "SecureConfigParameter" (version -
optional version (commit hash, tag or semantic one) of module, if
not set then default "" value is used which means parameter is
applied to any version; is_password - optional flag meaning to
hide this parameter's value in UI.) -> structure: parameter
"module_name" of String, parameter "version" of String, parameter
"param_name" of String, parameter "is_password" of type "boolean"
(@range [0,1]), parameter "param_value" of String
"""
return self._client.call_method('Catalog.get_secure_config_params',
[params], self._service_ver, context)
def status(self, context=None):
return self._client.call_method('Catalog.status',
[], self._service_ver, context)
|
{
"content_hash": "c964b49bed0d137229fd7bf11632ae3e",
"timestamp": "",
"source": "github",
"line_count": 1018,
"max_line_length": 115,
"avg_line_length": 60.73084479371316,
"alnum_prop": 0.6285584886128365,
"repo_name": "kbaseIncubator/catalog",
"id": "eb70ac29ca5e0728868dee6dd0fd543a0ed95324",
"size": "62064",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/biokbase/catalog/Client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "163826"
},
{
"name": "JavaScript",
"bytes": "28656"
},
{
"name": "Makefile",
"bytes": "3460"
},
{
"name": "Perl",
"bytes": "121489"
},
{
"name": "Python",
"bytes": "283880"
},
{
"name": "Shell",
"bytes": "3159"
}
],
"symlink_target": ""
}
|
import dateutil.parser
from datetime import date
from .. import constants
class ProfileDescriptor(object):
    """Data descriptor exposing one field of a Profile.

    Reading returns the field from the owning instance's raw ``_data``
    mapping; writing pushes the full updatable profile back through the
    API and re-initialises the instance from the server's response.
    """

    def __init__(self, id):
        # ``id`` is the key of the wrapped field inside Profile._data.
        self.id = id

    def __get__(self, instance, owner):
        # Always read from the instance.  (Previously a written value was
        # cached on the descriptor itself via ``self.value``; descriptors
        # are shared by every instance of the owning class, so that cache
        # leaked values between Profile objects.)
        return instance._data[self.id]

    def __set__(self, instance, value):
        # Build the complete updatable payload the API expects.
        profile = {}
        for key in constants.UPDATABLE_FIELDS:
            profile[key] = getattr(instance, key)
        # Gender fields are exposed human-readable; convert back to codes.
        profile['gender'] = constants.GENDER_MAP_REVERSE[profile['gender']]
        profile['interested_in'] = [constants.GENDER_MAP_REVERSE[x] for x in
                                    profile['interested_in']]
        profile[self.id] = value
        instance.__init__(instance._api.update_profile(profile), instance._api)
        # Record the new value on the instance, not the shared descriptor.
        instance._data[self.id] = value
class GenderDescriptor(ProfileDescriptor):
    """Profile field descriptor that maps gender codes to readable names."""

    def __get__(self, instance, owner):
        code = super(GenderDescriptor, self).__get__(instance, owner)
        return constants.GENDER_MAP[code]

    def __set__(self, instance, value):
        # Translate the readable name back to its numeric code, then
        # delegate to the base class *setter*.  (The original erroneously
        # delegated to __get__, so assignments were silently dropped.)
        code = constants.GENDER_MAP_REVERSE[value]
        return super(GenderDescriptor, self).__set__(instance, code)
class InterestedInDescriptor(ProfileDescriptor):
    """Descriptor mapping interested_in gender codes to readable names."""

    def __get__(self, instance, owner):
        codes = super(InterestedInDescriptor, self).\
            __get__(instance, owner)
        return map(lambda x: constants.GENDER_MAP[x], codes)

    def __set__(self, instance, value):
        # Convert readable names back to codes, then delegate to the base
        # class *setter*.  (The original called __get__ by mistake, so the
        # assignment never reached the API.)
        codes = map(lambda x: constants.GENDER_MAP_REVERSE[x], value)
        return super(InterestedInDescriptor, self).__set__(instance, codes)
class Profile(object):
    """The user's own profile.

    Field access goes through descriptors, so assigning to e.g.
    ``profile.bio`` pushes the change back to the API.
    """

    bio = ProfileDescriptor('bio')
    discoverable = ProfileDescriptor('discoverable')
    distance_filter = ProfileDescriptor('distance_filter')
    age_filter_min = ProfileDescriptor('age_filter_min')
    age_filter_max = ProfileDescriptor('age_filter_max')
    interested_in = InterestedInDescriptor('interested_in')
    gender = GenderDescriptor('gender')

    def __init__(self, data, api):
        """Populate the profile from a raw API payload.

        ``data`` must contain '_id', 'create_date', 'photos', 'ping_time'
        and 'name'; ``api`` is used by the descriptors to push updates.
        """
        self.id = data['_id']
        self._api = api
        # Parse the ISO timestamp directly instead of storing the raw
        # string first (also drops the unused ``today`` local the
        # original computed and never used).
        self.create_date = dateutil.parser.parse(data['create_date'])
        self.photos = map(lambda photo: str(photo['url']), data['photos'])
        self.ping_time = data['ping_time']
        self.name = data['name']
        self._data = data

    def __repr__(self):
        return self.name
|
{
"content_hash": "63eb6943b2ad63deb59ff04eb3813fc4",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 79,
"avg_line_length": 35.276315789473685,
"alnum_prop": 0.6307348004475942,
"repo_name": "ckjoshi9/Auto-Mate-for-Tinder",
"id": "4f87b696d5990c1db19903cf8c474a278ef4c1a7",
"size": "2681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Django App/tinderapp/pynder/models/me.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "254126"
},
{
"name": "HTML",
"bytes": "37262"
},
{
"name": "JavaScript",
"bytes": "86464"
},
{
"name": "PHP",
"bytes": "2194"
},
{
"name": "Python",
"bytes": "93022"
}
],
"symlink_target": ""
}
|
"""Utilities to get a password and/or the current user name.
getpass(prompt[, stream]) - Prompt for a password, with echo turned off.
getuser() - Get the user name from the environment or password database.
GetPassWarning - This UserWarning is issued when getpass() cannot prevent
echoing of the password contents while reading.
On Windows, the msvcrt module will be used.
On the Mac EasyDialogs.AskPassword is used, if available.
"""
# Authors: Piers Lauder (original)
# Guido van Rossum (Windows support and cleanup)
# Gregory P. Smith (tty support & GetPassWarning)
import contextlib
import io
import os
import sys
import warnings
__all__ = ["getpass","getuser","GetPassWarning"]
class GetPassWarning(UserWarning):
    """Issued when getpass() cannot prevent echoing of the password."""
def unix_getpass(prompt='Password: ', stream=None):
    """Prompt for a password, with echo turned off.
    Args:
      prompt: Written on stream to ask for the input.  Default: 'Password: '
      stream: A writable file object to display the prompt.  Defaults to
        the tty.  If no tty is available defaults to sys.stderr.
    Returns:
      The seKr3t input.
    Raises:
      EOFError: If our input tty or stdin was closed.
      GetPassWarning: When we were unable to turn echo off on the input.
    Always restores terminal settings before returning.
    """
    passwd = None
    with contextlib.ExitStack() as stack:
        try:
            # Always try reading and writing directly on the tty first.
            fd = os.open('/dev/tty', os.O_RDWR|os.O_NOCTTY)
            tty = io.FileIO(fd, 'w+')
            stack.enter_context(tty)
            input = io.TextIOWrapper(tty)
            stack.enter_context(input)
            if not stream:
                stream = input
        except OSError as e:
            # If that fails, see if stdin can be controlled.
            stack.close()
            try:
                fd = sys.stdin.fileno()
            except (AttributeError, ValueError):
                # stdin has no usable descriptor: echo-free input is
                # impossible, fall back (which warns) right away.
                fd = None
                passwd = fallback_getpass(prompt, stream)
            input = sys.stdin
            if not stream:
                stream = sys.stderr
        if fd is not None:
            try:
                # Disable the ECHO local-mode bit, leaving everything else
                # in the saved settings untouched.
                old = termios.tcgetattr(fd)     # a copy to save
                new = old[:]
                new[3] &= ~termios.ECHO  # 3 == 'lflags'
                tcsetattr_flags = termios.TCSAFLUSH
                if hasattr(termios, 'TCSASOFT'):
                    tcsetattr_flags |= termios.TCSASOFT
                try:
                    termios.tcsetattr(fd, tcsetattr_flags, new)
                    passwd = _raw_input(prompt, stream, input=input)
                finally:
                    # Restore the saved settings no matter what happened.
                    termios.tcsetattr(fd, tcsetattr_flags, old)
                    stream.flush()  # issue7208
            except termios.error:
                if passwd is not None:
                    # _raw_input succeeded.  The final tcsetattr failed.  Reraise
                    # instead of leaving the terminal in an unknown state.
                    raise
                # We can't control the tty or stdin.  Give up and use normal IO.
                # fallback_getpass() raises an appropriate warning.
                if stream is not input:
                    # clean up unused file objects before blocking
                    stack.close()
                passwd = fallback_getpass(prompt, stream)
        # Echo was off, so the user's newline was never displayed.
        stream.write('\n')
    return passwd
def win_getpass(prompt='Password: ', stream=None):
    """Prompt for password with echo off, using Windows getch()."""
    if sys.stdin is not sys.__stdin__:
        # stdin was replaced (e.g. by a test harness): msvcrt would still
        # read the real console, so fall back to plain echoed input.
        return fallback_getpass(prompt, stream)
    import msvcrt
    for ch in prompt:
        msvcrt.putwch(ch)
    chars = []
    while True:
        ch = msvcrt.getwch()
        if ch == '\r' or ch == '\n':
            break
        if ch == '\003':
            # Ctrl-C arrives as a raw character in getwch() mode.
            raise KeyboardInterrupt
        if ch == '\b':
            if chars:
                chars.pop()
        else:
            chars.append(ch)
    msvcrt.putwch('\r')
    msvcrt.putwch('\n')
    return ''.join(chars)
def fallback_getpass(prompt='Password: ', stream=None):
    """Read the password with echo enabled, warning the caller first."""
    warnings.warn("Can not control echo on the terminal.", GetPassWarning,
                  stacklevel=2)
    out = stream or sys.stderr
    print("Warning: Password input may be echoed.", file=out)
    return _raw_input(prompt, out)
def _raw_input(prompt="", stream=None, input=None):
# This doesn't save the string in the GNU readline history.
if not stream:
stream = sys.stderr
if not input:
input = sys.stdin
prompt = str(prompt)
if prompt:
stream.write(prompt)
stream.flush()
# NOTE: The Python C API calls flockfile() (and unlock) during readline.
line = input.readline()
if not line:
raise EOFError
if line[-1] == '\n':
line = line[:-1]
return line
def getuser():
    """Get the username from the environment or password database.
    LOGNAME, USER, LNAME and USERNAME are checked in that order, then
    the password database is consulted.  This works on Windows as long
    as USERNAME is set.
    """
    for var in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
        value = os.environ.get(var)
        if value:
            return value
    # If this fails, the exception will "explain" why
    import pwd
    return pwd.getpwuid(os.getuid())[0]
# Bind the name getpass to the appropriate function
try:
    import termios
    # it's possible there is an incompatible termios from the
    # McMillan Installer, make sure we have a UNIX-compatible termios
    termios.tcgetattr, termios.tcsetattr
except (ImportError, AttributeError):
    try:
        import msvcrt
    except ImportError:
        # Neither termios nor msvcrt available: echo cannot be disabled.
        getpass = fallback_getpass
    else:
        # Windows console available.
        getpass = win_getpass
else:
    # Working UNIX termios: the preferred implementation.
    getpass = unix_getpass
|
{
"content_hash": "b888a4758ef31c374342b292e5d705f6",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 81,
"avg_line_length": 32.18333333333333,
"alnum_prop": 0.5877783531848783,
"repo_name": "timm/timmnix",
"id": "53c38b8897569140f8bd35e73b1e2677a47351a0",
"size": "5793",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pypy3-v5.5.0-linux64/lib-python/3/getpass.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1641"
},
{
"name": "Batchfile",
"bytes": "1234"
},
{
"name": "C",
"bytes": "436685"
},
{
"name": "CSS",
"bytes": "96"
},
{
"name": "Common Lisp",
"bytes": "4"
},
{
"name": "Emacs Lisp",
"bytes": "290698"
},
{
"name": "HTML",
"bytes": "111577"
},
{
"name": "Makefile",
"bytes": "1681"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PowerShell",
"bytes": "1540"
},
{
"name": "Prolog",
"bytes": "14301"
},
{
"name": "Python",
"bytes": "21267592"
},
{
"name": "Roff",
"bytes": "21080"
},
{
"name": "Shell",
"bytes": "27687"
},
{
"name": "TeX",
"bytes": "3052861"
},
{
"name": "VBScript",
"bytes": "481"
}
],
"symlink_target": ""
}
|
import itertools
import py
from rpython.rlib.objectmodel import r_dict, compute_identity_hash, specialize
from rpython.rlib.rarithmetic import intmask
from rpython.rlib.unroll import unrolling_iterable
from rpython.rlib.objectmodel import we_are_translated
from rpython.jit.metainterp import resoperation
from rpython.rlib.debug import make_sure_not_resized
from rpython.jit.metainterp.resoperation import rop
from rpython.jit.metainterp.resume import Snapshot, AccumInfo
# ____________________________________________________________
# Misc. utilities
def _findall(Class, name_prefix, op_prefix=None):
    """Collect (opnum, opclass, unbound handler) triples for every resop
    handler defined on *Class* whose name starts with *name_prefix*
    (optionally restricted to operations starting with *op_prefix*).

    The result is wrapped in an unrolling_iterable so the dispatch loop
    built on top of it can be unrolled at translation time.
    """
    result = []
    for name in dir(Class):
        if name.startswith(name_prefix):
            opname = name[len(name_prefix):]
            if opname.isupper():
                # every ALL-CAPS handler suffix must name a real operation
                assert hasattr(resoperation.rop, opname)
    for value, name in resoperation.opname.items():
        if op_prefix and not name.startswith(op_prefix):
            continue
        if hasattr(Class, name_prefix + name):
            opclass = resoperation.opclasses[getattr(rop, name)]
            # sanity check: the op class name should contain the op name
            assert name in opclass.__name__
            result.append((value, opclass, getattr(Class, name_prefix + name)))
    return unrolling_iterable(result)
def make_dispatcher_method(Class, name_prefix, op_prefix=None, default=None):
    """Build a ``dispatch(self, op, *args)`` method for *Class* that routes
    *op* to the handler named ``name_prefix + OPNAME``, falling back to
    *default* (if given) when no handler matches.
    """
    ops = _findall(Class, name_prefix, op_prefix)
    def dispatch(self, op, *args):
        if we_are_translated():
            # translated: the handler table is unrolled into a chain of
            # opnum comparisons
            opnum = op.getopnum()
            for value, cls, func in ops:
                if opnum == value:
                    assert isinstance(op, cls)
                    return func(self, op, *args)
            if default:
                return default(self, op, *args)
        else:
            # not translated: look the handler up dynamically by name
            func = getattr(Class, name_prefix + op.getopname().upper(), None)
            if func is not None:
                return func(self, op, *args)
            if default:
                return default(self, op, *args)
    dispatch.func_name = "dispatch_" + name_prefix
    return dispatch
def partition(array, left, right):
    """Lomuto partition of array[left:right+1] around the last element.
    Items are ordered by their sort_key(); returns the pivot's final index.
    """
    pivot_item = array[right]
    pivot_key = pivot_item.sort_key()
    boundary = left
    for idx in range(left, right):
        if array[idx].sort_key() <= pivot_key:
            array[idx], array[boundary] = array[boundary], array[idx]
            boundary += 1
    # Move pivot to its final place
    array[boundary], array[right] = pivot_item, array[boundary]
    return boundary
def quicksort(array, left, right):
    """Recursively sort array[left:right+1] in place (bounds inclusive)."""
    if right <= left:
        return
    mid = partition(array, left, right)
    quicksort(array, left, mid - 1)
    quicksort(array, mid + 1, right)
def sort_descrs(lst):
    """Sort *lst* in place by sort_key().
    (Currently unused -- kept around pending a decision to delete it.)
    """
    quicksort(lst, 0, len(lst) - 1)
# ____________________________________________________________
def args_eq(args1, args2):
    # Equality callback for the r_dict built by args_dict(): two argument
    # lists are equal when they have the same length and each pair is
    # either both None or compares equal via same_box().
    # (RPython code -- the explicit index loop is deliberate.)
    make_sure_not_resized(args1)
    make_sure_not_resized(args2)
    if len(args1) != len(args2):
        return False
    for i in range(len(args1)):
        arg1 = args1[i]
        arg2 = args2[i]
        if arg1 is None:
            if arg2 is not None:
                return False
        elif not arg1.same_box(arg2):
            return False
    return True
def args_hash(args):
    # Hash callback paired with args_eq for args_dict(); modelled on the
    # classic tuple-hash recurrence (seed 0x345678, multiplier 1000003),
    # with 17 standing in for None entries.  intmask() keeps the running
    # value inside machine-integer range for RPython.
    make_sure_not_resized(args)
    res = 0x345678
    for arg in args:
        if arg is None:
            y = 17
        else:
            y = arg._get_hash_()
        res = intmask((1000003 * res) ^ y)
    return res
@specialize.call_location()
def args_dict():
    # Fresh r_dict keyed by argument lists; specialized per call site so
    # the translator generates a distinct dict implementation for each.
    return r_dict(args_eq, args_hash)
# ____________________________________________________________
def equaloplists(oplist1, oplist2, strict_fail_args=True, remap={},
                 text_right=None):
    """Assert that two resop lists are equivalent, printing them side by
    side first; returns True on success, fails an assert otherwise.

    ``remap`` maps boxes of oplist2 to boxes of oplist1 and is extended
    with the result boxes of matched non-void operations as we go.
    NOTE(review): the mutable default ``remap={}`` is shared between
    calls that rely on the default -- confirm callers always pass their
    own dict or accept the accumulated state.
    """
    # try to use the full width of the terminal to display the list
    # unfortunately, does not work with the default capture method of py.test
    # (which is fd), so you need to use either -s or --capture=sys, else you
    # get the standard 80 columns width
    totwidth = py.io.get_terminal_width()
    width = totwidth / 2 - 1
    print ' Comparing lists '.center(totwidth, '-')
    text_right = text_right or 'expected'
    memo = {}
    print '%s| %s' % ('optimized'.center(width), text_right.center(width))
    for op1, op2 in itertools.izip_longest(oplist1, oplist2, fillvalue=''):
        if op1:
            txt1 = op1.repr(memo)
        else:
            txt1 = ''
        if op2:
            txt2 = op2.repr(memo)
        else:
            txt2 = ''
        # wrap long reprs into width-sized chunks, flagging differing rows
        while txt1 or txt2:
            part1 = txt1[:width]
            part2 = txt2[:width]
            if part1 == part2:
                sep = '| '
            else:
                sep = '<>'
            print '%s%s%s' % (part1.ljust(width), sep, part2)
            txt1 = txt1[width:]
            txt2 = txt2[width:]
    print '-' * totwidth
    for i_count, (op1, op2) in enumerate(zip(oplist1, oplist2)):
        assert op1.getopnum() == op2.getopnum()
        assert op1.numargs() == op2.numargs()
        for i in range(op1.numargs()):
            x = op1.getarg(i)
            y = op2.getarg(i)
            assert x.same_box(remap.get(y, y))
            assert x.same_shape(remap.get(y, y))
        if op2 in remap:
            assert op1.same_box(remap[op2])
        else:
            # record the result-box correspondence for non-void ops
            if op1.type != 'v':
                remap[op2] = op1
        if op1.getopnum() not in [rop.JUMP, rop.LABEL, rop.FINISH] and not op1.is_guard():
            assert op1.getdescr() == op2.getdescr()
        if op1.getfailargs() or op2.getfailargs():
            assert len(op1.getfailargs()) == len(op2.getfailargs())
            if strict_fail_args:
                # positional comparison of the failargs
                for x, y in zip(op1.getfailargs(), op2.getfailargs()):
                    if x is None:
                        # a deliberately-dropped failarg must stay dropped
                        assert remap.get(y, y) is None
                    else:
                        assert x.same_box(remap.get(y, y))
                        assert x.same_shape(remap.get(y, y))
            else:
                # order-insensitive comparison of the failargs
                fail_args1 = set(op1.getfailargs())
                fail_args2 = set([remap.get(y, y) for y in op2.getfailargs()])
                for x in fail_args1:
                    for y in fail_args2:
                        if x.same_box(y):
                            fail_args2.remove(y)
                            break
                    else:
                        assert False
    assert len(oplist1) == len(oplist2)
    return True
|
{
"content_hash": "abdf060fe8484741cc89c789e5eb33f5",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 90,
"avg_line_length": 35.80555555555556,
"alnum_prop": 0.5456943366951125,
"repo_name": "jptomo/rpython-lang-scheme",
"id": "c434fb63d511023edc733b6331349c6d00d0a3c1",
"size": "6445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rpython/jit/metainterp/optimizeopt/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "161293"
},
{
"name": "Batchfile",
"bytes": "5289"
},
{
"name": "C",
"bytes": "335765"
},
{
"name": "C++",
"bytes": "12638"
},
{
"name": "Emacs Lisp",
"bytes": "3149"
},
{
"name": "HCL",
"bytes": "155"
},
{
"name": "Makefile",
"bytes": "6988"
},
{
"name": "Objective-C",
"bytes": "1907"
},
{
"name": "Python",
"bytes": "16129160"
},
{
"name": "Scheme",
"bytes": "3"
},
{
"name": "Shell",
"bytes": "721"
},
{
"name": "VimL",
"bytes": "1107"
}
],
"symlink_target": ""
}
|
import requests
import datetime
import json
class Publisher(object):
    """Pushes a sensor reading to the local persistence REST endpoint."""

    # NOTE(review): hard-coded API token; should come from configuration.
    _TOKEN = "878559b6d7baf6fcede17397fc390c5b9d7cbb77"

    def set_publisher_local(self, id_sensor, value, contextServer):
        """POST *value* with the current local timestamp to the
        persistances endpoint.

        NOTE(review): ``id_sensor`` and ``contextServer`` are currently
        ignored -- the payload hard-codes sensor '1' and contextServer
        '1'; confirm whether they should be forwarded instead.
        (A large block of commented-out experimental code was removed.)
        """
        headers = {'Authorization': 'token %s' % self._TOKEN}
        # The service expects a naive local timestamp in this format.
        collect_date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        payload = {'collectDate': collect_date, 'value': value,
                   'sensor': '1', 'contextServer': '1'}
        requests.post("http://localhost:8000/persistances/",
                      data=payload, headers=headers)
#------------------- Example: reformat the timestamp, dropping the timezone offset -------------------
#import requests
#import datetime
#import time
#headers = {'Authorization':'token %s' % "efd7b8057d8eb6951a3138cbfd9b72cf68b17295"}
#date_now = datetime.datetime.now()
#date_str = date_now.strftime("%Y-%m-%d %H:%M:%S")
#payload = {'collectDate': date_str, 'value': '10', 'sensor': '1', 'contextServer':'1'}
#r = requests.post("http://localhost:8000/persistances/", data=payload, headers=headers)
|
{
"content_hash": "280025fa3d31c16805cb07444619813c",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 105,
"avg_line_length": 43.25423728813559,
"alnum_prop": 0.6105015673981191,
"repo_name": "hubertokf/lupsEdgeServer",
"id": "0551fd6d7a435ec4d387e4c9859f8c31f40dfc78",
"size": "2569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/old_files/publisher.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "API Blueprint",
"bytes": "2080"
},
{
"name": "Batchfile",
"bytes": "3406"
},
{
"name": "C",
"bytes": "10317"
},
{
"name": "CSS",
"bytes": "25359"
},
{
"name": "HTML",
"bytes": "44749"
},
{
"name": "JavaScript",
"bytes": "94055"
},
{
"name": "Makefile",
"bytes": "2949"
},
{
"name": "Python",
"bytes": "1022058"
},
{
"name": "Shell",
"bytes": "8799"
}
],
"symlink_target": ""
}
|
"""
Tests for L{twisted.cred._digest} and the associated bits in
L{twisted.cred.credentials}.
"""
from zope.interface.verify import verifyObject
from twisted.trial.unittest import TestCase
from twisted.python.hashlib import md5, sha1
from twisted.internet.address import IPv4Address
from twisted.cred.error import LoginFailed
from twisted.cred.credentials import calcHA1, calcHA2, IUsernameDigestHash
from twisted.cred.credentials import calcResponse, DigestCredentialFactory
def b64encode(s):
    """Base64-encode *s* via Python 2's 'base64' string codec, stripping
    the trailing newline the codec appends.  (Python 2 only; note the
    codec wraps long output with embedded newlines, which strip() does
    not remove.)
    """
    return s.encode('base64').strip()
class FakeDigestCredentialFactory(DigestCredentialFactory):
    """
    A DigestCredentialFactory whose private key, nonce and timestamp are
    fully deterministic, for use in tests.
    """
    def __init__(self, *args, **kwargs):
        DigestCredentialFactory.__init__(self, *args, **kwargs)
        self.privateKey = "0"

    def _generateNonce(self):
        """
        Return a fixed nonce instead of a random one.
        """
        return '178288758716122392881254770685'

    def _getTime(self):
        """
        Return a constant timestamp.
        """
        return 0
class DigestAuthTests(TestCase):
"""
L{TestCase} mixin class which defines a number of tests for
L{DigestCredentialFactory}. Because this mixin defines C{setUp}, it
must be inherited before L{TestCase}.
"""
    def setUp(self):
        """
        Create a DigestCredentialFactory for testing
        """
        # Fixed client-side values the individual tests hash against.
        self.username = "foobar"
        self.password = "bazquux"
        self.realm = "test realm"
        self.algorithm = "md5"
        # client nonce and quality-of-protection used in the digests
        self.cnonce = "29fc54aa1641c6fa0e151419361c8f23"
        self.qop = "auth"
        self.uri = "/write/"
        self.clientAddress = IPv4Address('TCP', '10.2.3.4', 43125)
        self.method = 'GET'
        self.credentialFactory = DigestCredentialFactory(
            self.algorithm, self.realm)
def test_MD5HashA1(self, _algorithm='md5', _hash=md5):
"""
L{calcHA1} accepts the C{'md5'} algorithm and returns an MD5 hash of
its parameters, excluding the nonce and cnonce.
"""
nonce = 'abc123xyz'
hashA1 = calcHA1(_algorithm, self.username, self.realm, self.password,
nonce, self.cnonce)
a1 = '%s:%s:%s' % (self.username, self.realm, self.password)
expected = _hash(a1).hexdigest()
self.assertEqual(hashA1, expected)
def test_MD5SessionHashA1(self):
"""
L{calcHA1} accepts the C{'md5-sess'} algorithm and returns an MD5 hash
of its parameters, including the nonce and cnonce.
"""
nonce = 'xyz321abc'
hashA1 = calcHA1('md5-sess', self.username, self.realm, self.password,
nonce, self.cnonce)
a1 = '%s:%s:%s' % (self.username, self.realm, self.password)
ha1 = md5(a1).digest()
a1 = '%s:%s:%s' % (ha1, nonce, self.cnonce)
expected = md5(a1).hexdigest()
self.assertEqual(hashA1, expected)
def test_SHAHashA1(self):
"""
L{calcHA1} accepts the C{'sha'} algorithm and returns a SHA hash of its
parameters, excluding the nonce and cnonce.
"""
self.test_MD5HashA1('sha', sha1)
def test_MD5HashA2Auth(self, _algorithm='md5', _hash=md5):
"""
L{calcHA2} accepts the C{'md5'} algorithm and returns an MD5 hash of
its arguments, excluding the entity hash for QOP other than
C{'auth-int'}.
"""
method = 'GET'
hashA2 = calcHA2(_algorithm, method, self.uri, 'auth', None)
a2 = '%s:%s' % (method, self.uri)
expected = _hash(a2).hexdigest()
self.assertEqual(hashA2, expected)
def test_MD5HashA2AuthInt(self, _algorithm='md5', _hash=md5):
"""
L{calcHA2} accepts the C{'md5'} algorithm and returns an MD5 hash of
its arguments, including the entity hash for QOP of C{'auth-int'}.
"""
method = 'GET'
hentity = 'foobarbaz'
hashA2 = calcHA2(_algorithm, method, self.uri, 'auth-int', hentity)
a2 = '%s:%s:%s' % (method, self.uri, hentity)
expected = _hash(a2).hexdigest()
self.assertEqual(hashA2, expected)
def test_MD5SessHashA2Auth(self):
"""
L{calcHA2} accepts the C{'md5-sess'} algorithm and QOP of C{'auth'} and
returns the same value as it does for the C{'md5'} algorithm.
"""
self.test_MD5HashA2Auth('md5-sess')
def test_MD5SessHashA2AuthInt(self):
"""
L{calcHA2} accepts the C{'md5-sess'} algorithm and QOP of C{'auth-int'}
and returns the same value as it does for the C{'md5'} algorithm.
"""
self.test_MD5HashA2AuthInt('md5-sess')
def test_SHAHashA2Auth(self):
"""
L{calcHA2} accepts the C{'sha'} algorithm and returns a SHA hash of
its arguments, excluding the entity hash for QOP other than
C{'auth-int'}.
"""
self.test_MD5HashA2Auth('sha', sha1)
def test_SHAHashA2AuthInt(self):
"""
L{calcHA2} accepts the C{'sha'} algorithm and returns a SHA hash of
its arguments, including the entity hash for QOP of C{'auth-int'}.
"""
self.test_MD5HashA2AuthInt('sha', sha1)
def test_MD5HashResponse(self, _algorithm='md5', _hash=md5):
"""
L{calcResponse} accepts the C{'md5'} algorithm and returns an MD5 hash
of its parameters, excluding the nonce count, client nonce, and QoP
value if the nonce count and client nonce are C{None}
"""
hashA1 = 'abc123'
hashA2 = '789xyz'
nonce = 'lmnopq'
response = '%s:%s:%s' % (hashA1, nonce, hashA2)
expected = _hash(response).hexdigest()
digest = calcResponse(hashA1, hashA2, _algorithm, nonce, None, None,
None)
self.assertEqual(expected, digest)
def test_MD5SessionHashResponse(self):
"""
L{calcResponse} accepts the C{'md5-sess'} algorithm and returns an MD5
hash of its parameters, excluding the nonce count, client nonce, and
QoP value if the nonce count and client nonce are C{None}
"""
self.test_MD5HashResponse('md5-sess')
def test_SHAHashResponse(self):
"""
L{calcResponse} accepts the C{'sha'} algorithm and returns a SHA hash
of its parameters, excluding the nonce count, client nonce, and QoP
value if the nonce count and client nonce are C{None}
"""
self.test_MD5HashResponse('sha', sha1)
def test_MD5HashResponseExtra(self, _algorithm='md5', _hash=md5):
"""
L{calcResponse} accepts the C{'md5'} algorithm and returns an MD5 hash
of its parameters, including the nonce count, client nonce, and QoP
value if they are specified.
"""
hashA1 = 'abc123'
hashA2 = '789xyz'
nonce = 'lmnopq'
nonceCount = '00000004'
clientNonce = 'abcxyz123'
qop = 'auth'
response = '%s:%s:%s:%s:%s:%s' % (
hashA1, nonce, nonceCount, clientNonce, qop, hashA2)
expected = _hash(response).hexdigest()
digest = calcResponse(
hashA1, hashA2, _algorithm, nonce, nonceCount, clientNonce, qop)
self.assertEqual(expected, digest)
    def test_MD5SessionHashResponseExtra(self):
        """
        L{calcResponse} accepts the C{'md5-sess'} algorithm and returns an MD5
        hash of its parameters, including the nonce count, client nonce, and
        QoP value if they are specified.
        """
        # Reuse the md5 "extra" test body, only overriding the algorithm name.
        self.test_MD5HashResponseExtra('md5-sess')
    def test_SHAHashResponseExtra(self):
        """
        L{calcResponse} accepts the C{'sha'} algorithm and returns a SHA hash
        of its parameters, including the nonce count, client nonce, and QoP
        value if they are specified.
        """
        # Same checks as the md5 "extra" variant, parameterized with sha1.
        self.test_MD5HashResponseExtra('sha', sha1)
def formatResponse(self, quotes=True, **kw):
"""
Format all given keyword arguments and their values suitably for use as
the value of an HTTP header.
@types quotes: C{bool}
@param quotes: A flag indicating whether to quote the values of each
field in the response.
@param **kw: Keywords and C{str} values which will be treated as field
name/value pairs to include in the result.
@rtype: C{str}
@return: The given fields formatted for use as an HTTP header value.
"""
if 'username' not in kw:
kw['username'] = self.username
if 'realm' not in kw:
kw['realm'] = self.realm
if 'algorithm' not in kw:
kw['algorithm'] = self.algorithm
if 'qop' not in kw:
kw['qop'] = self.qop
if 'cnonce' not in kw:
kw['cnonce'] = self.cnonce
if 'uri' not in kw:
kw['uri'] = self.uri
if quotes:
quote = '"'
else:
quote = ''
return ', '.join([
'%s=%s%s%s' % (k, quote, v, quote)
for (k, v)
in kw.iteritems()
if v is not None])
    def getDigestResponse(self, challenge, ncount):
        """
        Calculate the response for the given challenge

        @param challenge: A challenge C{dict} with C{'nonce'}, C{'algorithm'}
            and C{'qop'} keys, as produced by the credential factory.
        @param ncount: The nonce count string to use for this response.
        @return: The digest response string computed by L{calcResponse}.
        """
        nonce = challenge.get('nonce')
        # Lower-case the algorithm so the challenge's casing does not matter
        # (see test_caseInsensitiveAlgorithm).
        algo = challenge.get('algorithm').lower()
        qop = challenge.get('qop')
        ha1 = calcHA1(
            algo, self.username, self.realm, self.password, nonce, self.cnonce)
        # Entity hash is None: these tests always use QoP 'auth', not 'auth-int'.
        ha2 = calcHA2(algo, "GET", self.uri, qop, None)
        expected = calcResponse(ha1, ha2, algo, nonce, ncount, self.cnonce, qop)
        return expected
    def test_response(self, quotes=True):
        """
        L{DigestCredentialFactory.decode} accepts a digest challenge response
        and parses it into an L{IUsernameHashedPassword} provider.

        @param quotes: Whether the generated response quotes its field values;
            passed through to L{formatResponse}.
        """
        challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
        nc = "00000001"
        clientResponse = self.formatResponse(
            quotes=quotes,
            nonce=challenge['nonce'],
            response=self.getDigestResponse(challenge, nc),
            nc=nc,
            opaque=challenge['opaque'])
        creds = self.credentialFactory.decode(
            clientResponse, self.method, self.clientAddress.host)
        # The right password verifies; a corrupted one must not.
        self.assertTrue(creds.checkPassword(self.password))
        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
    def test_responseWithoutQuotes(self):
        """
        L{DigestCredentialFactory.decode} accepts a digest challenge response
        which does not quote the values of its fields and parses it into an
        L{IUsernameHashedPassword} provider in the same way it would a
        response which included quoted field values.
        """
        # Same scenario as test_response, with unquoted field values.
        self.test_response(False)
    def test_caseInsensitiveAlgorithm(self):
        """
        The case of the algorithm value in the response is ignored when
        checking the credentials.
        """
        # Upper-case the fixture algorithm and rerun the round-trip test.
        self.algorithm = 'MD5'
        self.test_response()
    def test_md5DefaultAlgorithm(self):
        """
        The algorithm defaults to MD5 if it is not supplied in the response.
        """
        # None makes formatResponse omit the algorithm field entirely.
        self.algorithm = None
        self.test_response()
    def test_responseWithoutClientIP(self):
        """
        L{DigestCredentialFactory.decode} accepts a digest challenge response
        even if the client address it is passed is C{None}.
        """
        # Challenge and decode both receive None instead of a client address.
        challenge = self.credentialFactory.getChallenge(None)
        nc = "00000001"
        clientResponse = self.formatResponse(
            nonce=challenge['nonce'],
            response=self.getDigestResponse(challenge, nc),
            nc=nc,
            opaque=challenge['opaque'])
        creds = self.credentialFactory.decode(clientResponse, self.method, None)
        self.assertTrue(creds.checkPassword(self.password))
        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
def test_multiResponse(self):
"""
L{DigestCredentialFactory.decode} handles multiple responses to a
single challenge.
"""
challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
nc = "00000001"
clientResponse = self.formatResponse(
nonce=challenge['nonce'],
response=self.getDigestResponse(challenge, nc),
nc=nc,
opaque=challenge['opaque'])
creds = self.credentialFactory.decode(clientResponse, self.method,
self.clientAddress.host)
self.assertTrue(creds.checkPassword(self.password))
self.assertFalse(creds.checkPassword(self.password + 'wrong'))
nc = "00000002"
clientResponse = self.formatResponse(
nonce=challenge['nonce'],
response=self.getDigestResponse(challenge, nc),
nc=nc,
opaque=challenge['opaque'])
creds = self.credentialFactory.decode(clientResponse, self.method,
self.clientAddress.host)
self.assertTrue(creds.checkPassword(self.password))
self.assertFalse(creds.checkPassword(self.password + 'wrong'))
    def test_failsWithDifferentMethod(self):
        """
        L{DigestCredentialFactory.decode} returns an L{IUsernameHashedPassword}
        provider which rejects a correct password for the given user if the
        challenge response request is made using a different HTTP method than
        was used to request the initial challenge.
        """
        challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
        nc = "00000001"
        clientResponse = self.formatResponse(
            nonce=challenge['nonce'],
            response=self.getDigestResponse(challenge, nc),
            nc=nc,
            opaque=challenge['opaque'])
        # The response was computed for GET; decoding as POST must not verify.
        creds = self.credentialFactory.decode(clientResponse, 'POST',
                                              self.clientAddress.host)
        self.assertFalse(creds.checkPassword(self.password))
        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
def test_noUsername(self):
"""
L{DigestCredentialFactory.decode} raises L{LoginFailed} if the response
has no username field or if the username field is empty.
"""
# Check for no username
e = self.assertRaises(
LoginFailed,
self.credentialFactory.decode,
self.formatResponse(username=None),
self.method, self.clientAddress.host)
self.assertEqual(str(e), "Invalid response, no username given.")
# Check for an empty username
e = self.assertRaises(
LoginFailed,
self.credentialFactory.decode,
self.formatResponse(username=""),
self.method, self.clientAddress.host)
self.assertEqual(str(e), "Invalid response, no username given.")
    def test_noNonce(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} if the response
        has no nonce.
        """
        # formatResponse is not given a nonce, so the field is absent.
        e = self.assertRaises(
            LoginFailed,
            self.credentialFactory.decode,
            self.formatResponse(opaque="abc123"),
            self.method, self.clientAddress.host)
        self.assertEqual(str(e), "Invalid response, no nonce given.")
    def test_noOpaque(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} if the response
        has no opaque.
        """
        # formatResponse is not given an opaque, so the field is absent.
        e = self.assertRaises(
            LoginFailed,
            self.credentialFactory.decode,
            self.formatResponse(),
            self.method, self.clientAddress.host)
        self.assertEqual(str(e), "Invalid response, no opaque given.")
def test_checkHash(self):
"""
L{DigestCredentialFactory.decode} returns an L{IUsernameDigestHash}
provider which can verify a hash of the form 'username:realm:password'.
"""
challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
nc = "00000001"
clientResponse = self.formatResponse(
nonce=challenge['nonce'],
response=self.getDigestResponse(challenge, nc),
nc=nc,
opaque=challenge['opaque'])
creds = self.credentialFactory.decode(clientResponse, self.method,
self.clientAddress.host)
self.assertTrue(verifyObject(IUsernameDigestHash, creds))
cleartext = '%s:%s:%s' % (self.username, self.realm, self.password)
hash = md5(cleartext)
self.assertTrue(creds.checkHash(hash.hexdigest()))
hash.update('wrong')
self.assertFalse(creds.checkHash(hash.hexdigest()))
    def test_invalidOpaque(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the opaque
        value does not contain all the required parts.
        """
        credentialFactory = FakeDigestCredentialFactory(self.algorithm,
                                                        self.realm)
        challenge = credentialFactory.getChallenge(self.clientAddress.host)
        # Case 1: opaque with no checksum/payload separator structure at all.
        exc = self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            'badOpaque',
            challenge['nonce'],
            self.clientAddress.host)
        self.assertEqual(str(exc), 'Invalid response, invalid opaque value')
        # Case 2: payload decodes but only has two comma-separated parts.
        badOpaque = 'foo-' + b64encode('nonce,clientip')
        exc = self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            badOpaque,
            challenge['nonce'],
            self.clientAddress.host)
        self.assertEqual(str(exc), 'Invalid response, invalid opaque value')
        # Case 3: completely empty opaque.
        exc = self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            '',
            challenge['nonce'],
            self.clientAddress.host)
        self.assertEqual(str(exc), 'Invalid response, invalid opaque value')
        # Case 4: right structure, but a non-numeric timestamp field.
        badOpaque = (
            'foo-' + b64encode('%s,%s,foobar' % (
                challenge['nonce'],
                self.clientAddress.host)))
        exc = self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            badOpaque,
            challenge['nonce'],
            self.clientAddress.host)
        self.assertEqual(
            str(exc), 'Invalid response, invalid opaque/time values')
    def test_incompatibleNonce(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the given
        nonce from the response does not match the nonce encoded in the opaque.
        """
        credentialFactory = FakeDigestCredentialFactory(self.algorithm, self.realm)
        challenge = credentialFactory.getChallenge(self.clientAddress.host)
        # Generate an opaque bound to a different nonce than the challenge's.
        badNonceOpaque = credentialFactory._generateOpaque(
            '1234567890',
            self.clientAddress.host)
        exc = self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            badNonceOpaque,
            challenge['nonce'],
            self.clientAddress.host)
        self.assertEqual(
            str(exc),
            'Invalid response, incompatible opaque/nonce values')
        # An empty nonce must fail the same way.
        exc = self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            badNonceOpaque,
            '',
            self.clientAddress.host)
        self.assertEqual(
            str(exc),
            'Invalid response, incompatible opaque/nonce values')
    def test_incompatibleClientIP(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the
        request comes from a client IP other than what is encoded in the
        opaque.
        """
        credentialFactory = FakeDigestCredentialFactory(self.algorithm, self.realm)
        challenge = credentialFactory.getChallenge(self.clientAddress.host)
        badAddress = '10.0.0.1'
        # Sanity check
        self.assertNotEqual(self.clientAddress.host, badAddress)
        # Opaque bound to badAddress but verified against the real address.
        badNonceOpaque = credentialFactory._generateOpaque(
            challenge['nonce'], badAddress)
        self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            badNonceOpaque,
            challenge['nonce'],
            self.clientAddress.host)
    def test_oldNonce(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the given
        opaque is older than C{DigestCredentialFactory.CHALLENGE_LIFETIME_SECS}
        """
        credentialFactory = FakeDigestCredentialFactory(self.algorithm,
                                                        self.realm)
        challenge = credentialFactory.getChallenge(self.clientAddress.host)
        # Hand-build an opaque whose timestamp field is far in the past.
        key = '%s,%s,%s' % (challenge['nonce'],
                            self.clientAddress.host,
                            '-137876876')
        digest = md5(key + credentialFactory.privateKey).hexdigest()
        ekey = b64encode(key)
        oldNonceOpaque = '%s-%s' % (digest, ekey.strip('\n'))
        self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            oldNonceOpaque,
            challenge['nonce'],
            self.clientAddress.host)
    def test_mismatchedOpaqueChecksum(self):
        """
        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the opaque
        checksum fails verification.
        """
        credentialFactory = FakeDigestCredentialFactory(self.algorithm,
                                                        self.realm)
        challenge = credentialFactory.getChallenge(self.clientAddress.host)
        key = '%s,%s,%s' % (challenge['nonce'],
                            self.clientAddress.host,
                            '0')
        # Sign the payload with the wrong private key so the checksum differs.
        digest = md5(key + 'this is not the right pkey').hexdigest()
        badChecksum = '%s-%s' % (digest, b64encode(key))
        self.assertRaises(
            LoginFailed,
            credentialFactory._verifyOpaque,
            badChecksum,
            challenge['nonce'],
            self.clientAddress.host)
    def test_incompatibleCalcHA1Options(self):
        """
        L{calcHA1} raises L{TypeError} when any of the pszUsername, pszRealm,
        or pszPassword arguments are specified with the preHA1 keyword
        argument.
        """
        # Each tuple combines preHA1 with at least one of the incompatible
        # positional credential arguments.
        arguments = (
            ("user", "realm", "password", "preHA1"),
            (None, "realm", None, "preHA1"),
            (None, None, "password", "preHA1"),
            )
        for pszUsername, pszRealm, pszPassword, preHA1 in arguments:
            self.assertRaises(
                TypeError,
                calcHA1,
                "md5",
                pszUsername,
                pszRealm,
                pszPassword,
                "nonce",
                "cnonce",
                preHA1=preHA1)
    def test_noNewlineOpaque(self):
        """
        L{DigestCredentialFactory._generateOpaque} returns a value without
        newlines, regardless of the length of the nonce.
        """
        # A long nonce would make a naive base64 encoding wrap lines.
        opaque = self.credentialFactory._generateOpaque(
            "long nonce " * 10, None)
        self.assertNotIn('\n', opaque)
|
{
"content_hash": "24e14f0233ce06402adcd5e6bbb8bbb7",
"timestamp": "",
"source": "github",
"line_count": 668,
"max_line_length": 83,
"avg_line_length": 35.01047904191617,
"alnum_prop": 0.5958866036687048,
"repo_name": "biddisco/VTK",
"id": "41368a02eb3433985c06b0f74cf50472ad17c85e",
"size": "23460",
"binary": false,
"copies": "29",
"ref": "refs/heads/master",
"path": "ThirdParty/Twisted/twisted/test/test_digestauth.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "C",
"bytes": "45542302"
},
{
"name": "C++",
"bytes": "60467840"
},
{
"name": "CSS",
"bytes": "157961"
},
{
"name": "Cuda",
"bytes": "28721"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "IDL",
"bytes": "4406"
},
{
"name": "Java",
"bytes": "184678"
},
{
"name": "JavaScript",
"bytes": "978324"
},
{
"name": "Objective-C",
"bytes": "121232"
},
{
"name": "Objective-C++",
"bytes": "101052"
},
{
"name": "Pascal",
"bytes": "3255"
},
{
"name": "Perl",
"bytes": "177007"
},
{
"name": "Python",
"bytes": "13262355"
},
{
"name": "Shell",
"bytes": "41929"
},
{
"name": "Tcl",
"bytes": "1894036"
}
],
"symlink_target": ""
}
|
import unittest
from libpytunes import Library
import os
class TestLibrary(unittest.TestCase):
    """Smoke tests for parsing the bundled iTunes XML library fixture."""

    def setUp(self):
        # Parse the fixture library shipped next to this test module.
        self.it_library = Library(os.path.join(os.path.dirname(__file__), "Test Library.xml"))

    def test_songs(self):
        """Every parsed song exposes a name attribute."""
        # assertTrue instead of a bare assert: reported properly by unittest
        # and not stripped when Python runs with -O.
        for song_id, song in self.it_library.songs.items():
            self.assertTrue(hasattr(song, 'name'))

    def test_playlists(self):
        """Tracks of the first playlist expose the expected attributes."""
        playlists = self.it_library.getPlaylistNames()
        for song in self.it_library.getPlaylist(playlists[0]).tracks:
            self.assertTrue(hasattr(song, 'track_number'))
            self.assertTrue(hasattr(song, 'artist'))
            self.assertTrue(hasattr(song, 'name'))
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    unittest.main()
|
{
"content_hash": "826d791b48b7f8b869e68c9d1f504bce",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 94,
"avg_line_length": 27.52,
"alnum_prop": 0.625,
"repo_name": "liamks/pyitunes",
"id": "c3244306ec43b992e930c1d3e3f8441095ffc0db",
"size": "688",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "libpytunes/tests/test_library.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10583"
}
],
"symlink_target": ""
}
|
import django_filters.rest_framework
from rest_framework import filters
from rest_framework import permissions, renderers, viewsets
from cbe.human_resources.models import IdentificationType, Identification, Staff, Timesheet, TimesheetEntry
from cbe.human_resources.serializers import IdentificationTypeSerializer, IdentificationSerializer, StaffSerializer, TimesheetSerializer, TimesheetEntrySerializer
class TimesheetViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Timesheet records."""
    # NOTE(review): unlike the other viewsets in this module, no
    # permission_classes is set here — confirm whether
    # DjangoModelPermissions was intended.
    queryset = Timesheet.objects.all()
    serializer_class = TimesheetSerializer
class TimesheetEntryViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for TimesheetEntry records."""
    # NOTE(review): no permission_classes here either, unlike the viewsets
    # below — confirm whether that is intentional.
    queryset = TimesheetEntry.objects.all()
    serializer_class = TimesheetEntrySerializer
class StaffViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Staff records, guarded by model permissions."""
    queryset = Staff.objects.all()
    serializer_class = StaffSerializer
    permission_classes = (permissions.DjangoModelPermissions, )
class IdentificationViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Identification records, guarded by model permissions."""
    queryset = Identification.objects.all()
    serializer_class = IdentificationSerializer
    permission_classes = (permissions.DjangoModelPermissions, )
class IdentificationTypeViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for IdentificationType records, guarded by model permissions."""
    queryset = IdentificationType.objects.all()
    serializer_class = IdentificationTypeSerializer
    permission_classes = (permissions.DjangoModelPermissions, )
|
{
"content_hash": "e6772eb38fb38126955e3bb76d93e152",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 162,
"avg_line_length": 40.21212121212121,
"alnum_prop": 0.7980406932931424,
"repo_name": "Semprini/cbe",
"id": "049b2a6ead007f9e1c367238cfc2e0670a8739a7",
"size": "1327",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cbe/cbe/human_resources/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2292"
},
{
"name": "HTML",
"bytes": "3112"
},
{
"name": "PowerShell",
"bytes": "20448"
},
{
"name": "Python",
"bytes": "241197"
}
],
"symlink_target": ""
}
|
"""Add missing foreign keys
Revision ID: 2e5352a0ad4d
Revises: 2a16083502f3
Create Date: 2015-08-20 12:43:09.110427
"""
# revision identifiers, used by Alembic.
revision = '2e5352a0ad4d'
down_revision = '2a16083502f3'
from alembic import op
from sqlalchemy.engine import reflection
from neutron.db import migration
TABLE_NAME = 'flavorserviceprofilebindings'
def upgrade():
    """Re-create the table's foreign keys with ON DELETE CASCADE."""
    inspector = reflection.Inspector.from_engine(op.get_bind())
    fk_constraints = inspector.get_foreign_keys(TABLE_NAME)
    for fk in fk_constraints:
        # Force cascading deletes on every existing foreign key.
        fk['options']['ondelete'] = 'CASCADE'
    # Drop and re-add the constraints so the new ondelete option takes effect.
    migration.remove_foreign_keys(TABLE_NAME, fk_constraints)
    migration.create_foreign_keys(TABLE_NAME, fk_constraints)
|
{
"content_hash": "b8e92a70970714acdb4196103c3cb779",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 63,
"avg_line_length": 24.413793103448278,
"alnum_prop": 0.748587570621469,
"repo_name": "klmitch/neutron",
"id": "4961514dd627e3d9631f324c5feb3398b0dba4e4",
"size": "1316",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "neutron/db/migration/alembic_migrations/versions/liberty/contract/2e5352a0ad4d_add_missing_foreign_keys.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "8467992"
},
{
"name": "Shell",
"bytes": "14648"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
# Read the long description up front with a context manager so the file
# handle is closed promptly instead of leaking an open README for the life
# of the process.
with open('README.md') as readme:
    long_description = readme.read()

setup(name='wallaby-app-inspector',
      version='0.1.27',
      url='https://github.com/FreshXOpenSource/wallaby-app-inspector',
      author='FreshX GbR',
      author_email='wallaby@freshx.de',
      license='BSD',
      description='IDE for wallaby.',
      long_description=long_description,
      package_data={'': ['LICENSE', 'AUTHORS', 'README.md']},
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: MacOS X',
          'Environment :: Win32 (MS Windows)',
          'Environment :: X11 Applications',
          'Framework :: Twisted',
          'Intended Audience :: Developers',
          'Intended Audience :: System Administrators',
          'License :: OSI Approved :: BSD License',
          'Operating System :: MacOS :: MacOS X',
          'Operating System :: Microsoft :: Windows',
          'Operating System :: POSIX :: Linux',
          'Programming Language :: Python :: 2.7',
          'Topic :: Software Development :: Libraries :: Application Frameworks'
      ],
      packages=find_packages('.'),
      install_requires=['wallaby-frontend-qt'],
      include_package_data=True
      )
|
{
"content_hash": "18195a982c7c7c033d87b616eb068429",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 78,
"avg_line_length": 39.333333333333336,
"alnum_prop": 0.6025423728813559,
"repo_name": "FreshXOpenSource/wallaby-app-inspector",
"id": "32fe507dabfb28640cead86424e1a94d621a72ab",
"size": "1261",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "164"
},
{
"name": "Python",
"bytes": "12479"
}
],
"symlink_target": ""
}
|
import unittest
import os
import sys
from axelrod import DeterministicCache, TitForTat, Defector, Random
class TestDeterministicCache(unittest.TestCase):
    """Tests for DeterministicCache validation, pickling, save and load."""
    @classmethod
    def setUpClass(cls):
        # Shared fixtures: a deterministic player pair, its interaction
        # history, and the expected pickle bytes (protocol differs by
        # Python major version, hence the two literals below).
        cls.test_key = (TitForTat, Defector)
        cls.test_value = [('C', 'D'), ('D', 'D'), ('D', 'D')]
        cls.test_save_file = 'test_cache_save.txt'
        cls.test_load_file = 'test_cache_load.txt'
        if sys.version_info[0] == 2:
            # Python 2.x
            cls.test_pickle = b'\x80\x02}q\x00caxelrod.strategies.titfortat\nTitForTat\nq\x01caxelrod.strategies.defector\nDefector\nq\x02\x86q\x03]q\x04(U\x01Cq\x05U\x01Dq\x06\x86q\x07h\x06h\x06\x86q\x08h\x06h\x06\x86q\tes.'
        else:
            # Python 3.x
            cls.test_pickle = b'\x80\x03}q\x00caxelrod.strategies.titfortat\nTitForTat\nq\x01caxelrod.strategies.defector\nDefector\nq\x02\x86q\x03]q\x04(X\x01\x00\x00\x00Cq\x05X\x01\x00\x00\x00Dq\x06\x86q\x07h\x06h\x06\x86q\x08h\x06h\x06\x86q\tes.'
        with open(cls.test_load_file, 'wb') as f:
            f.write(cls.test_pickle)
    @classmethod
    def tearDownClass(cls):
        # NOTE(review): assumes test_save ran and created test_save_file;
        # if it did not, this raises OSError — confirm intended.
        os.remove(cls.test_save_file)
        os.remove(cls.test_load_file)
    def test_basic_init(self):
        """A fresh cache is mutable and has no turn count yet."""
        cache = DeterministicCache()
        self.assertTrue(cache.mutable)
        self.assertEqual(cache.turns, None)
    def test_init_from_file(self):
        """Constructing with file_name loads entries and the turn count."""
        cache = DeterministicCache(file_name=self.test_load_file)
        self.assertEqual(cache[self.test_key], self.test_value)
        self.assertEqual(cache.turns, 3)
    def test_setitem(self):
        """Stored values are retrievable by key."""
        cache = DeterministicCache()
        cache[self.test_key] = self.test_value
        self.assertEqual(cache[self.test_key], self.test_value)
        # The first cached entry should set the turns attribute
        self.assertEqual(cache.turns, 3)
    def test_set_immutable_cache(self):
        """Writing to an immutable cache raises ValueError."""
        cache = DeterministicCache()
        cache.mutable = False
        with self.assertRaises(ValueError):
            cache[self.test_key] = self.test_value
    def test_is_valid_key(self):
        """Keys must be pairs of deterministic axelrod player classes."""
        cache = DeterministicCache()
        self.assertTrue(cache._is_valid_key(self.test_key))
        # Should return false if key is not a tuple
        self.assertFalse(cache._is_valid_key('test'))
        # Should return false if tuple is not a pair
        self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
        # Should return false if contents of tuple are not axelrod Players
        self.assertFalse(cache._is_valid_key(('test', 'test')))
        self.assertFalse(cache._is_valid_key((TitForTat, 'test')))
        self.assertFalse(cache._is_valid_key(('test', TitForTat)))
        # Should return false if either player class is stochastic
        self.assertFalse(cache._is_valid_key((Random, TitForTat)))
        self.assertFalse(cache._is_valid_key((TitForTat, Random)))
    def test_is_valid_value(self):
        """Values must be lists whose length matches the turn count."""
        cache = DeterministicCache()
        self.assertTrue(cache._is_valid_value(self.test_value))
        # Should return false if value is not a list
        self.assertFalse(cache._is_valid_value('test'))
        # Should return false if length does not match turns attribute
        cache.turns = 20
        self.assertFalse(cache._is_valid_value(self.test_value))
    def test_save(self):
        """Saving writes exactly the expected pickle bytes."""
        cache = DeterministicCache()
        cache[self.test_key] = self.test_value
        cache.save(self.test_save_file)
        with open(self.test_save_file, 'rb') as f:
            text = f.read()
        self.assertEqual(text, self.test_pickle)
    def test_load(self):
        """Loading restores entries and the turn count."""
        cache = DeterministicCache()
        cache.load(self.test_load_file)
        self.assertEqual(cache[self.test_key], self.test_value)
        self.assertEqual(cache.turns, 3)
|
{
"content_hash": "ca2d103d54792ae1a97ea7b4ece8b1f3",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 249,
"avg_line_length": 42.80681818181818,
"alnum_prop": 0.6493230687549775,
"repo_name": "marcharper/Axelrod",
"id": "4340bf641b142a69d4febc1911b416db2680de89",
"size": "3767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "axelrod/tests/unit/test_deterministic_cache.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "523862"
},
{
"name": "Shell",
"bytes": "1593"
}
],
"symlink_target": ""
}
|
"""Test event helpers."""
# pylint: disable=protected-access,too-many-public-methods
# pylint: disable=too-few-public-methods
import asyncio
import unittest
from datetime import datetime, timedelta
from astral import Astral
from homeassistant.bootstrap import setup_component
import homeassistant.core as ha
from homeassistant.const import MATCH_ALL
from homeassistant.helpers.event import (
track_point_in_utc_time,
track_point_in_time,
track_utc_time_change,
track_time_change,
track_state_change,
track_sunrise,
track_sunset,
)
from homeassistant.components import sun
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
class TestEventHelpers(unittest.TestCase):
"""Test the Home Assistant event helpers."""
    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        # Each test gets a fresh Home Assistant test instance.
        self.hass = get_test_home_assistant()
    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        # Shut the instance down so listeners do not leak between tests.
        self.hass.stop()
    def test_track_point_in_time(self):
        """Test track point in time."""
        before_birthday = datetime(1985, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
        birthday_paulus = datetime(1986, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
        after_birthday = datetime(1987, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
        runs = []
        track_point_in_utc_time(
            self.hass, lambda x: runs.append(1), birthday_paulus)
        # A time before the tracked point must not trigger the listener.
        self._send_time_changed(before_birthday)
        self.hass.block_till_done()
        self.assertEqual(0, len(runs))
        self._send_time_changed(birthday_paulus)
        self.hass.block_till_done()
        self.assertEqual(1, len(runs))
        # A point in time tracker will only fire once, this should do nothing
        self._send_time_changed(birthday_paulus)
        self.hass.block_till_done()
        self.assertEqual(1, len(runs))
        # The non-UTC variant also fires for times past the tracked point.
        track_point_in_time(
            self.hass, lambda x: runs.append(1), birthday_paulus)
        self._send_time_changed(after_birthday)
        self.hass.block_till_done()
        self.assertEqual(2, len(runs))
        # An unsubscribed listener must not fire.
        unsub = track_point_in_time(
            self.hass, lambda x: runs.append(1), birthday_paulus)
        unsub()
        self._send_time_changed(after_birthday)
        self.hass.block_till_done()
        self.assertEqual(2, len(runs))
    def test_track_time_change(self):
        """Test tracking time change."""
        wildcard_runs = []
        specific_runs = []
        # The wildcard listener fires on every tick; the specific one only
        # when the second is 0 or 30.
        unsub = track_time_change(self.hass, lambda x: wildcard_runs.append(1))
        unsub_utc = track_utc_time_change(
            self.hass, lambda x: specific_runs.append(1), second=[0, 30])
        self._send_time_changed(datetime(2014, 5, 24, 12, 0, 0))
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(1, len(wildcard_runs))
        self._send_time_changed(datetime(2014, 5, 24, 12, 0, 15))
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(2, len(wildcard_runs))
        self._send_time_changed(datetime(2014, 5, 24, 12, 0, 30))
        self.hass.block_till_done()
        self.assertEqual(2, len(specific_runs))
        self.assertEqual(3, len(wildcard_runs))
        # After unsubscribing, neither listener fires again.
        unsub()
        unsub_utc()
        self._send_time_changed(datetime(2014, 5, 24, 12, 0, 30))
        self.hass.block_till_done()
        self.assertEqual(2, len(specific_runs))
        self.assertEqual(3, len(wildcard_runs))
    def test_track_state_change(self):
        """Test track_state_change."""
        # 2 lists to track how often our callbacks get called
        specific_runs = []
        wildcard_runs = []
        wildercard_runs = []
        # Plain callables, @ha.callback functions, and coroutines are all
        # registered below to cover each supported listener flavor.
        def specific_run_callback(entity_id, old_state, new_state):
            specific_runs.append(1)
        track_state_change(
            self.hass, 'light.Bowl', specific_run_callback, 'on', 'off')
        @ha.callback
        def wildcard_run_callback(entity_id, old_state, new_state):
            wildcard_runs.append((old_state, new_state))
        track_state_change(self.hass, 'light.Bowl', wildcard_run_callback)
        @asyncio.coroutine
        def wildercard_run_callback(entity_id, old_state, new_state):
            wildercard_runs.append((old_state, new_state))
        track_state_change(self.hass, MATCH_ALL, wildercard_run_callback)
        # Adding state to state machine
        self.hass.states.set("light.Bowl", "on")
        self.hass.block_till_done()
        self.assertEqual(0, len(specific_runs))
        self.assertEqual(1, len(wildcard_runs))
        self.assertEqual(1, len(wildercard_runs))
        self.assertIsNone(wildcard_runs[-1][0])
        self.assertIsNotNone(wildcard_runs[-1][1])
        # Set same state should not trigger a state change/listener
        self.hass.states.set('light.Bowl', 'on')
        self.hass.block_till_done()
        self.assertEqual(0, len(specific_runs))
        self.assertEqual(1, len(wildcard_runs))
        self.assertEqual(1, len(wildercard_runs))
        # State change off -> on
        self.hass.states.set('light.Bowl', 'off')
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(2, len(wildcard_runs))
        self.assertEqual(2, len(wildercard_runs))
        # State change off -> off
        self.hass.states.set('light.Bowl', 'off', {"some_attr": 1})
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(3, len(wildcard_runs))
        self.assertEqual(3, len(wildercard_runs))
        # State change off -> on
        self.hass.states.set('light.Bowl', 'on')
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(4, len(wildcard_runs))
        self.assertEqual(4, len(wildercard_runs))
        self.hass.states.remove('light.bowl')
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(5, len(wildcard_runs))
        self.assertEqual(5, len(wildercard_runs))
        self.assertIsNotNone(wildcard_runs[-1][0])
        self.assertIsNone(wildcard_runs[-1][1])
        self.assertIsNotNone(wildercard_runs[-1][0])
        self.assertIsNone(wildercard_runs[-1][1])
        # Set state for different entity id
        self.hass.states.set('switch.kitchen', 'on')
        self.hass.block_till_done()
        self.assertEqual(1, len(specific_runs))
        self.assertEqual(5, len(wildcard_runs))
        self.assertEqual(6, len(wildercard_runs))
    def test_track_sunrise(self):
        """Test track the sunrise."""
        latitude = 32.87336
        longitude = 117.22743
        # Setup sun component
        self.hass.config.latitude = latitude
        self.hass.config.longitude = longitude
        setup_component(self.hass, sun.DOMAIN, {
            sun.DOMAIN: {sun.CONF_ELEVATION: 0}})
        # Get next sunrise/sunset
        astral = Astral()
        utc_now = dt_util.utcnow()
        # Scan forward from yesterday to find the first sunrise after now.
        mod = -1
        while True:
            next_rising = (astral.sunrise_utc(utc_now +
                timedelta(days=mod), latitude, longitude))
            if next_rising > utc_now:
                break
            mod += 1
        # Track sunrise
        runs = []
        unsub = track_sunrise(self.hass, lambda: runs.append(1))
        offset_runs = []
        offset = timedelta(minutes=30)
        unsub2 = track_sunrise(self.hass, lambda: offset_runs.append(1),
                               offset)
        # run tests
        self._send_time_changed(next_rising - offset)
        self.hass.block_till_done()
        self.assertEqual(0, len(runs))
        self.assertEqual(0, len(offset_runs))
        self._send_time_changed(next_rising)
        self.hass.block_till_done()
        self.assertEqual(1, len(runs))
        self.assertEqual(0, len(offset_runs))
        self._send_time_changed(next_rising + offset)
        self.hass.block_till_done()
        self.assertEqual(2, len(runs))
        self.assertEqual(1, len(offset_runs))
        # After unsubscribing, neither listener fires again.
        unsub()
        unsub2()
        self._send_time_changed(next_rising + offset)
        self.hass.block_till_done()
        self.assertEqual(2, len(runs))
        self.assertEqual(1, len(offset_runs))
    def test_track_sunset(self):
        """Test track the sunset."""
        latitude = 32.87336
        longitude = 117.22743
        # Setup sun component
        self.hass.config.latitude = latitude
        self.hass.config.longitude = longitude
        setup_component(self.hass, sun.DOMAIN, {
            sun.DOMAIN: {sun.CONF_ELEVATION: 0}})
        # Get next sunrise/sunset
        astral = Astral()
        utc_now = dt_util.utcnow()
        # Scan forward from yesterday to find the first sunset after now.
        mod = -1
        while True:
            next_setting = (astral.sunset_utc(utc_now +
                timedelta(days=mod), latitude, longitude))
            if next_setting > utc_now:
                break
            mod += 1
        # Track sunset
        runs = []
        unsub = track_sunset(self.hass, lambda: runs.append(1))
        offset_runs = []
        offset = timedelta(minutes=30)
        unsub2 = track_sunset(self.hass, lambda: offset_runs.append(1), offset)
        # Run tests
        self._send_time_changed(next_setting - offset)
        self.hass.block_till_done()
        self.assertEqual(0, len(runs))
        self.assertEqual(0, len(offset_runs))
        self._send_time_changed(next_setting)
        self.hass.block_till_done()
        self.assertEqual(1, len(runs))
        self.assertEqual(0, len(offset_runs))
        self._send_time_changed(next_setting + offset)
        self.hass.block_till_done()
        self.assertEqual(2, len(runs))
        self.assertEqual(1, len(offset_runs))
        # After unsubscribing, neither listener fires again.
        unsub()
        unsub2()
        self._send_time_changed(next_setting + offset)
        self.hass.block_till_done()
        self.assertEqual(2, len(runs))
        self.assertEqual(1, len(offset_runs))
    def _send_time_changed(self, now):
        """Send a time changed event.

        @param now: The datetime to report as the current time.
        """
        self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: now})
def test_periodic_task_minute(self):
    """Test periodic tasks per minute."""
    specific_runs = []
    unsub = track_utc_time_change(
        self.hass, lambda x: specific_runs.append(1), minute='/5')
    # Only minutes divisible by 5 should trigger the callback.
    for when, expected in [
            (datetime(2014, 5, 24, 12, 0, 0), 1),
            (datetime(2014, 5, 24, 12, 3, 0), 1),
            (datetime(2014, 5, 24, 12, 5, 0), 2)]:
        self._send_time_changed(when)
        self.hass.block_till_done()
        self.assertEqual(expected, len(specific_runs))
    # Once unsubscribed, a matching time must not fire again.
    unsub()
    self._send_time_changed(datetime(2014, 5, 24, 12, 5, 0))
    self.hass.block_till_done()
    self.assertEqual(2, len(specific_runs))
def test_periodic_task_hour(self):
    """Test periodic tasks per hour."""
    specific_runs = []
    unsub = track_utc_time_change(
        self.hass, lambda x: specific_runs.append(1), hour='/2')
    # Only even hours should trigger the callback.
    for when, expected in [
            (datetime(2014, 5, 24, 22, 0, 0), 1),
            (datetime(2014, 5, 24, 23, 0, 0), 1),
            (datetime(2014, 5, 24, 0, 0, 0), 2),
            (datetime(2014, 5, 25, 1, 0, 0), 2),
            (datetime(2014, 5, 25, 2, 0, 0), 3)]:
        self._send_time_changed(when)
        self.hass.block_till_done()
        self.assertEqual(expected, len(specific_runs))
    # Once unsubscribed, a matching time must not fire again.
    unsub()
    self._send_time_changed(datetime(2014, 5, 25, 2, 0, 0))
    self.hass.block_till_done()
    self.assertEqual(3, len(specific_runs))
def test_periodic_task_day(self):
    """Test periodic tasks per day."""
    specific_runs = []
    unsub = track_utc_time_change(
        self.hass, lambda x: specific_runs.append(1), day='/2')
    # Only even days should trigger the callback.
    for when, expected in [
            (datetime(2014, 5, 2, 0, 0, 0), 1),
            (datetime(2014, 5, 3, 12, 0, 0), 1),
            (datetime(2014, 5, 4, 0, 0, 0), 2)]:
        self._send_time_changed(when)
        self.hass.block_till_done()
        self.assertEqual(expected, len(specific_runs))
    # Once unsubscribed, a matching time must not fire again.
    unsub()
    self._send_time_changed(datetime(2014, 5, 4, 0, 0, 0))
    self.hass.block_till_done()
    self.assertEqual(2, len(specific_runs))
def test_periodic_task_year(self):
    """Test periodic tasks per year."""
    specific_runs = []
    unsub = track_utc_time_change(
        self.hass, lambda x: specific_runs.append(1), year='/2')
    # Only even years should trigger the callback.
    for when, expected in [
            (datetime(2014, 5, 2, 0, 0, 0), 1),
            (datetime(2015, 5, 2, 0, 0, 0), 1),
            (datetime(2016, 5, 2, 0, 0, 0), 2)]:
        self._send_time_changed(when)
        self.hass.block_till_done()
        self.assertEqual(expected, len(specific_runs))
    # Once unsubscribed, a matching time must not fire again.
    unsub()
    self._send_time_changed(datetime(2016, 5, 2, 0, 0, 0))
    self.hass.block_till_done()
    self.assertEqual(2, len(specific_runs))
def test_periodic_task_wrong_input(self):
    """Test periodic tasks with wrong input."""
    runs = []
    # An unparsable pattern ('/two') should never match any time,
    # so the callback must never run.
    track_utc_time_change(
        self.hass, lambda x: runs.append(1), year='/two')
    self._send_time_changed(datetime(2014, 5, 2, 0, 0, 0))
    self.hass.block_till_done()
    self.assertEqual(0, len(runs))
|
{
"content_hash": "d6f81d7ca332f4b9c39a48c93bd23727",
"timestamp": "",
"source": "github",
"line_count": 421,
"max_line_length": 79,
"avg_line_length": 33.87410926365796,
"alnum_prop": 0.6000280485239464,
"repo_name": "hexxter/home-assistant",
"id": "89c97434f8dc0219b022714fd8496579f5356a0c",
"size": "14261",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "tests/helpers/test_event.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1371597"
},
{
"name": "Python",
"bytes": "3699472"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "7255"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    # Removing M2M table for field comments on 'Question'
    db.delete_table(db.shorten_name(u'catalog_question_comments'))
    # Adding field 'Comment.question': a nullable FK replacing the
    # join table, with reverse accessor 'comments'.
    question_fk = self.gf('django.db.models.fields.related.ForeignKey')(
        blank=True, related_name='comments', null=True,
        to=orm['catalog.Question'])
    db.add_column(u'catalog_comment', 'question', question_fk,
                  keep_default=False)
def backwards(self, orm):
    # Adding M2M table for field comments on 'Question'
    join_table = db.shorten_name(u'catalog_question_comments')
    join_columns = (
        ('id', models.AutoField(verbose_name='ID', primary_key=True,
                                auto_created=True)),
        ('question', models.ForeignKey(orm['catalog.question'], null=False)),
        ('comment', models.ForeignKey(orm['catalog.comment'], null=False)),
    )
    db.create_table(join_table, join_columns)
    db.create_unique(join_table, ['question_id', 'comment_id'])
    # Deleting field 'Comment.question'
    db.delete_column(u'catalog_comment', 'question_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalog.answer': {
'Meta': {'ordering': "['-created']", 'object_name': 'Answer'},
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'answer'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Comment']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('tinymce.models.HTMLField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Question']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'catalog.article': {
'Meta': {'object_name': 'Article'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_time_staff_pick': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff_pick': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'new_user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.NewUser']", 'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {}),
'recommendation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.ArticleTag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.articleemail': {
'Meta': {'object_name': 'ArticleEmail'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'email_subscriptions'", 'null': 'True', 'to': "orm['catalog.ArticleTag']"}),
'temp_id': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.articletag': {
'Meta': {'object_name': 'ArticleTag'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url_snippet': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'catalog.cfistoreitem': {
'Meta': {'object_name': 'CfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.Product']", 'unique': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'cfi_store_item_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeCfiStoreItem']", 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.comment': {
'Meta': {'ordering': "['-added_time']", 'object_name': 'Comment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['catalog.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.favoritemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'FavoriteMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.image': {
'Meta': {'object_name': 'Image'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'full_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_s3': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['auth.User']"})
},
'catalog.instructablestep': {
'Meta': {'ordering': "['-step']", 'object_name': 'InstructableStep'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'body': ('tinymce.models.HTMLField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iid': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50', 'null': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'step': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'words': ('django.db.models.fields.IntegerField', [], {'default': '-1'})
},
'catalog.inventory': {
'Meta': {'unique_together': "(('part', 'space'),)", 'object_name': 'Inventory'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'part': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inventory_part'", 'to': "orm['catalog.Product']"}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'space': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inventory_space'", 'to': "orm['catalog.Space']"})
},
'catalog.like': {
'Meta': {'object_name': 'Like'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likearticle': {
'Meta': {'unique_together': "(('user', 'article'),)", 'object_name': 'LikeArticle'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Article']"}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likecfistoreitem': {
'Meta': {'unique_together': "(('user', 'cfi_store_item'),)", 'object_name': 'LikeCfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'cfi_store_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.CfiStoreItem']"}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likechannel': {
'Meta': {'unique_together': "(('user', 'channel'),)", 'object_name': 'LikeChannel'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ArticleTag']"}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likecomment': {
'Meta': {'unique_together': "(('user', 'comment'),)", 'object_name': 'LikeComment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Comment']"}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeimage': {
'Meta': {'unique_together': "(('user', 'image'),)", 'object_name': 'LikeImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likelisting': {
'Meta': {'unique_together': "(('user', 'listing'),)", 'object_name': 'LikeListing'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'listing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Listing']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'LikeMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'makeylikes'", 'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likenote': {
'Meta': {'unique_together': "(('user', 'note'),)", 'object_name': 'LikeNote'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'note': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Note']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproduct': {
'Meta': {'unique_together': "(('user', 'product'),)", 'object_name': 'LikeProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproductdescription': {
'Meta': {'unique_together': "(('user', 'product_description'),)", 'object_name': 'LikeProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product_description': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductDescription']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproductimage': {
'Meta': {'unique_together': "(('user', 'image'),)", 'object_name': 'LikeProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductImage']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproducttutorial': {
'Meta': {'unique_together': "(('user', 'tutorial', 'product'),)", 'object_name': 'LikeProductTutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeshop': {
'Meta': {'unique_together': "(('user', 'shop'),)", 'object_name': 'LikeShop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likevideo': {
'Meta': {'unique_together': "(('user', 'video'),)", 'object_name': 'LikeVideo'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fb_like_id': ('django.db.models.fields.CharField', [], {'default': "'-1'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Video']"})
},
'catalog.list': {
'Meta': {'object_name': 'List'},
'access': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'access'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.ListItem']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.listgroup': {
'Meta': {'object_name': 'ListGroup'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.List']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.listing': {
'Meta': {'object_name': 'Listing'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'content': ('tinymce.models.HTMLField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'catalog.listitem': {
'Meta': {'object_name': 'ListItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.location': {
'Meta': {'object_name': 'Location'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.logidenticalproduct': {
'Meta': {'object_name': 'LogIdenticalProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product1'", 'to': "orm['catalog.Product']"}),
'product2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product2'", 'to': "orm['catalog.Product']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.makey': {
'Meta': {'object_name': 'Makey'},
'about': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_time_staff_pick': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'as_part': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'as_makey'", 'null': 'True', 'to': "orm['catalog.Product']"}),
'as_part_new': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'as_makey'", 'null': 'True', 'to': "orm['catalog.NewProduct']"}),
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaborators'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeycomments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Comment']"}),
'cover_pic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'credits': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'derived_from': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'forked_as'", 'null': 'True', 'to': "orm['catalog.Makey']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'documentations': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeydocumentations'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Documentation']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff_pick': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'made_in': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'makeys_made_in'", 'null': 'True', 'to': "orm['catalog.Space']"}),
'mentors': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'modules_used': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'used_in'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'new_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeys_parts'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewProduct']"}),
'new_tools': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeys_tools'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewProduct']"}),
'new_users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeys'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewUser']"}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeynotes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Note']"}),
'removed_collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makey_removed'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyvideos'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Video']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'why': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'catalog.makeyimage': {
'Meta': {'object_name': 'MakeyImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey_id': ('django.db.models.fields.IntegerField', [], {}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.newinventory': {
'Meta': {'unique_together': "(('part', 'space'),)", 'object_name': 'NewInventory'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'part': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'new_inventory_part'", 'to': "orm['catalog.NewProduct']"}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'space': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'new_inventory_space'", 'to': "orm['catalog.Space']"})
},
'catalog.newproduct': {
'Meta': {'object_name': 'NewProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.newuser': {
'Meta': {'object_name': 'NewUser'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.note': {
'Meta': {'ordering': "['order']", 'object_name': 'Note'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.product': {
'Meta': {'object_name': 'Product'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identicalto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makeys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'partsused'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'makeys_as_tools': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'tools_used'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sku': ('django.db.models.fields.IntegerField', [], {}),
'space_as_tools': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tools_in_space'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Space']"}),
'tutorials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'products'", 'blank': 'True', 'to': "orm['catalog.Tutorial']"})
},
'catalog.productdescription': {
'Meta': {'object_name': 'ProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'catalog.productimage': {
'Meta': {'object_name': 'ProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.productreview': {
'Meta': {'object_name': 'ProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product_reviews'", 'to': "orm['catalog.Product']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.question': {
'Meta': {'object_name': 'Question'},
'accepted_answer': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'answer_of'", 'null': 'True', 'blank': 'True', 'to': "orm['catalog.Answer']"}),
'accepted_time': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('tinymce.models.HTMLField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.shop': {
'Meta': {'object_name': 'Shop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.shopreview': {
'Meta': {'object_name': 'ShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shop_reviews'", 'to': "orm['catalog.Shop']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.space': {
'Meta': {'object_name': 'Space'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_admins'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'date_of_founding': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'space_inventory'", 'symmetrical': 'False', 'through': "orm['catalog.Inventory']", 'to': "orm['catalog.Product']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'kind': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'last_updated_external': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '10', 'blank': 'True'}),
'logo': ('django.db.models.fields.URLField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '10', 'blank': 'True'}),
'map_zoom_level': ('django.db.models.fields.IntegerField', [], {'default': '13'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_members'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'membership_fee': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'new_inventory': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'space_new_inventory'", 'symmetrical': 'False', 'through': "orm['catalog.NewInventory']", 'to': "orm['catalog.NewProduct']"}),
'new_members': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_new_members'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewUser']"}),
'new_tools': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_new_tools'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewProduct']"}),
'no_of_members': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'twitter': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'catalog.spacereview': {
'Meta': {'object_name': 'SpaceReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'space': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'space_reviews'", 'to': "orm['catalog.Space']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.textdocumentation': {
'Meta': {'object_name': 'TextDocumentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'body': ('tinymce.models.HTMLField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'text_documentations'", 'null': 'True', 'to': "orm['catalog.Makey']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.TextField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.toindexstore': {
'Meta': {'object_name': 'ToIndexStore'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.topmakeys': {
'Meta': {'object_name': 'TopMakeys'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.topproducts': {
'Meta': {'object_name': 'TopProducts'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.topshops': {
'Meta': {'object_name': 'TopShops'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.toptutorials': {
'Meta': {'object_name': 'TopTutorials'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.topusers': {
'Meta': {'object_name': 'TopUsers'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.tutorial': {
'Meta': {'object_name': 'Tutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tutorialimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.upfile': {
'Meta': {'object_name': 'UpFile'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'filetype': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'files'", 'null': 'True', 'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.userflags': {
'Meta': {'object_name': 'UserFlags'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_maker_intro': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_makey_intro': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.userinteraction': {
'Meta': {'object_name': 'UserInteraction'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.IntegerField', [], {}),
'event_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'aboutme': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'blog_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'college': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'facebook_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'following': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'followers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.UserProfile']"}),
'github_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructables_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'linkedin_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'Bangalore, India'", 'max_length': '255'}),
'membership': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'patent': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'profile_pic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'stackoverflow_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'twitter_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"}),
'website_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'yt_channel_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'})
},
'catalog.video': {
'Meta': {'object_name': 'Video'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'embed_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'site': ('django.db.models.fields.IntegerField', [], {}),
'thumb_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.votemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'VoteMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.voteproductreview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.voteshopreview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ShopReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.votespacereview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteSpaceReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.SpaceReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.votetutorial': {
'Meta': {'unique_together': "(('user', 'tutorial'),)", 'object_name': 'VoteTutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['catalog']
|
{
"content_hash": "32a4722277fa8358541349f6b13a819e",
"timestamp": "",
"source": "github",
"line_count": 872,
"max_line_length": 229,
"avg_line_length": 87.77522935779817,
"alnum_prop": 0.5450744708649072,
"repo_name": "Makeystreet/makeystreet",
"id": "d94537685b929c1f774b15a80c3157e067d0d608",
"size": "76564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "woot/apps/catalog/migrations/0144_auto__add_field_comment_question.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1893401"
},
{
"name": "HTML",
"bytes": "2253311"
},
{
"name": "JavaScript",
"bytes": "1698946"
},
{
"name": "Python",
"bytes": "9010343"
}
],
"symlink_target": ""
}
|
"""A module containing optimization routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.opt.python.training.drop_stale_gradient_optimizer import *
from tensorflow.contrib.opt.python.training.external_optimizer import *
from tensorflow.contrib.opt.python.training.lazy_adam_optimizer import *
from tensorflow.contrib.opt.python.training.nadam_optimizer import *
from tensorflow.contrib.opt.python.training.moving_average_optimizer import *
from tensorflow.contrib.opt.python.training.variable_clipping_optimizer import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
# Public API manifest for tf.contrib.opt.  Only the names listed here
# survive the remove_undocumented() call below; everything else pulled in
# by the wildcard imports above is stripped from the module namespace.
_allowed_symbols = ['DropStaleGradientOptimizer',
                    'ExternalOptimizerInterface',
                    'LazyAdamOptimizer',
                    'NadamOptimizer',
                    'MovingAverageOptimizer',
                    'ScipyOptimizerInterface',
                    'VariableClippingOptimizer']
# Prune this module's namespace down to the allowed symbols.
remove_undocumented(__name__, _allowed_symbols)
|
{
"content_hash": "69d2b88fad1d0bf92f88ef1d14dcd139",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 82,
"avg_line_length": 43.03846153846154,
"alnum_prop": 0.7292225201072386,
"repo_name": "mortada/tensorflow",
"id": "be12f934a46358a0ce519de65ad663811bc8618b",
"size": "1808",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/opt/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7481"
},
{
"name": "C",
"bytes": "183351"
},
{
"name": "C++",
"bytes": "24162971"
},
{
"name": "CMake",
"bytes": "160908"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "832877"
},
{
"name": "HTML",
"bytes": "1166817"
},
{
"name": "Java",
"bytes": "307140"
},
{
"name": "JavaScript",
"bytes": "14005"
},
{
"name": "Jupyter Notebook",
"bytes": "1833654"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37393"
},
{
"name": "Objective-C",
"bytes": "7037"
},
{
"name": "Objective-C++",
"bytes": "64142"
},
{
"name": "Protocol Buffer",
"bytes": "218430"
},
{
"name": "Python",
"bytes": "21378026"
},
{
"name": "Shell",
"bytes": "336861"
},
{
"name": "TypeScript",
"bytes": "1593623"
}
],
"symlink_target": ""
}
|
from __future__ import division
import math
class Vector(object):
    """A simple 2D vector supporting tuple-style access and comparison."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __iter__(self):
        # Allows unpacking: x, y = vector
        yield self.x
        yield self.y

    def __getitem__(self, key):
        # Tuple-style indexing: vector[0] is x, vector[1] is y.
        return (self.x, self.y)[key]

    def __eq__(self, other):
        # Compare coordinate-wise; works against plain tuples/lists too.
        return tuple(self) == tuple(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return 'Vector(x: %d, y: %d)' % (self.x, self.y)
class Rect(object):
    """An axis-aligned rectangle defined by left/top/right/bottom edges.

    Behaves like a 4-tuple (left, top, right, bottom) for iteration,
    indexing and equality comparison.
    """

    def __init__(self, left, top, right, bottom):
        self.left = left
        self.top = top
        self.right = right
        self.bottom = bottom

    def _get_size(self):
        # Width/height derived from the edges.
        return Vector(self.right - self.left, self.bottom - self.top)

    def _set_size(self, new_size):
        # Resize around the current centre point.
        cx, cy = self.centroid
        self.left = cx - new_size[0] / 2
        self.right = cx + new_size[0] / 2
        self.top = cy - new_size[1] / 2
        self.bottom = cy + new_size[1] / 2

    size = property(_get_size, _set_size)

    @property
    def width(self):
        return self.size.x

    @property
    def height(self):
        return self.size.y

    def _get_centroid(self):
        return Vector((self.left + self.right) / 2, (self.top + self.bottom) / 2)

    def _set_centroid(self, new_centroid):
        # Capture the size first; it is derived from the edges we mutate.
        w, h = self.size
        self.left = new_centroid[0] - w / 2
        self.right = new_centroid[0] + w / 2
        self.top = new_centroid[1] - h / 2
        self.bottom = new_centroid[1] + h / 2

    centroid = property(_get_centroid, _set_centroid)

    @property
    def x(self):
        return self.centroid.x

    @property
    def y(self):
        return self.centroid.y

    @property
    def centroid_x(self):
        # Backwards-compatible alias for centroid.x
        return self.centroid.x

    @property
    def centroid_y(self):
        # Backwards-compatible alias for centroid.y
        return self.centroid.y

    def as_tuple(self):
        # Kept for backwards compatibility; the class itself is tuple-like.
        return self.left, self.top, self.right, self.bottom

    def clone(self):
        # type(self) so subclasses clone to their own type.
        return type(self)(self.left, self.top, self.right, self.bottom)

    def round(self):
        """
        Returns a new rect with all attributes rounded to integers
        """
        rounded = self.clone()
        # Round left/top down, right/bottom up, so the result covers self.
        rounded.left = int(math.floor(rounded.left))
        rounded.top = int(math.floor(rounded.top))
        rounded.right = int(math.ceil(rounded.right))
        rounded.bottom = int(math.ceil(rounded.bottom))
        return rounded

    def move_to_clamp(self, other):
        """
        Moves this rect so it is completely covered by the rect in "other" and
        returns a new Rect instance.
        """
        other = Rect(*other)
        moved = self.clone()
        if moved.left < other.left:
            moved.right += other.left - moved.left
            moved.left = other.left
        if moved.top < other.top:
            moved.bottom += other.top - moved.top
            moved.top = other.top
        if moved.right > other.right:
            moved.left += other.right - moved.right
            moved.right = other.right
        if moved.bottom > other.bottom:
            moved.top += other.bottom - moved.bottom
            moved.bottom = other.bottom
        return moved

    def move_to_cover(self, other):
        """
        Moves this rect so it completely covers the rect specified in the
        "other" parameter and returns a new Rect instance.
        """
        other = Rect(*other)
        moved = self.clone()
        if moved.left > other.left:
            moved.right += other.left - moved.left
            moved.left = other.left
        if moved.top > other.top:
            moved.bottom += other.top - moved.top
            moved.top = other.top
        if moved.right < other.right:
            moved.left += other.right - moved.right
            moved.right = other.right
        if moved.bottom < other.bottom:
            moved.top += other.bottom - moved.bottom
            moved.bottom = other.bottom
        return moved

    def __iter__(self):
        yield self.left
        yield self.top
        yield self.right
        yield self.bottom

    def __getitem__(self, key):
        return (self.left, self.top, self.right, self.bottom)[key]

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return 'Rect(left: %d, top: %d, right: %d, bottom: %d)' % (
            self.left, self.top, self.right, self.bottom
        )

    @classmethod
    def from_point(cls, x, y, width, height):
        """Build a rect of the given size centred on (x, y)."""
        return cls(
            x - width / 2,
            y - height / 2,
            x + width / 2,
            y + height / 2,
        )
|
{
"content_hash": "8c023e985861e2ca2da47a7870de3f1e",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 81,
"avg_line_length": 26.833333333333332,
"alnum_prop": 0.5507914245642156,
"repo_name": "gogobook/wagtail",
"id": "e264e30818c232e00021a74b21bd085b7376ad74",
"size": "4991",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "wagtail/wagtailimages/rect.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "155100"
},
{
"name": "HTML",
"bytes": "267043"
},
{
"name": "JavaScript",
"bytes": "109586"
},
{
"name": "Makefile",
"bytes": "548"
},
{
"name": "Python",
"bytes": "2059166"
},
{
"name": "Shell",
"bytes": "7388"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import sys
from PIL import Image
try:
from PIL import _imagingcms
except ImportError as ex:
# Allow error import for doc purposes, but error out when accessing
# anything in core.
from _util import deferred_error
_imagingcms = deferred_error(ex)
from PIL._util import isStringType
DESCRIPTION = """
pyCMS
a Python / PIL interface to the littleCMS ICC Color Management System
Copyright (C) 2002-2003 Kevin Cazabon
kevin@cazabon.com
http://www.cazabon.com
pyCMS home page: http://www.cazabon.com/pyCMS
littleCMS home page: http://www.littlecms.com
(littleCMS is Copyright (C) 1998-2001 Marti Maria)
Originally released under LGPL. Graciously donated to PIL in
March 2009, for distribution under the standard PIL license
The pyCMS.py module provides a "clean" interface between Python/PIL and
pyCMSdll, taking care of some of the more complex handling of the direct
pyCMSdll functions, as well as error-checking and making sure that all
relevant data is kept together.
While it is possible to call pyCMSdll functions directly, it's not highly
recommended.
Version History:
1.0.0 pil Oct 2013 Port to LCMS 2.
0.1.0 pil mod March 10, 2009
Renamed display profile to proof profile. The proof
profile is the profile of the device that is being
simulated, not the profile of the device which is
actually used to display/print the final simulation
(that'd be the output profile) - also see LCMSAPI.txt
input colorspace -> using 'renderingIntent' -> proof
colorspace -> using 'proofRenderingIntent' -> output
colorspace
Added LCMS FLAGS support.
Added FLAGS["SOFTPROOFING"] as default flag for
buildProofTransform (otherwise the proof profile/intent
would be ignored).
0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms
0.0.2 alpha Jan 6, 2002
Added try/except statements around type() checks of
potential CObjects... Python won't let you use type()
on them, and raises a TypeError (stupid, if you ask
me!)
Added buildProofTransformFromOpenProfiles() function.
Additional fixes in DLL, see DLL code for details.
0.0.1 alpha first public release, Dec. 26, 2002
Known to-do list with current version (of Python interface, not pyCMSdll):
none
"""
VERSION = "1.0.0 pil"
# --------------------------------------------------------------------.
# Alias kept for compatibility: callers historically accessed the C
# extension as ImageCms.core.
core = _imagingcms
#
# intent/direction values
# Rendering intents accepted by the transform builders (0-3).
INTENT_PERCEPTUAL = 0
INTENT_RELATIVE_COLORIMETRIC = 1
INTENT_SATURATION = 2
INTENT_ABSOLUTE_COLORIMETRIC = 3
# Profile directions.
DIRECTION_INPUT = 0
DIRECTION_OUTPUT = 1
DIRECTION_PROOF = 2
#
# flags
# Bitmask flag values for the ``flags`` arguments below.  Most entries are
# plain ints; GRIDPOINTS is a callable that encodes a grid-point count.
FLAGS = {
    "MATRIXINPUT": 1,
    "MATRIXOUTPUT": 2,
    "MATRIXONLY": (1 | 2),
    "NOWHITEONWHITEFIXUP": 4,  # Don't hot fix scum dot
    # Don't create prelinearization tables on precalculated transforms
    # (internal use):
    "NOPRELINEARIZATION": 16,
    "GUESSDEVICECLASS": 32,  # Guess device class (for transform2devicelink)
    "NOTCACHE": 64,  # Inhibit 1-pixel cache
    "NOTPRECALC": 256,
    "NULLTRANSFORM": 512,  # Don't transform anyway
    "HIGHRESPRECALC": 1024,  # Use more memory to give better accuracy
    "LOWRESPRECALC": 2048,  # Use less memory to minimize resources
    "WHITEBLACKCOMPENSATION": 8192,
    "BLACKPOINTCOMPENSATION": 8192,
    "GAMUTCHECK": 4096,  # Out of Gamut alarm
    "SOFTPROOFING": 16384,  # Do softproofing
    "PRESERVEBLACK": 32768,  # Black preservation
    "NODEFAULTRESOURCEDEF": 16777216,  # CRD special
    "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16  # Gridpoints
}
# Largest legal ``flags`` value: the bitwise OR of every integer flag
# above (non-int entries such as GRIDPOINTS are skipped).
_MAX_FLAG = 0
for flag in FLAGS.values():
    if isinstance(flag, int):
        _MAX_FLAG = _MAX_FLAG | flag
# --------------------------------------------------------------------.
# Experimental PIL-level API
# --------------------------------------------------------------------.
##
# Profile.
class ImageCmsProfile(object):
    """Wrapper around a low-level lcms profile object.

    Records the source filename (when known) and exposes the raw
    profile via the ``profile`` attribute.
    """

    def __init__(self, profile):
        """
        :param profile: Either a string representing a filename,
                a file like object containing a profile or a
                low-level profile object
        :raises TypeError: if *profile* is none of the above.
        """
        if isStringType(profile):
            # Filename string: open via lcms and remember the path.
            self._set(core.profile_open(profile), profile)
        elif hasattr(profile, "read"):
            # File-like object: parse its raw bytes (no filename recorded).
            self._set(core.profile_frombytes(profile.read()))
        elif isinstance(profile, _imagingcms.CmsProfile):
            # Already a low-level profile object; wrap it as-is.
            self._set(profile)
        else:
            raise TypeError("Invalid type for Profile")

    def _set(self, profile, filename=None):
        # Store the profile and its (optional) source filename.
        self.profile = profile
        self.filename = filename
        if profile:
            # NOTE(review): both branches store None; the commented-out
            # attribute reads suggest metadata lookups were deliberately
            # disabled — confirm before collapsing this if/else.
            self.product_name = None  # profile.product_name
            self.product_info = None  # profile.product_info
        else:
            self.product_name = None
            self.product_info = None

    def tobytes(self):
        """
        Returns the profile in a format suitable for embedding in
        saved images.

        :returns: a bytes object containing the ICC profile.
        """
        return core.profile_tobytes(self.profile)
class ImageCmsTransform(Image.ImagePointHandler):
    """
    Transform. This can be used with the procedural API, or with the standard
    Image.point() method.

    Will return the output profile in the output.info['icc_profile'].
    """

    def __init__(self, input, output, input_mode, output_mode,
                 intent=INTENT_PERCEPTUAL, proof=None,
                 proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0):
        # Without a proof profile, build a plain input->output transform;
        # with one, build a soft-proofing transform that simulates `proof`.
        if proof is None:
            self.transform = core.buildTransform(
                input.profile, output.profile,
                input_mode, output_mode,
                intent,
                flags
            )
        else:
            self.transform = core.buildProofTransform(
                input.profile, output.profile, proof.profile,
                input_mode, output_mode,
                intent, proof_intent,
                flags
            )
        # Note: inputMode and outputMode are for pyCMS compatibility only
        self.input_mode = self.inputMode = input_mode
        self.output_mode = self.outputMode = output_mode
        self.output_profile = output

    def point(self, im):
        # Hook for the Image.point() protocol (via Image.ImagePointHandler).
        return self.apply(im)

    def apply(self, im, imOut=None):
        # Apply the transform, writing into imOut (allocated here when not
        # supplied) and tagging it with the output ICC profile.
        im.load()
        if imOut is None:
            imOut = Image.new(self.output_mode, im.size, None)
        self.transform.apply(im.im.id, imOut.im.id)
        imOut.info['icc_profile'] = self.output_profile.tobytes()
        return imOut

    def apply_in_place(self, im):
        # In-place variant: the image must already be in the output mode,
        # since the pixel buffer cannot change layout.
        im.load()
        if im.mode != self.output_mode:
            raise ValueError("mode mismatch")  # wrong output mode
        self.transform.apply(im.im.id, im.im.id)
        im.info['icc_profile'] = self.output_profile.tobytes()
        return im
def get_display_profile(handle=None):
    """ (experimental) Fetches the profile for the current display device.

    :param handle: on Windows, an optional ImageWin.HDC device context
        (or raw handle) identifying the display to query.
    :returns: None if the profile is not known.
    """
    if sys.platform == "win32":
        from PIL import ImageWin
        if isinstance(handle, ImageWin.HDC):
            # HDC wraps a device context; the second argument flags this.
            profile = core.get_display_profile_win32(handle, 1)
        else:
            profile = core.get_display_profile_win32(handle or 0)
    else:
        try:
            get = _imagingcms.get_display_profile
        except AttributeError:
            # Build without display-profile support on this platform.
            return None
        else:
            profile = get()
    # NOTE(review): if the lookup yields None, ImageCmsProfile raises
    # TypeError rather than returning None as documented — confirm intended.
    return ImageCmsProfile(profile)
# --------------------------------------------------------------------.
# pyCMS compatible layer
# --------------------------------------------------------------------.
class PyCMSError(Exception):
    """(pyCMS) Exception class.

    Used for all errors raised by the pyCMS API.
    """
def profileToProfile(
        im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL,
        outputMode=None, inPlace=0, flags=0):
    """
    (pyCMS) Applies an ICC transformation to a given image, mapping from
    inputProfile to outputProfile.

    If the input or output profiles specified are not valid filenames, a
    PyCMSError will be raised. If inPlace == TRUE and outputMode != im.mode,
    a PyCMSError will be raised. If an error occurs during application of
    the profiles, a PyCMSError will be raised. If outputMode is not a mode
    supported by the outputProfile (or by pyCMS), a PyCMSError will be
    raised.

    This function applies an ICC transformation to im from inputProfile's
    color space to outputProfile's color space using the specified rendering
    intent to decide how to handle out-of-gamut colors.

    OutputMode can be used to specify that a color mode conversion is to
    be done using these profiles, but the specified profiles must be able
    to handle that mode. I.e., if converting im from RGB to CMYK using
    profiles, the input profile must handle RGB data, and the output
    profile must handle CMYK data.

    :param im: An open PIL image object (i.e. Image.new(...) or
        Image.open(...), etc.)
    :param inputProfile: String, as a valid filename path to the ICC input
        profile you wish to use for this image, or a profile object
    :param outputProfile: String, as a valid filename path to the ICC output
        profile you wish to use for this image, or a profile object
    :param renderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for the transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)

        see the pyCMS documentation for details on rendering intents and what
        they do.
    :param outputMode: A valid PIL mode for the output image (i.e. "RGB",
        "CMYK", etc.). Note: if rendering the image "inPlace", outputMode
        MUST be the same mode as the input, or omitted completely. If
        omitted, the outputMode will be the same as the mode of the input
        image (im.mode)
    :param inPlace: Boolean (1 = True, None or 0 = False). If True, the
        original image is modified in-place, and None is returned. If False
        (default), a new Image object is returned with the transform applied.
    :param flags: Integer (0-...) specifying additional flags
    :returns: Either None or a new PIL image object, depending on value of
        inPlace
    :exception PyCMSError:
    """
    if outputMode is None:
        # Default to a same-mode conversion.
        outputMode = im.mode
    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
        # Bug fix: this message was previously built with ``+`` between a
        # str and an int, which raised TypeError instead of PyCMSError.
        raise PyCMSError(
            "flags must be an integer between 0 and %s" % _MAX_FLAG)
    try:
        # Coerce filename / file-like arguments into profile objects.
        if not isinstance(inputProfile, ImageCmsProfile):
            inputProfile = ImageCmsProfile(inputProfile)
        if not isinstance(outputProfile, ImageCmsProfile):
            outputProfile = ImageCmsProfile(outputProfile)
        transform = ImageCmsTransform(
            inputProfile, outputProfile, im.mode, outputMode,
            renderingIntent, flags=flags
        )
        if inPlace:
            transform.apply_in_place(im)
            imOut = None
        else:
            imOut = transform.apply(im)
    except (IOError, TypeError, ValueError) as v:
        # Normalize lcms / IO failures to the pyCMS exception type.
        raise PyCMSError(v)
    return imOut
def getOpenProfile(profileFilename):
    """
    (pyCMS) Opens an ICC profile file.

    The PyCMSProfile object can be passed back into pyCMS for use in creating
    transforms and such (as in ImageCms.buildTransformFromOpenProfiles()).

    If profileFilename is not a valid filename for an ICC profile, a
    PyCMSError will be raised.

    :param profileFilename: String, as a valid filename path to the ICC
        profile you wish to open, or a file-like object.
    :returns: A CmsProfile class object.
    :exception PyCMSError:
    """
    try:
        return ImageCmsProfile(profileFilename)
    except (IOError, TypeError, ValueError) as v:
        # Normalize open/parse failures to the pyCMS exception type.
        raise PyCMSError(v)
def buildTransform(
        inputProfile, outputProfile, inMode, outMode,
        renderingIntent=INTENT_PERCEPTUAL, flags=0):
    """
    (pyCMS) Builds an ICC transform mapping from the inputProfile to the
    outputProfile. Use applyTransform to apply the transform to a given
    image.

    If the input or output profiles specified are not valid filenames, a
    PyCMSError will be raised. If an error occurs during creation of the
    transform, a PyCMSError will be raised.

    If inMode or outMode are not a mode supported by the outputProfile (or
    by pyCMS), a PyCMSError will be raised.

    This function builds and returns an ICC transform from the inputProfile
    to the outputProfile using the renderingIntent to determine what to do
    with out-of-gamut colors. It will ONLY work for converting images that
    are in inMode to images that are in outMode color format (PIL mode,
    i.e. "RGB", "RGBA", "CMYK", etc.).

    Building the transform is a fair part of the overhead in
    ImageCms.profileToProfile(), so if you're planning on converting multiple
    images using the same input/output settings, this can save you time.
    Once you have a transform object, it can be used with
    ImageCms.applyProfile() to convert images without the need to re-compute
    the lookup table for the transform.

    The reason pyCMS returns a class object rather than a handle directly
    to the transform is that it needs to keep track of the PIL input/output
    modes that the transform is meant for. These attributes are stored in
    the "inMode" and "outMode" attributes of the object (which can be
    manually overridden if you really want to, but I don't know of any
    time that would be of use, or would even work).

    :param inputProfile: String, as a valid filename path to the ICC input
        profile you wish to use for this transform, or a profile object
    :param outputProfile: String, as a valid filename path to the ICC output
        profile you wish to use for this transform, or a profile object
    :param inMode: String, as a valid PIL mode that the appropriate profile
        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    :param outMode: String, as a valid PIL mode that the appropriate profile
        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    :param renderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for the transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)

        see the pyCMS documentation for details on rendering intents and what
        they do.
    :param flags: Integer (0-...) specifying additional flags
    :returns: A CmsTransform class object.
    :exception PyCMSError:
    """
    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
        # Bug fix: this message was previously built with ``+`` between a
        # str and an int, which raised TypeError instead of PyCMSError.
        raise PyCMSError(
            "flags must be an integer between 0 and %s" % _MAX_FLAG)
    try:
        # Coerce filename / file-like arguments into profile objects.
        if not isinstance(inputProfile, ImageCmsProfile):
            inputProfile = ImageCmsProfile(inputProfile)
        if not isinstance(outputProfile, ImageCmsProfile):
            outputProfile = ImageCmsProfile(outputProfile)
        return ImageCmsTransform(
            inputProfile, outputProfile, inMode, outMode,
            renderingIntent, flags=flags)
    except (IOError, TypeError, ValueError) as v:
        # Normalize lcms / IO failures to the pyCMS exception type.
        raise PyCMSError(v)
def buildProofTransform(
        inputProfile, outputProfile, proofProfile, inMode, outMode,
        renderingIntent=INTENT_PERCEPTUAL,
        proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC,
        flags=FLAGS["SOFTPROOFING"]):
    """
    (pyCMS) Builds an ICC transform mapping from the inputProfile to the
    outputProfile, but tries to simulate the result that would be
    obtained on the proofProfile device.

    If the input, output, or proof profiles specified are not valid
    filenames, a PyCMSError will be raised.

    If an error occurs during creation of the transform, a PyCMSError will
    be raised.

    If inMode or outMode are not a mode supported by the outputProfile
    (or by pyCMS), a PyCMSError will be raised.

    This function builds and returns an ICC transform from the inputProfile
    to the outputProfile, but tries to simulate the result that would be
    obtained on the proofProfile device using renderingIntent and
    proofRenderingIntent to determine what to do with out-of-gamut
    colors. This is known as "soft-proofing". It will ONLY work for
    converting images that are in inMode to images that are in outMode
    color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.).

    Usage of the resulting transform object is exactly the same as with
    ImageCms.buildTransform().

    Proof profiling is generally used when using an output device to get a
    good idea of what the final printed/displayed image would look like on
    the proofProfile device when it's quicker and easier to use the
    output device for judging color. Generally, this means that the
    output device is a monitor, or a dye-sub printer (etc.), and the simulated
    device is something more expensive, complicated, or time consuming
    (making it difficult to make a real print for color judgement purposes).

    Soft-proofing basically functions by adjusting the colors on the
    output device to match the colors of the device being simulated. However,
    when the simulated device has a much wider gamut than the output
    device, you may obtain marginal results.

    :param inputProfile: String, as a valid filename path to the ICC input
        profile you wish to use for this transform, or a profile object
    :param outputProfile: String, as a valid filename path to the ICC output
        (monitor, usually) profile you wish to use for this transform, or a
        profile object
    :param proofProfile: String, as a valid filename path to the ICC proof
        profile you wish to use for this transform, or a profile object
    :param inMode: String, as a valid PIL mode that the appropriate profile
        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    :param outMode: String, as a valid PIL mode that the appropriate profile
        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    :param renderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for the input->proof (simulated) transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)

        see the pyCMS documentation for details on rendering intents and what
        they do.
    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent
        you wish to use for proof->output transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)

        see the pyCMS documentation for details on rendering intents and what
        they do.
    :param flags: Integer (0-...) specifying additional flags
    :returns: A CmsTransform class object.
    :exception PyCMSError:
    """
    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
        # Bug fix: this message was previously built with ``+`` between a
        # str and an int, which raised TypeError instead of PyCMSError.
        raise PyCMSError(
            "flags must be an integer between 0 and %s" % _MAX_FLAG)
    try:
        # Coerce filename / file-like arguments into profile objects.
        if not isinstance(inputProfile, ImageCmsProfile):
            inputProfile = ImageCmsProfile(inputProfile)
        if not isinstance(outputProfile, ImageCmsProfile):
            outputProfile = ImageCmsProfile(outputProfile)
        if not isinstance(proofProfile, ImageCmsProfile):
            proofProfile = ImageCmsProfile(proofProfile)
        return ImageCmsTransform(
            inputProfile, outputProfile, inMode, outMode, renderingIntent,
            proofProfile, proofRenderingIntent, flags)
    except (IOError, TypeError, ValueError) as v:
        # Normalize lcms / IO failures to the pyCMS exception type.
        raise PyCMSError(v)
# pyCMS compatibility aliases: profiles returned by getOpenProfile() are
# ordinary ImageCmsProfile objects, so the same builders apply.
buildTransformFromOpenProfiles = buildTransform
buildProofTransformFromOpenProfiles = buildProofTransform
def applyTransform(im, transform, inPlace=0):
    """
    (pyCMS) Applies a transform to a given image.

    A PyCMSError is raised if im.mode does not match transform.inMode, if
    inPlace is true while transform.inMode != transform.outMode, if any of
    the modes involved are unsupported by the underlying CMS, or if the
    transform fails while being applied.

    A pre-calculated transform (from ImageCms.buildTransform() or
    ImageCms.buildTransformFromOpenProfiles()) may be reused for many
    images, avoiding repeated transform construction costs.

    In-place conversion is only possible when transform.inMode and
    transform.outMode agree, since different modes may use differently
    sized pixel buffers.

    :param im: A PIL Image object; im.mode must equal the transform's inMode.
    :param transform: A valid CmsTransform class object
    :param inPlace: Bool (1 == True, 0 or None == False). If True, im is
        modified in place and None is returned; if False, im is left
        untouched and a new converted Image object is returned.
    :returns: None when inPlace is true, otherwise a new PIL Image object.
        The profile will be returned in the image's info['icc_profile'].
    :exception PyCMSError:
    """
    try:
        if inPlace:
            # Mutate the caller's image; by convention nothing is returned.
            transform.apply_in_place(im)
            return None
        # Default path: hand back a converted copy, im is unchanged.
        return transform.apply(im)
    except (TypeError, ValueError) as v:
        raise PyCMSError(v)
def createProfile(colorSpace, colorTemp=-1):
    """
    (pyCMS) Creates a profile.

    If colorSpace not in ["LAB", "XYZ", "sRGB"], a PyCMSError is raised.
    If using LAB and colorTemp is not numeric, a PyCMSError is raised.
    If an error occurs while creating the profile, a PyCMSError is raised.

    Use this function to create common profiles on-the-fly instead of
    having to supply a profile on disk and knowing the path to it. It
    returns a normal CmsProfile object that can be passed to
    ImageCms.buildTransformFromOpenProfiles() to create a transform to apply
    to images.

    :param colorSpace: String, the color space of the profile you wish to
        create. Currently only "LAB", "XYZ", and "sRGB" are supported.
    :param colorTemp: Positive integer for the white point for the profile, in
        degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50
        illuminant if omitted (5000k). colorTemp is ONLY applied to LAB
        profiles, and is ignored for XYZ and sRGB.
    :returns: A CmsProfile class object
    :exception PyCMSError:
    """
    if colorSpace not in ["LAB", "XYZ", "sRGB"]:
        raise PyCMSError(
            "Color space not supported for on-the-fly profile creation (%s)"
            % colorSpace)
    if colorSpace == "LAB":
        try:
            colorTemp = float(colorTemp)
        except (TypeError, ValueError):
            # Narrowed from a bare except: only conversion failures should
            # map to PyCMSError; a bare clause would also swallow
            # KeyboardInterrupt/SystemExit.
            raise PyCMSError(
                "Color temperature must be numeric, \"%s\" not valid"
                % colorTemp)
    try:
        return core.createProfile(colorSpace, colorTemp)
    except (TypeError, ValueError) as v:
        raise PyCMSError(v)
def getProfileName(profile):
    """
    (pyCMS) Gets the internal product name for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the name tag cannot be read.

    The INTERNAL name is stored in an ICC tag in the profile itself,
    usually set when the profile was created; creators sometimes put extra
    information there as well.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: A string containing the internal name of the profile as stored
        in an ICC tag.
    :exception PyCMSError:
    """
    try:
        # Accept either an open profile or a path to one.
        if not isinstance(profile, ImageCmsProfile):
            profile = ImageCmsProfile(profile)
        # Computed in Python rather than C.  Legacy 1.x produced
        # "model - manufacturer" (or just the model when the two matched
        # or the model ran long); every result carries a trailing newline
        # for pyCMS compatibility.
        cms = profile.profile
        model = cms.product_model
        maker = cms.product_manufacturer
        if not (model or maker):
            # Neither tag available: fall back to the description.
            return cms.product_description + "\n"
        if not maker or len(model) > 30:
            return model + "\n"
        return "%s - %s\n" % (model, maker)
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def getProfileInfo(profile):
    """
    (pyCMS) Gets the internal product information for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the info tag cannot be read.

    The info tag usually contains details about the profile and how it was
    created, as supplied by the creator.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: A string containing the internal profile information stored in
        an ICC tag.
    :exception PyCMSError:
    """
    try:
        if not isinstance(profile, ImageCmsProfile):
            profile = ImageCmsProfile(profile)
        # Historic 1.x output was description / copyright / K007 tag /
        # whitepoint separated by blank lines; the whitepoint part did not
        # work reliably and is intentionally omitted here.  A trailing
        # blank line is kept for pyCMS compatibility.
        cms = profile.profile
        pieces = [tag for tag in (cms.product_description, cms.product_copyright) if tag]
        return "\r\n\r\n".join(pieces) + "\r\n\r\n"
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def getProfileCopyright(profile):
    """
    (pyCMS) Gets the copyright for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the copyright tag cannot be read.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: A string containing the profile's copyright tag.
    :exception PyCMSError:
    """
    try:
        if isinstance(profile, ImageCmsProfile):
            opened = profile
        else:
            opened = ImageCmsProfile(profile)
        # Trailing newline preserves historic pyCMS output.
        return opened.profile.product_copyright + "\n"
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def getProfileManufacturer(profile):
    """
    (pyCMS) Gets the manufacturer for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the manufacturer tag cannot be read.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: A string containing the profile's manufacturer tag.
    :exception PyCMSError:
    """
    try:
        if isinstance(profile, ImageCmsProfile):
            opened = profile
        else:
            opened = ImageCmsProfile(profile)
        # Trailing newline preserves historic pyCMS output.
        return opened.profile.product_manufacturer + "\n"
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def getProfileModel(profile):
    """
    (pyCMS) Gets the model for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the model tag cannot be read.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: A string containing the profile's model tag.
    :exception PyCMSError:
    """
    try:
        if isinstance(profile, ImageCmsProfile):
            opened = profile
        else:
            opened = ImageCmsProfile(profile)
        # Trailing newline preserves historic pyCMS output.
        return opened.profile.product_model + "\n"
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def getProfileDescription(profile):
    """
    (pyCMS) Gets the description for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the description tag cannot be read.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: A string containing the profile's description tag.
    :exception PyCMSError:
    """
    try:
        if isinstance(profile, ImageCmsProfile):
            opened = profile
        else:
            opened = ImageCmsProfile(profile)
        # Trailing newline preserves historic pyCMS output.
        return opened.profile.product_description + "\n"
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def getDefaultIntent(profile):
    """
    (pyCMS) Gets the default intent name for the given profile.

    A PyCMSError is raised if profile is neither a valid CmsProfile object
    nor a filename of a profile, or if the default intent cannot be read.

    Most profiles support several rendering intents but are optimized for
    one; this returns that default.  Use ImageCms.isIntentSupported() to
    verify a different intent before relying on it.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :returns: Integer 0-3 specifying the default rendering intent for this
        profile:
        INTENT_PERCEPTUAL            = 0 (DEFAULT)
        INTENT_RELATIVE_COLORIMETRIC = 1
        INTENT_SATURATION            = 2
        INTENT_ABSOLUTE_COLORIMETRIC = 3
        See the pyCMS documentation for details on rendering intents.
    :exception PyCMSError:
    """
    try:
        if isinstance(profile, ImageCmsProfile):
            opened = profile
        else:
            opened = ImageCmsProfile(profile)
        return opened.profile.rendering_intent
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def isIntentSupported(profile, intent, direction):
    """
    (pyCMS) Checks if a given intent is supported.

    Verifies that the desired renderingIntent can be used with profile, and
    that profile can serve as the input/output/proof profile requested.
    Some profiles only work in one "direction" or with certain rendering
    intents, so either check here before building a transform or catch the
    PyCMSError the transform constructor would raise.

    :param profile: EITHER a valid CmsProfile object, OR a string of the
        filename of an ICC profile.
    :param intent: Integer (0-3) specifying the rendering intent you wish to
        use with this profile:
        INTENT_PERCEPTUAL            = 0 (DEFAULT)
        INTENT_RELATIVE_COLORIMETRIC = 1
        INTENT_SATURATION            = 2
        INTENT_ABSOLUTE_COLORIMETRIC = 3
        See the pyCMS documentation for details on rendering intents.
    :param direction: Integer specifying if the profile is to be used for
        input, output, or proof:
        INPUT  = 0 (or use ImageCms.DIRECTION_INPUT)
        OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT)
        PROOF  = 2 (or use ImageCms.DIRECTION_PROOF)
    :returns: 1 if the intent/direction are supported, -1 if they are not.
    :exception PyCMSError:
    """
    try:
        if not isinstance(profile, ImageCmsProfile):
            profile = ImageCmsProfile(profile)
        # FIXME: I get different results for the same data w. different
        # compilers.  Bug in LittleCMS or in the binding?
        supported = profile.profile.is_intent_supported(intent, direction)
        return 1 if supported else -1
    except (AttributeError, IOError, TypeError, ValueError) as v:
        raise PyCMSError(v)
def versions():
    """
    (pyCMS) Fetches versions.

    :returns: A 4-tuple of version strings: this module's VERSION, the
        LittleCMS library version, the running Python version (first
        whitespace-separated token of sys.version), and PIL's
        Image.VERSION.
    """
    return (
        VERSION, core.littlecms_version,
        sys.version.split()[0], Image.VERSION
    )
# --------------------------------------------------------------------
if __name__ == "__main__":
    # create a cheap manual from the __doc__ strings for the functions above
    print(__doc__)
    for f in dir(sys.modules[__name__]):
        doc = None
        try:
            # getattr replaces the original exec() on a format string:
            # same effect, no dynamic code execution.
            doc = getattr(sys.modules[__name__], f).__doc__
            if "pyCMS" in doc:
                # so we don't get the __doc__ string for imported modules
                # (and objects whose __doc__ is None raise TypeError on
                # the `in` test above, which is caught below)
                print("=" * 80)
                print("%s" % f)
                print(doc)
        except (AttributeError, TypeError):
            pass
|
{
"content_hash": "c65b5054660ecd4581be0d391c1eac67",
"timestamp": "",
"source": "github",
"line_count": 956,
"max_line_length": 93,
"avg_line_length": 38.49058577405858,
"alnum_prop": 0.6587765306954372,
"repo_name": "tequa/ammisoft",
"id": "40fb1ad0a68f9407374e9a5d51d03a8066909418",
"size": "37186",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/PIL/ImageCms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9595"
},
{
"name": "C",
"bytes": "715524"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "59535"
},
{
"name": "CSS",
"bytes": "5382"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "Fortran",
"bytes": "67146"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "HTML",
"bytes": "46416"
},
{
"name": "Inno Setup",
"bytes": "1227"
},
{
"name": "JavaScript",
"bytes": "24663"
},
{
"name": "Jupyter Notebook",
"bytes": "629939"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PowerShell",
"bytes": "3673"
},
{
"name": "Python",
"bytes": "29200377"
},
{
"name": "Shell",
"bytes": "6905"
},
{
"name": "Tcl",
"bytes": "2124176"
},
{
"name": "Visual Basic",
"bytes": "2144"
}
],
"symlink_target": ""
}
|
"""A very simple example demonstrating Sijax and the Flask-Sijax extension."""
import os, sys
path = os.path.join('.', os.path.dirname(__file__), '../')
sys.path.append(path)
from flask import Flask, g, render_template
import flask_sijax
app = Flask(__name__)
# The path where you want the extension to create the needed javascript files
# DON'T put any of your files in this directory, because they'll be deleted!
app.config["SIJAX_STATIC_PATH"] = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/')
# You need to point Sijax to the json2.js library if you want to support
# browsers that don't support JSON natively (like IE <= 7)
app.config["SIJAX_JSON_URI"] = '/static/js/sijax/json2.js'
flask_sijax.Sijax(app)
# Regular flask view function - Sijax is unavailable here
@app.route("/")
def hello():
    """Plain HTTP view that links to the Sijax-enabled page."""
    return "Hello World!<br /><a href='/sijax'>Go to Sijax test</a>"
# Sijax enabled function - notice the `@Sijax.route` decorator
# used instead of `@app.route` (above).
@flask_sijax.route(app, "/sijax")
def hello_sijax():
    """Serve hello.html and answer the Sijax callbacks it triggers."""
    # Sijax handler function receiving 2 arguments from the browser
    # The first argument (obj_response) is passed automatically
    # by Sijax (much like Python passes `self` to object methods)
    def hello_handler(obj_response, hello_from, hello_to):
        obj_response.alert('Hello from %s to %s' % (hello_from, hello_to))
        obj_response.css('a', 'color', 'green')
    # Another Sijax handler function which receives no arguments
    def goodbye_handler(obj_response):
        obj_response.alert('Goodbye, whoever you are.')
        obj_response.css('a', 'color', 'red')
    if g.sijax.is_sijax_request:
        # The request looks like a valid Sijax request
        # Let's register the handlers and tell Sijax to process it
        g.sijax.register_callback('say_hello', hello_handler)
        g.sijax.register_callback('say_goodbye', goodbye_handler)
        return g.sijax.process_request()
    # Ordinary GET: render the page that wires up the Sijax JavaScript.
    return render_template('hello.html')
if __name__ == '__main__':
    # Development server only -- debug mode must not be used in production.
    app.run(debug=True, port=8080)
|
{
"content_hash": "ecf8ca86f9831581a35ae41e95accf78",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 98,
"avg_line_length": 37.58181818181818,
"alnum_prop": 0.6855345911949685,
"repo_name": "spantaleev/flask-sijax",
"id": "b62b57d8f99c61d85af5e41ea29baeeb120ce8ee",
"size": "2092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/hello.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "4610"
},
{
"name": "Python",
"bytes": "37393"
},
{
"name": "Shell",
"bytes": "4521"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.contrib import admin
import httplib
import json
import logging
import datetime
from django.utils.timezone import utc
from howlcore import core
from howlcore.exceptions import SensorReadError
from mongodb.models import Mongodb
# This module registers as a sensor-type device with howlcore.
DEVICE_TYPE = core.DeviceType.SENSOR
logger = logging.getLogger(__name__)
class Roomsensor(core.Device, core.Sensor):
    """HTTP-polled room sensor exposing luminosity/temperature/humidity.

    read() fetches a JSON payload from the sensor and persists the values;
    ping() only checks reachability.  Both update ``self.status`` and call
    ``self.save()``.
    """

    attributes = ["read"]
    data = ["luminosity", "temperature", "humidity"]
    ip_address = models.GenericIPAddressField()
    url = models.CharField(max_length=200)
    db = models.ForeignKey(Mongodb)
    luminosity = models.FloatField(blank=True, null=True)
    temperature = models.FloatField(blank=True, null=True)
    humidity = models.FloatField(blank=True, null=True)

    def read(self):
        """Fetch current sensor values over HTTP and persist them.

        Raises SensorReadError on any failure.  ``status`` reflects the
        failure mode: ERROR for a non-200 response, NOT_RESPONDING for
        connection/parse failures, OK on success.
        """
        conn = httplib.HTTPConnection(self.ip_address)
        try:
            conn.request("GET", self.url)
            response = conn.getresponse()
            if (response.status != 200):
                logger.error("roomsensor read failed: " + str(response.status) + " (" + response.reason + ")" )
                raise SensorReadError({response.status, response.reason})
            else:
                body = response.read()
                payload = json.loads(body)
                self.luminosity = int(payload["luminosity"])
                self.temperature = float(payload["temperature"])
                self.humidity = float(payload["humidity"])
                self.last_active = datetime.datetime.utcnow().replace(tzinfo=utc)
                self.status = core.StatusType.OK
                self.save()
        except SensorReadError as e:
            logger.error("roomsensor " + self.name + " read failed", exc_info=True)
            self.status = core.StatusType.ERROR
            self.save()
            raise
        except Exception as e:
            # Anything else (connection refused, bad JSON, missing key)
            # counts as the sensor not responding.
            logger.error("roomsensor " + self.name + " read failed", exc_info=True)
            self.status = core.StatusType.NOT_RESPONDING
            self.save()
            raise SensorReadError(e)
        finally:
            # The original version leaked the HTTP connection on every call.
            conn.close()

    def ping(self):
        """Check sensor reachability; update and return ``self.status``."""
        conn = None
        try:
            conn = httplib.HTTPConnection(self.ip_address)
            conn.request("GET", self.url)
            response = conn.getresponse()
            if (response.status != 200):
                logger.error("roomsensor ping failed: " + str(response.status) + " (" + response.reason + ")")
                raise SensorReadError({response.status, response.reason})
            else:
                logger.debug("roomsensor {0} ping ok".format(self.name))
                self.status = core.StatusType.OK
        except SensorReadError:
            logger.error("roomsensor {0} ping failed".format(self.name), exc_info=True)
            self.status = core.StatusType.ERROR
        except Exception:
            # Narrowed from a bare except; any other failure means the
            # sensor is unreachable.
            logger.error("roomsensor {0} ping failed".format(self.name), exc_info=True)
            self.status = core.StatusType.NOT_RESPONDING
        finally:
            if conn is not None:
                conn.close()
            self.save()
        # Returned outside the finally block so a `return` there can no
        # longer swallow non-Exception exits (e.g. KeyboardInterrupt).
        return self.status
|
{
"content_hash": "e167c682c283aa0aea05e3b578facfac",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 111,
"avg_line_length": 37.7625,
"alnum_prop": 0.6044356173452499,
"repo_name": "volzotan/django-howl",
"id": "5b47ac0c09024e88f382a105256713a475fea019",
"size": "3021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "howl/roomsensor/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "5972"
},
{
"name": "CSS",
"bytes": "12606"
},
{
"name": "Gnuplot",
"bytes": "696"
},
{
"name": "JavaScript",
"bytes": "3733"
},
{
"name": "Python",
"bytes": "55992"
},
{
"name": "Shell",
"bytes": "588"
}
],
"symlink_target": ""
}
|
from .phpcpd_handler import PHPCPDHandler
from .php_lint_handler import PHPLintHandler
from .php_fixer_handler import PHPFixerHandler
__all__ = ['PHPCPDHandler', 'PHPLintHandler', 'PHPFixerHandler']
|
{
"content_hash": "5d9b28c073d4121f01d0d963b3e34438",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 64,
"avg_line_length": 33.5,
"alnum_prop": 0.7910447761194029,
"repo_name": "prisis/sublime-text-packages",
"id": "887dd1765c38fbd2ada45c818a695ff97bd22b20",
"size": "330",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Packages/anaconda_php/plugin/handlers_php/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "318"
},
{
"name": "Batchfile",
"bytes": "786"
},
{
"name": "C++",
"bytes": "56562"
},
{
"name": "CSS",
"bytes": "18339"
},
{
"name": "HTML",
"bytes": "1757"
},
{
"name": "JavaScript",
"bytes": "206342"
},
{
"name": "PHP",
"bytes": "2193174"
},
{
"name": "Pascal",
"bytes": "7460"
},
{
"name": "PowerShell",
"bytes": "397"
},
{
"name": "Python",
"bytes": "19331281"
},
{
"name": "Shell",
"bytes": "1903"
},
{
"name": "Smarty",
"bytes": "4883"
},
{
"name": "SourcePawn",
"bytes": "4479"
},
{
"name": "Tcl",
"bytes": "88877"
}
],
"symlink_target": ""
}
|
from django.forms import ModelForm
import models
class PassageForm(ModelForm):
    """ModelForm for Passage; created_time is excluded from the form."""
    class Meta:
        model = models.Passage
        exclude = ['created_time']
class CommentForm(ModelForm):
    """ModelForm for Comment; created_time is excluded from the form."""
    class Meta:
        model = models.Comment
        exclude = ['created_time']
|
{
"content_hash": "dae820e55b7d5fa3660dcd33c962a33a",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 34,
"avg_line_length": 18.571428571428573,
"alnum_prop": 0.7,
"repo_name": "hfercc/mese2014",
"id": "e94a7878fb4dfdeee94921eea47471bc3042ba87",
"size": "260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webboard/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "103122"
},
{
"name": "JavaScript",
"bytes": "1054910"
},
{
"name": "Python",
"bytes": "1121791"
},
{
"name": "Shell",
"bytes": "2381"
}
],
"symlink_target": ""
}
|
import os
import glob
import logging
import unittest
# Root logger at DEBUG so test runs show full output.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
# Prefer bs4 but keep prettify() output compatible with the legacy
# BeautifulSoup 3 package that the doctests were written against.
try:
    from bs4 import BeautifulSoup as _BeautifulSoup
    class BeautifulSoup(_BeautifulSoup):
        def prettify(self):
            # Strip the implicit <html>/<body> wrappers that bs4 inserts.
            if self.html: self.html.unwrap()
            if self.body: self.body.unwrap()
            res = super(BeautifulSoup,self).prettify()
            # Normalize self-closing tag spacing to match BS3 output.
            # NOTE(review): the second replace looks like a no-op
            # (" />" -> " />"); possibly meant to collapse doubled
            # spaces -- confirm against the expected doctest output.
            res = res.replace("/>"," />")
            res = res.replace(" />"," />")
            return str(res)
except ImportError:
    from BeautifulSoup import BeautifulSoup # required for html prettification
from webob import Request, Response
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.orm import mapper as sqla_mapper
from sqlalchemy.ext.declarative import declarative_base
logging.getLogger('sqlalchemy').setLevel(logging.ERROR)
from formalchemy.fields import Field, SelectFieldRenderer, FieldRenderer, TextFieldRenderer, EscapingReadonlyRenderer
import formalchemy.fatypes as types
from formalchemy import Column
def ls(*args):
    """Print a sorted listing of a directory relative to this file.

    *args are joined as path components under this module's directory.
    Directories are prefixed with 'D', everything else with '-'.
    """
    base = os.path.dirname(__file__)
    pattern = os.path.join(base, *(list(args) + ['*']))
    for entry in sorted(glob.glob(pattern)):
        if os.path.isdir(entry):
            print('D %s' % os.path.basename(entry))
        else:
            print('- %s' % os.path.basename(entry))
def cat(*args):
    """Print the contents of a file located relative to this module.

    *args are joined as path components under this module's directory.
    """
    filename = os.path.join(os.path.dirname(__file__), *args)
    # Context manager closes the handle; the original leaked an open file.
    with open(filename) as fobj:
        print(fobj.read())
def session_mapper(scoped_session):
    """Return a mapper() that adds a ``query`` property bound to *scoped_session*."""
    def _mapper(cls, *args, **kwargs):
        # Every mapped class gets a Session-backed .query attribute.
        cls.query = scoped_session.query_property()
        return sqla_mapper(cls, *args, **kwargs)
    return _mapper
def application(model, fieldset=None):
    """Build a tiny WSGI app that renders *model* inside a FieldSet form.

    GET renders the form; POST re-binds with the request data, validates,
    and on success syncs the model and re-renders read-only with an OK
    marker.  Used by the tests to exercise full form round-trips.
    """
    def app(environ, start_response):
        # NOTE(review): "<DOCTYPE !html>" looks like a typo for
        # "<!DOCTYPE html>", and '<b>OK<b>' below for '<b>OK</b>';
        # both are harmless for these tests.
        tmpl = u'''<DOCTYPE !html>
<html><body><form method="POST" action="" enctype="multipart/form-data">
%s
<input type="submit" id="submit" name="submit" />
</form></body></html>'''
        req = Request(environ)
        resp = Response()
        if fieldset is None:
            # No prototype fieldset supplied: derive one from the model.
            fs = FieldSet(model)
        else:
            fs = fieldset.bind(model)
        if req.method == 'POST':
            # Re-bind with the request so the submitted data is available.
            if fieldset is None:
                fs = fs.bind(request=req)
            else:
                fs = fs.bind(model=model, request=req)
            if fs.validate():
                fs.sync()
                fs.readonly = True
                resp.text = tmpl % ('<b>OK<b>' + fs.render(),)
            else:
                resp.text = tmpl % fs.render()
        else:
            resp.text = tmpl % fs.render()
        return resp(environ, start_response)
    return app
# In-memory SQLite database shared by all fixtures below; the custom
# mapper attaches a .query property to every mapped class.
engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(autoflush=False, bind=engine))
mapper = session_mapper(Session)
Base = declarative_base(engine, mapper=mapper)
class One(Base):
    """Minimal mapped class: a primary key and nothing else."""
    __tablename__ = 'ones'
    id = Column(Integer, primary_key=True)
class Two(Base):
    """Single nullable Integer column with a default value."""
    __tablename__ = 'twos'
    id = Column(Integer, primary_key=True)
    # NOTE(review): the default is the *string* '133' on an Integer
    # column; possibly intentional for these tests -- confirm.
    foo = Column(Integer, default='133', nullable=True)
class TwoInterval(Base):
    """Fixture exercising a required Interval column."""
    __tablename__ = 'two_interval'
    id = Column(Integer, primary_key=True)
    foo = Column(Interval, nullable=False)
class TwoFloat(Base):
    """Fixture exercising a required Float column."""
    __tablename__ = 'two_floats'
    id = Column(Integer, primary_key=True)
    foo = Column(Float, nullable=False)
from decimal import Decimal
class TwoNumeric(Base):
    """Fixture exercising an optional Numeric (Decimal) column."""
    __tablename__ = 'two_numerics'
    id = Column(Integer, primary_key=True)
    foo = Column(Numeric, nullable=True)
class Three(Base):
    """Fixture with two optional Text columns."""
    __tablename__ = 'threes'
    id = Column(Integer, primary_key=True)
    foo = Column(Text, nullable=True)
    bar = Column(Text, nullable=True)
class CheckBox(Base):
    """Fixture with a required Boolean column (checkbox rendering)."""
    __tablename__ = 'checkboxes'
    id = Column(Integer, primary_key=True)
    field = Column(Boolean, nullable=False)
class PrimaryKeys(Base):
    """Fixture with a composite (Integer, String) primary key."""
    __tablename__ = 'primary_keys'
    id = Column(Integer, primary_key=True)
    id2 = Column(String(10), primary_key=True)
    field = Column(String(10), nullable=False)
class Binaries(Base):
    """Fixture with an optional LargeBinary column (file upload)."""
    __tablename__ = 'binaries'
    id = Column(Integer, primary_key=True)
    file = Column(LargeBinary, nullable=True)
class ConflictNames(Base):
    """Columns deliberately named like common template/form variables
    (model, data, session) to test name-collision handling."""
    __tablename__ = 'conflict_names'
    id = Column(Integer, primary_key=True)
    model = Column(String, nullable=True)
    data = Column(String, nullable=True)
    session = Column(String, nullable=True)
# Classic (non-declarative) table: two coordinate pairs that are mapped
# onto composite Point values below.
vertices = Table('vertices', Base.metadata,
    Column('id', Integer, primary_key=True),
    Column('x1', Integer),
    Column('y1', Integer),
    Column('x2', Integer),
    Column('y2', Integer),
    )
class Point(object):
    """Composite value object for a 2-D coordinate (used by Vertex)."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __composite_values__(self):
        # SQLAlchemy composite protocol: values in column order.
        return [self.x, self.y]

    def __eq__(self, other):
        return self.x == other.x and self.y == other.y

    def __ne__(self, other):
        return not self.__eq__(other)
class Vertex(object):
    """Classically-mapped class for the 'vertices' table (see mapper below)."""
    pass
# Map Vertex with composite Point attributes built from the coordinate
# column pairs (x1,y1) and (x2,y2).
mapper(Vertex, vertices, properties={
    'start':composite(Point, vertices.c.x1, vertices.c.y1),
    'end':composite(Point, vertices.c.x2, vertices.c.y2)
})
class PointFieldRenderer(FieldRenderer):
    """Render/deserialize a composite Point as paired '-x'/'-y' text inputs."""
    def render(self, **kwargs):
        from formalchemy import helpers as h
        # Prefer submitted form data over the bound value so that
        # re-rendering after failed validation keeps the user's input.
        data = self.field.parent.data
        x_name = self.name + '-x'
        y_name = self.name + '-y'
        x_value = (data is not None and x_name in data) and data[x_name] or str(self.field.value and self.field.value.x or '')
        y_value = (data is not None and y_name in data) and data[y_name] or str(self.field.value and self.field.value.y or '')
        return h.text_field(x_name, value=x_value) + h.text_field(y_name, value=y_value)
    def deserialize(self):
        # Both sub-fields must be present; int() raises for non-numeric
        # input, which validation machinery is expected to handle.
        data = self.field.parent.data.getone(self.name + '-x'), self.field.parent.data.getone(self.name + '-y')
        return Point(*[int(i) for i in data])
# todo? test a CustomBoolean, using a TypeDecorator --
# http://www.sqlalchemy.org/docs/04/types.html#types_custom
# probably need to add _renderer attr and check
# isinstance(getattr(myclass, '_renderer', type(myclass)), Boolean)
# since the custom class shouldn't really inherit from Boolean
# Table whose column 'a' is exposed under a different attribute name via
# column_property (see Property below).
properties = Table('properties', Base.metadata,
    Column('id', Integer, primary_key=True),
    Column('a', Integer))
class Property(Base):
    """Mapped class whose 'foo' attribute is a labeled column_property."""
    __table__ = properties
    foo = column_property(properties.c.a.label('foo'))
    # bar = column_property(properties.c.a) # TODO
class Recursive(Base):
    """Self-referential model: each row may point at a parent row."""
    __tablename__ = 'recursives'
    id = Column(Integer, primary_key=True)
    foo = Column(Text, nullable=True)
    parent_id = Column(Integer, ForeignKey("recursives.id"))
    parent = relation('Recursive', primaryjoin=parent_id==id, uselist=False, remote_side=parent_id)
class RecursiveBase(Base):
    """Self-referential polymorphic base (discriminated on typ_id)."""
    __tablename__ = 'recursivebase'
    __mapper_args__ = {'polymorphic_on': 'typ_id'}
    id = Column(Integer, primary_key=True)
    typ_id = Column(Integer, nullable=False)
    foo = Column(Text, nullable=True)
    parent_id = Column(Integer, ForeignKey(id))
    # NOTE(review): the relation targets 'Recursive', not RecursiveBase --
    # confirm the cross-class target is intended.
    parent = relation('Recursive', primaryjoin=parent_id==id, uselist=False, remote_side=parent_id)
class RecursiveChild(RecursiveBase):
    """Joined-table polymorphic subclass of RecursiveBase (identity 42)."""
    __tablename__ = 'recursivechild'
    __mapper_args__ = {'polymorphic_identity': 42}
    id = Column(None, ForeignKey(RecursiveBase.id, ondelete='CASCADE', onupdate='RESTRICT'), primary_key=True)
    bar = Column(Text, nullable=True)
    @classmethod
    def __declare_last__(cls):
        # Runs after mapper configuration: expose the inherited 'parent'
        # relation under an 'owner' alias as well.
        if not hasattr(cls,'owner'):
            cls.owner = cls.parent
class Synonym(Base):
    """Exposes '_foo' through a synonym whose setter mutates the value."""
    __tablename__ = 'synonyms'
    id = Column(Integer, primary_key=True)
    _foo = Column(Text, nullable=True)
    def _set_foo(self, foo):
        # The "SOMEFOO " prefix lets tests verify the setter actually ran.
        self._foo = "SOMEFOO " + foo
    def _get_foo(self):
        return self._foo
    foo = synonym('_foo', descriptor=property(_get_foo, _set_foo))
class AliasedRecursive(Base):
    """Self-referential fixture with an 'owner' alias added late."""
    __tablename__ = 'aliasedrecursive'
    id = Column(Integer, primary_key=True)
    foo = Column(Text, nullable=True)
    parent_id = Column(Integer, ForeignKey(id))
    # NOTE(review): the relation targets 'Recursive', not this class --
    # confirm the cross-class target is intended.
    parent = relation('Recursive', primaryjoin=parent_id==id, uselist=False, remote_side=parent_id)
    @classmethod
    def __declare_last__(cls):
        # Expose 'parent' under an 'owner' alias after mapper configuration.
        if not hasattr(cls,'owner'):
            cls.owner = cls.parent
class OTOChild(Base):
    """Child side of a one-to-one relationship (see OTOParent)."""
    __tablename__ = 'one_to_one_child'
    id = Column(Integer, primary_key=True)
    baz = Column(Text, nullable=False)
    def __unicode__(self):
        return self.baz
    __str__ = __unicode__
    def __repr__(self):
        return '<OTOChild %s>' % self.baz
class OTOParent(Base):
    """Parent side of a one-to-one relationship with a required child."""
    __tablename__ = 'one_to_one_parent'
    id = Column(Integer, primary_key=True)
    oto_child_id = Column(Integer, ForeignKey('one_to_one_child.id'), nullable=False)
    child = relation(OTOChild, uselist=False)
class Order(Base):
    """Order belonging to a required User (backref defined on User.orders)."""
    __tablename__ = 'orders'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
    quantity = Column(Integer, nullable=False)
    def __unicode__(self):
        return 'Quantity: %s' % self.quantity
    __str__ = __unicode__
    def __repr__(self):
        return '<Order for user %s: %s>' % (self.user_id, self.quantity)
class OptionalOrder(Base): # the user is optional, not the order
    """Order variant whose user foreign key is nullable."""
    __tablename__ = 'optional_orders'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    quantity = Column(Integer)
    user = relation('User')
    def __unicode__(self):
        return 'Quantity: %s' % self.quantity
    __str__ = __unicode__
    def __repr__(self):
        return '<OptionalOrder for user %s: %s>' % (self.user_id, self.quantity)
class User(Base):
    """Primary fixture model; renders via __unicode__ and __html__."""
    __label__ = 'User'
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    email = Column(Unicode(40), unique=True, nullable=False)
    password = Column(Unicode(20), nullable=False)
    name = Column(Unicode(30))
    orders = relation(Order, backref='user', order_by='Order.quantity')
    # Same target collection, but exposed as a lazy dynamic query.
    orders_dl = dynamic_loader(Order)
    def __unicode__(self):
        return self.name
    __str__ = __unicode__
    def __repr__(self):
        return '<User %s>' % self.name
    def __html__(self):
        return '<a href="mailto:%s">%s</a>' % (self.email, self.name)
class NaturalOrder(Base):
    """Order referencing its user by natural key (email) instead of id."""
    __tablename__ = 'natural_orders'
    id = Column(Integer, primary_key=True)
    user_email = Column(String, ForeignKey('natural_users.email'), nullable=False)
    quantity = Column(Integer, nullable=False)
    def __repr__(self):
        return 'Quantity: %s' % self.quantity
class NaturalUser(Base):
    """User keyed by its email address (natural primary key)."""
    __tablename__ = 'natural_users'
    email = Column(Unicode(40), primary_key=True)
    password = Column(Unicode(20), nullable=False)
    name = Column(Unicode(30))
    orders = relation(NaturalOrder, backref='user')
    def __repr__(self):
        return self.name
class Function(Base):
    """Exercises a SQL-function column default (current_timestamp)."""
    __tablename__ = 'functions'
    foo = Column(TIMESTAMP, primary_key=True, default=func.current_timestamp())
# test property order for non-declarative mapper
addresses = Table('email_addresses', Base.metadata,
    Column('address_id', Integer, Sequence('address_id_seq', optional=True), primary_key = True),
    Column('address', String(40)),
)
users2 = Table('users2', Base.metadata,
    Column('user_id', Integer, Sequence('user_id_seq', optional=True), primary_key = True),
    Column('address_id', Integer, ForeignKey(addresses.c.address_id)),
    Column('name', String(40), nullable=False)
)

# Classic (non-declarative) mappings over the tables above.
class Address(object): pass
class User2(object): pass

mapper(Address, addresses)
mapper(User2, users2, properties={'address': relation(Address)})
class OrderUser(Base):
    """Association object with a composite primary key (user_id, order_id)."""
    __tablename__ = 'order_users'
    user_id = Column(Integer, ForeignKey('users.id'), primary_key=True)
    order_id = Column(Integer, ForeignKey('orders.id'), primary_key=True)
    user = relation(User)
    order = relation(Order)

    def __repr__(self):
        return 'OrderUser(%s, %s)' % (self.user_id, self.order_id)
class OrderUserTag(Base):
    """Declarative class over an explicit Table with a composite FK constraint."""
    __table__ = Table('order_user_tags', Base.metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, nullable=False),
        Column('order_id', Integer, nullable=False),
        Column('tag', String, nullable=False),
        ForeignKeyConstraint(['user_id', 'order_id'], ['order_users.user_id', 'order_users.order_id']))
    order_user = relation(OrderUser)
class Order__User(Base):
    """Mapping over an aliased join of the orders and users tables."""
    __table__ = join(Order.__table__, User.__table__).alias('__orders__users')
class Aliases(Base):
    """Attribute name ('text') differs from the DB column name ('row_text')."""
    __tablename__ = 'table_with_aliases'
    id = Column(Integer, primary_key=True)
    text = Column('row_text', Text)
# Create the schema and populate a small, deterministic fixture data set
# shared by the whole test suite.
Base.metadata.create_all()
session = Session()

primary1 = PrimaryKeys(id=1, id2='22', field='value1')
session.add(primary1)
primary2 = PrimaryKeys(id=1, id2='33', field='value2')
session.add(primary2)

parent = OTOParent()
session.add(parent)
parent.child = OTOChild(baz='baz')

bill = User(email='bill@example.com',
            password='1234',
            name='Bill')
session.add(bill)
john = User(email='john@example.com',
            password='5678',
            name='John')
# NOTE(review): john is never session.add()ed directly -- presumably it is
# picked up via the Order.user relation below; confirm.
order1 = Order(user=bill, quantity=10)
session.add(order1)
order2 = Order(user=john, quantity=5)
session.add(order2)
order3 = Order(user=john, quantity=6)
session.add(order3)

nbill = NaturalUser(email='nbill@example.com',
                    password='1234',
                    name='Natural Bill')
session.add(nbill)
njohn = NaturalUser(email='njohn@example.com',
                    password='5678',
                    name='Natural John')
session.add(njohn)
norder1 = NaturalOrder(user=nbill, quantity=10)
session.add(norder1)
norder2 = NaturalOrder(user=njohn, quantity=5)
session.add(norder2)

orderuser1 = OrderUser(user_id=1, order_id=1)
session.add(orderuser1)
orderuser2 = OrderUser(user_id=1, order_id=2)
session.add(orderuser2)

# NOTE(review): conflict_names is created but never session.add()ed -- confirm.
conflict_names = ConflictNames(data='data', model='model', session='session')
session.commit()
from formalchemy import config
from formalchemy.forms import FieldSet as DefaultFieldSet
from formalchemy.tables import Grid as DefaultGrid
from formalchemy.fields import Field
from formalchemy import templates
from formalchemy.validators import ValidationError
# The test suite requires the Mako templating engine and expects it to be
# formalchemy's configured default engine; fail fast otherwise.
if templates.HAS_MAKO:
    if not isinstance(config.engine, templates.MakoEngine):
        raise ValueError('MakoEngine is not the default engine: %s' % config.engine)
else:
    raise ImportError('mako is required for testing')
def pretty_html(html):
    """Normalise *html* with BeautifulSoup so engine outputs can be compared."""
    return BeautifulSoup(html).prettify().strip()
class FieldSet(DefaultFieldSet):
    """FieldSet that cross-checks every registered template engine.

    render() produces HTML with the configured default engine and asserts
    that every other registered engine renders identical (prettified)
    output, so the Mako/other templates stay in sync.
    """

    def render(self, lang=None):
        # The two original branches were identical except for the template
        # name; deduplicated into a single path.
        template = 'fieldset_readonly' if self.readonly else 'fieldset'
        html = pretty_html(DefaultFieldSet.render(self))
        for name, engine in templates.engines.items():
            if isinstance(engine, config.engine.__class__):
                continue  # skip the engine that produced `html` itself
            html_engine = pretty_html(engine(template, fieldset=self))
            assert html == html_engine, (name, html, html_engine)
        return html
class Grid(DefaultGrid):
    """Grid that cross-checks every registered template engine.

    Mirrors the FieldSet subclass above: render with the default engine,
    then assert all other engines produce identical prettified HTML.
    """

    def render(self, lang=None):
        # The two original branches were identical except for the template
        # name; deduplicated into a single path.
        template = 'grid_readonly' if self.readonly else 'grid'
        html = pretty_html(DefaultGrid.render(self))
        for name, engine in templates.engines.items():
            if isinstance(engine, config.engine.__class__):
                continue  # skip the engine that produced `html` itself
            html_engine = pretty_html(engine(template, collection=self))
            assert html == html_engine, (name, html, html_engine)
        return html
# Snapshot of the default renderer registry so tests can restore it later.
original_renderers = FieldSet.default_renderers.copy()


def configure_and_render(fs, **options):
    """Configure the FieldSet *fs* with *options* and return its rendered HTML."""
    fs.configure(**options)
    return fs.render()
class ImgRenderer(TextFieldRenderer):
    """Renders the field value as an <img> tag instead of a text input."""

    def render(self, *args, **kwargs):
        return '<img src="%s">' % self.value
# Expose a pre-built FieldSet inside the fake module and register it under
# the name 'library' so doctests can simply ``import library``.
from formalchemy.tests import fake_module
fake_module.__dict__.update({
    'fs': FieldSet(User, session=session),
})
import sys
sys.modules['library'] = fake_module
|
{
"content_hash": "40a87daeec773adeb2463667c2f9f2f6",
"timestamp": "",
"source": "github",
"line_count": 489,
"max_line_length": 126,
"avg_line_length": 34.35787321063395,
"alnum_prop": 0.637223974763407,
"repo_name": "camptocamp/formalchemy",
"id": "c0b257651ff7250fabb22e1c3e68d72ddf58122e",
"size": "16825",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "formalchemy/tests/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6599"
},
{
"name": "HTML",
"bytes": "4787"
},
{
"name": "JavaScript",
"bytes": "155"
},
{
"name": "Mako",
"bytes": "8449"
},
{
"name": "Python",
"bytes": "428049"
},
{
"name": "Shell",
"bytes": "194"
}
],
"symlink_target": ""
}
|
import sys
import pickle
import operator
import re
import codecs
from .stop_words import StopWords
# Python 2 compatibility: force UTF-8 codecs onto the standard streams.
if int(sys.version_info[0]) < 3:
    try:
        reload(sys)  # reload() is a builtin only on Python 2
        sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
        sys.stdin = codecs.getreader('utf-8')(sys.stdin)
    except NameError:
        # Running on Python 3 despite the version check; nothing to do.
        pass
# pre-compile some res
# Raw strings avoid invalid-escape DeprecationWarnings (SyntaxWarning on
# Python 3.12+); the compiled patterns are byte-for-byte unchanged.
# xml/html tags
markup_tag_re = re.compile(r'<.*?>', re.UNICODE)
space_tokenizer_re = re.compile(r'\s+', re.UNICODE)
word_tokenizer_re = re.compile(r'\W+', re.UNICODE)
# Characters that terminate a twitter entity (#tag, @user, $CASH).
end_entity_re = re.compile(r'[\s.,?!\t\n():;#$@&"]', re.UNICODE)
# NOTE(review): the bare '.' between the two [a-zA-Z]+ groups matches ANY
# character, not just a literal dot -- presumably r'\.' was intended.
# Left as-is to preserve behaviour.
shortened_url_re = re.compile(r'http://[a-zA-Z]+.[a-zA-Z]+/[a-zA-Z0-9]+/?', re.UNICODE)
class SimpleNGrams:
def __init__(self,
char_lower_cutoff = 2,
char_upper_cutoff = 25,
cutoff = 0.00001,
tokenizer = None,
token_iter = None,
n_grams = 2):
self.char_lower_cutoff = char_lower_cutoff
self.char_upper_cutoff = char_upper_cutoff
# if cutoff < 1, compare frac else total count
self.cutoff = cutoff
# Tokenizer can be either a key word in pre-defined tokenizers or None
# When None, tokenizer_reg_ex must be defined
if tokenizer is None:
if token_iter is not None:
self.token_iter = token_iter
else:
print >>sys.stderr, "Please define valid tokenizer regular expression. Exiting!"
sys.exit()
else:
VALID_TOKENIZERS = ['word','space','twitter']
if tokenizer.lower() not in VALID_TOKENIZERS:
print >>sys.stderr, "Please select valid tokenizer from {}. Exiting!".format(VALID_TOKENIZERS)
sys.exit()
else:
if tokenizer.lower().startswith("word"):
self.token_iter = self.word_token_iter
elif tokenizer.lower().startswith("space"):
self.token_iter = self.space_token_iter
elif tokenizer.lower().startswith("twit"):
self.token_iter = self.twitter_token_iter
self.n_grams = int(n_grams)
self.sl = StopWords()
self.start_new()
def word_token_iter(self, row):
"""Split a row on word boundaries as defined by regex W options"""
return [x.lower() for x in word_tokenizer_re.split(row)]
def space_token_iter(self, row):
"""Split a row only on whitespace"""
return [x.lower() for x in space_tokenizer_re.split(row)]
    def twitter_token_iter(self, row):
        """Tokenize tweet text: keep #hashtags, @mentions, $CASHTAGS
        (upper-cased) and shortened URLs intact, then word-tokenize
        whatever text remains after those entities are blanked out."""
        res = self.get_twitter_entities(row,"#") \
            + self.get_twitter_entities(row,"@") \
            + [x.upper() for x in self.get_twitter_entities(row,"$")] \
            + self.get_shortened_urls(row)
        #remove id'd tags before tokening remainder
        return res + [x.lower() for x in word_tokenizer_re.split(markup_tag_re.sub(' ', self.remove(row, res)))]
def remove(self, row_str, word_list):
for x in word_list:
row_str = row_str.replace(x, " ")
return row_str
def get_twitter_entities(self, row, marker):
"""Extract text items that start with special markers such as @ and #"""
tags = []
in_tag = False
for x in row:
if x == marker and not in_tag:
in_tag = True
tag = marker
elif in_tag and end_entity_re.findall(x) != []:
tags.append(tag)
in_tag = False
elif in_tag:
tag += x
return tags
    def get_shortened_urls(self, row):
        """Extract shortened urls from text (all shortened_url_re matches)."""
        return shortened_url_re.findall(row)
def start_new(self):
self.token = {i+1:{"tcnt":0, "gram_dict":{}} for i in range(self.n_grams)}
self.activities = {}
self.acnt = 0
self.max_term_freq = -sys.maxsize
    def add(self, row):
        """Tokenize *row* and update the 1..n_grams counts plus per-document
        (activity) counts. Blank rows are ignored."""
        if row.strip() == '':
            return
        self.acnt += 1  # one more document/activity seen
        act_set = set()  # grams already counted for this row (doc frequency)
        last_tok_list = []  # sliding window of up to n_grams previous tokens
        for tok in self.token_iter(row):
            # Be sure to deal with lower-ing in tokenizer if necessary!
            #gram = tok.strip().lower()
            gram = tok.strip()
            tmp = gram
            # Length/stopword filter applies to the base token only.
            if len(tmp) > self.char_lower_cutoff and len(tmp) < self.char_upper_cutoff and not self.sl[tmp]:
                # Count the unigram, then progressively longer n-grams built
                # by prefixing the trailing window of previous tokens.
                for i in range(min([self.n_grams, 1 + len(last_tok_list)])):
                    self.token[i+1]["tcnt"] += 1
                    self.token[i+1]["gram_dict"][tmp] = 1 + self.token[i+1]["gram_dict"].get(tmp, 0)
                    if tmp not in act_set:
                        # First time this gram is seen in this document.
                        self.activities[tmp] = 1 + self.activities.get(tmp, 0)
                        act_set.add(tmp)
                    tmp = ' '.join(last_tok_list[-(i+1):] + [gram])
            last_tok_list.append(gram)
            if len(last_tok_list) > self.n_grams:
                last_tok_list.pop(0)
def _gets(self, rec_tuples, counts):
for tok in rec_tuples:
tokenFrac = tok[1]/float(counts)
activitiesFrac = self.activities[tok[0]]/float(self.acnt)
if (self.cutoff < 1 and tokenFrac > self.cutoff) or (self.cutoff >= 1 and tok[1] > self.cutoff):
yield [ tok[1], tokenFrac, self.activities[tok[0]], activitiesFrac, tok[0]]
    def get_tokens(self, n=None):
        """Yield the (up to) *n* most frequent cutoff-surviving grams for
        every n-gram size, updating self.max_term_freq as a side effect."""
        for i in range(self.n_grams):
            # Sort this size's grams by descending count; [:n] keeps all of
            # them when n is None.
            for x in self._gets(sorted(self.token[i+1]["gram_dict"].items(), key=operator.itemgetter(1), reverse=True)[:n], self.token[i+1]["tcnt"]):
                if x[0] > self.max_term_freq:
                    self.max_term_freq = x[0]
                yield x
def build_string_list(self, mixed_list, mant=6):
fmt = ["%d", "%%4.%df"%mant, "%d", "%%4.%df"%mant, "%s", "%s"]
return [ f%x for x, f in zip(mixed_list, fmt) ]
def __iter__(self):
for x in self.get_tokens():
yield x
def term_dictionary(self, file_name, co=2):
# E.g.
# list is [ count terms, count terms /uniq terms, docs appearances, doc appearances/docs ]
# {'meta': {'cutoff': 3, 'max_term_freq': 17, 'n_docs': 12, 'n_terms': 697},
# 'terms': {u'answered': [4, 0.005738880918220947, 2, 0.16666666666666666],
# u'both': [7, 0.010043041606886656, 3, 0.25],
# u'cause': [4, 0.005738880918220947, 2, 0.16666666666666666],
# ...
#
inner = {}
term_id = 0
for x in self.get_tokens():
if x[0] >= co:
inner[x[4]] = x[:4] + [term_id]
term_id += 1
res = {"terms": inner,
"meta": {"n_docs":self.acnt ,
"n_terms":self.token[1]["tcnt"] ,
"max_term_freq": self.max_term_freq ,
"cutoff":co}}
pickle.dump(res, open(file_name, "wb"))
# make the id-keyed dictionary so we can look it up later
kv = {}
for k,v in inner.items():
kv[k] = v[4]
fn = file_name.rsplit(".",1)
if len(fn) > 1:
fn = ".lookup.".join(fn)
else:
fn += ".lookup"
res = {"terms": kv,
"meta": {"n_docs":self.acnt ,
"n_terms":self.token[1]["tcnt"] ,
"max_term_freq": self.max_term_freq ,
"cutoff":co}}
pickle.dump(res, open(fn, "wb"))
def get_repr(self, n=None):
# if you want a header in the output
res = u','.join([ "total count", "percent of total", "activities count", "percent of activities","tokens", "n_gram\n"])
for x in self.get_tokens(n):
res += ','.join(self.build_string_list(x) + ["%dgrams\n"%(x[-1].count(" ") + 1)])
return res
|
{
"content_hash": "87449ac105bcb949fdcd128e3b4a68b1",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 149,
"avg_line_length": 39.786802030456855,
"alnum_prop": 0.5192651186527175,
"repo_name": "DrSkippy/Simple-n-grams",
"id": "6d55a29150056f7d8c14fa4c96a8ab9c927ad429",
"size": "7884",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simple_n_grams/simple_n_grams.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "16201"
}
],
"symlink_target": ""
}
|
"""
Summary class improves extraction.Extracted by providing
incremental load mechanism, and especially image validation.
But the main difference is that it performs the requests.
Extraction is performed gradually by parsing the HTML <head>
tag first, applying specific head extraction techniques, and
goes on to the <body> only if Summary data is not complete.
"""
from functools import reduce
import logging
import config, request, extraction, filters
from urlparse import urlparse
from url import canonicalize_url
from urlnorm import norm
from contextlib import closing
# try:
# import lxml
# parser = 'lxml'
# except:
# parser = None
# from bs4 import BeautifulSoup, Comment
# Helpers: extract the host part of a URL, and decode byte strings
# (Python 2 ``str``) while passing already-unicode values through.
site = lambda url: urlparse(url).netloc
decode = lambda mystr, encoding: \
    isinstance(mystr, str) and mystr.decode(encoding, 'ignore') or mystr


class URLError(Exception):
    # Raised when the (cleaned) URL is unusable.
    pass


class HTMLParseError(Exception):
    # Raised when the response is not parseable HTML or exceeds limits.
    pass
class Summary(object):
"Provides incremental load mechanism and validation."
def __init__(self, source_url=None):
"""
Unlike Extracted ctor, this one just sets the source_url.
Extracted data is loaded later gradually by calling extract.
"""
self._html = ""
self.titles = []
self.descriptions = []
self.images = []
self.urls = []
self.source_url = source_url
self.clean_url = self.source_url
# Non-plural properties
@property
def title(self):
"Return the best title, if any."
if self.titles:
return self.titles[0]
else:
return None
@property
def description(self):
"Return the best description, if any."
if self.descriptions:
return self.descriptions[0]
else:
return None
@property
def image(self):
"Return the best image, if any."
if self.images:
return self.images[0]
else:
return None
@property
def url(self):
"Return the best canonical url, or the cleaned source url."
if self.urls:
return self.urls[0]
else:
return self.clean_url
def _is_clear(self):
return not (self.titles or self.descriptions or self.images or self.urls)
    def _is_complete(self):
        """Truthy when every field has at least one entry.

        Note: returns the first empty list (falsy) rather than False when
        incomplete; callers only use the result in boolean context.
        """
        return self.titles and self.descriptions and self.images and self.urls and True
def _clear(self):
self.titles = []
self.descriptions = []
self.images = []
self.urls = []
def _load(self, titles=[], descriptions=[], images=[], urls=[], **kwargs):
"""
Loads extracted data into Summary.
Performs validation and filtering on-the-fly, and sets the
non-plural fields to the best specific item so far.
If GET_ALL_DATA is False, it gets only the first valid item.
"""
enough = lambda items: items # len(items) >= MAX_ITEMS
if config.GET_ALL_DATA or not enough(self.titles):
titles = filter(None, map(self._clean_text, titles))
self.titles.extend(titles)
if config.GET_ALL_DATA or not enough(self.descriptions):
descriptions = filter(None, map(self._clean_text, descriptions))
self.descriptions.extend(descriptions)
## Never mind the urls, they can be bad not worth it
# if config.GET_ALL_DATA or not enough(self.urls):
# # urls = [self._clean_url(u) for u in urls]
# urls = filter(None, map(self._clean_url, urls))
# self.urls.extend(urls)
if config.GET_ALL_DATA:
# images = [i for i in [self._filter_image(i) for i in images] if i]
images = filter(None, map(self._filter_image, images))
self.images.extend(images)
elif not enough(self.images):
for i in images:
image = self._filter_image(i)
if image:
self.images.append(image)
if enough(self.images):
break
# Picking the best item by sorting
# self.titles = sorted(self.titles, key=len)
# self.descriptions = sorted(self.descriptions, key=len, reverse=True)
# self.images = sorted(self.images, key=lambda i: sum(i.size), reverse=True)
def _clean_text(self, text):
"""
Checks for bad text like "{{ metatags.title }}" and such
"""
if text.startswith('{{') and text.endswith('}}'):
return None
return text
    def _clean_url(self, url):
        """
        Canonicalizes the url, as it is done in Scrapy.
        And keeps only USEFUL_QUERY_KEYS. It also strips the
        trailing slash to help identifying dupes.

        Returns None for urls not worth keeping: non-http, unrendered
        template placeholders, or JS-router redirects.
        """
        # TODO: Turn this into regex
        if not url.startswith('http') or url.endswith('}}') or 'nojs_router' in url:
            return None
        # Sites listed in NONCANONIC_SITES need their query params preserved.
        if site(norm(url).lower()) in config.NONCANONIC_SITES:
            clean_url = canonicalize_url(url, keep_params=True)
        else:
            clean_url = canonicalize_url(url)
        return clean_url
    def _filter_image(self, url):
        "The param is the image URL, which is returned if it passes all the filters."
        # Each filter is a callable returning the url on pass or a falsy
        # value on fail; reduce() short-circuits on the first failure
        # because ``f and g(f)`` skips g once f is falsy.
        return reduce(lambda f, g: f and g(f),
            [
                filters.AdblockURLFilter()(url),
                filters.NoImageFilter(),
                filters.SizeImageFilter(),
                filters.MonoImageFilter(),
                filters.FormatImageFilter(),
            ])
    def _get_tag(self, response, tag_name="html", encoding="utf-8"):
        """
        Iterates response content and returns the tag if found.
        If not found, the response content is fully consumed so
        self._html equals response.content, and it returns None.
        """
        def find_tag(tag_name):
            # Search the HTML accumulated so far for <tag ...>...</tag>.
            tag_start = tag_end = None
            found = lambda: \
                tag_start is not None and tag_end is not None
            html = self._html.lower()
            start = html.find("<%s" % tag_name)
            if start >= 0:
                tag_start = start
            else:
                return None  # no tag
            end = html.find("</%s>" % tag_name)
            if end > tag_start:
                # +3 accounts for the "</" and ">" around the tag name.
                tag_end = end+len(tag_name)+3
            elif consumed:
                tag_end = -1  # till the end
            if found():
                return self._html[tag_start:tag_end]
            return None

        consumed = getattr(response, 'consumed', False)
        if not consumed:
            # Lazily create (and cache on the response) the content stream so
            # a later call for another tag continues where this one stopped.
            stream = getattr(response, 'stream', None)
            if stream is None:
                stream = response.iter_content(config.CHUNK_SIZE)  # , decode_unicode=True
                response.stream = stream
            while True:
                try:
                    chunk = next(stream)
                    self._html += chunk
                    tag = find_tag(tag_name)
                    if tag:
                        return tag
                    if len(self._html) > config.HTML_MAX_BYTESIZE:
                        raise HTMLParseError('Maximum response size reached.')
                except StopIteration:
                    # Content exhausted: one final search over everything.
                    # NOTE(review): when the response was already consumed on
                    # entry, the function appears to fall through and return
                    # None without re-searching -- confirm intended.
                    response.consumed = True
                    tag = find_tag(tag_name)
                    return decode(tag, encoding)  # decode here
    def _extract(self, html, url, techniques):
        """Run the given extraction *techniques* over *html* and merge the
        extracted data into this Summary via _load()."""
        extractor = extraction.SvvenExtractor(techniques=techniques)
        extracted = extractor.extract(html, source_url=url)
        self._load(**extracted)
    def extract(self, check_url=None, http_equiv_refresh=True):
        """
        Downloads HTML <head> tag first, extracts data from it using
        specific head techniques, loads it and checks if is complete.
        Otherwise downloads the HTML <body> tag as well and loads data
        extracted by using appropriate semantic techniques.
        Eagerly calls check_url(url) if any, before parsing the HTML.
        Provided function should raise an exception to break extraction.
        E.g.: URL has been summarized before; URL points to off limits
        websites like foursquare.com, facebook.com, bitly.com and so on.
        """
        # assert self._is_clear()
        logger = logging.getLogger(__name__)
        logger.info("Extract: %s", self.clean_url)
        with closing(request.get(self.clean_url, stream=True)) as response:
            response.raise_for_status()
            # Reject non-HTML responses up front.
            mime = response.headers.get('content-type')
            if mime and not ('html' in mime.lower()):
                raise HTMLParseError('Invalid Content-Type: %s' % mime)
            # Re-clean against the final (post-redirect) response URL.
            self.clean_url = self._clean_url(response.url)
            if self.clean_url is None:
                raise URLError('Bad url: %s' % response.url)
            if check_url is not None:
                check_url(url=self.clean_url)
            encoding = config.ENCODING or response.encoding
            self._html = ""
            # Some sites need JS rendering: fetch via PhantomJS and mark the
            # response consumed so _get_tag searches the pre-fetched HTML.
            if config.PHANTOMJS_BIN and \
                site(self.clean_url) in config.PHANTOMJS_SITES:
                self._html = request.phantomjs_get(self.clean_url)
                response.consumed = True
            head = self._get_tag(response, tag_name="head", encoding=encoding)
            if http_equiv_refresh:
                # Check meta http-equiv refresh tag
                html = head or decode(self._html, encoding)
                self._extract(html, self.clean_url, [
                    "summary.techniques.HTTPEquivRefreshTags",
                ])
                new_url = self.urls and self.urls[0]
                if new_url and new_url != self.clean_url:
                    # Follow the refresh once (http_equiv_refresh=False stops
                    # a second hop / infinite loops).
                    logger.warning("Refresh: %s", new_url)
                    self._clear()
                    self.clean_url = new_url
                    return self.extract(check_url=check_url, http_equiv_refresh=False)
            if head:
                logger.debug("Got head: %s", len(head))
                self._extract(head, self.clean_url, [
                    "extraction.techniques.FacebookOpengraphTags",
                    "extraction.techniques.TwitterSummaryCardTags",
                    "extraction.techniques.HeadTags"
                ])
            else:
                logger.debug("No head: %s", self.clean_url)
            # Fall back to the <body> only when head data was insufficient.
            if config.GET_ALL_DATA or not self._is_complete():
                body = self._get_tag(response, tag_name="body", encoding=encoding)
                if body:
                    logger.debug("Got body: %s", len(body))
                    self._extract(body, self.clean_url, [
                        "extraction.techniques.HTML5SemanticTags",
                        "extraction.techniques.SemanticTags"
                    ])
                else:
                    logger.debug("No body: %s", self.clean_url)
                if not head and not body:
                    raise HTMLParseError('No head nor body tags found.')
        del self._html  # no longer needed
        # that's it
|
{
"content_hash": "80b018ab3dc8960f4127edb5fad7b66c",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 89,
"avg_line_length": 36.05573770491803,
"alnum_prop": 0.5653360007274711,
"repo_name": "svven/summary",
"id": "0e693f83c190ef8c2714ad8a1e57d3660e224b7f",
"size": "10997",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "summary/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2576"
},
{
"name": "HTML",
"bytes": "14954"
},
{
"name": "JavaScript",
"bytes": "2244"
},
{
"name": "Python",
"bytes": "43535"
}
],
"symlink_target": ""
}
|
"""SHA-1 cryptographic hash algorithm.
SHA-1_ produces the 160 bit digest of a message.
>>> from Crypto.Hash import SHA1
>>>
>>> h = SHA1.new()
>>> h.update(b'Hello')
>>> print h.hexdigest()
*SHA* stands for Secure Hash Algorithm.
This algorithm is not considered secure. Do not use it for new designs.
.. _SHA-1: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
"""
__all__ = ['new', 'block_size', 'digest_size']
from Crypto.Util.py3compat import *
def __make_constructor():
    """Return a SHA-1 constructor exposing the PyCrypto hash API
    (new / digest_size / block_size), wrapping the underlying
    implementation when it lacks the expected attributes."""
    try:
        # The sha module is deprecated in Python 2.6, so use hashlib when possible.
        from hashlib import sha1 as _hash_new
    except ImportError:
        from .sha import new as _hash_new

    h = _hash_new()
    if hasattr(h, 'new') and hasattr(h, 'name') and hasattr(h, 'digest_size') and hasattr(h, 'block_size'):
        # The module from stdlib has the API that we need. Just use it.
        return _hash_new
    else:
        # Wrap the hash object in something that gives us the expected API.
        # Private marker distinguishing the internal copy() constructor path.
        _copy_sentinel = object()

        class _SHA1(object):
            digest_size = 20
            block_size = 64
            # ASN.1 object identifier for SHA-1.
            oid = "1.3.14.3.2.26"

            def __init__(self, *args):
                if args and args[0] is _copy_sentinel:
                    # copy() path: wrap an existing underlying hash object.
                    self._h = args[1]
                else:
                    self._h = _hash_new(*args)

            def copy(self):
                return _SHA1(_copy_sentinel, self._h.copy())

            def update(self, *args):
                # Cache the bound method on the instance so subsequent calls
                # bypass this wrapper entirely (micro-optimisation).
                f = self.update = self._h.update
                f(*args)

            def digest(self):
                f = self.digest = self._h.digest
                return f()

            def hexdigest(self):
                f = self.hexdigest = self._h.hexdigest
                return f()
        # new() is just the class itself, mirroring the module-level API.
        _SHA1.new = _SHA1
        return _SHA1
# Build the public constructor once, then drop the factory helper.
new = __make_constructor()
del __make_constructor

#: The size of the resulting hash in bytes.
digest_size = new().digest_size

#: The internal block size of the hash algorithm in bytes.
block_size = new().block_size
|
{
"content_hash": "27e2d4e70210e25e62b5d2d59ecaae2e",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 107,
"avg_line_length": 31.46969696969697,
"alnum_prop": 0.5609051516610496,
"repo_name": "quoclieu/codebrew17-starving",
"id": "cd741df1d2c6eccc69924623ae618318d9d08e73",
"size": "3064",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "env/lib/python3.5/site-packages/Crypto/Hash/SHA1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "878455"
},
{
"name": "HTML",
"bytes": "510823"
},
{
"name": "JavaScript",
"bytes": "1522698"
},
{
"name": "Python",
"bytes": "5473"
}
],
"symlink_target": ""
}
|
import pytest
import json
import os.path
import jsonpickle
from fixture.application import Application
from fixture.db import DBFixture
from fixture.orm import ORMFixture
# Module-level singletons shared across the whole test session.
fixture = None   # lazily created Application
target = None    # lazily loaded configuration dict


def load_config(file):
    """Load (once) and cache the JSON config located next to this module."""
    global target
    if target is None:
        config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), file)
        with open(config_path) as fp:
            target = json.load(fp)
    return target
@pytest.fixture
def app(request):
    """Return the shared, logged-in Application, creating it on demand."""
    global fixture
    web_config = load_config(request.config.getoption("--target"))["web"]
    # Re-create the Application when it was never built or the previous one
    # is no longer valid.
    if fixture is None or not fixture.is_valid:
        fixture = Application(baseurl=web_config["baseURL"])
    # Called before every test; presumably a no-op if already logged in.
    fixture.session.ensure_login(username=web_config["username"], password=web_config["password"])
    return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
    """Session-scoped, auto-used teardown: log out and destroy the shared
    Application at the end of the test run."""
    def fin():
        fixture.session.ensure_logout()
        fixture.destroy()
    request.addfinalizer(fin)
    return fixture
@pytest.fixture(scope="session")
def db(request):
    """Session-scoped raw-DB fixture; destroyed when the session ends."""
    db_config = load_config(request.config.getoption("--target"))["db"]
    db_fixture = DBFixture(host=db_config["host"], database=db_config["name"], user=db_config["user"], password=db_config["password"])
    def fin():
        db_fixture.destroy()
    request.addfinalizer(fin)
    return db_fixture
@pytest.fixture(scope="session")
def orm(request):
    """Session-scoped ORM fixture (reuses the "db" config section).

    NOTE(review): unlike db(), no finalizer is registered here -- confirm
    whether ORMFixture needs explicit teardown.
    """
    orm_config = load_config(request.config.getoption("--target"))["db"]
    orm_fixture = ORMFixture(host=orm_config["host"], database=orm_config["name"], user=orm_config["user"], password=orm_config["password"])
    return orm_fixture
@pytest.fixture
def check_ui(request):
    """Expose the --check_ui command-line flag to tests."""
    return request.config.getoption("--check_ui")
def pytest_addoption(parser):
    """Register the suite's command-line options."""
    # --target: JSON config file, resolved relative to this module.
    parser.addoption("--target", action="store", default="target.json")
    # --check_ui: opt-in flag for slower UI verification steps.
    parser.addoption("--check_ui", action="store_true")
def pytest_generate_tests(metafunc):
    """Parametrize every test argument named json_<file> with the records
    loaded from data/<file>.json; test ids are the stringified records."""
    # Renamed the loop variable: the original called it ``fixture``, which
    # shadowed the module-level ``fixture`` (the shared Application holder).
    for fixture_name in metafunc.fixturenames:
        if fixture_name.startswith("json_"):
            test_data = load_from_json(fixture_name[5:])
            metafunc.parametrize(fixture_name, test_data, ids=[str(x) for x in test_data])
def load_from_json(file):
    """Deserialize data/<file>.json (jsonpickle format), resolved relative
    to this module's directory."""
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as data:
        return jsonpickle.decode(data.read())
|
{
"content_hash": "4f15bb115e1657b1454f23434b956d1c",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 140,
"avg_line_length": 29.987179487179485,
"alnum_prop": 0.6866182129115006,
"repo_name": "tinytoon1/python",
"id": "9a3c6d869e3a91ac2b04bfd9c0ac1bc8de0f1fd5",
"size": "2339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cucumber",
"bytes": "612"
},
{
"name": "Python",
"bytes": "35072"
},
{
"name": "RobotFramework",
"bytes": "1228"
}
],
"symlink_target": ""
}
|
# NOTE(review): distutils is deprecated and removed in Python 3.12; consider
# migrating to setuptools. Left as-is to preserve behaviour.
from distutils.core import setup

__version__ = "0.1"

setup(name="twisted_hang",
      version=__version__,
      description="Figure out if the main thread is hanging, and if so, what's causing it to hang.",
      author="Geoff Greer",
      license="MIT",
      url="https://github.com/ggreer/twisted_hang",
      download_url="https://github.com/ggreer/twisted_hang.git",
      )
|
{
"content_hash": "5925ac3b1b099c37d32da78a45e34338",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 100,
"avg_line_length": 32,
"alnum_prop": 0.6432291666666666,
"repo_name": "ggreer/twisted_hang",
"id": "a05b275af79159e9f966afa07a3abdd0bab3fd48",
"size": "407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14025"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division
import matplotlib
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import Net, RealApplianceSource, BLSTMLayer, DimshuffleLayer
from lasagne.nonlinearities import sigmoid, rectify
from lasagne.objectives import crossentropy, mse
from lasagne.init import Uniform, Normal
from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer, FeaturePoolLayer
from neuralnilm.updates import nesterov_momentum
from functools import partial
import os
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment
from neuralnilm.net import TrainingError
import __main__
# Experiment name is derived from this script's filename (e.g. "e166").
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
# Output directory for training figures/results.
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 250
# Passed as 'gradient_steps' to the LSTM layers below -- presumably the
# truncated-backprop length; confirm against neuralnilm/lasagne docs.
GRADIENT_STEPS = 100
"""
e103
Discovered that bottom layer is hardly changing. So will try
just a single lstm layer
e104
standard init
lower learning rate
e106
lower learning rate to 0.001
e108
is e107 but with batch size of 5
e109
Normal(1) for LSTM
e110
* Back to Uniform(5) for LSTM
* Using nntools eb17bd923ef9ff2cacde2e92d7323b4e51bb5f1f
RESULTS: Seems to run fine again!
e111
* Try with nntools head
* peepholes=False
RESULTS: appears to be working well. Haven't seen a NaN,
even with training rate of 0.1
e112
* n_seq_per_batch = 50
e114
* Trying looking at layer by layer training again.
* Start with single LSTM layer
e115
* Learning rate = 1
e116
* Standard inits
e117
* Uniform(1) init
e119
* Learning rate 10
# Result: didn't work well!
e120
* init: Normal(1)
* not as good as Uniform(5)
e121
* Uniform(25)
e122
* Just 10 cells
* Uniform(5)
e125
* Pre-train lower layers
e128
* Add back all 5 appliances
* Seq length 1500
* skip_prob = 0.7
e129
* max_input_power = None
* 2nd layer has Uniform(5)
* pre-train bottom layer for 2000 epochs
* add third layer at 4000 epochs
e131
e138
* Trying to replicate e82 and then break it ;)
e140
diff
e141
conv1D layer has Uniform(1), as does 2nd LSTM layer
e142
diff AND power
e144
diff and power and max power is 5900
e145
Uniform(25) for first layer
e146
gradient clip and use peepholes
e147
* try again with new code
e148
* learning rate 0.1
e150
* Same as e149 but without peepholes and using LSTM not BLSTM
e151
* Max pooling
"""
def exp_a(name):
    """Build the Net for experiment 'a': two LSTM layers over UK-DALE data
    for three appliances, with diff features enabled."""
    # 151d but training for much longer and skip prob = 0.7
    # NOTE(review): skip_probability below is actually 0.0, contradicting
    # the comment above -- confirm which is intended.
    source = RealApplianceSource(
        filename='/data/dk3810/ukdale.h5',
        appliances=[
            ['fridge freezer', 'fridge', 'freezer'],
            'hair straighteners',
            'television'
            # 'dish washer'
            # ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=None,  # [200, 100, 200, 2500, 2400],
        on_power_thresholds=[5, 5, 5, 5, 5],
        max_input_power=5900,
        min_on_durations=[60, 60, 60, 1800, 1800],
        min_off_durations=[12, 12, 12, 1800, 600],
        window=("2013-06-01", "2014-07-01"),
        seq_length=1500,
        output_one_appliance=False,
        boolean_targets=False,
        train_buildings=[1],
        validation_buildings=[1],
        skip_probability=0.0,
        n_seq_per_batch=25,
        include_diff=True
    )
    net = Net(
        experiment_name=name,
        source=source,
        save_plot_interval=250,
        loss_function=mse,
        # Gradient clipping to (-1, 1) plus Nesterov momentum.
        updates=partial(nesterov_momentum, learning_rate=.1, clip_range=(-1, 1)),
        layers_config=[
            {
                'type': LSTMLayer,
                'num_units': 50,
                'W_in_to_cell': Uniform(25),
                'gradient_steps': GRADIENT_STEPS,
                'peepholes': False
            },
            {
                'type': LSTMLayer,
                'num_units': 50,
                'W_in_to_cell': Uniform(1),
                'gradient_steps': GRADIENT_STEPS,
                'peepholes': False
            },
            {
                'type': DenseLayer,
                'num_units': 50,
                'nonlinearity': rectify
            },
            {
                # Linear output layer, one unit per target appliance.
                'type': DenseLayer,
                'num_units': source.n_outputs,
                'nonlinearity': None
            }
        ]
    )
    return net
def init_experiment(experiment):
    """Build the Net for experiment suffix *experiment* (e.g. 'a') by
    calling the module-level ``exp_<suffix>`` function with the full
    experiment name (NAME + suffix)."""
    full_exp_name = NAME + experiment
    print("***********************************")
    print("Preparing", full_exp_name, "...")
    # Replaces the original ``eval('exp_a(full_exp_name)')``: a direct
    # globals() lookup avoids eval() and fails with a clear KeyError for a
    # missing experiment function.
    exp_func = globals()['exp_{:s}'.format(experiment)]
    net = exp_func(full_exp_name)
    return net
def main():
    """Run every configured experiment, writing results under PATH."""
    for experiment in list('a'):
        full_exp_name = NAME + experiment
        path = os.path.join(PATH, full_exp_name)
        try:
            net = init_experiment(experiment)
            # epochs=None: train until interrupted or an error occurs.
            run_experiment(net, path, epochs=None)
        except KeyboardInterrupt:
            break
        except TrainingError as exception:
            print("EXCEPTION:", exception)
        except Exception as exception:
            # Broad catch so one failed experiment doesn't kill the batch;
            # drops into ipdb for interactive post-mortem debugging.
            print("EXCEPTION:", exception)
            import ipdb; ipdb.set_trace()


if __name__ == "__main__":
    main()
|
{
"content_hash": "c35958b73dce0ce9ae0ae4031af326d8",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 93,
"avg_line_length": 23.10810810810811,
"alnum_prop": 0.6210526315789474,
"repo_name": "JackKelly/neuralnilm_prototype",
"id": "d0a6b265f141b113c389ccc91ac13a33bba311b9",
"size": "5130",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/e166.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4536723"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
class CommonConfig(AppConfig):
    """App configuration for the ``mozillians.common`` Django app."""
    name = 'mozillians.common'
    label = 'common'
    def ready(self):
        # Import for side effects only: registers the app's signal handlers
        # once the app registry is fully populated.
        import mozillians.common.signals  # noqa
|
{
"content_hash": "c52c31ee0905d920d99406838764b45e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 47,
"avg_line_length": 21,
"alnum_prop": 0.6931216931216931,
"repo_name": "fxa90id/mozillians",
"id": "c9f5ae59bd02d4629efa7a3c513997685ee36100",
"size": "189",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "mozillians/common/apps.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "1986"
},
{
"name": "CSS",
"bytes": "181742"
},
{
"name": "HTML",
"bytes": "165063"
},
{
"name": "JavaScript",
"bytes": "141584"
},
{
"name": "Makefile",
"bytes": "478"
},
{
"name": "Python",
"bytes": "887164"
},
{
"name": "Shell",
"bytes": "1332"
}
],
"symlink_target": ""
}
|
"""
Custom Sphinx directives
========================
"""
import os
from docutils import statemachine
from docutils.parsers.rst import Directive, directives
class MiniGallery(Directive):
    """
    Custom directive to insert a mini-gallery
    The required argument is one or more fully qualified names of objects,
    separated by spaces. The mini-gallery will be the subset of gallery
    examples that make use of that object (from that specific namespace).
    Options:
    * `add-heading` adds a heading to the mini-gallery. If an argument is
      provided, it uses that text for the heading. Otherwise, it uses
      default text.
    * `heading-level` specifies the heading level of the heading as a single
      character. If omitted, the default heading level is `'^'`.
    """
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {'add-heading': directives.unchanged,
                   'heading-level': directives.single_char_or_unicode}
    def run(self):
        # Respect the same disabling options as the `raw` directive
        if (not self.state.document.settings.raw_enabled
                or not self.state.document.settings.file_insertion_enabled):
            raise self.warning('"%s" directive disabled.' % self.name)
        # Retrieve the backreferences directory
        config = self.state.document.settings.env.config
        backreferences_dir = config.sphinx_gallery_conf['backreferences_dir']
        # Parse the argument into the individual objects
        obj_list = self.arguments[0].split()
        # Accumulate reST source lines, then hand them to the state machine.
        lines = []
        # Add a heading if requested
        if 'add-heading' in self.options:
            heading = self.options['add-heading']
            if heading == "":
                # Option present but empty: fall back to default heading text.
                if len(obj_list) == 1:
                    heading = 'Examples using ``{}``'.format(obj_list[0])
                else:
                    heading = 'Examples using one of multiple objects'
            lines.append(heading)
            heading_level = self.options.get('heading-level', '^')
            lines.append(heading_level * len(heading))
        # Insert the backreferences file(s) using the `include` directive
        for obj in obj_list:
            path = os.path.join('/', # Sphinx treats this as the source dir
                                backreferences_dir,
                                '{}.examples'.format(obj))
            # Always remove the heading (first 5 lines) from the file
            lines.append('.. include:: {}\n :start-line: 5'.format(path))
        # Insert the end for the gallery using the `raw` directive
        lines.append('.. raw:: html\n\n <div class="sphx-glr-clear"></div>')
        # Parse the assembly of `include` and `raw` directives
        text = '\n'.join(lines)
        include_lines = statemachine.string2lines(text,
                                                  convert_whitespace=True)
        # NOTE(review): `path` here is the value left over from the last loop
        # iteration, used only as the reported source name for these lines.
        self.state_machine.insert_input(include_lines, path)
        return []
|
{
"content_hash": "6ef391d1e948b12872da14c7f6404a70",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 38.41772151898734,
"alnum_prop": 0.6042833607907743,
"repo_name": "Eric89GXL/sphinx-gallery",
"id": "ca81cb6f05407334a13a0d3bf03d1eef8a37aa70",
"size": "3035",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sphinx_gallery/directives.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3981"
},
{
"name": "Python",
"bytes": "113604"
},
{
"name": "Shell",
"bytes": "1280"
}
],
"symlink_target": ""
}
|
import os
# PATH CONFIGURATION
PROJECT_DIR = os.path.dirname(os.path.dirname(__file__))
PUBLIC_DIR = os.path.join(PROJECT_DIR, 'public')
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'plugin_manager.core.wsgi.application'
# END PATH CONFIGURATION
# DEBUG CONFIGURATION
DEBUG = False
# NOTE(review): TEMPLATE_DEBUG is True while DEBUG is False; template debug
# details can leak in production -- confirm this divergence is intentional.
TEMPLATE_DEBUG = True
# END DEBUG CONFIGURATION
# PAGINATION DEFAULT VALUE CONFIG
NUM_RESULTS_PER_PAGE = 20
# END PAGINATION DEFAULT VALUE CONFIG
# MANAGER CONFIGURATION
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# END MANAGER CONFIGURATION
# URL CONFIGURATION
ROOT_URLCONF = 'plugin_manager.core.urls'
# END URL CONFIGURATION
# GENERAL CONFIGURATION
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Location of fixtures for the project
FIXTURE_DIRS = (
    os.path.join(PROJECT_DIR, 'fixtures'),
)
# END GENERAL CONFIGURATION
# MEDIA CONFIGURATION
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PUBLIC_DIR, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# END MEDIA CONFIGURATION
# STATIC FILE CONFIGURATION
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PUBLIC_DIR, 'static')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# END STATIC FILE CONFIGURATION
SOCKETIO_ENABLED = False
# TEMPLATE CONFIGURATION
GRAPPELLI_ADMIN_TITLE = 'Admin'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or
    # "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_DIR, 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.request',
    'plugin_manager.core.context_processors.sidebar_lists',
    'sekizai.context_processors.sekizai',
)
# END TEMPLATE CONFIGURATION
# MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'stronghold.middleware.LoginRequiredMiddleware',
)
# END MIDDLEWARE CONFIGURATION
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend', # default
)
# Conventionally read by permission backends for the anonymous user row.
ANONYMOUS_USER_ID = -1
# APP CONFIGURATION
INSTALLED_APPS = (
    # Django Core
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    'django_extensions',
    'django.contrib.admin',
    # 3rd Party
    'corsheaders',
    'grappelli',
    'djcelery',
    'sekizai',
    'crispy_forms',
    'stronghold',
    'django_tables2',
    'bootstrapform',
    # Project
    'plugin_manager.accounts',
    'plugin_manager.hosts',
    'plugin_manager.launch_window',
)
# END APP CONFIGURATION
FABFILE_PATH = os.path.join(os.path.dirname(PROJECT_DIR), 'fabfile.py')
# STRONGHOLD CONFIGURATION
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
STRONGHOLD_PUBLIC_NAMED_URLS = (
    'password_reset',
    'password_reset_done',
    'password_reset_complete',
    'business_redirect_setup',
)
STRONGHOLD_PUBLIC_URLS = (
    r'^/reset/[0-9A-Za-z_\-]+/[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20}/',
    r'^/api/v1/.*',
    r'^/hosts/logs/elk*',
)
# END STRONGHOLD CONFIGURATION
# CRISPY CONFIGURATION
CRISPY_TEMPLATE_PACK = "bootstrap3"
# END CRISPY CONFIGURATION
# AUTH USER MODEL CONFIGURATION
AUTH_USER_MODEL = 'accounts.DeployUser'
# END AUTH USER MODEL CONFIGURATION
# EMAIL CONFIGURATION
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# EMAIL_HOST = 'localhost'
# EMAIL_PORT = 25
# EMAIL_USE_TLS = False
EMAIL_FROM = 'deploy@deploy.com'
# END EMAIL CONFIGURATION
# LOGGING CONFIGURATION
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
# END LOGGING CONFIGURATION
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': os.path.join(PUBLIC_DIR, '.django_cache'),
    }
}
FABRIC_TASK_CACHE_TIMEOUT = 60 * 60 * 24 # one day
# celery
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERY_TIMEZONE = 'UTC'
# elasticsearch
ELK_URL = "http://localhost"
ELK_PORT = '9200'
# cors
# TODO: replace allow-all with an explicit CORS origin white list.
CORS_ORIGIN_ALLOW_ALL = True
|
{
"content_hash": "e3e56f74c4a67abde75a7e19f3e3b113",
"timestamp": "",
"source": "github",
"line_count": 287,
"max_line_length": 79,
"avg_line_length": 27.247386759581882,
"alnum_prop": 0.708312020460358,
"repo_name": "ahharu/plugin-manager",
"id": "e8f7092cbc550106e707fb7513ffe4cca7a4f1ad",
"size": "7856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugin_manager/core/settings/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "68971"
},
{
"name": "HTML",
"bytes": "72414"
},
{
"name": "JavaScript",
"bytes": "313284"
},
{
"name": "Python",
"bytes": "138428"
}
],
"symlink_target": ""
}
|
import time
from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Slices import Slice, Slices
from PLC.Auth import Auth
from PLC.Methods.UpdateSlice import UpdateSlice
class SliceRenew(UpdateSlice):
    """
    Deprecated. See UpdateSlice.
    """
    status = "deprecated"
    roles = ['admin', 'pi', 'user']
    accepts = [
        Auth(),
        Slice.fields['name'],
        Slice.fields['expires']
    ]
    returns = Parameter(int, '1 if successful')
    def call(self, auth, slice_name, slice_expires):
        # Delegate to UpdateSlice, updating only the expiry field.
        update_fields = {'expires': slice_expires}
        return UpdateSlice.call(self, auth, slice_name, update_fields)
|
{
"content_hash": "a76cca89db4b1f4db491c2911d9c9592",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 69,
"avg_line_length": 21.87878787878788,
"alnum_prop": 0.646814404432133,
"repo_name": "dreibh/planetlab-lxc-plcapi",
"id": "531c4a7df9bdecb69676f5581976b7e6171f05e0",
"size": "722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PLC/Methods/SliceRenew.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "724"
},
{
"name": "Makefile",
"bytes": "2995"
},
{
"name": "PHP",
"bytes": "574445"
},
{
"name": "PLpgSQL",
"bytes": "2764"
},
{
"name": "Perl",
"bytes": "1350"
},
{
"name": "Python",
"bytes": "871238"
},
{
"name": "Shell",
"bytes": "31392"
}
],
"symlink_target": ""
}
|
import gunicorn
# Override the Server response header so the app does not advertise gunicorn.
# NOTE(review): masquerading as IIS is presumably deliberate obscurity --
# confirm before changing.
gunicorn.SERVER_SOFTWARE = 'Microsoft-IIS/6.0'
|
{
"content_hash": "122f9922838ff36b4ed11a19c5335864",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 46,
"avg_line_length": 31.5,
"alnum_prop": 0.7936507936507936,
"repo_name": "andreif/stasher",
"id": "edbd35894a777e02b060838b63ece0b323172abd",
"size": "63",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12543"
}
],
"symlink_target": ""
}
|
try: from collections import Counter
except: from Counter import Counter # For Python < 2.7
import traceback
import wordList
import anagram
import random
import math
import re
# Allows channel members to play an acronym-based word game.
# !jumble [min [max]] -- starts a new round, optionally setting the min and max length of words.
# !jumble end -- stops the current game.
# <guess> -- users must unjumble the given word and present their solution as a single word.
# !unjumble <word> -- lists all the acronyms of the given word.
# The possible words which may be used are taken from words.txt, and, if plugins.log has a log for
# the channel given by VOCAB_CHANNEL, tailored to match the vocabulary of that channel.
VOCAB_CHANNEL = '#calculus'
# If plugins.define.define is loaded, the definitions of any mentioned words will also be given.
# Base dictionary: every word from the bundled word list.
words = set(wordList.fromFile('plugins/jumble/words.txt'))
logged = nicks = None
# Best-effort: tailor the dictionary to the channel's logged vocabulary.
# Any failure (e.g. no log database) falls back to the plain word list.
try:
    logged = Counter(wordList.fromLog(VOCAB_CHANNEL, 'plugins/log/database'))
    nicks = set(wordList.nicksFromLog(VOCAB_CHANNEL, 'plugins/log/database'))
    words |= set(w for w in logged if logged[w] > 99) - nicks # Include nonnicks logged > 99 times.
    # The anagram finder sees the widened set; puzzle words are then narrowed.
    anagrams = anagram.Finder(words)
    words &= set(w for w in logged if logged[w] > 1) # Exclude words logged < 2 times.
except Exception as e:
    anagrams = anagram.Finder(words)
    traceback.print_exc(e)
finally:
    # The intermediate counters are only needed during setup.
    del logged, nicks
# Per-channel game instances, keyed by channel name (see chmsg).
games = {}
def chmsg(event, server):
    """Route a channel message to that channel's Jumble game instance."""
    channel = event['channel']
    nick = event['nicka']
    message = event['msg']
    def respond(text):
        server.send_msg(channel, wrap(text, server))
    game = games.get(channel)
    if game is None:
        game = games[channel] = Jumble()
    return game.said(server, message, nick, respond)
def wrap(msg, server):
    """Truncate *msg* so it fits within the server's message size limit.

    Leaves 100 characters of headroom below ``server.MAX_SIZE`` and marks
    truncated messages with a trailing ' (...)' suffix.
    """
    # Renamed from 'max' so the builtin is not shadowed.
    limit = server.MAX_SIZE - 100
    suffix = ' (...)'
    if len(msg) > limit:
        return msg[0:limit - len(suffix)] + suffix
    return msg
def define(server, word):
    """Return a dictionary definition of *word*, or None when unavailable."""
    modules = server.mod.modules
    if 'plugins.define.define' not in modules:
        return None
    plugin = modules['plugins.define.define']
    try:
        return plugin.instance.define(word)
    except IOError:
        pass
    except plugin.dictionary.ProtocolError:
        pass
class Jumble(object):
    """Per-channel state machine for the word-unjumbling game."""
    # word: the currently shuffled puzzle, or None when no round is active.
    # solutions: dictionary words that are anagrams of the current puzzle.
    # min/max: inclusive length bounds for candidate words (0 max = unbounded).
    __slots__ = 'word', 'solutions', 'min', 'max'
    def __init__(self):
        self.min = self.max = 0
        self.word = None
    def said(self, server, msg, nick, respond):
        """Dispatch one channel line to the matching game command."""
        match = re.match(r'!jumble(\s+\d+)?(\s+\d+)?\s*$', msg, re.I)
        if match: return self.call(server, respond, *match.groups())
        match = re.match(r'!jumble\s+end\s*$', msg, re.I)
        if match: return self.end(server, respond, *match.groups())
        match = re.match(r'!jumble\s.', msg, re.I)
        if match: return self.help(server, respond, *match.groups())
        match = re.match(r'!unjumble\s+([a-z]+)\s*$', msg, re.I)
        if match: return self.unjumble(server, respond, nick, *match.groups())
        # Any bare word in the channel is treated as a guess.
        match = re.match(r'\s*([a-z]+)\s*$', msg, re.I)
        if match: return self.guess(server, respond, nick, *match.groups())
    def call(self, server, respond, min, max):
        # Start a new round; regex groups may be None when bounds are omitted.
        # NOTE(review): parameters shadow the builtins min/max.
        if min != None: self.min = int(min)
        if max != None: self.max = int(max)
        self.end(server, respond)
        self.start(server, respond)
    def start(self, server, respond):
        """Pick a word within the length bounds and announce its jumble."""
        sub = set(w for w in words if self.min <= len(w) and (self.max == 0 or len(w) <= self.max))
        while len(sub):
            word = random.sample(sub, 1)[0]
            solutions = anagrams(word)
            if math.factorial(len(word)) != len(solutions):
                chars = list(word)
                # Reshuffle until the jumble is not itself a dictionary word.
                while ''.join(chars) in solutions: random.shuffle(chars)
                self.word, self.solutions = ''.join(chars), solutions
                return respond(self.word)
            else: sub.remove(word) # Every permutation of this word is in the dictionary.
        else: return respond("Failed to generate a word.") # There are no suitable words.
    def guess(self, server, respond, nick, word):
        # Ignore guesses when no round is in progress.
        if not self.word: return
        word = word.upper()
        if word in self.solutions:
            defn = define(server, word)
            if defn: respond("%s wins with %s (%s)" % (nick, word, defn))
            else: respond("%s wins with %s." % (nick, word))
            # A correct guess immediately starts the next round.
            self.start(server, respond)
    def end(self, server, respond):
        """Finish the current round (if any) and reveal the solutions."""
        if not self.word: return
        self.word = None
        if len(self.solutions) == 1:
            defn = define(server, tuple(self.solutions)[0])
            if defn: return respond('Nobody wins. Solutions: %s (%s)' %
                                    (', '.join(self.solutions), defn))
        return respond('Nobody wins. Solutions: %s.' % ', '.join(self.solutions))
    def unjumble(self, server, respond, nick, word):
        # Local 'words' deliberately shadows the module-level dictionary set.
        words = anagrams(word)
        if len(words): return respond(', '.join(words))
        return respond('No results.')
    def help(self, server, respond):
        # \002 is the IRC bold control character.
        respond('Available commands:'
                ' \002!jumble [min [max]]\002,'
                ' \002!jumble end\002,'
                ' \002!jumble help\002,'
                ' \002!unjumble <word>\002.')
|
{
"content_hash": "e89e579012c4f9feadd510404cdda312",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 99,
"avg_line_length": 40.054263565891475,
"alnum_prop": 0.6148635571898587,
"repo_name": "iogf/candocabot",
"id": "818849cd359b60a6497658d67df9cb1b17cdba86",
"size": "5167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/jumble/jumble.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "92092"
}
],
"symlink_target": ""
}
|
import unittest
from flask import Flask
from blueprints import app
class BlueprintsTestCase(unittest.TestCase):
    """Smoke tests for the blueprints demo application's routes."""
    def setUp(self):
        app.config['TESTING'] = True
    def test_index(self):
        response = app.test_client().get('/')
        self.assertIn('<h1>Index</h1>', response.data)
    def test_admin_index(self):
        response = app.test_client().get('/admin/')
        self.assertIn('<h1>Admin Index</h1>', response.data)
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "4ff1f62e9959ca11e5675cd56d6ec7b7",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 54,
"avg_line_length": 21.5,
"alnum_prop": 0.6067653276955602,
"repo_name": "devbreeze/python",
"id": "92b4a8bc7d9f79deef9136c485c970279bd12351",
"size": "473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "learn-flask/test_blueprints.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2155"
},
{
"name": "Python",
"bytes": "8530"
}
],
"symlink_target": ""
}
|
import tests.exog.test_random_exogenous as testrandexog
# Generated test driver: runs the random-exogenous scenario with arguments
# (8, 160) -- presumably the exogenous-variable count and series length;
# see tests/exog/test_random_exogenous for the actual parameter meaning.
testrandexog.test_random_exogenous( 8,160);
|
{
"content_hash": "3f1adb4c85a2f38bd432949a3efc19f0",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 55,
"avg_line_length": 25.25,
"alnum_prop": 0.8217821782178217,
"repo_name": "antoinecarme/pyaf",
"id": "abbb121f7cfb14ce8b9d00d05af731692008ef6c",
"size": "101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/exog/random/random_exog_8_160.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import asyncio
from thriftpy2.thrift import TType
from thriftpy2.protocol.exc import TProtocolException
from thriftpy2.protocol.binary import (
VERSION_MASK,
VERSION_1,
TYPE_MASK,
unpack_i8,
unpack_i16,
unpack_i32,
unpack_i64,
unpack_double,
write_message_begin,
write_val
)
from .base import TAsyncProtocolBase
@asyncio.coroutine
def read_message_begin(inbuf, strict=True):
    """Read a Thrift binary-protocol message header.

    Returns (name, message_type, seqid). A negative leading i32 carries the
    protocol version in its high bits (strict framing); a non-negative value
    is a legacy unframed header, rejected when *strict* is true.
    """
    sz = unpack_i32((yield from inbuf.read(4)))
    if sz < 0:
        version = sz & VERSION_MASK
        if version != VERSION_1:
            raise TProtocolException(
                type=TProtocolException.BAD_VERSION,
                message='Bad version in read_message_begin: %d' % (sz))
        # Strict layout: name length, name bytes, then the type from sz.
        name_sz = unpack_i32((yield from inbuf.read(4)))
        name = yield from inbuf.read(name_sz)
        name = name.decode('utf-8')
        type_ = sz & TYPE_MASK
    else:
        if strict:
            raise TProtocolException(type=TProtocolException.BAD_VERSION,
                                     message='No protocol version header')
        # Legacy layout: sz is the name length; the type follows the name.
        name = yield from inbuf.read(sz)
        type_ = unpack_i8((yield from inbuf.read(1)))
    seqid = unpack_i32((yield from inbuf.read(4)))
    return name, type_, seqid
@asyncio.coroutine
def read_field_begin(inbuf):
    """Read a struct field header; returns (field_type, field_id)."""
    field_type = unpack_i8((yield from inbuf.read(1)))
    if field_type != TType.STOP:
        field_id = unpack_i16((yield from inbuf.read(2)))
        return field_type, field_id
    # STOP marks the end of the struct; it carries no field id.
    return field_type, 0
@asyncio.coroutine
def read_list_begin(inbuf):
    """Read a list/set header; returns (element_type, size)."""
    elem_type = unpack_i8((yield from inbuf.read(1)))
    size = unpack_i32((yield from inbuf.read(4)))
    return elem_type, size
@asyncio.coroutine
def read_map_begin(inbuf):
    """Read a map header; returns (key_type, value_type, size)."""
    key_type = unpack_i8((yield from inbuf.read(1)))
    value_type = unpack_i8((yield from inbuf.read(1)))
    size = unpack_i32((yield from inbuf.read(4)))
    return key_type, value_type, size
@asyncio.coroutine
def read_val(inbuf, ttype, spec=None, decode_response=True):
    """Read one value of Thrift type *ttype* from the async buffer.

    *spec* describes element types for containers and the class for structs
    (thriftpy2 spec format). Returns the decoded Python value; an unknown
    *ttype* falls through and returns None.
    """
    if ttype == TType.BOOL:
        return bool(unpack_i8((yield from inbuf.read(1))))
    elif ttype == TType.BYTE:
        return unpack_i8((yield from inbuf.read(1)))
    elif ttype == TType.I16:
        return unpack_i16((yield from inbuf.read(2)))
    elif ttype == TType.I32:
        return unpack_i32((yield from inbuf.read(4)))
    elif ttype == TType.I64:
        return unpack_i64((yield from inbuf.read(8)))
    elif ttype == TType.DOUBLE:
        return unpack_double((yield from inbuf.read(8)))
    elif ttype == TType.STRING:
        sz = unpack_i32((yield from inbuf.read(4)))
        byte_payload = yield from inbuf.read(sz)
        # Since we cannot tell if we're getting STRING or BINARY
        # if not asked not to decode, try both
        if decode_response:
            try:
                return byte_payload.decode('utf-8')
            except UnicodeDecodeError:
                pass
        return byte_payload
    elif ttype == TType.SET or ttype == TType.LIST:
        if isinstance(spec, tuple):
            v_type, v_spec = spec[0], spec[1]
        else:
            v_type, v_spec = spec, None
        result = []
        r_type, sz = yield from read_list_begin(inbuf)
        # the v_type is useless here since we already get it from spec
        if r_type != v_type:
            # Wire type disagrees with the spec: consume and discard.
            for _ in range(sz):
                yield from skip(inbuf, r_type)
            return []
        for i in range(sz):
            result.append(
                (yield from read_val(
                    inbuf, v_type, v_spec, decode_response
                ))
            )
        return result
    elif ttype == TType.MAP:
        # spec is a pair (key_spec, value_spec); each side is either a bare
        # type code or a (type, nested_spec) tuple.
        if isinstance(spec[0], int):
            k_type = spec[0]
            k_spec = None
        else:
            k_type, k_spec = spec[0]
        if isinstance(spec[1], int):
            v_type = spec[1]
            v_spec = None
        else:
            v_type, v_spec = spec[1]
        result = {}
        sk_type, sv_type, sz = yield from read_map_begin(inbuf)
        if sk_type != k_type or sv_type != v_type:
            # Wire types disagree with the spec: consume and discard.
            for _ in range(sz):
                yield from skip(inbuf, sk_type)
                yield from skip(inbuf, sv_type)
            return {}
        for i in range(sz):
            k_val = yield from read_val(inbuf, k_type, k_spec, decode_response)
            v_val = yield from read_val(inbuf, v_type, v_spec, decode_response)
            result[k_val] = v_val
        return result
    elif ttype == TType.STRUCT:
        # spec is the struct class; instantiate and populate it in place.
        obj = spec()
        yield from read_struct(inbuf, obj, decode_response)
        return obj
@asyncio.coroutine
def read_struct(inbuf, obj, decode_response=True):
    """Populate *obj*'s attributes from the buffer per obj.thrift_spec.

    Fields unknown to the spec, or whose wire type disagrees with the spec,
    are skipped. Reads until a STOP field terminates the struct.
    """
    while True:
        f_type, fid = yield from read_field_begin(inbuf)
        if f_type == TType.STOP:
            break
        if fid not in obj.thrift_spec:
            yield from skip(inbuf, f_type)
            continue
        # Spec entries are (type, name, required) or
        # (type, name, container_spec, required).
        if len(obj.thrift_spec[fid]) == 3:
            sf_type, f_name, f_req = obj.thrift_spec[fid]
            f_container_spec = None
        else:
            sf_type, f_name, f_container_spec, f_req = obj.thrift_spec[fid]
        # it really should equal here. but since we already wasted
        # space storing the duplicate info, let's check it.
        if f_type != sf_type:
            yield from skip(inbuf, f_type)
            continue
        _buf = yield from read_val(
            inbuf, f_type, f_container_spec, decode_response)
        setattr(obj, f_name, _buf)
@asyncio.coroutine
def skip(inbuf, ftype):
    """Consume and discard one value of type *ftype* from the buffer.

    Containers and structs are skipped recursively, element by element.
    """
    if ftype == TType.BOOL or ftype == TType.BYTE:
        yield from inbuf.read(1)
    elif ftype == TType.I16:
        yield from inbuf.read(2)
    elif ftype == TType.I32:
        yield from inbuf.read(4)
    elif ftype == TType.I64:
        yield from inbuf.read(8)
    elif ftype == TType.DOUBLE:
        yield from inbuf.read(8)
    elif ftype == TType.STRING:
        # Length-prefixed payload: read the i32 length, then the bytes.
        _size = yield from inbuf.read(4)
        yield from inbuf.read(unpack_i32(_size))
    elif ftype == TType.SET or ftype == TType.LIST:
        v_type, sz = yield from read_list_begin(inbuf)
        for i in range(sz):
            yield from skip(inbuf, v_type)
    elif ftype == TType.MAP:
        k_type, v_type, sz = yield from read_map_begin(inbuf)
        for i in range(sz):
            yield from skip(inbuf, k_type)
            yield from skip(inbuf, v_type)
    elif ftype == TType.STRUCT:
        # Skip fields until the struct's STOP marker.
        while True:
            f_type, fid = yield from read_field_begin(inbuf)
            if f_type == TType.STOP:
                break
            yield from skip(inbuf, f_type)
class TAsyncBinaryProtocol(TAsyncProtocolBase):
    """Binary implementation of the Thrift protocol driver.

    Thin object-oriented wrapper binding the module-level read/write
    helpers to a single transport.
    """
    def __init__(self, trans,
                 strict_read=True, strict_write=True,
                 decode_response=True):
        TAsyncProtocolBase.__init__(self, trans)
        self.strict_read = strict_read
        self.strict_write = strict_write
        self.decode_response = decode_response
    @asyncio.coroutine
    def skip(self, ttype):
        """Discard one value of type *ttype* from the transport."""
        yield from skip(self.trans, ttype)
    @asyncio.coroutine
    def read_message_begin(self):
        """Read and return (api, message_type, seqid)."""
        header = yield from read_message_begin(
            self.trans, strict=self.strict_read)
        return header
    @asyncio.coroutine
    def read_message_end(self):
        # The binary protocol has no message trailer.
        pass
    def write_message_begin(self, name, ttype, seqid):
        """Write a message header to the transport."""
        write_message_begin(
            self.trans, name, ttype,
            seqid, strict=self.strict_write
        )
    def write_message_end(self):
        # The binary protocol has no message trailer.
        pass
    @asyncio.coroutine
    def read_struct(self, obj):
        """Populate *obj* from the transport."""
        result = yield from read_struct(
            self.trans, obj, self.decode_response)
        return result
    def write_struct(self, obj):
        """Serialize *obj* to the transport."""
        write_val(self.trans, TType.STRUCT, obj)
class TAsyncBinaryProtocolFactory(object):
    """Factory producing TAsyncBinaryProtocol instances with fixed options."""
    def __init__(self, strict_read=True, strict_write=True,
                 decode_response=True):
        self.strict_read = strict_read
        self.strict_write = strict_write
        self.decode_response = decode_response
    def get_protocol(self, trans):
        """Bind a new binary protocol to *trans* using the stored options."""
        proto = TAsyncBinaryProtocol(trans,
                                     self.strict_read,
                                     self.strict_write,
                                     self.decode_response)
        return proto
|
{
"content_hash": "f75c1383a0fb87b18ac693bb499e58bb",
"timestamp": "",
"source": "github",
"line_count": 287,
"max_line_length": 79,
"avg_line_length": 28.76306620209059,
"alnum_prop": 0.579648697758934,
"repo_name": "cloudera/hue",
"id": "fa5e3a7eb6f83962ea29063f626484cf584fbc05",
"size": "8280",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/thriftpy2-0.4.12/thriftpy2/contrib/aio/protocol/binary.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
"""Render a uops histogram, read from stdin, as a reST list-table body.

Input lines have the form ``ARCH UOPS INSTRUCTION...``; only lines whose
architecture (upper-cased) appears among the command-line arguments are
counted.
"""
import sys
from collections import defaultdict


def parse_lines(lines, architectures):
    """Group instruction mnemonics by their uop count.

    Args:
        lines: iterable of "ARCH UOPS INSTRUCTION..." strings.
        architectures: set of upper-cased architecture names to keep.

    Returns:
        dict-like mapping uop count (int) -> list of instruction strings.
    """
    # defaultdict replaces the original fixed 0..99 preallocation, which
    # raised KeyError for any instruction decoded into 100+ uops.
    histogram = defaultdict(list)
    for line in lines:
        fields = line.split()
        # Filter before parsing, so malformed rows of other arches are ignored.
        if fields[0] not in architectures:
            continue
        histogram[int(fields[1])].append(' '.join(fields[2:]))
    return histogram


def render(histogram):
    """Print the histogram as rows of a reST list-table."""
    print('* - uops')
    print('  - count')
    print('  - %')
    print('  - CPU instructions')
    total = sum(map(len, histogram.values()))
    for uops in sorted(histogram):
        instructions = histogram[uops]
        if not instructions:
            continue
        count = len(instructions)
        print('* - %d' % uops)
        print('  - %d' % count)
        print('  - %.2f' % (100.0 * count / total))
        if count < 50:
            # Small buckets list the instructions themselves, sorted.
            print('  - %s' % ', '.join(sorted(s.lower() for s in instructions)))
        else:
            print('  -')


def main():
    architectures = set(s.upper() for s in sys.argv[1:])
    render(parse_lines(sys.stdin, architectures))


if __name__ == '__main__':
    main()
|
{
"content_hash": "b5e8da8b04d990a405ee5bafed6d6ed9",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 54,
"avg_line_length": 20.78048780487805,
"alnum_prop": 0.5610328638497653,
"repo_name": "WojciechMula/toys",
"id": "492c449c6d2a7773bd3fd3fdb17b112794f444aa",
"size": "852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uops-histogram/histogram.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "595833"
},
{
"name": "C",
"bytes": "511676"
},
{
"name": "C++",
"bytes": "899067"
},
{
"name": "Gnuplot",
"bytes": "679"
},
{
"name": "Go",
"bytes": "1341"
},
{
"name": "Makefile",
"bytes": "55906"
},
{
"name": "Python",
"bytes": "175884"
},
{
"name": "Shell",
"bytes": "6347"
},
{
"name": "Vim script",
"bytes": "542"
}
],
"symlink_target": ""
}
|
__all__ = [
    'image',
    'enums',
    'types',
    'draw',
    ]
import ctypes
import atexit

# Probe the known ImageMagick shared-library names until one loads.
for dll_name in ('libMagickCore.so', 'libMagick.so', 'CORE_RL_magick_.dll'):
    try:
        lib = ctypes.CDLL(dll_name)
    except OSError:
        pass
    else:
        break
else:
    raise RuntimeError("Can't find imagemagick dll")

lib.MagickCoreGenesis(None, False)

# Declare prototypes for CPython's allocator functions before handing them
# to ImageMagick.  Without this, ctypes assumes an int return type, which
# truncates returned pointers to 32 bits on 64-bit platforms.
ctypes.pythonapi.PyMem_Malloc.restype = ctypes.c_void_p
ctypes.pythonapi.PyMem_Malloc.argtypes = [ctypes.c_size_t]
ctypes.pythonapi.PyMem_Realloc.restype = ctypes.c_void_p
ctypes.pythonapi.PyMem_Realloc.argtypes = [ctypes.c_void_p, ctypes.c_size_t]
ctypes.pythonapi.PyMem_Free.restype = None
ctypes.pythonapi.PyMem_Free.argtypes = [ctypes.c_void_p]
# Route ImageMagick's allocations through Python's memory manager.
lib.SetMagickMemoryMethods(
    ctypes.pythonapi.PyMem_Malloc,
    ctypes.pythonapi.PyMem_Realloc,
    ctypes.pythonapi.PyMem_Free)

# Tear ImageMagick down cleanly at interpreter exit.
atexit.register(lib.MagickCoreTerminus)

from .image import *
from .types import *
from .enums import *
from .animation import *
|
{
"content_hash": "424cd98ddfa233b6aacfb0c71e94c37f",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 76,
"avg_line_length": 19.84375,
"alnum_prop": 0.662992125984252,
"repo_name": "tailhook/magickpy",
"id": "85828ea5d207b575ca91339a9e74917f5ba8a35f",
"size": "635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "magickpy/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "43324"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import deepnlp
# Fetch the English POS model from GitHub on first use; required when the
# package was installed from pip without bundled models (network access).
deepnlp.download(module='pos',name='en') # download the POS pretrained models from github if installed from pip
from deepnlp import pos_tagger
tagger = pos_tagger.load_model(name = 'en') # Loading English model, lang code 'en'
# Segmentation: naive whitespace tokenization of the demo sentence.
text = "I want to see a funny movie"
words = text.split(" ")
print (" ".join(words))
# POS Tagging: predict returns (word, tag) pairs, printed one per line.
tagging = tagger.predict(words)
for (w,t) in tagging:
    pair = w + "/" + t
    print (pair)
# Expected results:
#I/nn
#want/vb
#to/to
#see/vb
#a/at
#funny/jj
#movie/nn
|
{
"content_hash": "bd31b376f2a7d6173508b79ff4c6e8b4",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 131,
"avg_line_length": 22.444444444444443,
"alnum_prop": 0.6402640264026402,
"repo_name": "rockingdingo/deepnlp",
"id": "04f2a999ac9ea474e9b984f1469b5a0df95029e3",
"size": "621",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_pos_en.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "255776"
},
{
"name": "Shell",
"bytes": "4032"
}
],
"symlink_target": ""
}
|
import numbers
import warnings
from collections import Counter
import numpy as np
import numpy.ma as ma
from scipy import sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..utils._param_validation import StrOptions
from ..utils.fixes import _mode
from ..utils.sparsefuncs import _get_median
from ..utils.validation import check_is_fitted
from ..utils.validation import FLOAT_DTYPES
from ..utils.validation import _check_feature_names_in
from ..utils._mask import _get_mask
from ..utils import _is_pandas_na
from ..utils import is_scalar_nan
def _check_inputs_dtype(X, missing_values):
    """Raise ValueError when a numerical `X` is paired with a non-numerical
    `missing_values` placeholder."""
    if _is_pandas_na(missing_values):
        # `pd.NA` is explicitly allowed as the placeholder for numerical data.
        return
    X_is_numeric = X.dtype.kind in ("f", "i", "u")
    if X_is_numeric and not isinstance(missing_values, numbers.Real):
        raise ValueError(
            "'X' and 'missing_values' types are expected to be"
            " both numerical. Got X.dtype={} and "
            " type(missing_values)={}.".format(X.dtype, type(missing_values))
        )
def _most_frequent(array, extra_value, n_repeat):
"""Compute the most frequent value in a 1d array extended with
[extra_value] * n_repeat, where extra_value is assumed to be not part
of the array."""
# Compute the most frequent value in array only
if array.size > 0:
if array.dtype == object:
# scipy.stats.mode is slow with object dtype array.
# Python Counter is more efficient
counter = Counter(array)
most_frequent_count = counter.most_common(1)[0][1]
# tie breaking similarly to scipy.stats.mode
most_frequent_value = min(
value
for value, count in counter.items()
if count == most_frequent_count
)
else:
mode = _mode(array)
most_frequent_value = mode[0][0]
most_frequent_count = mode[1][0]
else:
most_frequent_value = 0
most_frequent_count = 0
# Compare to array + [extra_value] * n_repeat
if most_frequent_count == 0 and n_repeat == 0:
return np.nan
elif most_frequent_count < n_repeat:
return extra_value
elif most_frequent_count > n_repeat:
return most_frequent_value
elif most_frequent_count == n_repeat:
# tie breaking similarly to scipy.stats.mode
return min(most_frequent_value, extra_value)
class _BaseImputer(TransformerMixin, BaseEstimator):
    """Shared machinery for all imputers.
    Implements the optional `add_indicator` behaviour: fitting a
    :class:`MissingIndicator` alongside the imputer and appending its mask
    to the imputed output.
    """
    _parameter_constraints = {
        "missing_values": [numbers.Real, numbers.Integral, str, None],
        "add_indicator": ["boolean"],
    }
    def __init__(self, *, missing_values=np.nan, add_indicator=False):
        self.missing_values = missing_values
        self.add_indicator = add_indicator
    def _fit_indicator(self, X):
        """Fit a MissingIndicator on the precomputed mask `X`."""
        if not self.add_indicator:
            self.indicator_ = None
            return
        self.indicator_ = MissingIndicator(
            missing_values=self.missing_values, error_on_new=False
        )
        self.indicator_._fit(X, precomputed=True)
    def _transform_indicator(self, X):
        """Return the indicator mask for `X`, or None when disabled.
        `X` must be the original data as passed to the imputer, since
        imputation may have been done in place.
        """
        if not self.add_indicator:
            return None
        if not hasattr(self, "indicator_"):
            raise ValueError(
                "Make sure to call _fit_indicator before _transform_indicator"
            )
        return self.indicator_.transform(X)
    def _concatenate_indicator(self, X_imputed, X_indicator):
        """Append the indicator mask to the imputed data when enabled."""
        if not self.add_indicator:
            return X_imputed
        if X_indicator is None:
            raise ValueError(
                "Data from the missing indicator are not provided. Call "
                "_fit_indicator and _transform_indicator in the imputer "
                "implementation."
            )
        # Pick the stacking routine matching the container type.
        stacker = sp.hstack if sp.issparse(X_imputed) else np.hstack
        return stacker((X_imputed, X_indicator))
    def _concatenate_indicator_feature_names_out(self, names, input_features):
        # Without an indicator there is nothing to append.
        if not self.add_indicator:
            return names
        return np.concatenate(
            [names, self.indicator_.get_feature_names_out(input_features)]
        )
    def _more_tags(self):
        return {"allow_nan": is_scalar_nan(self.missing_values)}
class SimpleImputer(_BaseImputer):
    """Univariate imputer for completing missing values with simple strategies.
    Replace missing values using a descriptive statistic (e.g. mean, median, or
    most frequent) along each column, or using a constant value.
    Read more in the :ref:`User Guide <impute>`.
    .. versionadded:: 0.20
       `SimpleImputer` replaces the previous `sklearn.preprocessing.Imputer`
       estimator which is now removed.
    Parameters
    ----------
    missing_values : int, float, str, np.nan, None or pandas.NA, default=np.nan
        The placeholder for the missing values. All occurrences of
        `missing_values` will be imputed. For pandas' dataframes with
        nullable integer dtypes with missing values, `missing_values`
        can be set to either `np.nan` or `pd.NA`.
    strategy : str, default='mean'
        The imputation strategy.
        - If "mean", then replace missing values using the mean along
          each column. Can only be used with numeric data.
        - If "median", then replace missing values using the median along
          each column. Can only be used with numeric data.
        - If "most_frequent", then replace missing using the most frequent
          value along each column. Can be used with strings or numeric data.
          If there is more than one such value, only the smallest is returned.
        - If "constant", then replace missing values with fill_value. Can be
          used with strings or numeric data.
        .. versionadded:: 0.20
           strategy="constant" for fixed value imputation.
    fill_value : str or numerical value, default=None
        When strategy == "constant", fill_value is used to replace all
        occurrences of missing_values.
        If left to the default, fill_value will be 0 when imputing numerical
        data and "missing_value" for strings or object data types.
    verbose : int, default=0
        Controls the verbosity of the imputer.
        .. deprecated:: 1.1
           The 'verbose' parameter was deprecated in version 1.1 and will be
           removed in 1.3. A warning will always be raised upon the removal of
           empty columns in the future version.
    copy : bool, default=True
        If True, a copy of X will be created. If False, imputation will
        be done in-place whenever possible. Note that, in the following cases,
        a new copy will always be made, even if `copy=False`:
        - If `X` is not an array of floating values;
        - If `X` is encoded as a CSR matrix;
        - If `add_indicator=True`.
    add_indicator : bool, default=False
        If True, a :class:`MissingIndicator` transform will stack onto output
        of the imputer's transform. This allows a predictive estimator
        to account for missingness despite imputation. If a feature has no
        missing values at fit/train time, the feature won't appear on
        the missing indicator even if there are missing values at
        transform/test time.
    Attributes
    ----------
    statistics_ : array of shape (n_features,)
        The imputation fill value for each feature.
        Computing statistics can result in `np.nan` values.
        During :meth:`transform`, features corresponding to `np.nan`
        statistics will be discarded.
    indicator_ : :class:`~sklearn.impute.MissingIndicator`
        Indicator used to add binary indicators for missing values.
        `None` if `add_indicator=False`.
    n_features_in_ : int
        Number of features seen during :term:`fit`.
        .. versionadded:: 0.24
    feature_names_in_ : ndarray of shape (`n_features_in_`,)
        Names of features seen during :term:`fit`. Defined only when `X`
        has feature names that are all strings.
        .. versionadded:: 1.0
    See Also
    --------
    IterativeImputer : Multivariate imputer that estimates values to impute for
        each feature with missing values from all the others.
    KNNImputer : Multivariate imputer that estimates missing features using
        nearest samples.
    Notes
    -----
    Columns which only contained missing values at :meth:`fit` are discarded
    upon :meth:`transform` if strategy is not `"constant"`.
    In a prediction context, simple imputation usually performs poorly when
    associated with a weak learner. However, with a powerful learner, it can
    lead to as good or better performance than complex imputation such as
    :class:`~sklearn.impute.IterativeImputer` or :class:`~sklearn.impute.KNNImputer`.
    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.impute import SimpleImputer
    >>> imp_mean = SimpleImputer(missing_values=np.nan, strategy='mean')
    >>> imp_mean.fit([[7, 2, 3], [4, np.nan, 6], [10, 5, 9]])
    SimpleImputer()
    >>> X = [[np.nan, 2, 3], [4, np.nan, 6], [10, np.nan, 9]]
    >>> print(imp_mean.transform(X))
    [[ 7. 2. 3. ]
    [ 4. 3.5 6. ]
    [10. 3.5 9. ]]
    """
    def __init__(
        self,
        *,
        missing_values=np.nan,
        strategy="mean",
        fill_value=None,
        verbose="deprecated",
        copy=True,
        add_indicator=False,
    ):
        super().__init__(missing_values=missing_values, add_indicator=add_indicator)
        self.strategy = strategy
        self.fill_value = fill_value
        self.verbose = verbose
        self.copy = copy
    def _validate_input(self, X, in_fit):
        """Validate `X`, converting it with the dtype/finiteness rules implied
        by `strategy` and `missing_values`; `in_fit` controls whether feature
        bookkeeping is reset (fit) or checked (transform)."""
        allowed_strategies = ["mean", "median", "most_frequent", "constant"]
        if self.strategy not in allowed_strategies:
            raise ValueError(
                "Can only use these strategies: {0} got strategy={1}".format(
                    allowed_strategies, self.strategy
                )
            )
        if self.strategy in ("most_frequent", "constant"):
            # If input is a list of strings, dtype = object.
            # Otherwise ValueError is raised in SimpleImputer
            # with strategy='most_frequent' or 'constant'
            # because the list is converted to Unicode numpy array
            if isinstance(X, list) and any(
                isinstance(elem, str) for row in X for elem in row
            ):
                dtype = object
            else:
                dtype = None
        else:
            # mean/median require numeric data.
            dtype = FLOAT_DTYPES
        if not in_fit and self._fit_dtype.kind == "O":
            # Use object dtype if fitted on object dtypes
            dtype = self._fit_dtype
        if _is_pandas_na(self.missing_values) or is_scalar_nan(self.missing_values):
            force_all_finite = "allow-nan"
        else:
            force_all_finite = True
        try:
            X = self._validate_data(
                X,
                reset=in_fit,
                accept_sparse="csc",
                dtype=dtype,
                force_all_finite=force_all_finite,
                copy=self.copy,
            )
        except ValueError as ve:
            # Re-raise conversion failures with a strategy-specific hint.
            if "could not convert" in str(ve):
                new_ve = ValueError(
                    "Cannot use {} strategy with non-numeric data:\n{}".format(
                        self.strategy, ve
                    )
                )
                raise new_ve from None
            else:
                raise ve
        if in_fit:
            # Use the dtype seen in `fit` for non-`fit` conversion
            self._fit_dtype = X.dtype
        _check_inputs_dtype(X, self.missing_values)
        if X.dtype.kind not in ("i", "u", "f", "O"):
            raise ValueError(
                "SimpleImputer does not support data with dtype "
                "{0}. Please provide either a numeric array (with"
                " a floating point or integer dtype) or "
                "categorical data represented either as an array "
                "with integer dtype or an array of string values "
                "with an object dtype.".format(X.dtype)
            )
        return X
    def fit(self, X, y=None):
        """Fit the imputer on `X`.
        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            Input data, where `n_samples` is the number of samples and
            `n_features` is the number of features.
        y : Ignored
            Not used, present here for API consistency by convention.
        Returns
        -------
        self : object
            Fitted estimator.
        """
        # 'deprecated' is the sentinel meaning the user never set `verbose`.
        if self.verbose != "deprecated":
            warnings.warn(
                "The 'verbose' parameter was deprecated in version "
                "1.1 and will be removed in 1.3. A warning will "
                "always be raised upon the removal of empty columns "
                "in the future version.",
                FutureWarning,
            )
        X = self._validate_input(X, in_fit=True)
        # default fill_value is 0 for numerical input and "missing_value"
        # otherwise
        if self.fill_value is None:
            if X.dtype.kind in ("i", "u", "f"):
                fill_value = 0
            else:
                fill_value = "missing_value"
        else:
            fill_value = self.fill_value
        # fill_value should be numerical in case of numerical input
        if (
            self.strategy == "constant"
            and X.dtype.kind in ("i", "u", "f")
            and not isinstance(fill_value, numbers.Real)
        ):
            raise ValueError(
                "'fill_value'={0} is invalid. Expected a "
                "numerical value when imputing numerical "
                "data".format(fill_value)
            )
        if sp.issparse(X):
            # missing_values = 0 not allowed with sparse data as it would
            # force densification
            if self.missing_values == 0:
                raise ValueError(
                    "Imputation not possible when missing_values "
                    "== 0 and input is sparse. Provide a dense "
                    "array instead."
                )
            else:
                self.statistics_ = self._sparse_fit(
                    X, self.strategy, self.missing_values, fill_value
                )
        else:
            self.statistics_ = self._dense_fit(
                X, self.strategy, self.missing_values, fill_value
            )
        return self
    def _sparse_fit(self, X, strategy, missing_values, fill_value):
        """Fit the transformer on sparse data."""
        missing_mask = _get_mask(X, missing_values)
        mask_data = missing_mask.data
        # Per-column count of zeros not stored explicitly in the CSC matrix.
        n_implicit_zeros = X.shape[0] - np.diff(X.indptr)
        statistics = np.empty(X.shape[1])
        if strategy == "constant":
            # for constant strategy, self.statistcs_ is used to store
            # fill_value in each column
            statistics.fill(fill_value)
        else:
            for i in range(X.shape[1]):
                # Slice the stored entries of column i, then drop the
                # missing ones.
                column = X.data[X.indptr[i] : X.indptr[i + 1]]
                mask_column = mask_data[X.indptr[i] : X.indptr[i + 1]]
                column = column[~mask_column]
                # combine explicit and implicit zeros
                mask_zeros = _get_mask(column, 0)
                column = column[~mask_zeros]
                n_explicit_zeros = mask_zeros.sum()
                n_zeros = n_implicit_zeros[i] + n_explicit_zeros
                if strategy == "mean":
                    s = column.size + n_zeros
                    statistics[i] = np.nan if s == 0 else column.sum() / s
                elif strategy == "median":
                    statistics[i] = _get_median(column, n_zeros)
                elif strategy == "most_frequent":
                    statistics[i] = _most_frequent(column, 0, n_zeros)
        super()._fit_indicator(missing_mask)
        return statistics
    def _dense_fit(self, X, strategy, missing_values, fill_value):
        """Fit the transformer on dense data."""
        missing_mask = _get_mask(X, missing_values)
        # Masked array hides missing entries from the column statistics.
        masked_X = ma.masked_array(X, mask=missing_mask)
        super()._fit_indicator(missing_mask)
        # Mean
        if strategy == "mean":
            mean_masked = np.ma.mean(masked_X, axis=0)
            # Avoid the warning "Warning: converting a masked element to nan."
            mean = np.ma.getdata(mean_masked)
            mean[np.ma.getmask(mean_masked)] = np.nan
            return mean
        # Median
        elif strategy == "median":
            median_masked = np.ma.median(masked_X, axis=0)
            # Avoid the warning "Warning: converting a masked element to nan."
            median = np.ma.getdata(median_masked)
            median[np.ma.getmaskarray(median_masked)] = np.nan
            return median
        # Most frequent
        elif strategy == "most_frequent":
            # Avoid use of scipy.stats.mstats.mode due to the required
            # additional overhead and slow benchmarking performance.
            # See Issue 14325 and PR 14399 for full discussion.
            # To be able access the elements by columns
            X = X.transpose()
            mask = missing_mask.transpose()
            if X.dtype.kind == "O":
                most_frequent = np.empty(X.shape[0], dtype=object)
            else:
                most_frequent = np.empty(X.shape[0])
            for i, (row, row_mask) in enumerate(zip(X[:], mask[:])):
                row_mask = np.logical_not(row_mask).astype(bool)
                row = row[row_mask]
                most_frequent[i] = _most_frequent(row, np.nan, 0)
            return most_frequent
        # Constant
        elif strategy == "constant":
            # for constant strategy, self.statistcs_ is used to store
            # fill_value in each column
            return np.full(X.shape[1], fill_value, dtype=X.dtype)
    def transform(self, X):
        """Impute all missing values in `X`.
        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The input data to complete.
        Returns
        -------
        X_imputed : {ndarray, sparse matrix} of shape \
                (n_samples, n_features_out)
            `X` with imputed values.
        """
        check_is_fitted(self)
        X = self._validate_input(X, in_fit=False)
        statistics = self.statistics_
        if X.shape[1] != statistics.shape[0]:
            raise ValueError(
                "X has %d features per sample, expected %d"
                % (X.shape[1], self.statistics_.shape[0])
            )
        # compute mask before eliminating invalid features
        missing_mask = _get_mask(X, self.missing_values)
        # Delete the invalid columns if strategy is not constant
        if self.strategy == "constant":
            valid_statistics = statistics
            valid_statistics_indexes = None
        else:
            # same as np.isnan but also works for object dtypes
            invalid_mask = _get_mask(statistics, np.nan)
            valid_mask = np.logical_not(invalid_mask)
            valid_statistics = statistics[valid_mask]
            valid_statistics_indexes = np.flatnonzero(valid_mask)
            if invalid_mask.any():
                invalid_features = np.arange(X.shape[1])[invalid_mask]
                if self.verbose != "deprecated" and self.verbose:
                    # use feature names warning if features are provided
                    if hasattr(self, "feature_names_in_"):
                        invalid_features = self.feature_names_in_[invalid_features]
                    warnings.warn(
                        "Skipping features without any observed values:"
                        f" {invalid_features}. At least one non-missing value is needed"
                        f" for imputation with strategy='{self.strategy}'."
                    )
                X = X[:, valid_statistics_indexes]
        # Do actual imputation
        if sp.issparse(X):
            if self.missing_values == 0:
                raise ValueError(
                    "Imputation not possible when missing_values "
                    "== 0 and input is sparse. Provide a dense "
                    "array instead."
                )
            else:
                # if no invalid statistics are found, use the mask computed
                # before, else recompute mask
                if valid_statistics_indexes is None:
                    mask = missing_mask.data
                else:
                    mask = _get_mask(X.data, self.missing_values)
                # Map each stored entry back to its column index, then keep
                # only the missing ones.
                indexes = np.repeat(
                    np.arange(len(X.indptr) - 1, dtype=int), np.diff(X.indptr)
                )[mask]
                X.data[mask] = valid_statistics[indexes].astype(X.dtype, copy=False)
        else:
            # use mask computed before eliminating invalid mask
            if valid_statistics_indexes is None:
                mask_valid_features = missing_mask
            else:
                mask_valid_features = missing_mask[:, valid_statistics_indexes]
            n_missing = np.sum(mask_valid_features, axis=0)
            # Each column's statistic is repeated once per missing cell.
            values = np.repeat(valid_statistics, n_missing)
            coordinates = np.where(mask_valid_features.transpose())[::-1]
            X[coordinates] = values
        X_indicator = super()._transform_indicator(missing_mask)
        return super()._concatenate_indicator(X, X_indicator)
    def inverse_transform(self, X):
        """Convert the data back to the original representation.
        Inverts the `transform` operation performed on an array.
        This operation can only be performed after :class:`SimpleImputer` is
        instantiated with `add_indicator=True`.
        Note that `inverse_transform` can only invert the transform in
        features that have binary indicators for missing values. If a feature
        has no missing values at `fit` time, the feature won't have a binary
        indicator, and the imputation done at `transform` time won't be
        inverted.
        .. versionadded:: 0.24
        Parameters
        ----------
        X : array-like of shape \
                (n_samples, n_features + n_features_missing_indicator)
            The imputed data to be reverted to original data. It has to be
            an augmented array of imputed data and the missing indicator mask.
        Returns
        -------
        X_original : ndarray of shape (n_samples, n_features)
            The original `X` with missing values as it was prior
            to imputation.
        """
        check_is_fitted(self)
        if not self.add_indicator:
            raise ValueError(
                "'inverse_transform' works only when "
                "'SimpleImputer' is instantiated with "
                "'add_indicator=True'. "
                f"Got 'add_indicator={self.add_indicator}' "
                "instead."
            )
        # Split `X` into the imputed-data part and the trailing mask columns.
        n_features_missing = len(self.indicator_.features_)
        non_empty_feature_count = X.shape[1] - n_features_missing
        array_imputed = X[:, :non_empty_feature_count].copy()
        missing_mask = X[:, non_empty_feature_count:].astype(bool)
        n_features_original = len(self.statistics_)
        shape_original = (X.shape[0], n_features_original)
        X_original = np.zeros(shape_original)
        X_original[:, self.indicator_.features_] = missing_mask
        full_mask = X_original.astype(bool)
        # Walk imputed columns and original columns in lockstep, skipping
        # original columns that were entirely missing (all-True mask).
        imputed_idx, original_idx = 0, 0
        while imputed_idx < len(array_imputed.T):
            if not np.all(X_original[:, original_idx]):
                X_original[:, original_idx] = array_imputed.T[imputed_idx]
                imputed_idx += 1
                original_idx += 1
            else:
                original_idx += 1
        # Re-insert the missing-value placeholder where the mask was set.
        X_original[full_mask] = self.missing_values
        return X_original
    def _more_tags(self):
        return {
            "allow_nan": (
                _is_pandas_na(self.missing_values) or is_scalar_nan(self.missing_values)
            )
        }
    def get_feature_names_out(self, input_features=None):
        """Get output feature names for transformation.
        Parameters
        ----------
        input_features : array-like of str or None, default=None
            Input features.
            - If `input_features` is `None`, then `feature_names_in_` is
              used as feature names in. If `feature_names_in_` is not defined,
              then the following input feature names are generated:
              `["x0", "x1", ..., "x(n_features_in_ - 1)"]`.
            - If `input_features` is an array-like, then `input_features` must
              match `feature_names_in_` if `feature_names_in_` is defined.
        Returns
        -------
        feature_names_out : ndarray of str objects
            Transformed feature names.
        """
        input_features = _check_feature_names_in(self, input_features)
        # Features whose statistic is NaN are dropped at transform time.
        non_missing_mask = np.logical_not(_get_mask(self.statistics_, np.nan))
        names = input_features[non_missing_mask]
        return self._concatenate_indicator_feature_names_out(names, input_features)
class MissingIndicator(TransformerMixin, BaseEstimator):
"""Binary indicators for missing values.
Note that this component typically should not be used in a vanilla
:class:`Pipeline` consisting of transformers and a classifier, but rather
could be added using a :class:`FeatureUnion` or :class:`ColumnTransformer`.
Read more in the :ref:`User Guide <impute>`.
.. versionadded:: 0.20
Parameters
----------
missing_values : int, float, str, np.nan or None, default=np.nan
The placeholder for the missing values. All occurrences of
`missing_values` will be imputed. For pandas' dataframes with
nullable integer dtypes with missing values, `missing_values`
should be set to `np.nan`, since `pd.NA` will be converted to `np.nan`.
features : {'missing-only', 'all'}, default='missing-only'
Whether the imputer mask should represent all or a subset of
features.
- If `'missing-only'` (default), the imputer mask will only represent
features containing missing values during fit time.
- If `'all'`, the imputer mask will represent all features.
sparse : bool or 'auto', default='auto'
Whether the imputer mask format should be sparse or dense.
- If `'auto'` (default), the imputer mask will be of same type as
input.
- If `True`, the imputer mask will be a sparse matrix.
- If `False`, the imputer mask will be a numpy array.
error_on_new : bool, default=True
If `True`, :meth:`transform` will raise an error when there are
features with missing values that have no missing values in
:meth:`fit`. This is applicable only when `features='missing-only'`.
Attributes
----------
features_ : ndarray of shape (n_missing_features,) or (n_features,)
The features indices which will be returned when calling
:meth:`transform`. They are computed during :meth:`fit`. If
`features='all'`, `features_` is equal to `range(n_features)`.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
feature_names_in_ : ndarray of shape (`n_features_in_`,)
Names of features seen during :term:`fit`. Defined only when `X`
has feature names that are all strings.
.. versionadded:: 1.0
See Also
--------
SimpleImputer : Univariate imputation of missing values.
IterativeImputer : Multivariate imputation of missing values.
Examples
--------
>>> import numpy as np
>>> from sklearn.impute import MissingIndicator
>>> X1 = np.array([[np.nan, 1, 3],
... [4, 0, np.nan],
... [8, 1, 0]])
>>> X2 = np.array([[5, 1, np.nan],
... [np.nan, 2, 3],
... [2, 4, 0]])
>>> indicator = MissingIndicator()
>>> indicator.fit(X1)
MissingIndicator()
>>> X2_tr = indicator.transform(X2)
>>> X2_tr
array([[False, True],
[ True, False],
[False, False]])
"""
_parameter_constraints = {
"missing_values": [numbers.Real, numbers.Integral, str, None],
"features": [StrOptions({"missing-only", "all"})],
"sparse": ["boolean", StrOptions({"auto"})],
"error_on_new": ["boolean"],
}
def __init__(
self,
*,
missing_values=np.nan,
features="missing-only",
sparse="auto",
error_on_new=True,
):
self.missing_values = missing_values
self.features = features
self.sparse = sparse
self.error_on_new = error_on_new
def _get_missing_features_info(self, X):
"""Compute the imputer mask and the indices of the features
containing missing values.
Parameters
----------
X : {ndarray, sparse matrix} of shape (n_samples, n_features)
The input data with missing values. Note that `X` has been
checked in :meth:`fit` and :meth:`transform` before to call this
function.
Returns
-------
imputer_mask : {ndarray, sparse matrix} of shape \
(n_samples, n_features)
The imputer mask of the original data.
features_with_missing : ndarray of shape (n_features_with_missing)
The features containing missing values.
"""
if not self._precomputed:
imputer_mask = _get_mask(X, self.missing_values)
else:
imputer_mask = X
if sp.issparse(X):
imputer_mask.eliminate_zeros()
if self.features == "missing-only":
n_missing = imputer_mask.getnnz(axis=0)
if self.sparse is False:
imputer_mask = imputer_mask.toarray()
elif imputer_mask.format == "csr":
imputer_mask = imputer_mask.tocsc()
else:
if not self._precomputed:
imputer_mask = _get_mask(X, self.missing_values)
else:
imputer_mask = X
if self.features == "missing-only":
n_missing = imputer_mask.sum(axis=0)
if self.sparse is True:
imputer_mask = sp.csc_matrix(imputer_mask)
if self.features == "all":
features_indices = np.arange(X.shape[1])
else:
features_indices = np.flatnonzero(n_missing)
return imputer_mask, features_indices
def _validate_input(self, X, in_fit):
if not is_scalar_nan(self.missing_values):
force_all_finite = True
else:
force_all_finite = "allow-nan"
X = self._validate_data(
X,
reset=in_fit,
accept_sparse=("csc", "csr"),
dtype=None,
force_all_finite=force_all_finite,
)
_check_inputs_dtype(X, self.missing_values)
if X.dtype.kind not in ("i", "u", "f", "O"):
raise ValueError(
"MissingIndicator does not support data with "
"dtype {0}. Please provide either a numeric array"
" (with a floating point or integer dtype) or "
"categorical data represented either as an array "
"with integer dtype or an array of string values "
"with an object dtype.".format(X.dtype)
)
if sp.issparse(X) and self.missing_values == 0:
# missing_values = 0 not allowed with sparse data as it would
# force densification
raise ValueError(
"Sparse input with missing_values=0 is "
"not supported. Provide a dense "
"array instead."
)
return X
def _fit(self, X, y=None, precomputed=False):
"""Fit the transformer on `X`.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Input data, where `n_samples` is the number of samples and
`n_features` is the number of features.
If `precomputed=True`, then `X` is a mask of the input data.
precomputed : bool
Whether the input data is a mask.
Returns
-------
imputer_mask : {ndarray, sparse matrix} of shape (n_samples, \
n_features)
The imputer mask of the original data.
"""
if precomputed:
if not (hasattr(X, "dtype") and X.dtype.kind == "b"):
raise ValueError("precomputed is True but the input data is not a mask")
self._precomputed = True
else:
self._precomputed = False
# Need not validate X again as it would have already been validated
# in the Imputer calling MissingIndicator
if not self._precomputed:
X = self._validate_input(X, in_fit=True)
self._n_features = X.shape[1]
missing_features_info = self._get_missing_features_info(X)
self.features_ = missing_features_info[1]
return missing_features_info[0]
def fit(self, X, y=None):
"""Fit the transformer on `X`.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Input data, where `n_samples` is the number of samples and
`n_features` is the number of features.
y : Ignored
Not used, present for API consistency by convention.
Returns
-------
self : object
Fitted estimator.
"""
self._validate_params()
self._fit(X, y)
return self
def transform(self, X):
"""Generate missing values indicator for `X`.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
The input data to complete.
Returns
-------
Xt : {ndarray, sparse matrix} of shape (n_samples, n_features) \
or (n_samples, n_features_with_missing)
The missing indicator for input data. The data type of `Xt`
will be boolean.
"""
check_is_fitted(self)
# Need not validate X again as it would have already been validated
# in the Imputer calling MissingIndicator
if not self._precomputed:
X = self._validate_input(X, in_fit=False)
else:
if not (hasattr(X, "dtype") and X.dtype.kind == "b"):
raise ValueError("precomputed is True but the input data is not a mask")
imputer_mask, features = self._get_missing_features_info(X)
if self.features == "missing-only":
features_diff_fit_trans = np.setdiff1d(features, self.features_)
if self.error_on_new and features_diff_fit_trans.size > 0:
raise ValueError(
"The features {} have missing values "
"in transform but have no missing values "
"in fit.".format(features_diff_fit_trans)
)
if self.features_.size < self._n_features:
imputer_mask = imputer_mask[:, self.features_]
return imputer_mask
def fit_transform(self, X, y=None):
    """Generate missing values indicator for `X`.

    Parameters
    ----------
    X : {array-like, sparse matrix} of shape (n_samples, n_features)
        The input data to complete.

    y : Ignored
        Not used, present for API consistency by convention.

    Returns
    -------
    Xt : {ndarray, sparse matrix} of shape (n_samples, n_features) \
            or (n_samples, n_features_with_missing)
        The missing indicator for input data. The data type of `Xt`
        will be boolean.
    """
    self._validate_params()
    mask = self._fit(X, y)
    # Restrict the mask to the selected features when only a subset of
    # columns had missing values during fit.
    if self.features_.size < self._n_features:
        mask = mask[:, self.features_]
    return mask
def get_feature_names_out(self, input_features=None):
    """Get output feature names for transformation.

    Parameters
    ----------
    input_features : array-like of str or None, default=None
        Input features.

        - If `input_features` is `None`, then `feature_names_in_` is
          used as feature names in. If `feature_names_in_` is not defined,
          then the following input feature names are generated:
          `["x0", "x1", ..., "x(n_features_in_ - 1)"]`.
        - If `input_features` is an array-like, then `input_features` must
          match `feature_names_in_` if `feature_names_in_` is defined.

    Returns
    -------
    feature_names_out : ndarray of str objects
        Transformed feature names.
    """
    names = _check_feature_names_in(self, input_features)
    # Output names are the selected input names tagged with the lowercase
    # class name, e.g. "missingindicator_age".
    tag = self.__class__.__name__.lower()
    out_names = [f"{tag}_{name}" for name in names[self.features_]]
    return np.asarray(out_names, dtype=object)
def _more_tags(self):
    # Estimator tags consumed by scikit-learn's common checks.
    return {
        # Missing values (NaN) are expected input for this transformer.
        "allow_nan": True,
        "X_types": ["2darray", "string"],
        # Output is a boolean mask, so input dtypes are never preserved.
        "preserves_dtype": [],
    }
|
{
"content_hash": "1eaef1b2b89d8e7002d8130f7b2b1a10",
"timestamp": "",
"source": "github",
"line_count": 1037,
"max_line_length": 88,
"avg_line_length": 36.80038572806172,
"alnum_prop": 0.5697290498401552,
"repo_name": "AlexandreAbraham/scikit-learn",
"id": "225ae249b2107d248ca4127b5d5b8e1bf8e05797",
"size": "38298",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sklearn/impute/_base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "42335"
},
{
"name": "C++",
"bytes": "147316"
},
{
"name": "Cython",
"bytes": "669740"
},
{
"name": "Makefile",
"bytes": "1644"
},
{
"name": "Python",
"bytes": "10469927"
},
{
"name": "Shell",
"bytes": "42620"
}
],
"symlink_target": ""
}
|
import os
import sys
import shlex
from subprocess import PIPE
from process import spawn
from .error import AnaGondaError, GoGetError
class AnaGondaContext(object):
    """Every anaGonda context must inherit from this class.

    Entering the context checks that the tool's binary (``self.binary``,
    provided by subclasses) exists, fetching it via ``go get`` otherwise.
    """

    def __init__(self, env_ctx, go_get_url):
        self.__go_get_url = go_get_url  # URL handed to `go get`
        self.__env = env_ctx  # Go environment mapping (GOROOT, GOPATH, ...)
        self._bin_found = None  # None = unchecked; True/False once checked

    def __enter__(self):
        """Check binary existence or run go get.

        Re-raises AnaGondaError when `go get` fails.
        """
        if self._bin_found is None:
            if not os.path.exists(self.binary):
                try:
                    self.go_get()
                except AnaGondaError:
                    import traceback
                    # BUG FIX: traceback.print_exc() already writes the
                    # traceback to stderr and returns None; wrapping it in
                    # print() just printed the string "None".
                    traceback.print_exc()
                    self._bin_found = False
                    raise
            else:
                self._bin_found = True

    def __exit__(self, *ext):
        """Do nothing."""

    @property
    def go(self):
        """Return the Go binary for this GOROOT."""
        if self.__env['GOROOT'] == "":
            return "go"  # pray for it being in the PATH
        return os.path.join(self.__env['GOROOT'], 'bin', 'go')

    @property
    def env(self):
        """Prepare the environ with go vars and sanitization.

        All keys/values are coerced to str before the Go variables are
        merged in on top of the current process environment.
        """
        env = {}
        curenv = os.environ.copy()
        for key in curenv:
            env[str(key)] = str(curenv[key])
        env.update(self.__env)
        return env

    def go_get(self):
        """Go get the code to execute the scoped context.

        Raises GoGetError with the decoded stderr output on failure.
        """
        args = shlex.split('{0} get {1}'.format(
            self.go, self.__go_get_url), posix=os.name != 'nt')
        go = spawn(args, stdout=PIPE, stderr=PIPE, env=self.env)
        _, err = go.communicate()
        # communicate() yields bytes; a non-empty stderr means failure.
        if err:
            if sys.version_info >= (3, 0):
                err = err.decode('utf8')
            raise GoGetError(err)
        self._bin_found = True

    def get_binary(self, binary):
        """Get a binary from the GOBIN/GOPATH.

        Returns the first existing path, or the sentinel '/not/found'.
        """
        if self.env.get('GOBIN') is not None:
            binary_path = os.path.join(self.env['GOBIN'], binary)
            if os.path.exists(binary_path):
                return binary_path
        # BUG FIX: GOPATH entries are separated by os.pathsep (';' on
        # Windows), not always ':'.
        for path in self.env['GOPATH'].split(os.pathsep):
            binary_path = os.path.join(path, 'bin', binary)
            if os.path.exists(binary_path):
                return binary_path
        return '/not/found'
|
{
"content_hash": "b516da42dee80ef0e04494e72f1217a0",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 65,
"avg_line_length": 27.087912087912088,
"alnum_prop": 0.5160243407707911,
"repo_name": "danalec/dotfiles",
"id": "9e04ea9bd191f988cdadfb29f866ccc23f135ba1",
"size": "2601",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sublime/.config/sublime-text-3/Packages/anaconda_go/plugin/handlers_go/anagonda/context/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "699"
},
{
"name": "CSS",
"bytes": "32865"
},
{
"name": "GLSL",
"bytes": "10062"
},
{
"name": "HTML",
"bytes": "4806"
},
{
"name": "JavaScript",
"bytes": "817118"
},
{
"name": "Lua",
"bytes": "34246"
},
{
"name": "PHP",
"bytes": "2263892"
},
{
"name": "Python",
"bytes": "9571271"
},
{
"name": "Ruby",
"bytes": "56701"
},
{
"name": "Shell",
"bytes": "280060"
},
{
"name": "Smarty",
"bytes": "5128"
},
{
"name": "Vim script",
"bytes": "26736"
}
],
"symlink_target": ""
}
|
"""Resource definitions for cloud platform apis."""
import enum
BASE_URL = 'https://cloudresourcemanager.googleapis.com/v1beta1/'
class Collections(enum.Enum):
    """Collections for all supported apis."""

    # Each member's value is a 4-tuple:
    # (collection_name, URI path template, flat path templates, path params).
    ORGANIZATIONS = (
        'organizations',
        'organizations/{organizationsId}',
        {},
        [u'organizationsId']
    )
    PROJECTS = (
        'projects',
        'projects/{projectId}',
        {},
        [u'projectId']
    )

    def __init__(self, collection_name, path, flat_paths, params):
        # Enum unpacks each member's tuple value into named attributes.
        self.collection_name = collection_name  # wire name of the collection
        self.path = path  # URI path template, relative to BASE_URL
        self.flat_paths = flat_paths  # alternative flat path templates
        self.params = params  # ordered path parameter names
|
{
"content_hash": "f8b1f7f2a48dcf95388d4bffe51f83ec",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 65,
"avg_line_length": 21.413793103448278,
"alnum_prop": 0.6344605475040258,
"repo_name": "KaranToor/MA450",
"id": "344226a667a90dba9d83febbad36a2a73cb45df6",
"size": "1216",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/.install/.backup/lib/googlecloudsdk/third_party/apis/cloudresourcemanager/v1beta1/resources.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
}
|
"""Implementation of gcloud genomics callsets update.
"""
from googlecloudsdk.api_lib.genomics import genomics_util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
class Update(base.UpdateCommand):
    """Updates a call set name.
    """

    @staticmethod
    def Args(parser):
        """Register flags for this command."""
        parser.add_argument(
            'id',
            help='The ID of the call set to be updated.')
        parser.add_argument(
            '--name',
            required=True,
            help='The new name of the call set.')

    def Run(self, args):
        """This is what gets called when the user runs this command.

        Args:
          args: an argparse namespace, All the arguments that were provided to this
            command invocation.

        Raises:
          HttpException: An http error response was received while executing api
              request.

        Returns:
          None
        """
        client = genomics_util.GetGenomicsClient()
        messages = genomics_util.GetGenomicsMessages()

        call_set = messages.CallSet(
            id=args.id,
            name=args.name,
            # Can't construct a callset without the variant id set, but
            # actually setting the variant id would not do anything, so
            # use a dummy value. See b/22818510.
            variantSetIds=['123'],
        )
        request = messages.GenomicsCallsetsPatchRequest(
            callSet=call_set,
            callSetId=args.id,
        )

        result = client.callsets.Patch(request)
        log.UpdatedResource('{0}, id: {1}'.format(result.name, result.id),
                            kind='call set')
        return result
|
{
"content_hash": "ea7be0d7d3d3530ea3df6e2cda5a1746",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 31.32075471698113,
"alnum_prop": 0.6295180722891566,
"repo_name": "KaranToor/MA450",
"id": "6ecd92143a3d39d06db5e25c1731bbd37e6a63b5",
"size": "2256",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/lib/surface/genomics/callsets/update.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
}
|
"""Unittests for Popup Forms functionality"""
from unittest import skip
from django import test, forms
from django.conf.urls.defaults import patterns, url
from django.http import HttpResponse
from django.shortcuts import render
import popup_forms
from django.core.urlresolvers import reverse
try:
    from django.test.utils import override_settings
except ImportError:
    # Fallback for old Django versions that lack override_settings:
    # mutate the global settings object directly.
    from django.conf import settings

    def override_settings(**kwargs):
        # NOTE(review): unlike the real decorator, this applies the settings
        # immediately and never restores them afterwards (Python 2
        # `iteritems`) — acceptable for this test module only.
        for key, value in kwargs.iteritems():
            setattr(settings, key, value)
        return lambda fn: fn
class PopupForm(forms.Form):
    """Minimal form used by the popup-form tests."""

    name = forms.CharField(max_length=10)
    email = forms.EmailField(max_length=20)

    def save(self):
        """Return the cleaned data rendered as ``"<name>, <email>"``."""
        data = self.cleaned_data
        return '{0}, {1}'.format(data['name'], data['email'])
def index(request):
    """Trivial landing view used as a baseline in the tests."""
    return HttpResponse('Hello, World!')
def render_form(request):
    """Render the template that contains the popup form markup."""
    return render(request, 'popup_forms_test/page.html')
@popup_forms.handler
def process_form(request):
    """Validate the popup form: reopen it on errors, close it otherwise."""
    if request.method != 'POST':
        # Non-POST access simply closes the popup.
        return popup_forms.CloseFormResponse(request)
    form = PopupForm(request.POST)
    if form.is_valid():
        request.session['stored_data'] = form.save()
        return popup_forms.CloseFormResponse(request, reverse('success'))
    # Invalid data: re-open the popup with errors highlighted.
    return popup_forms.OpenFormResponse(request, form)
def success(request):
    """Show the stored form result, removing it from the session."""
    return HttpResponse(request.session.pop('stored_data', 'No data'))
# URL routes used by the test client (old-style `patterns` API).
urlpatterns = patterns('',
    url(r'^$', index, name='index'),
    url(r'^render_form/$', render_form, name='render_form'),
    url(r'^process_form/$', process_form, name='process_form'),
    url(r'^success/$', success, name='success'),
)
@override_settings(POPUP_FORMS=('popup_forms.tests.PopupForm',))
class TestPopupForm(test.TestCase):
    """Unit-testing popup forms"""

    # Route requests through this module's own urlpatterns.
    urls = 'popup_forms.tests'

    def test_render_form(self):
        """Form should be rendered by `popup_form` template tag"""
        response = self.client.get('/render_form/')
        self.assertContains(response, '<form method="post" action="/process_form/">')
        self.assertContains(response, '<a href="/process_form/" id="popup_link_1"')
        # The popup starts hidden until the link is clicked.
        self.assertContains(response, 'style="display:none"')
        self.assertContains(response, '<input id="id_name" type="text" '
                            'name="name" maxlength="10" />')
        self.assertContains(response, '<input id="id_email" type="text" '
                            'name="email" maxlength="20" />')

    @skip('TODO: Write test!')
    def test_render_form_kwargs(self):
        """Extra Keyword arguments should be passed to form when instantiating it"""
        pass

    def test_submit_form(self):
        """Form processing view should redirect to success page after submit"""
        response = self.client.post('/process_form/',
                                    data={'name': 'David', 'email': 'avsd05@gmail.com'},
                                    HTTP_REFERER='/render_form/', follow=True)
        self.assertRedirects(response, '/success/')
        self.assertContains(response, 'David, avsd05@gmail.com')

    def test_error_in_form(self):
        """Form should be re-populated on the same page with errors highlighted"""
        response = self.client.post('/process_form/',
                                    data={'name': 'David', 'email': 'wrongemail'},
                                    HTTP_REFERER='/render_form/', follow=True)
        # Invalid submission returns to the referrer with the popup open.
        self.assertRedirects(response, '/render_form/')
        self.assertNotContains(response, 'style="display:none"')
        self.assertContains(response, 'Enter a valid e-mail address.')
        self.assertContains(response, '<input id="id_name" type="text" '
                            'name="name" value="David" maxlength="10" />')
        self.assertContains(response, '<input id="id_email" type="text" '
                            'name="email" value="wrongemail" maxlength="20" />')
# Placeholder test case: intentionally skipped until it is written.
@skip('TODO: Write test!')
class TestTokenVarExtractor(test.TestCase):
    """Unittest for TokenVarExtractor """
    pass
|
{
"content_hash": "39f8fc0832d8d649722090db32d0608c",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 85,
"avg_line_length": 36.154545454545456,
"alnum_prop": 0.6401810409856676,
"repo_name": "joinourtalents/django-popup-forms",
"id": "ffc8bfc8606a8125d158fb8d3e15bb2359132612",
"size": "3977",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "popup_forms/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "1481"
},
{
"name": "Python",
"bytes": "25582"
}
],
"symlink_target": ""
}
|
from app import app, cfg, error, engine, create_session, abort
from common import types_handler
from sqlalchemy import exc, Table, MetaData
from sqlalchemy.orm import mapper
from simplejson import dumps as sdumps
import json
log = app.logger
def all_warehouse():
''' scan warehouse table '''
result = None
try:
connection = create_session(engine)
warehouse_ = Table('warehouse', MetaData(engine), autoload=True)
result = sdumps(connection.query(warehouse_).all())
connection.close()
except (Exception, ValueError, exc.SQLAlchemyError), e:
log.error(e)
log.error(error['db-002'])
if result == None:
log.error(error['wh-001'])
abort(500)
return result
def find_warehouse(warehouse):
''' query warehouse by name '''
if warehouse == '':
log.error(error['dic-001'])
abort(500)
result = None
try:
connection = create_session(engine)
warehouse_ = Table('warehouse', MetaData(engine), autoload=True)
query_string = connection.query(
warehouse_
).filter(
warehouse_.c.name.like(
'%%%s%%' % (warehouse.upper().encode('utf-8').replace('*','%'))
)
)
result = query_string.all()
connection.close()
log.debug(result)
except (Exception, ValueError, exc.SQLAlchemyError), e:
log.error(e)
log.error(error['db-002'])
if result == None:
log.error(error['wh-001'])
abort(500)
return sdumps(result)
def insert_warehouse(warehouse):
    ''' add/update warehouse '''
    # Reject empty payloads and negative ids outright.
    if warehouse == '':
        log.error(error['wh-003'])
        abort(500)
    if warehouse['warehouse_id'] < 0:
        log.error(error['wh-004'])
        abort(500)
    # NOTE(review): substring membership against each serialized row —
    # presumably a duplicate-name guard; confirm it cannot reject distinct
    # names that merely contain the new name.
    if any(warehouse['name'] in warehouse_saved for warehouse_saved in json.loads(all_warehouse())):
        log.error(error['wh-005'])
        abort(500)
    # Map an ad-hoc class onto the warehouse table so merge() can be used.
    class Warehouse(object): pass
    mapper(Warehouse, Table('warehouse', MetaData(engine), autoload=True))
    new_warehouse = Warehouse()
    if warehouse['warehouse_id']:
        new_warehouse.warehouse_id = warehouse['warehouse_id']
    # Names are stored upper-cased as UTF-8 byte strings (Python 2).
    new_warehouse.name = warehouse['name'].upper().encode('utf-8')
    try:
        log.debug(new_warehouse)
        connection = create_session(engine)
        # merge() inserts or updates depending on the primary key.
        connection.merge(new_warehouse)
        connection.commit()
        connection.close()
        return '200'
    except (Exception, ValueError, exc.SQLAlchemyError), e:
        connection.rollback()
        connection.close()
        log.error(e)
        log.error(error['wh-002'])
        abort(500)
|
{
"content_hash": "26f79f60e00cdf8b907a2a19013eef42",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 100,
"avg_line_length": 32.31707317073171,
"alnum_prop": 0.6064150943396226,
"repo_name": "monchitos82/shop2",
"id": "bcaa4347d0e4309e03c43ecb21c82d278b034a72",
"size": "2674",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/app/controls/warehouse.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17883"
},
{
"name": "Gherkin",
"bytes": "20664"
},
{
"name": "HTML",
"bytes": "113565"
},
{
"name": "JavaScript",
"bytes": "193209"
},
{
"name": "Python",
"bytes": "111779"
},
{
"name": "Shell",
"bytes": "78"
}
],
"symlink_target": ""
}
|
"""
Support for displaying collected data over SNMP.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.snmp/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.helpers.entity import Entity
from homeassistant.const import (CONF_HOST, CONF_PLATFORM, CONF_NAME,
CONF_PORT, ATTR_UNIT_OF_MEASUREMENT)
from homeassistant.util import Throttle
REQUIREMENTS = ['pysnmp==4.3.2']

_LOGGER = logging.getLogger(__name__)

# Defaults used when the optional configuration keys are absent.
DEFAULT_NAME = "SNMP"
DEFAULT_COMMUNITY = "public"
DEFAULT_PORT = "161"

# Platform-specific configuration keys.
CONF_COMMUNITY = "community"
CONF_BASEOID = "baseoid"

# Validation schema for this sensor platform's configuration block.
PLATFORM_SCHEMA = vol.Schema({
    vol.Required(CONF_PLATFORM): 'snmp',
    vol.Optional(CONF_NAME): vol.Coerce(str),
    vol.Required(CONF_HOST): vol.Coerce(str),
    vol.Optional(CONF_PORT): vol.Coerce(int),
    vol.Optional(CONF_COMMUNITY): vol.Coerce(str),
    vol.Required(CONF_BASEOID): vol.Coerce(str),
    vol.Optional(ATTR_UNIT_OF_MEASUREMENT): vol.Coerce(str),
})

# Return cached results if last scan was less then this time ago.
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
# pylint: disable=too-many-locals
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the SNMP sensor."""
    from pysnmp.hlapi import (getCmd, CommunityData, SnmpEngine,
                              UdpTransportTarget, ContextData, ObjectType,
                              ObjectIdentity)

    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT, DEFAULT_PORT)
    community = config.get(CONF_COMMUNITY, DEFAULT_COMMUNITY)
    baseoid = config.get(CONF_BASEOID)

    # Probe the device once so that obvious configuration errors fail fast.
    errindication, _, _, _ = next(
        getCmd(SnmpEngine(),
               CommunityData(community, mpModel=0),
               UdpTransportTarget((host, port)),
               ContextData(),
               ObjectType(ObjectIdentity(baseoid))))

    if errindication:
        _LOGGER.error('Please check the details in the configuration file')
        return False
    else:
        data = SnmpData(host, port, community, baseoid)
        # Consistency fix: use the schema constants (CONF_NAME /
        # ATTR_UNIT_OF_MEASUREMENT) instead of raw strings so the lookups
        # match the keys validated by PLATFORM_SCHEMA.
        add_devices([SnmpSensor(data,
                                config.get(CONF_NAME, DEFAULT_NAME),
                                config.get(ATTR_UNIT_OF_MEASUREMENT))])
class SnmpSensor(Entity):
    """Representation of a SNMP sensor."""

    def __init__(self, data, name, unit_of_measurement):
        """Initialize the sensor."""
        self.data = data
        self._state = None
        self._name = name
        self._unit_of_measurement = unit_of_measurement
        # Fetch an initial value immediately.
        self.update()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    def update(self):
        """Get the latest data and updates the states."""
        # Delegate polling to the (throttled) SnmpData helper.
        self.data.update()
        self._state = self.data.value
class SnmpData(object):
    """Get the latest data and update the states."""

    # pylint: disable=too-few-public-methods
    def __init__(self, host, port, community, baseoid):
        """Initialize the data object."""
        self._host = host
        self._port = port
        self._community = community
        self._baseoid = baseoid
        self.value = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from the remote SNMP capable host."""
        from pysnmp.hlapi import (getCmd, CommunityData, SnmpEngine,
                                  UdpTransportTarget, ContextData, ObjectType,
                                  ObjectIdentity)

        request = getCmd(SnmpEngine(),
                         CommunityData(self._community, mpModel=0),
                         UdpTransportTarget((self._host, self._port)),
                         ContextData(),
                         ObjectType(ObjectIdentity(self._baseoid)))
        errindication, errstatus, errindex, restable = next(request)

        if errindication:
            _LOGGER.error("SNMP error: %s", errindication)
            return
        if errstatus:
            _LOGGER.error('SNMP error: %s at %s', errstatus.prettyPrint(),
                          errindex and restable[-1][int(errindex) - 1] or '?')
            return
        # Keep the value of the last returned row.
        for row in restable:
            self.value = row[-1]
|
{
"content_hash": "e22f8531bc9f01f464a51ced5479f6af",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 78,
"avg_line_length": 33.59701492537314,
"alnum_prop": 0.609729009329187,
"repo_name": "deisi/home-assistant",
"id": "59730624a114e52e96f770b37816cd077a40eb8d",
"size": "4502",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "homeassistant/components/sensor/snmp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1296753"
},
{
"name": "JavaScript",
"bytes": "11038"
},
{
"name": "Python",
"bytes": "2720476"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "6430"
}
],
"symlink_target": ""
}
|
import os
import re
from flask import request
import psycopg2
from sqlalchemy import create_engine, types
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.pool import NullPool
from sqlalchemy.ext.declarative import declarative_base
from psycopg2.extensions import adapt, register_adapter, AsIs
# Engine configured from the DB_CONN environment variable (raises KeyError
# when the variable is unset).
engine = create_engine(os.environ['DB_CONN'], convert_unicode=True)
# Thread-local session factory; autocommit/autoflush are disabled so callers
# control transaction boundaries explicitly.
session = scoped_session(sessionmaker(bind=engine,
                                      autocommit=False,
                                      autoflush=False))
# Declarative base with a convenience `query` property bound to the session.
Base = declarative_base()
Base.query = session.query_property()
|
{
"content_hash": "d82d9060bca4ec0dded69b882ac2ae1e",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 67,
"avg_line_length": 38.3125,
"alnum_prop": 0.7177814029363785,
"repo_name": "apanella/ContiguousBuildings",
"id": "01248cb5bd6c5e448ec3e13934c27d637d21eedc",
"size": "613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ctgbdg/database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6714"
}
],
"symlink_target": ""
}
|
import unittest
from cStringIO import StringIO
from csvkit.convert import fixed
class TestFixed(unittest.TestCase):
    # Tests for csvkit's fixed-width-to-CSV conversion (Python 2: cStringIO).

    def test_fixed(self):
        # Full-file conversion must match the pre-converted fixture.
        with open('examples/testfixed', 'r') as f:
            with open('examples/testfixed_schema.csv', 'r') as schema:
                output = fixed.fixed2csv(f, schema)

        with open('examples/testfixed_converted.csv', 'r') as f:
            self.assertEqual(f.read(), output)

    def test_fixed_streaming(self):
        # Writing into a caller-provided stream gives identical output.
        with open('examples/testfixed', 'r') as f:
            with open('examples/testfixed_schema.csv', 'r') as schema:
                output = StringIO()
                fixed.fixed2csv(f, schema, output=output)
                output = output.getvalue()

        with open('examples/testfixed_converted.csv', 'r') as f:
            self.assertEqual(f.read(), output)

    def test_schema_decoder_init(self):
        # Column positions are resolved from the header row.
        rd = fixed.SchemaDecoder(['column', 'start', 'length'])
        self.assertEqual(1,rd.start)
        self.assertEqual(2,rd.length)
        self.assertEqual(0,rd.column)

    def test_schema_decoder_in_action(self):
        # A schema whose first start value is '0' is treated as zero-based.
        rd = fixed.SchemaDecoder(['comment', 'start', 'length', 'column'])
        (column, start, length) = rd(['This is a comment','0','1','column_name'])
        self.assertEqual(False,rd.one_based)
        self.assertEqual('column_name',column)
        self.assertEqual(0, start)
        self.assertEqual(1, length)
        (column, start, length) = rd(['This is another comment','1','5','column_name2'])
        self.assertEqual(False,rd.one_based)
        self.assertEqual('column_name2',column)
        self.assertEqual(1, start)
        self.assertEqual(5, length)
        (column, start, length) = rd(['yet another comment','9','14','column_name3'])
        self.assertEqual(False,rd.one_based)
        self.assertEqual('column_name3',column)
        self.assertEqual(9, start)
        self.assertEqual(14, length)

    def test_one_based_row_decoder(self):
        # A schema starting at '1' is detected as one-based: starts shift by 1.
        rd = fixed.SchemaDecoder(['column','start','length'])
        (column, start, length) = rd(['LABEL', '1', '5' ])
        self.assertEqual(True,rd.one_based)
        self.assertEqual('LABEL',column)
        self.assertEqual(0, start)
        self.assertEqual(5, length)
        (column, start, length) = rd(['LABEL2', '6', '15' ])
        self.assertEqual('LABEL2',column)
        self.assertEqual(5, start)
        self.assertEqual(15, length)

    def test_schematic_line_parser(self):
        # Parser splits fixed-width rows per the inline CSV schema and
        # strips surrounding whitespace from each field.
        schema = """column,start,length
foo,1,5
bar,6,2
baz,8,5"""
        f = StringIO(schema)
        parser = fixed.FixedWidthRowParser(f)
        self.assertEqual('foo',parser.headers[0])
        self.assertEqual('bar',parser.headers[1])
        self.assertEqual('baz',parser.headers[2])
        parsed = parser.parse("111112233333")
        self.assertEqual('11111',parsed[0])
        self.assertEqual('22',parsed[1])
        self.assertEqual('33333',parsed[2])
        parsed = parser.parse(" 1 2 3")
        self.assertEqual('1',parsed[0])
        self.assertEqual('2',parsed[1])
        self.assertEqual('3',parsed[2])
        parsed = parser.parse("1 1 233 3")
        self.assertEqual('1 1',parsed[0])
        self.assertEqual('2',parsed[1])
        self.assertEqual('33 3',parsed[2])
|
{
"content_hash": "f364a5a5e8a94b780cc464b67b3eff6a",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 88,
"avg_line_length": 36.75824175824176,
"alnum_prop": 0.5907324364723467,
"repo_name": "moradology/csvkit",
"id": "4ff91c2d5f5a28421009ee1d31ab62a3c1c883d0",
"size": "3345",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/test_convert/test_fixed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Prolog",
"bytes": "501000"
},
{
"name": "Python",
"bytes": "226716"
}
],
"symlink_target": ""
}
|
"""Plugin utilities."""
import logging
import os
import socket
import zope.component
from certbot import interfaces
from certbot import util
try:
import psutil
USE_PSUTIL = True
except ImportError:
USE_PSUTIL = False
logger = logging.getLogger(__name__)
RENEWER_EXTRA_MSG = (
" For automated renewal, you may want to use a script that stops"
" and starts your webserver. You can find an example at"
" https://certbot.eff.org/docs/using.html#renewal ."
" Alternatively you can use the webroot plugin to renew without"
" needing to stop and start your webserver.")
def path_surgery(restart_cmd):
    """Attempt to perform PATH surgery to find restart_cmd

    Mitigates https://github.com/certbot/certbot/issues/1833

    :param str restart_cmd: the command that is being searched for in the PATH

    :returns: True if the operation succeeded, False otherwise

    """
    dirs = ("/usr/sbin", "/usr/local/bin", "/usr/local/sbin")
    path = os.environ["PATH"]
    added = []
    for d in dirs:
        if d not in path:
            path += os.pathsep + d
            added.append(d)

    # Idiom fix: test the list's truthiness directly instead of any() over
    # strings (the entries are fixed non-empty strings, so behavior is
    # unchanged — just clearer intent).
    if added:
        logger.debug("Can't find %s, attempting PATH mitigation by adding %s",
                     restart_cmd, os.pathsep.join(added))
        os.environ["PATH"] = path

    if util.exe_exists(restart_cmd):
        return True
    else:
        expanded = " expanded" if added else ""
        logger.warning("Failed to find %s in%s PATH: %s", restart_cmd,
                       expanded, path)
        return False
def already_listening(port, renewer=False):
    """Check if a process is already listening on the port.

    If so, also tell the user via a display notification.

    .. warning::
        On some operating systems, this function can only usefully be
        run as root.

    :param int port: The TCP port in question.
    :returns: True or False.
    """
    # Prefer the richer psutil-based check when psutil is importable.
    if not USE_PSUTIL:
        logger.debug("Psutil not found, using simple socket check.")
        return already_listening_socket(port, renewer=renewer)
    return already_listening_psutil(port, renewer=renewer)
def already_listening_socket(port, renewer=False):
    """Simple socket based check to find out if port is already in use

    :param int port: The TCP port in question.
    :returns: True or False
    """
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        try:
            probe.bind(("", port))
        except socket.error:
            # bind() failing means some other process owns the port.
            extra = RENEWER_EXTRA_MSG if renewer else ""
            display = zope.component.getUtility(interfaces.IDisplay)
            display.notification(
                "Port {0} is already in use by another process. This will "
                "prevent us from binding to that port. Please stop the "
                "process that is populating the port in question and try "
                "again. {1}".format(port, extra), height=13)
            return True
        finally:
            probe.close()
    except socket.error:
        # Could not even create the probe socket; treat the port as free.
        pass
    return False
def already_listening_psutil(port, renewer=False):
    """Psutil variant of the open port check

    :param int port: The TCP port in question.
    :returns: True or False.
    """
    try:
        net_connections = psutil.net_connections()
    except psutil.AccessDenied as error:
        logger.info("Access denied when trying to list network "
                    "connections: %s. Are you root?", error)
        # this function is just a pre-check that often causes false
        # positives and problems in testing (c.f. #680 on Mac, #255
        # generally); we will fail later in bind() anyway
        return False

    # PIDs of every TCP socket in LISTEN state on the requested port.
    listeners = [conn.pid for conn in net_connections
                 if conn.status == 'LISTEN' and
                 conn.type == socket.SOCK_STREAM and
                 conn.laddr[1] == port]
    try:
        if listeners and listeners[0] is not None:
            # conn.pid may be None if the current process doesn't have
            # permission to identify the listening process!  Additionally,
            # listeners may have more than one element if separate
            # sockets have bound the same port on separate interfaces.
            # We currently only have UI to notify the user about one
            # of them at a time.
            pid = listeners[0]
            name = psutil.Process(pid).name()
            display = zope.component.getUtility(interfaces.IDisplay)
            extra = ""
            if renewer:
                extra = RENEWER_EXTRA_MSG
            display.notification(
                "The program {0} (process ID {1}) is already listening "
                "on TCP port {2}. This will prevent us from binding to "
                "that port. Please stop the {0} program temporarily "
                "and then try again.{3}".format(name, pid, port, extra),
                height=13)
            return True
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        # Perhaps the result of a race where the process could have
        # exited or relinquished the port (NoSuchProcess), or the result
        # of an OS policy where we're not allowed to look up the process
        # name (AccessDenied).
        pass
    return False
|
{
"content_hash": "5d91d41d9e32226a7a570f455a07abcc",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 78,
"avg_line_length": 34.19871794871795,
"alnum_prop": 0.6118088097469541,
"repo_name": "jtl999/certbot",
"id": "b97ca1afd9423f9fe36ab27b30ab5a37cebee7fe",
"size": "5335",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "certbot/plugins/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "62302"
},
{
"name": "Augeas",
"bytes": "5245"
},
{
"name": "Batchfile",
"bytes": "35005"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37245"
},
{
"name": "Nginx",
"bytes": "118585"
},
{
"name": "Python",
"bytes": "1477643"
},
{
"name": "Shell",
"bytes": "176838"
},
{
"name": "Standard ML",
"bytes": "256"
}
],
"symlink_target": ""
}
|
import re
import pyparsing as pp
# Entry class responsible for replacing all mentions of the technology for #0#/#1#
class TechRemover:
    """Replaces every mention of a technology macro inside C-preprocessor
    #if/#elif conditions with the literal tokens #0#/#1#, then tries to
    simplify each condition (and finally clean the file) in place."""

    def __init__(self, code, technology, alwaysTrue, file, log):
        self.code = code  # list of source lines, edited in place
        self.tech = technology  # name of the macro being removed
        self.log = log  # optional log file object (may be None)
        self.alwaysTrue = alwaysTrue  # treat the macro as defined/true?
        self.anyChange = False  # set once any line was rewritten
        self.simplifier = ExpressionSimplifier()
        # Matches `#if`/`#elif`/`#ifdef`/`#ifndef` lines, capturing the
        # directive prefix, the condition text, and any trailing // comment.
        self.rep = re.compile(r"^(?P<start>[ \t]*#[ \t]*(?:el)?if(?:n)?(?:def)?[ \t]+)(?P<inside>[^/\n]+)(?P<end>[ \t]*(?://.*)?\n)$")
        self.file = file  # file name, used only in error messages
        #defined and not defined
        self.rgx_notdefined = re.compile('![ \t]*defined[ \t]+'+self.tech+'[ \t]')
        self.rgx_notdefined_paran = re.compile('![ \t]*defined[ \t]*\('+self.tech+'\)[ \t]')
        self.rgx_defined = re.compile('defined[ \t]+'+self.tech+'[ \t]')
        self.rgx_defined_param = re.compile('defined[ \t]*\([ \t]*'+self.tech+'[ \t]*\)[ \t]')
        #normal
        self.rgx_not_normal = re.compile('![ \t]*'+self.tech+'[ \t]')
        self.rgx_normal = re.compile('[ \t]+'+self.tech+'[ \t]')
        self.rgx_normal_start = re.compile('^'+self.tech+'[ \t]')

    def replaceTechInside(self,text):
        """Substitute each macro occurrence with #0#/#1# (negated forms get
        the opposite value of `alwaysTrue`)."""
        #add dummy space at the end to more easily deal with endings and simplify the regexes
        dummy = text+' '
        #defined and not defined
        dummy = self.rgx_notdefined.sub(' #%d# '%(not self.alwaysTrue), dummy)
        dummy = self.rgx_notdefined_paran.sub(' #%d# '%(not self.alwaysTrue), dummy)
        dummy = self.rgx_defined.sub(' #%d# '%(self.alwaysTrue), dummy)
        dummy = self.rgx_defined_param.sub(' #%d# '%(self.alwaysTrue), dummy)
        #normal
        dummy = self.rgx_not_normal.sub(' #%d# '%(not self.alwaysTrue), dummy)
        dummy = self.rgx_normal.sub(' #%d# '%(self.alwaysTrue), dummy)
        dummy = self.rgx_normal_start.sub('#%d# '%(self.alwaysTrue), dummy)
        #remove the dummy space
        return dummy[:-1]

    def replaceAllMentions(self,cline,lnumber):
        """If line `lnumber` is an #if/#elif directive mentioning the macro,
        rewrite it in self.code (simplifying the condition when possible)."""
        findifelif = self.rep.search(cline)
        if(findifelif!=None):
            start = findifelif.group('start')
            insides = findifelif.group('inside')
            end = '' if findifelif.group('end') is None else findifelif.group('end')
            if(not self.log is None): self.log.write('Found (el)if(def) on line %d replacing tech:%s\n\tinsides:%s' % (lnumber,cline,insides))
            changed = self.replaceTechInside(insides)
            if(changed!=insides):
                self.anyChange = True
                if(not self.log is None): self.log.write('\t Tech replaced:%s' % start + changed + end)
                try:
                    changed = self.simplifier.process(changed)
                    if(self.simplifier.anyChange and not self.log is None): self.log.write('\t Simplifed to:%s' % start + changed + end)
                    self.simplifier.reset()
                    self.code[lnumber] = start + changed + end
                except Exception as inst:
                    # Simplification failed: keep the original line and ask
                    # for a manual edit.
                    print('!!!There was an error simplifying line %d in %s:\n\tOriginal:%s\n\tChanged:%s\n\tException:%s\nPlease manualy edit it.' % (lnumber, self.file, cline, changed, inst))
                    if(not self.log is None): self.log.write('There was an error simplifying line %d in %s:\n\tOriginal:%s\n\tChanged:%s\n\tException:%s' % (lnumber, self.file, cline, changed, inst))
                    self.simplifier.reset()

    def process(self):
        """Run the replacement over every line, then clean the result."""
        for lnumber,cline in enumerate(self.code):
            #replace and simplify
            self.replaceAllMentions(cline,lnumber)
        if(self.anyChange):
            #clean
            cleaner = RemovedTechCleaner(self.code,self.log)
            cleaner.process()
            if(cleaner.anyChange):
                self.code = cleaner.newCode
#Class responsible for simplifying conditions and hopefully removing #0# and #1# in the process
class ExpressionSimplifier:
    """Boolean-simplify preprocessor conditions that contain the #0#/#1#
    placeholder tokens produced by TechRemover.

    Rules applied bottom-up, repeated until a fixed point:
      X && #0#  ->  #0#        X || #1#  ->  #1#
      X && #1#  ->  X          X || #0#  ->  X
    """

    def __init__(self):
        # One token: an (optionally negated) identifier with an optional
        # numeric comparison, the word 'defined', or a bare 0/1 literal.
        # Fix: the class was '[_a-zA-z]'; the accidental 'A-z' range also
        # matched '[', '\\', ']', '^' and '`'.  Corrected to '[_a-zA-Z]'.
        name = pp.Regex(r"(!?[_a-zA-Z]\w*[ \t]*(?:[<>]=?[ \t]*\d+)?)|(?:defined)|[01]")
        self.grammar = pp.Literal('#0#') | pp.Literal('#1#') | name | pp.Literal('&&') | pp.Literal('||') | pp.Literal("!")
        self.nestedgrammar = pp.nestedExpr('(', ')', self.grammar)
        self.anyChange = False   # any rule fired since the last reset()
        self.iterChange = False  # any rule fired in the current pass

    # The predicates below detect a small (3-4 token) sub-expression that
    # contains a placeholder joined by && or ||.
    def predZeroAndSmt(self, x):
        return len(x)>=3 and len(x)<=4 and ('#0#' in x) and ('&&' in x)

    def predZeroOrSmt(self, x):
        return len(x)>=3 and len(x)<=4 and ('#0#' in x) and ('||' in x)

    def predOneOrSmt(self, x):
        return len(x)>=3 and len(x)<=4 and ('#1#' in x) and ('||' in x)

    def predOneAndSmt(self, x):
        return len(x)>=3 and len(x)<=4 and ('#1#' in x) and ('&&' in x)

    def reset(self):
        """Clear both change flags before processing a new expression."""
        self.anyChange = False
        self.iterChange = False

    def printResult(self, a, initP):
        """Render the (possibly nested) token structure back into source
        text; ``initP`` controls whether this level gets parentheses."""
        if(isinstance(a,str)):
            if(a == '&&' or a == '||'):
                return ' '+a+' '
            else:
                return a
        else:
            result=''
            for x in a:
                result += self.printResult(x,True)
            if(initP):
                return '('+result +')'
            else:
                return result

    def simplify(self, l):
        """One bottom-up simplification pass over the nested token list;
        returns either a reduced list or a single placeholder string."""
        if(isinstance(l,str)):
            return l
        result = []
        for x in l:
            result.append(self.simplify(x))
        if(self.predZeroAndSmt(result)):
            # X && #0# collapses to #0#.
            self.anyChange = True
            self.iterChange = True
            result = '#0#'
        elif(self.predOneOrSmt(result)):
            # X || #1# collapses to #1#.
            self.anyChange = True
            self.iterChange = True
            result = '#1#'
        elif(self.predOneAndSmt(result)):
            # X && #1# -> X (drop the neutral operand and its operator).
            self.anyChange = True
            self.iterChange = True
            result.remove("#1#")
            result.remove("&&")
        elif(self.predZeroOrSmt(result)):
            # X || #0# -> X.
            self.anyChange = True
            self.iterChange = True
            result.remove("#0#")
            result.remove("||")
        return result

    def process(self, insides):
        """Parse ``insides``, simplify to a fixed point, and return the
        rendered condition text.  Raises on unparseable input."""
        parsed = self.nestedgrammar.parseString("("+insides+")").asList()
        simpl = self.simplify(parsed)
        while(self.iterChange):
            self.iterChange = False
            simpl = self.simplify(simpl)
        return self.printResult(simpl[0],False)
class FSMDefaultState:
    """Default FSM state (and base class) used by RemovedTechCleaner.

    Watches for '#if #0#'/'#if #1#'/'#ifndef #0#'/'#ifndef #1#' lines that
    became trivial after tech replacement and transitions to the state that
    strips the dead (or now-unconditional) branch.  ``processLine`` returns
    a ``(replacement_line, next_state_name)`` tuple; the sentinel
    ``self.emptyLine`` marks a line that should be dropped entirely.
    """

    def __init__(self):
        # Fix: the trailing-newline group of ifzero/ifone was mandatory while
        # the ifndef variants made it optional, so a trivial '#if #0#' on the
        # last line of a file (no final newline) was never recognised.  The
        # group is now optional in all four patterns; processLine already
        # handles group('end') being None.
        self.ifzero = re.compile(r"^[ \t]*#[ \t]*if(?:def)? +#0#[ \t]*(?P<end>(?://.*)?\n)?$")
        self.ifone = re.compile(r"^[ \t]*#[ \t]*if(?:def)? +#1#[ \t]*(?P<end>(?://.*)?\n)?$")
        self.ifndefzero = re.compile(r"^[ \t]*#[ \t]*ifndef +#0#[ \t]*(?P<end>(?://.*)?\n)?$")
        self.ifndefone = re.compile(r"^[ \t]*#[ \t]*ifndef +#1#[ \t]*(?P<end>(?://.*)?\n)?$")
        # '#else' or '#elif ...' split into indent / keyword / remainder.
        self.elseif = re.compile(r"^(?P<start>[ \t]*)#[ \t]*el(?P<seif>se|if )(?P<rest>[^\n]*\n)$")
        self.anyif = re.compile(r"^[ \t]*#[ \t]*if")
        self.endif = re.compile(r"^[ \t]*#[ \t]*endif")
        # Sentinel value returned instead of a line that must be removed.
        self.emptyLine = '#EMPTY#'

    def processLine(self, cline):
        """Classify one source line; return (line_or_sentinel, next_state)."""
        ifonesearch = self.ifone.search(cline)
        ifzerosearch = self.ifzero.search(cline)
        ifndefzerosearch = self.ifndefzero.search(cline)
        ifndefonesearch = self.ifndefone.search(cline)
        if(ifonesearch!=None):
            # '#if #1#': drop the directive (keeping any trailing comment),
            # keep the body until its matching #else/#elif.
            result = self.emptyLine if (ifonesearch.group('end') is None or ifonesearch.group('end') == '\n') else ifonesearch.group('end')
            return (result,'waitforelseifandremoveit')
        elif(ifzerosearch!=None):
            # '#if #0#': remove everything until the matching #else/#elif.
            result = self.emptyLine if (ifzerosearch.group('end') is None or ifzerosearch.group('end') == '\n') else ifzerosearch.group('end')
            return (result,'removeuntilelseif')
        elif(ifndefzerosearch!=None):
            # '#ifndef #0#' behaves like '#if #1#'.
            result = self.emptyLine if (ifndefzerosearch.group('end') is None or ifndefzerosearch.group('end') == '\n') else ifndefzerosearch.group('end')
            return (result,'waitforelseifandremoveit')
        elif(ifndefonesearch!=None):
            # '#ifndef #1#' behaves like '#if #0#'.
            result = self.emptyLine if (ifndefonesearch.group('end') is None or ifndefonesearch.group('end') == '\n') else ifndefonesearch.group('end')
            return (result,'removeuntilelseif')
        else:
            return (cline, self.getName())

    def getName(self):
        return 'default'
class FSMWaitForElseIFAndRemoveIt(FSMDefaultState):
    """FSM state entered after an always-true '#if #1#' (or '#ifndef #0#').

    Keeps the live branch's lines, then once the #else/#elif at the same
    nesting level is seen, removes that directive and everything up to and
    including the matching #endif.
    """

    def __init__(self):
        FSMDefaultState.__init__(self)
        self.ifcounter = 0            # nesting depth relative to our #if
        self.waitingForElseIf = True  # still inside the live branch

    def resetMem(self):
        """Restore initial state before returning control to 'default'."""
        self.ifcounter = 0
        self.waitingForElseIf = True

    def processLine(self, cline):
        # Track nesting: any '#if*' opens a level, '#endif' closes one.
        if self.anyif.search(cline):
            self.ifcounter += 1
        if self.endif.search(cline):
            self.ifcounter -= 1
        # An #else/#elif at our own level starts the dead branch.
        if self.ifcounter == 0 and self.elseif.search(cline):
            self.waitingForElseIf = False
        if self.ifcounter == -1:
            # Matching #endif of the original #if: drop it, back to default.
            self.resetMem()
            return (self.emptyLine, 'default')
        keep = self.waitingForElseIf
        return (cline if keep else self.emptyLine, self.getName())

    def getName(self):
        return 'waitforelseifandremoveit'
class FSMRemoveUntilElseIf(FSMDefaultState):
    """FSM state entered after an always-false '#if #0#' (or '#ifndef #1#').

    Drops every line of the dead branch until the #else/#elif at the same
    nesting level.  An '#elif cond' is promoted to '#if cond' (so its
    closing #endif must be kept); a plain '#else' is removed together with
    the final #endif, keeping only the live branch's body.
    """

    def __init__(self):
        FSMDefaultState.__init__(self)
        self.ifcounter = 0            # nesting depth relative to our #if
        self.waitingForElseIf = True  # still inside the dead branch
        self.wasThereElif = False     # dead branch ended with '#elif' (not '#else')

    def resetMem(self):
        # Restore initial state before handing control back to 'default'.
        self.ifcounter = 0
        self.waitingForElseIf = True
        self.wasThereElif = False

    def processLine(self, cline):
        anyifsearch = self.anyif.search(cline)
        endifsearch = self.endif.search(cline)
        elseifsearch = self.elseif.search(cline)
        # Track nesting: any '#if*' opens a level, '#endif' closes one.
        if(anyifsearch != None):
            self.ifcounter += 1
        if(endifsearch != None):
            self.ifcounter -= 1
        if(elseifsearch != None and self.ifcounter==0 and self.waitingForElseIf):
            self.waitingForElseIf = False
            if(elseifsearch.group('seif')=='if '):
                # '#elif cond' becomes '#if cond'; remember to keep its #endif.
                start = '' if elseifsearch.group('start') is None else elseifsearch.group('start')
                rest = '' if elseifsearch.group('rest') is None else elseifsearch.group('rest')
                self.wasThereElif = True
                return (start+'#if '+rest,self.getName())
            else:
                # Plain '#else': drop the directive, keep the branch body.
                self.wasThereElif = False
                return (self.emptyLine,self.getName())
        if(self.ifcounter == -1):
            # Matching #endif of the original #if reached.
            if(self.wasThereElif):
                # Keep it: it now closes the promoted '#if'.
                self.resetMem()
                return (cline,'default')
            else:
                self.resetMem()
                return (self.emptyLine,'default')
        if(self.waitingForElseIf):
            # Still inside the dead branch: remove the line.
            return (self.emptyLine,self.getName())
        else:
            return (cline,self.getName())

    def getName(self):
        return 'removeuntilelseif'
#Class responsible for cleaning-up code of conditions with only #0# or #1#
class RemovedTechCleaner:
    """Drive the three FSM states over the source lines, dropping or
    rewriting preprocessor lines whose conditions collapsed to #0#/#1#.

    The cleaned result is collected in ``self.newCode``; ``self.anyChange``
    reports whether anything was removed or rewritten.
    """

    def __init__(self, code, log):
        self.code = code
        self.log = log
        self.anyChange = False
        self.newCode = []
        # Map each state object by its name so processLine return values
        # can select the successor state.
        state_objs = (
            FSMDefaultState(),
            FSMWaitForElseIFAndRemoveIt(),
            FSMRemoveUntilElseIf(),
        )
        self.states = {s.getName(): s for s in state_objs}
        self.currentState = self.states['default']

    def process(self):
        """Feed every line through the current FSM state and collect output."""
        empty = self.states['default'].emptyLine
        for lnumber, cline in enumerate(self.code):
            newline, nextname = self.currentState.processLine(cline)
            if(not self.log is None): self.log.write('FSMS %s processed line %d\n' % (self.currentState.getName(),lnumber))
            self.currentState = self.states[nextname]
            if newline == empty:
                # Sentinel: the line is dropped from the output entirely.
                if(not self.log is None): self.log.write('\t and it removed it.\n')
                self.anyChange = True
            elif newline != cline:
                if(not self.log is None): self.log.write('\t and change it to: %s\n' % (newline))
                self.anyChange = True
                self.newCode.append(newline)
            else:
                self.newCode.append(cline)
|
{
"content_hash": "4b65b5879ac919c97fa0d9ed50f0c187",
"timestamp": "",
"source": "github",
"line_count": 293,
"max_line_length": 199,
"avg_line_length": 42.965870307167236,
"alnum_prop": 0.5505600127095083,
"repo_name": "VHonzik/VBSTechRemoval",
"id": "45335d236b6287b6620009498f388f997e99c05a",
"size": "12589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "VBSTechRemovalCore.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18637"
}
],
"symlink_target": ""
}
|
"""This module contains a collection of utilities/common code
that will be used by all integration tests that validate the
spiders service.
"""
from cloudfeaster_services.spiders import __api_version__
from ..integration_tests import IntegrationTestCase
from ..integration_tests import IntegrationTestEnv
from ..integration_tests import Service
from ..integration_tests import ServiceConfig
class SpidersServiceConfig(ServiceConfig):
    """Service configuration for the spiders service: service name
    'spiders', bound to 127.0.0.1 port 8080."""

    def __init__(self):
        ServiceConfig.__init__(self, 'spiders', '127.0.0.1', 8080)
class SpidersService(Service):
    """Wraps the spiders service process for integration tests."""

    def __init__(self, spiders_config):
        # Launch spiders.py with the generated config file; the versioned
        # _noop endpoint serves as the service's health-check URL.
        cmd = [
            'spiders.py',
            '--config',
            spiders_config.filename,
        ]
        Service.__init__(self, spiders_config, cmd, '/%s/_noop' % __api_version__)
class SpidersIntegrationTestEnv(IntegrationTestEnv):
    """Integration-test environment wired to the spiders service."""

    def __init__(self):
        IntegrationTestEnv.__init__(self, SpidersServiceConfig, SpidersService)
class SpidersIntegrationTestCase(IntegrationTestCase):
    """Base class for spiders-service integration test cases."""

    @property
    def clf_test_env_class(self):
        # Tells IntegrationTestCase which environment class to spin up.
        return SpidersIntegrationTestEnv
|
{
"content_hash": "b10edfbe1995f207097e7ed1a9a85d5f",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 82,
"avg_line_length": 27.65,
"alnum_prop": 0.6998191681735986,
"repo_name": "simonsdave/cloudfeaster-services",
"id": "ba05871d3d009a4a41662b317d0b245a83d50f34",
"size": "1106",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/integration/spiders/integration_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1995"
},
{
"name": "JavaScript",
"bytes": "1177"
},
{
"name": "Python",
"bytes": "602059"
},
{
"name": "RAML",
"bytes": "33777"
},
{
"name": "Shell",
"bytes": "17929"
}
],
"symlink_target": ""
}
|
import os
import urllib2
import datetime
from sauron import logger
from sauron.metrics import Metric, MetricException
class PingMetric(Metric):
    """Metric that measures the HTTP request latency of a configured URL."""

    def reconfig(self, url, **kwargs):
        '''parameters: url, [ post={}, timeout=30 ]'''
        Metric.reconfig(self, **kwargs)
        self.url = url
        # Bug fix: ``self.post`` was documented here and read by values()
        # but never assigned, so every call to values() raised
        # AttributeError.  A default of None makes urlopen issue a GET.
        self.post = kwargs.get('post', None)

    def values(self):
        """Fetch ``self.url`` once and return its latency in seconds.

        HTTP-level errors (4xx/5xx) still produce a latency sample; lower
        level IOErrors are converted to MetricException.
        """
        start = datetime.datetime.now()
        try:
            # The response body is discarded; only the timing matters.
            urllib2.urlopen(self.url, self.post).read()
        except urllib2.HTTPError:
            pass
        except IOError:
            raise MetricException('Failed to fetch %s' % self.url)
        try:
            # Apparently different implementations don't expose this
            latency = (datetime.datetime.now() - start).total_seconds()
        except AttributeError:
            latency = (datetime.datetime.now() - start).seconds
        return {
            'results': {
                'latency': (latency, 'Seconds')
            }
        }
|
{
"content_hash": "2e69de79e2a4818ca4fae49dc3aa4072",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 65,
"avg_line_length": 28.166666666666668,
"alnum_prop": 0.6520710059171597,
"repo_name": "johnny-die-tulpe/illuminati",
"id": "86a470d196747b891da7b4e8e3e027ae5918b50b",
"size": "1962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sauron/metrics/PingMetric.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "97976"
},
{
"name": "Shell",
"bytes": "1897"
}
],
"symlink_target": ""
}
|
"""Tasklib cmd interface
Exit Codes:
ended successfully - 0
running - 1
valid but failed - 2
unexpected error - 3
notfound such task - 4
"""
import argparse
import sys
import textwrap
import yaml
from tasklib import agent
from tasklib import config
from tasklib import logger
from tasklib import task
from tasklib import utils
class CmdApi(object):
    """Command-line interface for tasklib.

    Builds an argparse parser with one subcommand per action; each
    subcommand dispatches to the method of the same name, which may return
    an exit code (see the module docstring for the code meanings).
    """

    def __init__(self):
        self.parser = argparse.ArgumentParser(
            description=textwrap.dedent(__doc__),
            formatter_class=argparse.RawDescriptionHelpFormatter)
        self.subparser = self.parser.add_subparsers(
            title='actions',
            description='Supported actions',
            help='Provide of one valid actions')
        self.config = config.Config()
        self.register_options()
        self.register_actions()

    def register_options(self):
        """Register global options shared by all actions."""
        self.parser.add_argument(
            '--config', '-c', dest='config', default=None,
            help='Path to configuration file')
        self.parser.add_argument(
            '--debug', '-d', dest='debug', action='store_true', default=None)

    def register_actions(self):
        """Register subcommands; task-specific ones take a 'task' argument."""
        task_arg = [(('task',), {'type': str})]
        self.register_parser('list')
        self.register_parser('conf')
        for name in ('run', 'daemon', 'report', 'status', 'show'):
            self.register_parser(name, task_arg)

    def register_parser(self, func_name, arguments=()):
        # Bind the subcommand to the method with the same name via
        # set_defaults(func=...), the standard argparse dispatch idiom.
        parser = self.subparser.add_parser(func_name)
        parser.set_defaults(func=getattr(self, func_name))
        for args, kwargs in arguments:
            parser.add_argument(*args, **kwargs)

    def parse(self, args):
        """Parse argv, apply config/debug overrides, set up logging and
        dispatch to the selected action; returns the action's result."""
        parsed = self.parser.parse_args(args)
        if parsed.config:
            self.config.update_from_file(parsed.config)
        if parsed.debug is not None:
            self.config['debug'] = parsed.debug
        logger.setup_logging(self.config)
        return parsed.func(parsed)

    def list(self, args):
        """Print every task found under the configured directories."""
        for task_dir in utils.find_all_tasks(self.config):
            print(task.Task.task_from_dir(task_dir, self.config))

    def show(self, args):
        """Print a task's metadata as YAML."""
        meta = task.Task(args.task, self.config).metadata
        print(yaml.dump(meta, default_flow_style=False))

    def run(self, args):
        """Run a task in the foreground; returns its exit code."""
        task_agent = agent.TaskAgent(args.task, self.config)
        task_agent.run()
        status = task_agent.status()
        print(status)
        return task_agent.code()

    def daemon(self, args):
        """Run a task daemonized (in the background)."""
        task_agent = agent.TaskAgent(args.task, self.config)
        task_agent.daemon()

    def report(self, args):
        """Print a task's stored report."""
        task_agent = agent.TaskAgent(args.task, self.config)
        print(task_agent.report())

    def status(self, args):
        """Print a task's status; returns its exit code."""
        task_agent = agent.TaskAgent(args.task, self.config)
        exit_code = task_agent.code()
        print(task_agent.status())
        return exit_code

    def conf(self, args):
        """Print the effective configuration."""
        print(self.config)
def main():
    """Command-line entry point: parse argv and exit with the action's code."""
    api = CmdApi()
    exit_code = api.parse(sys.argv[1:])
    # Use sys.exit instead of the site-injected builtin ``exit``, which is
    # only guaranteed to exist in interactive sessions / with the site module.
    sys.exit(exit_code)
|
{
"content_hash": "75d49318ba1565f0ff5820725fab48bf",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 77,
"avg_line_length": 29.686274509803923,
"alnum_prop": 0.619220607661823,
"repo_name": "andrei4ka/fuel-web-redhat",
"id": "bb0cce283c3cc8b3886a8805262af1f331b40695",
"size": "3637",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tasklib/tasklib/cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "100524"
},
{
"name": "JavaScript",
"bytes": "639783"
},
{
"name": "Makefile",
"bytes": "5891"
},
{
"name": "Puppet",
"bytes": "282"
},
{
"name": "Python",
"bytes": "3206343"
},
{
"name": "Ruby",
"bytes": "33423"
},
{
"name": "Shell",
"bytes": "31460"
}
],
"symlink_target": ""
}
|
"""Tests for the GogoGate2 component."""
from unittest.mock import MagicMock, patch
from ismartgate import GogoGate2Api, ISmartGateApi
from ismartgate.common import ApiError
from ismartgate.const import GogoGate2ApiErrorCode
from homeassistant import config_entries
from homeassistant.components import dhcp, zeroconf
from homeassistant.components.gogogate2.const import (
DEVICE_TYPE_GOGOGATE2,
DEVICE_TYPE_ISMARTGATE,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
CONF_DEVICE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from . import _mocked_ismartgate_closed_door_response
from tests.common import MockConfigEntry
MOCK_MAC_ADDR = "AA:BB:CC:DD:EE:FF"
@patch("homeassistant.components.gogogate2.async_setup_entry", return_value=True)
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
async def test_auth_fail(
    gogogate2api_mock, async_setup_entry_mock, hass: HomeAssistant
) -> None:
    """Test authorization failures.

    Covers three failure modes of the user config flow: wrong credentials,
    a generic exception from the API, and an ApiError with a non-credential
    error code (treated as a connection problem).
    """
    api: GogoGate2Api = MagicMock(spec=GogoGate2Api)
    gogogate2api_mock.return_value = api

    # Case 1: wrong credentials -> form re-shown with 'invalid_auth'.
    api.reset_mock()
    api.async_info.side_effect = ApiError(
        GogoGate2ApiErrorCode.CREDENTIALS_INCORRECT, "blah"
    )
    result = await hass.config_entries.flow.async_init(
        "gogogate2", context={"source": SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={
            CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
            CONF_IP_ADDRESS: "127.0.0.2",
            CONF_USERNAME: "user0",
            CONF_PASSWORD: "password0",
        },
    )
    assert result
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {
        "base": "invalid_auth",
    }

    # Case 2: any non-ApiError exception -> 'cannot_connect'.
    api.reset_mock()
    api.async_info.side_effect = Exception("Generic connection error.")
    result = await hass.config_entries.flow.async_init(
        "gogogate2", context={"source": SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={
            CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
            CONF_IP_ADDRESS: "127.0.0.2",
            CONF_USERNAME: "user0",
            CONF_PASSWORD: "password0",
        },
    )
    assert result
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {"base": "cannot_connect"}

    # Case 3: ApiError with a non-credential code -> also 'cannot_connect'.
    api.reset_mock()
    api.async_info.side_effect = ApiError(0, "blah")
    result = await hass.config_entries.flow.async_init(
        "gogogate2", context={"source": SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={
            CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
            CONF_IP_ADDRESS: "127.0.0.2",
            CONF_USERNAME: "user0",
            CONF_PASSWORD: "password0",
        },
    )
    assert result
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {"base": "cannot_connect"}
async def test_form_homekit_unique_id_already_setup(hass):
    """Test that we abort from homekit if gogogate2 is already setup.

    First homekit discovery shows the form and records the MAC as the
    flow's unique_id; after a config entry for the same host exists, a
    second identical discovery aborts.
    """
    # Initial discovery: form is shown, no errors.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_HOMEKIT},
        data=zeroconf.ZeroconfServiceInfo(
            host="1.2.3.4",
            addresses=["1.2.3.4"],
            hostname="mock_hostname",
            name="mock_name",
            port=None,
            properties={zeroconf.ATTR_PROPERTIES_ID: MOCK_MAC_ADDR},
            type="mock_type",
        ),
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {}
    # The in-progress flow must carry the discovered MAC as unique_id.
    flow = next(
        flow
        for flow in hass.config_entries.flow.async_progress()
        if flow["flow_id"] == result["flow_id"]
    )
    assert flow["context"]["unique_id"] == MOCK_MAC_ADDR

    # Simulate an already-configured device at the same IP.
    entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_IP_ADDRESS: "1.2.3.4", CONF_USERNAME: "mock", CONF_PASSWORD: "mock"},
    )
    entry.add_to_hass(hass)

    # Re-discovery of the same device now aborts.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_HOMEKIT},
        data=zeroconf.ZeroconfServiceInfo(
            host="1.2.3.4",
            addresses=["1.2.3.4"],
            hostname="mock_hostname",
            name="mock_name",
            port=None,
            properties={zeroconf.ATTR_PROPERTIES_ID: MOCK_MAC_ADDR},
            type="mock_type",
        ),
    )
    assert result["type"] == FlowResultType.ABORT
async def test_form_homekit_ip_address_already_setup(hass):
    """Test that homekit discovery aborts when the device's IP address is
    already configured (config entry exists before the discovery)."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_IP_ADDRESS: "1.2.3.4", CONF_USERNAME: "mock", CONF_PASSWORD: "mock"},
    )
    entry.add_to_hass(hass)

    # Discovery for the already-configured host must abort immediately.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_HOMEKIT},
        data=zeroconf.ZeroconfServiceInfo(
            host="1.2.3.4",
            addresses=["1.2.3.4"],
            hostname="mock_hostname",
            name="mock_name",
            port=None,
            properties={zeroconf.ATTR_PROPERTIES_ID: MOCK_MAC_ADDR},
            type="mock_type",
        ),
    )
    assert result["type"] == FlowResultType.ABORT
async def test_form_homekit_ip_address(hass):
    """Test homekit includes the defaults ip address.

    The form's schema should pre-fill the discovered IP and default the
    device type to ismartgate, so the user only supplies credentials.
    """
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_HOMEKIT},
        data=zeroconf.ZeroconfServiceInfo(
            host="1.2.3.4",
            addresses=["1.2.3.4"],
            hostname="mock_hostname",
            name="mock_name",
            port=None,
            properties={zeroconf.ATTR_PROPERTIES_ID: MOCK_MAC_ADDR},
            type="mock_type",
        ),
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {}

    # Validating only credentials through the schema must yield the
    # discovered IP and the ismartgate default device type.
    data_schema = result["data_schema"]
    assert data_schema({CONF_USERNAME: "username", CONF_PASSWORD: "password"}) == {
        CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
        CONF_IP_ADDRESS: "1.2.3.4",
        CONF_PASSWORD: "password",
        CONF_USERNAME: "username",
    }
@patch("homeassistant.components.gogogate2.async_setup_entry", return_value=True)
@patch("homeassistant.components.gogogate2.common.ISmartGateApi")
async def test_discovered_dhcp(
    ismartgateapi_mock, async_setup_entry_mock, hass
) -> None:
    """Test the DHCP discovery flow end to end: the form is shown, a failed
    connection re-shows it with an error, and a successful connection
    creates the config entry."""
    api: ISmartGateApi = MagicMock(spec=ISmartGateApi)
    ismartgateapi_mock.return_value = api

    api.reset_mock()
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_DHCP},
        data=dhcp.DhcpServiceInfo(
            ip="1.2.3.4", macaddress=MOCK_MAC_ADDR, hostname="mock_hostname"
        ),
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {}

    # MagicMock's default async_info return is unusable -> 'cannot_connect'.
    result2 = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={
            CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
            CONF_IP_ADDRESS: "1.2.3.4",
            CONF_USERNAME: "user0",
            CONF_PASSWORD: "password0",
        },
    )
    assert result2
    assert result2["type"] == FlowResultType.FORM
    assert result2["errors"] == {"base": "cannot_connect"}

    # With a valid device response the entry is created.
    api.reset_mock()
    closed_door_response = _mocked_ismartgate_closed_door_response()
    api.async_info.return_value = closed_door_response
    result3 = await hass.config_entries.flow.async_configure(
        result2["flow_id"],
        user_input={
            CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
            CONF_IP_ADDRESS: "1.2.3.4",
            CONF_USERNAME: "user0",
            CONF_PASSWORD: "password0",
        },
    )
    assert result3
    assert result3["type"] == FlowResultType.CREATE_ENTRY
    assert result3["data"] == {
        "device": "ismartgate",
        "ip_address": "1.2.3.4",
        "password": "password0",
        "username": "user0",
    }
async def test_discovered_by_homekit_and_dhcp(hass):
    """Test we get the form with homekit and abort for dhcp source when we get both.

    DHCP discoveries arriving while the homekit flow is in progress must
    abort with 'already_in_progress' (matched by MAC for the first, and
    by IP for the second even with a different MAC).
    """
    # Homekit discovery starts the flow.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_HOMEKIT},
        data=zeroconf.ZeroconfServiceInfo(
            host="1.2.3.4",
            addresses=["1.2.3.4"],
            hostname="mock_hostname",
            name="mock_name",
            port=None,
            properties={zeroconf.ATTR_PROPERTIES_ID: MOCK_MAC_ADDR},
            type="mock_type",
        ),
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {}

    # Same device via DHCP (same MAC): abort.
    result2 = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_DHCP},
        data=dhcp.DhcpServiceInfo(
            ip="1.2.3.4", macaddress=MOCK_MAC_ADDR, hostname="mock_hostname"
        ),
    )
    assert result2["type"] == FlowResultType.ABORT
    assert result2["reason"] == "already_in_progress"

    # Same IP but different MAC: still treated as in-progress -> abort.
    result3 = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_DHCP},
        data=dhcp.DhcpServiceInfo(
            ip="1.2.3.4", macaddress="00:00:00:00:00:00", hostname="mock_hostname"
        ),
    )
    assert result3["type"] == FlowResultType.ABORT
    assert result3["reason"] == "already_in_progress"
|
{
"content_hash": "fb1afe2306b763d687fac8a9ff476b25",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 88,
"avg_line_length": 33.20890410958904,
"alnum_prop": 0.6164793235021141,
"repo_name": "mezz64/home-assistant",
"id": "cbe40a2b9cbdc5831fd1d79f3f9ae3ece920cb8a",
"size": "9697",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/gogogate2/test_config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.