prompt listlengths 1 1 | compression_prompt listlengths 1 1 | target stringlengths 1.03k 828k |
|---|---|---|
[
{
"content": "Here is some code:\n```python\n# -*- coding: utf-8 -*-\n\n\"\"\"This module provides features to create and evaluate garbled circuits\"\"\"\n\n__all__ = [\"AbstractCreatorGarbledCircuit\", \"AbstractEvaluatorGarbledCircuit\"]\n\n### CreatorGarbledCircuit\nclass AbstractCreatorGarbledCircuit(object... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\n\"\"\"This module provides features to create and evaluate garbled circuits\"\"\"\n\n__all__ = [\"AbstractCreatorGarbledCircuit\", \"AbstractEvaluatorGarbledCircuit\"]\n\n### CreatorGarbledCircuit\nclass AbstractCreatorGarbl... | ```python
# -*- coding: utf-8 -*-
"""This module provides features to create and evaluate garbled circuits"""
__all__ = ["AbstractCreatorGarbledCircuit", "AbstractEvaluatorGarbledCircuit"]
### CreatorGarbledCircuit
class AbstractCreatorGarbledCircuit(object):
"""
Creator Garbled Circuit Abstract Class
DOCUMENT ME!!!
"""
_circuit_counter = 0
def __init__(self, circuit, R, inputs):
self.circuit = self.optimize_circuit(circuit) #optimize the circuit for gc
#self.circuit = circuit
self.circuit_id = self._circuit_counter
AbstractCreatorGarbledCircuit._circuit_counter += 1
self.inputs = inputs
self.R = R
self.outputs = [None]
def optimize_circuit(self, c):
"""
Overwrite this with the appropriate circuit transformation for your gc-implementation
"""
return c
def next_garbled_gate(self):
""" """
#map inputs to input_bits
# list of list to one single tuple in same order
inputs = self.inputs = reduce(lambda x, y: tuple(x) + tuple(y), self.inputs)
u = self.circuit.num_input_bits()
if len(inputs) != u:
raise ValueError("Number of garbled inputs of does not match "
"number of circuit inputs! (expected %d, got %d)"%(u, len(inputs)))
k = self.circuit.num_gates()
garbled_wires = [None for i in xrange(u + k)] #initialize all wires
# map input-bits to input-wires
if len(self.inputs) != u:
raise ValueError("Number of garbled inputs of does not match "
"number of circuit inputs")
garbled_wires[:u] = inputs
# add costs:
self.creation_costs()
# create garbled gates
for ix, gate in enumerate(self.circuit.next_gate()):
inputs, truth = gate
wireval, garbled_table = self.create_garbled_gate([garbled_wires[i] for i in inputs], truth, ix)
garbled_wires[u + ix] = wireval
if garbled_table: # None for gates without any table (e.g. XOR-Gates)
yield garbled_table
self.outputs = [(garbled_wires[idx] for idx in output[0])
for output in self.circuit.outputs()]
def results(self):
for i in self.outputs:
yield i
class AbstractEvaluatorGarbledCircuit(object):
"""A garbled circuit"""
_circuit_counter = 0
def __init__(self, circuit, next_garbled_gate, garbled_inputs):
self.circuit = self.optimize_circuit(circuit)
self.next_garbled_gate = next_garbled_gate
self.circuit_id = AbstractEvaluatorGarbledCircuit._circuit_counter
AbstractEvaluatorGarbledCircuit._circuit_counter += 1
self.garbled_inputs = garbled_inputs
def set_inputs(self, inputs):
self.garbled_inputs = inputs
def optimize_circuit(self, c):
"""
Overwrite this with the appropriate circuit transformation for your gc-implementation
"""
return c
def evaluate(self):
"""Used in online phase
@type garbled_input: iterable
@param garbled_inputs: iterable of keys
@rtype: iterable
@return: returns the keys of output"""
#serialize the inputs into one big list of garbled wires
garbled_inputs = self.garbled_inputs = reduce(lambda x, y: tuple(x) + tuple(y), self.garbled_inputs)
u = self.circuit.num_input_bits()
k = self.circuit.num_gates()
garbled_wires = [None for i in xrange(u + k)]
if len(garbled_inputs) != u:
raise ValueError("Number of garbled inputs does not match "
"number of circuit inputs! (got %d, expect %d)"%(len(garbled_inputs), u))
garbled_wires[:u] = garbled_inputs
self.evaluation_costs()
# evaluate garbled gates
for ix, gate in enumerate(self.circuit.next_gate()):
inputs, truth = gate
len_inputs = len(inputs)
garbled_wires[u + ix] = self.evaluate_garbled_gate([garbled_wires[i] for i in inputs], truth, ix)
# yield None
for i in self.next_garbled_gate:
# unreachable unless your circuits do not match or your
# evaluate_garbled_gate does not use all of self.next_garbled_gate
assert False, "Circuit and Garbled Circuit does not Match!"
self.outputs = [(garbled_wires[outwire] for outwire in output[0]) for output in self.circuit.outputs()]
def eval (self):
# for i in self.evaluate_next_gate():
# pass
self.evaluate()
return self.outputs
def results(self):
try:
for i in self.outputs:
yield i
except AttributeError:
self.eval()
for i in self.outputs:
yield i
``` |
[
{
"content": "Write out the code verbatim, preserving indentation and whitespace:\n```python\n# -*- coding: utf-8 -*-\n\n# Mathmaker Lib offers lualatex-printable mathematical objects.\n# Copyright 2006-2017 Nicolas Hainaux <nh.techn@gmail.com>\n\n# This file is part of Mathmaker Lib.\n\n# Mathmaker Lib is free... | [
{
"content": "Write out the code verbatim, preserving indentation and whitespace:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\n# Mathmaker Lib offers lualatex-printable mathematical objects.\n# Copyright 2006-2017 Nicolas Hainaux <nh.techn@gmail.com>\n\n# This file is part of Mathmaker Lib.\n\n# Mathm... | ```python
# -*- coding: utf-8 -*-
# Mathmaker Lib offers lualatex-printable mathematical objects.
# Copyright 2006-2017 Nicolas Hainaux <nh.techn@gmail.com>
# This file is part of Mathmaker Lib.
# Mathmaker Lib is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
# Mathmaker Lib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Mathmaker Lib; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from mathmakerlib.exceptions import ZERO_OBJECTS_ERRORS, ZeroVector
from mathmakerlib.geometry.point import Point
from mathmakerlib.geometry.vector import Vector
from mathmakerlib.calculus.number import Number
from mathmakerlib.core.dimensional import Dimensional
from mathmakerlib.calculus.tools import is_number, is_integer
class Bipoint(Dimensional):
"""
A pair of Points. Gather methods common to LineSegment, Line, Ray.
Bipoints are quite close to, but not completely the same as, bound vectors.
For free vectors, see Vector.
Notice that if:
A = Point(0, 0); B = Point(1, 0); C = Point(0, 1) and D = Point(1, 1),
then: Bipoint(A, B) != Bipoint(C, D)
but: Vector(A, B) == Vector(C, D)
Also, note that contrary to LineSegments, Bipoint(A, B) != Bipoint(B, A).
This class won't ever need to get Drawable, but can be instanciated.
"""
def __init__(self, tail, head, allow_zero_length=True):
"""
A Bipoint can be created from a pair of Points or a Point + a Vector.
:param tail: the first Point of the Bipoint
:type tail: Point
:param head: the second Point of the Bipoint. If a Vector is provided,
the second Point will be calculated using the first Point and this
Vector.
:type head: Point or Vector
:param allow_zero_length: whether zero length Bipoints are allowed or
not (default True).
:type allow_zero_length: bool
"""
if not isinstance(tail, Point):
raise TypeError('First argument must be a Point, found {} '
'instead.'.format(repr(tail)))
if not isinstance(head, (Point, Vector)):
raise TypeError('Second argument must be a Point or a Vector, '
'found {} instead.'.format(repr(head)))
self._three_dimensional = tail.three_dimensional \
or head.three_dimensional
if isinstance(head, Vector):
if self._three_dimensional:
zval = tail.z + head.z
else:
zval = 'undefined'
head = Point(tail.x + head.x, tail.y + head.y, zval)
if (not allow_zero_length
and tail.coordinates == head.coordinates):
msg = 'Explicitly disallowed creation of a zero-length {}.'\
.format(type(self).__name__)
raise ZERO_OBJECTS_ERRORS[type(self).__name__](msg)
self._points = [tail, head]
self._Δx = self.points[1].x - self.points[0].x
self._Δy = self.points[1].y - self.points[0].y
self._Δz = self.points[1].z - self.points[0].z
def __repr__(self):
return 'Bipoint({}, {})'.format(repr(self.tail), repr(self.head))
def __eq__(self, other):
if isinstance(other, Bipoint):
return self.head == other.head and self.tail == other.tail
else:
return False
def __add__(self, other):
return self.add(other)
def add(self, other, new_endpoint_name='automatic'):
if not isinstance(other, Bipoint):
raise TypeError('Can only add a Bipoint to another Bipoint. '
'Found {} instead.'.format(repr(other)))
if self.three_dimensional:
zval = self.points[1].z + other.Δz
else:
zval = 'undefined'
return Bipoint(self.points[0],
Point(self.points[1].x + other.Δx,
self.points[1].y + other.Δy,
z=zval,
name=new_endpoint_name))
@property
def points(self):
return self._points
@property
def tail(self):
return self.points[0]
@property
def head(self):
return self.points[1]
@property
def Δx(self):
return self._Δx
@property
def Δy(self):
return self._Δy
@property
def Δz(self):
return self._Δz
@property
def coordinates(self):
return (self._Δx, self._Δy, self._Δz)
@property
def length(self):
"""Length between the two Points."""
return Number(self.Δx ** 2 + self.Δy ** 2 + self.Δz ** 2)\
.sqrt()
@property
def slope(self):
"""Slope of the pair of Points, from -180° to 180°."""
try:
return Vector(self).slope
except ZeroVector:
msg = 'Cannot calculate the slope of a zero-length {}.'\
.format(type(self).__name__)
raise ZERO_OBJECTS_ERRORS[type(self).__name__](msg)
@property
def slope360(self):
"""Slope of the pair of Points, from 0° to 360°."""
try:
return Vector(self).slope360
except ZeroVector:
msg = 'Cannot calculate the slope of a zero-length {}.'\
.format(type(self).__name__)
raise ZERO_OBJECTS_ERRORS[type(self).__name__](msg)
def midpoint(self, name='automatic'):
"""Bipoint's midpoint."""
if self.three_dimensional:
zval = (self.points[0].z + self.points[1].z) / 2
else:
zval = 'undefined'
return Point((self.points[0].x + self.points[1].x) / 2,
(self.points[0].y + self.points[1].y) / 2,
z=zval,
name=name)
def point_at(self, position, name='automatic'):
"""
A Point aligned with the Bipoint, at provided position.
The Bipoint's length is the length unit of position.
Hence, position 0 matches points[0], position 1 matches points[1],
position 0.5 matches the midpoint, position 0.75 is three quarters
on the way from points[0] to points[1], position 2 is a Point that
makes points[1] the middle between it and points[0], position -1 makes
points[0] the middle between it and points[1].
:param position: a number
:type position: number
:param name: the name to give to the Point
:type name: str
"""
if not is_number(position):
raise TypeError('position must be a number, found {} instead.'
.format(type(position)))
k = Number(position)
if k == 0:
return self.points[0]
elif k == 1:
return self.points[1]
else:
if self.three_dimensional:
zval = (self.points[0].z
+ (self.points[1].z - self.points[0].z) * k)
else:
zval = 'undefined'
return Point((self.points[0].x
+ (self.points[1].x - self.points[0].x) * k),
(self.points[0].y
+ (self.points[1].y - self.points[0].y) * k),
z=zval,
name=name)
def dividing_points(self, n=None, prefix='a'):
"""
Create the list of Points that divide the Bipoint in n parts.
:param n: the number of parts (so it will create n - 1 points)
n must be greater or equal to 1
:type n: int
"""
if not (is_number(n) and is_integer(n)):
raise TypeError('n must be an integer')
if not n >= 1:
raise ValueError('n must be greater or equal to 1')
x0 = self.points[0].x
x1 = self.points[1].x
xstep = (x1 - x0) / n
x_list = [x0 + (i + 1) * xstep for i in range(int(n - 1))]
y0 = self.points[0].y
y1 = self.points[1].y
ystep = (y1 - y0) / n
y_list = [y0 + (i + 1) * ystep for i in range(int(n - 1))]
if self.three_dimensional:
z0 = self.points[0].z
z1 = self.points[1].z
zstep = (z1 - z0) / n
z_list = [z0 + (i + 1) * zstep for i in range(int(n - 1))]
else:
z_list = ['undefined' for i in range(int(n - 1))]
return [Point(x, y, z, prefix + str(i + 1))
for i, (x, y, z) in enumerate(zip(x_list, y_list, z_list))]
``` |
[
{
"content": "Here is the code content:\n```python\n#!/usr/bin/env python\n# Copyright (c) 2012 Cloudera, Inc. All rights reserved.\n#\n# This test suite validates query results in recordservice are always updated.\n\nfrom subprocess import call\nfrom tests.common.impala_test_suite import *\n\nTEST_DB1 = 'tmpDb... | [
{
"content": "Here is the code content:\n<|memory_start|>```python\n#!/usr/bin/env python\n# Copyright (c) 2012 Cloudera, Inc. All rights reserved.\n#\n# This test suite validates query results in recordservice are always updated.\n\nfrom subprocess import call\nfrom tests.common.impala_test_suite import *\n\nT... | ```python
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# This test suite validates query results in recordservice are always updated.
from subprocess import call
from tests.common.impala_test_suite import *
TEST_DB1 = 'tmpDb1'
TEST_DB2 = 'tmpDb2'
TEST_TBL1 = 'tmpTbl1'
TEST_TBL2 = 'tmpTbl2'
TEST_TBL3 = 'tmpTbl3'
NOT_EXIST_TBL = 'notExistTbl'
HDFS_LOCATION = '/tmpLocation'
FILE1_PATH = 'testdata/rsHdfsData/file1'
FILE2_PATH = 'testdata/rsHdfsData/file2'
class TestRecordService(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
# Only test text format
super(TestRecordService, cls).add_test_dimensions()
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text')
def setup_method(self, method):
self.execute_query('drop table if exists %s.%s' % (TEST_DB1, NOT_EXIST_TBL))
self.execute_query('drop table if exists %s.%s' % (TEST_DB1, TEST_TBL1))
self.execute_query('drop table if exists %s.%s' % (TEST_DB1, TEST_TBL2))
self.execute_query('drop table if exists %s.%s' % (TEST_DB2, TEST_TBL1))
self.execute_query('drop table if exists %s.%s' % (TEST_DB2, TEST_TBL2))
self.execute_query('drop table if exists %s.%s' % (TEST_DB2, TEST_TBL3))
call(["hadoop", "fs", "-rm", "-r", HDFS_LOCATION])
self.execute_query('create database if not exists %s' % TEST_DB1)
self.execute_query(
'create table if not exists %s.%s (i integer, s string)' % (TEST_DB1, TEST_TBL1))
self.execute_query(
"insert into %s.%s values (1, 'a'),(2, 'b'),(3, 'c')" % (TEST_DB1, TEST_TBL1))
self.execute_query('create database if not exists %s' % TEST_DB2)
self.execute_query(
'create table if not exists %s.%s (s1 string, i integer, s2 string)' %
(TEST_DB2, TEST_TBL1))
self.execute_query(
"insert into %s.%s values ('a', 1, 'r'),('b', 2, 's'),('c', 3, 't')" %
(TEST_DB2, TEST_TBL1))
self.execute_query(
'create table if not exists %s.%s (s string, i integer)' % (TEST_DB2, TEST_TBL2))
self.execute_query(
"insert into %s.%s values ('a', 5),('b', -9),('c', 10)" % (TEST_DB2, TEST_TBL2))
def teardown_method(self, method):
self.execute_query('drop table if exists %s.%s' % (TEST_DB1, NOT_EXIST_TBL))
self.execute_query('drop table if exists %s.%s' % (TEST_DB1, TEST_TBL1))
self.execute_query('drop table if exists %s.%s' % (TEST_DB1, TEST_TBL2))
self.execute_query('drop database if exists %s' % TEST_DB1)
self.execute_query('drop table if exists %s.%s' % (TEST_DB2, TEST_TBL1))
self.execute_query('drop table if exists %s.%s' % (TEST_DB2, TEST_TBL2))
self.execute_query('drop table if exists %s.%s' % (TEST_DB2, TEST_TBL3))
self.execute_query('drop database if exists %s' % TEST_DB2)
call(["hadoop", "fs", "-rm", "-r", HDFS_LOCATION])
def test_recordservice_query(self, vector):
# Test basic queries: insert value, alter table, insert table, drop table and etc.
self.run_test_case('QueryTest/recordservice-basic', vector)
# Insert table with hdfs path as location
call(["hadoop", "fs", "-mkdir", HDFS_LOCATION])
file1 = os.path.join(os.environ['IMPALA_HOME'], FILE1_PATH)
call(["hadoop", "fs", "-put", file1, HDFS_LOCATION])
self.execute_query(
"create external table if not exists %s.%s (i integer) location '%s'" %
(TEST_DB1, TEST_TBL2, HDFS_LOCATION))
self.run_test_case('QueryTest/recordservice-hdfs-before', vector)
# Add new file into hdfs path
file2 = os.path.join(os.environ['IMPALA_HOME'], FILE2_PATH)
call(["hadoop", "fs", "-put", file2, HDFS_LOCATION])
self.run_test_case('QueryTest/recordservice-hdfs-after', vector)
# select queries that reference multiple tables or the same table more than once
self.execute_query(
"create external table %s.%s (i integer) partitioned by (parCol integer)" %
(TEST_DB2, TEST_TBL3))
self.run_test_case('QueryTest/recordservice-multiple-tables', vector)
# select queries on partitioned table
# add partition
self.execute_query("alter table %s.%s add partition (parCol=1) location '%s'" %
(TEST_DB2, TEST_TBL3, HDFS_LOCATION))
self.execute_query("alter table %s.%s add partition (parCol=2) location '%s'" %
(TEST_DB2, TEST_TBL3, HDFS_LOCATION))
self.run_test_case('QueryTest/recordservice-add-partition', vector)
# drop partition
self.execute_query("alter table %s.%s drop partition (parCol=2)" %
(TEST_DB2, TEST_TBL3))
self.run_test_case('QueryTest/recordservice-drop-partition', vector)
``` |
[
{
"content": "```python\n#!/usr/bin/python\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any la... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/python\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at you... | ```python
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: execute_lambda
short_description: Execute an AWS Lambda function
description:
- This module executes AWS Lambda functions, allowing synchronous and asynchronous
invocation.
version_added: "2.2"
extends_documentation_fragment:
- aws
- ec2
author: "Ryan Scott Brown (@ryansb) <ryansb@redhat.com>"
requirements:
- python >= 2.6
- boto3
notes:
- Async invocation will always return an empty C(output) key.
- Synchronous invocation may result in a function timeout, resulting in an
empty C(output) key.
options:
name:
description:
- The name of the function to be invoked. This can only be used for
invocations within the calling account. To invoke a function in another
account, use I(function_arn) to specify the full ARN.
function_arn:
description:
- The name of the function to be invoked
tail_log:
description:
- If C(tail_log=yes), the result of the task will include the last 4 KB
of the CloudWatch log for the function execution. Log tailing only
works if you use synchronous invocation C(wait=yes). This is usually
used for development or testing Lambdas.
type: bool
default: 'no'
wait:
description:
- Whether to wait for the function results or not. If I(wait) is C(no),
the task will not return any results. To wait for the Lambda function
to complete, set C(wait=yes) and the result will be available in the
I(output) key.
type: bool
default: 'yes'
dry_run:
description:
- Do not *actually* invoke the function. A C(DryRun) call will check that
the caller has permissions to call the function, especially for
checking cross-account permissions.
type: bool
default: 'no'
version_qualifier:
description:
- Which version/alias of the function to run. This defaults to the
C(LATEST) revision, but can be set to any existing version or alias.
See U(https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html)
for details.
default: LATEST
payload:
description:
- A dictionary in any form to be provided as input to the Lambda function.
default: {}
'''
EXAMPLES = '''
- execute_lambda:
name: test-function
# the payload is automatically serialized and sent to the function
payload:
foo: bar
value: 8
register: response
# Test that you have sufficient permissions to execute a Lambda function in
# another account
- execute_lambda:
function_arn: arn:aws:lambda:us-east-1:123456789012:function/some-function
dry_run: true
- execute_lambda:
name: test-function
payload:
foo: bar
value: 8
wait: true
tail_log: true
register: response
# the response will have a `logs` key that will contain a log (up to 4KB) of the function execution in Lambda
# Pass the Lambda event payload as a json file.
- execute_lambda:
name: test-function
payload: "{{ lookup('file','lambda_event.json') }}"
register: response
- execute_lambda:
name: test-function
version_qualifier: PRODUCTION
'''
RETURN = '''
output:
description: Function output if wait=true and the function returns a value
returned: success
type: dict
sample: "{ 'output': 'something' }"
logs:
description: The last 4KB of the function logs. Only provided if I(tail_log) is true
type: str
returned: if I(tail_log) == true
status:
description: C(StatusCode) of API call exit (200 for synchronous invokes, 202 for async)
type: int
sample: 200
returned: always
'''
import base64
import json
import traceback
try:
import botocore
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import boto3_conn, ec2_argument_spec, get_aws_connection_info
from ansible.module_utils._text import to_native
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
name=dict(),
function_arn=dict(),
wait=dict(default=True, type='bool'),
tail_log=dict(default=False, type='bool'),
dry_run=dict(default=False, type='bool'),
version_qualifier=dict(),
payload=dict(default={}, type='dict'),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[
['name', 'function_arn'],
]
)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
name = module.params.get('name')
function_arn = module.params.get('function_arn')
await_return = module.params.get('wait')
dry_run = module.params.get('dry_run')
tail_log = module.params.get('tail_log')
version_qualifier = module.params.get('version_qualifier')
payload = module.params.get('payload')
if not HAS_BOTO3:
module.fail_json(msg='Python module "boto3" is missing, please install it')
if not (name or function_arn):
module.fail_json(msg="Must provide either a function_arn or a name to invoke.")
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=HAS_BOTO3)
if not region:
module.fail_json(msg="The AWS region must be specified as an "
"environment variable or in the AWS credentials "
"profile.")
try:
client = boto3_conn(module, conn_type='client', resource='lambda',
region=region, endpoint=ec2_url, **aws_connect_kwargs)
except (botocore.exceptions.ClientError, botocore.exceptions.ValidationError) as e:
module.fail_json(msg="Failure connecting boto3 to AWS: %s" % to_native(e), exception=traceback.format_exc())
invoke_params = {}
if await_return:
# await response
invoke_params['InvocationType'] = 'RequestResponse'
else:
# fire and forget
invoke_params['InvocationType'] = 'Event'
if dry_run or module.check_mode:
# dry_run overrides invocation type
invoke_params['InvocationType'] = 'DryRun'
if tail_log and await_return:
invoke_params['LogType'] = 'Tail'
elif tail_log and not await_return:
module.fail_json(msg="The `tail_log` parameter is only available if "
"the invocation waits for the function to complete. "
"Set `wait` to true or turn off `tail_log`.")
else:
invoke_params['LogType'] = 'None'
if version_qualifier:
invoke_params['Qualifier'] = version_qualifier
if payload:
invoke_params['Payload'] = json.dumps(payload)
if function_arn:
invoke_params['FunctionName'] = function_arn
elif name:
invoke_params['FunctionName'] = name
try:
response = client.invoke(**invoke_params)
except botocore.exceptions.ClientError as ce:
if ce.response['Error']['Code'] == 'ResourceNotFoundException':
module.fail_json(msg="Could not find Lambda to execute. Make sure "
"the ARN is correct and your profile has "
"permissions to execute this function.",
exception=traceback.format_exc())
module.fail_json(msg="Client-side error when invoking Lambda, check inputs and specific error",
exception=traceback.format_exc())
except botocore.exceptions.ParamValidationError as ve:
module.fail_json(msg="Parameters to `invoke` failed to validate",
exception=traceback.format_exc())
except Exception as e:
module.fail_json(msg="Unexpected failure while invoking Lambda function",
exception=traceback.format_exc())
results = {
'logs': '',
'status': response['StatusCode'],
'output': '',
}
if response.get('LogResult'):
try:
# logs are base64 encoded in the API response
results['logs'] = base64.b64decode(response.get('LogResult', ''))
except Exception as e:
module.fail_json(msg="Failed while decoding logs", exception=traceback.format_exc())
if invoke_params['InvocationType'] == 'RequestResponse':
try:
results['output'] = json.loads(response['Payload'].read().decode('utf8'))
except Exception as e:
module.fail_json(msg="Failed while decoding function return value", exception=traceback.format_exc())
if isinstance(results.get('output'), dict) and any(
[results['output'].get('stackTrace'), results['output'].get('errorMessage')]):
# AWS sends back stack traces and error messages when a function failed
# in a RequestResponse (synchronous) context.
template = ("Function executed, but there was an error in the Lambda function. "
"Message: {errmsg}, Type: {type}, Stack Trace: {trace}")
error_data = {
# format the stacktrace sent back as an array into a multiline string
'trace': '\n'.join(
[' '.join([
str(x) for x in line # cast line numbers to strings
]) for line in results.get('output', {}).get('stackTrace', [])]
),
'errmsg': results['output'].get('errorMessage'),
'type': results['output'].get('errorType')
}
module.fail_json(msg=template.format(**error_data), result=results)
module.exit_json(changed=True, result=results)
if __name__ == '__main__':
main()
``` |
[
{
"content": "```python\n###############################################################################\n##\n## Copyright (C) 2014 Greg Fausak\n##\n## Licensed under the Apache License, Version 2.0 (the \"License\");\n## you may not use this file except in compliance with the License.\n## You may obtain a ... | [
{
"content": "<|memory_start|>```python\n###############################################################################\n##\n## Copyright (C) 2014 Greg Fausak\n##\n## Licensed under the Apache License, Version 2.0 (the \"License\");\n## you may not use this file except in compliance with the License.\n## Y... | ```python
###############################################################################
##
## Copyright (C) 2014 Greg Fausak
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
###############################################################################
## ausqlite3.py - sqlite3 driver
##
## this is driver interface code. It is used with the DB class. It shouldn't
## be called or instantiated independent of that class.
###############################################################################
from __future__ import absolute_import
import sys,os
import sqlite3
from twisted.enterprise import adbapi
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, returnValue
from .dbbase import dbbase
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def set_dict_factory(conn):
conn.row_factory = dict_factory
class SQLITE3_3_8_2(dbbase):
"""
basic sqlite3 3.8.2 driver
"""
def __init__(self, topic_base, app_session, debug):
if debug is not None and debug:
log.startLogging(sys.stdout)
log.msg("SQLITE3_3_8_2:__init__()")
self.engine_version = "SQLITE3_3_8_2"
self.engine = "SQLITE"
self.conn = None
self.dsn = None
self.topic_base = topic_base
self.app_session = app_session
self.debug = debug
return
#
# connect
# simply connect to a database
# dsn is the only argument, it is a string, in psycopg2 connect
# format. basically it looks like
# dbname=DBNAME host=MACHINE user=DBUSER
# DBNAME is the database name
# MACHINE is the ip address or dns name of the machine
# DBUSER is the user to connect as
#
def connect(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:connect({},{})".format(args,kwargs))
self.dsn = args[0]
# there must be an easier way.
# this converts db=x host=y shatever=z to a dictionary.
try:
md = dict(s.split('=') for s in self.dsn.split())
md['cp_openfun'] = set_dict_factory
#self.conn = adbapi.ConnectionPool("sqlite3",**dict(s.split('=') for s in self.dsn.split()))
self.conn = adbapi.ConnectionPool("sqlite3",**dict(md))
log.msg("SQLITE3_3_8_2:connect() established")
except Exception as err:
log.msg("SQLITE3_3_8_2:connect({}),error({})".format(self.dsn,err))
raise err
return
#
# disconnect
# this disconnects from the currently connected database. if no database
# is currently connected then this does nothing.
def disconnect(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:disconnect({},{})".format(args,kwargs))
if self.conn:
c = self.conn
self.conn = None
c.close()
return
#
# query:
# s - query to run (with dictionary substitution embedded, like %(key)s
# a - dictionary pointing to arguments.
# example:
# s = 'select * from login where id = %(id)s'
# a = { 'id': 100 }
# returns:
# dictionary result of query
# note:
# there MUST be a result, otherwise use the operation call!
# well, the query can return 0 rows, that is ok. but, if the query
# by its nature doesn't return any rows then don't use this call!
# for example, a query that says 'insert into table x (c) values(r)'
# by its nature it doesn't return a row, so, this isn't the right
# method to use, use operation instead
#
@inlineCallbacks
def query(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:query({},{})".format(args,kwargs))
s = args[0]
a = args[1]
if self.conn:
try:
log.msg("SQLITE3_3_8_2:query().running({} with args {})".format(s,a))
rv = yield self.conn.runQuery(s,a)
log.msg("SQLITE3_3_8_2:query().results({})".format(rv))
returnValue(rv)
except Exception as err:
log.msg("SQLITE3_3_8_2:query({}),error({})".format(s,err))
raise err
# error here, probably should raise exception
return
#
# operation:
# identical to query, except, there is no result returned.
# note:
# it is important that your query does NOT return anything! If it does,
# use the query call!
#
# see also:
# query method has a good description of this and query.
#
@inlineCallbacks
def operation(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:operation({},{})".format(args,kwargs))
s = args[0]
a = args[1]
if self.conn:
try:
log.msg("SQLITE3_3_8_2:query().running({} with args {})".format(s,a))
rv = yield self.conn.runOperation(s,a)
log.msg("SQLITE3_3_8_2:query().results({})".format(rv))
returnValue(rv)
except Exception as err:
log.msg("SQLITE3_3_8_2:query({}),error({})".format(s,err))
raise err
# error here, probably should raise exception
return
#
# watch:
# this is specific to postgres NOTIFY/LISTEN. other drivers will need to stub this out
#
def watch(self,*args,**kwargs):
raise Exception("sqlite3 is trying to add watch, can only do this in postgres")
return
#
# info:
# return information about this connection
#
@inlineCallbacks
def info(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:info({},{})".format(args,kwargs))
rv = yield [{
"engine":self.engine,
"engine_version":self.engine_version,
"dsn":self.dsn,
"topic_base":self.topic_base,
"debug":self.debug
}]
returnValue(rv)
return
``` |
[
{
"content": "Here is some code:\n```python\n\"\"\"\nModule containing class used for loading config files.\n\nCopyright 2012 Red Hat, Inc.\nLicensed under the GNU General Public License, version 2 as\npublished by the Free Software Foundation; see COPYING for details.\n\"\"\"\n\n__author__ = \"\"\"\nolichtne@r... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n\"\"\"\nModule containing class used for loading config files.\n\nCopyright 2012 Red Hat, Inc.\nLicensed under the GNU General Public License, version 2 as\npublished by the Free Software Foundation; see COPYING for details.\n\"\"\"\n\n__author__ = \"... | ```python
"""
Module containing class used for loading config files.
Copyright 2012 Red Hat, Inc.
Licensed under the GNU General Public License, version 2 as
published by the Free Software Foundation; see COPYING for details.
"""
__author__ = """
olichtne@redhat.com (Ondrej Lichtner)
"""
import os
import sys
import re
import subprocess
from lnst.Common.Utils import bool_it
from lnst.Common.NetUtils import verify_mac_address
from lnst.Common.Colours import get_preset_conf
from lnst.Common.LnstError import LnstError
DefaultRPCPort = 9999
class ConfigError(LnstError):
pass
class Config():
options = None
_scheme = None
def __init__(self):
self._options = dict()
self._init_options()
def _init_options(self):
raise NotImplementedError()
def colours_scheme(self):
self._options['colours'] = dict()
self._options['colours']["disable_colours"] = {\
"value": False, "additive": False,
"action": self.optionBool, "name": "disable_colours"}
for preset in ["faded", "alert", "highlight", "pass", "fail", "error",
"info", "debug", "warning", "log_header"]:
self._options['colours'][preset] = {\
"value": get_preset_conf(preset), "additive": False,
"action": self.optionColour, "name": preset}
def get_config(self):
return self._options
def get_section(self, section):
if section not in self._options:
msg = 'Unknow section: %s' % section
raise ConfigError(msg)
return self._options[section]
def get_section_values(self, section):
if section not in self._options:
msg = 'Unknow section: %s' % section
raise ConfigError(msg)
res = {}
for opt_name, opt in list(self._options[section].items()):
res[opt_name] = opt["value"]
return res
def get_option(self, section, option):
sect = self.get_section(section)
if option not in sect:
msg = 'Unknown option: %s in section: %s' % (option, section)
raise ConfigError(msg)
return sect[option]["value"]
def set_option(self, section, option, value):
sect = self.get_section(section)
sect[option]["value"] = value
def _preprocess_lines(self, lines):
comment_re = re.compile(r'^#.*$')
empty_line_re = re.compile(r'^\s*$')
result = []
for line in lines:
if comment_re.match(line):
continue
if empty_line_re.match(line):
continue
result.append(line.strip())
return result
def _parse_file(self, path):
result = {}
current_section = None
section_re = re.compile(r'^\[(\w+)\]$')
option_re = re.compile(r'^(\w+)\s*(\+?=)\s*(.*)$')
with open(path, "r") as f:
lines = f.readlines()
lines = self._preprocess_lines(lines)
for line in lines:
section = section_re.match(line)
option = option_re.match(line)
if section:
current_section = section.group(1)
if current_section in result:
raise ConfigError("Section '[%s]' already defined." %\
current_section)
result[current_section] = []
elif option:
if current_section is None:
raise ConfigError("No section defined yet.")
opt = {"name": option.group(1),
"operator": option.group(2),
"value": option.group(3)}
result[current_section].append(opt)
else:
msg = "Invalid format of config line:\n%s" % line
raise ConfigError(msg)
return result
def load_config(self, path):
'''Parse and load the config file'''
exp_path = os.path.expanduser(path)
abs_path = os.path.abspath(exp_path)
print("Loading config file '%s'" % abs_path, file=sys.stderr)
sections = self._parse_file(abs_path)
self.handleSections(sections, abs_path)
def handleSections(self, sections, path):
for section in sections:
if section in self._options:
if section == "pools":
self.handlePools(sections[section], path)
else:
self.handleOptions(section, sections[section], path)
else:
msg = "Unknown section: %s" % section
raise ConfigError(msg)
def handleOptions(self, section_name, config, cfg_path):
section = self._options[section_name]
for opt in config:
opt_name = opt["name"]
opt_operator = opt["operator"]
opt_value = opt["value"]
if not opt_value:
continue
option = self._find_option_by_name(section, opt_name)
if option != None:
if opt_operator == "=":
option["value"] = option["action"](opt_value, cfg_path)
elif opt_operator == "+=" and option["additive"]:
option["value"] += option["action"](opt_value, cfg_path)
elif opt_operator == "+=":
msg = "Operator += not allowed for option %s" % opt_name
raise ConfigError(msg)
else:
msg = "Unknown option: %s in section %s" % (opt_name,
section_name)
raise ConfigError(msg)
def handlePools(self, config, cfg_path):
for pool in config:
if pool["operator"] != "=":
msg = "Only opetator '=' is allowed for section pools."
raise ConfigError(msg)
self.add_pool(pool["name"], pool["value"], cfg_path)
def add_pool(self, pool_name, pool_dir, cfg_path):
pool = {"value" : self.optionPath(pool_dir, cfg_path),
"additive" : False,
"action" : self.optionPath,
"name" : pool_name}
self._options["pools"][pool_name] = pool
def get_pools(self):
pools = {}
for pool_name, pool in list(self._options["pools"].items()):
pools[pool_name] = pool["value"]
return pools
def get_pool(self, pool_name):
try:
return self._options["pools"][pool_name]
except KeyError:
return None
def _find_option_by_name(self, section, opt_name):
for option in section.values():
if option["name"] == opt_name:
return option
return None
def optionPort(self, option, cfg_path):
try:
int(option)
except ValueError:
msg = "Option port expects a number."
raise ConfigError(msg)
return int(option)
def optionPath(self, option, cfg_path):
exp_path = os.path.expanduser(option)
abs_path = os.path.join(os.path.dirname(cfg_path), exp_path)
norm_path = os.path.normpath(abs_path)
return norm_path
def optionMacRange(self, option, cfg_path):
vals = option.split()
if len(vals) != 2:
msg = "Option mac_pool_range expects 2"\
" values sepparated by whitespaces."
raise ConfigError(msg)
if not verify_mac_address(vals[0]):
msg = "Invalid MAC address: %s" % vals[0]
raise ConfigError(msg)
if not verify_mac_address(vals[1]):
msg = "Invalid MAC address: %s" % vals[1]
raise ConfigError(msg)
return vals
def optionDirList(self, option, cfg_path):
paths = re.split(r'(?<!\\)\s', option)
dirs = []
for path in paths:
if path == '':
continue
norm_path = self.optionPath(path, cfg_path)
dirs.append(norm_path)
return dirs
def optionTimeval(self, option, cfg_path):
timeval_re = "^(([0-9]+)days?)?\s*(([0-9]+)hours?)?\s*" \
"(([0-9]+)minutes?)?\s*(([0-9]+)seconds?)?$"
timeval_match = re.match(timeval_re, option)
if timeval_match:
values = timeval_match.groups()
timeval = 0
if values[1]:
timeval += int(values[1])*24*60*60
if values[3]:
timeval += int(values[3])*60*60
if values[5]:
timeval += int(values[5])*60
if values[7]:
timeval += int(values[7])
else:
msg = "Incorrect timeval format."
raise ConfigError(msg)
return timeval
def optionColour(self, option, cfg_path):
colour = option.split()
if len(colour) != 3:
msg = "Colour must be specified by 3"\
" values (foreground, background, bold)"\
" sepparated by whitespace."
raise ConfigError(msg)
return colour
def optionBool(self, option, cfg_path):
return bool_it(option)
def optionPlain(self, option, cfg_path):
return option
def dump_config(self):
string = ""
for section in self._options:
string += "[%s]\n" % section
for option in self._options[section]:
val = self.value_to_string(section, option)
opt_name = self._options[section][option]["name"]
string += "%s = %s\n" % (opt_name, val)
return string
def value_to_string(self, section, option):
string = ""
value = self._options[section][option]["value"]
if type(value) == list:
string = " ".join(value)
else:
string = str(value)
return string
``` |
[
{
"content": "Return the code exactly, with no changes:\n```python\n#!/usr/bin/env python\n#\n# Copyright 2009 Facebook\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# ... | [
{
"content": "Return the code exactly, with no changes:\n<|memory_start|>```python\n#!/usr/bin/env python\n#\n# Copyright 2009 Facebook\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the Lice... | ```python
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
import uuid
from lib.tornado import escape
from lib.tornado import httputil
from lib.tornado import locale
from lib.tornado.log import access_log, app_log, gen_log
from lib.tornado import stack_context
from lib.tornado import template
from lib.tornado.escape import utf8, _unicode
from lib.tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
class RequestHandler(object):
"""Subclass this class and define `get()` or `post()` to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable ``SUPPORTED_METHODS`` in your
`RequestHandler` subclass.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
"OPTIONS")
_template_loaders = {} # {path: template.BaseLoader}
_template_loader_lock = threading.Lock()
_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
def __init__(self, application, request, **kwargs):
super(RequestHandler, self).__init__()
self.application = application
self.request = request
self._headers_written = False
self._finished = False
self._auto_finish = True
self._transforms = None # will be set in _execute
self.path_args = None
self.path_kwargs = None
self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
application.ui_methods.items())
# UIModules are available as both `modules` and `_modules` in the
# template namespace. Historically only `modules` was available
# but could be clobbered by user additions to the namespace.
# The template {% module %} directive looks in `_modules` to avoid
# possible conflicts.
self.ui["_modules"] = ObjectDict((n, self._ui_module(n, m)) for n, m in
application.ui_modules.items())
self.ui["modules"] = self.ui["_modules"]
self.clear()
# Check since connection is not available in WSGI
if getattr(self.request, "connection", None):
self.request.connection.set_close_callback(
self.on_connection_close)
self.initialize(**kwargs)
def initialize(self):
"""Hook for subclass initialization.
A dictionary passed as the third argument of a url spec will be
supplied as keyword arguments to initialize().
Example::
class ProfileHandler(RequestHandler):
def initialize(self, database):
self.database = database
def get(self, username):
...
app = Application([
(r'/user/(.*)', ProfileHandler, dict(database=database)),
])
"""
pass
@property
def settings(self):
"""An alias for `self.application.settings <Application.settings>`."""
return self.application.settings
def head(self, *args, **kwargs):
raise HTTPError(405)
def get(self, *args, **kwargs):
raise HTTPError(405)
def post(self, *args, **kwargs):
raise HTTPError(405)
def delete(self, *args, **kwargs):
raise HTTPError(405)
def patch(self, *args, **kwargs):
raise HTTPError(405)
def put(self, *args, **kwargs):
raise HTTPError(405)
def options(self, *args, **kwargs):
raise HTTPError(405)
def prepare(self):
"""Called at the beginning of a request before `get`/`post`/etc.
Override this method to perform common initialization regardless
of the request method.
"""
pass
def on_finish(self):
"""Called after the end of a request.
Override this method to perform cleanup, logging, etc.
This method is a counterpart to `prepare`. ``on_finish`` may
not produce any output, as it is called after the response
has been sent to the client.
"""
pass
def on_connection_close(self):
"""Called in async handlers if the client closed the connection.
Override this to clean up resources associated with
long-lived connections. Note that this method is called only if
the connection was closed during asynchronous processing; if you
need to do cleanup after every request override `on_finish`
instead.
Proxies may keep a connection open for a time (perhaps
indefinitely) after the client has gone away, so this method
may not be called promptly after the end user closes their
connection.
"""
pass
def clear(self):
"""Resets all headers and content for this response."""
self._headers = httputil.HTTPHeaders({
"Server": "TornadoServer/%s" % tornado.version,
"Content-Type": "text/html; charset=UTF-8",
"Date": httputil.format_timestamp(time.gmtime()),
})
self.set_default_headers()
if not self.request.supports_http_1_1():
if self.request.headers.get("Connection") == "Keep-Alive":
self.set_header("Connection", "Keep-Alive")
self._write_buffer = []
self._status_code = 200
self._reason = httputil.responses[200]
def set_default_headers(self):
"""Override this to set HTTP headers at the beginning of the request.
For example, this is the place to set a custom ``Server`` header.
Note that setting such headers in the normal flow of request
processing may not do what you want, since headers may be reset
during error handling.
"""
pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
def get_status(self):
"""Returns the status code for our response."""
return self._status_code
def set_header(self, name, value):
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
HTTP specification. If the value is not a string, we convert it to
a string. All header values are then encoded as UTF-8.
"""
self._headers[name] = self._convert_header_value(value)
def add_header(self, name, value):
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
to return multiple values for the same header.
"""
self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if len(value) > 4000 or re.search(br"[\x00-\x1f]", value):
raise ValueError("Unsafe header value %r", value)
return value
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we throw an HTTP 400 exception if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise HTTPError(400, "Missing argument %s" % name)
return default
return args[-1]
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = []
for v in self.request.arguments.get(name, []):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = RequestHandler._remove_control_chars_regex.sub(" ", v)
if strip:
v = v.strip()
values.append(v)
return values
def decode_argument(self, value, name=None):
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
By default, this method decodes the argument as utf-8 and returns
a unicode string, but this may be overridden in subclasses.
This method is used as a filter for both `get_argument()` and for
values extracted from the url and passed to `get()`/`post()`/etc.
The name of the argument is provided if known, but may be None
(e.g. for unnamed groups in the url regex).
"""
return _unicode(value)
@property
def cookies(self):
"""An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`."""
return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
name = escape.native_str(name)
value = escape.native_str(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = Cookie.SimpleCookie()
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
morsel = self._new_cookie[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
morsel["expires"] = httputil.format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == 'max_age':
k = 'max-age'
morsel[k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
def clear_all_cookies(self):
"""Deletes all the cookies the user sent with this request."""
for name in self.request.cookies:
self.clear_cookie(name)
def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
"""Signs and timestamps a cookie so it cannot be forged.
You must specify the ``cookie_secret`` setting in your Application
to use this method. It should be a long, random sequence of bytes
to be used as the HMAC secret for the signature.
To read a cookie set with this method, use `get_secure_cookie()`.
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
Secure cookies may contain arbitrary byte values, not just unicode
strings (unlike regular cookies)
"""
self.set_cookie(name, self.create_signed_value(name, value),
expires_days=expires_days, **kwargs)
def create_signed_value(self, name, value):
"""Signs and timestamps a string so it cannot be forged.
Normally used via set_secure_cookie, but provided as a separate
method for non-cookie uses. To decode a value not stored
as a cookie use the optional value argument to get_secure_cookie.
"""
self.require_setting("cookie_secret", "secure cookies")
return create_signed_value(self.application.settings["cookie_secret"],
name, value)
def get_secure_cookie(self, name, value=None, max_age_days=31):
"""Returns the given signed cookie if it validates, or None.
The decoded cookie value is returned as a byte string (unlike
`get_cookie`).
"""
self.require_setting("cookie_secret", "secure cookies")
if value is None:
value = self.get_cookie(name)
return decode_signed_value(self.application.settings["cookie_secret"],
name, value, max_age_days=max_age_days)
def redirect(self, url, permanent=False, status=None):
"""Sends a redirect to the given (optionally relative) URL.
If the ``status`` argument is specified, that value is used as the
HTTP status code; otherwise either 301 (permanent) or 302
(temporary) is chosen based on the ``permanent`` argument.
The default is 302 (temporary).
"""
if self._headers_written:
raise Exception("Cannot redirect after headers have been written")
if status is None:
status = 301 if permanent else 302
else:
assert isinstance(status, int) and 300 <= status <= 399
self.set_status(status)
# Remove whitespace
url = re.sub(br"[\x00-\x20]+", "", utf8(url))
self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
url))
self.finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be ``application/json``.
(if you want to send JSON as a different ``Content-Type``, call
set_header *after* calling write()).
Note that lists are not converted to JSON because of a potential
cross-site security vulnerability. All JSON output should be
wrapped in a dictionary. More details at
http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx
"""
if self._finished:
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
self._write_buffer.append(chunk)
def render(self, template_name, **kwargs):
"""Renders the template with the given arguments as the response."""
html = self.render_string(template_name, **kwargs)
# Insert the additional JS and CSS added by the modules on the page
js_embed = []
js_files = []
css_embed = []
css_files = []
html_heads = []
html_bodies = []
for module in getattr(self, "_active_modules", {}).values():
embed_part = module.embedded_javascript()
if embed_part:
js_embed.append(utf8(embed_part))
file_part = module.javascript_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
js_files.append(file_part)
else:
js_files.extend(file_part)
embed_part = module.embedded_css()
if embed_part:
css_embed.append(utf8(embed_part))
file_part = module.css_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
css_files.append(file_part)
else:
css_files.extend(file_part)
head_part = module.html_head()
if head_part:
html_heads.append(utf8(head_part))
body_part = module.html_body()
if body_part:
html_bodies.append(utf8(body_part))
def is_absolute(path):
return any(path.startswith(x) for x in ["/", "http:", "https:"])
if js_files:
# Maintain order of JavaScript files given by modules
paths = []
unique_paths = set()
for path in js_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
js = ''.join('<script src="' + escape.xhtml_escape(p) +
'" type="text/javascript"></script>'
for p in paths)
sloc = html.rindex(b'</body>')
html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
if js_embed:
js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
b'\n'.join(js_embed) + b'\n//]]>\n</script>'
sloc = html.rindex(b'</body>')
html = html[:sloc] + js + b'\n' + html[sloc:]
if css_files:
paths = []
unique_paths = set()
for path in css_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths)
hloc = html.index(b'</head>')
html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
if css_embed:
css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
b'\n</style>'
hloc = html.index(b'</head>')
html = html[:hloc] + css + b'\n' + html[hloc:]
if html_heads:
hloc = html.index(b'</head>')
html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
if html_bodies:
hloc = html.index(b'</body>')
html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
self.finish(html)
def render_string(self, template_name, **kwargs):
"""Generate the given template with the given arguments.
We return the generated byte string (in utf8). To generate and
write a template as a response, use render() above.
"""
# If no template_path is specified, use the path of the calling file
template_path = self.get_template_path()
if not template_path:
frame = sys._getframe(0)
web_file = frame.f_code.co_filename
while frame.f_code.co_filename == web_file:
frame = frame.f_back
template_path = os.path.dirname(frame.f_code.co_filename)
with RequestHandler._template_loader_lock:
if template_path not in RequestHandler._template_loaders:
loader = self.create_template_loader(template_path)
RequestHandler._template_loaders[template_path] = loader
else:
loader = RequestHandler._template_loaders[template_path]
t = loader.load(template_name)
namespace = self.get_template_namespace()
namespace.update(kwargs)
return t.generate(**namespace)
def get_template_namespace(self):
"""Returns a dictionary to be used as the default template namespace.
May be overridden by subclasses to add or modify values.
The results of this method will be combined with additional
defaults in the `tornado.template` module and keyword arguments
to `render` or `render_string`.
"""
namespace = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.reverse_url
)
namespace.update(self.ui)
return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
def flush(self, include_footers=False, callback=None):
"""Flushes the current output buffer to the network.
The ``callback`` argument, if given, can be used for flow control:
it will be run when all flushed data has been written to the socket.
Note that only one flush callback can be outstanding at a time;
if another flush occurs before the previous flush's callback
has been run, the previous callback will be discarded.
"""
if self.application._wsgi:
raise Exception("WSGI applications do not support flush()")
chunk = b"".join(self._write_buffer)
self._write_buffer = []
if not self._headers_written:
self._headers_written = True
for transform in self._transforms:
self._status_code, self._headers, chunk = \
transform.transform_first_chunk(
self._status_code, self._headers, chunk, include_footers)
headers = self._generate_headers()
else:
for transform in self._transforms:
chunk = transform.transform_chunk(chunk, include_footers)
headers = b""
# Ignore the chunk and only write the headers for HEAD requests
if self.request.method == "HEAD":
if headers:
self.request.write(headers, callback=callback)
return
self.request.write(headers + chunk, callback=callback)
    def finish(self, chunk=None):
        """Finishes this response, ending the HTTP request.
        ``chunk``, if given, is passed to `write` as a final piece of
        output before the response is closed.  Calling this method a
        second time raises ``RuntimeError``.
        """
        if self._finished:
            raise RuntimeError("finish() called twice. May be caused "
                               "by using async operations without the "
                               "@asynchronous decorator.")
        if chunk is not None:
            self.write(chunk)
        # Automatically support ETags and add the Content-Length header if
        # we have not flushed any content yet.
        if not self._headers_written:
            if (self._status_code == 200 and
                self.request.method in ("GET", "HEAD") and
                "Etag" not in self._headers):
                etag = self.compute_etag()
                if etag is not None:
                    self.set_header("Etag", etag)
                    # If the client already holds this entity, reply 304
                    # with an empty body instead.
                    inm = self.request.headers.get("If-None-Match")
                    if inm and inm.find(etag) != -1:
                        self._write_buffer = []
                        self.set_status(304)
            if self._status_code == 304:
                assert not self._write_buffer, "Cannot send body with 304"
                self._clear_headers_for_304()
            elif "Content-Length" not in self._headers:
                content_length = sum(len(part) for part in self._write_buffer)
                self.set_header("Content-Length", content_length)
        if hasattr(self.request, "connection"):
            # Now that the request is finished, clear the callback we
            # set on the IOStream (which would otherwise prevent the
            # garbage collection of the RequestHandler when there
            # are keepalive connections)
            self.request.connection.stream.set_close_callback(None)
        if not self.application._wsgi:
            self.flush(include_footers=True)
            self.request.finish()
            self._log()
        self._finished = True
        self.on_finish()
def send_error(self, status_code=500, **kwargs):
"""Sends the given HTTP error code to the browser.
If `flush()` has already been called, it is not possible to send
an error, so this method will simply terminate the response.
If output has been written but not yet flushed, it will be discarded
and replaced with the error page.
Override `write_error()` to customize the error page that is returned.
Additional keyword arguments are passed through to `write_error`.
"""
if self._headers_written:
gen_log.error("Cannot send error response after headers written")
if not self._finished:
self.finish()
return
self.clear()
reason = None
if 'exc_info' in kwargs:
exception = kwargs['exc_info'][1]
if isinstance(exception, HTTPError) and exception.reason:
reason = exception.reason
self.set_status(status_code, reason=reason)
try:
self.write_error(status_code, **kwargs)
except Exception:
app_log.error("Uncaught exception in write_error", exc_info=True)
if not self._finished:
self.finish()
def write_error(self, status_code, **kwargs):
"""Override to implement custom error pages.
``write_error`` may call `write`, `render`, `set_header`, etc
to produce output as usual.
If this error was caused by an uncaught exception (including
HTTPError), an ``exc_info`` triple will be available as
``kwargs["exc_info"]``. Note that this exception may not be
the "current" exception for purposes of methods like
``sys.exc_info()`` or ``traceback.format_exc``.
For historical reasons, if a method ``get_error_html`` exists,
it will be used instead of the default ``write_error`` implementation.
``get_error_html`` returned a string instead of producing output
normally, and had different semantics for exception handling.
Users of ``get_error_html`` are encouraged to convert their code
to override ``write_error`` instead.
"""
if hasattr(self, 'get_error_html'):
if 'exc_info' in kwargs:
exc_info = kwargs.pop('exc_info')
kwargs['exception'] = exc_info[1]
try:
# Put the traceback into sys.exc_info()
raise_exc_info(exc_info)
except Exception:
self.finish(self.get_error_html(status_code, **kwargs))
else:
self.finish(self.get_error_html(status_code, **kwargs))
return
if self.settings.get("debug") and "exc_info" in kwargs:
# in debug mode, try to send a traceback
self.set_header('Content-Type', 'text/plain')
for line in traceback.format_exception(*kwargs["exc_info"]):
self.write(line)
self.finish()
else:
self.finish("<html><title>%(code)d: %(message)s</title>"
"<body>%(code)d: %(message)s</body></html>" % {
"code": status_code,
"message": self._reason,
})
@property
def locale(self):
"""The local for the current session.
Determined by either `get_user_locale`, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or `get_browser_locale`, which uses the ``Accept-Language``
header.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale()
if not self._locale:
self._locale = self.get_browser_locale()
assert self._locale
return self._locale
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
If None is returned, we fall back to `get_browser_locale()`.
This method should return a `tornado.locale.Locale` object,
most likely obtained via a call like ``tornado.locale.get("en")``
"""
return None
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from ``Accept-Language`` header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
@property
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
return None
    def get_login_url(self):
        """Override to customize the login URL based on the request.
        By default, we use the ``login_url`` application setting.
        """
        # Raises a descriptive Exception if 'login_url' was never configured.
        self.require_setting("login_url", "@tornado.web.authenticated")
        return self.application.settings["login_url"]
    def get_template_path(self):
        """Override to customize template path for each handler.
        By default, we use the ``template_path`` application setting.
        Return None to load templates relative to the calling file.
        """
        # .get() yields None when the setting is absent.
        return self.application.settings.get("template_path")
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf")
if not token:
token = binascii.b2a_hex(uuid.uuid4().bytes)
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", token, expires_days=expires_days)
self._xsrf_token = token
return self._xsrf_token
def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
To prevent cross-site request forgery, we set an ``_xsrf``
cookie and include the same value as a non-cookie
field with all ``POST`` requests. If the two do not match, we
reject the form submission as a potential forgery.
The ``_xsrf`` value may be set as either a form field named ``_xsrf``
or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
(the latter is accepted for compatibility with Django).
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
Prior to release 1.1.1, this check was ignored if the HTTP header
``X-Requested-With: XMLHTTPRequest`` was present. This exception
has been shown to be insecure and has been removed. For more
information please see
http://www.djangoproject.com/weblog/2011/feb/08/security/
http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
"""
token = (self.get_argument("_xsrf", None) or
self.request.headers.get("X-Xsrftoken") or
self.request.headers.get("X-Csrftoken"))
if not token:
raise HTTPError(403, "'_xsrf' argument missing from POST")
if self.xsrf_token != token:
raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
"""An HTML ``<input/>`` element to be included with all POST forms.
It defines the ``_xsrf`` input value, which we check on all POST
requests to prevent cross-site request forgery. If you have set
the ``xsrf_cookies`` application setting, you must include this
HTML within all of your HTML forms.
In a template, this method should be called with ``{% module
xsrf_form_html() %}``
See `check_xsrf_cookie()` above for more information.
"""
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
def static_url(self, path, include_host=None):
"""Returns a static URL for the given relative static file path.
This method requires you set the ``static_path`` setting in your
application (which specifies the root directory of your static
files).
We append ``?v=<signature>`` to the returned URL, which makes our
static file handler set an infinite expiration header on the
returned content. The signature is based on the content of the
file.
By default this method returns URLs relative to the current
host, but if ``include_host`` is true the URL returned will be
absolute. If this handler has an ``include_host`` attribute,
that value will be used as the default for all `static_url`
calls that do not pass ``include_host`` as a keyword argument.
"""
self.require_setting("static_path", "static_url")
static_handler_class = self.settings.get(
"static_handler_class", StaticFileHandler)
if include_host is None:
include_host = getattr(self, "include_host", False)
if include_host:
base = self.request.protocol + "://" + self.request.host
else:
base = ""
return base + static_handler_class.make_static_url(self.settings, path)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception as e:
if self._headers_written:
app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name):
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
    def reverse_url(self, name, *args):
        """Alias for `Application.reverse_url`.
        Returns the URL path for the handler registered under ``name``,
        with ``args`` substituted for the regex capturing groups.
        """
        return self.application.reverse_url(name, *args)
def compute_etag(self):
"""Computes the etag header to be used for this request.
May be overridden to provide custom etag implementations,
or may return None to disable tornado's default etag support.
"""
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
return '"%s"' % hasher.hexdigest()
    def _stack_context_handle_exception(self, type, value, traceback):
        """Handles an exception propagated up through `stack_context`.
        Always returns True to mark the exception as consumed.
        """
        try:
            # For historical reasons _handle_request_exception only takes
            # the exception value instead of the full triple,
            # so re-raise the exception to ensure that it's in
            # sys.exc_info()
            raise_exc_info((type, value, traceback))
        except Exception:
            self._handle_request_exception(value)
        return True
    def _execute(self, transforms, *args, **kwargs):
        """Executes this request with the given output transforms.
        Decodes path arguments, enforces the XSRF check when enabled,
        calls `prepare`, then dispatches to the method named after the
        HTTP verb.  All exceptions funnel into
        `_handle_request_exception`.
        """
        self._transforms = transforms
        try:
            if self.request.method not in self.SUPPORTED_METHODS:
                raise HTTPError(405)
            # Positional and named regex captures, decoded via
            # decode_argument so subclasses control the encoding.
            self.path_args = [self.decode_argument(arg) for arg in args]
            self.path_kwargs = dict((k, self.decode_argument(v, name=k))
                                    for (k, v) in kwargs.items())
            # If XSRF cookies are turned on, reject form submissions without
            # the proper cookie
            if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
                    self.application.settings.get("xsrf_cookies"):
                self.check_xsrf_cookie()
            self.prepare()
            if not self._finished:
                getattr(self, self.request.method.lower())(
                    *self.path_args, **self.path_kwargs)
                if self._auto_finish and not self._finished:
                    self.finish()
        except Exception as e:
            self._handle_request_exception(e)
def _generate_headers(self):
reason = self._reason
lines = [utf8(self.request.version + " " +
str(self._status_code) +
" " + reason)]
lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
if hasattr(self, "_new_cookie"):
for cookie in self._new_cookie.values():
lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
return b"\r\n".join(lines) + b"\r\n\r\n"
    def _log(self):
        """Logs the current request.
        Sort of deprecated since this functionality was moved to the
        Application, but left in place for the benefit of existing apps
        that have overridden this method.
        """
        # Delegates so the Application (or a log_function) controls format.
        self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
if isinstance(e, HTTPError):
if e.log_message:
format = "%d %s: " + e.log_message
args = [e.status_code, self._request_summary()] + list(e.args)
gen_log.warning(format, *args)
if e.status_code not in httputil.responses and not e.reason:
gen_log.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exc_info=sys.exc_info())
else:
self.send_error(e.status_code, exc_info=sys.exc_info())
else:
app_log.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=True)
self.send_error(500, exc_info=sys.exc_info())
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
    """Wrap request handler methods with this if they are asynchronous.
    If this decorator is given, the response is not finished when the
    method returns. It is up to the request handler to call
    `self.finish() <RequestHandler.finish>` to finish the HTTP
    request. Without this decorator, the request is automatically
    finished when the ``get()`` or ``post()`` method returns. Example::
        class MyRequestHandler(web.RequestHandler):
            @web.asynchronous
            def get(self):
                http = httpclient.AsyncHTTPClient()
                http.fetch("http://friendfeed.com/", self._on_download)
            def _on_download(self, response):
                self.write("Downloaded!")
                self.finish()
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.application._wsgi:
            raise Exception("@asynchronous is not supported for WSGI apps")
        # Disable the automatic finish() normally issued when the handler
        # method returns; the handler now owns the response lifecycle.
        self._auto_finish = False
        # Route exceptions raised from later callbacks back into this
        # handler's exception handling.
        with stack_context.ExceptionStackContext(
                self._stack_context_handle_exception):
            return method(self, *args, **kwargs)
    return wrapper
def removeslash(method):
    """Use this decorator to remove trailing slashes from the request path.

    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    decorator.  Your request handler mapping should use a regular expression
    like ``r'/foo/*'`` in conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        path = self.request.path
        if path.endswith("/"):
            if self.request.method not in ("GET", "HEAD"):
                raise HTTPError(404)
            uri = path.rstrip("/")
            if uri:  # never redirect '/' down to ''
                if self.request.query:
                    uri += "?" + self.request.query
                self.redirect(uri, permanent=True)
            return
        return method(self, *args, **kwargs)
    return wrapper
def addslash(method):
    """Use this decorator to add a missing trailing slash to the request path.

    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    decorator.  Your request handler mapping should use a regular expression
    like ``r'/foo/?'`` in conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.request.path.endswith("/"):
            return method(self, *args, **kwargs)
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(404)
        uri = self.request.path + "/"
        if self.request.query:
            uri += "?" + self.request.query
        self.redirect(uri, permanent=True)
    return wrapper
class Application(object):
    """A collection of request handlers that make up a web application.
    Instances of this class are callable and can be passed directly to
    HTTPServer to serve the application::
        application = web.Application([
            (r"/", MainPageHandler),
        ])
        http_server = httpserver.HTTPServer(application)
        http_server.listen(8080)
        ioloop.IOLoop.instance().start()
    The constructor for this class takes in a list of `URLSpec` objects
    or (regexp, request_class) tuples. When we receive requests, we
    iterate over the list in order and instantiate an instance of the
    first request class whose regexp matches the request path.
    Each tuple can contain an optional third element, which should be
    a dictionary if it is present. That dictionary is passed as
    keyword arguments to the contructor of the handler. This pattern
    is used for the `StaticFileHandler` in this example (note that a
    `StaticFileHandler` can be installed automatically with the
    static_path setting described below)::
        application = web.Application([
            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])
    We support virtual hosts with the `add_handlers` method, which takes in
    a host regular expression as the first argument::
        application.add_handlers(r"www\.myhost\.com", [
            (r"/article/([0-9]+)", ArticleHandler),
        ])
    You can serve static files by sending the ``static_path`` setting
    as a keyword argument. We will serve those files from the
    ``/static/`` URI (this is configurable with the
    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    and ``/robots.txt`` from the same directory. A custom subclass of
    `StaticFileHandler` can be specified with the
    ``static_handler_class`` setting.
    """
    def __init__(self, handlers=None, default_host="", transforms=None,
                 wsgi=False, **settings):
        if transforms is None:
            # Default transform chain: optional gzip, then chunked encoding.
            self.transforms = []
            if settings.get("gzip"):
                self.transforms.append(GZipContentEncoding)
            self.transforms.append(ChunkedTransferEncoding)
        else:
            self.transforms = transforms
        self.handlers = []
        self.named_handlers = {}
        self.default_host = default_host
        self.settings = settings
        # Built-in UI modules; user-supplied ones are merged in below.
        self.ui_modules = {'linkify': _linkify,
                           'xsrf_form_html': _xsrf_form_html,
                           'Template': TemplateModule,
                           }
        self.ui_methods = {}
        self._wsgi = wsgi
        self._load_ui_modules(settings.get("ui_modules", {}))
        self._load_ui_methods(settings.get("ui_methods", {}))
        if self.settings.get("static_path"):
            # Install static-file routes ahead of the user's handlers so
            # the static prefix always takes precedence.
            path = self.settings["static_path"]
            handlers = list(handlers or [])
            static_url_prefix = settings.get("static_url_prefix",
                                             "/static/")
            static_handler_class = settings.get("static_handler_class",
                                                StaticFileHandler)
            static_handler_args = settings.get("static_handler_args", {})
            static_handler_args['path'] = path
            for pattern in [re.escape(static_url_prefix) + r"(.*)",
                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
                handlers.insert(0, (pattern, static_handler_class,
                                    static_handler_args))
        if handlers:
            self.add_handlers(".*$", handlers)
        # Automatically reload modified modules
        if self.settings.get("debug") and not wsgi:
            from lib.tornado import autoreload
            autoreload.start()
    def listen(self, port, address="", **kwargs):
        """Starts an HTTP server for this application on the given port.
        This is a convenience alias for creating an `.HTTPServer`
        object and calling its listen method. Keyword arguments not
        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
        `.HTTPServer` constructor. For advanced uses
        (e.g. multi-process mode), do not use this method; create an
        `.HTTPServer` and call its
        `.TCPServer.bind`/`.TCPServer.start` methods directly.
        Note that after calling this method you still need to call
        ``IOLoop.instance().start()`` to start the server.
        """
        # import is here rather than top level because HTTPServer
        # is not importable on appengine
        from lib.tornado.httpserver import HTTPServer
        server = HTTPServer(self, **kwargs)
        server.listen(port, address)
    def add_handlers(self, host_pattern, host_handlers):
        """Appends the given handlers to our handler list.
        Host patterns are processed sequentially in the order they were
        added. All matching patterns will be considered.
        """
        if not host_pattern.endswith("$"):
            host_pattern += "$"
        handlers = []
        # The handlers with the wildcard host_pattern are a special
        # case - they're added in the constructor but should have lower
        # precedence than the more-precise handlers added later.
        # If a wildcard handler group exists, it should always be last
        # in the list, so insert new groups just before it.
        if self.handlers and self.handlers[-1][0].pattern == '.*$':
            self.handlers.insert(-1, (re.compile(host_pattern), handlers))
        else:
            self.handlers.append((re.compile(host_pattern), handlers))
        for spec in host_handlers:
            if isinstance(spec, type(())):
                assert len(spec) in (2, 3)
                pattern = spec[0]
                handler = spec[1]
                if isinstance(handler, str):
                    # import the Module and instantiate the class
                    # Must be a fully qualified name (module.ClassName)
                    handler = import_object(handler)
                if len(spec) == 3:
                    kwargs = spec[2]
                else:
                    kwargs = {}
                spec = URLSpec(pattern, handler, kwargs)
            handlers.append(spec)
            if spec.name:
                if spec.name in self.named_handlers:
                    app_log.warning(
                        "Multiple handlers named %s; replacing previous value",
                        spec.name)
                self.named_handlers[spec.name] = spec
    def add_transform(self, transform_class):
        """Appends an output transform class to this application's chain."""
        self.transforms.append(transform_class)
    def _get_host_handlers(self, request):
        """Returns the URLSpecs whose host pattern matches this request."""
        host = request.host.lower().split(':')[0]
        matches = []
        for pattern, handlers in self.handlers:
            if pattern.match(host):
                matches.extend(handlers)
        # Look for default host if not behind load balancer (for debugging)
        if not matches and "X-Real-Ip" not in request.headers:
            for pattern, handlers in self.handlers:
                if pattern.match(self.default_host):
                    matches.extend(handlers)
        return matches or None
    def _load_ui_methods(self, methods):
        # Accepts a module, a list of modules/dicts, or a dict of
        # name -> callable, and flattens everything into self.ui_methods.
        if isinstance(methods, types.ModuleType):
            self._load_ui_methods(dict((n, getattr(methods, n))
                                       for n in dir(methods)))
        elif isinstance(methods, list):
            for m in methods:
                self._load_ui_methods(m)
        else:
            for name, fn in methods.items():
                if not name.startswith("_") and hasattr(fn, "__call__") \
                        and name[0].lower() == name[0]:
                    self.ui_methods[name] = fn
    def _load_ui_modules(self, modules):
        # Accepts a module, a list, or a dict of name -> UIModule subclass.
        if isinstance(modules, types.ModuleType):
            self._load_ui_modules(dict((n, getattr(modules, n))
                                       for n in dir(modules)))
        elif isinstance(modules, list):
            for m in modules:
                self._load_ui_modules(m)
        else:
            assert isinstance(modules, dict)
            for name, cls in modules.items():
                try:
                    if issubclass(cls, UIModule):
                        self.ui_modules[name] = cls
                except TypeError:
                    # dir() of a module yields non-class attributes too.
                    pass
    def __call__(self, request):
        """Called by HTTPServer to execute the request."""
        transforms = [t(request) for t in self.transforms]
        handler = None
        args = []
        kwargs = {}
        handlers = self._get_host_handlers(request)
        if not handlers:
            # Unknown virtual host: bounce the client to the default host.
            handler = RedirectHandler(
                self, request, url="http://" + self.default_host + "/")
        else:
            for spec in handlers:
                match = spec.regex.match(request.path)
                if match:
                    handler = spec.handler_class(self, request, **spec.kwargs)
                    if spec.regex.groups:
                        # None-safe wrapper around url_unescape to handle
                        # unmatched optional groups correctly
                        def unquote(s):
                            if s is None:
                                return s
                            return escape.url_unescape(s, encoding=None)
                        # Pass matched groups to the handler.  Since
                        # match.groups() includes both named and unnamed groups,
                        # we want to use either groups or groupdict but not both.
                        # Note that args are passed as bytes so the handler can
                        # decide what encoding to use.
                        if spec.regex.groupindex:
                            kwargs = dict(
                                (str(k), unquote(v))
                                for (k, v) in match.groupdict().items())
                        else:
                            args = [unquote(s) for s in match.groups()]
                    break
        if not handler:
            handler = ErrorHandler(self, request, status_code=404)
        # In debug mode, re-compile templates and reload static files on every
        # request so you don't need to restart to see changes
        if self.settings.get("debug"):
            with RequestHandler._template_loader_lock:
                for loader in RequestHandler._template_loaders.values():
                    loader.reset()
            StaticFileHandler.reset()
        handler._execute(transforms, *args, **kwargs)
        return handler
    def reverse_url(self, name, *args):
        """Returns a URL path for handler named ``name``
        The handler must be added to the application as a named `URLSpec`.
        Args will be substituted for capturing groups in the `URLSpec` regex.
        They will be converted to strings if necessary, encoded as utf8,
        and url-escaped.
        """
        if name in self.named_handlers:
            return self.named_handlers[name].reverse(*args)
        raise KeyError("%s not found in named urls" % name)
    def log_request(self, handler):
        """Writes a completed HTTP request to the logs.
        By default writes to the python root logger. To change
        this behavior either subclass Application and override this method,
        or pass a function in the application settings dictionary as
        ``log_function``.
        """
        if "log_function" in self.settings:
            self.settings["log_function"](handler)
            return
        # Log severity scales with the response status class.
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)
class HTTPError(Exception):
    """An exception that will turn into an HTTP error response.

    Raising an `HTTPError` is a convenient alternative to calling
    `RequestHandler.send_error` since it automatically ends the
    current function.

    :arg int status_code: HTTP status code.  Must be listed in
        `httplib.responses <http.client.responses>` unless the ``reason``
        keyword argument is given.
    :arg string log_message: Message to be written to the log for this error
        (will not be shown to the user unless the `Application` is in debug
        mode).  May contain ``%s``-style placeholders, which will be filled
        in with remaining positional parameters.
    :arg string reason: Keyword-only argument.  The HTTP "reason" phrase
        to pass in the status line along with ``status_code``.  Normally
        determined automatically from ``status_code``, but can be used
        to use a non-standard numeric code.
    """
    def __init__(self, status_code, log_message=None, *args, **kwargs):
        self.status_code = status_code
        self.log_message = log_message
        self.args = args
        self.reason = kwargs.get('reason', None)

    def __str__(self):
        # Prefer an explicit reason phrase; otherwise look the code up.
        phrase = self.reason or httputil.responses.get(self.status_code,
                                                       'Unknown')
        message = "HTTP %d: %s" % (self.status_code, phrase)
        if not self.log_message:
            return message
        return message + " (" + (self.log_message % self.args) + ")"
class ErrorHandler(RequestHandler):
    """Generates an error response with ``status_code`` for all requests."""
    def initialize(self, status_code):
        # Record the code now; prepare() raises it for every request.
        self.set_status(status_code)
    def prepare(self):
        raise HTTPError(self._status_code)
    def check_xsrf_cookie(self):
        # POSTs to an ErrorHandler don't actually have side effects,
        # so we don't need to check the xsrf token.  This allows POSTs
        # to the wrong url to return a 404 instead of 403.
        pass
class RedirectHandler(RequestHandler):
    """Redirects the client to the given URL for all GET requests.

    You should provide the keyword argument ``url`` to the handler, e.g.::

        application = web.Application([
            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
        ])
    """
    def initialize(self, url, permanent=True):
        # A 301 is issued by default; pass permanent=False for a 302.
        self._url, self._permanent = url, permanent

    def get(self):
        self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
    """A simple handler that can serve static content from a directory.

    To map a path to this handler for a static data directory ``/var/www``,
    you would add a line to your application like::

        application = web.Application([
            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])

    The local root directory of the content should be passed as the ``path``
    argument to the handler.

    To support aggressive browser caching, if the argument ``v`` is given
    with the path, we set an infinite HTTP expiration header. So, if you
    want browsers to cache a file indefinitely, send them to, e.g.,
    ``/static/images/myimage.png?v=xxx``. Override `get_cache_time` method for
    more fine-grained cache control.
    """
    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years

    _static_hashes = {}
    _lock = threading.Lock()  # protects _static_hashes

    def initialize(self, path, default_filename=None):
        # The trailing separator matters: without it the prefix check in
        # get() would accept sibling directories such as /var/www-evil.
        self.root = os.path.abspath(path) + os.path.sep
        self.default_filename = default_filename

    @classmethod
    def reset(cls):
        """Empties the class-wide content-hash cache (used in debug mode)."""
        with cls._lock:
            cls._static_hashes = {}

    def head(self, path):
        self.get(path, include_body=False)

    def get(self, path, include_body=True):
        path = self.parse_url_path(path)
        abspath = os.path.abspath(os.path.join(self.root, path))
        # os.path.abspath strips a trailing /
        # it needs to be temporarily added back for requests to root/
        if not (abspath + os.path.sep).startswith(self.root):
            raise HTTPError(403, "%s is not in root static directory", path)
        if os.path.isdir(abspath) and self.default_filename is not None:
            # need to look at the request.path here for when path is empty
            # but there is some prefix to the path that was already
            # trimmed by the routing
            if not self.request.path.endswith("/"):
                self.redirect(self.request.path + "/")
                return
            abspath = os.path.join(abspath, self.default_filename)
        if not os.path.exists(abspath):
            raise HTTPError(404)
        if not os.path.isfile(abspath):
            raise HTTPError(403, "%s is not a file", path)

        stat_result = os.stat(abspath)
        modified = datetime.datetime.fromtimestamp(stat_result[stat.ST_MTIME])

        self.set_header("Last-Modified", modified)

        mime_type, encoding = mimetypes.guess_type(abspath)
        if mime_type:
            self.set_header("Content-Type", mime_type)

        cache_time = self.get_cache_time(path, modified, mime_type)

        if cache_time > 0:
            self.set_header("Expires", datetime.datetime.utcnow() +
                            datetime.timedelta(seconds=cache_time))
            self.set_header("Cache-Control", "max-age=" + str(cache_time))

        self.set_extra_headers(path)

        # Check the If-Modified-Since, and don't send the result if the
        # content has not been modified
        ims_value = self.request.headers.get("If-Modified-Since")
        if ims_value is not None:
            date_tuple = email.utils.parsedate(ims_value)
            # BUG FIX: parsedate returns None for a malformed header;
            # previously that crashed with a TypeError in time.mktime.
            if date_tuple is not None:
                if_since = datetime.datetime.fromtimestamp(
                    time.mktime(date_tuple))
                if if_since >= modified:
                    self.set_status(304)
                    return

        with open(abspath, "rb") as file:
            data = file.read()
            if include_body:
                self.write(data)
            else:
                # HEAD: report the size without sending the body.
                assert self.request.method == "HEAD"
                self.set_header("Content-Length", len(data))

    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        pass

    def get_cache_time(self, path, modified, mime_type):
        """Override to customize cache control behavior.

        Return a positive number of seconds to make the result
        cacheable for that amount of time or 0 to mark resource as
        cacheable for an unspecified amount of time (subject to
        browser heuristics).

        By default returns cache expiry of 10 years for resources requested
        with ``v`` argument.
        """
        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0

    @classmethod
    def make_static_url(cls, settings, path):
        """Constructs a versioned url for the given path.

        This method may be overridden in subclasses (but note that it is
        a class method rather than an instance method).

        ``settings`` is the `Application.settings` dictionary.  ``path``
        is the static path being requested.  The url returned should be
        relative to the current host.
        """
        static_url_prefix = settings.get('static_url_prefix', '/static/')
        version_hash = cls.get_version(settings, path)
        if version_hash:
            return static_url_prefix + path + "?v=" + version_hash
        return static_url_prefix + path

    @classmethod
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.

        This method may be overridden in subclasses (but note that it
        is a class method rather than a static method).  The default
        implementation uses a hash of the file's contents.

        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.
        """
        abs_path = os.path.join(settings["static_path"], path)
        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    # BUG FIX: use a context manager so the file descriptor
                    # is released even if read() raises.
                    with open(abs_path, "rb") as f:
                        hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
                except Exception:
                    gen_log.error("Could not open static file %r", path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                # Five hex digits are plenty to bust browser caches.
                return hsh[:5]
        return None

    def parse_url_path(self, url_path):
        """Converts a static URL path into a filesystem path.

        ``url_path`` is the path component of the URL with
        ``static_url_prefix`` removed.  The return value should be
        filesystem path relative to ``static_path``.
        """
        if os.path.sep != "/":
            # On Windows, URL separators must become native backslashes.
            url_path = url_path.replace("/", os.path.sep)
        return url_path
class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.

    The fallback is a callable object that accepts an
    `~.httpserver.HTTPRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`. This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server. Typical
    usage::

        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app)),
        ])
    """
    def initialize(self, fallback):
        # `fallback` is any callable accepting the HTTPRequest.
        self.fallback = fallback
    def prepare(self):
        # Hand the entire request to the wrapped callback, then mark this
        # handler as finished so no regular get/post method runs afterwards.
        # NOTE(review): setting the private `_finished` flag relies on
        # RequestHandler internals -- confirm against the base class.
        self.fallback(self.request)
        self._finished = True
class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)

    A new transform instance is created for every request. See the
    ChunkedTransferEncoding example below if you want to implement a
    new Transform.
    """
    def __init__(self, request):
        # The base transform keeps no per-request state.
        pass
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        """Identity transform; subclasses may rewrite status, headers, chunk."""
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        """Identity transform for subsequent body chunks."""
        return chunk
class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    """
    # Only compress types where gzip typically helps; binary media are
    # usually already compressed and would waste CPU.
    CONTENT_TYPES = set([
        "text/plain", "text/html", "text/css", "text/xml", "application/javascript",
        "application/x-javascript", "application/xml", "application/atom+xml",
        "text/javascript", "application/json", "application/xhtml+xml"])
    # Bodies shorter than this are not worth compressing.
    MIN_LENGTH = 5
    def __init__(self, request):
        # gzip requires HTTP/1.1 plus an Accept-Encoding opt-in from the client.
        self._gzipping = request.supports_http_1_1() and \
            "gzip" in request.headers.get("Accept-Encoding", "")
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # Advertise that the response varies by Accept-Encoding so caches
        # keep separate compressed/uncompressed copies.
        # NOTE(review): appends bytes to the existing Vary value; if a
        # handler set Vary as a native str this concatenation would fail
        # on Python 3 -- confirm header value types upstream.
        if 'Vary' in headers:
            headers['Vary'] += b', Accept-Encoding'
        else:
            headers['Vary'] = b'Accept-Encoding'
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            # Compress only if: whitelisted content type, body not trivially
            # small, length either final or not yet declared, and no encoding
            # already applied upstream.
            self._gzipping = (ctype in self.CONTENT_TYPES) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                (finishing or "Content-Length" not in headers) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                # Compression changed the body size; the declared length
                # must match the bytes actually sent.
                headers["Content-Length"] = str(len(chunk))
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            # Drain whatever gzip produced so far and reset the buffer
            # for the next chunk.
            chunk = self._gzip_value.getvalue()
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
class ChunkedTransferEncoding(OutputTransform):
    """Applies the chunked transfer encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
    """
    def __init__(self, request):
        # Chunked encoding is only defined for HTTP/1.1 clients.
        self._chunking = request.supports_http_1_1()
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # 304 responses have no body (not even a zero-length body), and so
        # should not have either Content-Length or Transfer-Encoding headers.
        if self._chunking and status_code != 304:
            if "Content-Length" in headers or "Transfer-Encoding" in headers:
                # An explicit length (or pre-set encoding) means no chunking.
                self._chunking = False
            else:
                headers["Transfer-Encoding"] = "chunked"
                chunk = self.transform_chunk(chunk, finishing)
        return status_code, headers, chunk
    def transform_chunk(self, block, finishing):
        """Frame *block* as an HTTP chunk: hex length line + CRLF framing."""
        if not self._chunking:
            return block
        framed = block
        # An empty chunk would signal END-OF-STREAM, so only frame
        # non-empty data; the terminating 0-chunk is appended on finish.
        if framed:
            framed = utf8("%x" % len(framed)) + b"\r\n" + framed + b"\r\n"
        if finishing:
            framed = framed + b"0\r\n\r\n"
        return framed
def authenticated(method):
    """Decorate methods with this to require that the user be logged in.

    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Logged in: run the wrapped handler method unchanged.
        if self.current_user:
            return method(self, *args, **kwargs)
        # Non-idempotent methods can't be safely replayed after login.
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(403)
        url = self.get_login_url()
        if "?" not in url:
            if urlparse.urlsplit(url).scheme:
                # if login url is absolute, make next absolute too
                next_url = self.request.full_url()
            else:
                next_url = self.request.uri
            url += "?" + urlencode(dict(next=next_url))
        self.redirect(url)
    return wrapper
class UIModule(object):
    """A re-usable, modular UI unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.
    """
    def __init__(self, handler):
        # Mirror the handler attributes templates use most often.
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.current_user = handler.current_user
        self.locale = handler.locale
    def render(self, *args, **kwargs):
        """Overridden in subclasses to return this module's output."""
        raise NotImplementedError()
    def embedded_javascript(self):
        """Returns a JavaScript string that will be embedded in the page."""
        return None
    def javascript_files(self):
        """Returns a list of JavaScript files required by this module."""
        return None
    def embedded_css(self):
        """Returns a CSS string that will be embedded in the page."""
        return None
    def css_files(self):
        """Returns a list of CSS files required by this module."""
        return None
    def html_head(self):
        """Returns a CSS string that will be put in the <head/> element"""
        return None
    def html_body(self):
        """Returns an HTML string that will be put in the <body/> element"""
        return None
    def render_string(self, path, **kwargs):
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
    """UI module that renders *text* with URLs turned into HTML links."""
    def render(self, text, **kwargs):
        # All keyword arguments are forwarded to escape.linkify unchanged.
        return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
    """UI module emitting the handler's hidden XSRF-token form field."""
    def render(self):
        return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
    """UIModule that simply renders the given template.

    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.

    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features. Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """
    def __init__(self, handler):
        super(TemplateModule, self).__init__(handler)
        # keep resources in both a list and a dict to preserve order
        self._resource_list = []
        self._resource_dict = {}
    def render(self, path, **kwargs):
        """Render the template at *path*, exposing `set_resources` to it."""
        def set_resources(**kwargs):
            # Record resources once per template path; calling again with
            # different resources for the same path is a programming error.
            if path not in self._resource_dict:
                self._resource_list.append(kwargs)
                self._resource_dict[path] = kwargs
            elif self._resource_dict[path] != kwargs:
                raise ValueError("set_resources called with different "
                                 "resources for the same template")
            return ""
        return self.render_string(path, set_resources=set_resources,
                                  **kwargs)
    def _get_resources(self, key):
        # Lazily yield the value of *key* from each resource dict that has it.
        return (r[key] for r in self._resource_list if key in r)
    def _collect_files(self, key):
        # Flatten file entries: a bare string counts as a single file,
        # any other value is treated as an iterable of files.
        # (Shared by javascript_files/css_files, which previously duplicated
        # this loop.)
        result = []
        for f in self._get_resources(key):
            if isinstance(f, (unicode_type, bytes_type)):
                result.append(f)
            else:
                result.extend(f)
        return result
    def embedded_javascript(self):
        return "\n".join(self._get_resources("embedded_javascript"))
    def javascript_files(self):
        return self._collect_files("javascript_files")
    def embedded_css(self):
        return "\n".join(self._get_resources("embedded_css"))
    def css_files(self):
        return self._collect_files("css_files")
    def html_head(self):
        return "".join(self._get_resources("html_head"))
    def html_body(self):
        return "".join(self._get_resources("html_body"))
class URLSpec(object):
    """Specifies mappings between URLs and handlers."""
    def __init__(self, pattern, handler_class, kwargs=None, name=None):
        """Parameters:

        * ``pattern``: Regular expression to be matched. Any groups
          in the regex will be passed in to the handler's get/post/etc
          methods as arguments.
        * ``handler_class``: `RequestHandler` subclass to be invoked.
        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.
        * ``name`` (optional): A name for this handler. Used by
          `Application.reverse_url`.
        """
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)
        self.handler_class = handler_class
        self.kwargs = kwargs or {}
        self.name = name
        # Precompute the reverse template ('/%s/%s/' style) and group count
        # so `reverse` is cheap; (None, None) when the pattern is too complex.
        self._path, self._group_count = self._find_groups()
    def __repr__(self):
        return '%s(%r, %s, kwargs=%r, name=%r)' % \
            (self.__class__.__name__, self.regex.pattern,
             self.handler_class, self.kwargs, self.name)
    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]
        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)
        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                # bugfix (dead code removed): str.index raises on a miss and
                # never returns a negative value, and the `in` check above
                # already guarantees a hit, so the old `if paren_loc >= 0`
                # guard could never be false.
                paren_loc = fragment.index(')')
                pieces.append('%s' + fragment[paren_loc + 1:])
            else:
                pieces.append(fragment)
        return (''.join(pieces), self.regex.groups)
    def reverse(self, *args):
        """Reconstructs a URL for this spec from positional group values."""
        assert self._path is not None, \
            "Cannot reverse url regex " + self.regex.pattern
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not len(args):
            return self._path
        converted_args = []
        for a in args:
            if not isinstance(a, (unicode_type, bytes_type)):
                a = str(a)
            converted_args.append(escape.url_escape(utf8(a)))
        return self._path % tuple(converted_args)
url = URLSpec  # Backwards-compatible alias; handler lists may use url(...).
if hasattr(hmac, 'compare_digest'): # python 3.3
_time_independent_equals = hmac.compare_digest
else:
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
if isinstance(a[0], int): # python3 byte strings
for x, y in zip(a, b):
result |= x ^ y
else: # python2
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def create_signed_value(secret, name, value):
    """Sign *value* for field *name*: ``base64(value)|timestamp|signature``."""
    timestamp = utf8(str(int(time.time())))
    encoded = base64.b64encode(utf8(value))
    signature = _create_signature(secret, name, encoded, timestamp)
    return b"|".join([encoded, timestamp, signature])
def decode_signed_value(secret, name, value, max_age_days=31):
    """Inverse of `create_signed_value`: verify and decode a signed value.

    Returns the original payload as bytes, or ``None`` on any failure
    (malformed input, bad signature, expired or implausible timestamp).
    ``value`` is expected to be ``base64(payload)|timestamp|signature``.
    """
    if not value:
        return None
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    # Recompute the signature over payload+timestamp and compare in
    # constant time to avoid a timing side channel.
    signature = _create_signature(secret, name, parts[0], parts[1])
    if not _time_independent_equals(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < time.time() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > time.time() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature. For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
        return None
    if parts[1].startswith(b"0"):
        # A leading zero would likewise allow digit-shifting between the
        # payload and the timestamp (see above), so reject it outright.
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        return None
def _create_signature(secret, *parts):
    """Return the hex HMAC-SHA1 digest of *parts*, keyed with *secret*.

    Each part is utf8-encoded before being fed to the MAC; the result is
    the hexadecimal digest as utf8 bytes.
    """
    # Renamed the local from `hash`, which shadowed the builtin.
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        mac.update(utf8(part))
    return utf8(mac.hexdigest())
``` |
[
{
"content": "Here is a code file:\n```python\nfrom flask import Blueprint\nfrom models import db, GithubRepo, GithubRepoEvent\nimport json\n\ngh_renderer = Blueprint('github', __name__)\n\n@gh_renderer.route('/github/<int:repo_id>')\ndef get_gh_entry(repo_id):\n entry = db.session.query(GithubRepoEvent)\\\n... | [
{
"content": "Here is a code file:\n<|memory_start|>```python\nfrom flask import Blueprint\nfrom models import db, GithubRepo, GithubRepoEvent\nimport json\n\ngh_renderer = Blueprint('github', __name__)\n\n@gh_renderer.route('/github/<int:repo_id>')\ndef get_gh_entry(repo_id):\n entry = db.session.query(Gith... | ```python
from flask import Blueprint
from models import db, GithubRepo, GithubRepoEvent
import json
gh_renderer = Blueprint('github', __name__)
@gh_renderer.route('/github/<int:repo_id>')
def get_gh_entry(repo_id):
    """Return the latest push event for a Github repo as a JSON widget.

    Looks up the most recent `GithubRepoEvent` for ``repo_id``; when none
    exists a placeholder "no commits" payload is returned instead.
    """
    entry = db.session.query(GithubRepoEvent)\
            .filter_by(repo_id = repo_id)\
            .order_by(GithubRepoEvent.id.desc()).first()
    # .one() raises if the repo id is unknown; Flask surfaces that as a 500.
    repo = db.session.query(GithubRepo)\
           .filter_by(id = repo_id).one()
    # Shared footer metadata for both response shapes.
    meta = {
        'text' : 'Github!',
        'image' : 'http://newmerator.github.io/blacktocat.png'
    }
    if not entry:
        return json.dumps({'type' : 'text',
                           # bugfix: was '000000' (missing '#'), inconsistent
                           # with the populated branch below
                           'color' : '#000000',
                           'channel' : 'Github',
                           'title' : repo.gh_repo,
                           'text' : 'No one has committed for a while!',
                           'image' : None,
                           'meta' : meta,
                           })
    return json.dumps({'type' : 'text',
                       'color' : '#000000',
                       'channel' : 'Github',
                       'title' : repo.gh_repo,
                       'text' : '%s pushed with message %s' % (entry.user_pushed, entry.message),
                       'image' : entry.avatar_url,
                       'meta' : meta,
                       })
``` |
[
{
"content": "Here is the snippet:\n```python\nfrom binascii import hexlify, unhexlify\nfrom hashlib import sha512\nfrom random import sample\nfrom re import findall\nfrom six import b\nimport unittest\n\nfrom .makwa import Makwa, hashpw, checkpw\n\nn = int(\n 'C22C40BBD056BB213AAD7C830519101AB926AE18E3E9FC9... | [
{
"content": "Here is the snippet:\n<|memory_start|>```python\nfrom binascii import hexlify, unhexlify\nfrom hashlib import sha512\nfrom random import sample\nfrom re import findall\nfrom six import b\nimport unittest\n\nfrom .makwa import Makwa, hashpw, checkpw\n\nn = int(\n 'C22C40BBD056BB213AAD7C830519101... | ```python
from binascii import hexlify, unhexlify
from hashlib import sha512
from random import sample
from re import findall
from six import b
import unittest
from .makwa import Makwa, hashpw, checkpw
# 2048-bit modulus from the Makwa reference test vectors: the hex digit
# lines below are joined by implicit string-literal concatenation and
# parsed base-16.
n = int(
    'C22C40BBD056BB213AAD7C830519101AB926AE18E3E9FC9699C806E0AE5C2594'
    '14A01AC1D52E873EC08046A68E344C8D74A508952842EF0F03F71A6EDC077FAA'
    '14899A79F83C3AE136F774FA6EB88F1D1AEA5EA02FC0CCAF96E2CE86F3490F49'
    '93B4B566C0079641472DEFC14BECCF48984A7946F1441EA144EA4C802A457550'
    'BA3DF0F14C090A75FE9E6A77CF0BE98B71D56251A86943E719D27865A489566C'
    '1DC57FCDEFACA6AB043F8E13F6C0BE7B39C92DA86E1D87477A189E73CE8E311D'
    '3D51361F8B00249FB3D8435607B14A1E70170F9AF36784110A3F2E67428FC18F'
    'B013B30FE6782AECB4428D7C8E354A0FBD061B01917C727ABEE0FE3FD3CEF761',
    16
)
class MakwaTest(unittest.TestCase):
    """Known-answer tests for the Makwa password-hashing implementation.

    `test_spec_vector` checks the worked example from the Makwa
    specification; the remaining tests replay a random sample of 100
    entries from the reference known-answer file ``kat.txt`` for each
    primitive (KDF, digest, string hash) under SHA-256 and SHA-512.
    """
    def test_spec_vector(self):
        """Check the single worked example from the Makwa specification."""
        pi = unhexlify(
            '4765676F206265736877616A692761616B656E20617765206D616B77613B206F6'
            'E7A61616D206E616E69697A61616E697A692E'
        )
        sigma = unhexlify('C72703C22A96D9992F3DEA876497E392')
        makwa = Makwa(work_factor=4096, pre_hashing=False)
        digest = makwa._digest(pi, n, salt=sigma)
        self.assertEqual(hexlify(digest), b'c9cea0e6ef09393ab1710a08')
        h = makwa.hash(pi, n, salt=sigma)
        self.assertEqual(h, '+RK3n5jz7gs_s211_xycDwiqW2ZkvPeqHZJfjkg_yc6g5u8JOTqxcQoI')
        h = hashpw(pi, n, salt=sigma, work_factor=4096, pre_hash=False)
        self.assertEqual(h, '+RK3n5jz7gs_s211_xycDwiqW2ZkvPeqHZJfjkg_yc6g5u8JOTqxcQoI')
        # (duplicate assertion kept from the original vector check)
        self.assertEqual(h, '+RK3n5jz7gs_s211_xycDwiqW2ZkvPeqHZJfjkg_yc6g5u8JOTqxcQoI')
        self.assertTrue(makwa.check(pi, h, n))
        self.assertTrue(checkpw(pi, h, n))
        self.assertFalse(makwa.check(b'password', h, n))
        self.assertFalse(checkpw(b'password', h, n))
        # A wrong modulus must also fail verification.
        self.assertFalse(makwa.check(pi, h, 0xbadc0de))
        self.assertFalse(checkpw(pi, h, 0xbadc0de))
    def test_kdf_sha256(self):
        """Replay 100 random SHA-256 KDF known-answer vectors."""
        m = Makwa()
        matches = []
        with open('kat.txt', 'r') as f:
            pattern = r'KDF/SHA-256\n' \
                      'input: ([a-f0-9]*)\n' \
                      'output: ([a-f0-9]*)'
            matches = findall(pattern, f.read())
        for (input, output) in sample(matches, 100):
            result = hexlify(m._kdf(unhexlify(input), 100))
            self.assertEqual(result, b(output))
    def test_kdf_sha512(self):
        """Replay 100 random SHA-512 KDF known-answer vectors."""
        m = Makwa(h=sha512)
        matches = []
        with open('kat.txt', 'r') as f:
            pattern = r'KDF/SHA-512\n' \
                      'input: ([a-f0-9]*)\n' \
                      'output: ([a-f0-9]*)'
            matches = findall(pattern, f.read())
        for (input, output) in sample(matches, 100):
            result = hexlify(m._kdf(unhexlify(input), 100))
            self.assertEqual(result, b(output))
    def test_digest_sha256(self):
        """Replay 100 digest vectors (SHA-256) at work factors 384 and 4096."""
        matches = []
        with open('kat.txt', 'r') as f:
            pattern = r'2048-bit modulus, SHA-256\n' \
                      'input: ([a-f0-9]*)\n' \
                      'salt: ([a-f0-9]*)\n' \
                      'pre-hashing: (.*)\n' \
                      'post-hashing: (.*)\n' \
                      'bin384: ([a-f0-9]*)\n' \
                      'bin4096: ([a-f0-9]*)'
            matches = findall(pattern, f.read())
        for (input, salt, pre_hashing, post_hashing, bin384, bin4096) in sample(matches, 100):
            pre_hashing = (pre_hashing == 'true')
            post_hashing = (None if post_hashing == 'false' else int(post_hashing))
            m = Makwa(
                work_factor=384,
                pre_hashing=pre_hashing,
                post_hashing_length=post_hashing
            )
            digest = m._digest(unhexlify(input), n, unhexlify(salt))
            self.assertEqual(hexlify(digest), b(bin384))
            m = Makwa(
                work_factor=4096,
                pre_hashing=pre_hashing,
                post_hashing_length=post_hashing
            )
            digest = m._digest(unhexlify(input), n, unhexlify(salt))
            self.assertEqual(hexlify(digest), b(bin4096))
    def test_digest_sha512(self):
        """Replay 100 digest vectors (SHA-512) at work factors 384 and 4096."""
        matches = []
        with open('kat.txt', 'r') as f:
            pattern = r'2048-bit modulus, SHA-512\n' \
                      'input: ([a-f0-9]*)\n' \
                      'salt: ([a-f0-9]*)\n' \
                      'pre-hashing: (.*)\n' \
                      'post-hashing: (.*)\n' \
                      'bin384: ([a-f0-9]*)\n' \
                      'bin4096: ([a-f0-9]*)'
            matches = findall(pattern, f.read())
        for (input, salt, pre_hashing, post_hashing, bin384, bin4096) in sample(matches, 100):
            pre_hashing = (pre_hashing == 'true')
            post_hashing = (None if post_hashing == 'false' else int(post_hashing))
            m = Makwa(
                h=sha512,
                work_factor=384,
                pre_hashing=pre_hashing,
                post_hashing_length=post_hashing
            )
            digest = m._digest(unhexlify(input), n, unhexlify(salt))
            self.assertEqual(hexlify(digest), b(bin384))
            m = Makwa(
                h=sha512,
                work_factor=4096,
                pre_hashing=pre_hashing,
                post_hashing_length=post_hashing
            )
            digest = m._digest(unhexlify(input), n, unhexlify(salt))
            self.assertEqual(hexlify(digest), b(bin4096))
    def test_hashpw_sha256(self):
        """Replay 100 string-hash vectors (SHA-256) and round-trip checkpw."""
        matches = []
        with open('kat.txt', 'r') as f:
            pattern = r'2048-bit modulus, SHA-256\n' \
                      'input: ([a-f0-9]*)\n' \
                      'salt: ([a-f0-9]*)\n' \
                      'pre-hashing: (.*)\n' \
                      'post-hashing: (.*)\n' \
                      'bin384: [a-f0-9]*\n' \
                      'bin4096: [a-f0-9]*\n' \
                      'str384: ([A-Za-z0-9\+\/\_]*)\n' \
                      'str4096: ([A-Za-z0-9\+\/\_]*)'
            matches = findall(pattern, f.read())
        for (input, salt, pre_hashing, post_hashing, str384, str4096) in sample(matches, 100):
            pre_hashing = (pre_hashing == 'true')
            post_hashing = (None if post_hashing == 'false' else int(post_hashing))
            hashed = hashpw(
                unhexlify(input),
                n,
                salt=unhexlify(salt),
                work_factor=384,
                pre_hash=pre_hashing,
                post_hash=post_hashing
            )
            self.assertEqual(hashed, str384)
            self.assertTrue(checkpw(unhexlify(input), hashed, n))
            hashed = hashpw(
                unhexlify(input),
                n,
                salt=unhexlify(salt),
                work_factor=4096,
                pre_hash=pre_hashing,
                post_hash=post_hashing
            )
            self.assertEqual(hashed, str4096)
            self.assertTrue(checkpw(unhexlify(input), hashed, n))
    def test_hashpw_sha512(self):
        """Replay 100 string-hash vectors (SHA-512) and round-trip checkpw."""
        matches = []
        with open('kat.txt', 'r') as f:
            pattern = r'2048-bit modulus, SHA-512\n' \
                      'input: ([a-f0-9]*)\n' \
                      'salt: ([a-f0-9]*)\n' \
                      'pre-hashing: (.*)\n' \
                      'post-hashing: (.*)\n' \
                      'bin384: [a-f0-9]*\n' \
                      'bin4096: [a-f0-9]*\n' \
                      'str384: ([A-Za-z0-9\+\/\_]*)\n' \
                      'str4096: ([A-Za-z0-9\+\/\_]*)'
            matches = findall(pattern, f.read())
        for (input, salt, pre_hashing, post_hashing, str384, str4096) in sample(matches, 100):
            pre_hashing = (pre_hashing == 'true')
            post_hashing = (None if post_hashing == 'false' else int(post_hashing))
            hashed = hashpw(
                unhexlify(input),
                n,
                salt=unhexlify(salt),
                h=sha512,
                work_factor=384,
                pre_hash=pre_hashing,
                post_hash=post_hashing
            )
            self.assertEqual(hashed, str384)
            self.assertTrue(checkpw(unhexlify(input), hashed, n, h=sha512))
            hashed = hashpw(
                unhexlify(input),
                n,
                salt=unhexlify(salt),
                h=sha512,
                work_factor=4096,
                pre_hash=pre_hashing,
                post_hash=post_hashing
            )
            self.assertEqual(hashed, str4096)
            self.assertTrue(checkpw(unhexlify(input), hashed, n, h=sha512))
# Allow running this test module directly with the stdlib runner.
if __name__ == '__main__':
    unittest.main()
``` |
[
{
"content": "Produce an exact reconstruction of the code:\n```python\n# -*- coding: utf-8 -*-\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n # Deleting field 'GroupMigr... | [
{
"content": "Produce an exact reconstruction of the code:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n # Deleting ... | ```python
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'GroupMigration.members_deactivated_entered'
db.delete_column('vkontakte_groups_groupstatmembers', 'members_deactivated_entered')
# Deleting field 'GroupMigration.members_has_avatar_left'
db.delete_column('vkontakte_groups_groupstatmembers', 'members_has_avatar_left')
# Deleting field 'GroupMigration.members_entered'
db.delete_column('vkontakte_groups_groupstatmembers', 'members_entered')
# Deleting field 'GroupMigration.members_deactivated_left'
db.delete_column('vkontakte_groups_groupstatmembers', 'members_deactivated_left')
# Deleting field 'GroupMigration.members'
db.delete_column('vkontakte_groups_groupstatmembers', 'members')
# Deleting field 'GroupMigration.members_left'
db.delete_column('vkontakte_groups_groupstatmembers', 'members_left')
# Deleting field 'GroupMigration.members_has_avatar_entered'
db.delete_column('vkontakte_groups_groupstatmembers', 'members_has_avatar_entered')
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'GroupMigration.members_deactivated_entered'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members_deactivated_entered' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'GroupMigration.members_has_avatar_left'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members_has_avatar_left' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'GroupMigration.members_entered'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members_entered' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'GroupMigration.members_deactivated_left'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members_deactivated_left' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'GroupMigration.members'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'GroupMigration.members_left'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members_left' and its values cannot be restored.")
# User chose to not deal with backwards NULL issues for 'GroupMigration.members_has_avatar_entered'
raise RuntimeError("Cannot reverse this migration. 'GroupMigration.members_has_avatar_entered' and its values cannot be restored.")
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'vkontakte_groups.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '800'}),
'photo': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_big': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_medium': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'screen_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['vkontakte_users.User']", 'symmetrical': 'False'})
},
'vkontakte_groups_migration.groupmigration': {
'Meta': {'ordering': "('group', 'time', '-id')", 'unique_together': "(('group', 'time'),)", 'object_name': 'GroupMigration', 'db_table': "'vkontakte_groups_groupstatmembers'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'migrations'", 'to': "orm['vkontakte_groups.Group']"}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_deactivated_entered_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_deactivated_entered_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'members_deactivated_left_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_deactivated_left_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'members_entered_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_entered_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'members_has_avatar_entered_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_has_avatar_entered_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'members_has_avatar_left_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_has_avatar_left_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'members_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'members_left_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'members_left_ids': ('picklefield.fields.PickledObjectField', [], {'default': '[]'}),
'offset': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'vkontakte_places.city': {
'Meta': {'ordering': "['name']", 'object_name': 'City'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cities'", 'null': 'True', 'to': "orm['vkontakte_places.Country']"}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'})
},
'vkontakte_places.country': {
'Meta': {'ordering': "['name']", 'object_name': 'Country'},
'fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'})
},
'vkontakte_users.user': {
'Meta': {'ordering': "['remote_id']", 'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {}),
'activity': ('django.db.models.fields.TextField', [], {}),
'albums': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'audios': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'bdate': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'books': ('django.db.models.fields.TextField', [], {}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vkontakte_places.City']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'counters_updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vkontakte_places.Country']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'faculty': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'faculty_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'followers': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'friends': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'friends_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'friends_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followers_users'", 'symmetrical': 'False', 'to': "orm['vkontakte_users.User']"}),
'games': ('django.db.models.fields.TextField', [], {}),
'graduation': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'has_mobile': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'home_phone': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interests': ('django.db.models.fields.TextField', [], {}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'livejournal': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'mobile_phone': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'movies': ('django.db.models.fields.TextField', [], {}),
'mutual_friends': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'notes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'photo': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_big': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_medium': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_medium_rec': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_rec': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'rate': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'relation': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'screen_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'sex': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'skype': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'subscriptions': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'sum_counters': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'timezone': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'tv': ('django.db.models.fields.TextField', [], {}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'university': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'university_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'user_photos': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'user_videos': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'videos': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'wall_comments': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'vkontakte_wall.comment': {
'Meta': {'ordering': "['post', '-date']", 'object_name': 'Comment'},
'author_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['contenttypes.ContentType']"}),
'author_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'from_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'likes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'wall_comments'", 'to': "orm['vkontakte_wall.Post']"}),
'raw_html': ('django.db.models.fields.TextField', [], {}),
'remote_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': "'20'"}),
'reply_for_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replies'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'reply_for_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vkontakte_wall.Comment']", 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wall_owner_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_wall_comments'", 'to': "orm['contenttypes.ContentType']"}),
'wall_owner_id': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'vkontakte_wall.post': {
'Meta': {'ordering': "['wall_owner_id', '-date']", 'object_name': 'Post'},
'attachments': ('django.db.models.fields.TextField', [], {}),
'author_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_posts'", 'to': "orm['contenttypes.ContentType']"}),
'author_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'copy_owner_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'copy_post_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'copy_text': ('django.db.models.fields.TextField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'geo': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'like_users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'like_posts'", 'blank': 'True', 'to': "orm['vkontakte_users.User']"}),
'likes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'media': ('django.db.models.fields.TextField', [], {}),
'online': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'post_source': ('django.db.models.fields.TextField', [], {}),
'raw_html': ('django.db.models.fields.TextField', [], {}),
'remote_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': "'20'"}),
'reply_count': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'repost_users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'repost_posts'", 'blank': 'True', 'to': "orm['vkontakte_users.User']"}),
'reposts': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'signer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wall_owner_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_wall_posts'", 'to': "orm['contenttypes.ContentType']"}),
'wall_owner_id': ('django.db.models.fields.PositiveIntegerField', [], {})
}
}
complete_apps = ['vkontakte_groups_migration']
``` |
[
{
"content": "```python\nfrom sqlite3 import IntegrityError\n\nfrom controller.util import get_image_from_blob\nfrom database_manager.friend_manager import MinimumFriendParameterException\nfrom database_manager.util import ChumMeDBManager\nfrom .friend_form import FriendForm\n\nclass UpdateFriendForm(FriendForm... | [
{
"content": "<|memory_start|>```python\nfrom sqlite3 import IntegrityError\n\nfrom controller.util import get_image_from_blob\nfrom database_manager.friend_manager import MinimumFriendParameterException\nfrom database_manager.util import ChumMeDBManager\nfrom .friend_form import FriendForm\n\nclass UpdateFrien... | ```python
from sqlite3 import IntegrityError
from controller.util import get_image_from_blob
from database_manager.friend_manager import MinimumFriendParameterException
from database_manager.util import ChumMeDBManager
from .friend_form import FriendForm
class UpdateFriendForm(FriendForm):
def __init__(self, friend, **kwargs):
super().__init__(friend, **kwargs)
social_networks = ChumMeDBManager().friend_manager.\
get_social_network_links_by_friend_id(self.friend.id)
for i, link in social_networks.items():
field = self.social_network_fields[i - 1]
field.text_input.text = link
profile_image = ChumMeDBManager().profile_photo_manager.\
select_profile_photo(self.friend.id)
if profile_image:
blob = profile_image['blob']
extension = profile_image['ext'][1:]
self.image.texture = get_image_from_blob(blob, extension).texture
def update_friend(self):
updated_friend = self.build_friend(self.parent.update_friend_form)
try:
ChumMeDBManager().friend_manager.update_friend(updated_friend)
self._update_social_networks()
self._update_image()
except MinimumFriendParameterException:
self.display_error_popup('updating')
else:
self.parent.show_friend_details(updated_friend)
def _update_social_networks(self):
for i, field in enumerate(self.social_network_fields):
if field.check_box.active:
try:
ChumMeDBManager().friend_social_network_manager.\
add_friend_social_network(
self.friend.id,
i + 1,
field.text_input.text
)
except IntegrityError:
ChumMeDBManager().friend_social_network_manager.\
update_social_network(
field.text_input.text,
self.friend.id,
i + 1
)
def _update_image(self):
if self.blob_profile_image:
try:
ChumMeDBManager().profile_photo_manager.insert_profile_photo(
self.friend.id,
self.blob_profile_image['blob'],
self.blob_profile_image['ext']
)
except IntegrityError:
ChumMeDBManager().profile_photo_manager.update_profile_photo(
self.friend.id,
self.blob_profile_image['blob'],
self.blob_profile_image['ext']
)
def build_friend(self, form):
friend = super().build_friend(form)
friend.id = self.friend.id
return friend
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\n#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom lib.meos import MEoS\nfrom lib import unidades\n\n\nclass nC5(MEoS):\n \"\"\"Multiparameter equation of state for n-pentane\"\"\"\n name = \"pentane\"\n CASNumber = \"109-... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\n#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom lib.meos import MEoS\nfrom lib import unidades\n\n\nclass nC5(MEoS):\n \"\"\"Multiparameter equation of state for n-pentane\"\"\"\n name = \"pentane\"\n CA... | ```python
#!/usr/bin/python
# -*- coding: utf-8 -*-
from lib.meos import MEoS
from lib import unidades
class nC5(MEoS):
"""Multiparameter equation of state for n-pentane"""
name = "pentane"
CASNumber = "109-66-0"
formula = "CH3-(CH2)3-CH3"
synonym = "R-601"
rhoc = unidades.Density(232.)
Tc = unidades.Temperature(469.7)
Pc = unidades.Pressure(3370.0, "kPa")
M = 72.14878 # g/mol
Tt = unidades.Temperature(143.47)
Tb = unidades.Temperature(309.21)
f_acent = 0.251
momentoDipolar = unidades.DipoleMoment(0.07, "Debye")
id = 8
_Tr = unidades.Temperature(449.271155)
_rhor = unidades.Density(233.873368)
_w = 0.247058753
Fi1 = {"ao_log": [1, 3.0],
"pow": [0, 1],
"ao_pow": [],
"ao_exp": [], "titao": [],
"ao_hyp": [8.95043, 21.836, 33.4032, 0],
"hyp": [0.380391739, 1.789520971, 3.777411113, 0]}
CP1 = {"ao": 10.288132,
"an": [-0.2695377e-1, 0.20951065e-3, -0.27910773e-6, 0.12266269e-9],
"pow": [1, 2, 3, 4],
"ao_exp": [], "exp": [],
"ao_hyp": [], "hyp": []}
CP2 = {"ao": 22.5012/8.3159524*4.184,
"an": [], "pow": [],
"ao_exp": [], "exp": [],
"ao_hyp": [2.057417e8/8.3159524*4.184, 2.972927e7/8.3159524*4.184, 0, 0],
"hyp": [1.71958e3, 8.02069e2, 0, 0]}
helmholtz1 = {
"__type__": "Helmholtz",
"__name__": "short Helmholtz equation of state for pentane of Span and Wagner (2003)",
"__doi__": {"autor": "Span, R., Wagner, W.",
"title": "Equations of state for technical applications. II. Results for nonpolar fluids.",
"ref": "Int. J. Thermophys. 24 (2003), 41 – 109.",
"doi": "10.1023/A:1022310214958"},
"__test__": """
>>> st=nC5(T=700, rho=200)
>>> print "%0.4f %0.3f %0.4f" % (st.cp0.kJkgK, st.P.MPa, st.cp.kJkgK)
3.2053 13.454 3.6052
>>> st2=nC5(T=750, rho=100)
>>> print "%0.2f %0.5f" % (st2.h.kJkg-st.h.kJkg, st2.s.kJkgK-st.s.kJkgK)
213.42 0.34915
""", # Table III, Pag 46
"R": 8.31451,
"cp": Fi1,
"ref": "OTO",
"Tmin": Tt, "Tmax": 600.0, "Pmax": 100000.0, "rhomax": 11.2,
"Pmin": 0.76322e-4, "rhomin": 10.566,
"nr1": [0.10968643e1, -0.29988888e1, 0.99516887, -0.16170709,
0.11334460, 0.26760595e-3],
"d1": [1, 1, 1, 2, 3, 7],
"t1": [0.25, 1.125, 1.5, 1.375, 0.25, 0.875],
"nr2": [0.40979882, -0.40876423e-1, -0.38169482, -0.10931957,
-0.32073223e-1, 0.16877016e-1],
"d2": [2, 5, 1, 4, 3, 4],
"t2": [0.625, 1.75, 3.625, 3.625, 14.5, 12.],
"c2": [1, 1, 2, 2, 3, 3],
"gamma2": [1]*6}
GERG = {
"__type__": "Helmholtz",
"__name__": "Helmholtz equation of state for pentane of Kunz and Wagner (2004).",
"__doi__": {"autor": "Kunz, O., Wagner, W.",
"title": "The GERG-2008 Wide-Range Equation of State for Natural Gases and Other Mixtures: An Expansion of GERG-2004",
"ref": "J. Chem. Eng. Data, 2012, 57 (11), pp 3032-3091",
"doi": "10.1021/je300655b"},
"R": 8.314472,
"cp": Fi1,
"ref": "OTO",
"Tmin": 143.47, "Tmax": 600.0, "Pmax": 100000.0, "rhomax": 10.57,
# "Pmin": 73.476, "rhomin": 29.249,
"nr1": [0.10968643098001e1, -0.29988888298061e1, 0.99516886799212,
-0.16170708558539, 0.11334460072775, 0.26760595150748e-3],
"d1": [1, 1, 1, 2, 3, 7],
"t1": [0.25, 1.125, 1.5, 1.375, 0.25, 0.875],
"nr2": [0.40979881986931, -0.40876423083075e-1, -0.38169482469447,
-0.10931956843993, -0.32073223327990e-1, 0.16877016216975e-1],
"d2": [2, 5, 1, 4, 3, 4],
"t2": [0.625, 1.75, 3.625, 3.625, 14.5, 12.],
"c2": [1, 1, 2, 2, 3, 3],
"gamma2": [1]*6}
helmholtz3 = {
"__type__": "Helmholtz",
"__name__": "Helmholtz equation of state for pentane of Polt et al. (1992)",
"__doi__": {"autor": "Polt, A., Platzer, B., and Maurer, G.",
"title": "Parameter der thermischen Zustandsgleichung von Bender fuer 14 mehratomige reine Stoffe",
"ref": "Chem. Technik 22(1992)6 , 216/224",
"doi": ""},
"R": 8.3143,
"cp": CP1,
"ref": "NBP",
"Tmin": 238.0, "Tmax": 573.0, "Pmax": 30000.0, "rhomax": 9.410819,
"Pmin": 3.624503, "rhomin": 9.3861,
"nr1": [-0.117648900900e1, 0.163499095773e1, -0.366669005817,
0.724947274043, -0.221919300269e1, 0.188671490348e1,
-0.195774652096e1, 0.308440851184, 0.437424419722,
-0.625853472351, 0.382868807091, -0.119467393955, 0.218631441082,
0.485668874195e-1, -0.132198161379, 0.213549844850e-1],
"d1": [0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 4, 5],
"t1": [3, 4, 5, 0, 1, 2, 3, 4, 0, 1, 2, 0, 1, 0, 1, 1],
"nr2": [0.117648900900e1, -0.163499095773e1, 0.366669005817,
-0.363660829618e-2, 0.633672105685, -0.705792643982],
"d2": [0, 0, 0, 2, 2, 2],
"t2": [3, 4, 5, 3, 4, 5],
"c2": [2]*6,
"gamma2": [0.968832]*6}
helmholtz4 = {
"__type__": "Helmholtz",
"__name__": "Helmholtz equation of state for pentane of Starling (1973)",
"__doi__": {"autor": "Starling, K.E.",
"title": "Fluid Thermodynamic Properties for Light Petroleum Systems",
"ref": "Gulf Publishing Company, 1973.",
"doi": ""},
"R": 8.3159524,
"cp": CP2,
"ref": "NBP",
"Tmin": 177.0, "Tmax": 589.0, "Pmax": 55000.0, "rhomax": 10.2534,
"Pmin": 0.011064, "rhomin": 10.253,
"nr1": [0.175873733594e1, 0.485604047435, -0.111896446456e1,
-0.685918143315, 0.368714111378e-1, -0.167498784887e-2,
0.327765295239, -0.352742092747, -0.999487301826e-1,
0.781999120830e-2, 0.221577806386e-2],
"d1": [0, 1, 1, 1, 1, 1, 2, 2, 2, 5, 5],
"t1": [3, 0, 1, 3, 4, 5, 0, 1, 2, 1, 2],
"nr2": [-0.175873733594e1, -0.411653507564],
"d2": [0, 2],
"t2": [3, 3],
"c2": [2]*2,
"gamma2": [0.46812392]*2}
helmholtz5 = {
"__type__": "Helmholtz",
"__name__": "Helmholtz equation of state for pentane of Sun and Ely (2004)",
"__doi__": {"autor": "Sun, L. and Ely, J.F.",
"title": "Universal equation of state for engineering application: Algorithm and application to non-polar and polar fluids",
"ref": "Fluid Phase Equilib., 222-223:107-118, 2004.",
"doi": "10.1016/j.fluid.2004.06.028"},
"R": 8.31451,
"cp": Fi1,
"ref": "OTO",
"Tmin": Tt, "Tmax": 620.0, "Pmax": 800000.0, "rhomax": 40.,
"Pmin": 0.1, "rhomin": 40.,
"nr1": [2.20261753, 1.07797592, -3.82130221, 1.06627357e-1,
3.07513215e-4, -2.84309667e-1],
"d1": [1, 1, 1, 3, 7, 2],
"t1": [1.5, 0.25, 1.25, 0.25, 0.875, 1.375],
"nr2": [-7.28441220e-2, -4.60943732e-1, 8.39360011e-2 , -1.50650444e-2,
-2.03771872e-1, -7.90244277e-3, -5.68993564e-2, -2.99387974e-2],
"d2": [1, 1, 2, 5, 1, 1, 4, 2],
"t2": [0, 2.375, 2., 2.125, 3.5, 6.5, 4.75, 12.5],
"c2": [1, 1, 1, 1, 2, 2, 2, 3],
"gamma2": [1]*8}
MBWR = {
"__type__": "MBWR",
"__name__": " MBWR equation of state for pentane of Ratanapisit (1999).",
"__doi__": {"autor": "Ratanapisit, J., Ely, J.F.",
"title": "Application of New, Modified BWR Equations of State to the Corresponding-States Prediction of Natural Gas Properties",
"ref": "Int. J. Thermophys., 20(6):1721-1735, 1999.",
"doi": "10.1023/A:1022610013596"},
"R": 8.31434,
"cp": Fi1,
"ref": "OTO",
"Tmin": Tt, "Tmax": 600.0, "Pmax": 70000.0, "rhomax": 11.2,
"Pmin": 0.0000815, "rhomin": 10.558,
"b": [None, -7.41533782499e-2, 7.54044021950, -1.93328401588e2,
3.39428034054e4, -5.12571561595e6, 1.51195406963e-3,
-7.12225059892, 4.12664185793e3, 8.40258305443e5,
-4.68416651753e-4, 3.03565637672, -1.42146321204e3,
-1.10170659283e-1, -9.80664356304, 1.10979804446e3, 2.98029604130,
-1.41484307201e-1, -3.39208006239e1, 2.08782048763,
5.38055429992e5, -6.40401885304e8, -1.19676622034e5,
1.71973349582e10, -3.06383363882e3, 1.43168348944e6,
1.41452433419e1, -2.52955687564e7, -3.85316416299,
2.65416349789e3, 4.76643876980e-3, -8.37595968663,
-1.35160880503e3]}
eq = helmholtz1, GERG, helmholtz3, helmholtz4, helmholtz5, MBWR
_surface = {"sigma": [0.08015, 0.004384, -0.03437],
"exp": [1.408, 1.031, 1.818]}
_dielectric = {"eq": 3, "Tref": 273.16, "rhoref": 1000.,
"a0": [0.10924], "expt0": [-1.], "expd0": [1.],
"a1": [25.39, 0.025], "expt1": [0, 1], "expd1": [1, 1],
"a2": [78.39, 54.15, -12480, -4800.0],
"expt2": [0, 1, 0, 1], "expd2": [2, 2, 3, 3]}
_melting = {"eq": 1, "Tref": Tt, "Pref": 0.76322e-4,
"Tmin": Tt, "Tmax": 2000.0,
"a1": [-8647500000, 8647500001], "exp1": [0, 1.649],
"a2": [], "exp2": [], "a3": [], "exp3": []}
_vapor_Pressure = {
"eq": 5,
"ao": [-0.73918e1, 0.31102e1, -0.22415e1, -0.31585e1, -0.90451],
"exp": [1., 1.5, 1.74, 3.75, 8.0]}
_liquid_Density = {
"eq": 1,
"ao": [0.10178e1, 0.42703, 0.11334e1, 0.41518, -0.47950e-1],
"exp": [0.27, 0.44, 0.6, 4.0, 5.0]}
_vapor_Density = {
"eq": 3,
"ao": [-0.29389e1, -0.62784e1, -0.19941e2, -0.16709e2, -0.36543e2, -0.12799e3],
"exp": [0.4, 1.18, 3.2, 6.6, 7.0, 15.0]}
visco0 = {"eq": 2, "omega": 3,
"__name__": "NIST14",
"__doi__": {"autor": "",
"title": "Coefficients are taken from NIST14, Version 9.08",
"ref": "",
"doi": ""},
"ek": 341.10, "sigma": 0.5784,
"n_chapman": 0.226720214/M**0.5,
"F": [0, 0, 0, 100],
"E": [-13.47938293, 1176.6275165, 14.2278439927, -21951.0293411,
0.03766867689, 70.1529173825, 21435.7720323],
"rhoc": 3.215}
visco1 = {"eq": 4, "omega": 1,
"__name__": "Quiñones-Cisneros (2006)",
"__doi__": {"autor": "S.E.Quiñones-Cisneros and U.K. Deiters",
"title": "Generalization of the Friction Theory for Viscosity Modeling",
"ref": "J. Phys. Chem. B, 2006, 110 (25), pp 12820–12834",
"doi": "10.1021/jp0618577"},
"Tref": 469.7, "muref": 1.0,
"ek": 341.1, "sigma": 0.5784, "n_chapman": 0,
"n_ideal": [17.6805, -55.6942, 48.7177],
"t_ideal": [0, 0.25, 0.5],
"a": [1.08193e-5, -4.71699e-5, 0.0],
"b": [1.21502e-4, -9.84766e-5, 0.0],
"c": [5.08307e-5, -1.07e-5, 0.0],
"A": [-2.10025e-10, -1.56583e-9, 0.0],
"B": [1.98521e-8, 2.05972e-9, 0.0],
"C": [-1.18487e-7, 1.69571e-7, 0.0],
"D": [0.0, 0.0, 0.0]}
_viscosity = visco0, visco1
thermo0 = {"eq": 1,
"__name__": "NIST14",
"__doi__": {"autor": "",
"title": "Coefficients are taken from NIST14, Version 9.08",
"ref": "",
"doi": ""},
"Tref": 341.1, "kref": 1e-3,
"no": [1.35558587, -0.15569137, 1],
"co": [0, -1, -96],
"Trefb": 469.69, "rhorefb": 3.215, "krefb": 1e-3,
"nb": [18.6089331038, -5.83657061299, 3.48987100529,
0.704467355508, -0.206501417728, -0.22307039402],
"tb": [0, 0, 0, -1, 0, -1],
"db": [1, 3, 4, 4, 5, 5],
"cb": [0]*6,
"critical": 3,
"gnu": 0.63, "gamma": 1.239, "R0": 1.03,
"Xio": 0.194e-9, "gam0": 0.0496, "qd": 0.9345e-9, "Tcref": 704.55}
_thermal = thermo0,
``` |
[
{
"content": "Here is some code:\n```python\nimport pandas as pd\nimport pandas.util.testing as tm\nimport pytest\nfrom pytest import param\n\nimport ibis\nimport ibis.common.exceptions as com\n\npytestmark = pytest.mark.pandas\n\n\njoin_type = pytest.mark.parametrize(\n 'how',\n [\n 'inner',\n ... | [
{
"content": "Here is some code:\n<|memory_start|>```python\nimport pandas as pd\nimport pandas.util.testing as tm\nimport pytest\nfrom pytest import param\n\nimport ibis\nimport ibis.common.exceptions as com\n\npytestmark = pytest.mark.pandas\n\n\njoin_type = pytest.mark.parametrize(\n 'how',\n [\n ... | ```python
import pandas as pd
import pandas.util.testing as tm
import pytest
from pytest import param
import ibis
import ibis.common.exceptions as com
pytestmark = pytest.mark.pandas
join_type = pytest.mark.parametrize(
'how',
[
'inner',
'left',
'right',
'outer',
param(
'semi',
marks=pytest.mark.xfail(
raises=NotImplementedError, reason='Semi join not implemented'
),
),
param(
'anti',
marks=pytest.mark.xfail(
raises=NotImplementedError, reason='Anti join not implemented'
),
),
],
)
@join_type
def test_join(how, left, right, df1, df2):
expr = left.join(right, left.key == right.key, how=how)[
left, right.other_value, right.key3
]
result = expr.execute()
expected = pd.merge(df1, df2, how=how, on='key')
tm.assert_frame_equal(result[expected.columns], expected)
def test_cross_join(left, right, df1, df2):
expr = left.cross_join(right)[left, right.other_value, right.key3]
result = expr.execute()
expected = pd.merge(
df1.assign(dummy=1), df2.assign(dummy=1), how='inner', on='dummy'
).rename(columns=dict(key_x='key'))
del expected['dummy'], expected['key_y']
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_project_left_table(how, left, right, df1, df2):
expr = left.join(right, left.key == right.key, how=how)[left, right.key3]
result = expr.execute()
expected = pd.merge(df1, df2, how=how, on='key')[
list(left.columns) + ['key3']
]
tm.assert_frame_equal(result[expected.columns], expected)
def test_cross_join_project_left_table(left, right, df1, df2):
expr = left.cross_join(right)[left, right.key3]
result = expr.execute()
expected = pd.merge(
df1.assign(dummy=1), df2.assign(dummy=1), how='inner', on='dummy'
).rename(columns=dict(key_x='key'))[list(left.columns) + ['key3']]
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_multiple_predicates(how, left, right, df1, df2):
expr = left.join(
right, [left.key == right.key, left.key2 == right.key3], how=how
)[left, right.key3, right.other_value]
result = expr.execute()
expected = pd.merge(
df1, df2, how=how, left_on=['key', 'key2'], right_on=['key', 'key3']
).reset_index(drop=True)
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_multiple_predicates_written_as_one(
how, left, right, df1, df2
):
predicate = (left.key == right.key) & (left.key2 == right.key3)
expr = left.join(right, predicate, how=how)[
left, right.key3, right.other_value
]
result = expr.execute()
expected = pd.merge(
df1, df2, how=how, left_on=['key', 'key2'], right_on=['key', 'key3']
).reset_index(drop=True)
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_invalid_predicates(how, left, right):
predicate = (left.key == right.key) & (left.key2 <= right.key3)
expr = left.join(right, predicate, how=how)
with pytest.raises(TypeError):
expr.execute()
predicate = left.key >= right.key
expr = left.join(right, predicate, how=how)
with pytest.raises(TypeError):
expr.execute()
@join_type
@pytest.mark.xfail(reason='Hard to detect this case')
def test_join_with_duplicate_non_key_columns(how, left, right, df1, df2):
left = left.mutate(x=left.value * 2)
right = right.mutate(x=right.other_value * 3)
expr = left.join(right, left.key == right.key, how=how)
# This is undefined behavior because `x` is duplicated. This is difficult
# to detect
with pytest.raises(ValueError):
expr.execute()
@join_type
def test_join_with_duplicate_non_key_columns_not_selected(
how, left, right, df1, df2
):
left = left.mutate(x=left.value * 2)
right = right.mutate(x=right.other_value * 3)
right = right[['key', 'other_value']]
expr = left.join(right, left.key == right.key, how=how)[
left, right.other_value
]
result = expr.execute()
expected = pd.merge(
df1.assign(x=df1.value * 2),
df2[['key', 'other_value']],
how=how,
on='key',
)
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_post_expression_selection(how, left, right, df1, df2):
join = left.join(right, left.key == right.key, how=how)
expr = join[left.key, left.value, right.other_value]
result = expr.execute()
expected = pd.merge(df1, df2, on='key', how=how)[
['key', 'value', 'other_value']
]
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_post_expression_filter(how, left):
lhs = left[['key', 'key2']]
rhs = left[['key2', 'value']]
joined = lhs.join(rhs, 'key2', how=how)
projected = joined[lhs, rhs.value]
expr = projected[projected.value == 4]
result = expr.execute()
df1 = lhs.execute()
df2 = rhs.execute()
expected = pd.merge(df1, df2, on='key2', how=how)
expected = expected.loc[expected.value == 4].reset_index(drop=True)
tm.assert_frame_equal(result, expected)
@join_type
def test_multi_join_with_post_expression_filter(how, left, df1):
lhs = left[['key', 'key2']]
rhs = left[['key2', 'value']]
rhs2 = left[['key2', 'value']].relabel(dict(value='value2'))
joined = lhs.join(rhs, 'key2', how=how)
projected = joined[lhs, rhs.value]
filtered = projected[projected.value == 4]
joined2 = filtered.join(rhs2, 'key2')
projected2 = joined2[filtered.key, rhs2.value2]
expr = projected2[projected2.value2 == 3]
result = expr.execute()
df1 = lhs.execute()
df2 = rhs.execute()
df3 = rhs2.execute()
expected = pd.merge(df1, df2, on='key2', how=how)
expected = expected.loc[expected.value == 4].reset_index(drop=True)
expected = pd.merge(expected, df3, on='key2')[['key', 'value2']]
expected = expected.loc[expected.value2 == 3].reset_index(drop=True)
tm.assert_frame_equal(result, expected)
@join_type
def test_join_with_non_trivial_key(how, left, right, df1, df2):
# also test that the order of operands in the predicate doesn't matter
join = left.join(right, right.key.length() == left.key.length(), how=how)
expr = join[left.key, left.value, right.other_value]
result = expr.execute()
expected = (
pd.merge(
df1.assign(key_len=df1.key.str.len()),
df2.assign(key_len=df2.key.str.len()),
on='key_len',
how=how,
)
.drop(['key_len', 'key_y', 'key2', 'key3'], axis=1)
.rename(columns={'key_x': 'key'})
)
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_non_trivial_key_project_table(how, left, right, df1, df2):
# also test that the order of operands in the predicate doesn't matter
join = left.join(right, right.key.length() == left.key.length(), how=how)
expr = join[left, right.other_value]
expr = expr[expr.key.length() == 1]
result = expr.execute()
expected = (
pd.merge(
df1.assign(key_len=df1.key.str.len()),
df2.assign(key_len=df2.key.str.len()),
on='key_len',
how=how,
)
.drop(['key_len', 'key_y', 'key2', 'key3'], axis=1)
.rename(columns={'key_x': 'key'})
)
expected = expected.loc[expected.key.str.len() == 1]
tm.assert_frame_equal(result[expected.columns], expected)
@join_type
def test_join_with_project_right_duplicate_column(client, how, left, df1, df3):
# also test that the order of operands in the predicate doesn't matter
right = client.table('df3')
join = left.join(right, ['key'], how=how)
expr = join[left.key, right.key2, right.other_value]
result = expr.execute()
expected = (
pd.merge(df1, df3, on='key', how=how)
.drop(['key2_x', 'key3', 'value'], axis=1)
.rename(columns={'key2_y': 'key2'})
)
tm.assert_frame_equal(result[expected.columns], expected)
def test_join_with_window_function(
players_base, players_df, batting, batting_df
):
players = players_base
# this should be semi_join
tbl = batting.left_join(players, ['playerID'])
t = tbl[batting.G, batting.playerID, batting.teamID]
expr = t.groupby(t.teamID).mutate(
team_avg=lambda d: d.G.mean(),
demeaned_by_player=lambda d: d.G - d.G.mean(),
)
result = expr.execute()
expected = pd.merge(
batting_df, players_df[['playerID']], on='playerID', how='left'
)[['G', 'playerID', 'teamID']]
team_avg = expected.groupby('teamID').G.transform('mean')
expected = expected.assign(
team_avg=team_avg, demeaned_by_player=lambda df: df.G - team_avg
)
tm.assert_frame_equal(result[expected.columns], expected)
merge_asof_minversion = pytest.mark.skipif(
pd.__version__ < '0.19.2',
reason="at least pandas-0.19.2 required for merge_asof",
)
@merge_asof_minversion
def test_asof_join(time_left, time_right, time_df1, time_df2):
expr = time_left.asof_join(time_right, 'time')[
time_left, time_right.other_value
]
result = expr.execute()
expected = pd.merge_asof(time_df1, time_df2, on='time')
tm.assert_frame_equal(result[expected.columns], expected)
@merge_asof_minversion
def test_asof_join_predicate(time_left, time_right, time_df1, time_df2):
expr = time_left.asof_join(time_right, time_left.time == time_right.time)[
time_left, time_right.other_value
]
result = expr.execute()
expected = pd.merge_asof(time_df1, time_df2, on='time')
tm.assert_frame_equal(result[expected.columns], expected)
@merge_asof_minversion
def test_keyed_asof_join(
time_keyed_left, time_keyed_right, time_keyed_df1, time_keyed_df2
):
expr = time_keyed_left.asof_join(time_keyed_right, 'time', by='key')[
time_keyed_left, time_keyed_right.other_value
]
result = expr.execute()
expected = pd.merge_asof(
time_keyed_df1, time_keyed_df2, on='time', by='key'
)
tm.assert_frame_equal(result[expected.columns], expected)
@merge_asof_minversion
def test_keyed_asof_join_with_tolerance(
time_keyed_left, time_keyed_right, time_keyed_df1, time_keyed_df2
):
expr = time_keyed_left.asof_join(
time_keyed_right, 'time', by='key', tolerance=2 * ibis.interval(days=1)
)[time_keyed_left, time_keyed_right.other_value]
result = expr.execute()
expected = pd.merge_asof(
time_keyed_df1,
time_keyed_df2,
on='time',
by='key',
tolerance=pd.Timedelta('2D'),
)
tm.assert_frame_equal(result[expected.columns], expected)
@pytest.mark.parametrize(
"how",
[
"left",
pytest.param(
"right",
marks=pytest.mark.xfail(
raises=AttributeError, reason="right_join is not an ibis API"
),
),
"inner",
"outer",
],
)
@pytest.mark.parametrize(
"func",
[
pytest.param(lambda join: join["a0", "a1"], id="tuple"),
pytest.param(lambda join: join[["a0", "a1"]], id="list"),
pytest.param(lambda join: join.select(["a0", "a1"]), id="select"),
],
)
@pytest.mark.xfail(
raises=(com.IbisError, AttributeError),
reason="Select from unambiguous joins not implemented",
)
def test_select_on_unambiguous_join(how, func):
df_t = pd.DataFrame(dict(a0=[1, 2, 3], b1=list("aab")))
df_s = pd.DataFrame(dict(a1=[2, 3, 4], b2=list("abc")))
con = ibis.pandas.connect({"t": df_t, "s": df_s})
t = con.table("t")
s = con.table("s")
method = getattr(t, "{}_join".format(how))
join = method(s, t.b1 == s.b2)
expected = pd.merge(df_t, df_s, left_on=["b1"], right_on=["b2"], how=how)[
["a0", "a1"]
]
assert not expected.empty
expr = func(join)
result = expr.execute()
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
    "func",
    [
        pytest.param(lambda join: join["a0", "a1"], id="tuple"),
        pytest.param(lambda join: join[["a0", "a1"]], id="list"),
        pytest.param(lambda join: join.select(["a0", "a1"]), id="select"),
    ],
)
@pytest.mark.xfail(
    raises=(com.IbisError, AttributeError),
    reason="Select from unambiguous joins not implemented",
)
@merge_asof_minversion
def test_select_on_unambiguous_asof_join(func):
    """Selecting unambiguous columns from an asof join must match merge_asof."""
    left_df = pd.DataFrame(
        dict(a0=[1, 2, 3], b1=pd.date_range("20180101", periods=3))
    )
    right_df = pd.DataFrame(
        dict(a1=[2, 3, 4], b2=pd.date_range("20171230", periods=3))
    )
    con = ibis.pandas.connect({"t": left_df, "s": right_df})
    left = con.table("t")
    right = con.table("s")
    joined = left.asof_join(right, left.b1 == right.b2)
    expected = pd.merge_asof(left_df, right_df, left_on=["b1"], right_on=["b2"])[
        ["a0", "a1"]
    ]
    # guard against a vacuously-passing comparison on an empty frame
    assert not expected.empty
    result = func(joined).execute()
    tm.assert_frame_equal(result, expected)
``` |
[
{
"content": "Here is a code snippet:\n```python\n# \n# The BSD License\n# \n# Copyright (c) 2008, Florian Noeding\n# All rights reserved.\n# \n# Redistribution and use in source and binary forms, with or without modification,\n# are permitted provided that the following conditions are met:\n# \n# Redistributio... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\n# \n# The BSD License\n# \n# Copyright (c) 2008, Florian Noeding\n# All rights reserved.\n# \n# Redistribution and use in source and binary forms, with or without modification,\n# are permitted provided that the following conditions are met:\n# \... | ```python
#
# The BSD License
#
# Copyright (c) 2008, Florian Noeding
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# Neither the name of the of the author nor the names of its contributors may be
# used to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import with_statement
import setuppaths
from llvm import *
from llvm.core import *
from llvm.ee import *
import os.path
import re
from esfunction import ESFunction
from esvalue import ESValue
from esvariable import ESVariable
from estype import ESType
from errors import *
import astwalker
from tree import Tree, TreeType
import typeannotator
import llvmdebug
class ModuleTranslator(astwalker.ASTWalker):
def _addHelperFunctionsPreTranslation(self):
    """Declare external C runtime helpers (puts, abort) before translation starts."""
    # int puts(char *);
    returnTypes = [self._findSymbol(name=u'int32', type_=ESType)]
    paramTypes = [self._findSymbol(name=u'int8', type_=ESType).derivePointer()]
    esType = ESType.createFunction(returnTypes, paramTypes)
    esFunc = ESFunction(u'puts', '', '', esType, [u's'], mangling='C', linkage='extern')
    self._addSymbol(name=u'puts', symbol=esFunc)
    type = esType.toLLVMType()
    func = self._module.add_function(type, 'puts')
    # void abort();
    returnTypes = [self._findSymbol(name=u'void', type_=ESType)]
    paramTypes = []
    esType = ESType.createFunction(returnTypes, paramTypes)
    esFunc = ESFunction(u'abort', '', '', esType, [], mangling='C', linkage='extern')
    # NOTE(review): unlike 'puts', 'abort' is never registered via _addSymbol;
    # it is only fetched later with get_function_named — confirm this is intentional.
    type = esType.toLLVMType()
    func = self._module.add_function(type, 'abort')
def _addHelperFunctionsPostTranslation(self):
    """If this module defines a 'main', emit a C-level main() wrapper that calls it."""
    # if this module contains a main function emit code which will call it
    flist = self._findSymbol(name=u'main', type_=ESFunction, mayFail=True)
    if flist:
        assert(len(flist) == 1)
        esMain = flist[0]
        # error text shown when the user's main has an unsupported signature
        s = []
        s.append('The main function defined in this module has an unsupported signature.')
        s.append('supported signatures:')
        s.append('\tdef main() as int32')
        s.append('\tdef main() as void')
        int32 = self._findSymbol(name=u'int32', type_=ESType)
        void = self._findSymbol(name=u'void', type_=ESType)
        validA = ESType.createFunction([int32], [])
        validB = ESType.createFunction([void], [])
        ok = False
        for x in [validA, validB]:
            if x.isEquivalentTo(esMain.esType, False):
                ok = True
        if not ok:
            self._raiseException(RecoverableCompileError, postText=s)
        # has arguments?
        if len(esMain.esType.getFunctionParameterTypes()) == 0:
            functionType = Type.function(Type.int(32), [])
            function = self._module.add_function(functionType, 'main')
            entryBB = function.append_basic_block('entry')
            BB = function.append_basic_block('bb')
            b = Builder.new(entryBB)
            if self._debugMode:
                dbgSubProg = self._debugInfoBuilder.addFunctionInfoStart(module=self._module, builder=b, lineNumber=0, name='main', displayName='main')
            b.branch(BB)
            b = Builder.new(BB)
            r = b.call(esMain.llvmRef, [])
            retTypes = esMain.esType.getFunctionReturnTypes()
            assert(len(retTypes) == 1)
            # forward main's return value, or return 0 if user main is void
            if retTypes[0].toLLVMType() != Type.void():
                b.ret(r)
            else:
                b.ret(Constant.int(Type.int(32), 0))
            if self._debugMode:
                self._debugInfoBuilder.addFunctionInfoEnd(module=self._module, builder=b, subprogram=dbgSubProg)
        else:
            # TODO implement version with parameters
            self._raiseException(RecoverableCompileError, postText=s)
def _addModuleXTors(self):
    """Emit the llvm.global_ctors / llvm.global_dtors appending arrays."""
    # create *appending* global_ctors and global_dtors variables
    ft = Type.pointer(Type.function(Type.void(), []))
    # each entry is a (priority, function pointer) pair
    st = Type.struct([Type.int(32), ft])
    def addXTors(xtors, what):
        # what selects the LLVM intrinsic global name suffix
        assert what in ['ctors', 'dtors']
        if not xtors:
            return
        t = Type.array(st, len(xtors))
        gvar = self._module.add_global_variable(t, 'llvm.global_%s' % what)
        gvar.linkage = LINKAGE_APPENDING
        elems = []
        for x in xtors:
            # 65535 is the default (last-run) priority
            prio = Constant.int(Type.int(32), 65535)
            func = x.llvmRef
            elems.append(Constant.struct([prio, func]))
        init = Constant.array(st, elems)
        gvar.initializer = init
    addXTors(self._moduleCTors, 'ctors')
    addXTors(self._moduleDTors, 'dtors')
def _setupDebugInformation(self):
    """Create and initialise the debug-info builder; no-op unless debug mode is on."""
    if not self._debugMode:
        return
    self._debugInfoBuilder = llvmdebug.DebugInfoBuilder()
    dib = self._debugInfoBuilder
    dib.setupModule(self._module, self._targetData)
    dib.addGlobalInfo(self._module)
    dib.addCompileUnitInfo(self._module, self._filename)
def _findCurrentFunction(self):
    """Return the ESFunction of the innermost enclosing DEFFUNC ast node."""
    enclosing = next(
        (node for node in reversed(self._nodes) if node.type == TreeType.DEFFUNC),
        None,
    )
    assert(enclosing is not None and 'no function found - type checker should have prevented this!')
    return enclosing.esFunction
def _onModuleStart(self, ast, packageName, moduleName, statements):
    """Translate a whole module: set up the LLVM module, emit globals first,
    then translate all remaining statements, collecting recoverable errors."""
    self._errors = 0
    self._warnings = 0
    self._module = Module.new(ast.moduleName)
    self._moduleNode = ast
    self._moduleCTors = ast.moduleCTors
    self._moduleDTors = ast.moduleDTors
    # setup target and data layout
    self._targetData = TargetData.new('e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128')# FIXME; this is just the llvm-gcc default for x86_64-unknown-linux-gnu
    self._module.data_layout = str(self._targetData) # FIXME
    self._module.target = 'x86_64-unknown-linux-gnu' # FIXME
    # setup debug Info
    self._setupDebugInformation()
    # add some helper functions / prototypes / ... to the module
    self._addHelperFunctionsPreTranslation()
    # first add global variables
    for x in statements:
        if x.type == TreeType.DEFGLOBAL:
            self._dispatch(x)
    # imported global variables don't have llvmRef entries: add them
    for k, v in ast.symbolTable.getAllSymbols().iteritems():
        if not isinstance(v, ESVariable):
            continue
        if getattr(v, 'llvmRef', None):
            continue
        # can't reuse _onDefGlobal at the moment, since we need to declare an "extern" global variable
        llvmType = v.toLLVMType()
        mangledName = v.mangleName()
        v.llvmRef = self._module.add_global_variable(llvmType, mangledName)
        # use default linkage: external
    # translate
    for x in statements:
        # skip globals, as they were already handled above
        if x.type == TreeType.DEFGLOBAL:
            continue
        try:
            self._dispatch(x)
        except RecoverableCompileError, e:
            # recoverable: report and keep translating remaining statements
            print e.message.rstrip()
            self._errors += 1
        except CompileError, e:
            # fatal: report and stop translating
            print e.message.rstrip()
            self._errors += 1
            break
    if self._errors:
        raise CompileError('errors occured during compilation: aborting')
    # set module ctors, dtors
    self._addModuleXTors()
    # finally add some more helper functions / prototypes / ... to the module
    self._addHelperFunctionsPostTranslation()
def _onImportAll(self, ast, moduleName):
    """Imports are resolved in earlier passes; nothing to emit here."""
    pass
def _onDefFunction(self, ast, modifierKeys, modifierValues, name, returnTypeName, parameterNames, parameterTypeNames, block):
    """Translate a function definition: declare (or reuse a forward declaration of)
    the LLVM function, alloca its parameters, translate the body, and verify."""
    esFunction = ast.esFunction
    esType = esFunction.esType
    try:
        # make really sure there is no function with this name
        llvmRef = self._module.get_function_named(esFunction.mangledName)
    except LLVMException:
        llvmRef = None
    if llvmRef:
        if not llvmRef.is_declaration:
            s1 = 'mangled name already in use: %s' % esFunction.mangledName
            s2 = 'This can be caused by defining a function with the same signature multiple times. If that\'s not the case please submit a bugreport with a testcase.'
            self._raiseException(CompileError, tree=ast.getChild(1), inlineText=s1, postText=s2)
    else:
        llvmRef = self._module.add_function(esType.toLLVMType(), esFunction.mangledName)
    esFunction.llvmRef = llvmRef # provide access through symbol table
    ast.llvmRef = llvmRef # provide direct access through ast node
    # set parameter names
    for i,x in enumerate(parameterNames):
        llvmRef.args[i].name = x.text
    # declaration only (no body): done
    if not block:
        return
    entryBB = llvmRef.append_basic_block('entry')
    bEntry = Builder.new(entryBB)
    if self._debugMode:
        dbgSubProg = self._debugInfoBuilder.addFunctionInfoStart(module=self._module, builder=bEntry, lineNumber=ast.line, name=esFunction.name, displayName=esFunction.name)
        ast.dbgSubProg = dbgSubProg
    # add variables: every parameter gets a stack slot so it is mutable
    for i,x in enumerate(parameterNames):
        var = self._findSymbol(name=x.text, type_=ESVariable)
        var.llvmRef = self._createAllocaForVar(x.text, var.toLLVMType(), llvmRef.args[i])
        if self._debugMode:
            self._debugInfoBuilder.addLocalVariableInfo(module=self._module, builder=bEntry, llvmRef=var.llvmRef, esType=var.esType, subprogram=dbgSubProg, name=x.text, lineNumber=x.line, varType='arg')
    # branch from entry to real code block and dispatch function body
    bb = llvmRef.append_basic_block('bb')
    self._currentBuilder = Builder.new(bb)
    bEntry.branch(bb)
    self._dispatch(block)
    returnTypes = esFunction.esType.getFunctionReturnTypes()
    bb = self._currentBuilder.block
    # if the last block is not terminated, synthesize a return (void) or a trap
    if not (bb.instructions and bb.instructions[-1].is_terminator):
        if len(returnTypes) == 1 and returnTypes[0].toLLVMType() == Type.void():
            self._currentBuilder.ret_void()
        else:
            # NOTE(review): 's' is built but never printed/used — the warning
            # text appears to be lost; confirm whether _generateContext emits it.
            s = self._generateContext(preText='warning:', postText='control flow possibly reaches end of non-void function. Inserting trap instruction...', lineBase1=block.line, numAfter=3)
            trapFunc = Function.intrinsic(self._module, INTR_TRAP, []);
            self._currentBuilder.call(trapFunc, [])
            self._currentBuilder.ret(Constant.int(Type.int(32), -1)) # and return, otherwise func.verify will fail
    if self._debugMode:
        self._debugInfoBuilder.addFunctionInfoEnd(module=self._module, builder=self._currentBuilder, subprogram=dbgSubProg)
    llvmRef.verify()
def _onBlock(self, ast, blockContent):
    """Translate each statement of a block; in debug mode emit a stop point per statement."""
    for stmt in blockContent:
        if self._debugMode:
            self._debugInfoBuilder.addStopPoint(
                self._module, self._currentBuilder, stmt.line, stmt.charPos
            )
        self._dispatch(stmt)
def _onReturn(self, ast, expressions):
    """Emit ret / ret_void for a 'return' statement of the enclosing function."""
    enclosing = None
    for node in reversed(self._nodes):
        if node.type == TreeType.DEFFUNC:
            enclosing = node.esFunction
            break
    assert(enclosing)
    returnTypes = enclosing.esType.getFunctionReturnTypes()
    assert(len(returnTypes) == 1)
    if returnTypes[0].toLLVMType() == Type.void():
        # void function: a return value would be a type-checker bug
        assert(not expressions)
        self._currentBuilder.ret_void()
    else:
        retExpr = expressions[0]
        self._dispatch(retExpr)
        self._currentBuilder.ret(retExpr.llvmValue)
def _onAssert(self, ast, expression):
    """Translate 'assert expr': branch on the negated condition into a block
    that prints a source-context error message via puts and calls abort."""
    self._dispatch(expression)
    # TODO add a compiler switch to disable asserts, so they become noop's
    # TODO add a compiler switch to disable inclusion of context data
    # if value is statically available bail out now / warn
    # this does not work... investigate later
    #if value == Constant.int(Type.int(1), 0):
    #    print 'assert is always False in %s:%d' % ('???', ast.line())
    # find current function
    llvmFunc = self._findCurrentFunction().llvmRef
    # now implement an if
    thenBB = llvmFunc.append_basic_block('assert_true') # trap path
    elseBB = llvmFunc.append_basic_block('assert_false')
    cond = self._currentBuilder.not_(expression.llvmValue)
    self._currentBuilder.cbranch(cond, thenBB, elseBB)
    thenBuilder = Builder.new(thenBB)
    # build error string with up to 5 lines of leading source context
    if ast.line:
        errorStringConst = 'assert failed! file %s line %d:\n' % (self._filename, ast.line)
        start = max(ast.line - 1 - 5, 0)
        stop = min(ast.line - 1 + 1, len(self._sourcecodeLines))
        for i in range(start, stop):
            errorStringConst += '% 5d: %s' % (i + 1, self._sourcecodeLines[i])
            if i != stop - 1:
                errorStringConst += '\n'
        errorStringConst += ' # <----- failed\n'
    else:
        errorStringConst = '(unknown) assert failed!'
    errorStringConst = Constant.stringz(errorStringConst);
    errorString = self._module.add_global_variable(errorStringConst.type, 'assertErrorString')
    errorString.initializer = errorStringConst
    errorString.global_constant = True
    # GEP indices to get an i8* to the first character of the global string
    word = self._findSymbol(name=u'word', type_=ESType).toLLVMType()
    idx = [Constant.int(word, 0), Constant.int(word, 0)]
    errorStringGEP = errorString.gep(idx)
    puts = self._module.get_function_named('puts')
    thenBuilder.call(puts, [errorStringGEP])
    # emit abort
    abortFunc = self._module.get_function_named('abort')
    thenBuilder.call(abortFunc, [])
    thenBuilder.branch(elseBB) # we'll never get here - but create proper structure of IR
    self._currentBuilder = Builder.new(elseBB)
def _onIf(self, ast, expressions, blocks, elseBlock):
    """Translate an if / elif... / else chain into cbranch'd basic blocks
    converging on a shared merge block."""
    llvmFunc = self._findCurrentFunction().llvmRef
    mergeBB = llvmFunc.append_basic_block('if_merge')
    for i in range(len(expressions)):
        thenBB = llvmFunc.append_basic_block('if_then')
        elseBB = llvmFunc.append_basic_block('if_else')
        self._dispatch(expressions[i])
        self._currentBuilder.cbranch(expressions[i].llvmValue, thenBB, elseBB)
        # generate code for then branch
        self._currentBuilder = Builder.new(thenBB)
        self._dispatch(blocks[i])
        # branch to mergeBB, but only if there was no terminator instruction
        currentBB = self._currentBuilder.block
        if not (currentBB.instructions and currentBB.instructions[-1].is_terminator):
            self._currentBuilder.branch(mergeBB)
        # continue with next else if / else
        self._currentBuilder = Builder.new(elseBB)
    if elseBlock:
        self._dispatch(elseBlock)
    # close last elseBB
    currentBB = self._currentBuilder.block
    if not (currentBB.instructions and currentBB.instructions[-1].is_terminator):
        self._currentBuilder.branch(mergeBB)
    # continue in mergeBB
    self._currentBuilder = Builder.new(mergeBB)
def _onFor(self, ast, variableName, rangeStart, rangeStop, rangeStep, block):
    """Translate a range-based for loop. The sign of the step is tested at
    runtime to pick an ascending (SLT) or descending (SGT) continue check."""
    if rangeStart:
        self._dispatch(rangeStart)
        start = rangeStart.llvmValue
    else:
        start = Constant.int(Type.int(32), 0) # FIXME allow other types
    self._dispatch(rangeStop)
    stop = rangeStop.llvmValue
    if rangeStep:
        self._dispatch(rangeStep)
        step = rangeStep.llvmValue
    else:
        step = Constant.int(Type.int(32), 1) # FIXME allow other types
    inductVar = self._findSymbol(fromTree=variableName, type_=ESVariable)
    if not hasattr(inductVar, 'llvmRef'):
        inductVar.llvmRef = self._createAllocaForVar(variableName.text, inductVar.esType.toLLVMType())
    # setup loop by initializing induction variable
    self._currentBuilder.store(start, inductVar.llvmRef)
    # create blocks
    llvmFunc = self._findCurrentFunction().llvmRef
    headBB = llvmFunc.append_basic_block('head') # decide between Up and Down
    headDownBB = llvmFunc.append_basic_block('headDown')
    headUpBB = llvmFunc.append_basic_block('headUp')
    bodyBB = llvmFunc.append_basic_block('body')
    stepBB = llvmFunc.append_basic_block('step')
    # TODO: think about implementing an 'else' block, that gets called when the loop does not get executed
    mergeBB = llvmFunc.append_basic_block('merge')
    self._currentBuilder.branch(headBB)
    # setup continue / break targets
    ast.breakTarget = mergeBB
    ast.continueTarget = stepBB
    # count up or down?
    b = Builder.new(headBB)
    cond = b.icmp(IPRED_SGT, step, Constant.int(step.type, 0))
    b.cbranch(cond, headUpBB, headDownBB)
    # count down check
    b = Builder.new(headDownBB)
    cond = b.icmp(IPRED_SGT, b.load(inductVar.llvmRef), stop)
    b.cbranch(cond, bodyBB, mergeBB)
    # count up check
    b = Builder.new(headUpBB)
    cond = b.icmp(IPRED_SLT, b.load(inductVar.llvmRef), stop)
    b.cbranch(cond, bodyBB, mergeBB)
    # build loop body
    self._currentBuilder = Builder.new(bodyBB)
    self._dispatch(block)
    # end loop body with branch to stepBB
    self._currentBuilder.branch(stepBB)
    # now increment inductVar and branch back to head for another round
    b = Builder.new(stepBB)
    r = b.add(b.load(inductVar.llvmRef), step)
    b.store(r, inductVar.llvmRef)
    b.branch(headBB)
    # done! continue outside loop body
    self._currentBuilder = Builder.new(mergeBB)
def _onWhile(self, ast, expression, block):
    """Translate a while loop: head (condition test), body, merge blocks."""
    # create blocks
    llvmFunc = self._findCurrentFunction().llvmRef
    headBB = llvmFunc.append_basic_block('head')
    bodyBB = llvmFunc.append_basic_block('body')
    mergeBB = llvmFunc.append_basic_block('merge')
    # branch to headBB / enter loop
    self._currentBuilder.branch(headBB)
    # create test
    self._currentBuilder = Builder.new(headBB)
    self._dispatch(expression)
    self._currentBuilder.cbranch(expression.llvmValue, bodyBB, mergeBB)
    # build body
    self._currentBuilder = Builder.new(bodyBB)
    # break exits the loop; continue re-tests the condition
    ast.breakTarget = mergeBB
    ast.continueTarget = headBB
    self._dispatch(block)
    self._currentBuilder.branch(headBB)
    # continue with mergeBB
    self._currentBuilder = Builder.new(mergeBB)
def _onBreak(self, ast):
    """Branch to the break target of the innermost enclosing loop."""
    target = next(
        (n.breakTarget for n in reversed(self._nodes) if hasattr(n, 'breakTarget')),
        None,
    )
    assert(target and 'type checker should make sure that there is a break target')
    self._currentBuilder.branch(target)
def _onContinue(self, ast):
    """Branch to the continue target of the innermost enclosing loop."""
    target = None
    for n in reversed(self._nodes):
        if hasattr(n, 'continueTarget'):
            target = n.continueTarget
            break
    # fixed copy/paste from _onBreak: the message previously said "break target"
    assert(target and 'type checker should make sure that there is a continue target')
    self._currentBuilder.branch(target)
def _onPass(self, ast):
    """'pass' emits no code."""
    pass
def _onIntegerConstant(self, ast, value, suffix):
    """Materialize an integer literal as an LLVM constant of the annotated type."""
    ast.llvmValue = Constant.int(ast.esType.toLLVMType(), value)
def _onFloatConstant(self, ast, value, suffix):
    """Materialize a float literal; str(value) avoids binary rounding in the API."""
    ast.llvmValue = Constant.real(ast.esType.toLLVMType(), str(value))
def _onStringConstant(self, ast, constant):
    """Emit a raw-ASCII string literal (ar"...") as an internal global and
    set ast.llvmValue to an i8* pointing at its first character."""
    # FIXME
    s = constant.text
    # only the ar"..." literal form is supported here
    assert(s.startswith('ar"'))
    # strip the ar" prefix and the closing quote
    s = s[3:-1]
    stringConst = Constant.stringz(s)
    string = self._module.add_global_variable(stringConst.type, 'internalStringConstant')
    string.initializer = stringConst
    string.global_constant = True
    string.linkage = LINKAGE_INTERNAL
    # GEP indices to decay the [N x i8] global to an i8*
    word = self._findSymbol(name=u'word', type_=ESType).toLLVMType()
    idx = [Constant.int(word, 0), Constant.int(word, 0)]
    ast.llvmValue = string.gep(idx)
def _onVariable(self, ast, variableName):
    """Resolve a name: a function resolves to its address, otherwise load the variable."""
    # first try to find a function (which resolves to it's address), then a normal variable
    flist = self._findSymbol(fromTree=variableName, type_=ESFunction, mayFail=True)
    if flist:
        if len(flist) > 1:
            self._raiseException(RecoverableCompileError, tree=variableName, inlineText='taking the address of a overloaded function is not implemented, yet')
        f = flist[0]
        ast.llvmValue = f.llvmRef
        ast.llvmRef = f.llvmRef
    else:
        var = self._findSymbol(fromTree=variableName, type_=ESVariable)
        # llvmValue = current value, llvmRef = address for stores
        ast.llvmValue = self._currentBuilder.load(var.llvmRef)
        ast.llvmRef = var.llvmRef
def _createAllocaForVar(self, name, llvmType, value=None):
    """Alloca a stack slot in the function's entry block and store an initial
    value (zero/null default when none is given); returns the alloca ref."""
    # FIXME
    if llvmType.kind == TYPE_INTEGER:
        defaultValue = Constant.int(llvmType, 0)
    elif llvmType.kind in [TYPE_FLOAT, TYPE_DOUBLE]:
        defaultValue = Constant.real(llvmType, 0)
    elif llvmType.kind == TYPE_POINTER:
        defaultValue = Constant.null(llvmType)
    elif llvmType.kind == TYPE_STRUCT:
        defaultValue = Constant.null(llvmType)
    else:
        assert(0 and 'unsupported variable type')
    if value == None:
        value = defaultValue
    # use the usual LLVM pattern to create mutable variables: use alloca
    # important: the mem2reg pass is limited to analyzing the entry block of functions,
    # so all variables must be defined there
    llvmFunc = self._findCurrentFunction().llvmRef
    entryBB = llvmFunc.get_entry_basic_block()
    entryBuilder = Builder.new(entryBB)
    entryBuilder.position_at_beginning(entryBB)
    ref = entryBuilder.alloca(llvmType, name)
    entryBuilder.store(value, ref)
    return ref
def _onDefVariable(self, ast, variableName, typeName):
    """Translate a local variable definition: alloca a slot, add debug info."""
    var = self._findSymbol(fromTree=variableName, type_=ESVariable)
    var.llvmRef = self._createAllocaForVar(variableName.text, var.esType.toLLVMType())
    if self._debugMode:
        # first find reference to dbgSubProg / enclosing function ast node
        dbgSubProg = None
        for n in reversed(self._nodes):
            if hasattr(n, 'dbgSubProg'):
                dbgSubProg = n.dbgSubProg
                break
        assert(dbgSubProg and '_onDefVariable works only inside functions')
        self._debugInfoBuilder.addLocalVariableInfo(module=self._module, builder=self._currentBuilder, llvmRef=var.llvmRef, esType=var.esType, subprogram=dbgSubProg, name=variableName.text, lineNumber=variableName.line, varType='auto')
def _onDefGlobal(self, ast, variableName, typeName, expression):
    """Translate a global variable definition: zero-init when only a type is
    given, otherwise require a trivially-constant initializer expression."""
    var = self._findSymbol(fromTree=variableName, type_=ESVariable)
    llvmType = var.toLLVMType()
    mangledName = var.mangleName() # FIXME use name mangling!
    var.llvmRef = self._module.add_global_variable(llvmType, mangledName)
    llvmRef = var.llvmRef
    #llvmRef.linkage = LINKAGE_COMMON
    if typeName:
        llvmRef.initializer = Constant.null(llvmType)
    else:
        try:
            self._dispatch(expression)
            llvmRef.initializer = expression.llvmValue
        except AttributeError, ae:
            # non-constant expressions touch self._currentBuilder, which is
            # unset at global scope — translate that into a compile error
            assert('_currentBuilder' in str(ae)) # TODO replace with check based on AST
            self._raiseException(RecoverableCompileError, tree=expression, inlineText='expected trivial constant expression')
def _onCallFunc(self, ast, calleeName, expressions):
    """Translate a call: evaluate arguments, resolve the callee (direct
    reference, module lookup, function pointer, or implicit declaration)."""
    params = []
    for x in expressions:
        self._dispatch(x)
        params.append(x.llvmValue)
    esFunction = ast.esFunction
    llvmFunc = getattr(esFunction, 'llvmRef', None)
    if not llvmFunc:
        # try to find function in this module
        try:
            llvmFunc = self._module.get_function_named(esFunction.mangledName)
        except LLVMException:
            llvmFunc = None
    if not llvmFunc:
        # was callee a function pointer?
        esVariable = self._findSymbol(fromTree=calleeName, type_=ESVariable, mayFail=True)
        if esVariable:
            llvmFunc = self._currentBuilder.load(esVariable.llvmRef)
        else:
            # function was not declared, yet...
            llvmFunc = self._module.add_function(esFunction.esType.toLLVMType(), esFunction.mangledName)
    ast.llvmValue = self._currentBuilder.call(llvmFunc, params)
def _onBasicOperator(self, ast, op, arg1, arg2):
    """Translate unary and binary operators to LLVM instructions; the result
    is stored on ast.llvmValue. arg2 is None for unary operators."""
    tt = TreeType
    # arg1 is always valid, arg2 may be None
    self._dispatch(arg1)
    if arg2:
        self._dispatch(arg2)
    if op == tt.PLUS:
        if arg2:
            ast.llvmValue = self._currentBuilder.add(arg1.llvmValue, arg2.llvmValue)
        else:
            # unary plus is a no-op
            ast.llvmValue = arg1.llvmValue
    elif op == tt.MINUS:
        if arg2:
            ast.llvmValue = self._currentBuilder.sub(arg1.llvmValue, arg2.llvmValue)
        else:
            # unary minus: 0 - x
            ast.llvmValue = self._currentBuilder.sub(Constant.null(arg1.llvmValue.type), arg1.llvmValue)
    elif op == tt.STAR:
        ast.llvmValue = self._currentBuilder.mul(arg1.llvmValue, arg2.llvmValue)
    elif op == tt.SLASH:
        # division instruction depends on signedness / floating point
        if arg1.esType.isSignedInteger():
            ast.llvmValue = self._currentBuilder.sdiv(arg1.llvmValue, arg2.llvmValue)
        elif arg1.esType.isUnsignedInteger():
            ast.llvmValue = self._currentBuilder.udiv(arg1.llvmValue, arg2.llvmValue)
        elif arg1.esType.isFloatingPoint():
            ast.llvmValue = self._currentBuilder.fdiv(arg1.llvmValue, arg2.llvmValue)
        else:
            raise NotImplementedError('FIXME? TODO?')
    elif op == tt.PERCENT:
        # remainder follows LLVM srem/urem/frem semantics
        if arg1.esType.isSignedInteger():
            ast.llvmValue = self._currentBuilder.srem(arg1.llvmValue, arg2.llvmValue)
        elif arg1.esType.isUnsignedInteger():
            ast.llvmValue = self._currentBuilder.urem(arg1.llvmValue, arg2.llvmValue)
        elif arg1.esType.isFloatingPoint():
            ast.llvmValue = self._currentBuilder.frem(arg1.llvmValue, arg2.llvmValue)
        else:
            raise NotImplementedError('TODO')
    elif op == tt.NOT:
        ast.llvmValue = self._currentBuilder.not_(arg1.llvmValue)
    elif op == tt.AND:
        ast.llvmValue = self._currentBuilder.and_(arg1.llvmValue, arg2.llvmValue)
    elif op == tt.OR:
        ast.llvmValue = self._currentBuilder.or_(arg1.llvmValue, arg2.llvmValue)
    elif op == tt.XOR:
        ast.llvmValue = self._currentBuilder.xor(arg1.llvmValue, arg2.llvmValue)
    elif op in [tt.LESS, tt.LESSEQUAL, tt.EQUAL, tt.NOTEQUAL, tt.GREATEREQUAL, tt.GREATER]:
        # comparisons: pick signed/unsigned/float predicate from operand types
        if arg1.esType.isSignedInteger() and arg2.esType.isSignedInteger():
            preds = {}
            preds[tt.LESS] = IPRED_SLT
            preds[tt.LESSEQUAL] = IPRED_SLE
            preds[tt.EQUAL] = IPRED_EQ
            preds[tt.NOTEQUAL] = IPRED_NE
            preds[tt.GREATEREQUAL] = IPRED_SGE
            preds[tt.GREATER] = IPRED_SGT
            ast.llvmValue = self._currentBuilder.icmp(preds[op], arg1.llvmValue, arg2.llvmValue)
        elif (arg1.esType.isUnsignedInteger() and arg2.esType.isUnsignedInteger()) or (arg1.esType.isPointer() and arg2.esType.isPointer()):
            preds = {}
            preds[tt.LESS] = IPRED_ULT
            preds[tt.LESSEQUAL] = IPRED_ULE
            preds[tt.EQUAL] = IPRED_EQ
            preds[tt.NOTEQUAL] = IPRED_NE
            preds[tt.GREATEREQUAL] = IPRED_UGE
            preds[tt.GREATER] = IPRED_UGT
            ast.llvmValue = self._currentBuilder.icmp(preds[op], arg1.llvmValue, arg2.llvmValue)
        elif arg1.esType.isBoolean() and arg2.esType.isBoolean():
            # only equality comparisons make sense for booleans
            preds = {}
            preds[tt.EQUAL] = IPRED_EQ
            preds[tt.NOTEQUAL] = IPRED_NE
            ast.llvmValue = self._currentBuilder.icmp(preds[op], arg1.llvmValue, arg2.llvmValue)
        elif arg1.esType.isFloatingPoint() and arg2.esType.isFloatingPoint():
            # TODO think about ordered and unordered comparisions...
            # for now ordered
            preds = {}
            preds[tt.LESS] = RPRED_OLT
            preds[tt.LESSEQUAL] = RPRED_OLE
            preds[tt.EQUAL] = RPRED_OEQ
            preds[tt.NOTEQUAL] = RPRED_ONE
            preds[tt.GREATEREQUAL] = RPRED_OGE
            preds[tt.GREATER] = RPRED_OGT
            ast.llvmValue = self._currentBuilder.fcmp(preds[op], arg1.llvmValue, arg2.llvmValue)
        else:
            print arg1.esType, arg2.esType
            raise NotImplementedError('TODO')
    elif op == tt.DOUBLESTAR:
        if arg2.llvmValue.type.kind == TYPE_INTEGER:
            # powi
            powiFunc = Function.intrinsic(self._module, INTR_POWI, [arg1.llvmValue.type])
            ast.llvmValue = self._currentBuilder.call(powiFunc, [arg1.llvmValue, arg2.llvmValue])
        else:
            # pow
            raise NotImplementedError('TODO')
    else:
        raise NotImplementedError('operator not implemented: %s / "%s"' % (op, ast.text))
def _simpleAssignment(self, var, llvmValue):
    """Store llvmValue into var, lazily creating its alloca (and debug info)
    on first assignment."""
    if not hasattr(var, 'llvmRef'):
        # does not have an associated alloca, yet
        # we MUST NOT pass a value to _createAllocaForVar! That value is not available in the entry BB!
        var.llvmRef = self._createAllocaForVar(var.name, var.esType.toLLVMType())
        if self._debugMode:
            # first find reference to dbgSubProg / enclosing function ast node
            dbgSubProg = None
            for n in reversed(self._nodes):
                if hasattr(n, 'dbgSubProg'):
                    dbgSubProg = n.dbgSubProg
                    break
            assert(dbgSubProg and '_onDefVariable works only inside functions')
            self._debugInfoBuilder.addLocalVariableInfo(module=self._module, builder=self._currentBuilder, llvmRef=var.llvmRef, esType=var.esType, subprogram=dbgSubProg, name=var.name, lineNumber=0, varType='auto') # FIXME fix line number
    self._currentBuilder.store(llvmValue, var.llvmRef)
def _onAssign(self, ast, assigneeExpr, expression):
    """Translate an assignment to a plain variable or a dereference target."""
    self._dispatch(expression)
    # FIXME
    if assigneeExpr.type == TreeType.VARIABLE:
        variableName = assigneeExpr.children[0]
        var = self._findSymbol(fromTree=variableName, type_=ESVariable)
        self._simpleAssignment(var, expression.llvmValue)
    elif assigneeExpr.type == TreeType.DEREFERENCE:
        # dereference computes llvmRef = store address as a side effect
        self._dispatch(assigneeExpr)
        #variableName = assigneeExpr.children[0]
        #var = self._findSymbol(fremTree=variableName, type_=ESVariable)
        self._currentBuilder.store(expression.llvmValue, assigneeExpr.llvmRef)
    else:
        assert(0 and 'FIXME? TODO?')
def _onListAssign(self, ast, variableNames, expressions):
    """Translate a tuple/list assignment (a, b = x, y) via temporaries."""
    # use a very simple aproach:
    # copy source variables into temporary variables
    # copy data from temporary variables to destination variables
    # this avoids difficult cases like: a,b = b,a or a,b,c = b,b,b
    # but a,b = c,d is a bit slower - but the optimizer should transform that to an efficient version
    # copy source -> temp
    temps = []
    n = len(variableNames)
    assert(n == len(expressions))
    for i in range(n):
        self._dispatch(expressions[i])
        ref = self._currentBuilder.alloca(expressions[i].esType.toLLVMType(), u'listassign_tmp')
        self._currentBuilder.store(expressions[i].llvmValue, ref)
        esVar = ESVariable(u'listassign_tmp', '__local', '__local', expressions[i].esType) # TODO insert real pkg / module names
        esVar.llvmRef = ref
        temps.append(esVar)
    # copy temp -> destination
    # this is a simple assignment
    for i in range(n):
        if variableNames[i].type == TreeType.VARIABLE:
            var = self._findSymbol(fromTree=variableNames[i].children[0], type_=ESVariable)
            value = self._currentBuilder.load(temps[i].llvmRef)
            self._simpleAssignment(var, value)
        else:
            assert(0 and 'TODO')
def _onCast(self, ast, expression, typeName):
    """Translate an explicit cast, choosing the LLVM conversion instruction
    from the (source type, target type) pair; unsupported pairs raise."""
    self._dispatch(expression)
    bool = self._findSymbol(name=u'bool', type_=ESType)
    targetT = ast.esType
    sourceT = expression.esType
    if targetT.isEquivalentTo(sourceT, True):# may be really the same or only structurally
        # FIXME TODO is this correct???
        ast.llvmValue = expression.llvmValue
        return
    bad = False
    if targetT.isEquivalentTo(bool, False):
        # any-to-bool: compare against zero
        if sourceT.isSignedInteger() or sourceT.isUnsignedInteger():
            ast.llvmValue = self._currentBuilder.icmp(IPRED_NE, expression.llvmValue, Constant.int(expression.llvmValue.type, 0))
        elif sourceT.isFloatingPoint():
            # TODO think about ordered and unordered
            # for now use ordered
            ast.llvmValue = self._currentBuilder.fcmp(RPRED_ONE, expression.llvmValue, Constant.real(expression.llvmValue.type, '0'))
        else:
            bad = True
    elif targetT.isSignedInteger():
        if sourceT.isEquivalentTo(bool, False):
            ast.llvmValue = self._currentBuilder.zext(expression.llvmValue, targetT.toLLVMType())
        elif sourceT.isSignedInteger():
            # signed-to-signed: truncate or sign-extend by bit width
            t = targetT.toLLVMType()
            s = sourceT.toLLVMType()
            tBits = t.width
            sBits = s.width
            if sBits > tBits:
                ast.llvmValue = self._currentBuilder.trunc(expression.llvmValue, t)
            elif sBits < tBits:
                ast.llvmValue = self._currentBuilder.sext(expression.llvmValue, t)
            else:
                assert(0 and 'dead code path; should have been caught by other checks!')
        elif sourceT.isFloatingPoint():
            ast.llvmValue = self._currentBuilder.fptosi(expression.llvmValue, targetT.toLLVMType())
        else:
            bad = True
    elif targetT.isUnsignedInteger():
        if sourceT.isEquivalentTo(bool, False):
            bad = True # FIXME
        elif sourceT.isUnsignedInteger():
            # unsigned-to-unsigned: truncate or zero-extend by bit width
            t = targetT.toLLVMType()
            s = sourceT.toLLVMType()
            tBits = t.width
            sBits = s.width
            if sBits > tBits:
                ast.llvmValue = self._currentBuilder.trunc(expression.llvmValue, t)
            elif sBits < tBits:
                ast.llvmValue = self._currentBuilder.zext(expression.llvmValue, t)
            else:
                assert(0 and 'dead code path; should have been caught by other checks!')
        elif sourceT.isSignedInteger():
            t = targetT.toLLVMType()
            s = sourceT.toLLVMType()
            tBits = t.width
            sBits = s.width
            if sBits > tBits:
                raise NotImplementedError()
            elif sBits < tBits:
                # NOTE(review): sign-extending into an unsigned target looks
                # questionable (zext expected) — confirm intended semantics
                ast.llvmValue = self._currentBuilder.sext(expression.llvmValue, t)
            else:
                # FIXME???
                ast.llvmValue = expression.llvmValue
        else:
            bad = True
    elif targetT.isFloatingPoint():
        if sourceT.isSignedInteger():
            ast.llvmValue = self._currentBuilder.sitofp(expression.llvmValue, targetT.toLLVMType())
        elif sourceT.isUnsignedInteger():
            ast.llvmValue = self._currentBuilder.uitofp(expression.llvmValue, targetT.toLLVMType())
        else:
            bad = True
    elif targetT.isPointer():
        if sourceT.isPointer():
            ast.llvmValue = self._currentBuilder.bitcast(expression.llvmValue, targetT.toLLVMType())
            #ast.llvmValue = expression.llvmValue
        else:
            bad = True
    else:
        bad = True
    if bad:
        raise NotImplementedError('cast from %s to %s is not yet supported' % (sourceT, targetT))
def _onDereference(self, ast, expression, indexExpression):
    """Emit code for a dereference: pointer indexing or struct member access.

    Produces both ast.llvmRef (the address, needed for stores) and
    ast.llvmValue (the loaded value, needed for reads).  The dereference is
    ambiguous at this point -- a store only needs the ref, a load only the
    value -- so both are emitted; the unused load is left for the optimizer
    to remove.
    """
    self._dispatch(expression)
    word = self._findSymbol(name=u'word', type_=ESType).toLLVMType()
    esType = expression.esType
    if esType.isPointer():
        if indexExpression:
            self._dispatch(indexExpression)
            if indexExpression.llvmValue.type != word:
                llvmValue = indexExpression.llvmValue
                llvmType = llvmValue.type
                if llvmType.kind != TYPE_INTEGER:
                    self._raiseException(RecoverableCompileError, tree=indexExpression, inlineText='index type must be integer')
                # Widen 32 bit indices on 64 bit targets; reject 64 bit
                # indices on 32 bit targets.
                if llvmType.width == 32 and word.width == 64:
                    llvmValue = self._currentBuilder.sext(llvmValue, word)
                elif llvmType.width == 64 and word.width == 32:
                    # bugfix: was self._raise(...), which does not exist in
                    # this walker; use self._raiseException as on the
                    # integer-type check above.
                    self._raiseException(RecoverableCompileError, tree=indexExpression, inlineText='the target architecture only supports 32 bit indices')
                else:
                    assert(0 and 'FIXME or should this never happen --> dead code path?')
            else:
                llvmValue = indexExpression.llvmValue
            idx = [llvmValue]
        else:
            idx = [Constant.int(word, 0)]
        toDeref = expression.llvmValue
    elif esType.isStruct():
        if indexExpression.type == TreeType.NAME:
            memberIdx = esType.getStructMemberIndexByName(indexExpression.text)
            idx = [Constant.int(word, 0), Constant.int(Type.int(32), memberIdx)]
        else:
            raise NotImplementedError('TODO')
        toDeref = expression.llvmRef
    else:
        assert(0 and 'dead code path')
    # every variable is an alloca --> first get the real memory address,
    # then load the data from it
    realAddrWithOffset = self._currentBuilder.gep(toDeref, idx)
    ast.llvmRef = realAddrWithOffset
    ast.llvmValue = self._currentBuilder.load(realAddrWithOffset)
def _onAlias(self, ast, name, typeName):
    """Alias declarations require no code generation."""
def _onTypedef(self, ast, name, typeName):
    """Typedef declarations require no code generation."""
def _onAddressOf(self, ast, expression):
    """Emit the address-of operator.

    Both result slots receive the operand's reference; see _onDereference
    for the ambiguous use of llvmValue vs. llvmRef.
    """
    self._dispatch(expression)
    ref = expression.llvmRef
    ast.llvmRef = ref
    ast.llvmValue = ref
def _onNew(self, ast, typeName, numExpr):
    """Emit a heap allocation via malloc_array; numExpr selects 'new T[n]'."""
    if numExpr:
        self._dispatch(numExpr)
        count = numExpr.llvmValue
    else:
        # plain 'new T' allocates exactly one element
        count = Constant.int(Type.int(32), 1)
    elementType = ast.esType.dereference().toLLVMType()
    ast.llvmValue = self._currentBuilder.malloc_array(elementType, count)
def _onDefStruct(self, ast, name, members):
    """Register the struct's LLVM type under its source-level name."""
    llvmStructType = ast.esType.toLLVMType()
    self._module.add_type_name(name.text, llvmStructType)
def _onNoneConstant(self, ast):
    """'none' lowers to a null i8 pointer constant."""
    i8Ptr = Type.pointer(Type.int(8))
    ast.llvmValue = Constant.null(i8Ptr)
def _onBooleanConstant(self, ast, value):
    """Boolean literals lower to an i1 constant."""
    boolType = Type.int(1)
    ast.llvmValue = Constant.int(boolType, value)
def walkAST(self, ast, absFilename, sourcecode='', debugMode=False):
"""Translate *ast* (a MODULESTART tree) into an LLVM module and return it (verified)."""
# only whole modules can be walked
assert(ast.type == TreeType.MODULESTART)
self._module = None
self._debugMode = debugMode
# base-class dispatch invokes the _on* handlers; presumably one of them
# creates self._module -- it must be non-None afterwards (verify() below)
astwalker.ASTWalker.walkAST(self, ast, absFilename, sourcecode)
self._module.verify()
return self._module
def run(module, function):
    """JIT-execute *function* (taking no arguments) from *module* and return its result."""
    provider = ModuleProvider.new(module)
    engine = ExecutionEngine.new(provider)
    return engine.run_function(function, [])
``` |
[
{
"content": "Here is some code:\n```python\n# Copyright 2020 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n# Copyright 2020 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/license... | ```python
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AWS Credentials and AWS Signature V4 Request Signer.
This module provides credentials to access Google Cloud resources from Amazon
Web Services (AWS) workloads. These credentials are recommended over the
use of service account credentials in AWS as they do not involve the management
of long-live service account private keys.
AWS Credentials are initialized using external_account arguments which are
typically loaded from the external credentials JSON file.
Unlike other Credentials that can be initialized with a list of explicit
arguments, secrets or credentials, external account clients use the
environment and hints/guidelines provided by the external_account JSON
file to retrieve credentials and exchange them for Google access tokens.
This module also provides a basic implementation of the
`AWS Signature Version 4`_ request signing algorithm.
AWS Credentials use serialized signed requests to the
`AWS STS GetCallerIdentity`_ API that can be exchanged for Google access tokens
via the GCP STS endpoint.
.. _AWS Signature Version 4: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
.. _AWS STS GetCallerIdentity: https://docs.aws.amazon.com/STS/latest/APIReference/API_GetCallerIdentity.html
"""
import hashlib
import hmac
import io
import json
import os
import re
from six.moves import http_client
from six.moves import urllib
from google.auth import _helpers
from google.auth import environment_vars
from google.auth import exceptions
from google.auth import external_account
# AWS Signature Version 4 signing algorithm identifier.
_AWS_ALGORITHM = "AWS4-HMAC-SHA256"
# The termination string for the AWS credential scope value as defined in
# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
_AWS_REQUEST_TYPE = "aws4_request"
# The AWS authorization header name for the security session token if available.
_AWS_SECURITY_TOKEN_HEADER = "x-amz-security-token"
# The AWS authorization header name for the auto-generated date.
_AWS_DATE_HEADER = "x-amz-date"
class RequestSigner(object):
    """Implements an AWS request signer based on the AWS Signature Version 4
    signing process.

    https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
    """

    def __init__(self, region_name):
        """Instantiates an AWS request signer used to compute authenticated signed
        requests to AWS APIs based on the AWS Signature Version 4 signing process.

        Args:
            region_name (str): The AWS region to use.
        """
        self._region_name = region_name

    def get_request_options(
        self,
        aws_security_credentials,
        url,
        method,
        request_payload="",
        additional_headers=None,
    ):
        """Generates the signed request for the provided HTTP request for calling
        an AWS API. This follows the steps described at:
        https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html

        Args:
            aws_security_credentials (Mapping[str, str]): A dictionary containing
                the AWS security credentials.
            url (str): The AWS service URL containing the canonical URI and
                query string.
            method (str): The HTTP method used to call this API.
            request_payload (Optional[str]): The optional request payload if
                available.
            additional_headers (Optional[Mapping[str, str]]): The optional
                additional headers needed for the requested AWS API.

        Returns:
            Mapping[str, str]: The AWS signed request dictionary object.

        Raises:
            ValueError: If the AWS service URL is not a valid https URL.
        """
        # bugfix: the default for additional_headers was a shared mutable
        # dict ({}); use None and normalize instead.
        additional_headers = additional_headers or {}

        # Get AWS credentials.
        access_key = aws_security_credentials.get("access_key_id")
        secret_key = aws_security_credentials.get("secret_access_key")
        security_token = aws_security_credentials.get("security_token")

        uri = urllib.parse.urlparse(url)
        # Validate provided URL.
        if not uri.hostname or uri.scheme != "https":
            raise ValueError("Invalid AWS service URL")

        header_map = _generate_authentication_header_map(
            host=uri.hostname,
            canonical_uri=os.path.normpath(uri.path or "/"),
            canonical_querystring=_get_canonical_querystring(uri.query),
            method=method,
            region=self._region_name,
            access_key=access_key,
            secret_key=secret_key,
            security_token=security_token,
            request_payload=request_payload,
            additional_headers=additional_headers,
        )
        headers = {
            "Authorization": header_map.get("authorization_header"),
            "host": uri.hostname,
        }
        # Add x-amz-date if available.
        if "amz_date" in header_map:
            headers[_AWS_DATE_HEADER] = header_map.get("amz_date")
        # Append additional optional headers, eg. X-Amz-Target, Content-Type, etc.
        for key in additional_headers:
            headers[key] = additional_headers[key]
        # Add session token if available.
        if security_token is not None:
            headers[_AWS_SECURITY_TOKEN_HEADER] = security_token

        signed_request = {"url": url, "method": method, "headers": headers}
        if request_payload:
            signed_request["data"] = request_payload
        return signed_request
def _get_canonical_querystring(query):
    """Generates the canonical query string given a raw query string.

    Logic is based on
    https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html

    Args:
        query (str): The raw query string.

    Returns:
        str: The canonical query string.
    """
    parsed = urllib.parse.parse_qs(query)
    # URI-encode every key and all of its values; values sorted per key.
    encoded = {
        urllib.parse.quote(key, safe="-_.~"): sorted(
            urllib.parse.quote(value, safe="-_.~") for value in values
        )
        for key, values in parsed.items()
    }
    # Keys sorted alphabetically; keys with multiple values emit one
    # key=value pair per value.
    return "&".join(
        "{}={}".format(key, value)
        for key in sorted(encoded)
        for value in encoded[key]
    )
def _sign(key, msg):
    """Return the HMAC-SHA256 digest of *msg* (str) keyed with *key* (bytes).

    Args:
        key (bytes): The HMAC-SHA256 key to use.
        msg (str): The message to hash.

    Returns:
        bytes: The computed hash bytes.
    """
    mac = hmac.new(key, msg.encode("utf-8"), hashlib.sha256)
    return mac.digest()
def _get_signing_key(key, date_stamp, region_name, service_name):
    """Derive the AWS Signature Version 4 signing key.

    https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html

    Args:
        key (str): The AWS secret access key.
        date_stamp (str): The '%Y%m%d' formatted date.
        region_name (str): The AWS region.
        service_name (str): The AWS service name, eg. sts.

    Returns:
        bytes: The signing key.
    """
    # The key is derived by chaining HMACs over date, region, service and
    # the fixed request-type terminator.
    signing_key = ("AWS4" + key).encode("utf-8")
    for message in (date_stamp, region_name, service_name, "aws4_request"):
        signing_key = _sign(signing_key, message)
    return signing_key
def _generate_authentication_header_map(
    host,
    canonical_uri,
    canonical_querystring,
    method,
    region,
    access_key,
    secret_key,
    security_token,
    request_payload="",
    additional_headers=None,
):
    """Generates the authentication header map needed for generating the AWS
    Signature Version 4 signed request.

    Args:
        host (str): The AWS service URL hostname.
        canonical_uri (str): The AWS service URL path name.
        canonical_querystring (str): The AWS service URL query string.
        method (str): The HTTP method used to call this API.
        region (str): The AWS region.
        access_key (str): The AWS access key ID.
        secret_key (str): The AWS secret access key.
        security_token (Optional[str]): The AWS security session token. This is
            available for temporary sessions.
        request_payload (Optional[str]): The optional request payload if
            available.
        additional_headers (Optional[Mapping[str, str]]): The optional
            additional headers needed for the requested AWS API.

    Returns:
        Mapping[str, str]: The AWS authentication header dictionary object.
            This contains the x-amz-date and authorization header information.
    """
    # bugfix: the default for additional_headers was a shared mutable dict
    # ({}); use None and normalize instead.
    additional_headers = additional_headers or {}
    # iam.amazonaws.com host => iam service.
    # sts.us-east-2.amazonaws.com host => sts service.
    service_name = host.split(".")[0]
    current_time = _helpers.utcnow()
    amz_date = current_time.strftime("%Y%m%dT%H%M%SZ")
    date_stamp = current_time.strftime("%Y%m%d")

    # Change all additional headers to be lower case.
    full_headers = {}
    for key in additional_headers:
        full_headers[key.lower()] = additional_headers[key]
    # Add AWS session token if available.
    if security_token is not None:
        full_headers[_AWS_SECURITY_TOKEN_HEADER] = security_token
    # Required headers
    full_headers["host"] = host
    # Do not use generated x-amz-date if the date header is provided.
    # Previously the date was not fixed with x-amz- and could be provided
    # manually.
    # https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req
    if "date" not in full_headers:
        full_headers[_AWS_DATE_HEADER] = amz_date

    # Header keys need to be sorted alphabetically.
    canonical_headers = ""
    header_keys = list(full_headers.keys())
    header_keys.sort()
    for key in header_keys:
        canonical_headers = "{}{}:{}\n".format(canonical_headers, key, full_headers[key])
    signed_headers = ";".join(header_keys)

    payload_hash = hashlib.sha256((request_payload or "").encode("utf-8")).hexdigest()

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
    canonical_request = "{}\n{}\n{}\n{}\n{}\n{}".format(
        method,
        canonical_uri,
        canonical_querystring,
        canonical_headers,
        signed_headers,
        payload_hash,
    )
    credential_scope = "{}/{}/{}/{}".format(
        date_stamp, region, service_name, _AWS_REQUEST_TYPE
    )

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
    string_to_sign = "{}\n{}\n{}\n{}".format(
        _AWS_ALGORITHM,
        amz_date,
        credential_scope,
        hashlib.sha256(canonical_request.encode("utf-8")).hexdigest(),
    )

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
    signing_key = _get_signing_key(secret_key, date_stamp, region, service_name)
    signature = hmac.new(
        signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
    ).hexdigest()

    # https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html
    authorization_header = "{} Credential={}/{}, SignedHeaders={}, Signature={}".format(
        _AWS_ALGORITHM, access_key, credential_scope, signed_headers, signature
    )
    authentication_header = {"authorization_header": authorization_header}
    # Do not use generated x-amz-date if the date header is provided.
    if "date" not in full_headers:
        authentication_header["amz_date"] = amz_date
    return authentication_header
class Credentials(external_account.Credentials):
"""AWS external account credentials.
This is used to exchange serialized AWS signature v4 signed requests to
AWS STS GetCallerIdentity service for Google access tokens.
"""
def __init__(
    self,
    audience,
    subject_token_type,
    token_url,
    credential_source=None,
    service_account_impersonation_url=None,
    client_id=None,
    client_secret=None,
    quota_project_id=None,
    scopes=None,
    default_scopes=None,
):
    """Instantiates an AWS workload external account credentials object.

    All arguments are forwarded unchanged to
    external_account.Credentials.__init__; see that class for their
    meaning.  The credential_source mapping additionally supplies the
    AWS-specific fields: environment_id, region_url, url and
    regional_cred_verification_url.

    Raises:
        google.auth.exceptions.RefreshError: If an error is encountered during
            access token retrieval logic.
        ValueError: For invalid parameters.

    .. note:: Typically one of the helper constructors
        :meth:`from_file` or
        :meth:`from_info` are used instead of calling the constructor directly.
    """
    super(Credentials, self).__init__(
        audience=audience,
        subject_token_type=subject_token_type,
        token_url=token_url,
        credential_source=credential_source,
        service_account_impersonation_url=service_account_impersonation_url,
        client_id=client_id,
        client_secret=client_secret,
        quota_project_id=quota_project_id,
        scopes=scopes,
        default_scopes=default_scopes,
    )
    source = credential_source or {}
    self._environment_id = source.get("environment_id") or ""
    self._region_url = source.get("region_url")
    self._security_credentials_url = source.get("url")
    self._cred_verification_url = source.get("regional_cred_verification_url")
    self._region = None
    self._request_signer = None
    self._target_resource = audience

    # Validate the environment ID; only version 1 ("aws1") is supported.
    matches = re.match(r"^(aws)([\d]+)$", self._environment_id)
    env_id, env_version = matches.groups() if matches else (None, None)

    if env_id != "aws" or self._cred_verification_url is None:
        raise ValueError("No valid AWS 'credential_source' provided")
    elif int(env_version or "") != 1:
        raise ValueError(
            "aws version '{}' is not supported in the current build.".format(
                env_version
            )
        )
def retrieve_subject_token(self, request):
    """Retrieves the subject token using the credential_source object.

    The subject token is a serialized `AWS GetCallerIdentity signed request`_:
    the AWS region is determined (environment variable or metadata server),
    AWS security credentials are retrieved, a Signature V4 signed
    GetCallerIdentity request is generated against the regional STS
    endpoint, and the result is serialized in the format the GCP STS
    endpoint expects.

    .. _AWS GetCallerIdentity signed request:
        https://cloud.google.com/iam/docs/access-resources-aws#exchange-token

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.

    Returns:
        str: The retrieved subject token.
    """
    # Lazily initialize the request signer once the AWS region is known.
    if self._request_signer is None:
        self._region = self._get_region(request, self._region_url)
        self._request_signer = RequestSigner(self._region)

    # Sign a GetCallerIdentity request; the regional endpoint is required,
    # otherwise the request will fail.
    aws_security_credentials = self._get_security_credentials(request)
    request_options = self._request_signer.get_request_options(
        aws_security_credentials,
        self._cred_verification_url.replace("{region}", self._region),
        "POST",
    )

    # Including the target resource in the signed headers is recommended to
    # ensure data integrity.
    request_headers = request_options.get("headers")
    request_headers["x-goog-cloud-target-resource"] = self._target_resource

    # The GCP STS endpoint expects the headers as a list of
    # {key: ..., value: ...} objects, serialized as
    # quote(json.dumps({url, method, headers})).  Everything is kept sorted
    # so the stringified JSON is predictable across Python versions.
    aws_signed_req = {
        "url": request_options.get("url"),
        "method": request_options.get("method"),
        "headers": [
            {"key": key, "value": request_headers[key]}
            for key in sorted(request_headers)
        ],
    }
    return urllib.parse.quote(
        json.dumps(aws_signed_req, separators=(",", ":"), sort_keys=True)
    )
def _get_region(self, request, url):
    """Retrieves the current AWS region from either the AWS_REGION
    environment variable or from the AWS metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        url (str): The AWS metadata server region URL.

    Returns:
        str: The current AWS region.

    Raises:
        google.auth.exceptions.RefreshError: If an error occurs while
            retrieving the AWS region.
    """
    # The AWS metadata server is not available in some AWS environments
    # such as AWS lambda. Instead, it is available via environment
    # variable.
    env_aws_region = os.environ.get(environment_vars.AWS_REGION)
    if env_aws_region is not None:
        return env_aws_region

    if not url:
        raise exceptions.RefreshError("Unable to determine AWS region")

    # bugfix: the documented *url* parameter was previously ignored in
    # favor of self._region_url; honor the parameter (the caller passes
    # self._region_url, so behavior for the existing call site is
    # unchanged).
    response = request(url=url, method="GET")

    # Support both string and bytes type response.data.
    response_body = (
        response.data.decode("utf-8")
        if hasattr(response.data, "decode")
        else response.data
    )

    # Use http_client.OK for consistency with the sibling metadata helpers.
    if response.status != http_client.OK:
        raise exceptions.RefreshError("Unable to retrieve AWS region", response_body)

    # This endpoint will return the region in format: us-east-2b.
    # Only the us-east-2 part should be used.
    return response_body[:-1]
def _get_security_credentials(self, request):
    """Retrieves the AWS security credentials required for signing AWS
    requests from either the AWS security credentials environment variables
    or from the AWS metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.

    Returns:
        Mapping[str, str]: The AWS security credentials dictionary object.

    Raises:
        google.auth.exceptions.RefreshError: If an error occurs while
            retrieving the AWS security credentials.
    """
    # Permanent credentials from environment variables take precedence.
    # https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html
    access_key_id = os.environ.get(environment_vars.AWS_ACCESS_KEY_ID)
    secret_access_key = os.environ.get(environment_vars.AWS_SECRET_ACCESS_KEY)
    # The session token is normally absent for permanent credentials.
    session_token = os.environ.get(environment_vars.AWS_SESSION_TOKEN)

    if access_key_id and secret_access_key:
        return {
            "access_key_id": access_key_id,
            "secret_access_key": secret_access_key,
            "security_token": session_token,
        }

    # Fall back to the metadata server: resolve the attached role first,
    # then fetch that role's temporary credentials.
    role_name = self._get_metadata_role_name(request)
    metadata_credentials = self._get_metadata_security_credentials(request, role_name)
    return {
        "access_key_id": metadata_credentials.get("AccessKeyId"),
        "secret_access_key": metadata_credentials.get("SecretAccessKey"),
        "security_token": metadata_credentials.get("Token"),
    }
def _get_metadata_security_credentials(self, request, role_name):
    """Fetches the AWS security credentials for *role_name* from the AWS
    metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        role_name (str): The AWS role name required by the AWS metadata
            server security_credentials endpoint in order to return the
            credentials.

    Returns:
        Mapping[str, str]: The AWS metadata server security credentials
            response.

    Raises:
        google.auth.exceptions.RefreshError: If an error occurs while
            retrieving the AWS security credentials.
    """
    response = request(
        url="{}/{}".format(self._security_credentials_url, role_name),
        method="GET",
        headers={"Content-Type": "application/json"},
    )

    # Support both string and bytes type response.data.
    if hasattr(response.data, "decode"):
        response_body = response.data.decode("utf-8")
    else:
        response_body = response.data

    if response.status != http_client.OK:
        raise exceptions.RefreshError(
            "Unable to retrieve AWS security credentials", response_body
        )
    return json.loads(response_body)
def _get_metadata_role_name(self, request):
    """Queries the AWS metadata server for the role attached to the current
    workload; the role name is required by the security-credentials
    endpoint in order to retrieve the AWS security credentials.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.

    Returns:
        str: The AWS role name.

    Raises:
        google.auth.exceptions.RefreshError: If an error occurs while
            retrieving the AWS role name.
    """
    if self._security_credentials_url is None:
        raise exceptions.RefreshError(
            "Unable to determine the AWS metadata server security credentials endpoint"
        )

    response = request(url=self._security_credentials_url, method="GET")

    # Support both string and bytes type response.data.
    if hasattr(response.data, "decode"):
        response_body = response.data.decode("utf-8")
    else:
        response_body = response.data

    if response.status != http_client.OK:
        raise exceptions.RefreshError("Unable to retrieve AWS role name", response_body)
    return response_body
@classmethod
def from_info(cls, info, **kwargs):
"""Creates an AWS Credentials instance from parsed external account info.
Args:
info (Mapping[str, str]): The AWS external account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.aws.Credentials: The constructed credentials.
Raises:
ValueError: For invalid parameters.
"""
# Keys absent from *info* are passed through as None; the constructor
# performs the actual validation (e.g. of credential_source).
return cls(
audience=info.get("audience"),
subject_token_type=info.get("subject_token_type"),
token_url=info.get("token_url"),
service_account_impersonation_url=info.get(
"service_account_impersonation_url"
),
client_id=info.get("client_id"),
client_secret=info.get("client_secret"),
credential_source=info.get("credential_source"),
quota_project_id=info.get("quota_project_id"),
**kwargs
)
@classmethod
def from_file(cls, filename, **kwargs):
    """Creates an AWS Credentials instance from an external account json file.

    Args:
        filename (str): The path to the AWS external account json file.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        google.auth.aws.Credentials: The constructed credentials.
    """
    with io.open(filename, "r", encoding="utf-8") as json_file:
        return cls.from_info(json.load(json_file), **kwargs)
``` |
[
{
"content": "```python\nfrom pprint import pprint\nfrom django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.conf import settings\nfrom stock.models import Hero, mastery, Rune\nimport json, requests, grequests\n\n# Create your procedures here.\n\n\n\n\ndef searchSummonerStats(summo... | [
{
"content": "<|memory_start|>```python\nfrom pprint import pprint\nfrom django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.conf import settings\nfrom stock.models import Hero, mastery, Rune\nimport json, requests, grequests\n\n# Create your procedures here.\n\n\n\n\ndef searchSum... | ```python
from pprint import pprint
from django.shortcuts import render
from django.http import HttpResponse
from django.conf import settings
from stock.models import Hero, mastery, Rune
import json, requests, grequests
# Create your procedures here.
def searchSummonerStats(summoner_id):
    """Fetch ranked stat summaries for one or many summoner ids.

    Args:
        summoner_id: a single summoner id or a list of ids.

    Returns:
        (context, stat_success): context maps summonerId -> stats summary
        JSON; stat_success is 1 when every request succeeded, '0' otherwise
        (the string form is kept for compatibility with existing callers).
    """
    context = {}
    # bugfix: the single-id branch built a url but never issued the request,
    # leaving `resp` undefined (NameError); normalize to a list so both
    # cases share one code path.
    if type(summoner_id) != list:
        summoner_id = [summoner_id]
    urls = []
    for summoner in summoner_id:
        urls.append('https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.3/stats/by-summoner/'+ str(summoner) +'/summary?api_key=' + settings.LOL_API_KEY2)
    rs = (grequests.get(u) for u in urls)
    resp = grequests.map(rs)
    stat_success = 1
    for response in resp:
        # grequests.map yields None for requests that failed to connect;
        # also skip non-200 responses instead of crashing on their bodies.
        if response is None or response.status_code != 200:
            stat_success = '0'
            continue
        values_json = json.loads(response.text)
        context[values_json['summonerId']] = values_json
    return (context, stat_success)
def searchSummonnerId(summoner_name):
    """Resolve a summoner name to its id, level and profile icon.

    Returns a dict with 'success' == 1 plus the summoner fields on success,
    'success' == 0 otherwise.
    """
    context = {}
    normalized = summoner_name.lower().replace(" ", "")
    url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.4/summoner/by-name/'+ normalized +'?api_key=' + settings.LOL_API_KEY
    resp = requests.get(url=url)
    if resp.status_code != 200:
        context['success'] = 0
        return context
    data = json.loads(resp.text)
    try:
        context['success'] = 1
        context['summonerName'] = normalized
        context['summonerLevel'] = data[normalized]['summonerLevel']
        context['id'] = data[normalized]['id']
        context['profileIcon'] = data[normalized]['profileIconId']
    except KeyError:
        # name not present in the response body
        context['success'] = 0
    return context
def searchSummonerName(summoner_id):
    """Fetch summoner objects by id (accepts a single id or a list of ids)."""
    if type(summoner_id) != list:
        id_list = str(summoner_id)
    else:
        # Riot's bulk endpoint takes comma separated ids; the trailing
        # comma matches the request format used elsewhere in this module.
        id_list = ''.join(str(summoner) + ',' for summoner in summoner_id)
    url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.4/summoner/'+ id_list +'?api_key=' + settings.LOL_API_KEY
    resp = requests.get(url=url)
    return json.loads(resp.text)
def searchSummonerRank(summoner_id):
    """Fetch league/rank entries by id (accepts a single id or a list of ids)."""
    if type(summoner_id) != list:
        id_list = str(summoner_id)
    else:
        # comma separated id list for the bulk league endpoint
        id_list = ''.join(str(summoner) + ',' for summoner in summoner_id)
    url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v2.5/league/by-summoner/'+ id_list +'?api_key=' + settings.LOL_API_KEY
    resp = requests.get(url=url)
    return json.loads(resp.text)
def searchSummonerChampionMastery(summoner_id, champion_id):
    """Fetch champion mastery data for a summoner/champion pair.

    When the API body cannot be parsed as JSON (e.g. an empty body), a
    stub with championLevel 0 is returned instead.
    """
    url = 'https://na.api.pvp.net/championmastery/location/'+ settings.LOL_PLATFORM_ID +'/player/'+ str(summoner_id) +'/champion/'+ str(champion_id) +'?api_key=' + settings.LOL_API_KEY
    resp = requests.get(url=url)
    try:
        return json.loads(resp.text)
    except ValueError:
        return {'championLevel': 0}
def searchTierImage(tier):
    """Map a ranked tier name (any casing) to its emblem image URL.

    Unknown tiers fall back to the unranked emblem.
    """
    unranked_url = 'http://s18.postimg.org/5t36g8pf9/unranked_1_92a5f4dfbb5ffab13f901c80a9d14384.png'
    tier_urls = {
        'Unranked': unranked_url,
        'Bronze': 'https://s3.amazonaws.com/f.cl.ly/items/3q1f0B2j1E0Y0a3P310V/Bronze.png',
        'Silver': 'https://s3.amazonaws.com/f.cl.ly/items/0J253J1z3o1d2Z152M2b/Silver.png',
        'Gold': 'https://s3.amazonaws.com/f.cl.ly/items/1Y360o3N261b020g0h1r/Gold.png',
        'Platinum': 'https://s3.amazonaws.com/f.cl.ly/items/3F2j1u2d3f0w0l260m3E/Platinum.png',
        'Diamond': 'https://s3.amazonaws.com/f.cl.ly/items/2X2F2r192B3K1j0p0n3d/Diamond.png',
        'Master': 'https://s3.amazonaws.com/f.cl.ly/items/083C392i0t1p1a3h1C3i/Master.png',
        'Challenger': 'https://s3.amazonaws.com/f.cl.ly/items/0K350Q2C0b0E0n043e0L/Challenger.png',
    }
    return tier_urls.get(tier.lower().title(), unranked_url)
def refreshRuneDatabase(request):
"""Rebuild the Rune table from the Riot static-data rune endpoint.

Deletes every stored Rune, re-fetches the full rune list, and parses each
description into a numeric bonus, a percentage flag and the remaining
'honest' text before saving.  Renders refresh-rune-database.html.
"""
context ={}
# request the rune list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/rune?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
# delete all the existing runes so the new information can be added
old_runes = Rune.objects.all()
old_runes.delete()
for rune in data['data']:
rune_id_riot = data['data'][rune]['id']
rune_name = data['data'][rune]['name']
# non-ascii characters are dropped before the textual parsing below
rune_description = data['data'][rune]['description'].encode('ascii', 'ignore')
rune_tier = data['data'][rune]['rune']['tier']
rune_type_data = data['data'][rune]['rune']['type']
# NOTE(review): the parsing below assumes descriptions shaped like
# '+<value>[%] de <text> (...)' -- presumably a localized (non-English)
# static-data locale; confirm before changing.
rune_bonus = rune_description.split(' de')[0]
rune_honest_text = rune_description.split(rune_bonus)[1]
rune_honest_text = rune_honest_text.split(' (')[0]
# the bonus value is prefixed with '+' or, failing that, '-'
try:
rune_bonus = rune_bonus.split('+')[1]
except:
rune_bonus = rune_bonus.split('-')[1]
# a '%' inside the bonus marks percentage bonuses
try:
rune_is_percentage = rune_bonus.split('%')[1]
rune_bonus = rune_bonus.split('%')[0]
rune_is_percentage = 1
except:
rune_is_percentage = 0
# rune_bonus = rune_bonus.replace(' ', '')
# keep only the leading token and normalize ',' decimal separators
rune_bonus = rune_bonus.split(' ')[0]
rune_bonus = rune_bonus.replace(',', '.')
rune_bonus = rune_bonus.replace(' ', '')
new_rune = Rune(id_riot = rune_id_riot, name = rune_name, description = rune_description, tier = rune_tier, rune_type = rune_type_data, bonus = float(rune_bonus), honest_text = rune_honest_text, is_percentage = rune_is_percentage)
new_rune.save()
return render(request, 'refresh-rune-database.html', context)
def refreshMasteryDatabase(request):
    """Rebuild the local mastery table from Riot's static-data v1.2 endpoint.

    One row is saved per mastery; its description is the last rank's entry.
    """
    context ={}
    # request the mastery list from the riot API
    url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/mastery?api_key=' + settings.LOL_API_KEY
    resp = requests.get(url=url)
    data = json.loads(resp.text)
    # delete all the existing masteries so the new information can be added
    old_masteries = mastery.objects.all()
    old_masteries.delete()
    for mastery_item in data['data']:
        mastery_id_riot = data['data'][mastery_item]['id']
        mastery_name = data['data'][mastery_item]['name']
        # 'description' is a list with one entry per mastery rank.
        mastery_description = data['data'][mastery_item]['description']
        # Second digit of the riot id encodes the mastery tree/column —
        # presumably; TODO confirm against the mastery id scheme.
        table_position = str(mastery_id_riot)[1]
        # The loop reduces the list to its LAST rank description; a single
        # row per mastery is saved (requestcurrentgame relies on .get()
        # returning exactly one row per id_riot).
        for item in mastery_description:
            mastery_description_single_var = item
        new_mastery = mastery(id_riot = mastery_id_riot, name = mastery_name, description = mastery_description_single_var, position = table_position)
        new_mastery.save()
    return render(request, 'refresh-mastery-database.html', context)
def refreshChampionDatabase(request):
    """Re-sync the local Hero table with Riot's static champion list."""
    # Pull the full champion catalogue from the static-data endpoint.
    url = ('https://na.api.pvp.net/api/lol/static-data/' + settings.LOL_REGION
           + '/v1.2/champion?api_key=' + settings.LOL_API_KEY)
    resp = requests.get(url=url)
    data = json.loads(resp.text)
    # Wipe the table so stale champions disappear before re-inserting.
    Hero.objects.all().delete()
    for champ in data['data'].values():
        new_champion = Hero(id_riot=champ['id'], name=champ['name'],
                            title=champ['title'], key=champ['key'])
        new_champion.save()
    return render(request, 'refresh-champion-database.html', {})
# Create your views here.
def index(request):
    """Render the landing page with an empty template context."""
    return render(request, 'index.html', {})
def getSummonerId(request):
    """Render the summoner-id lookup form page."""
    return render(request, 'getid.html', {})
def requestId(request):
    """Handle the summoner-id lookup form POST and render the result."""
    # Form field posted by getid.html.
    template_form = request.POST['requestId']
    # Coerce the posted value to str; any lowercasing/whitespace stripping
    # happens inside searchSummonnerId, not here.
    summoner_name = str(template_form)
    context = searchSummonnerId(summoner_name)
    return render(request, 'requestid.html', context)
def getmatchhistory(request):
    """Render the match-history lookup form page."""
    return render(request, 'getmatchhistory.html', {})
def requestmatchhistory(request):
    """Look up a summoner by name and render their recent match list."""
    #receive data from the template
    template_form = request.POST['requestmatchhistory']
    # Coerce the posted value to str before the id lookup.
    summoner_name = str(template_form)
    summoner_info = searchSummonnerId(summoner_name)
    context = {}
    context['summoner_name'] = summoner_name
    try:
        # matchlist v2.2 endpoint; the KeyError handler below fires when the
        # summoner lookup failed ('id' missing) or the response lacks
        # 'matches'.
        url = 'https://na.api.pvp.net/api/lol/' + settings.LOL_REGION + '/v2.2/matchlist/by-summoner/' + str(summoner_info['id']) + '?api_key=' + settings.LOL_API_KEY
        resp = requests.get(url=url)
        data = json.loads(resp.text)
        context['header'] = []
        context['header'].append('Lane')
        context['header'].append('Champion')
        context['header'].append('Season')
        context['header'].append('Match ID')
        context['header'].append('Duration')
        context['matches'] = []
        match_data_to_context = []
        for match in data['matches']:
            match_data_to_context = []
            match_data_to_context.append(match['lane'])
            # Resolve the champion id to its locally cached name.
            champion_name = Hero.objects.filter(id_riot = match['champion'])
            try:
                match_data_to_context.append(champion_name[0].name)
            except IndexError:
                # Champion not in the local cache; show a placeholder.
                match_data_to_context.append('-')
            match_data_to_context.append(match['season'])
            match_data_to_context.append(match['matchId'])
            # NOTE(review): the 'Duration' column is filled with the match
            # *timestamp*, not a duration — looks like a mismatch; confirm.
            match_data_to_context.append(match['timestamp'])
            context['matches'].append(match_data_to_context)
        return render(request, 'requestmatchhistory.html', context)
    except KeyError:
        # Unknown summoner or malformed API response.
        context['success'] = 'false'
        return render(request, 'requestmatchhistory.html', context)
def getcurrentgame(request):
    """Render the current-game lookup form page."""
    return render(request, 'getcurrentgame.html', {})
def requestcurrentgame(request):
    """Show a live-game lobby: both teams with champion, tier, wins, masteries.

    Renders general-error.html when the summoner is unknown or not in game.
    """
    #receive data from the template
    template_form = request.POST['requestcurrentgame']
    summoner_name = str(template_form)
    summoner_info = searchSummonnerId(summoner_name)
    context = {}
    context2 = {}
    # check if the the player name was found in the lol database (1)
    if summoner_info['success'] == 1:
        url = 'https://na.api.pvp.net/observer-mode/rest/consumer/getSpectatorGameInfo/'+ settings.LOL_PLATFORM_ID +'/'+ str(summoner_info['id']) +'?api_key=' + settings.LOL_API_KEY
        resp = requests.get(url=url)
        # check if this player is currently in game (2)
        if resp.status_code == 200:
            data = json.loads(resp.text)
            data_formated={}
            #search for the participant names based on their IDs
            players_ids_list = []
            for player in data['participants']:
                players_ids_list.append(player['summonerId'])
            player_objects = searchSummonerName(players_ids_list)
            player_ranks = searchSummonerRank(players_ids_list)
            player_stats, stat_success = searchSummonerStats(players_ids_list)
            # fill the data array with the name
            for player in player_objects:
                data_formated[player] ={}
                data_formated[player]['name'] = player_objects[player]['name']
            # Record each participant's team id and, when the stats call
            # succeeded, their 'Unranked' win count.
            for player in data['participants']:
                data_formated[str(player['summonerId'])]['side'] = player['teamId']
                if stat_success == 1:
                    for stat in player_stats[int(player['summonerId'])]['playerStatSummaries']:
                        if stat['playerStatSummaryType'] == 'Unranked':
                            data_formated[str(player['summonerId'])]['wins'] = stat['wins']
            # fill the data array with the tier
            for player in player_ranks:
                data_formated[player]['tier'] = player_ranks[player][0]['tier']
            #fill the data array with the champion name
            for player in data['participants']:
                heroes_ids = player['championId']
                champion = Hero.objects.filter(id_riot = heroes_ids)
                data_formated[str(player['summonerId'])]['champion'] = champion[0].__str__()
                champion_name_process = champion[0].__key__()
                # Replace the plain name with an HTML snippet: champion icon
                # plus a link to champion.gg.
                data_formated[str(player['summonerId'])]['champion'] = '<span style="margin-left: 12px;"><img style="margin-right: 6px;" src="http://ddragon.leagueoflegends.com/cdn/6.6.1/img/champion/' + champion_name_process + '.png" class="rank--img tier-img"><a style="color: rgba(0,0,0,.87);" href="http://champion.gg/champion/' + champion_name_process + '">' + data_formated[str(player['summonerId'])]['champion'] + '</a><span>'
                # Wrap the tier in an HTML badge; the bare except handles
                # players with no 'tier' entry (unranked) via KeyError.
                try:
                    data_formated[str(player['summonerId'])]['tier']
                    data_formated[str(player['summonerId'])]['tier'] = '<span style="margin-left: 12px;"><img style="margin-right: 2px;" src="'+ searchTierImage(data_formated[str(player['summonerId'])]['tier']) +'" class="rank--img tier-img">' + data_formated[str(player['summonerId'])]['tier'] + '<span>'
                except:
                    data_formated[str(player['summonerId'])]['tier'] = 'UNRANKED'
                    data_formated[str(player['summonerId'])]['tier'] = '<span style="margin-left: 12px;"><img style="margin-right: 2px;" src="'+ searchTierImage(data_formated[str(player['summonerId'])]['tier']) +'" class="rank--img tier-img">' + data_formated[str(player['summonerId'])]['tier'] + '<span>'
            mastery_set = {}
            # fill the data array with the masteries stats
            for player in data['participants']:
                mastery_set[1] = 0
                mastery_set[2] = 0
                mastery_set[3] = 0
                masteries = player['masteries']
                for diff_mastery in masteries:
                    mastery_object = mastery.objects.get(id_riot = diff_mastery['masteryId'])
                    mastery_set[mastery_object.__position__()] = mastery_set[mastery_object.__position__()] + diff_mastery['rank']
                # Shown as position-1 / position-3 / position-2 point totals —
                # presumably the three mastery trees; confirm the ordering.
                data_formated[str(player['summonerId'])]['masteries'] = str(mastery_set[1]) + ' / ' + str(mastery_set[3]) + ' / ' +str(mastery_set[2])
            # Team 100 (blue side) table.
            context['header'] = []
            context['header'].append('Champion')
            context['header'].append('Name')
            context['header'].append('Tier')
            if stat_success == 1:
                context['header'].append('Wins')
            context['header'].append('Masteries')
            context['players'] = []
            player_data_to_context = []
            for player in data_formated:
                if data_formated[player]['side'] == 100:
                    player_data_to_context = []
                    player_data_to_context.append(data_formated[player]['champion'])
                    player_data_to_context.append(data_formated[player]['name'])
                    player_data_to_context.append(data_formated[player]['tier'])
                    if stat_success == 1:
                        player_data_to_context.append(data_formated[player]['wins'])
                    player_data_to_context.append(data_formated[player]['masteries'])
                    context['players'].append(player_data_to_context)
            # Team 200 (red side) table.
            context2['header'] = []
            context2['header'].append('Champion')
            context2['header'].append('Name')
            context2['header'].append('Tier')
            if stat_success == 1:
                context2['header'].append('Wins')
            context2['header'].append('Masteries')
            context2['players'] = []
            player_data_to_context = []
            for player in data_formated:
                if data_formated[player]['side'] == 200:
                    player_data_to_context = []
                    player_data_to_context.append(data_formated[player]['champion'])
                    player_data_to_context.append(data_formated[player]['name'])
                    player_data_to_context.append(data_formated[player]['tier'])
                    if stat_success == 1:
                        player_data_to_context.append(data_formated[player]['wins'])
                    player_data_to_context.append(data_formated[player]['masteries'])
                    context2['players'].append(player_data_to_context)
            return render(request, 'requestcurrentgame.html', {'context': context, 'context2': context2, 'summoner_name': summoner_name, 'summoner_info': summoner_info})
        # check if this player is currently in game (2)
        else:
            return render(request, 'general-error.html', context)
    # check if the the player name was found in the lol database (1)
    else:
        return render(request, 'general-error.html', context)
#settings.LOL_PLATFORM_ID
#str(summoner_info['id'])
#settings.LOL_API_KEY
#id do bazetinho 7523004
#id do fafis 454451
#id do leo 514850
``` |
[
{
"content": "Provide a verbatim copy of the code:\n```python\n\"\"\"\nDjango settings for mmsn project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.6/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.6/ref/settings/\n... | [
{
"content": "Provide a verbatim copy of the code:\n<|memory_start|>```python\n\"\"\"\nDjango settings for mmsn project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.6/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.6... | ```python
"""
Django settings for mmsn project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import socket
import dj_database_url
from django.utils.crypto import get_random_string
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'd%ehat=&bb5pr+=unsxmpxq(57@1nx+okkyni3n9lk!a#pduq&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'shiptrak', 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
#"django.core.context_processors.request",
#"django.contrib.auth.context_processors.auth",
],
},
},
]
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'grappelli',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'shiptrak',
)
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
#'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'mmsn.urls'
WSGI_APPLICATION = 'mmsn.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
import os
STATIC_ROOT = 'staticfiles'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles/')
STATIC_URL = '/static/'
STATICFILES_DIRS = ()
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'django.contrib.staticfiles.finders.FileSystemFinder',
)
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'default': {
'handlers': ['console'],
'level': 'DEBUG',
}
}
}
GOOGLE_MAPS_API_KEY = ''
CACHE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'callsign_data'))
WINLINK_API_URL = "http://cms.winlink.org/"
h = socket.gethostname()
try:
(h, domain) = h.split('.', 2)
print("from mmsn.settings.{0} import *".format(h))
exec(("from mmsn.settings.{0} import *".format(h)), locals())
print("Overriding production configuration with local settings for host {}".format(h))
except Exception as e:
SECRET_KEY = get_random_string(50, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
ALLOWED_HOSTS = ['*']
CSRF_TRUSTED_ORIGINS = ['*']
SESSION_COOKIE_SECURE = False
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
DATABASES = {
'default': dj_database_url.config()
}
DEBUG = False
GOOGLE_MAPS_API_KEY = 'AIzaSyDHRIu1CdX0O95_bTdyyiom4Z84uzKG0bw'
GOOGLE_ANALYTICS_ID = 'UA-52163451-1'
``` |
[
{
"content": "Here is some code:\n```python\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, You can obtain one at http://mozilla.org/MPL/2.0/.\n\n\"\"\"\"\nthese are the rules that transform a raw crash into a... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, You can obtain one at http://mozilla.org/MPL/2.0/.\n\n\"\"\"\"\nthese are the rules that transform a ... | ```python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""""
these are the rules that transform a raw crash into a processed crash
"""
from socorro.lib.ver_tools import normalize
from socorro.lib.util import DotDict
from sys import maxint
#==============================================================================
class ProcessedTransformRule(object):
    """the base class for Support Rules. It provides the framework for the
    rules 'predicate', 'action', and 'version' as well as utilities to help
    rules do their jobs.

    NOTE: this module uses Python 2 syntax (``except Exception, x``)."""
    #--------------------------------------------------------------------------
    def predicate(self, raw_crash, processed_crash, processor):
        """the default predicate for processed_transform invokes any derived
        _predicate function, trapping any exceptions raised in the process. We
        are obligated to catch these exceptions to give subsequent rules the
        opportunity act. An error during the predicate application is a
        failure of the rule, not a failure of the classification system itself
        """
        try:
            return self._predicate(raw_crash, processed_crash, processor)
        except Exception, x:
            # Log (with traceback) and reject this rule rather than abort
            # the whole transform pipeline.
            processor.config.logger.debug(
                'processed_transform: %s predicate rejection - consideration '
                'of %s failed because of "%s"',
                self.__class__,
                raw_crash.get('uuid', 'unknown uuid'),
                x,
                exc_info=True
            )
            return False

    #--------------------------------------------------------------------------
    def _predicate(self, raw_crash, processed_crash, processor):
        """The default processed_transform predicate just returns True. We
        want all the processed_transform rules to run.

        parameters:
            raw_crash - a mapping representing the raw crash data originally
                        submitted by the client
            processed_crash - the ultimate result of the processor, this is the
                              analyzed version of a crash. It contains the
                              output of the MDSW program for each of the dumps
                              within the crash.
            processor - a reference to the processor object that is assigned
                        to working on the current crash. This object contains
                        resources that might be useful to a classifier rule.
                        'processor.config' is the configuration for the
                        processor in which database connection parameters can
                        be found. 'processor.config.logger' is useful for any
                        logging of debug information.
                        'processor.c_signature_tool' or
                        'processor.java_signature_tool' contain utilities that
                        might be useful during classification.

        returns:
            True - this rule should be applied
            False - this rule should not be applied
        """
        return True

    #--------------------------------------------------------------------------
    def action(self, raw_crash, processed_crash, processor):
        """the default action for processed_transform invokes any derived
        _action function, trapping any exceptions raised in the process. We
        are obligated to catch these exceptions to give subsequent rules the
        opportunity act and perhaps (mitigate the error). An error during the
        action application is a failure of the rule, not a failure of the
        classification system itself."""
        try:
            return self._action(raw_crash, processed_crash, processor)
        except KeyError, x:
            # Missing-key failures are common enough to log without a
            # traceback.
            processor.config.logger.debug(
                'processed_transform: %s action failure - %s failed because of '
                '"%s"',
                self.__class__,
                raw_crash.get('uuid', 'unknown uuid'),
                x,
            )
        except Exception, x:
            processor.config.logger.debug(
                'processed_transform: %s action failure - %s failed because of '
                '"%s"',
                self.__class__,
                raw_crash.get('uuid', 'unknown uuid'),
                x,
                exc_info=True
            )
        return False

    #--------------------------------------------------------------------------
    def _action(self, raw_crash, processed_crash, processor):
        """Rules derived from this base class ought to override this method
        with an actual classification rule. Successful application of this
        method should include a call to '_add_classification'.

        parameters:
            raw_crash - a mapping representing the raw crash data originally
                        submitted by the client
            processed_crash - the ultimate result of the processor, this is the
                              analyzed version of a crash. It contains the
                              output of the MDSW program for each of the dumps
                              within the crash.
            processor - a reference to the processor object that is assigned
                        to working on the current crash. This object contains
                        resources that might be useful to a classifier rule.
                        'processor.config' is the configuration for the
                        processor in which database connection parameters can
                        be found. 'processor.config.logger' is useful for any
                        logging of debug information.
                        'processor.c_signature_tool' or
                        'processor.java_signature_tool' contain utilities that
                        might be useful during classification.

        returns:
            True - this rule was applied successfully and no further rules
                   should be applied
            False - this rule did not succeed and further rules should be
                    tried
        """
        return True

    #--------------------------------------------------------------------------
    def version(self):
        """This method should be overridden in a base class."""
        return '0.0'
#==============================================================================
class OOMSignature(ProcessedTransformRule):
    """To satisfy Bug 1007530, this rule will modify the signature to
    tag OOM (out of memory) crashes"""

    signature_fragments = (
        'NS_ABORT_OOM',
        'mozalloc_handle_oom',
        'CrashAtUnhandlableOOM'
    )

    #--------------------------------------------------------------------------
    def version(self):
        return '1.0'

    #--------------------------------------------------------------------------
    def _predicate(self, raw_crash, processed_crash, processor):
        # An explicit allocation size, or a known OOM frame in the
        # signature, marks the crash as out-of-memory.
        if 'OOMAllocationSize' in raw_crash:
            return True
        return any(
            fragment in processed_crash.signature
            for fragment in self.signature_fragments
        )

    #--------------------------------------------------------------------------
    def _action(self, raw_crash, processed_crash, processor):
        # Preserve the untagged signature before rewriting it.
        processed_crash.original_signature = processed_crash.signature
        try:
            size = int(raw_crash.OOMAllocationSize)
        except (TypeError, AttributeError, KeyError):
            # No usable allocation size: tag as unknown.
            processed_crash.signature = (
                "OOM | unknown | " + processed_crash.signature
            )
            return True
        if size <= 262144:  # 256K
            processed_crash.signature = "OOM | small"
        else:
            processed_crash.signature = (
                "OOM | large | " + processed_crash.signature
            )
        return True
#==============================================================================
class SigTrunc(ProcessedTransformRule):
    """ensure that the signature is never longer than 255 characters"""

    #--------------------------------------------------------------------------
    def version(self):
        return '1.0'

    #--------------------------------------------------------------------------
    def _predicate(self, raw_crash, processed_crash, processor):
        # Only fire for signatures that would overflow a 255-char column.
        return len(processed_crash.signature) > 255

    #--------------------------------------------------------------------------
    def _action(self, raw_crash, processed_crash, processor):
        # Keep the first 252 characters and append an ellipsis (255 total).
        processed_crash.signature = "%s..." % processed_crash.signature[:252]
        return True
#------------------------------------------------------------------------------
# the following tuple of tuples is a structure for loading rules into the
# TransformRules system. The tuples take the form:
# predicate_function, predicate_args, predicate_kwargs,
# action_function, action_args, action_kwargs.
#
# The args and kwargs components are additional information that a predicate
# or an action might need to have to do its job. Providing values for args
# or kwargs essentially acts in a manner similar to functools.partial.
# When the predicate or action functions are invoked, these args and kwags
# values will be passed into the function along with the raw_crash,
# processed_crash and processor objects.
# Each rule class serves as both its own predicate and its own action,
# with no extra args/kwargs.
default_rules = (
    (OOMSignature, (), {}, OOMSignature, (), {}),
    (SigTrunc, (), {}, SigTrunc, (), {}),
)
``` |
[
{
"content": "Write the code verbatim:\n```python\nfrom corehq.apps.reports.dont_use.fields import ReportSelectField\nfrom corehq.apps.reports.filters.users import SelectCaseOwnerFilter\nfrom django.utils.translation import ugettext_noop\nfrom django.utils.translation import ugettext as _\nfrom corehq.apps.es i... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\nfrom corehq.apps.reports.dont_use.fields import ReportSelectField\nfrom corehq.apps.reports.filters.users import SelectCaseOwnerFilter\nfrom django.utils.translation import ugettext_noop\nfrom django.utils.translation import ugettext as _\nfrom ... | ```python
from corehq.apps.reports.dont_use.fields import ReportSelectField
from corehq.apps.reports.filters.users import SelectCaseOwnerFilter
from django.utils.translation import ugettext_noop
from django.utils.translation import ugettext as _
from corehq.apps.es import CaseES
def _get_blocks(domain):
    """Return the distinct block names for mother/baby cases in *domain*."""
    es_query = CaseES('report_cases')
    es_query = es_query.domain(domain)
    es_query = es_query.case_type(['pregnant_mother', 'baby'])
    # size(0): we only want the aggregation buckets, not the hits.
    es_query = es_query.size(0)
    es_query = es_query.terms_aggregation('block.#value', 'block')
    return es_query.run().aggregations.block.keys
class SelectBlockField(ReportSelectField):
    """Dropdown filter listing every block name found in the domain."""
    slug = "block"
    name = ugettext_noop("Name of the Block")
    cssId = "opened_closed"
    cssClasses = "span3"

    def update_params(self):
        self.selected = self.request.GET.get(self.slug, '')
        self.options = [
            dict(val=item, text="%s" % item)
            for item in _get_blocks(self.domain)
        ]
        self.default_option = _("Select Block")
class SelectSubCenterField(ReportSelectField):
    """Static dropdown for choosing a sub center."""
    slug = "sub_center"
    name = ugettext_noop("Sub Center")
    cssId = "opened_closed"
    cssClasses = "span3"
    default_option = "Select Sub Center"
    # No options are populated yet — presumably filled in elsewhere or in a
    # later revision; TODO confirm.
    options = []
class SelectASHAField(SelectCaseOwnerFilter):
    """Case-owner filter relabelled for selecting an ASHA worker."""
    name = ugettext_noop("ASHA")
    default_option = ugettext_noop("Type ASHA name")
``` |
[
{
"content": "```python\nimport copy\n\nfrom nose import with_setup\n\nfrom pybbn.graph.variable import Variable\n\n\ndef setup():\n \"\"\"\n Setup.\n :return: None.\n \"\"\"\n pass\n\n\ndef teardown():\n \"\"\"\n Teardown.\n :return: None.\n \"\"\"\n pass\n\n\n@with_setup(setup, t... | [
{
"content": "<|memory_start|>```python\nimport copy\n\nfrom nose import with_setup\n\nfrom pybbn.graph.variable import Variable\n\n\ndef setup():\n \"\"\"\n Setup.\n :return: None.\n \"\"\"\n pass\n\n\ndef teardown():\n \"\"\"\n Teardown.\n :return: None.\n \"\"\"\n pass\n\n\n@wit... | ```python
import copy
from nose import with_setup
from pybbn.graph.variable import Variable
def setup():
    """Per-test setup hook; nothing to initialize.

    :return: None.
    """
    return None
def teardown():
    """Per-test teardown hook; nothing to clean up.

    :return: None.
    """
    return None
@with_setup(setup, teardown)
def test_copy():
    """Shallow copy: equal fields, but the values list is shared.

    :return: None.
    """
    original = Variable(0, 'a', ['t', 'f'])
    clone = copy.copy(original)
    assert original.id == clone.id
    assert original.name == clone.name
    assert len(original.values) == len(clone.values)
    for left, right in zip(original.values, clone.values):
        assert left == right
    # A shallow copy aliases the mutable values list, so a mutation through
    # either object is visible through both.
    original.values[0] = 'true'
    assert original.values[0] == clone.values[0]
@with_setup(setup, teardown)
def test_deep_copy():
    """Deep copy: equal fields, and the values list is independent.

    :return: None.
    """
    original = Variable(0, 'a', ['t', 'f'])
    clone = copy.deepcopy(original)
    assert original.id == clone.id
    assert original.name == clone.name
    assert len(original.values) == len(clone.values)
    for left, right in zip(original.values, clone.values):
        assert left == right
    # A deep copy duplicates the values list, so a mutation on one side
    # must NOT show up on the other.
    original.values[0] = 'true'
    assert original.values[0] != clone.values[0]
``` |
[
{
"content": "Repeat the full code snippet:\n```python\nimport json\nimport sys\nfrom tornado import gen\n\nfrom tornado.httpclient import AsyncHTTPClient, HTTPError\n\nfrom tornado.log import app_log\n\nfrom vizydrop.sdk.source import DataSource, SourceSchema, SourceFilter\nfrom vizydrop.fields import *\n\nRES... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\nimport json\nimport sys\nfrom tornado import gen\n\nfrom tornado.httpclient import AsyncHTTPClient, HTTPError\n\nfrom tornado.log import app_log\n\nfrom vizydrop.sdk.source import DataSource, SourceSchema, SourceFilter\nfrom vizydrop.fields... | ```python
import json
import sys
from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPError
from tornado.log import app_log
from vizydrop.sdk.source import DataSource, SourceSchema, SourceFilter
from vizydrop.fields import *
# Hard cap on the in-memory card payload before we refuse with HTTP 413.
RESPONSE_SIZE_LIMIT = 10 # MB
class TrelloCardSourceFilters(SourceFilter):
    """Filter options for the Trello cards source: which boards (required)
    and which lists within them (optional)."""

    def get_board_options(account, **kwargs):
        # Tornado generator-coroutine: fetch the authenticated member's
        # boards and map them to {value, title} option dicts.
        client = AsyncHTTPClient()
        req = account.get_request("https://api.trello.com/1/members/me/boards")
        response = yield client.fetch(req)
        options = json.loads(response.body.decode('utf-8'))
        ret = []
        for option in options:
            ret.append({"value": option['id'], "title": option['name']})
        return ret

    def get_list_options(account, boards, **kwargs):
        # Fetch the lists of every selected board; `boards` may arrive as a
        # comma-separated string from the UI rather than a sequence.
        client = AsyncHTTPClient()
        ret = []
        if isinstance(boards, str):
            boards = boards.split(',')
        for board in boards:
            req = account.get_request("https://api.trello.com/1/boards/{}/lists".format(board))
            response = yield client.fetch(req)
            options = json.loads(response.body.decode('utf-8'))
            for option in options:
                ret.append({"value": option['id'], "title": option['name']})
        return ret

    boards = MultiListField(name="Board", description="Trello board", optional=False, get_options=get_board_options)
    lists = MultiListField(name="List", description="Board list", optional=True, get_options=get_list_options)
class TrelloCardSource(DataSource):
    """Vizydrop data source that exports the cards of selected Trello boards."""

    class Meta:
        identifier = "cards"
        name = "Cards"
        tags = ["Kanban", "Cards", ]
        description = "List of cards"
        filter = TrelloCardSourceFilters

    class Schema(SourceSchema):
        # One row per card; field names match the Trello card JSON keys.
        id = IDField(description="Card ID")
        name = TextField(name="Name", description="Name of the card")
        board_name = TextField(name="Board Name", description="Name of the board")
        closed = BooleanField(name="Closed", description="Is the card closed?")
        desc = TextField(name="Description", description="Card Description")
        dateLastActivity = DateTimeField(name="Last Activity Date", description="Date/Time of last activity")
        pos = DecimalField(name="Position", description="Numeric representation of card's priority")
        due = DateTimeField(name="Due Date", description="Due date for card")
        labels = TextField(name="Labels", description="List of labels")
        list = TextField(name="List", description="Current list/state of the card")

    @classmethod
    @gen.coroutine
    def get_data(cls, account, source_filter, limit=100, skip=0):
        """
        Gathers card information from Trello

        GET https://api.trello.com/1/boards/[board_id]/cards

        Returns a JSON string of schema-formatted cards, or an error dict
        when a Trello request fails. NOTE(review): `limit`/`skip` are part
        of the DataSource interface but are not applied here.
        """
        if not account:
            raise ValueError('cannot gather cards without an account')
        client = AsyncHTTPClient()
        app_log.info("Start retrieval of cards")
        cards = []
        for board in source_filter.boards:
            app_log.info("Retrieving board {}".format(board))
            # Resolve the board's display name first.
            try:
                resp = yield client.fetch(account.get_request("https://api.trello.com/1/boards/{}/name".format(board)))
            except HTTPError as err:
                return {"code": err.code, "reason": err.response.reason, "error": err.response.body.decode('utf-8')}
            else:
                board_name = json.loads(resp.body.decode('utf-8'))['_value']
            uri = "https://api.trello.com/1/boards/{}/cards".format(board)
            req = account.get_request(uri)
            try:
                response = yield client.fetch(req)
            except HTTPError as e:
                return {
                    "code": e.code,
                    "reason": e.response.reason,
                    "error": e.response.body.decode('utf-8')
                }
            if response.code != 200:
                return {
                    "code": response.code,
                    "reason": response.reason,
                    "error": response.error
                }
            data = response.body.decode('utf-8')
            resp_obj = json.loads(data)
            for obj in resp_obj:
                obj['board_name'] = board_name
                # Keep only cards from the selected lists when a list filter
                # was supplied; otherwise keep every card on the board.
                if hasattr(source_filter,
                           'lists') and source_filter.lists is not None and source_filter.lists.__len__() > 0:
                    if obj['idList'] in source_filter.lists:
                        cards.append(obj)
                else:
                    cards.append(obj)
            app_log.info("Board {} retrieved {} cards".format(board, cards.__len__()))
        # check our response size (sys.getsizeof is shallow — a rough guard)
        if sys.getsizeof(cards) > RESPONSE_SIZE_LIMIT * 1000000:
            app_log.warn("Request for {} exceeds size limit".format(account._id))
            raise HTTPError(413)
        list_ids = set([el['idList'] for el in cards])
        boards = set([el['idBoard'] for el in cards])
        list_name_map = {}
        # grab our list names
        app_log.info("Starting resolution of lists")
        for board in boards:
            app_log.info("Resolving list names for {}".format(board))
            uri = "https://api.trello.com/1/boards/{}/lists".format(board)
            req = account.get_request(uri)
            try:
                response = yield client.fetch(req)
            except HTTPError as e:
                return {
                    "code": e.code,
                    "reason": e.response.reason,
                    "error": e.response.body.decode('utf-8')
                }
            lists = json.loads(response.body.decode('utf-8'))
            for list in lists:
                if list['id'] not in list_ids:
                    continue
                list_name_map[list['id']] = list['name']
            app_log.info("Board {} resolution yielded {} lists".format(board, lists.__len__()))
        # Attach the resolved list name and flatten labels to a CSV string.
        for card in cards:
            card['list'] = list_name_map[card['idList']]
            card['labels'] = ','.join(label['name'] for label in card['labels'])
        reply_data = cls.format_data_to_schema(cards)
        app_log.info("Source complete, grabbed {} cards".format(cards.__len__()))
        return json.dumps(reply_data)
``` |
[
{
"content": "```python\n# Copyright 2014-2016 Ivan Kravets <me@ikravets.com>\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0... | [
{
"content": "<|memory_start|>```python\n# Copyright 2014-2016 Ivan Kravets <me@ikravets.com>\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/lice... | ```python
# Copyright 2014-2016 Ivan Kravets <me@ikravets.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Energia

Energia Wiring-based framework enables pretty much anyone to start easily
creating microcontroller-based projects and applications. Its easy-to-use
libraries and functions provide developers of all experience levels to start
blinking LEDs, buzzing buzzers and sensing sensors more quickly than ever
before.

http://energia.nu/reference/
"""

from os.path import join

from SCons.Script import DefaultEnvironment

env = DefaultEnvironment()

env.Replace(
    PLATFORMFW_DIR=join("$PIOPACKAGES_DIR", "framework-energia${PLATFORM[2:]}")
)

# Read the framework version from version.txt ("1.6.2" -> 162).
# FIX: use a context manager so the file handle is closed deterministically;
# the original `open(...).read()` relied on the garbage collector.
with open(join(env.subst("$PLATFORMFW_DIR"), "version.txt")) as version_file:
    ENERGIA_VERSION = int(version_file.read().replace(".", "").strip())

# include board variant
env.VariantDirWrap(
    join("$BUILD_DIR", "FrameworkEnergiaVariant"),
    join("$PLATFORMFW_DIR", "variants", "${BOARD_OPTIONS['build']['variant']}")
)

env.Append(
    CPPDEFINES=[
        "ARDUINO=101",
        "ENERGIA=%d" % ENERGIA_VERSION
    ],

    CPPPATH=[
        join("$BUILD_DIR", "FrameworkEnergia"),
        join("$BUILD_DIR", "FrameworkEnergiaVariant")
    ]
)

# The lm4f core needs an explicit linker entry symbol.
if env.get("BOARD_OPTIONS", {}).get("build", {}).get("core") == "lm4f":
    env.Append(
        LINKFLAGS=["-Wl,--entry=ResetISR"]
    )

#
# Target: Build Core Library
#

libs = []

libs.append(env.BuildLibrary(
    join("$BUILD_DIR", "FrameworkEnergia"),
    join("$PLATFORMFW_DIR", "cores", "${BOARD_OPTIONS['build']['core']}")
))

env.Append(LIBS=libs)
``` |
[
{
"content": "Here is a code snippet:\n```python\n\"\"\"Support for monitoring a Smappee energy sensor.\"\"\"\nfrom datetime import timedelta\nimport logging\n\nfrom homeassistant.const import ENERGY_KILO_WATT_HOUR, POWER_WATT\nfrom homeassistant.helpers.entity import Entity\n\nfrom . import DATA_SMAPPEE\n\n_LO... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\n\"\"\"Support for monitoring a Smappee energy sensor.\"\"\"\nfrom datetime import timedelta\nimport logging\n\nfrom homeassistant.const import ENERGY_KILO_WATT_HOUR, POWER_WATT\nfrom homeassistant.helpers.entity import Entity\n\nfrom . import DAT... | ```python
"""Support for monitoring a Smappee energy sensor."""
from datetime import timedelta
import logging
from homeassistant.const import ENERGY_KILO_WATT_HOUR, POWER_WATT
from homeassistant.helpers.entity import Entity
from . import DATA_SMAPPEE
_LOGGER = logging.getLogger(__name__)

# Prefix applied to every entity name built in SmappeeSensor.name.
SENSOR_PREFIX = "Smappee"
# Each entry maps a sensor key to a 5-element list:
#   [0] friendly name, [1] mdi icon, [2] sensor class ("local" / "remote" /
#   "water" — matched by membership tests in setup_platform and used as
#   self._type), [3] unit of measurement, [4] field name in the Smappee data.
SENSOR_TYPES = {
    "solar": ["Solar", "mdi:white-balance-sunny", "local", POWER_WATT, "solar"],
    "active_power": [
        "Active Power",
        "mdi:power-plug",
        "local",
        POWER_WATT,
        "active_power",
    ],
    "current": ["Current", "mdi:gauge", "local", "A", "current"],
    "voltage": ["Voltage", "mdi:gauge", "local", "V", "voltage"],
    "active_cosfi": ["Power Factor", "mdi:gauge", "local", "%", "active_cosfi"],
    "alwayson_today": [
        "Always On Today",
        "mdi:gauge",
        "remote",
        ENERGY_KILO_WATT_HOUR,
        "alwaysOn",
    ],
    "solar_today": [
        "Solar Today",
        "mdi:white-balance-sunny",
        "remote",
        ENERGY_KILO_WATT_HOUR,
        "solar",
    ],
    "power_today": [
        "Power Today",
        "mdi:power-plug",
        "remote",
        ENERGY_KILO_WATT_HOUR,
        "consumption",
    ],
    "water_sensor_1": ["Water Sensor 1", "mdi:water", "water", "m3", "value1"],
    "water_sensor_2": ["Water Sensor 2", "mdi:water", "water", "m3", "value2"],
    "water_sensor_temperature": [
        "Water Sensor Temperature",
        "mdi:temperature-celsius",
        "water",
        "°",
        "temperature",
    ],
    "water_sensor_humidity": [
        "Water Sensor Humidity",
        "mdi:water-percent",
        "water",
        "%",
        "humidity",
    ],
    "water_sensor_battery": [
        "Water Sensor Battery",
        "mdi:battery",
        "water",
        "%",
        "battery",
    ],
}

# Poll cadence for SmappeeSensor.update().
SCAN_INTERVAL = timedelta(seconds=30)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Smappee sensor."""
    smappee = hass.data[DATA_SMAPPEE]

    entities = []
    if smappee.is_remote_active:
        # Remote (cloud) connection: one entity per remote sensor per
        # location, plus one entity per discovered water sensor.
        for location_id in smappee.locations:
            for sensor, attributes in SENSOR_TYPES.items():
                if "remote" in attributes:
                    entities.append(
                        SmappeeSensor(smappee, location_id, sensor, attributes)
                    )
                elif "water" in attributes:
                    for item in smappee.info[location_id].get("sensors"):
                        entities.append(
                            SmappeeSensor(
                                smappee,
                                location_id,
                                "{}:{}".format(sensor, item.get("id")),
                                attributes,
                            )
                        )

    if smappee.is_local_active:
        # Local connection: local sensors get a location only when the
        # remote connection is active too.
        for location_id in smappee.locations:
            for sensor, attributes in SENSOR_TYPES.items():
                if "local" not in attributes:
                    continue
                if smappee.is_remote_active:
                    entities.append(
                        SmappeeSensor(smappee, location_id, sensor, attributes)
                    )
                else:
                    entities.append(SmappeeSensor(smappee, None, sensor, attributes))

    add_entities(entities, True)
class SmappeeSensor(Entity):
    """Implementation of a Smappee sensor.

    One instance per (location, sensor-type) pair; `sensor` keys of the
    form "<type>:<id>" are used for water sensors (see setup_platform).
    """

    def __init__(self, smappee, location_id, sensor, attributes):
        """Initialize the Smappee sensor.

        smappee: shared Smappee API wrapper from hass.data[DATA_SMAPPEE].
        location_id: service-location id, or None for local-only setups.
        sensor: SENSOR_TYPES key (water sensors: "<key>:<sensor_id>").
        attributes: the matching SENSOR_TYPES entry
            [name, icon, type, unit, smappee field].
        """
        self._smappee = smappee
        self._location_id = location_id
        self._attributes = attributes
        self._sensor = sensor
        self.data = None
        self._state = None
        # Unpack the SENSOR_TYPES entry into named fields.
        self._name = self._attributes[0]
        self._icon = self._attributes[1]
        self._type = self._attributes[2]
        self._unit_of_measurement = self._attributes[3]
        self._smappe_name = self._attributes[4]

    @property
    def name(self):
        """Return the name of the sensor."""
        if self._location_id:
            location_name = self._smappee.locations[self._location_id]
        else:
            # Local-only setups have no service location.
            location_name = "Local"
        return "{} {} {}".format(SENSOR_PREFIX, location_name, self._name)

    @property
    def icon(self):
        """Icon to use in the frontend."""
        return self._icon

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        attr = {}
        if self._location_id:
            attr["Location Id"] = self._location_id
            attr["Location Name"] = self._smappee.locations[self._location_id]
        return attr

    def update(self):
        """Get the latest data from Smappee and update the state."""
        self._smappee.update()

        # Remote daily-energy sensors: use the newest consumption record.
        if self._sensor in ["alwayson_today", "solar_today", "power_today"]:
            data = self._smappee.consumption[self._location_id]
            if data:
                consumption = data.get("consumptions")[-1]
                _LOGGER.debug("%s %s", self._sensor, consumption)
                value = consumption.get(self._smappe_name)
                # presumably Wh -> kWh — TODO confirm against the API
                self._state = round(value / 1000, 2)
        elif self._sensor == "active_cosfi":
            cosfi = self._smappee.active_cosfi()
            _LOGGER.debug("%s %s", self._sensor, cosfi)
            # NOTE(review): a reading of exactly 0 is falsy and leaves the
            # previous state untouched — confirm intended.
            if cosfi:
                self._state = round(cosfi, 2)
        elif self._sensor == "current":
            current = self._smappee.active_current()
            _LOGGER.debug("%s %s", self._sensor, current)
            if current:
                self._state = round(current, 2)
        elif self._sensor == "voltage":
            voltage = self._smappee.active_voltage()
            _LOGGER.debug("%s %s", self._sensor, voltage)
            if voltage:
                self._state = round(voltage, 3)
        elif self._sensor == "active_power":
            # Sum instantaneous consumption phases 0-2; readings are keyed
            # by suffix "phaseNActivePower".
            data = self._smappee.instantaneous
            _LOGGER.debug("%s %s", self._sensor, data)
            if data:
                value1 = [
                    float(i["value"])
                    for i in data
                    if i["key"].endswith("phase0ActivePower")
                ]
                value2 = [
                    float(i["value"])
                    for i in data
                    if i["key"].endswith("phase1ActivePower")
                ]
                value3 = [
                    float(i["value"])
                    for i in data
                    if i["key"].endswith("phase2ActivePower")
                ]
                # presumably mW/W scaling by 1000 — TODO confirm units
                active_power = sum(value1 + value2 + value3) / 1000
                self._state = round(active_power, 2)
        elif self._sensor == "solar":
            # Solar production lives on phases 3-5 of the same feed.
            data = self._smappee.instantaneous
            _LOGGER.debug("%s %s", self._sensor, data)
            if data:
                value1 = [
                    float(i["value"])
                    for i in data
                    if i["key"].endswith("phase3ActivePower")
                ]
                value2 = [
                    float(i["value"])
                    for i in data
                    if i["key"].endswith("phase4ActivePower")
                ]
                value3 = [
                    float(i["value"])
                    for i in data
                    if i["key"].endswith("phase5ActivePower")
                ]
                power = sum(value1 + value2 + value3) / 1000
                self._state = round(power, 2)
        elif self._type == "water":
            # Water sensors encode their id in the sensor key ("key:id").
            sensor_name, sensor_id = self._sensor.split(":")
            data = self._smappee.sensor_consumption[self._location_id].get(
                int(sensor_id)
            )
            if data:
                tempdata = data.get("records")
                if tempdata:
                    consumption = tempdata[-1]
                    _LOGGER.debug("%s (%s) %s", sensor_name, sensor_id, consumption)
                    value = consumption.get(self._smappe_name)
                    self._state = value
``` |
[
{
"content": "Replicate the source code:\n```python\n\"\"\" Learned classification model \"\"\"\nimport tensorflow as tf\nfrom PeriodicClassification import ModelConfig as myConfig\nfrom PeriodicClassification import Preprocess as pre\n\n\ndef _model(X, keep_prob):\n # input\n W1 = tf.Variable(tf.random_n... | [
{
"content": "Replicate the source code:\n<|memory_start|>```python\n\"\"\" Learned classification model \"\"\"\nimport tensorflow as tf\nfrom PeriodicClassification import ModelConfig as myConfig\nfrom PeriodicClassification import Preprocess as pre\n\n\ndef _model(X, keep_prob):\n # input\n W1 = tf.Vari... | ```python
""" Learned classification model """
import tensorflow as tf
from PeriodicClassification import ModelConfig as myConfig
from PeriodicClassification import Preprocess as pre
def _model(X, keep_prob):
    """Build the 5-layer classification network.

    X: [batch, INPUT_SIZE] float32 placeholder.
    keep_prob: placeholder with three dropout keep rates
        (index 0: input layer, 1: hidden layers, 2: output layer).
    Returns (hypothesis, saver): the raw output tensor and a Saver over
    every weight/bias so the trained parameters can be restored.
    """
    # Input projection — no activation here, matching the trained graph.
    w_in = tf.Variable(tf.random_normal([myConfig.INPUT_SIZE, myConfig.HIDDEN_SIZE]), name="weight1")
    b_in = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE]))
    layer1 = tf.nn.dropout(tf.matmul(X, w_in) + b_in, keep_prob[0])

    # Three softsign hidden layers of equal width.
    w2 = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE, myConfig.HIDDEN_SIZE]), name="weight2")
    b2 = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE]))
    layer2 = tf.nn.dropout(tf.nn.softsign(tf.matmul(layer1, w2) + b2), keep_prob[1])

    w3 = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE, myConfig.HIDDEN_SIZE]), name="weight3")
    b3 = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE]))
    layer3 = tf.nn.dropout(tf.nn.softsign(tf.matmul(layer2, w3) + b3), keep_prob[1])

    w4 = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE, myConfig.HIDDEN_SIZE]), name="weight4")
    b4 = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE]))
    layer4 = tf.nn.dropout(tf.nn.softsign(tf.matmul(layer3, w4) + b4), keep_prob[1])

    # Output projection.
    w_out = tf.Variable(tf.random_normal([myConfig.HIDDEN_SIZE, myConfig.OUTPUT_SIZE]), name="weight5")
    b_out = tf.Variable(tf.random_normal([myConfig.OUTPUT_SIZE]))
    hypothesis = tf.nn.dropout(tf.nn.softsign(tf.matmul(layer4, w_out) + b_out), keep_prob[2])

    # Saver over every parameter, for checkpoint save/restore.
    saver = tf.train.Saver([w_in, w2, w3, w4, w_out, b_in, b2, b3, b4, b_out])
    return hypothesis, saver
def _classification(hypothesis):
    """Map raw network outputs to predicted class indices (softmax + argmax)."""
    probabilities = tf.nn.softmax(hypothesis)
    return tf.argmax(probabilities, 1)
def _DNN_main(USER_DATA_PATH):
    """Classify the time series at USER_DATA_PATH: True if periodic, else False.

    Loads the series, resizes it to the model input size, restores the
    trained DNN from its checkpoint and runs a single prediction.
    """
    list_time_series = pre._reader(USER_DATA_PATH)
    time_series = pre._resize(list_time_series)
    print(time_series.shape)
    # Placeholders mirror the training graph; keep_prob carries the three
    # dropout keep rates consumed by _model.
    X = tf.placeholder(tf.float32, [None, myConfig.INPUT_SIZE])
    keep_prob = tf.placeholder(tf.float32) #0.1, 0.2, 0.3
    hypo, model_saver = _model(X=X, keep_prob=keep_prob)
    h_predict = _classification(hypothesis=hypo)
    """Initialize"""
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    # NOTE(review): _model's Saver is unused; the checkpoint is restored via
    # the imported meta graph instead — confirm intended.
    saver = tf.train.import_meta_graph(myConfig.SAVED_MODEL_PATH)
    saver.restore(sess, tf.train.latest_checkpoint(myConfig.CHECKPOINT_PATH))
    # keep_prob of 1.0 everywhere disables dropout at inference time.
    t_trained = sess.run([h_predict], feed_dict={X: time_series, keep_prob: [1.0, 1.0, 1.0]})
    print(t_trained[0])
    # Class 1 -> non-periodic, anything else -> periodic.
    if t_trained[0] == 1:
        print('Non periodic')
        return False
    else:
        print('Periodic')
        return True
# Usage Example
# _DNN_main("user's data path")
# NOTE(review): module-level call runs inference at import time against a
# machine-specific absolute path — presumably demo code; confirm before reuse.
_DNN_main("D:/DKE/data/period_classification/시연데이터/ECG_데이터_1.csv")
``` |
[
{
"content": "Reproduce the code exactly as provided (keep formatting):\n```python\nclass Semigroup:\n \"\"\"\n In mathematics, a semigroup is an algebraic structure\n consisting of a set together with an associative binary operation.\n A semigroup generalizes a monoid in that there might not exist ... | [
{
"content": "Reproduce the code exactly as provided (keep formatting):\n<|memory_start|>```python\nclass Semigroup:\n \"\"\"\n In mathematics, a semigroup is an algebraic structure\n consisting of a set together with an associative binary operation.\n A semigroup generalizes a monoid in that there ... | ```python
class Semigroup:
    """Base wrapper for a value combined via an associative binary operation.

    A semigroup is a set closed under an associative operation; unlike a
    monoid it need not have an identity element, and unlike a group its
    elements need not have inverses — hence the name.
    """

    def __init__(self, value):
        # The wrapped value that concrete subclasses combine in concat().
        self.value = value

    def __eq__(self, other) -> bool:
        # Wrappers compare equal when their wrapped values do.
        return other.value == self.value

    def fold(self, fn):
        # Unwrap by applying fn to the contained value.
        return fn(self.value)

    @classmethod
    def neutral(cls):
        # Monoid identity: wrap the subclass-declared neutral_element.
        return cls(cls.neutral_element)
class Sum(Semigroup):
    """Monoid combining two numbers by addition; identity is 0."""

    neutral_element = 0

    def __str__(self) -> str:  # pragma: no cover
        return 'Sum[value={}]'.format(self.value)

    def concat(self, semigroup: 'Sum') -> 'Sum':
        """Combine with another Sum.

        :param semigroup: other semigroup to concat
        :type semigroup: Sum[B]
        :returns: new Sum wrapping the total of both values
        :rtype: Sum[A]
        """
        total = self.value + semigroup.value
        return Sum(total)
class All(Semigroup):
    """Monoid combining two values by logical AND on their truthiness."""

    neutral_element = True

    def __str__(self) -> str:  # pragma: no cover
        return 'All[value={}]'.format(self.value)

    def concat(self, semigroup: 'All') -> 'All':
        """Combine with another All.

        :param semigroup: other semigroup to concat
        :type semigroup: All[B]
        :returns: new All holding the last truthy value, or the first falsy one
        :rtype: All[A | B]
        """
        conjunction = self.value and semigroup.value
        return All(conjunction)
class One(Semigroup):
    """Monoid combining two values by logical OR on their truthiness."""

    neutral_element = False

    def __str__(self) -> str:  # pragma: no cover
        return 'One[value={}]'.format(self.value)

    def concat(self, semigroup):
        """Combine with another One.

        :param semigroup: other semigroup to concat
        :type semigroup: One[B]
        :returns: new One holding the first truthy value, or the last falsy one
        :rtype: One[A | B]
        """
        disjunction = self.value or semigroup.value
        return One(disjunction)
class First(Semigroup):
    """
    First is a Monoid that will always return the first value when 2 First instances are combined.
    """

    def __str__(self) -> str:  # pragma: no cover
        # BUG FIX: the class name was misspelled 'Fist' in the original string.
        return 'First[value={}]'.format(self.value)

    def concat(self, semigroup):
        """
        :param semigroup: other semigroup to concat
        :type semigroup: First[B]
        :returns: new First with first value
        :rtype: First[A]
        """
        return First(self.value)
class Last(Semigroup):
    """Monoid that always keeps the latest of the two combined values."""

    def __str__(self) -> str:  # pragma: no cover
        return 'Last[value={}]'.format(self.value)

    def concat(self, semigroup):
        """Combine with another Last.

        :param semigroup: other semigroup to concat
        :type semigroup: Last[B]
        :returns: new Last wrapping the other semigroup's value
        :rtype: Last[A]
        """
        return Last(semigroup.value)
class Map(Semigroup):
    """Semigroup over a dict whose values are themselves semigroups;
    combining concatenates the entries pairwise by key."""

    def __str__(self) -> str:  # pragma: no cover
        return 'Map[value={}]'.format(self.value)

    def concat(self, semigroup):
        """Combine with another Map.

        :param semigroup: other semigroup to concat
        :type semigroup: Map[B]
        :returns: new Map with every entry concatenated with its counterpart
        :rtype: Map[A]
        """
        merged = {}
        for key, inner in self.value.items():
            merged[key] = inner.concat(semigroup.value[key])
        return Map(merged)
class Max(Semigroup):
    """Monoid combining two numbers by keeping the larger; identity is -inf."""

    neutral_element = -float("inf")

    def __str__(self) -> str:  # pragma: no cover
        return 'Max[value={}]'.format(self.value)

    def concat(self, semigroup):
        """Combine with another Max.

        :param semigroup: other semigroup to concat
        :type semigroup: Max[B]
        :returns: new Max wrapping the larger of the two values
        :rtype: Max[A | B]
        """
        # max() keeps its first argument on ties, so listing semigroup.value
        # first matches the original conditional (which yielded
        # semigroup.value when the values were equal).
        return Max(max(semigroup.value, self.value))
class Min(Semigroup):
    """Monoid combining two numbers by keeping the smaller; identity is +inf."""

    neutral_element = float("inf")

    def __str__(self) -> str:  # pragma: no cover
        return 'Min[value={}]'.format(self.value)

    def concat(self, semigroup):
        """Combine with another Min.

        :param semigroup: other semigroup to concat
        :type semigroup: Min[B]
        :returns: new Min wrapping the smaller of the two values
        :rtype: Min[A | B]
        """
        # min() keeps its first argument on ties, so listing self.value first
        # matches the original `<=` conditional (ties keep self.value).
        return Min(min(self.value, semigroup.value))
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\n# ------------------------------------------------------------------------------\n# Screenshot All Channels\n# ------------------------------------------------------------------------------\n# Will do Screenshots of all Channels and export them... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\n# ------------------------------------------------------------------------------\n# Screenshot All Channels\n# ------------------------------------------------------------------------------\n# Will do Screenshots of all Channels... | ```python
# ------------------------------------------------------------------------------
# Screenshot All Channels
# ------------------------------------------------------------------------------
# Will do Screenshots of all Channels and export them based on the path provided
# in Screenshot Settings
# ------------------------------------------------------------------------------
# http://mari.ideascale.com
# http://cg-cnu.blogspot.in/
# ------------------------------------------------------------------------------
# Written by Sreenivas Alapati, 2014
# ------------------------------------------------------------------------------
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF HE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
import mari
def screenshotAllChannels():
    '''Take a screenshot of every channel of the current geo for the current view.

    Relies on Mari's Incremental Screenshot setting so each triggered
    screenshot lands in its own file. Restores the originally selected
    channel, the cursor and the undo macro even if a screenshot fails.
    '''
    # Idiom fix: compare to None with `is`, not `==`.
    if mari.projects.current() is None:
        mari.utils.message("No project currently open", title = "Error")
        return

    mari.utils.message("Snapshotting multiple Channels requires Incremental Screenshot Setting to be enabled")

    mari.history.startMacro('Snapshot all Channels')
    curGeo = mari.geo.current()
    curChannel = curGeo.currentChannel()
    curCanvas = mari.canvases.current()
    # Hoisted out of the loop: the action object is loop-invariant.
    snapAction = mari.actions.find('/Mari/Canvas/Take Screenshot')

    mari.app.setWaitCursor()
    try:
        for chan in curGeo.channelList():
            curGeo.setCurrentChannel(chan)
            curCanvas.repaint()
            snapAction.trigger()
        # Restore the channel the user had selected.
        curGeo.setCurrentChannel(curChannel)
        curCanvas.repaint()
    finally:
        # Robustness fix: always restore the cursor and close the macro,
        # even if a screenshot raises mid-loop.
        mari.app.restoreCursor()
        mari.history.stopMacro()
    return
``` |
[
{
"content": "Replicate the source code:\n```python\nfrom sympy import (\n symbols, expand, expand_func, nan, oo, Float, conjugate, diff,\n re, im, Abs, O, factorial, exp_polar, polar_lift, gruntz, limit,\n Symbol, I, integrate, S,\n sqrt, sin, cos, sinh, cosh, exp, log, pi, EulerGamma,\n erf, er... | [
{
"content": "Replicate the source code:\n<|memory_start|>```python\nfrom sympy import (\n symbols, expand, expand_func, nan, oo, Float, conjugate, diff,\n re, im, Abs, O, factorial, exp_polar, polar_lift, gruntz, limit,\n Symbol, I, integrate, S,\n sqrt, sin, cos, sinh, cosh, exp, log, pi, EulerGam... | ```python
from sympy import (
symbols, expand, expand_func, nan, oo, Float, conjugate, diff,
re, im, Abs, O, factorial, exp_polar, polar_lift, gruntz, limit,
Symbol, I, integrate, S,
sqrt, sin, cos, sinh, cosh, exp, log, pi, EulerGamma,
erf, erfc, erfi, erf2, erfinv, erfcinv, erf2inv,
gamma, uppergamma, loggamma,
Ei, expint, E1, li, Li, Si, Ci, Shi, Chi,
fresnels, fresnelc,
hyper, meijerg)
from sympy.functions.special.error_functions import _erfs, _eis
from sympy.core.function import ArgumentIndexError
from sympy.utilities.pytest import raises
x, y, z = symbols('x,y,z')
w = Symbol("w", real=True)
n = Symbol("n", integer=True)
def test_erf():
    """Special values, symmetry, rewrites, limits and fdiff of erf."""
    assert erf(nan) == nan

    assert erf(oo) == 1
    assert erf(-oo) == -1

    assert erf(0) == 0

    assert erf(I*oo) == oo*I
    assert erf(-I*oo) == -oo*I

    assert erf(-2) == -erf(2)
    assert erf(-x*y) == -erf(x*y)
    assert erf(-x - y) == -erf(x + y)

    assert erf(erfinv(x)) == x
    assert erf(erfcinv(x)) == 1 - x
    assert erf(erf2inv(0, x)) == x
    assert erf(erf2inv(0, erf(erfcinv(1 - erf(erfinv(x)))))) == x

    assert erf(I).is_real is False
    assert erf(0).is_real is True

    assert conjugate(erf(z)) == erf(conjugate(z))

    assert erf(x).as_leading_term(x) == 2*x/sqrt(pi)
    assert erf(1/x).as_leading_term(x) == erf(1/x)

    assert erf(z).rewrite('uppergamma') == sqrt(z**2)*erf(sqrt(z**2))/z
    assert erf(z).rewrite('erfc') == S.One - erfc(z)
    assert erf(z).rewrite('erfi') == -I*erfi(I*z)
    assert erf(z).rewrite('fresnels') == (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) -
        I*fresnels(z*(1 - I)/sqrt(pi)))
    assert erf(z).rewrite('fresnelc') == (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) -
        I*fresnels(z*(1 - I)/sqrt(pi)))
    assert erf(z).rewrite('hyper') == 2*z*hyper([S.Half], [3*S.Half], -z**2)/sqrt(pi)
    assert erf(z).rewrite('meijerg') == z*meijerg([S.Half], [], [0], [-S.Half], z**2)/sqrt(pi)
    assert erf(z).rewrite('expint') == sqrt(z**2)/z - z*expint(S.Half, z**2)/sqrt(S.Pi)

    assert limit(exp(x)*exp(x**2)*(erf(x + 1/exp(x)) - erf(x)), x, oo) == \
        2/sqrt(pi)
    assert limit((1 - erf(z))*exp(z**2)*z, z, oo) == 1/sqrt(pi)
    assert limit((1 - erf(x))*exp(x**2)*sqrt(pi)*x, x, oo) == 1
    assert limit(((1 - erf(x))*exp(x**2)*sqrt(pi)*x - 1)*2*x**2, x, oo) == -1

    assert erf(x).as_real_imag() == \
        ((erf(re(x) - I*re(x)*Abs(im(x))/Abs(re(x)))/2 +
          erf(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))/2,
          I*(erf(re(x) - I*re(x)*Abs(im(x))/Abs(re(x))) -
          erf(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))) *
          re(x)*Abs(im(x))/(2*im(x)*Abs(re(x)))))

    raises(ArgumentIndexError, lambda: erf(x).fdiff(2))
def test_erf_series():
    """Maclaurin series of erf up to O(x**7)."""
    assert erf(x).series(x, 0, 7) == 2*x/sqrt(pi) - \
        2*x**3/3/sqrt(pi) + x**5/5/sqrt(pi) + O(x**7)
def test_erf_evalf():
    """Numerical evaluation of erf against a reference value."""
    assert abs( erf(Float(2.0)) - 0.995322265 ) < 1E-8  # XXX
def test__erfs():
    """Derivative, series and rewrites of the tractable helper _erfs."""
    assert _erfs(z).diff(z) == -2/sqrt(S.Pi) + 2*z*_erfs(z)

    assert _erfs(1/z).series(z) == \
        z/sqrt(pi) - z**3/(2*sqrt(pi)) + 3*z**5/(4*sqrt(pi)) + O(z**6)

    assert expand(erf(z).rewrite('tractable').diff(z).rewrite('intractable')) \
        == erf(z).diff(z)
    assert _erfs(z).rewrite("intractable") == (-erf(z) + 1)*exp(z**2)
def test_erfc():
    """Special values, rewrites, real/imag split and fdiff of erfc."""
    assert erfc(nan) == nan

    assert erfc(oo) == 0
    assert erfc(-oo) == 2

    assert erfc(0) == 1

    assert erfc(I*oo) == -oo*I
    assert erfc(-I*oo) == oo*I

    assert erfc(-x) == S(2) - erfc(x)
    assert erfc(erfcinv(x)) == x

    assert erfc(I).is_real is False
    assert erfc(0).is_real is True

    assert conjugate(erfc(z)) == erfc(conjugate(z))

    assert erfc(x).as_leading_term(x) == S.One
    assert erfc(1/x).as_leading_term(x) == erfc(1/x)

    assert erfc(z).rewrite('erf') == 1 - erf(z)
    assert erfc(z).rewrite('erfi') == 1 + I*erfi(I*z)
    assert erfc(z).rewrite('fresnels') == 1 - (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) -
        I*fresnels(z*(1 - I)/sqrt(pi)))
    assert erfc(z).rewrite('fresnelc') == 1 - (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) -
        I*fresnels(z*(1 - I)/sqrt(pi)))
    assert erfc(z).rewrite('hyper') == 1 - 2*z*hyper([S.Half], [3*S.Half], -z**2)/sqrt(pi)
    assert erfc(z).rewrite('meijerg') == 1 - z*meijerg([S.Half], [], [0], [-S.Half], z**2)/sqrt(pi)
    assert erfc(z).rewrite('uppergamma') == 1 - sqrt(z**2)*erf(sqrt(z**2))/z
    assert erfc(z).rewrite('expint') == S.One - sqrt(z**2)/z + z*expint(S.Half, z**2)/sqrt(S.Pi)

    assert erfc(x).as_real_imag() == \
        ((erfc(re(x) - I*re(x)*Abs(im(x))/Abs(re(x)))/2 +
          erfc(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))/2,
          I*(erfc(re(x) - I*re(x)*Abs(im(x))/Abs(re(x))) -
          erfc(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))) *
          re(x)*Abs(im(x))/(2*im(x)*Abs(re(x)))))

    raises(ArgumentIndexError, lambda: erfc(x).fdiff(2))
def test_erfc_series():
    """Maclaurin series of erfc up to O(x**7)."""
    assert erfc(x).series(x, 0, 7) == 1 - 2*x/sqrt(pi) + \
        2*x**3/3/sqrt(pi) - x**5/5/sqrt(pi) + O(x**7)
def test_erfc_evalf():
    """Numerical evaluation of erfc against a reference value."""
    assert abs( erfc(Float(2.0)) - 0.00467773 ) < 1E-8  # XXX
def test_erfi():
    """Special values, rewrites, real/imag split and fdiff of erfi."""
    assert erfi(nan) == nan

    assert erfi(oo) == S.Infinity
    assert erfi(-oo) == S.NegativeInfinity

    assert erfi(0) == S.Zero

    assert erfi(I*oo) == I
    assert erfi(-I*oo) == -I

    assert erfi(-x) == -erfi(x)

    assert erfi(I*erfinv(x)) == I*x
    assert erfi(I*erfcinv(x)) == I*(1 - x)
    assert erfi(I*erf2inv(0, x)) == I*x

    assert erfi(I).is_real is False
    assert erfi(0).is_real is True

    assert conjugate(erfi(z)) == erfi(conjugate(z))

    assert erfi(z).rewrite('erf') == -I*erf(I*z)
    assert erfi(z).rewrite('erfc') == I*erfc(I*z) - I
    assert erfi(z).rewrite('fresnels') == (1 - I)*(fresnelc(z*(1 + I)/sqrt(pi)) -
        I*fresnels(z*(1 + I)/sqrt(pi)))
    assert erfi(z).rewrite('fresnelc') == (1 - I)*(fresnelc(z*(1 + I)/sqrt(pi)) -
        I*fresnels(z*(1 + I)/sqrt(pi)))
    assert erfi(z).rewrite('hyper') == 2*z*hyper([S.Half], [3*S.Half], z**2)/sqrt(pi)
    assert erfi(z).rewrite('meijerg') == z*meijerg([S.Half], [], [0], [-S.Half], -z**2)/sqrt(pi)
    assert erfi(z).rewrite('uppergamma') == (sqrt(-z**2)/z*(uppergamma(S.Half,
        -z**2)/sqrt(S.Pi) - S.One))
    assert erfi(z).rewrite('expint') == sqrt(-z**2)/z - z*expint(S.Half, -z**2)/sqrt(S.Pi)

    assert erfi(x).as_real_imag() == \
        ((erfi(re(x) - I*re(x)*Abs(im(x))/Abs(re(x)))/2 +
          erfi(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))/2,
          I*(erfi(re(x) - I*re(x)*Abs(im(x))/Abs(re(x))) -
          erfi(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))) *
          re(x)*Abs(im(x))/(2*im(x)*Abs(re(x)))))

    raises(ArgumentIndexError, lambda: erfi(x).fdiff(2))
def test_erfi_series():
    """Maclaurin series of erfi up to O(x**7)."""
    assert erfi(x).series(x, 0, 7) == 2*x/sqrt(pi) + \
        2*x**3/3/sqrt(pi) + x**5/5/sqrt(pi) + O(x**7)
def test_erfi_evalf():
    """Numerical evaluation of erfi against a reference value."""
    assert abs( erfi(Float(2.0)) - 18.5648024145756 ) < 1E-13  # XXX
def test_erf2():
    """Special values, symmetry and rewrites of the two-argument erf2."""
    assert erf2(0, 0) == S.Zero
    assert erf2(x, x) == S.Zero
    assert erf2(nan, 0) == nan

    assert erf2(-oo, y) == erf(y) + 1
    assert erf2( oo, y) == erf(y) - 1
    assert erf2( x, oo) == 1 - erf(x)
    assert erf2( x,-oo) == -1 - erf(x)
    assert erf2(x, erf2inv(x, y)) == y

    assert erf2(-x, -y) == -erf2(x,y)
    assert erf2(-x, y) == erf(y) + erf(x)
    assert erf2( x, -y) == -erf(y) - erf(x)

    assert erf2(x, y).rewrite('fresnels') == erf(y).rewrite(fresnels)-erf(x).rewrite(fresnels)
    assert erf2(x, y).rewrite('fresnelc') == erf(y).rewrite(fresnelc)-erf(x).rewrite(fresnelc)
    assert erf2(x, y).rewrite('hyper') == erf(y).rewrite(hyper)-erf(x).rewrite(hyper)
    assert erf2(x, y).rewrite('meijerg') == erf(y).rewrite(meijerg)-erf(x).rewrite(meijerg)
    assert erf2(x, y).rewrite('uppergamma') == erf(y).rewrite(uppergamma) - erf(x).rewrite(uppergamma)
    assert erf2(x, y).rewrite('expint') == erf(y).rewrite(expint)-erf(x).rewrite(expint)

    assert erf2(I, 0).is_real is False
    assert erf2(0, 0).is_real is True

    #assert conjugate(erf2(x, y)) == erf2(conjugate(x), conjugate(y))

    assert erf2(x, y).rewrite('erf') == erf(y) - erf(x)
    assert erf2(x, y).rewrite('erfc') == erfc(x) - erfc(y)
    assert erf2(x, y).rewrite('erfi') == I*(erfi(I*x) - erfi(I*y))

    # NOTE(review): presumably this was meant to exercise erf2(x, y).fdiff(3);
    # 'erfi' here looks copy-pasted from test_erfi — confirm.
    raises(ArgumentIndexError, lambda: erfi(x).fdiff(3))
def test_erfinv():
    """Special values, inversion, derivative and rewrite of erfinv."""
    assert erfinv(0) == 0
    assert erfinv(1) == S.Infinity
    assert erfinv(nan) == S.NaN

    assert erfinv(erf(w)) == w
    assert erfinv(erf(-w)) == -w

    assert erfinv(x).diff() == sqrt(pi)*exp(erfinv(x)**2)/2

    assert erfinv(z).rewrite('erfcinv') == erfcinv(1-z)
def test_erfinv_evalf():
    """Numerical evaluation of erfinv against a reference value."""
    assert abs( erfinv(Float(0.2)) - 0.179143454621292 ) < 1E-13
def test_erfcinv():
    """Special values, derivative and rewrite of erfcinv."""
    assert erfcinv(1) == 0
    assert erfcinv(0) == S.Infinity
    assert erfcinv(nan) == S.NaN

    assert erfcinv(x).diff() == -sqrt(pi)*exp(erfcinv(x)**2)/2

    assert erfcinv(z).rewrite('erfinv') == erfinv(1-z)
def test_erf2inv():
    """Special values and partial derivatives of the two-argument erf2inv."""
    assert erf2inv(0, 0) == S.Zero
    assert erf2inv(0, 1) == S.Infinity
    assert erf2inv(1, 0) == S.One
    assert erf2inv(0, y) == erfinv(y)
    assert erf2inv(oo,y) == erfcinv(-y)

    assert erf2inv(x, y).diff(x) == exp(-x**2 + erf2inv(x, y)**2)
    assert erf2inv(x, y).diff(y) == sqrt(pi)*exp(erf2inv(x, y)**2)/2
# NOTE we multiply by exp_polar(I*pi) and need this to be on the principal
# branch, hence take x in the lower half plane (d=0).
def mytn(expr1, expr2, expr3, x, d=0):
    """Check expr2 == expr3 symbolically and expr1 ≈ expr2 numerically in x.

    Free symbols other than x are substituted with random complex numbers.
    """
    from sympy.utilities.randtest import test_numerically, random_complex_number
    subs = {}
    for a in expr1.free_symbols:
        if a != x:
            subs[a] = random_complex_number()
    return expr2 == expr3 and test_numerically(expr1.subs(subs),
        expr2.subs(subs), x, d=d)
def mytd(expr1, expr2, x):
    """Check expr1.diff(x) == expr2 symbolically and verify it numerically.

    Free symbols other than x are substituted with random complex numbers.
    """
    from sympy.utilities.randtest import test_derivative_numerically, \
        random_complex_number
    subs = {}
    for a in expr1.free_symbols:
        if a != x:
            subs[a] = random_complex_number()
    return expr1.diff(x) == expr2 and test_derivative_numerically(expr1.subs(subs), x)
def tn_branch(func, s=None):
    """Numerically verify func's branch-cut jump across the negative real axis.

    Compares the polar-lifted difference at exp_polar(±I*pi) with the limit
    from just above/below the cut; s, when given, is passed as a first
    argument to func.
    """
    from sympy import I, pi, exp_polar
    from random import uniform

    def fn(x):
        # Bind the optional leading argument s when provided.
        if s is None:
            return func(x)
        return func(s, x)
    c = uniform(1, 5)
    expr = fn(c*exp_polar(I*pi)) - fn(c*exp_polar(-I*pi))
    eps = 1e-15
    expr2 = fn(-c + eps*I) - fn(-c - eps*I)
    return abs(expr.n() - expr2.n()).n() < 1e-10
def test_ei():
    """Branch behavior, derivative, rewrites, limit and series of Ei."""
    pos = Symbol('p', positive=True)
    neg = Symbol('n', negative=True)
    assert Ei(-pos) == Ei(polar_lift(-1)*pos) - I*pi
    assert Ei(neg) == Ei(polar_lift(neg)) - I*pi
    assert tn_branch(Ei)
    assert mytd(Ei(x), exp(x)/x, x)
    assert mytn(Ei(x), Ei(x).rewrite(uppergamma),
        -uppergamma(0, x*polar_lift(-1)) - I*pi, x)
    assert mytn(Ei(x), Ei(x).rewrite(expint),
        -expint(1, x*polar_lift(-1)) - I*pi, x)
    assert Ei(x).rewrite(expint).rewrite(Ei) == Ei(x)
    assert Ei(x*exp_polar(2*I*pi)) == Ei(x) + 2*I*pi
    assert Ei(x*exp_polar(-2*I*pi)) == Ei(x) - 2*I*pi

    assert mytn(Ei(x), Ei(x).rewrite(Shi), Chi(x) + Shi(x), x)
    assert mytn(Ei(x*polar_lift(I)), Ei(x*polar_lift(I)).rewrite(Si),
        Ci(x) + I*Si(x) + I*pi/2, x)

    assert Ei(log(x)).rewrite(li) == li(x)
    assert Ei(2*log(x)).rewrite(li) == li(x**2)

    assert gruntz(Ei(x+exp(-x))*exp(-x)*x, x, oo) == 1

    assert Ei(x).series(x) == EulerGamma + log(x) + x + x**2/4 + \
        x**3/18 + x**4/96 + x**5/600 + O(x**6)
def test_expint():
    """Rewrites, derivatives, branch behavior and series of expint/E1."""
    assert mytn(expint(x, y), expint(x, y).rewrite(uppergamma),
        y**(x - 1)*uppergamma(1 - x, y), x)
    assert mytd(
        expint(x, y), -y**(x - 1)*meijerg([], [1, 1], [0, 0, 1 - x], [], y), x)
    assert mytd(expint(x, y), -expint(x - 1, y), y)
    assert mytn(expint(1, x), expint(1, x).rewrite(Ei),
        -Ei(x*polar_lift(-1)) + I*pi, x)

    assert expint(-4, x) == exp(-x)/x + 4*exp(-x)/x**2 + 12*exp(-x)/x**3 \
        + 24*exp(-x)/x**4 + 24*exp(-x)/x**5
    assert expint(-S(3)/2, x) == \
        exp(-x)/x + 3*exp(-x)/(2*x**2) - 3*sqrt(pi)*erf(sqrt(x))/(4*x**S('5/2')) \
        + 3*sqrt(pi)/(4*x**S('5/2'))

    assert tn_branch(expint, 1)
    assert tn_branch(expint, 2)
    assert tn_branch(expint, 3)
    assert tn_branch(expint, 1.7)
    assert tn_branch(expint, pi)

    assert expint(y, x*exp_polar(2*I*pi)) == \
        x**(y - 1)*(exp(2*I*pi*y) - 1)*gamma(-y + 1) + expint(y, x)
    assert expint(y, x*exp_polar(-2*I*pi)) == \
        x**(y - 1)*(exp(-2*I*pi*y) - 1)*gamma(-y + 1) + expint(y, x)
    assert expint(2, x*exp_polar(2*I*pi)) == 2*I*pi*x + expint(2, x)
    assert expint(2, x*exp_polar(-2*I*pi)) == -2*I*pi*x + expint(2, x)
    assert expint(1, x).rewrite(Ei).rewrite(expint) == expint(1, x)

    assert mytn(E1(x), E1(x).rewrite(Shi), Shi(x) - Chi(x), x)
    assert mytn(E1(polar_lift(I)*x), E1(polar_lift(I)*x).rewrite(Si),
        -Ci(x) + I*Si(x) - I*pi/2, x)

    assert mytn(expint(2, x), expint(2, x).rewrite(Ei).rewrite(expint),
        -x*E1(x) + exp(-x), x)
    assert mytn(expint(3, x), expint(3, x).rewrite(Ei).rewrite(expint),
        x**2*E1(x)/2 + (1 - x)*exp(-x)/2, x)

    assert expint(S(3)/2, z).nseries(z) == \
        2 + 2*z - z**2/3 + z**3/15 - z**4/84 + z**5/540 - \
        2*sqrt(pi)*sqrt(z) + O(z**6)

    assert E1(z).series(z) == -EulerGamma - log(z) + z - \
        z**2/4 + z**3/18 - z**4/96 + z**5/600 + O(z**6)

    assert expint(4, z).series(z) == S(1)/3 - z/2 + z**2/2 + \
        z**3*(log(z)/6 - S(11)/36 + EulerGamma/6) - z**4/24 + \
        z**5/240 + O(z**6)
def test__eis():
    """Tests for the private helper _eis, the 'tractable' form of the
    exponential integral: Ei(z) is rewritten as exp(z)*_eis(z)."""
    assert _eis(z).diff(z) == -_eis(z) + 1/z
    assert _eis(1/z).series(z) == \
        z + z**2 + 2*z**3 + 6*z**4 + 24*z**5 + O(z**6)
    # Rewrites between the 'tractable' and 'intractable' forms.
    assert Ei(z).rewrite('tractable') == exp(z)*_eis(z)
    assert li(z).rewrite('tractable') == z*_eis(log(z))
    assert _eis(z).rewrite('intractable') == exp(-z)*Ei(z)
    # Differentiating via the tractable form and rewriting back must
    # agree with differentiating directly.
    assert expand(li(z).rewrite('tractable').diff(z).rewrite('intractable')) \
        == li(z).diff(z)
    assert expand(Ei(z).rewrite('tractable').diff(z).rewrite('intractable')) \
        == Ei(z).diff(z)
    # Series about z = 0 (note the logarithmic order term).
    assert _eis(z).series(z, n=3) == EulerGamma + log(z) + z*(-log(z) - \
        EulerGamma + 1) + z**2*(log(z)/2 - S(3)/4 + EulerGamma/2) + O(z**3*log(z))
def tn_arg(func):
    """Numerically verify that *func* evaluated on a polar-lifted
    argument agrees with its value just off the corresponding branch,
    for the four polar directions +-I*pi/2 and +-I*pi.
    """
    from random import uniform

    def matches(polar_arg, direction, offset):
        # Evaluate via the polar argument at a random point, and compare
        # against the value nudged slightly off the cut (offset*1e-15).
        point = uniform(1, 5)
        via_polar = func(polar_arg*x).subs(x, point).n()
        nudged = func(direction*point + offset*1e-15).n()
        return abs(via_polar - nudged).n() < 1e-10

    cases = [
        (exp_polar(I*pi/2), I, 1),
        (exp_polar(-I*pi/2), -I, 1),
        (exp_polar(I*pi), -1, I),
        (exp_polar(-I*pi), -1, -I),
    ]
    return all(matches(*case) for case in cases)
def test_li():
    """Tests for the logarithmic integral li(z)."""
    # Local symbols with assumptions, shadowing the module-level z.
    z = Symbol("z")
    zr = Symbol("z", real=True)
    zp = Symbol("z", positive=True)
    zn = Symbol("z", negative=True)
    # Special values.
    assert li(0) == 0
    assert li(1) == -oo
    assert li(oo) == oo
    # A symbolic argument stays unevaluated.
    assert isinstance(li(z), li)
    assert diff(li(z), z) == 1/log(z)
    # Conjugation commutes with li for a generic symbol and for a
    # negated real symbol.
    assert conjugate(li(z)) == li(conjugate(z))
    assert conjugate(li(-zr)) == li(-zr)
    # NOTE(review): the next two asserts compare an expression with
    # itself and therefore always hold — presumably they only check
    # that evaluation does not raise; confirm the intended expectation.
    assert conjugate(li(-zp)) == conjugate(li(-zp))
    assert conjugate(li(zn)) == conjugate(li(zn))
    # Rewrites in terms of related special functions.
    assert li(z).rewrite(Li) == Li(z) + li(2)
    assert li(z).rewrite(Ei) == Ei(log(z))
    assert li(z).rewrite(uppergamma) == (-log(1/log(z))/2 - log(-log(z)) +
                                         log(log(z))/2 - expint(1, -log(z)))
    assert li(z).rewrite(Si) == (-log(I*log(z)) - log(1/log(z))/2 +
                                 log(log(z))/2 + Ci(I*log(z)) + Shi(log(z)))
    assert li(z).rewrite(Ci) == (-log(I*log(z)) - log(1/log(z))/2 +
                                 log(log(z))/2 + Ci(I*log(z)) + Shi(log(z)))
    assert li(z).rewrite(Shi) == (-log(1/log(z))/2 + log(log(z))/2 +
                                  Chi(log(z)) - Shi(log(z)))
    assert li(z).rewrite(Chi) == (-log(1/log(z))/2 + log(log(z))/2 +
                                  Chi(log(z)) - Shi(log(z)))
    assert li(z).rewrite(hyper) == (log(z)*hyper((1, 1), (2, 2), log(z)) -
                                    log(1/log(z))/2 + log(log(z))/2 + EulerGamma)
    assert li(z).rewrite(meijerg) == (-log(1/log(z))/2 - log(-log(z)) + log(log(z))/2 -
                                      meijerg(((), (1,)), ((0, 0), ()), -log(z)))
    # li grows without bound, so its reciprocal vanishes at infinity.
    assert gruntz(1/li(z), z, oo) == 0
def test_Li():
    """Tests for the offset logarithmic integral Li(z) = li(z) - li(2)."""
    assert Li(2) == 0
    assert Li(oo) == oo
    # A symbolic argument stays unevaluated.
    assert isinstance(Li(z), Li)
    assert diff(Li(z), z) == 1/log(z)
    assert gruntz(1/Li(z), z, oo) == 0
    assert Li(z).rewrite(li) == li(z) - li(2)
def test_si():
    """Tests for the sine integral Si and hyperbolic sine integral Shi."""
    # Si and Shi exchange under an imaginary argument, and both are odd.
    assert Si(I*x) == I*Shi(x)
    assert Shi(I*x) == I*Si(x)
    assert Si(-I*x) == -I*Shi(x)
    assert Shi(-I*x) == -I*Si(x)
    assert Si(-x) == -Si(x)
    assert Shi(-x) == -Shi(x)
    # Both are single-valued: a full polar turn of +-2*pi is absorbed.
    assert Si(exp_polar(2*pi*I)*x) == Si(x)
    assert Si(exp_polar(-2*pi*I)*x) == Si(x)
    assert Shi(exp_polar(2*pi*I)*x) == Shi(x)
    assert Shi(exp_polar(-2*pi*I)*x) == Shi(x)
    # Limits at infinity.
    assert Si(oo) == pi/2
    assert Si(-oo) == -pi/2
    assert Shi(oo) == oo
    assert Shi(-oo) == -oo
    # Derivatives (mytd is a shared derivative-check helper).
    assert mytd(Si(x), sin(x)/x, x)
    assert mytd(Shi(x), sinh(x)/x, x)
    # Rewrites through Ei and expint (mytn is a shared rewrite-check
    # helper).
    assert mytn(Si(x), Si(x).rewrite(Ei),
                -I*(-Ei(x*exp_polar(-I*pi/2))/2
                    + Ei(x*exp_polar(I*pi/2))/2 - I*pi) + pi/2, x)
    assert mytn(Si(x), Si(x).rewrite(expint),
                -I*(-expint(1, x*exp_polar(-I*pi/2))/2 +
                    expint(1, x*exp_polar(I*pi/2))/2) + pi/2, x)
    assert mytn(Shi(x), Shi(x).rewrite(Ei),
                Ei(x)/2 - Ei(x*exp_polar(I*pi))/2 + I*pi/2, x)
    assert mytn(Shi(x), Shi(x).rewrite(expint),
                expint(1, x)/2 - expint(1, x*exp_polar(I*pi))/2 - I*pi/2, x)
    # Numerical branch checks (helper defined above).
    assert tn_arg(Si)
    assert tn_arg(Shi)
    # Series about x = 0 (odd functions: only odd powers appear).
    assert Si(x).nseries(x, n=8) == \
        x - x**3/18 + x**5/600 - x**7/35280 + O(x**9)
    assert Shi(x).nseries(x, n=8) == \
        x + x**3/18 + x**5/600 + x**7/35280 + O(x**9)
    assert Si(sin(x)).nseries(x, n=5) == x - 2*x**3/9 + 17*x**5/450 + O(x**6)
    # Series about a non-zero point.
    assert Si(x).nseries(x, 1, n=3) == \
        Si(1) + (x - 1)*sin(1) + (x - 1)**2*(-sin(1)/2 + cos(1)/2) + O((x - 1)**3, (x, 1))
def test_ci():
    """Tests for the cosine integral Ci and hyperbolic cosine integral Chi."""
    # Polar rotations by pi and pi/2 in both directions.
    m1 = exp_polar(I*pi)
    m1_ = exp_polar(-I*pi)
    pI = exp_polar(I*pi/2)
    mI = exp_polar(-I*pi/2)
    # Half/full-turn identities: Ci and Chi exchange under a quarter
    # turn and pick up imaginary shifts of multiples of I*pi/2.
    assert Ci(m1*x) == Ci(x) + I*pi
    assert Ci(m1_*x) == Ci(x) - I*pi
    assert Ci(pI*x) == Chi(x) + I*pi/2
    assert Ci(mI*x) == Chi(x) - I*pi/2
    assert Chi(m1*x) == Chi(x) + I*pi
    assert Chi(m1_*x) == Chi(x) - I*pi
    assert Chi(pI*x) == Ci(x) + I*pi/2
    assert Chi(mI*x) == Ci(x) - I*pi/2
    # A full polar turn of +-2*pi adds +-2*I*pi (both are multivalued).
    assert Ci(exp_polar(2*I*pi)*x) == Ci(x) + 2*I*pi
    assert Chi(exp_polar(-2*I*pi)*x) == Chi(x) - 2*I*pi
    assert Chi(exp_polar(2*I*pi)*x) == Chi(x) + 2*I*pi
    assert Ci(exp_polar(-2*I*pi)*x) == Ci(x) - 2*I*pi
    # Limits at infinity.
    assert Ci(oo) == 0
    assert Ci(-oo) == I*pi
    assert Chi(oo) == oo
    assert Chi(-oo) == oo
    # Derivatives (mytd is a shared derivative-check helper).
    assert mytd(Ci(x), cos(x)/x, x)
    assert mytd(Chi(x), cosh(x)/x, x)
    # Rewrites through Ei (mytn is a shared rewrite-check helper).
    assert mytn(Ci(x), Ci(x).rewrite(Ei),
                Ei(x*exp_polar(-I*pi/2))/2 + Ei(x*exp_polar(I*pi/2))/2, x)
    assert mytn(Chi(x), Chi(x).rewrite(Ei),
                Ei(x)/2 + Ei(x*exp_polar(I*pi))/2 - I*pi/2, x)
    # Numerical branch checks (helper defined above).
    assert tn_arg(Ci)
    assert tn_arg(Chi)
    from sympy import O, EulerGamma, log, limit
    # Series about x = 0: both have a log(x) + EulerGamma leading part
    # and differ only in the sign of the even-power terms.
    assert Ci(x).nseries(x, n=4) == \
        EulerGamma + log(x) - x**2/4 + x**4/96 + O(x**5)
    assert Chi(x).nseries(x, n=4) == \
        EulerGamma + log(x) + x**2/4 + x**4/96 + O(x**5)
    assert limit(log(x) - Ci(2*x), x, 0) == -log(2) - EulerGamma
def test_fresnel():
    """Tests for the Fresnel integrals fresnels (S) and fresnelc (C)."""
    # --- fresnels -----------------------------------------------------
    # Special values and symmetries: odd function, anti-linear in I.
    assert fresnels(0) == 0
    assert fresnels(oo) == S.Half
    assert fresnels(-oo) == -S.Half
    assert fresnels(z) == fresnels(z)
    assert fresnels(-z) == -fresnels(z)
    assert fresnels(I*z) == -I*fresnels(z)
    assert fresnels(-I*z) == I*fresnels(z)
    assert conjugate(fresnels(z)) == fresnels(conjugate(z))
    assert fresnels(z).diff(z) == sin(pi*z**2/2)
    # Rewrites in terms of erf and hypergeometric functions.
    assert fresnels(z).rewrite(erf) == (S.One + I)/4 * (
        erf((S.One + I)/2*sqrt(pi)*z) - I*erf((S.One - I)/2*sqrt(pi)*z))
    assert fresnels(z).rewrite(hyper) == \
        pi*z**3/6 * hyper([S(3)/4], [S(3)/2, S(7)/4], -pi**2*z**4/16)
    assert fresnels(z).series(z, n=15) == \
        pi*z**3/6 - pi**3*z**7/336 + pi**5*z**11/42240 + O(z**15)
    # w is presumably a real symbol declared at module level — on the
    # real line fresnels is real-valued.
    assert fresnels(w).is_real is True
    # Real/imaginary decomposition for a generic complex symbol and a
    # concrete complex point.
    assert fresnels(z).as_real_imag() == \
        ((fresnels(re(z) - I*re(z)*Abs(im(z))/Abs(re(z)))/2 +
          fresnels(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))/2,
          I*(fresnels(re(z) - I*re(z)*Abs(im(z))/Abs(re(z))) -
             fresnels(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))) *
          re(z)*Abs(im(z))/(2*im(z)*Abs(re(z)))))
    assert fresnels(2 + 3*I).as_real_imag() == (
        fresnels(2 + 3*I)/2 + fresnels(2 - 3*I)/2,
        I*(fresnels(2 - 3*I) - fresnels(2 + 3*I))/2
    )
    # Antiderivative and meijerg rewrite.
    assert expand_func(integrate(fresnels(z), z)) == \
        z*fresnels(z) + cos(pi*z**2/2)/pi
    assert fresnels(z).rewrite(meijerg) == sqrt(2)*pi*z**(S(9)/4) * \
        meijerg(((), (1,)), ((S(3)/4,),
                (S(1)/4, 0)), -pi**2*z**4/16)/(2*(-z)**(S(3)/4)*(z**2)**(S(3)/4))
    # --- fresnelc -----------------------------------------------------
    # Special values and symmetries: odd function, linear in I.
    assert fresnelc(0) == 0
    assert fresnelc(oo) == S.Half
    assert fresnelc(-oo) == -S.Half
    assert fresnelc(z) == fresnelc(z)
    assert fresnelc(-z) == -fresnelc(z)
    assert fresnelc(I*z) == I*fresnelc(z)
    assert fresnelc(-I*z) == -I*fresnelc(z)
    assert conjugate(fresnelc(z)) == fresnelc(conjugate(z))
    assert fresnelc(z).diff(z) == cos(pi*z**2/2)
    # Rewrites in terms of erf and hypergeometric functions.
    assert fresnelc(z).rewrite(erf) == (S.One - I)/4 * (
        erf((S.One + I)/2*sqrt(pi)*z) + I*erf((S.One - I)/2*sqrt(pi)*z))
    assert fresnelc(z).rewrite(hyper) == \
        z * hyper([S.One/4], [S.One/2, S(5)/4], -pi**2*z**4/16)
    assert fresnelc(z).series(z, n=15) == \
        z - pi**2*z**5/40 + pi**4*z**9/3456 - pi**6*z**13/599040 + O(z**15)
    # issue 6510
    # Asymptotic expansions at infinity (and the equivalent expansions
    # of fresnels(1/z), fresnelc(1/z) about z = 0).
    assert fresnels(z).series(z, S.Infinity) == \
        (-1/(pi**2*z**3) + O(z**(-6), (z, oo)))*sin(pi*z**2/2) + \
        (3/(pi**3*z**5) - 1/(pi*z) + O(z**(-6), (z, oo)))*cos(pi*z**2/2) + S.Half
    assert fresnelc(z).series(z, S.Infinity) == \
        (-1/(pi**2*z**3) + O(z**(-6), (z, oo)))*cos(pi*z**2/2) + \
        (-3/(pi**3*z**5) + 1/(pi*z) + O(z**(-6), (z, oo)))*sin(pi*z**2/2) + S.Half
    assert fresnels(1/z).series(z) == \
        (-z**3/pi**2 + O(z**6))*sin(pi/(2*z**2)) + (-z/pi + 3*z**5/pi**3 + \
        O(z**6))*cos(pi/(2*z**2)) + S.Half
    assert fresnelc(1/z).series(z) == \
        (-z**3/pi**2 + O(z**6))*cos(pi/(2*z**2)) + (z/pi - 3*z**5/pi**3 + \
        O(z**6))*sin(pi/(2*z**2)) + S.Half
    assert fresnelc(w).is_real is True
    assert fresnelc(z).as_real_imag() == \
        ((fresnelc(re(z) - I*re(z)*Abs(im(z))/Abs(re(z)))/2 +
          fresnelc(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))/2,
          I*(fresnelc(re(z) - I*re(z)*Abs(im(z))/Abs(re(z))) -
             fresnelc(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))) *
          re(z)*Abs(im(z))/(2*im(z)*Abs(re(z)))))
    assert fresnelc(2 + 3*I).as_real_imag() == (
        fresnelc(2 - 3*I)/2 + fresnelc(2 + 3*I)/2,
        I*(fresnelc(2 - 3*I) - fresnelc(2 + 3*I))/2
    )
    # Antiderivative and meijerg rewrite.
    assert expand_func(integrate(fresnelc(z), z)) == \
        z*fresnelc(z) - sin(pi*z**2/2)/pi
    assert fresnelc(z).rewrite(meijerg) == sqrt(2)*pi*z**(S(3)/4) * \
        meijerg(((), (1,)), ((S(1)/4,),
                (S(3)/4, 0)), -pi**2*z**4/16)/(2*(-z)**(S(1)/4)*(z**2)**(S(1)/4))
    # Random numerical cross-checks of the decompositions and rewrites.
    from sympy.utilities.randtest import test_numerically

    test_numerically(re(fresnels(z)), fresnels(z).as_real_imag()[0], z)
    test_numerically(im(fresnels(z)), fresnels(z).as_real_imag()[1], z)
    test_numerically(fresnels(z), fresnels(z).rewrite(hyper), z)
    test_numerically(fresnels(z), fresnels(z).rewrite(meijerg), z)
    test_numerically(re(fresnelc(z)), fresnelc(z).as_real_imag()[0], z)
    test_numerically(im(fresnelc(z)), fresnelc(z).as_real_imag()[1], z)
    test_numerically(fresnelc(z), fresnelc(z).rewrite(hyper), z)
    test_numerically(fresnelc(z), fresnelc(z).rewrite(meijerg), z)
``` |
[
{
"content": "Output the full code verbatim (no extra comments):\n```python\n# This file is part of geometriki.\n#\n# geometriki is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version ... | [
{
"content": "Output the full code verbatim (no extra comments):\n<|memory_start|>```python\n# This file is part of geometriki.\n#\n# geometriki is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation,... | ```python
# This file is part of geometriki.
#
# geometriki is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# geometriki is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with geometriki, in a file named COPYING. If not,
# see <http://www.gnu.org/licenses/>.
from helloworld.tests import *
class TestPagesController(TestController):
def test_index(self):
response = self.app.get(url('pages'))
# Test response...
def test_index_as_xml(self):
response = self.app.get(url('formatted_pages', format='xml'))
def test_create(self):
response = self.app.post(url('pages'))
def test_new(self):
response = self.app.get(url('new_page'))
def test_new_as_xml(self):
response = self.app.get(url('formatted_new_page', format='xml'))
def test_update(self):
response = self.app.put(url('page', id=1))
def test_update_browser_fakeout(self):
response = self.app.post(url('page', id=1), params=dict(_method='put'))
def test_delete(self):
response = self.app.delete(url('page', id=1))
def test_delete_browser_fakeout(self):
response = self.app.post(url('page', id=1), params=dict(_method='delete'))
def test_show(self):
response = self.app.get(url('page', id=1))
def test_show_as_xml(self):
response = self.app.get(url('formatted_page', id=1, format='xml'))
def test_edit(self):
response = self.app.get(url('edit_page', id=1))
def test_edit_as_xml(self):
response = self.app.get(url('formatted_edit_page', id=1, format='xml'))
``` |
[
{
"content": "```python\ninput_name = 'input/piezo.py'\noutput_name = 'test_piezo.vtk'\n\n\nfrom testsBasic import TestInput\n\nclass Test( TestInput ):\n\n def from_conf( conf, options ):\n return TestInput.from_conf( conf, options, cls = Test )\n from_conf = staticmethod( from_conf )\n\n def t... | [
{
"content": "<|memory_start|>```python\ninput_name = 'input/piezo.py'\noutput_name = 'test_piezo.vtk'\n\n\nfrom testsBasic import TestInput\n\nclass Test( TestInput ):\n\n def from_conf( conf, options ):\n return TestInput.from_conf( conf, options, cls = Test )\n from_conf = staticmethod( from_con... | ```python
input_name = 'input/piezo.py'
output_name = 'test_piezo.vtk'
from testsBasic import TestInput
class Test( TestInput ):
def from_conf( conf, options ):
return TestInput.from_conf( conf, options, cls = Test )
from_conf = staticmethod( from_conf )
def test_ebc( self ):
import numpy as nm
from sfepy.fem import ProblemDefinition
pb = ProblemDefinition.from_conf( self.test_conf )
pb.time_update()
vvs = pb.variables
set = vvs.set_state_part
make_full = vvs.make_full_vec
svec_u = nm.ones( (vvs.adi.n_dofs['u'],), dtype = nm.float64 )
svec_phi = nm.empty( (vvs.adi.n_dofs['phi'],), dtype = nm.float64 )
svec_phi.fill( 2.0 )
svec = vvs.create_stripped_state_vector()
set( svec, svec_u, 'u', stripped = True )
set( svec, svec_phi, 'phi', stripped = True )
vec = make_full( svec )
ii_u = vvs.di.indx['u'].start + vvs['u'].eq_map.eqi
ii_phi = vvs.di.indx['phi'].start + vvs['phi'].eq_map.eqi
ok_ebc = vvs.has_ebc( vec )
ok_u = nm.all( vec[ii_u] == svec_u )
ok_phi = nm.all( vec[ii_phi] == svec_phi )
msg = '%s: %s'
self.report( msg % ('ebc', ok_ebc) )
self.report( msg % ('u', ok_u) )
self.report( msg % ('phi', ok_phi) )
ok = ok_ebc and ok_u and ok_phi
return ok
``` |
[
{
"content": "Return the code exactly, with no changes:\n```python\nimport socket\nimport sys\n__author__ = 'sekely'\n\nclass SimpleServer(object):\n\n def __init__(self, addr='localhost', port=50000, buf=1024):\n self.buf = buf\n self.server_address = (addr, port)\n print('setting up se... | [
{
"content": "Return the code exactly, with no changes:\n<|memory_start|>```python\nimport socket\nimport sys\n__author__ = 'sekely'\n\nclass SimpleServer(object):\n\n def __init__(self, addr='localhost', port=50000, buf=1024):\n self.buf = buf\n self.server_address = (addr, port)\n prin... | ```python
import socket
import sys
__author__ = 'sekely'
class SimpleServer(object):
def __init__(self, addr='localhost', port=50000, buf=1024):
self.buf = buf
self.server_address = (addr, port)
print('setting up server up on %s port %s' % self.server_address)
self._bind()
def _bind(self):
# Create a TCP/IP socket
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.bind(self.server_address)
def _listen(self):
# Listen for incoming connections
self.sock.listen(1)
def _accept(self):
print('waiting for a connection')
# Wait for a connection
connection, client_address = self.sock.accept()
return connection, client_address
def start(self):
self._listen()
while True:
connection, client_address = self._accept()
try:
# Receive the data in small chunks and retransmit it
while True:
data = connection.recv(self.buf)
print('received "%s"' % data)
if data:
print('sending data back to the client')
connection.sendall(data)
else:
print('no more data from', client_address)
break
finally:
# Clean up the connection
connection.close()
def stop(self):
print("\nclosing server")
self.sock.close()
if __name__ == '__main__':
server = SimpleServer()
try:
server.start()
except KeyboardInterrupt:
server.stop()
``` |
[
{
"content": "```python\n#implimentation of a shitty ui system I made 2 years ago in lua\n#should be fine\nimport pygame\ntrue = True;\nclass Redam:\n def __init__(self, telrus):\n #create local instance of telrus\n self.telrus = telrus;\n #self.uielements = {};\n self.pygame = te... | [
{
"content": "<|memory_start|>```python\n#implimentation of a shitty ui system I made 2 years ago in lua\n#should be fine\nimport pygame\ntrue = True;\nclass Redam:\n def __init__(self, telrus):\n #create local instance of telrus\n self.telrus = telrus;\n #self.uielements = {};\n ... | ```python
#implimentation of a shitty ui system I made 2 years ago in lua
#should be fine
import pygame
true = True;
class Redam:
def __init__(self, telrus):
#create local instance of telrus
self.telrus = telrus;
#self.uielements = {};
self.pygame = telrus.pygame;
def createObject( self, types, h, w, x, y ):
if(types=='gbox'):
return gbox( h, w, x, y );
def requestObject(self, id):
return self.uielements[id]
def drawHook(self, thing):
thing.draw(self.telrus.surface);
def updateHook():
print('nah')
class UI:
def __init__( self, h, w, x, y ):
self.x = x
self.y = y
self.h = h
self.w = w
self.ShouldD = true;
def setX(self, s):
self.x = s;
def setY(self, s):
self.y = s;
def setH(self, s):
self.h = s;
def setW(self, s):
self.w = s;
class gbox(UI):
def draw(self, hook):
pygame.draw.rect(hook, (255, 255, 255), (self.x, self.y, self.h, self.w))
``` |
[
{
"content": "Reproduce the code exactly as provided (keep formatting):\n```python\n# -*- encoding: utf-8 -*-\n# Copyright (C) 2015 Alejandro López Espinosa (kudrom)\n\nclass NotFoundDescriptor(Exception):\n def __init__(self, name):\n self.name = name\n\n def __str__(self):\n return \"Desc... | [
{
"content": "Reproduce the code exactly as provided (keep formatting):\n<|memory_start|>```python\n# -*- encoding: utf-8 -*-\n# Copyright (C) 2015 Alejandro López Espinosa (kudrom)\n\nclass NotFoundDescriptor(Exception):\n def __init__(self, name):\n self.name = name\n\n def __str__(self):\n ... | ```python
# -*- encoding: utf-8 -*-
# Copyright (C) 2015 Alejandro López Espinosa (kudrom)
class NotFoundDescriptor(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return "Descriptor %s couldn't have been found" % self.name
class NotListenerFound(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return "Listener %s couldn't have been found" % self.name
class InvalidListener(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return "Listener %s is not a subclass of twisted Service" % self.name
class RequirementViolated(Exception):
def __init__(self, msg):
self.message = msg
def __str__(self):
return self.message
class RequiredAttributes(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return "The layout %s lacks of required attributes."
class UrlInvalid(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class InvalidResource(Exception):
def __init__(self, url, name):
self.url = url
self.name = name
def __str__(self):
return "%s class is not a subclass of LupuloResource " \
"so %s is discarded as a valid url" % (self.name, self.url)
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\nimport gym\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport tensorflow as tf\nimport tf_util\nimport argparse\nimport tqdm\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\nimport gym\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport tensorflow as tf\nimport tf_util\nimport argparse\nimport tqdm\n\n\ndef main():\n parser = argparse.ArgumentParser()\... | ```python
import gym
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import pickle
import tensorflow as tf
import tf_util
import argparse
import tqdm
def main():
parser = argparse.ArgumentParser()
parser.add_argument('env', type=str)
args = parser.parse_args()
inputs, outputs, evaluations = extract_imitation(args.env)
model = train_regressor(inputs, outputs)
run_regressor(evaluations, model, args.env)
def extract_imitation(env):
dic_data = pickle.loads(open('imitation/original/{}.pkl'.format(env), 'rb').read())
inputs = np.array(dic_data['observations'])
outputs = np.array(dic_data['actions'])
evaluations = pd.DataFrame({'steps': dic_data['steps'], 'expert returns': dic_data['returns']})
return inputs, outputs, evaluations
def train_regressor(inputs, outputs, layers=[64, 64], activation_function=tf.nn.tanh, batch_size=10,
epochs=1000, steps=10000):
inputs_dim = inputs.shape[1]
feature_columns = [tf.contrib.layers.real_valued_column("", dimension=inputs_dim)]
outputs_dim = outputs.shape[2]
estimator = tf.contrib.learn.DNNRegressor(
feature_columns=feature_columns,
hidden_units=layers,
activation_fn=activation_function,
label_dimension=outputs_dim
)
input_fn = tf.contrib.learn.io.numpy_input_fn({"": inputs}, outputs[:, 0, :],
batch_size=batch_size, num_epochs=epochs)
estimator.fit(input_fn=input_fn, steps=steps)
return estimator
def run_regressor(expert_data, model, env_name, num_rollouts=20, render=False):
returns = []
observations = []
actions = []
steps_numbers = []
with tf.Session():
env = gym.make(env_name)
max_steps = env.spec.timestep_limit
tf_util.initialize()
for i in tqdm.tqdm(range(num_rollouts)):
obs = env.reset()
done = False
totalr = 0.
steps = 0
while not done:
action = model.predict(obs[None, :], as_iterable=False)
observations.append(obs)
actions.append(action)
obs, r, done, _ = env.step(action)
totalr += r
steps += 1
if render:
env.render()
if steps >= max_steps:
break
steps_numbers.append(steps)
returns.append(totalr)
model_data = {'observations': np.array(observations),
'actions': np.array(actions),
'returns': np.array(returns),
'steps': np.array(steps_numbers)}
expert_data['model returns'] = pd.Series(model_data['returns'], index=expert_data.index)
pickle.dump(model_data, open('imitation/tnn_imitation/{}.pkl'.format(env_name), 'wb+'))
return
if __name__ == '__main__':
main()
``` |
[
{
"content": "Here is some code:\n```python\nimport collections\nimport functools\nimport sys\n\nfrom numba.core import utils\nfrom numba.core.ir import Loc\nfrom numba.core.errors import UnsupportedError\n\n# List of bytecodes creating a new block in the control flow graph\n# (in addition to explicit jump labe... | [
{
"content": "Here is some code:\n<|memory_start|>```python\nimport collections\nimport functools\nimport sys\n\nfrom numba.core import utils\nfrom numba.core.ir import Loc\nfrom numba.core.errors import UnsupportedError\n\n# List of bytecodes creating a new block in the control flow graph\n# (in addition to ex... | ```python
import collections
import functools
import sys
from numba.core import utils
from numba.core.ir import Loc
from numba.core.errors import UnsupportedError
# List of bytecodes creating a new block in the control flow graph
# (in addition to explicit jump labels).
NEW_BLOCKERS = frozenset(['SETUP_LOOP', 'FOR_ITER', 'SETUP_WITH'])
class CFBlock(object):
def __init__(self, offset):
self.offset = offset
self.body = []
# A map of jumps to outgoing blocks (successors):
# { offset of outgoing block -> number of stack pops }
self.outgoing_jumps = {}
# A map of jumps to incoming blocks (predecessors):
# { offset of incoming block -> number of stack pops }
self.incoming_jumps = {}
self.terminating = False
def __repr__(self):
args = (self.offset,
sorted(self.outgoing_jumps),
sorted(self.incoming_jumps))
return "block(offset:%d, outgoing: %s, incoming: %s)" % args
def __iter__(self):
return iter(self.body)
class Loop(collections.namedtuple("Loop",
("entries", "exits", "header", "body"))):
"""
A control flow loop, as detected by a CFGraph object.
"""
__slots__ = ()
# The loop header is enough to detect that two loops are really
# the same, assuming they belong to the same graph.
# (note: in practice, only one loop instance is created per graph
# loop, so identity would be fine)
def __eq__(self, other):
return isinstance(other, Loop) and other.header == self.header
def __hash__(self):
return hash(self.header)
class _DictOfContainers(collections.defaultdict):
"""A defaultdict with customized equality checks that ignore empty values.
Non-empty value is checked by: `bool(value_item) == True`.
"""
def __eq__(self, other):
if isinstance(other, _DictOfContainers):
mine = self._non_empty_items()
theirs = other._non_empty_items()
return mine == theirs
return NotImplemented
def __ne__(self, other):
ret = self.__eq__(other)
if ret is NotImplemented:
return ret
else:
return not ret
def _non_empty_items(self):
return [(k, vs) for k, vs in sorted(self.items()) if vs]
class CFGraph(object):
"""
Generic (almost) implementation of a Control Flow Graph.
"""
def __init__(self):
self._nodes = set()
self._preds = _DictOfContainers(set)
self._succs = _DictOfContainers(set)
self._edge_data = {}
self._entry_point = None
def add_node(self, node):
"""
Add *node* to the graph. This is necessary before adding any
edges from/to the node. *node* can be any hashable object.
"""
self._nodes.add(node)
def add_edge(self, src, dest, data=None):
"""
Add an edge from node *src* to node *dest*, with optional
per-edge *data*.
If such an edge already exists, it is replaced (duplicate edges
are not possible).
"""
if src not in self._nodes:
raise ValueError("Cannot add edge as src node %s not in nodes %s" %
(src, self._nodes))
if dest not in self._nodes:
raise ValueError("Cannot add edge as dest node %s not in nodes %s" %
(dest, self._nodes))
self._add_edge(src, dest, data)
def successors(self, src):
"""
Yield (node, data) pairs representing the successors of node *src*.
(*data* will be None if no data was specified when adding the edge)
"""
for dest in self._succs[src]:
yield dest, self._edge_data[src, dest]
def predecessors(self, dest):
"""
Yield (node, data) pairs representing the predecessors of node *dest*.
(*data* will be None if no data was specified when adding the edge)
"""
for src in self._preds[dest]:
yield src, self._edge_data[src, dest]
def set_entry_point(self, node):
"""
Set the entry point of the graph to *node*.
"""
assert node in self._nodes
self._entry_point = node
def process(self):
"""
Compute essential properties of the control flow graph. The graph
must have been fully populated, and its entry point specified. Other
graph properties are computed on-demand.
"""
if self._entry_point is None:
raise RuntimeError("no entry point defined!")
self._eliminate_dead_blocks()
def dominators(self):
"""
Return a dictionary of {node -> set(nodes)} mapping each node to
the nodes dominating it.
A node D dominates a node N when any path leading to N must go through D
"""
return self._doms
def post_dominators(self):
"""
Return a dictionary of {node -> set(nodes)} mapping each node to
the nodes post-dominating it.
A node P post-dominates a node N when any path starting from N must go
through P.
"""
return self._post_doms
def immediate_dominators(self):
"""
Return a dictionary of {node -> node} mapping each node to its
immediate dominator (idom).
The idom(B) is the closest strict dominator of V
"""
return self._idom
def dominance_frontier(self):
"""
Return a dictionary of {node -> set(nodes)} mapping each node to
the nodes in its dominance frontier.
The dominance frontier _df(N) is the set of all nodes that are
immediate successors to blocks dominanted by N but which aren't
stricly dominanted by N
"""
return self._df
def dominator_tree(self):
"""
return a dictionary of {node -> set(nodes)} mapping each node to
the set of nodes it immediately dominates
The domtree(B) is the closest strict set of nodes that B dominates
"""
return self._domtree
@utils.cached_property
def _exit_points(self):
return self._find_exit_points()
@utils.cached_property
def _doms(self):
return self._find_dominators()
@utils.cached_property
def _back_edges(self):
return self._find_back_edges()
@utils.cached_property
def _topo_order(self):
return self._find_topo_order()
@utils.cached_property
def _descs(self):
return self._find_descendents()
@utils.cached_property
def _loops(self):
return self._find_loops()
@utils.cached_property
def _in_loops(self):
return self._find_in_loops()
@utils.cached_property
def _post_doms(self):
return self._find_post_dominators()
@utils.cached_property
def _idom(self):
return self._find_immediate_dominators()
@utils.cached_property
def _df(self):
return self._find_dominance_frontier()
@utils.cached_property
def _domtree(self):
return self._find_dominator_tree()
def descendents(self, node):
"""
Return the set of descendents of the given *node*, in topological
order (ignoring back edges).
"""
return self._descs[node]
def entry_point(self):
"""
Return the entry point node.
"""
assert self._entry_point is not None
return self._entry_point
def exit_points(self):
"""
Return the computed set of exit nodes (may be empty).
"""
return self._exit_points
def backbone(self):
"""
Return the set of nodes constituting the graph's backbone.
(i.e. the nodes that every path starting from the entry point
must go through). By construction, it is non-empty: it contains
at least the entry point.
"""
return self._post_doms[self._entry_point]
def loops(self):
"""
Return a dictionary of {node -> loop} mapping each loop header
to the loop (a Loop instance) starting with it.
"""
return self._loops
def in_loops(self, node):
"""
Return the list of Loop objects the *node* belongs to,
from innermost to outermost.
"""
return [self._loops[x] for x in self._in_loops.get(node, ())]
def dead_nodes(self):
"""
Return the set of dead nodes (eliminated from the graph).
"""
return self._dead_nodes
def nodes(self):
"""
Return the set of live nodes.
"""
return self._nodes
def topo_order(self):
"""
Return the sequence of nodes in topological order (ignoring back
edges).
"""
return self._topo_order
def topo_sort(self, nodes, reverse=False):
"""
Iterate over the *nodes* in topological order (ignoring back edges).
The sort isn't guaranteed to be stable.
"""
nodes = set(nodes)
it = self._topo_order
if reverse:
it = reversed(it)
for n in it:
if n in nodes:
yield n
def dump(self, file=None):
"""
Dump extensive debug information.
"""
import pprint
file = file or sys.stdout
if 1:
print("CFG adjacency lists:", file=file)
self._dump_adj_lists(file)
print("CFG dominators:", file=file)
pprint.pprint(self._doms, stream=file)
print("CFG post-dominators:", file=file)
pprint.pprint(self._post_doms, stream=file)
print("CFG back edges:", sorted(self._back_edges), file=file)
print("CFG loops:", file=file)
pprint.pprint(self._loops, stream=file)
print("CFG node-to-loops:", file=file)
pprint.pprint(self._in_loops, stream=file)
print("CFG backbone:", file=file)
pprint.pprint(self.backbone(), stream=file)
def render_dot(self, filename="numba_cfg.dot"):
"""Render the controlflow graph with GraphViz DOT via the
``graphviz`` python binding.
Returns
-------
g : graphviz.Digraph
Use `g.view()` to open the graph in the default PDF application.
"""
try:
import graphviz as gv
except ImportError:
raise ImportError(
"The feature requires `graphviz` but it is not available. "
"Please install with `pip install graphviz`"
)
g = gv.Digraph(filename=filename)
# Populate the nodes
for n in self._nodes:
g.node(str(n))
# Populate the edges
for n in self._nodes:
for edge in self._succs[n]:
g.edge(str(n), str(edge))
return g
# Internal APIs
def _add_edge(self, from_, to, data=None):
# This internal version allows adding edges to/from unregistered
# (ghost) nodes.
self._preds[to].add(from_)
self._succs[from_].add(to)
self._edge_data[from_, to] = data
def _remove_node_edges(self, node):
for succ in self._succs.pop(node, ()):
self._preds[succ].remove(node)
del self._edge_data[node, succ]
for pred in self._preds.pop(node, ()):
self._succs[pred].remove(node)
del self._edge_data[pred, node]
def _dfs(self, entries=None):
if entries is None:
entries = (self._entry_point,)
seen = set()
stack = list(entries)
while stack:
node = stack.pop()
if node not in seen:
yield node
seen.add(node)
for succ in self._succs[node]:
stack.append(succ)
def _eliminate_dead_blocks(self):
"""
Eliminate all blocks not reachable from the entry point, and
stash them into self._dead_nodes.
"""
live = set()
for node in self._dfs():
live.add(node)
self._dead_nodes = self._nodes - live
self._nodes = live
# Remove all edges leading from dead nodes
for dead in self._dead_nodes:
self._remove_node_edges(dead)
def _find_exit_points(self):
"""
Compute the graph's exit points.
"""
exit_points = set()
for n in self._nodes:
if not self._succs.get(n):
exit_points.add(n)
return exit_points
def _find_postorder(self):
succs = self._succs
back_edges = self._back_edges
post_order = []
seen = set()
def _dfs_rec(node):
if node not in seen:
seen.add(node)
for dest in succs[node]:
if (node, dest) not in back_edges:
_dfs_rec(dest)
post_order.append(node)
_dfs_rec(self._entry_point)
return post_order
def _find_immediate_dominators(self):
# The algorithm implemented computes the immediate dominator
# for each node in the CFG which is equivalent to build a dominator tree
# Based on the implementation from NetworkX
# library - nx.immediate_dominators
# https://github.com/networkx/networkx/blob/858e7cb183541a78969fed0cbcd02346f5866c02/networkx/algorithms/dominance.py # noqa: E501
# References:
# Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
# A Simple, Fast Dominance Algorithm
# https://www.cs.rice.edu/~keith/EMBED/dom.pdf
def intersect(u, v):
while u != v:
while idx[u] < idx[v]:
u = idom[u]
while idx[u] > idx[v]:
v = idom[v]
return u
entry = self._entry_point
preds_table = self._preds
order = self._find_postorder()
idx = {e: i for i, e in enumerate(order)} # index of each node
idom = {entry : entry}
order.pop()
order.reverse()
changed = True
while changed:
changed = False
for u in order:
new_idom = functools.reduce(intersect,
(v for v in preds_table[u]
if v in idom))
if u not in idom or idom[u] != new_idom:
idom[u] = new_idom
changed = True
return idom
def _find_dominator_tree(self):
idom = self._idom
domtree = _DictOfContainers(set)
for u, v in idom.items():
# v dominates u
if u not in domtree:
domtree[u] = set()
if u != v:
domtree[v].add(u)
return domtree
def _find_dominance_frontier(self):
idom = self._idom
preds_table = self._preds
df = {u: set() for u in idom}
for u in idom:
if len(preds_table[u]) < 2:
continue
for v in preds_table[u]:
while v != idom[u]:
df[v].add(u)
v = idom[v]
return df
def _find_dominators_internal(self, post=False):
# See theoretical description in
# http://en.wikipedia.org/wiki/Dominator_%28graph_theory%29
# The algorithm implemented here uses a todo-list as described
# in http://pages.cs.wisc.edu/~fischer/cs701.f08/finding.loops.html
if post:
entries = set(self._exit_points)
preds_table = self._succs
succs_table = self._preds
else:
entries = set([self._entry_point])
preds_table = self._preds
succs_table = self._succs
if not entries:
raise RuntimeError("no entry points: dominator algorithm "
"cannot be seeded")
doms = {}
for e in entries:
doms[e] = set([e])
todo = []
for n in self._nodes:
if n not in entries:
doms[n] = set(self._nodes)
todo.append(n)
while todo:
n = todo.pop()
if n in entries:
continue
new_doms = set([n])
preds = preds_table[n]
if preds:
new_doms |= functools.reduce(set.intersection,
[doms[p] for p in preds])
if new_doms != doms[n]:
assert len(new_doms) < len(doms[n])
doms[n] = new_doms
todo.extend(succs_table[n])
return doms
def _find_dominators(self):
return self._find_dominators_internal(post=False)
def _find_post_dominators(self):
# To handle infinite loops correctly, we need to add a dummy
# exit point, and link members of infinite loops to it.
dummy_exit = object()
self._exit_points.add(dummy_exit)
for loop in self._loops.values():
if not loop.exits:
for b in loop.body:
self._add_edge(b, dummy_exit)
pdoms = self._find_dominators_internal(post=True)
# Fix the _post_doms table to make no reference to the dummy exit
del pdoms[dummy_exit]
for doms in pdoms.values():
doms.discard(dummy_exit)
self._remove_node_edges(dummy_exit)
self._exit_points.remove(dummy_exit)
return pdoms
# Finding loops and back edges: see
# http://pages.cs.wisc.edu/~fischer/cs701.f08/finding.loops.html
def _find_back_edges(self, stats=None):
"""
Find back edges. An edge (src, dest) is a back edge if and
only if *dest* dominates *src*.
"""
# Prepare stats to capture execution information
if stats is not None:
if not isinstance(stats, dict):
raise TypeError(f"*stats* must be a dict; got {type(stats)}")
stats.setdefault('iteration_count', 0)
# Uses a simple DFS to find back-edges.
# The new algorithm is faster than the the previous dominator based
# algorithm.
back_edges = set()
# stack: keeps track of the traversal path
stack = []
# succs_state: keep track of unvisited successors of a node
succs_state = {}
entry_point = self.entry_point()
checked = set()
def push_state(node):
stack.append(node)
succs_state[node] = [dest for dest in self._succs[node]]
push_state(entry_point)
# Keep track for iteration count for debugging
iter_ct = 0
while stack:
iter_ct += 1
tos = stack[-1]
tos_succs = succs_state[tos]
# Are there successors not checked?
if tos_succs:
# Check the next successor
cur_node = tos_succs.pop()
# Is it in our traversal path?
if cur_node in stack:
# Yes, it's a backedge
back_edges.add((tos, cur_node))
elif cur_node not in checked:
# Push
push_state(cur_node)
else:
# Checked all successors. Pop
stack.pop()
checked.add(tos)
if stats is not None:
stats['iteration_count'] += iter_ct
return back_edges
def _find_topo_order(self):
succs = self._succs
back_edges = self._back_edges
post_order = []
seen = set()
def _dfs_rec(node):
if node not in seen:
seen.add(node)
for dest in succs[node]:
if (node, dest) not in back_edges:
_dfs_rec(dest)
post_order.append(node)
_dfs_rec(self._entry_point)
post_order.reverse()
return post_order
def _find_descendents(self):
descs = {}
for node in reversed(self._topo_order):
descs[node] = node_descs = set()
for succ in self._succs[node]:
if (node, succ) not in self._back_edges:
node_descs.add(succ)
node_descs.update(descs[succ])
return descs
def _find_loops(self):
"""
Find the loops defined by the graph's back edges.
"""
bodies = {}
for src, dest in self._back_edges:
# The destination of the back edge is the loop header
header = dest
# Build up the loop body from the back edge's source node,
# up to the source header.
body = set([header])
queue = [src]
while queue:
n = queue.pop()
if n not in body:
body.add(n)
queue.extend(self._preds[n])
# There can be several back edges to a given loop header;
# if so, merge the resulting body fragments.
if header in bodies:
bodies[header].update(body)
else:
bodies[header] = body
# Create a Loop object for each header.
loops = {}
for header, body in bodies.items():
entries = set()
exits = set()
for n in body:
entries.update(self._preds[n] - body)
exits.update(self._succs[n] - body)
loop = Loop(header=header, body=body, entries=entries, exits=exits)
loops[header] = loop
return loops
def _find_in_loops(self):
loops = self._loops
# Compute the loops to which each node belongs.
in_loops = dict((n, []) for n in self._nodes)
# Sort loops from longest to shortest
# This ensures that outer loops will come before inner loops
for loop in sorted(loops.values(), key=lambda loop: len(loop.body)):
for n in loop.body:
in_loops[n].append(loop.header)
return in_loops
def _dump_adj_lists(self, file):
adj_lists = dict((src, sorted(list(dests)))
for src, dests in self._succs.items())
import pprint
pprint.pprint(adj_lists, stream=file)
def __eq__(self, other):
if not isinstance(other, CFGraph):
raise NotImplementedError
for x in ['_nodes', '_edge_data', '_entry_point', '_preds', '_succs']:
this = getattr(self, x, None)
that = getattr(other, x, None)
if this != that:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
class ControlFlowAnalysis(object):
"""
Attributes
----------
- bytecode
- blocks
- blockseq
- doms: dict of set
Dominators
- backbone: set of block offsets
The set of block that is common to all possible code path.
"""
def __init__(self, bytecode):
self.bytecode = bytecode
self.blocks = {}
self.liveblocks = {}
self.blockseq = []
self.doms = None
self.backbone = None
# Internal temp states
self._force_new_block = True
self._curblock = None
self._blockstack = []
self._loops = []
self._withs = []
def iterblocks(self):
"""
Return all blocks in sequence of occurrence
"""
for i in self.blockseq:
yield self.blocks[i]
def iterliveblocks(self):
"""
Return all live blocks in sequence of occurrence
"""
for i in self.blockseq:
if i in self.liveblocks:
yield self.blocks[i]
def incoming_blocks(self, block):
"""
Yield (incoming block, number of stack pops) pairs for *block*.
"""
for i, pops in block.incoming_jumps.items():
if i in self.liveblocks:
yield self.blocks[i], pops
def dump(self, file=None):
self.graph.dump(file=None)
def run(self):
for inst in self._iter_inst():
fname = "op_%s" % inst.opname
fn = getattr(self, fname, None)
if fn is not None:
fn(inst)
elif inst.is_jump:
# this catches e.g. try... except
l = Loc(self.bytecode.func_id.filename, inst.lineno)
if inst.opname in {"SETUP_EXCEPT", "SETUP_FINALLY"}:
msg = "'try' block not supported until python3.7 or later"
else:
msg = "Use of unsupported opcode (%s) found" % inst.opname
raise UnsupportedError(msg, loc=l)
else:
# Non-jump instructions are ignored
pass # intentionally
# Close all blocks
for cur, nxt in zip(self.blockseq, self.blockseq[1:]):
blk = self.blocks[cur]
if not blk.outgoing_jumps and not blk.terminating:
blk.outgoing_jumps[nxt] = 0
graph = CFGraph()
for b in self.blocks:
graph.add_node(b)
for b in self.blocks.values():
for out, pops in b.outgoing_jumps.items():
graph.add_edge(b.offset, out, pops)
graph.set_entry_point(min(self.blocks))
graph.process()
self.graph = graph
# Fill incoming
for b in self.blocks.values():
for out, pops in b.outgoing_jumps.items():
self.blocks[out].incoming_jumps[b.offset] = pops
# Find liveblocks
self.liveblocks = dict((i, self.blocks[i])
for i in self.graph.nodes())
for lastblk in reversed(self.blockseq):
if lastblk in self.liveblocks:
break
else:
raise AssertionError("No live block that exits!?")
# Find backbone
backbone = self.graph.backbone()
# Filter out in loop blocks (Assuming no other cyclic control blocks)
# This is to unavoid variable defined in loops to be considered as
# function scope.
inloopblocks = set()
for b in self.blocks.keys():
if self.graph.in_loops(b):
inloopblocks.add(b)
self.backbone = backbone - inloopblocks
def jump(self, target, pops=0):
"""
Register a jump (conditional or not) to *target* offset.
*pops* is the number of stack pops implied by the jump (default 0).
"""
self._curblock.outgoing_jumps[target] = pops
def _iter_inst(self):
for inst in self.bytecode:
if self._use_new_block(inst):
self._guard_with_as(inst)
self._start_new_block(inst)
self._curblock.body.append(inst.offset)
yield inst
def _use_new_block(self, inst):
if inst.offset in self.bytecode.labels:
res = True
elif inst.opname in NEW_BLOCKERS:
res = True
else:
res = self._force_new_block
self._force_new_block = False
return res
def _start_new_block(self, inst):
self._curblock = CFBlock(inst.offset)
self.blocks[inst.offset] = self._curblock
self.blockseq.append(inst.offset)
def _guard_with_as(self, current_inst):
"""Checks if the next instruction after a SETUP_WITH is something other
than a POP_TOP, if it is something else it'll be some sort of store
which is not supported (this corresponds to `with CTXMGR as VAR(S)`)."""
if current_inst.opname == "SETUP_WITH":
next_op = self.bytecode[current_inst.next].opname
if next_op != "POP_TOP":
msg = ("The 'with (context manager) as "
"(variable):' construct is not "
"supported.")
raise UnsupportedError(msg)
def op_SETUP_LOOP(self, inst):
end = inst.get_jump_target()
self._blockstack.append(end)
self._loops.append((inst.offset, end))
# TODO: Looplifting requires the loop entry be its own block.
# Forcing a new block here is the simplest solution for now.
# But, we should consider other less ad-hoc ways.
self.jump(inst.next)
self._force_new_block = True
def op_SETUP_WITH(self, inst):
end = inst.get_jump_target()
self._blockstack.append(end)
self._withs.append((inst.offset, end))
# TODO: WithLifting requires the loop entry be its own block.
# Forcing a new block here is the simplest solution for now.
# But, we should consider other less ad-hoc ways.
self.jump(inst.next)
self._force_new_block = True
def op_POP_BLOCK(self, inst):
self._blockstack.pop()
def op_FOR_ITER(self, inst):
self.jump(inst.get_jump_target())
self.jump(inst.next)
self._force_new_block = True
def _op_ABSOLUTE_JUMP_IF(self, inst):
self.jump(inst.get_jump_target())
self.jump(inst.next)
self._force_new_block = True
op_POP_JUMP_IF_FALSE = _op_ABSOLUTE_JUMP_IF
op_POP_JUMP_IF_TRUE = _op_ABSOLUTE_JUMP_IF
op_JUMP_IF_FALSE = _op_ABSOLUTE_JUMP_IF
op_JUMP_IF_TRUE = _op_ABSOLUTE_JUMP_IF
def _op_ABSOLUTE_JUMP_OR_POP(self, inst):
self.jump(inst.get_jump_target())
self.jump(inst.next, pops=1)
self._force_new_block = True
op_JUMP_IF_FALSE_OR_POP = _op_ABSOLUTE_JUMP_OR_POP
op_JUMP_IF_TRUE_OR_POP = _op_ABSOLUTE_JUMP_OR_POP
def op_JUMP_ABSOLUTE(self, inst):
self.jump(inst.get_jump_target())
self._force_new_block = True
def op_JUMP_FORWARD(self, inst):
self.jump(inst.get_jump_target())
self._force_new_block = True
def op_RETURN_VALUE(self, inst):
self._curblock.terminating = True
self._force_new_block = True
def op_RAISE_VARARGS(self, inst):
self._curblock.terminating = True
self._force_new_block = True
def op_BREAK_LOOP(self, inst):
self.jump(self._blockstack[-1])
self._force_new_block = True
``` |
[
{
"content": "Here is the code content:\n```python\n# -*- coding: UTF-8 -*-\n#\n# Copyright (c) 2009-2010 Ars Aperta, Itaapy, Pierlis, Talend.\n#\n# Authors: David Versmisse <david.versmisse@itaapy.com>\n# Hervé Cauwelier <herve@itaapy.com>\n# Romain Gauthier <romain@itaapy.com>\n#\n# This fil... | [
{
"content": "Here is the code content:\n<|memory_start|>```python\n# -*- coding: UTF-8 -*-\n#\n# Copyright (c) 2009-2010 Ars Aperta, Itaapy, Pierlis, Talend.\n#\n# Authors: David Versmisse <david.versmisse@itaapy.com>\n# Hervé Cauwelier <herve@itaapy.com>\n# Romain Gauthier <romain@itaapy.com... | ```python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2009-2010 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Authors: David Versmisse <david.versmisse@itaapy.com>
# Hervé Cauwelier <herve@itaapy.com>
# Romain Gauthier <romain@itaapy.com>
#
# This file is part of Lpod (see: http://lpod-project.org).
# Lpod is free software; you can redistribute it and/or modify it under
# the terms of either:
#
# a) the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option)
# any later version.
# Lpod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Lpod. If not, see <http://www.gnu.org/licenses/>.
#
# b) the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Import from the Standard Library
from datetime import date, datetime, timedelta
from decimal import Decimal as dec
from os import getcwd
from os.path import splitdrive, join, sep
from re import search
from sys import _getframe, modules
from warnings import warn
# Import from lpod
from datatype import Boolean, Date, DateTime, Duration
CELL_TYPES = ('boolean', 'currency', 'date', 'float', 'percentage', 'string',
'time')
STYLE_FAMILIES = ('paragraph', 'text', 'section', 'table', 'table-column',
'table-row', 'table-cell', 'table-page', 'chart',
'default', 'drawing-page', 'graphic', 'presentation',
'control', 'ruby', 'list', 'number', 'page-layout',
'presentation-page-layout', 'font-face', 'master-page')
NOTE_CLASSES = ('footnote', 'endnote')
# This DPI is computed to have:
# 640 px (width of your wiki) <==> 17 cm (width of a normal ODT page)
DPI = 640 * dec('2.54') / 17
######################################################################
# Private API
######################################################################
def _get_abspath(local_path):
"""Returns the absolute path to the required file.
"""
mname = _getframe(1).f_globals.get('__name__')
if mname == '__main__' or mname == '__init__':
mpath = getcwd()
else:
module = modules[mname]
if hasattr(module, '__path__'):
mpath = module.__path__[0]
elif '.' in mname:
mpath = modules[mname[:mname.rfind('.')]].__path__[0]
else:
mpath = mname
drive, mpath = splitdrive(mpath)
mpath = drive + join(mpath, local_path)
# Make it working with Windows. Internally we use always the "/".
if sep == '\\':
mpath = mpath.replace(sep, '/')
return mpath
def _make_xpath_query(element_name, family=None, text_style=None,
draw_id=None, draw_name=None, draw_style=None, draw_text_style=None,
table_name=None, table_style=None, style_name=None,
display_name=None, note_class=None, text_id=None, text_name=None,
office_name=None, office_title=None, outline_level=None, level=None,
page_layout=None, master_page=None, parent_style=None,
presentation_class=None, position=None, **kw):
query = [element_name]
attributes = kw
if text_style:
attributes['text:style-name'] = text_style
if family:
attributes['style:family'] = family
if draw_id:
attributes['draw:id'] = draw_id
if draw_name:
attributes['draw:name'] = draw_name
if draw_style:
attributes['draw:style-name'] = draw_style
if draw_text_style:
attributes['draw:text-style-name'] = draw_text_style
if table_name:
attributes['table:name'] = table_name
if table_style:
attributes['table:style-name'] = table_style
if style_name:
attributes['style:name'] = style_name
if display_name:
attributes['style:display-name'] = display_name
if note_class:
attributes['text:note-class'] = note_class
if text_id:
attributes['text:id'] = text_id
if text_name:
attributes['text:name'] = text_name
if office_name:
attributes['office:name'] = office_name
if office_title:
attributes['office:title'] = office_title
if outline_level:
attributes['text:outline-level'] = outline_level
if level:
attributes['text:level'] = level
if page_layout:
attributes['style:page-layout-name'] = page_layout
if master_page:
attributes['draw:master-page-name'] = master_page
if parent_style:
attributes['style:parent-style-name'] = parent_style
if presentation_class:
attributes['presentation:class'] = presentation_class
# Sort attributes for reproducible test cases
for qname in sorted(attributes):
value = attributes[qname]
if value is True:
query.append(u'[@%s]' % qname)
else:
query.append(u'[@%s="%s"]' % (qname, unicode(value)))
query = ''.join(query)
if position is not None:
# A position argument that mimics the behaviour of a python's list
if position >= 0:
position = str(position + 1)
elif position == -1:
position = 'last()'
else:
position = 'last()-%d' % (abs(position) - 1)
query = u'(%s)[%s]' % (query, position)
return query
# These are listed exhaustively for keeping count of
# implemented style types
family_mapping = {
'paragraph': ('style:style', 'paragraph'),
'text': ('style:style', 'text'),
'section': ('style:style', 'section'),
'table': ('style:style', 'table'),
'table-column': ('style:style', 'table-column'),
'table-row': ('style:style', 'table-row'),
'table-cell': ('style:style', 'table-cell'),
'drawing-page': ('style:style', 'drawing-page'),
'graphic': ('style:style', 'graphic'),
'presentation': ('style:style', 'presentation'),
# False families
'list': ('text:list-style', None),
'outline': ('text:outline-style', None),
'page-layout': ('style:page-layout', None),
'presentation-page-layout': ('style:presentation-page-layout', None),
'master-page': ('style:master-page', None),
'font-face': ('style:font-face', None),
'number': ('number:number-style', None),
'percentage': ('number:percentage-style', None),
'time': ('number:time-style', None),
'date': ('number:date-style', None),
}
def _get_style_tagname(family):
if family not in family_mapping:
raise ValueError, "unknown family: " + family
return family_mapping[family]
def _get_style_family(name):
for family, (tagname, famattr) in family_mapping.iteritems():
if tagname == name:
return family
return None
def _expand_properties(properties):
# This mapping is not exhaustive, it only contains cases where replacing
# '_' with '-' and adding the "fo:" prefix is not enough
mapping = {# text
'font': 'style:font-name',
'size': 'fo:font-size',
'weight': 'fo:font-weight',
'style': 'fo:font-style',
'underline': 'style:text-underline-style',
'display': 'text:display',
'outline': 'style:text-outline',
'family_generic': 'style:font-family-generic',
'pitch': 'style:font-pitch',
# compliance with office suites
'font_style_name': 'style:font-style-name',
# paragraph
'align': 'fo:text-align',
'align-last': 'fo:text-align-last',
'indent': 'fo:text-indent',
'together': 'fo:keep-together',
# TODO 'page-break-before': 'fo:page-break-before',
# TODO 'page-break-after': 'fo:page-break-after',
'shadow': 'fo:text-shadow',
# Graphic
'stroke': 'draw:stroke',
'fill_color': 'draw:fill-color',
'fill_image_width': 'draw:fill-image-width',
'fill_image_height': 'draw:fill-image-height',
'textarea_vertical_align': 'draw:textarea-vertical-align',
'line_distance': 'draw:line-distance',
'guide_overhang': 'draw:guide-overhang',
'guide_distance': 'draw:guide-distance'
}
def map_key(key):
key = mapping.get(key, key).replace('_', '-')
if ":" not in key:
key = "fo:" + key
return key
if type(properties) is dict:
expanded = {}
for key, value in properties.iteritems():
key = map_key(key)
expanded[key] = value
elif type(properties) is list:
expanded = []
for key in properties:
key = map_key(key)
expanded.append(key)
return expanded
def _merge_dicts(d, *args, **kw):
"""Merge two or more dictionaries into a new dictionary object.
"""
new_d = d.copy()
for dic in args:
new_d.update(dic)
new_d.update(kw)
return new_d
#
# Non-public yet useful helpers
#
def _get_elements(context, element_name, content=None, url=None,
svg_title=None, svg_desc=None, dc_creator=None, dc_date=None, **kw):
query = _make_xpath_query(element_name, **kw)
elements = context.get_elements(query)
# Filter the elements with the regex (TODO use XPath)
if content is not None:
elements = [element for element in elements if element.match(content)]
if url is not None:
filtered = []
for element in elements:
url_attr = element.get_attribute('xlink:href')
if search(url, url_attr) is not None:
filtered.append(element)
elements = filtered
if dc_date is not None:
# XXX Date or DateTime?
dc_date = DateTime.encode(dc_date)
for variable, childname in [
(svg_title, 'svg:title'),
(svg_desc, 'svg:desc'),
(dc_creator, 'descendant::dc:creator'),
(dc_date, 'descendant::dc:date')]:
if not variable:
continue
filtered = []
for element in elements:
child = element.get_element(childname)
if child and child.match(variable):
filtered.append(element)
elements = filtered
return elements
def _get_element(context, element_name, position, **kw):
# TODO Transmit position not to load the whole list
result = _get_elements(context, element_name, **kw)
try:
return result[position]
except IndexError:
return None
def _set_value_and_type(element, value=None, value_type=None, text=None,
currency=None):
# Remove possible previous value and type
for name in ('office:value-type', 'office:boolean-value',
'office:value', 'office:date-value', 'office:string-value',
'office:time-value', 'table:formula'):
try:
element.del_attribute(name)
except KeyError:
pass
if type(value) is bool:
if value_type is None:
value_type = 'boolean'
if text is None:
text = u'true' if value else u'false'
value = Boolean.encode(value)
elif isinstance(value, (int, float, long, dec)):
if value_type is None:
value_type = 'float'
if text is None:
text = unicode(value)
value = str(value)
elif type(value) is date:
if value_type is None:
value_type = 'date'
if text is None:
text = unicode(Date.encode(value))
value = Date.encode(value)
elif type(value) is datetime:
if value_type is None:
value_type = 'date'
if text is None:
text = unicode(DateTime.encode(value))
value = DateTime.encode(value)
elif type(value) is str:
if value_type is None:
value_type = 'string'
if text is None:
text = unicode(value)
elif type(value) is unicode:
if value_type is None:
value_type = 'string'
if text is None:
text = value
elif type(value) is timedelta:
if value_type is None:
value_type = 'time'
if text is None:
text = unicode(Duration.encode(value))
value = Duration.encode(value)
elif value is not None:
raise TypeError, 'type "%s" is unknown' % type(value)
if value_type is not None:
element.set_attribute('office:value-type', value_type)
if value_type == 'boolean':
element.set_attribute('office:boolean-value', value)
elif value_type == 'currency':
element.set_attribute('office:value', value)
element.set_attribute('office:currency', currency)
elif value_type == 'date':
element.set_attribute('office:date-value', value)
elif value_type in ('float', 'percentage'):
element.set_attribute('office:value', value)
elif value_type == 'string':
element.set_attribute('office:string-value', value)
elif value_type == 'time':
element.set_attribute('office:time-value', value)
return text
######################################################################
# Public API
######################################################################
def get_value(element, value_type=None, try_get_text=True):
"""Only for "with office:value-type" elements
"""
if value_type is None:
value_type = element.get_attribute('office:value-type')
if value_type == 'boolean':
value = element.get_attribute('office:boolean-value')
return Boolean.decode(value)
elif value_type in ('float', 'percentage', 'currency'):
value = dec(element.get_attribute('office:value'))
# Return 3 instead of 3.0 if possible
if int(value) == value:
return int(value)
return value
elif value_type == 'date':
value = element.get_attribute('office:date-value')
if 'T' in value:
return DateTime.decode(value)
else:
return Date.decode(value)
elif value_type == 'string':
value = element.get_attribute('office:string-value')
if value is not None:
return unicode(value)
if try_get_text:
value = []
for para in element.get_elements('text:p'):
value.append(para.get_text(recursive=True))
if value:
return u"\n".join(value)
return None
elif value_type == 'time':
value = element.get_attribute('office:time-value')
return Duration.decode(value)
elif value_type is None:
return None
raise ValueError, 'unexpected value type "%s"' % value_type
def set_value(element, value):
"""Only for "with office:value-type" elements
"""
tag = element.get_tag()
# A table:cell ?
if tag == 'table:table-cell':
element.clear()
text = _set_value_and_type(element, value=value)
element.set_text_content(text)
return
# A text:variable-set ?
if tag == 'text:variable-set':
name = element.get_attribute('text:name')
display = element.get_attribute('text:display')
element.clear()
text = _set_value_and_type(element, value=value)
element.set_attribute('text:name', name)
if display is not None:
element.set_attribute('text:display', display)
element.set_text(text)
return
# A text:user-field-decl ?
if tag == 'text:user-field-decl':
name = element.get_attribute('text:name')
element.clear()
_set_value_and_type(element, value=value)
element.set_attribute('text:name', name)
return
# Else => error
raise ValueError, 'set_value: unexpected element "%s"' % tag
def convert_unicode(text):
"""Mostly used to compare lxml serialization to what is expected.
"""
result = []
for c in text:
code = ord(c)
if code >= 128:
result.append('&#%d;' % code)
else:
result.append(c)
return ''.join(result)
def oooc_to_ooow(formula):
"""Convert (proprietary) formula from calc format to writer format.
Arguments:
formula -- unicode
Return: unicode
"""
prefix, formula = formula.split(":=", 1)
assert "oooc" in prefix
# Convert cell addresses
formula = formula.replace("[.", "<").replace(":.", ":").replace("]", ">")
# Convert functions
formula = formula.replace("SUM(", "sum ").replace(")", "")
return "ooow:" + formula
def obsolete(old_name, new_func, *args, **kw):
def decorate(*dec_args, **dec_kw):
new_name = new_func.__name__
if args:
new_name += '(' + ', '.join(repr(x) for x in args) + ')'
message = '"%s" is obsolete, call "%s" instead' % (old_name,
new_name)
warn(message, category=DeprecationWarning)
return new_func(*(dec_args + args), **dec_kw)
return decorate
def isiterable(obj):
if isinstance(obj, basestring):
return False
try:
iter(obj)
except TypeError:
return False
return True
``` |
[
{
"content": "Reconstruct the code exactly:\n```python\n# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>\n# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>\n# Created By:\n# Maintained By:\n\nfrom ggrc import db\nfrom sqlalchemy import event\nfrom sqlal... | [
{
"content": "Reconstruct the code exactly:\n<|memory_start|>```python\n# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>\n# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>\n# Created By:\n# Maintained By:\n\nfrom ggrc import db\nfrom sqlalchemy import e... | ```python
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By:
# Maintained By:
from ggrc import db
from sqlalchemy import event
from sqlalchemy.sql.expression import text
from sqlalchemy.schema import DDL
from .sql import SqlIndexer
class MysqlRecordProperty(db.Model):
__tablename__ = 'fulltext_record_properties'
__table_args__ = {'mysql_engine': 'myisam'}
key = db.Column(db.Integer, primary_key=True)
type = db.Column(db.String(64), primary_key=True)
tags = db.Column(db.String)
property = db.Column(db.String(64), primary_key=True)
content = db.Column(db.Text)
event.listen(
MysqlRecordProperty.__table__,
'after_create',
DDL('ALTER TABLE {tablename} ADD FULLTEXT INDEX {tablename}_text_idx '
'(content)'.format(tablename=MysqlRecordProperty.__tablename__))
)
class MysqlIndexer(SqlIndexer):
record_type = MysqlRecordProperty
def search(self, terms):
return db.session.query(self.record_type).filter(
'match (content) against (:terms)').params(terms=terms).all()
Indexer = MysqlIndexer
``` |
[
{
"content": "Write the code verbatim:\n```python\n#!/usr/bin/env python\n\n__author__ = 'Donovan Parks'\n__copyright__ = 'Copyright 2013'\n__credits__ = ['Donovan Parks']\n__license__ = 'GPL3'\n__version__ = '1.0.0'\n__maintainer__ = 'Donovan Parks'\n__email__ = 'donovan.parks@gmail.com'\n__status__ = 'Develop... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n#!/usr/bin/env python\n\n__author__ = 'Donovan Parks'\n__copyright__ = 'Copyright 2013'\n__credits__ = ['Donovan Parks']\n__license__ = 'GPL3'\n__version__ = '1.0.0'\n__maintainer__ = 'Donovan Parks'\n__email__ = 'donovan.parks@gmail.com'\n__sta... | ```python
#!/usr/bin/env python
__author__ = 'Donovan Parks'
__copyright__ = 'Copyright 2013'
__credits__ = ['Donovan Parks']
__license__ = 'GPL3'
__version__ = '1.0.0'
__maintainer__ = 'Donovan Parks'
__email__ = 'donovan.parks@gmail.com'
__status__ = 'Development'
import argparse
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def doWork(args):
fin = open(args.inputFile)
data = fin.readlines()
fin.close()
# determine if format is sparse or dense
bSparse = False
for line in data:
if line[0] == '#':
continue
lineSplit = line.split('\t')
if len(lineSplit) == 2 or len(lineSplit) == 3:
bSparse = True
break
# convert to EBD format
sampleOTUs = {}
otuIds = set([])
if bSparse:
print('Parsing OTU data in sparse biom-format.')
for line in data:
if line[0] == '#' or line.strip() == '':
continue
lineSplit = line.split('\t')
otuId = lineSplit[0]
sampleId = lineSplit[1]
count = 1
if len(lineSplit) == 3:
count = float(lineSplit[2])
if sampleId not in sampleOTUs:
sampleOTUs[sampleId] = {}
sampleOTUs[sampleId][otuId] = sampleOTUs[sampleId].get(otuId, 0) + count
otuIds.add(otuId)
else:
print('Parsing OTU data in dense biom-format.')
for line in data:
if '#OTU ID' in line or 'OTU ID' in line or '#otu id' in line or 'otu id' in line:
lineSplit = line.split('\t')
if lineSplit[-1].strip() == 'Consensus Lineage':
# QIIME-formatted OTU table with taxonomic information
sampleIds = [x.strip() for x in lineSplit[1:-1]]
else:
# standard biom-formatted table
sampleIds = [x.strip() for x in lineSplit[1:]]
for sampleId in sampleIds:
sampleOTUs[sampleId] = {}
continue
if line[0] == '#' or line.strip() == '':
continue
lineSplit = line.split('\t')
otuId = lineSplit[0]
counts = [float(x) for x in lineSplit[1:len(sampleIds)+1]]
for i in range(0, len(sampleIds)):
sampleOTUs[sampleIds[i]][otuId] = counts[i]
otuIds.add(otuId)
# write EBD OTU table
fout = open(args.outputFile, 'w')
sortedOTUs = sorted(list(otuIds))
for otuId in sortedOTUs:
fout.write('\t' + otuId)
fout.write('\n')
for sampleId in sampleOTUs:
fout.write(sampleId)
for otuId in sortedOTUs:
if otuId in sampleOTUs[sampleId]:
fout.write('\t' + str(sampleOTUs[sampleId][otuId]))
else:
fout.write('\t0')
fout.write('\n')
print('EBD formatted OTU data written to: ' + args.outputFile)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Convert UniFrac environment files for use with EBD.")
parser.add_argument('inputFile', help='Input OTU table in sparse or dense UniFrac format.')
parser.add_argument('outputFile', help='Output OTU table in EBD format.')
args = parser.parse_args()
doWork(args)
``` |
[
{
"content": "Reconstruct the code exactly:\n```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.11.7 on 2017-11-03 03:24\nfrom __future__ import unicode_literals\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport sigs.managers\n\n\ncl... | [
{
"content": "Reconstruct the code exactly:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.11.7 on 2017-11-03 03:24\nfrom __future__ import unicode_literals\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport sigs.... | ```python
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-03 03:24
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import sigs.managers
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='SIG',
fields=[
('id', models.CharField(default='test', help_text='The UUID of the Special Interest Group.', max_length=25, primary_key=True, serialize=False, verbose_name='SIG ID')),
('is_active', models.BooleanField(default=True, help_text='Whether or not the SIG is active', verbose_name='Is Active')),
('date_created', models.DateTimeField(auto_now_add=True, help_text='The date the SIG was created.', verbose_name='Date Created')),
('description', models.CharField(help_text='A description of what the special interest of the group is.', max_length=1000, verbose_name='Description')),
('chair', models.ForeignKey(help_text='The current Chair of the SIG.', on_delete=django.db.models.deletion.PROTECT, related_name='chair', to=settings.AUTH_USER_MODEL, verbose_name='SIG Chair')),
('founder', models.ForeignKey(help_text='The person who founded the SIG.', on_delete=django.db.models.deletion.PROTECT, related_name='founder', to=settings.AUTH_USER_MODEL, verbose_name='SIG Founder')),
],
managers=[
('objects', sigs.managers.SIGManager()),
],
),
]
``` |
[
{
"content": "Return the code exactly, with no changes:\n```python\nimport requests\n\nhost = 'localhost'\nconfiguration = 'Debug'\nservice_name = 'DEQEnviro/MapService'\n\ntoken_url = 'http://{}:6080/arcgis/admin/generateToken'.format(host)\nupdate_soe_url = 'http://{}:6080/arcgis/admin/services/types/extensio... | [
{
"content": "Return the code exactly, with no changes:\n<|memory_start|>```python\nimport requests\n\nhost = 'localhost'\nconfiguration = 'Debug'\nservice_name = 'DEQEnviro/MapService'\n\ntoken_url = 'http://{}:6080/arcgis/admin/generateToken'.format(host)\nupdate_soe_url = 'http://{}:6080/arcgis/admin/service... | ```python
import requests
host = 'localhost'
configuration = 'Debug'
service_name = 'DEQEnviro/MapService'
token_url = 'http://{}:6080/arcgis/admin/generateToken'.format(host)
update_soe_url = 'http://{}:6080/arcgis/admin/services/types/extensions/update'.format(
host)
upload_url = 'http://{}:6080/arcgis/admin/uploads/upload?token={}'.format(
host, '{}')
start_service_url = 'http://{}:6080/arcgis/admin/services/{}.MapServer/start'.format(
host, service_name)
file_name = r'C:\Projects\GitHub\deq-enviro\api\Deq.Search.Soe\bin\{}\Deq.Search.Soe.soe'.format(
configuration)
data = {'username': '',
'password': '',
'client': 'requestip',
'f': 'json'}
r = requests.post(token_url, data=data)
data = {'f': 'json'}
print 'got token'
files = {'itemFile': open(file_name, 'rb'),
'f': 'json'}
data['token'] = r.json()['token']
print 'uploading'
r = requests.post(upload_url.format(data['token']), files=files)
print r.status_code, r.json()['status']
data['id'] = r.json()['item']['itemID']
print 'updating', data['id']
r = requests.post(update_soe_url, params=data)
print r.status_code, r.json()['status']
print 'starting service'
r = requests.post(
start_service_url, params={'f': 'json', 'token': data['token']})
print r.status_code, r.json()['status']
print 'done'
``` |
[
{
"content": "Repeat the code exactly:\n```python\nfrom flask import Flask, jsonify\nimport threading\nimport zmq\nimport time\nimport logging\nfrom Queue import Queue\n\n# Clear the Log file if it exists\nwith open(\"server.log\", \"w\"):\n pass\n\nlogging.basicConfig(filename='server.log',level=logging.DEB... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\nfrom flask import Flask, jsonify\nimport threading\nimport zmq\nimport time\nimport logging\nfrom Queue import Queue\n\n# Clear the Log file if it exists\nwith open(\"server.log\", \"w\"):\n pass\n\nlogging.basicConfig(filename='server.log',l... | ```python
from flask import Flask, jsonify
import threading
import zmq
import time
import logging
from Queue import Queue
# Clear the Log file if it exists
with open("server.log", "w"):
pass
logging.basicConfig(filename='server.log',level=logging.DEBUG,\
format='%(levelname)s:%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
app = Flask(__name__)
context = zmq.Context()
servers = ['tcp://127.0.0.1:5558', 'tcp://127.0.0.1:5559']
servers_heartbeats = ['tcp://127.0.0.1:6668', 'tcp://127.0.0.1:6669']
server_nbr = 0
message_queue = Queue()
primary_router_msg = context.socket(zmq.PUB)
primary_router_msg.connect(servers[0])
backup_router_msg = context.socket(zmq.PUB)
backup_router_msg.connect(servers[1])
@app.route("/square/<int:num>")
def square(num):
message_queue.put(num)
return jsonify(status="Work will be sent to worker!")
@app.route("/")
def root():
return jsonify(status="Web server is running!")
@app.route("/health")
def health():
return jsonify(heath="It's all good :)")
def message_sender():
global servers
global server_nbr
global context
global send_message
while True:
message = message_queue.get()
print message
if server_nbr == 0:
primary_router_msg.send("%s %s" %("DATA", message))
elif server_nbr == 1:
backup_router_msg.send("%s %s" %("DATA", message))
message_queue.task_done()
# Background thread to do heartbeat with router
def heartbeat_listener():
# We want to modify the global states server_nbr
# and use global zeromq context
global servers_heartbeats
global server_nbr
global context
HEARTBEAT_TIMEOUT = 1000 * 5 # Timeout in seconds
DELAY = 3000
router_heartbeat = context.socket(zmq.REQ)
router_heartbeat.connect(servers_heartbeats[server_nbr])
poller = zmq.Poller()
poller.register(router_heartbeat, zmq.POLLIN)
heartbeat = "HB"
while True:
try:
router_heartbeat.send(heartbeat,zmq.NOBLOCK)
expect_reply = True
except:
except_reply = False
pass
while expect_reply:
socks = dict(poller.poll(HEARTBEAT_TIMEOUT))
if router_heartbeat in socks:
reply = router_heartbeat.recv(zmq.NOBLOCK)
expect_reply = False
else:
logging.warning("Router is probably dead. Connecting to backup router")
time.sleep(DELAY/1000)
# Unregister old socket and delete it
poller.unregister(router_heartbeat)
router_heartbeat.close()
# Change server and recreate sockets
server_nbr = (server_nbr + 1) % 2
router_heartbeat = context.socket(zmq.REQ)
poller.register(router_heartbeat, zmq.POLLIN)
# reconnect and resend request
router_heartbeat.connect(servers_heartbeats[server_nbr])
router_heartbeat.send(heartbeat,zmq.NOBLOCK)
if __name__ == "__main__":
app.debug = True
logging.info("Starting a heartbeat daemon process...")
listner = threading.Thread(name="Heartbeat_listener", target = heartbeat_listener).start()
sender = threading.Thread(name="Message sender", target = message_sender).start()
logging.info("**** Daemon started. Now running app server ****")
app.run(threaded=True)
logging.error("App server crashed.")
context.term()
``` |
[
{
"content": "Here is a code file:\n```python\n#!/usr/bin/env python3\n\n# This file is part of the Soletta Project\n#\n# Copyright (C) 2015 Intel Corporation. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following... | [
{
"content": "Here is a code file:\n<|memory_start|>```python\n#!/usr/bin/env python3\n\n# This file is part of the Soletta Project\n#\n# Copyright (C) 2015 Intel Corporation. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided th... | ```python
#!/usr/bin/env python3
# This file is part of the Soletta Project
#
# Copyright (C) 2015 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from multiprocessing import Manager
from threading import Thread
import argparse
import subprocess
WARN = '\033[93m'
PASS = '\033[92m'
FAIL = '\033[31m'
ENDC = '\033[0m'
STATUS_COLOR = [FAIL, PASS]
STATUS_TAG = ["FAIL", "PASS"]
def run_test_program(cmd, test, stat, log):
success = 1
print("%sRUNNING: %s%s" % (WARN, ENDC, test))
try:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT,
shell=True, universal_newlines=True)
log.append(output)
except subprocess.CalledProcessError as e:
success = 0
log.append(e.output)
stat[test] = success
# print out each test's result right away
print("%s%s:%s %s" % (STATUS_COLOR[success], STATUS_TAG[success], ENDC, test))
def print_log(log_file, stat, log):
output = ""
status_cnt = {'FAIL': 0, 'PASS': 0}
for k,v in sorted(stat.items()):
curr_status = STATUS_TAG[v]
status_cnt[curr_status] = status_cnt[curr_status] + 1
output += "============================================================================\n"
output += "Testsuite summary\n"
output += "============================================================================\n"
output += "# TOTAL: %d\n" % len(stat)
output += "# SUCCESS: %d\n" % status_cnt["PASS"]
output += "# FAIL: %d\n" % status_cnt["FAIL"]
output += "============================================================================\n"
output += "See %s\n" % log_file
output += "============================================================================\n"
# show the stat in the stdout
print(output)
log_output = ""
f = open(log_file, mode="w+")
for i in log:
log_output += "%s\n" % i
f.write(log_output)
f.close()
def run_valgrind_test(args):
manager = Manager()
common_args = "--error-exitcode=1 --num-callers=30"
valgrind_tools = {
'memcheck': '--leak-check=full --show-reachable=no',
'helgrind': '--history-level=approx',
'drd': None,
'exp-sgcheck': None,
}
for k,v in valgrind_tools.items():
stat = manager.dict()
log = manager.list()
threads = []
for i in args.tests.split():
cmd = "{valgrind} {test_path} {supp} --tool={tool} {tool_args} {common}". \
format(valgrind=args.valgrind, test_path=i, supp=args.valgrind_supp, \
tool=k, tool_args=v, common=common_args)
t = Thread(target=run_test_program, args=(cmd, i, stat, log,))
t.start()
threads.append(t)
for t in threads:
t.join()
print_log("test-suite-%s.log" % k, stat, log)
def run_test(args):
manager = Manager()
stat = manager.dict()
log = manager.list()
threads = []
for i in args.tests.split():
t = Thread(target=run_test_program, args=(i, i, stat, log,))
t.start()
threads.append(t)
for t in threads:
t.join()
print_log("test-suite.log", stat, log)
def run_suite(args):
if args.valgrind:
run_valgrind_test(args)
else:
run_test(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--tests", help="List of tests to run", type=str)
parser.add_argument("--valgrind", help="Path to valgrind, if provided " \
"the tests are run with it", type=str)
parser.add_argument("--valgrind-supp", help="Path to valgrind's suppression file", type=str)
args = parser.parse_args()
if args.valgrind_supp:
args.valgrind_supp = "--suppressions=%s" % args.valgrind_supp
run_suite(args)
``` |
[
{
"content": "Repeat the full code snippet:\n```python\nfrom django.db import models as model_fields\nfrom django.conf.urls import url, include\nfrom django.contrib.auth import models as django_models\nfrom polymorphic import PolymorphicModel\nfrom cabot.cabotapp import models\nfrom rest_framework import router... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\nfrom django.db import models as model_fields\nfrom django.conf.urls import url, include\nfrom django.contrib.auth import models as django_models\nfrom polymorphic import PolymorphicModel\nfrom cabot.cabotapp import models\nfrom rest_framewo... | ```python
from django.db import models as model_fields
from django.conf.urls import url, include
from django.contrib.auth import models as django_models
from polymorphic import PolymorphicModel
from cabot.cabotapp import models
from rest_framework import routers, serializers, viewsets, mixins
import logging
logger = logging.getLogger(__name__)
router = routers.DefaultRouter()
def create_viewset(arg_model, arg_fields, arg_read_only_fields=(), no_create=False):
arg_read_only_fields = ('id',) + arg_read_only_fields
for field in arg_read_only_fields:
if field not in arg_fields:
arg_fields = arg_fields + (field,)
class Serializer(serializers.ModelSerializer):
class Meta:
model = arg_model
fields = arg_fields
read_only_fields = arg_read_only_fields
viewset_class = None
if no_create:
class NoCreateViewSet(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet):
pass
viewset_class = NoCreateViewSet
else:
viewset_class = viewsets.ModelViewSet
arg_queryset = None
if issubclass(arg_model, PolymorphicModel):
arg_queryset = arg_model.objects.instance_of(arg_model)
else:
arg_queryset = arg_model.objects.all()
class ViewSet(viewset_class):
queryset = arg_queryset
serializer_class = Serializer
ordering = ['id']
filter_fields = arg_fields
return ViewSet
check_group_mixin_fields = (
'name',
'users_to_notify',
'alerts_enabled',
'status_checks',
'email_alert',
'hipchat_alert',
'sms_alert',
'telephone_alert',
'hackpad_id',
)
router.register(r'services', create_viewset(
arg_model=models.Service,
arg_fields=check_group_mixin_fields + (
'url',
),
))
router.register(r'instances', create_viewset(
arg_model=models.Instance,
arg_fields=check_group_mixin_fields + (
'address',
),
))
status_check_fields = (
'name',
'active',
'importance',
'frequency',
'debounce',
)
router.register(r'status_checks', create_viewset(
arg_model=models.StatusCheck,
arg_fields=status_check_fields,
no_create=True,
))
router.register(r'icmp_checks', create_viewset(
arg_model=models.ICMPStatusCheck,
arg_fields=status_check_fields,
))
router.register(r'graphite_checks', create_viewset(
arg_model=models.GraphiteStatusCheck,
arg_fields=status_check_fields + (
'metric',
'check_type',
'value',
'expected_num_hosts',
),
))
router.register(r'http_checks', create_viewset(
arg_model=models.HttpStatusCheck,
arg_fields=status_check_fields + (
'endpoint',
'username',
'password',
'text_match',
'status_code',
'timeout',
'verify_ssl_certificate',
),
))
router.register(r'jenkins_checks', create_viewset(
arg_model=models.JenkinsStatusCheck,
arg_fields=status_check_fields + (
'max_queued_build_time',
),
))
'''
Omitting user API, could expose/allow modifying dangerous fields.
router.register(r'users', create_viewset(
arg_model=django_models.User,
arg_fields=(
'password',
'is_active',
'groups',
#'user_permissions', # Doesn't work, removing for now
'username',
'first_name',
'last_name',
'email',
),
))
router.register(r'user_profiles', create_viewset(
arg_model=models.UserProfile,
arg_fields=(
'user',
'mobile_number',
'hipchat_alias',
'fallback_alert_user',
),
))
'''
router.register(r'shifts', create_viewset(
arg_model=models.Shift,
arg_fields=(
'start',
'end',
'user',
'uid',
'deleted',
)
))
``` |
[
{
"content": "Here is some code:\n```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nThis experiment was created using PsychoPy2 Experiment Builder (v1.90.1),\n on Tue May 8 15:02:31 2018\n\nCreated 5/8/18 by DJ.\nUpdated 5/8/18 by DJ - added NetStationEEG code from https://github.com/imnota... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nThis experiment was created using PsychoPy2 Experiment Builder (v1.90.1),\n on Tue May 8 15:02:31 2018\n\nCreated 5/8/18 by DJ.\nUpdated 5/8/18 by DJ - added NetStationEEG code from https://g... | ```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This experiment was created using PsychoPy2 Experiment Builder (v1.90.1),
on Tue May 8 15:02:31 2018
Created 5/8/18 by DJ.
Updated 5/8/18 by DJ - added NetStationEEG code from https://github.com/imnotamember/PyNetstation (per Pete's instructions)
Updated 6/8/18 by DJ - made into list of movie files instead of single movie files
"""
from __future__ import absolute_import, division
from psychopy import locale_setup, sound, gui, visual, core, data, event, logging, clock
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
import numpy as np # whole numpy lib is available, prepend 'np.'
from numpy import (sin, cos, tan, log, log10, pi, average,
sqrt, std, deg2rad, rad2deg, linspace, asarray)
from numpy.random import random, randint, normal, shuffle
import os # handy system and path functions
import sys # to get file system encoding
# Declare movie params
params = {
'movieFileList': u'/Users/jangrawdc/Documents/Python/PsychoPyParadigms/BasicExperiments/Movies/MovieList.txt', # text file with spaces/linebreaks between movies
'imiDur': 5.0, # time between movies.
'warmUpTime': 6.0, # time before first movie
'coolDownTime': 6.0, # time after last movie
'movieSize': (640.0*3,360.0*3), # for Boldscreen
'fixCrossHeight': 0.5,
# eeg params
'isEegConnected': False, # is an EGI EEG system connected?
'tcpipAddress': '10.10.10.42',
'tcpipPort': 55513
}
# Load movies
fid = open(params['movieFileList'],'r')
movieFileText = fid.read()
movieFiles = movieFileText.split()
fid.close()
print "%d movie files read."%(len(movieFiles))
print(movieFiles)
# === EEG === #
# === Initialize
if params['isEegConnected'] == False:
# # This will import the debugging version of the PyNetStation module,
# # which will not actually attempt a connection but will check to make sure
# # your code is properly functioning.
import egi.fake as egi # FOR TESTING WITHOUT CONNECTION TO NETSTATION COMPUTER
else:
# # This will import the single-threaded version of the PyNetStation module
import egi.simple as egi # FOR RUNNING CONNECTED TO NETSTATION COMPUTER -- USE THIS IN A REAL EXPERIMENT
# === Timing Obj
# # Create a proper timing object to reference. To retrieve the time you want later,
# # call this method using ms_localtime(), it returns the time in a millisecond format
# # appropriate for the NetStation TCP/IP protocol.
# # This is only necessary if you are in need of direct contact with the clock object that NetStation is utilizing,
# # which you don't actually need since it's working behind the scenes in the egi module.
# ms_localtime = egi.ms_localtime
# === Netstation Obj
# # Create the NetStation event-sending object. After this you can call
# # the methods via the object instance, in this case 'ns'.
ns = egi.Netstation()
# === Establish Cxn
# # The next line is for connecting the actual, single-threaded module version to the computer.
if params['isEegConnected']:
ns.connect(params['tcpipAddress'], params['tcpipPort']) # sample address and port -- change according to your network settings
# === Link Expt to Session
# # This sends some initialization info to NetStation for recording events.
ns.BeginSession()
# # This synchronizes the clocks of the stim computer and the NetStation computer.
ns.sync()
# === END EEG === #
# Ensure that relative paths start from the same directory as this script
_thisDir = os.path.dirname(os.path.abspath(__file__)).decode(sys.getfilesystemencoding())
os.chdir(_thisDir)
# Store info about the experiment session
expName = u'MovieListTask_Builder_d1' # Name of experiment
expInfo = {'session': '001', 'participant': ''}
dlg = gui.DlgFromDict(dictionary=expInfo, title=expName)
if dlg.OK == False:
core.quit() # user pressed cancel
expInfo['date'] = data.getDateStr() # add a simple timestamp
expInfo['expName'] = expName
# Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc
filename = _thisDir + os.sep + u'data/%s_%s_%s' % (expInfo['participant'], expName, expInfo['date'])
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(name=expName, version='',
extraInfo=expInfo, runtimeInfo=None,
originPath=u'/Users/jangrawdc/Documents/Python/PsychoPyParadigms/BasicExperiments/TEST.psyexp',
savePickle=True, saveWideText=True,
dataFileName=filename)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen, not a file
endExpNow = False # flag for 'escape' or other condition => quit the exp
# Start Code - component code to be run before the window creation
# Setup the Window
win = visual.Window(
size=(1024, 768), fullscr=True, screen=0,
allowGUI=False, allowStencil=False,
monitor='testMonitor', color=[0,0,0], colorSpace='rgb',
blendMode='avg', useFBO=True)
# store frame rate of monitor if we can measure it
expInfo['frameRate'] = win.getActualFrameRate()
if expInfo['frameRate'] != None:
frameDur = 1.0 / round(expInfo['frameRate'])
else:
frameDur = 1.0 / 60.0 # could not measure, so guess
# Initialize components for Routine "Trigger"
TriggerClock = core.Clock()
TriggerText = visual.TextStim(win=win, name='TriggerText',
text=u'Waiting for trigger...\n\n(Experimenter: press t to override.)',
font=u'Arial',
pos=(0, 0), height=0.1, wrapWidth=None, ori=0,
color=u'white', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "Fixation"
fixationClock = core.Clock()
fixCross = visual.TextStim(win=win, name='fixCross',
text=u'+',
font=u'Arial',
pos=(0, 0), height=params['fixCrossHeight'], wrapWidth=None, ori=0,
color=u'white', colorSpace='rgb', opacity=1,
depth=0.0);
# Initialize components for Routine "Movie"
MovieClock = core.Clock()
movie = []
movieDur = [0]*len(movieFiles)
for i in range(len(movieFiles)):
movie.append(visual.MovieStim3(
win=win, name='movie',
noAudio = False,
filename=movieFiles[i],
ori=0, pos=(0, 0), size=params['movieSize'], opacity=1,
depth=0.0,
))
# save out duration of this movie
movieDur[i] = movie[i].duration
# print results for debugging
print('Movie %d: loaded %s'%(i, movieFiles[i]))
print('duration: %f'%movieDur[i]);
ImiText = fixCross
# Initialize components for Routine "WaitForEnd"
WaitForEndClock = core.Clock()
WaitForEndText = visual.TextStim(win=win, name='WaitForEndText',
text=u'Please stay still until\nthe scanner noise stops.',
font=u'Arial',
pos=(0, 0), height=0.1, wrapWidth=None, ori=0,
color=u'white', colorSpace='rgb', opacity=1,
depth=0.0);
# Create some handy timers
globalClock = core.Clock() # to track the time since experiment started
routineTimer = core.CountdownTimer() # to track time remaining of each (non-slip) routine
# === EEG === #
# # This starts the recording in NetStation acquisition. Equivalent to pressing the Record button.
# # If at some point you pause the experiment using the "StopRecording()" method,
# # just call this method again to restart the recording.
ns.StartRecording()
# === END EEG === #
# ------Prepare to start Routine "Trigger"-------
t = 0
TriggerClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
TriggerKey = event.BuilderKeyResponse()
# keep track of which components have finished
TriggerComponents = [TriggerText, TriggerKey]
for thisComponent in TriggerComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='WAIT', timestamp=None, label="WaitForTrigger", description="Waiting for Trigger from fMRI", pad=False)
# === END EEG === #
# -------Start Routine "Trigger"-------
while continueRoutine:
# get current time
t = TriggerClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *TriggerText* updates
if t >= 0.0 and TriggerText.status == NOT_STARTED:
# keep track of start time/frame for later
TriggerText.tStart = t
TriggerText.frameNStart = frameN # exact frame index
TriggerText.setAutoDraw(True)
# *TriggerKey* updates
if t >= 0.0 and TriggerKey.status == NOT_STARTED:
# keep track of start time/frame for later
TriggerKey.tStart = t
TriggerKey.frameNStart = frameN # exact frame index
TriggerKey.status = STARTED
# keyboard checking is just starting
win.callOnFlip(TriggerKey.clock.reset) # t=0 on next screen flip
event.clearEvents(eventType='keyboard')
if TriggerKey.status == STARTED:
theseKeys = event.getKeys(keyList=['t'])
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
TriggerKey.keys = theseKeys[-1] # just the last key pressed
TriggerKey.rt = TriggerKey.clock.getTime()
# a response ends the routine
continueRoutine = False
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in TriggerComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "Trigger"-------
for thisComponent in TriggerComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# check responses
if TriggerKey.keys in ['', [], None]: # No response was made
TriggerKey.keys=None
thisExp.addData('TriggerKey.keys',TriggerKey.keys)
if TriggerKey.keys != None: # we had a response
thisExp.addData('TriggerKey.rt', TriggerKey.rt)
thisExp.nextEntry()
# the Routine "Trigger" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# ------Prepare to start Routine "fixation"-------
t = 0
fixationClock.reset() # clock
frameN = -1
continueRoutine = True
routineTimer.add(params['warmUpTime'])
# update component parameters for each repeat
# keep track of which components have finished
fixationComponents = [fixCross]
for thisComponent in fixationComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='FIX', timestamp=None, label="Fixation", description="Fixation Cross", pad=False)
# === END EEG === #
# -------Start Routine "fixation"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = fixationClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *fixCross* updates
if t >= 0.0 and fixCross.status == NOT_STARTED:
# keep track of start time/frame for later
fixCross.tStart = t
fixCross.frameNStart = frameN # exact frame index
fixCross.setAutoDraw(True)
frameRemains = 0.0 + params['warmUpTime'] - win.monitorFramePeriod * 0.75 # most of one frame period left
if fixCross.status == STARTED and t >= frameRemains:
fixCross.setAutoDraw(False)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in fixationComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "fixation"-------
for thisComponent in fixationComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# ------Prepare to start Routine "Movie"-------
# update component parameters for each repeat
# keep track of which components have finished
MovieComponents = movie + [ImiText]
for thisComponent in MovieComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# ------Start Movie Loop-------
for i in range(len(movie)):
# initialize vars
t = 0
MovieClock.reset() # clock
frameN = -1
continueRoutine = True
# Update timer
if i<(len(movie)-1):
routineTimer.add(movieDur[i]+params['imiDur'])
else:
routineTimer.add(movieDur[i])
# Make sure repeated-use ImiText object is listed as "not started"
ImiText.status = NOT_STARTED
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='MOVI', timestamp=None, label="StartMovie%d"%i, description="Started Movie %d"%i, pad=False)
# === END EEG === #
# -------Start Routine "Movie"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = MovieClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *movie* updates
if t >= 0.0 and movie[i].status == NOT_STARTED:
# keep track of start time/frame for later
movie[i].tStart = t
movie[i].frameNStart = frameN # exact frame index
movie[i].setAutoDraw(True)
frameRemains = 0.0 + movieDur[i]- win.monitorFramePeriod * 0.75 # most of one frame period left
if movie[i].status == STARTED and t >= frameRemains:
movie[i].setAutoDraw(False)
# *ImiText* updates
if t >= movieDur[i] and ImiText.status == NOT_STARTED and i<(len(movie)-1):
# keep track of start time/frame for later
ImiText.tStart = t
ImiText.frameNStart = frameN # exact frame index
ImiText.setAutoDraw(True)
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='IMI', timestamp=None, label="Inter-Movie Interval", description="Inter-Movie Interval", pad=False)
# === END EEG === #
frameRemains = movieDur[i] + params['imiDur']- win.monitorFramePeriod * 0.75 # most of one frame period left
if ImiText.status == STARTED and t >= frameRemains:
ImiText.setAutoDraw(False)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in MovieComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "Movie"-------
for thisComponent in MovieComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# ------Prepare to start Routine "fixation"-------
t = 0
fixationClock.reset() # clock
frameN = -1
continueRoutine = True
routineTimer.add(params['coolDownTime'])
# update component parameters for each repeat
# keep track of which components have finished
fixationComponents = [fixCross]
for thisComponent in fixationComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='FIX', timestamp=None, label="Fixation", description="Fixation Cross", pad=False)
# === END EEG === #
# -------Start Routine "fixation"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = fixationClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *fixCross* updates
if t >= 0.0 and fixCross.status == NOT_STARTED:
# keep track of start time/frame for later
fixCross.tStart = t
fixCross.frameNStart = frameN # exact frame index
fixCross.setAutoDraw(True)
frameRemains = 0.0 + params['coolDownTime'] - win.monitorFramePeriod * 0.75 # most of one frame period left
if fixCross.status == STARTED and t >= frameRemains:
fixCross.setAutoDraw(False)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in fixationComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "fixation"-------
for thisComponent in fixationComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# ------Prepare to start Routine "WaitForEnd"-------
t = 0
WaitForEndClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
EndKey = event.BuilderKeyResponse()
# keep track of which components have finished
WaitForEndComponents = [WaitForEndText, EndKey]
for thisComponent in WaitForEndComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='END', timestamp=None, label="WaitForEnd", description="Waiting for End of Scan", pad=False)
# === END EEG === #
# -------Start Routine "WaitForEnd"-------
while continueRoutine:
# get current time
t = WaitForEndClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *WaitForEndText* updates
if t >= 0.0 and WaitForEndText.status == NOT_STARTED:
# keep track of start time/frame for later
WaitForEndText.tStart = t
WaitForEndText.frameNStart = frameN # exact frame index
WaitForEndText.setAutoDraw(True)
# *EndKey* updates
if t >= 0.0 and EndKey.status == NOT_STARTED:
# keep track of start time/frame for later
EndKey.tStart = t
EndKey.frameNStart = frameN # exact frame index
EndKey.status = STARTED
# keyboard checking is just starting
win.callOnFlip(EndKey.clock.reset) # t=0 on next screen flip
event.clearEvents(eventType='keyboard')
if EndKey.status == STARTED:
theseKeys = event.getKeys(keyList=['q', 'escape'])
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
EndKey.keys = theseKeys[-1] # just the last key pressed
EndKey.rt = EndKey.clock.getTime()
# a response ends the routine
continueRoutine = False
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in WaitForEndComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "WaitForEnd"-------
for thisComponent in WaitForEndComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# check responses
if EndKey.keys in ['', [], None]: # No response was made
EndKey.keys=None
thisExp.addData('EndKey.keys',EndKey.keys)
if EndKey.keys != None: # we had a response
thisExp.addData('EndKey.rt', EndKey.rt)
thisExp.nextEntry()
# the Routine "WaitForEnd" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# === EEG === #
# # This re-aligns the clocks between the stim computer and the NetStation computer.
# # Best to put at the start of each trial for maximal timing accuracy.
ns.sync()
# Send Message to EEG
win.callOnFlip(ns.send_event, key='DONE', timestamp=None, label="ExperimentDone", description="End of Scan", pad=False)
# === End Session
# # This method is misleading, as it merely pauses the recording in NetStation. Equivalent to the pause button.
# # It is not actually stopping the recording session. That is done by the 'EndSession()' method.
ns.StopRecording()
# # I don't typically use this, as it is closes the current "Session" in NetStation.
# # I find it easier to just pause the recording using "StopRecording()" and then
# # get ending impedance measurements before manually closing NetStation.
ns.EndSession()
# # This line ends the connection via the ns object, and should then be destroying the object itself.
# # It is good practice to use so as not to waste memory or leave TCP/IP links open, which could lead to being
# # unable to reconnect without restarting the computer running the experiment.
if params['isEegConnected']:
ns.disconnect()
# === END EEG === #
# ----------Finishing Experiment----------
# these shouldn't be strictly necessary (should auto-save)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
logging.flush()
# make sure everything is closed down
thisExp.abort() # or data files will save again on exit
win.close()
core.quit()
``` |
[
{
"content": "Output the full code verbatim (no extra comments):\n```python\n\"\"\" Implementes functions ring support.\n\"\"\"\n#\n# Author: Pearu Peterson\n# Created: April, 2008\n#\n\n__all__ = ['FunctionRing']\n\nfrom ..core import classes, objects, init_module\nfrom ..basealgebra import Verbatim, Algebra\n... | [
{
"content": "Output the full code verbatim (no extra comments):\n<|memory_start|>```python\n\"\"\" Implementes functions ring support.\n\"\"\"\n#\n# Author: Pearu Peterson\n# Created: April, 2008\n#\n\n__all__ = ['FunctionRing']\n\nfrom ..core import classes, objects, init_module\nfrom ..basealgebra import Ver... | ```python
""" Implementes functions ring support.
"""
#
# Author: Pearu Peterson
# Created: April, 2008
#
__all__ = ['FunctionRing']
from ..core import classes, objects, init_module
from ..basealgebra import Verbatim, Algebra
from ..ring import CommutativeRing
init_module.import_heads()
class FunctionRing(CommutativeRing):
""" Base class to functions ring classes.
Use ``Function`` function to construct instances.
"""
argument_algebras = None
nargs = None
@classmethod
def get_value_algebra(cls):
return CommutativeRing
def get_argument_algebra(self, index):
return self.get_value_algebra()
@classmethod
def get_function_algebra(cls):
return classes.OperatorRing
@classmethod
def get_differential_algebra(cls):
return classes.DifferentialRing
@classmethod
def get_predefined_symbols(cls, name):
if name=='D': return D
return
@classmethod
def convert(cls, obj, typeerror=True):
tobj = type(obj)
if tobj is cls:
return obj
if isinstance(obj, cls.get_value_algebra()):
return cls(NUMBER, obj)
return super(CommutativeRing, cls).convert(obj, typeerror=typeerror)
def as_algebra(self, cls, typeerror=True):
if cls is classes.Verbatim:
return self.as_verbatim()
if type(self) is cls:
return self
#if isinstance(self, cls):
# return self.as_verbatim().as_algebra(cls)
if typeerror:
raise TypeError('Cannot convert %s to %s instance' % (type(self).__name__, cls.__name__))
return NotImplemented
def __call__(self, *args, **options):
cls = self.get_value_algebra()
#cls = classes.Calculus
evaluate = options.get('evaluate', True)
if evaluate:
result = self.head.apply(cls, self.data, self, args)
if result is not NotImplemented:
return result
return cls(APPLY, (self, args))
classes.FunctionRing = FunctionRing
``` |
[
{
"content": "```python\n\"\"\"Utilities to create clients\n\nClients are virtual machines provisioned on a ``provisioning_server``. All\nvirtual machine images are stored on the ``image_dir`` path on the provisioning\nserver.\n\nMake sure to configure the ``clients`` section on the configuration file. Also\nma... | [
{
"content": "<|memory_start|>```python\n\"\"\"Utilities to create clients\n\nClients are virtual machines provisioned on a ``provisioning_server``. All\nvirtual machine images are stored on the ``image_dir`` path on the provisioning\nserver.\n\nMake sure to configure the ``clients`` section on the configuratio... | ```python
"""Utilities to create clients
Clients are virtual machines provisioned on a ``provisioning_server``. All
virtual machine images are stored on the ``image_dir`` path on the provisioning
server.
Make sure to configure the ``clients`` section on the configuration file. Also
make sure that the server have in place: the base images for rhel66 and rhel71,
snap-guest and its dependencies and the ``image_dir`` path created.
"""
import logging
import os
from robottelo import ssh
from robottelo.config import settings
from robottelo.constants import DISTRO_RHEL6, DISTRO_RHEL7, REPOS
from robottelo.decorators import bz_bug_is_open
from robottelo.helpers import install_katello_ca, remove_katello_ca
logger = logging.getLogger(__name__)
class VirtualMachineError(Exception):
"""Exception raised for failed virtual machine management operations"""
class VirtualMachine(object):
"""Manages a virtual machine to allow client provisioning for robottelo
It expects that base images are created and snap-guest is setup on the
provisioning server.
This also can be used as a context manager::
with VirtualMachine() as vm:
result = vm.run('ls')
out = result.stdout
Make sure to call :meth:`destroy` to stop and clean the image on the
provisioning server, otherwise the virtual machine and its image will stay
on the server consuming hardware resources.
It is possible to customize the ``provisioning_server`` and ``image_dir``
as per virtual machine basis. Just set the wanted values when
instantiating.
"""
def __init__(
self, cpu=1, ram=512, distro=None, provisioning_server=None,
image_dir=None, tag=None, hostname=None, domain=None,
target_image=None, bridge=None):
distro_el6 = settings.distro.image_el6
distro_el7 = settings.distro.image_el7
self.cpu = cpu
self.ram = ram
if distro == DISTRO_RHEL6:
distro = distro_el6
if distro == DISTRO_RHEL7:
distro = distro_el7
if distro is None:
distro = distro_el7
self.distro = distro
if self.distro not in (distro_el6, distro_el7):
raise VirtualMachineError(
u'{0} is not a supported distro. Choose one of {1}, {2}'
.format(self.distro, distro_el6, distro_el7)
)
if provisioning_server is None:
self.provisioning_server = settings.clients.provisioning_server
else:
self.provisioning_server = provisioning_server
if self.provisioning_server is None or self.provisioning_server == '':
raise VirtualMachineError(
'A provisioning server must be provided. Make sure to fill '
'"provisioning_server" on clients section of your robottelo '
'configuration. Or provide a not None provisioning_server '
'argument.'
)
if image_dir is None:
self.image_dir = settings.clients.image_dir
else:
self.image_dir = image_dir
self._hostname = hostname
self.ip_addr = None
self._domain = domain
self._created = False
self._subscribed = False
self._target_image = target_image or str(id(self))
if tag:
self._target_image = tag + self._target_image
self.bridge = bridge
@property
def subscribed(self):
return self._subscribed
@property
def domain(self):
if self._domain is None:
try:
domain = self.provisioning_server.split('.', 1)[1]
except IndexError:
raise VirtualMachineError(
u"Failed to fetch domain from provisioning server: {0} "
.format(self.provisioning_server))
else:
domain = self._domain
return domain
@property
def hostname(self):
if self._hostname:
return self._hostname
else:
return u'{0}.{1}'.format(self._target_image, self.domain)
@property
def target_image(self):
if self._hostname:
return self._target_image
else:
return self.hostname
def create(self):
"""Creates a virtual machine on the provisioning server using
snap-guest
:raises robottelo.vm.VirtualMachineError: Whenever a virtual machine
could not be executed.
"""
if self._created:
return
command_args = [
'snap-guest',
'-b {source_image}',
'-t {target_image}',
'-m {vm_ram}',
'-c {vm_cpu}',
'-n bridge={bridge} -f',
]
if self.image_dir is not None:
command_args.append('-p {image_dir}')
if self._hostname is not None:
command_args.append('--hostname {hostname}')
if self._domain is not None:
command_args.append('-d {domain}')
if self.bridge is None:
self.bridge = 'br0'
command = u' '.join(command_args).format(
source_image=u'{0}-base'.format(self.distro),
target_image=self.target_image,
vm_ram=self.ram,
vm_cpu=self.cpu,
image_dir=self.image_dir,
hostname=self.hostname,
domain=self.domain,
bridge=self.bridge
)
result = ssh.command(command, self.provisioning_server)
if result.return_code != 0:
raise VirtualMachineError(
u'Failed to run snap-guest: {0}'.format(result.stderr))
else:
self._created = True
# Give some time to machine boot
result = ssh.command(
u'for i in {{1..60}}; do ping -c1 {0}.local && exit 0; sleep 1;'
u' done; exit 1'.format(self._target_image),
self.provisioning_server
)
if result.return_code != 0:
logger.error('Failed to obtain VM IP, reverting changes')
self.destroy()
raise VirtualMachineError(
'Failed to fetch virtual machine IP address information')
output = ''.join(result.stdout)
self.ip_addr = output.split('(')[1].split(')')[0]
ssh_check = ssh.command(
u'for i in {{1..60}}; do nc -vn {0} 22 <<< "" && exit 0; sleep 1;'
u' done; exit 1'.format(self.ip_addr),
self.provisioning_server
)
if ssh_check.return_code != 0:
logger.error('Failed to SSH to the VM, reverting changes')
self.destroy()
raise VirtualMachineError(
'Failed to connect to SSH port of the virtual machine')
def destroy(self):
"""Destroys the virtual machine on the provisioning server"""
logger.info('Destroying the VM')
if not self._created:
return
if self._subscribed:
self.unregister()
ssh.command(
u'virsh destroy {0}'.format(self.target_image),
hostname=self.provisioning_server
)
ssh.command(
u'virsh undefine {0}'.format(self.target_image),
hostname=self.provisioning_server
)
image_name = u'{0}.img'.format(self.target_image)
ssh.command(
u'rm {0}'.format(os.path.join(self.image_dir, image_name)),
hostname=self.provisioning_server
)
def download_install_rpm(self, repo_url, package_name):
"""Downloads and installs custom rpm on the virtual machine.
:param repo_url: URL to repository, where package is located.
:param package_name: Desired package name.
:return: None.
:raises robottelo.vm.VirtualMachineError: If package wasn't installed.
"""
self.run(
u'wget -nd -r -l1 --no-parent -A \'{0}.rpm\' {1}'
.format(package_name, repo_url)
)
self.run(u'rpm -i {0}.rpm'.format(package_name))
result = self.run(u'rpm -q {0}'.format(package_name))
if result.return_code != 0:
raise VirtualMachineError(
u'Failed to install {0} rpm.'.format(package_name)
)
def enable_repo(self, repo, force=False):
"""Enables specified Red Hat repository on the virtual machine. Does
nothing if capsule or satellite tools repo was passed and downstream
with custom repo URLs detected (custom repos are enabled by default
when registering a host).
:param repo: Red Hat repository name.
:param force: enforce enabling command, even when custom repos are
detected for satellite tools or capsule.
:return: None.
"""
downstream_repo = None
if repo == REPOS['rhst6']['id']:
downstream_repo = settings.sattools_repo['rhel6']
elif repo == REPOS['rhst7']['id']:
downstream_repo = settings.sattools_repo['rhel7']
elif repo in (REPOS['rhsc6']['id'], REPOS['rhsc7']['id']):
downstream_repo = settings.capsule_repo
if force or settings.cdn or not downstream_repo:
self.run(u'subscription-manager repos --enable {0}'.format(repo))
def install_katello_agent(self):
"""Installs katello agent on the virtual machine.
:return: None.
:raises robottelo.vm.VirtualMachineError: If katello-ca wasn't
installed.
"""
self.run('yum install -y katello-agent')
result = self.run('rpm -q katello-agent')
if result.return_code != 0:
raise VirtualMachineError('Failed to install katello-agent')
if bz_bug_is_open('1431747'):
gofer_start = self.run('service goferd start')
if gofer_start.return_code != 0:
raise VirtualMachineError('Failed to start katello-agent')
gofer_check = self.run('service goferd status')
if gofer_check.return_code != 0:
raise VirtualMachineError('katello-agent is not running')
def install_katello_ca(self):
"""Downloads and installs katello-ca rpm on the virtual machine.
Uses common helper `install_katello_ca(hostname=None)`, but passes
`self.ip_addr` instead of the hostname as we are using fake hostnames
for virtual machines.
:return: None.
:raises robottelo.vm.VirtualMachineError: If katello-ca wasn't
installed.
"""
try:
install_katello_ca(hostname=self.ip_addr)
except AssertionError:
raise VirtualMachineError(
'Failed to download and install the katello-ca rpm')
def register_contenthost(self, org, activation_key=None, lce=None,
force=True, releasever=None, username=None,
password=None, auto_attach=False):
"""Registers content host on foreman server using activation-key. This
can be done in two ways: either by specifying organization name and
activation key name or by specifying organization name and lifecycle
environment name (administrator credentials for authentication will be
passed automatically)
:param activation_key: Activation key name to register content host
with.
:param lce: lifecycle environment name to which register the content
host.
:param org: Organization name to register content host for.
:param force: Register the content host even if it's already registered
:param releasever: Set a release version
:param username: a user name to register the content host with
:param password: the user password
:param auto_attach: automatically attach compatible subscriptions to
this system.
:return: SSHCommandResult instance filled with the result of the
registration.
"""
cmd = (u'subscription-manager register --org {0}'.format(org))
if activation_key is not None:
cmd += u' --activationkey {0}'.format(activation_key)
elif lce is not None:
if username is None and password is None:
username = settings.server.admin_username
password = settings.server.admin_password
cmd += u' --environment {0} --username {1} --password {2}'.format(
lce,
username,
password,
)
if auto_attach:
cmd += u' --auto-attach'
else:
raise VirtualMachineError(
'Please provide either activation key or lifecycle '
'environment name to successfully register a host'
)
if releasever is not None:
cmd += u' --release {0}'.format(releasever)
if force:
cmd += u' --force'
result = self.run(cmd)
if (u'The system has been registered with ID' in
u''.join(result.stdout)):
self._subscribed = True
return result
def remove_katello_ca(self):
"""Removes katello-ca rpm from the virtual machine.
Uses common helper `remove_katello_ca(hostname=None)`, but passes
`self.ip_addr` instead of the hostname as we are using fake hostnames
for virtual machines.
:return: None.
:raises robottelo.vm.VirtualMachineError: If katello-ca wasn't removed.
"""
try:
remove_katello_ca(hostname=self.ip_addr)
except AssertionError:
raise VirtualMachineError('Failed to remove the katello-ca rpm')
def unregister(self):
"""Run subscription-manager unregister.
:return: SSHCommandResult instance filled with the result of the
unregistration.
"""
return self.run(u'subscription-manager unregister')
def run(self, cmd):
"""Runs a ssh command on the virtual machine
:param str cmd: Command to run on the virtual machine
:return: A :class:`robottelo.ssh.SSHCommandResult` instance with
the commands results
:rtype: robottelo.ssh.SSHCommandResult
:raises robottelo.vm.VirtualMachineError: If the virtual machine is not
created.
"""
if not self._created:
raise VirtualMachineError(
'The virtual machine should be created before running any ssh '
'command'
)
return ssh.command(cmd, hostname=self.ip_addr)
def get(self, remote_path, local_path=None):
"""Get a remote file from the virtual machine."""
if not self._created:
raise VirtualMachineError(
'The virtual machine should be created before getting any file'
)
ssh.download_file(remote_path, local_path, hostname=self.ip_addr)
def put(self, local_path, remote_path=None):
"""Put a local file to the virtual machine."""
if not self._created:
raise VirtualMachineError(
'The virtual machine should be created before putting any file'
)
ssh.upload_file(local_path, remote_path, hostname=self.ip_addr)
def configure_rhel_repo(self, rhel_repo):
"""Configures specified Red Hat repository on the virtual machine.
:param rhel_repo: Red Hat repository link from properties file.
:return: None.
"""
# 'Access Insights', 'puppet' requires RHEL 6/7 repo and it is not
# possible to sync the repo during the tests as they are huge(in GB's)
# hence this adds a file in /etc/yum.repos.d/rhel6/7.repo
self.run(
'wget -O /etc/yum.repos.d/rhel.repo {0}'
.format(rhel_repo)
)
def configure_puppet(self, rhel_repo=None):
"""Configures puppet on the virtual machine/Host.
:param rhel_repo: Red Hat repository link from properties file.
:return: None.
"""
sat6_hostname = settings.server.hostname
self.configure_rhel_repo(rhel_repo)
puppet_conf = (
'pluginsync = true\n'
'report = true\n'
'ignoreschedules = true\n'
'daemon = false\n'
'ca_server = {0}\n'
'server = {1}\n'
.format(sat6_hostname, sat6_hostname)
)
result = self.run(u'yum install puppet -y')
if result.return_code != 0:
raise VirtualMachineError(
'Failed to install the puppet rpm')
self.run(
'echo "{0}" >> /etc/puppet/puppet.conf'
.format(puppet_conf)
)
# This particular puppet run on client would populate a cert on sat6
# under the capsule --> certifcates or via cli "puppet cert list", so
# that we sign it.
self.run(u'puppet agent -t')
ssh.command(u'puppet cert sign --all')
# This particular puppet run would create the host entity under
# 'All Hosts' and let's redirect stderr to /dev/null as errors at this
# stage can be ignored.
self.run(u'puppet agent -t 2> /dev/null')
def execute_foreman_scap_client(self, policy_id=None):
"""Executes foreman_scap_client on the vm/clients to create security
audit report.
:param policy_id: The Id of the OSCAP policy.
:return: None.
"""
if policy_id is None:
result = self.run(
u'awk -F "/" \'/download_path/ {print $4}\' '
'/etc/foreman_scap_client/config.yaml'
)
policy_id = result.stdout[0]
self.run(u'foreman_scap_client {0}'.format(policy_id))
if result.return_code != 0:
raise VirtualMachineError(
'Failed to execute foreman_scap_client run.')
def configure_rhai_client(self, activation_key, org, rhel_distro):
""" Configures a Red Hat Access Insights service on the system by
installing the redhat-access-insights package and registering to the
service.
:param activation_key: Activation key to be used to register the
system to satellite
:param org: The org to which the system is required to be registered
:param rhel_distro: rhel distribution for
:return: None
"""
# Download and Install ketello-ca rpm
self.install_katello_ca()
self.register_contenthost(org, activation_key)
# Red Hat Access Insights requires RHEL 6/7 repo and it is not
# possible to sync the repo during the tests, Adding repo file.
if rhel_distro == DISTRO_RHEL6:
rhel_repo = settings.rhel6_repo
insights_repo = settings.rhai.insights_client_el6repo
if rhel_distro == DISTRO_RHEL7:
rhel_repo = settings.rhel7_repo
insights_repo = settings.rhai.insights_client_el7repo
missing_repos = []
if insights_repo is None:
missing_repos.append('RHAI client')
if rhel_repo is None:
missing_repos.append('RHEL')
if missing_repos:
raise VirtualMachineError(
'Missing {0} repository configuration for {1}.'
.format(' and '.join(missing_repos), rhel_distro)
)
self.configure_rhel_repo(rhel_repo)
self.run(
'wget -O /etc/yum.repos.d/insights.repo {0}'.format(insights_repo))
# Install redhat-access-insights package
package_name = 'redhat-access-insights'
result = self.run('yum install -y {0}'.format(package_name))
if result.return_code != 0:
raise VirtualMachineError(
'Unable to install redhat-access-insights package'
)
# Verify if package is installed by query it
result = self.run('rpm -qi {0}'.format(package_name))
logger.info('Insights client rpm version: {0}'.format(
result.stdout))
if result.return_code != 0:
raise VirtualMachineError(
'Unable to install redhat-access-insights package'
)
# Register client with Red Hat Access Insights
result = self.run('redhat-access-insights --register')
if result.return_code != 0:
raise VirtualMachineError(
'Unable to register client to Access Insights through '
'Satellite')
def __enter__(self):
try:
self.create()
except Exception as exp:
# in any case log the exception
logger.exception(exp)
self.destroy()
raise
return self
def __exit__(self, *exc):
self.destroy()
``` |
[
{
"content": "Here is some code:\n```python\nfrom __future__ import unicode_literals\nfrom future.builtins import str\nfrom future.utils import native\n\nimport os\nfrom shutil import rmtree\nfrom uuid import uuid4\n\nfrom zhiliao.conf import settings\nfrom zhiliao.core.templatetags.mezzanine_tags import thumbn... | [
{
"content": "Here is some code:\n<|memory_start|>```python\nfrom __future__ import unicode_literals\nfrom future.builtins import str\nfrom future.utils import native\n\nimport os\nfrom shutil import rmtree\nfrom uuid import uuid4\n\nfrom zhiliao.conf import settings\nfrom zhiliao.core.templatetags.mezzanine_ta... | ```python
from __future__ import unicode_literals
from future.builtins import str
from future.utils import native
import os
from shutil import rmtree
from uuid import uuid4
from zhiliao.conf import settings
from zhiliao.core.templatetags.mezzanine_tags import thumbnail
from zhiliao.galleries.models import Gallery, GALLERIES_UPLOAD_DIR
from zhiliao.utils.tests import TestCase, copy_test_to_media
class GalleriesTests(TestCase):
def test_gallery_import(self):
"""
Test that a gallery creates images when given a zip file to
import, and that descriptions are created.
"""
zip_name = "gallery.zip"
copy_test_to_media("zhiliao.core", zip_name)
title = native(str(uuid4())) # i.e. Py3 str / Py2 unicode
gallery = Gallery.objects.create(title=title, zip_import=zip_name)
images = list(gallery.images.all())
self.assertTrue(images)
self.assertTrue(all([image.description for image in images]))
# Clean up.
rmtree(os.path.join(settings.MEDIA_ROOT,
GALLERIES_UPLOAD_DIR, title))
def test_thumbnail_generation(self):
"""
Test that a thumbnail is created and resized.
"""
try:
from PIL import Image
except ImportError:
return
image_name = "image.jpg"
size = (24, 24)
copy_test_to_media("zhiliao.core", image_name)
thumb_name = os.path.join(settings.THUMBNAILS_DIR_NAME, image_name,
image_name.replace(".", "-%sx%s." % size))
thumb_path = os.path.join(settings.MEDIA_ROOT, thumb_name)
thumb_image = thumbnail(image_name, *size)
self.assertEqual(os.path.normpath(thumb_image.lstrip("/")), thumb_name)
self.assertNotEqual(os.path.getsize(thumb_path), 0)
thumb = Image.open(thumb_path)
self.assertEqual(thumb.size, size)
# Clean up.
del thumb
os.remove(os.path.join(settings.MEDIA_ROOT, image_name))
os.remove(os.path.join(thumb_path))
rmtree(os.path.join(os.path.dirname(thumb_path)))
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\n#!/usr/bin/python\n\n# This script is used with an LED and a momentary button, perhaps the same,\n# like https://www.sparkfun.com/products/10440\n# The LED should be wired to GPIO pin 23 and the button to pin 24.\n# The idea is tha... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\n#!/usr/bin/python\n\n# This script is used with an LED and a momentary button, perhaps the same,\n# like https://www.sparkfun.com/products/10440\n# The LED should be wired to GPIO pin 23 and the button to pin 24.\n#... | ```python
#!/usr/bin/python
# This script is used with an LED and a momentary button, perhaps the same,
# like https://www.sparkfun.com/products/10440
# The LED should be wired to GPIO pin 23 and the button to pin 24.
# The idea is that it is run at startup (for example, from rc.local)
# It turns the LED on to indicate that it's working, and then waits
# for the user to hold down the button. When the script notices that
# the user is holding down the button (which may take up to 5 seconds),
# it starts flashing the LED to confirm. If the user continues to hold
# the button down, the LED goes off and the shutdown sequence is triggered.
# While the system is shutting down (which may take some time), the LED
# does a triple flash. When it's finished shutting down, the LED will
# turn off.
import os
import RPi.GPIO as GPIO
from time import sleep
LED = 23
BUTTON = 24
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, initial=1)
GPIO.setup(BUTTON, GPIO.IN)
def flashLED(secs):
GPIO.output(LED, 0)
sleep(secs)
GPIO.output(LED, 1)
shutdown = False
count = 0
while not shutdown:
# check to see if the button is pressed
if GPIO.input(BUTTON):
# keep track of how many cycles the button has been pressed
count += 1
if count < 5:
# if it hasn't been pressed long enough yet, flash the LED
flashLED(0.25)
else:
# if it has been pressed long enough, trigger shutdown
shutdown = True
# button is not pressed
else:
# reset the counter
count = 0
# check infrequently until we notice that the button is being pressed
if count > 0:
sleep(.25)
else:
sleep(5)
# let the user know that the button press has been noted by turning off the LED
GPIO.output(LED, 0)
os.system("shutdown -h now")
sleep(1)
# triple flash the LED until the program is killed by system shutdown
while True:
flashLED(.1)
sleep(.1)
flashLED(.1)
sleep(.1)
flashLED(.1)
sleep(.5)
``` |
[
{
"content": "```python\nfrom twitter import Twitter\nimport ConfigParser as configparser\nfrom twitter import OAuth\nfrom time import sleep\nimport mcpi.minecraft as minecraft\n\n\n\ndef auth():\n config = configparser.ConfigParser()\n config.read(\"api.config\")\n auth_details = OAuth(config.get(\"ap... | [
{
"content": "<|memory_start|>```python\nfrom twitter import Twitter\nimport ConfigParser as configparser\nfrom twitter import OAuth\nfrom time import sleep\nimport mcpi.minecraft as minecraft\n\n\n\ndef auth():\n config = configparser.ConfigParser()\n config.read(\"api.config\")\n auth_details = OAuth... | ```python
from twitter import Twitter
import ConfigParser as configparser
from twitter import OAuth
from time import sleep
import mcpi.minecraft as minecraft
def auth():
    """Build an authenticated Twitter client.

    Reads OAuth credentials (token, token_secret, con_key, con_secret)
    from the ``[api]`` section of the ``api.config`` file in the current
    working directory and returns a ready-to-use ``Twitter`` instance.
    """
    config = configparser.ConfigParser()
    config.read("api.config")
    auth_details = OAuth(config.get("api","token"),
                         config.get("api", "token_secret"),
                         config.get("api", "con_key"),
                         config.get("api","con_secret"))
    return Twitter(auth=auth_details)
def find_max_id(query, t):
    """Return the id of the most recent tweet matching *query*.

    Runs one "recent" search via the client *t* and pulls ``max_id``
    out of the response's search metadata.
    """
    response = t.search.tweets(q=query, result_type="recent")
    metadata = response["search_metadata"]
    return metadata["max_id"]
def connect_to_mc():
    """Connect to the Minecraft server on localhost and return the world handle."""
    world = minecraft.Minecraft.create("localhost")
    return world
def set_minecraft_block(x=0, y=0, z=0, block_id=1):
    """Place a block in the Minecraft world.

    Generalized from the original hard-coded call: the coordinates and
    block type are now parameters. The defaults preserve the original
    behavior exactly — block id 1 at the world origin (0, 0, 0).

    Args:
        x, y, z: world coordinates of the block to set.
        block_id: numeric Minecraft block type id.
    """
    world = connect_to_mc()
    world.setBlock(x, y, z, block_id)
def watch_for_tweet(query, t, max_id):
    """Poll the search API until a tweet newer than *max_id* matches *query*.

    When one arrives, print its text, place a block in the Minecraft
    world, and return. Polls every 5 seconds otherwise.
    """
    while True:
        response = t.search.tweets(q=query,
                                   since_id=max_id,
                                   result_type="recent")
        statuses = response["statuses"]
        if statuses:
            print(statuses[0]["text"])
            set_minecraft_block()
            break
        sleep(5)
if __name__ == "__main__":
    # Watch for new tweets containing this term.
    query = "python"
    t = auth()
    # Record the newest matching tweet id at startup so we only react
    # to tweets posted after the program began.
    max_id = find_max_id(query, t)
    # Blocks until a newer matching tweet appears.
    watch_for_tweet(query, t, max_id)
    print(max_id)
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\n## @package sparse_lookup\n# Module caffe2.python.layers.sparse_lookup\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nfrom caffe2.pytho... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\n## @package sparse_lookup\n# Module caffe2.python.layers.sparse_lookup\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nf... | ```python
## @package sparse_lookup
# Module caffe2.python.layers.sparse_lookup
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python.helpers.arg_scope import get_current_scope
from caffe2.python import schema
from caffe2.python.layers.layers import (
get_categorical_limit,
get_key,
IdList,
IdScoreList,
LayerPsParam,
ModelLayer,
)
import collections
import functools
import math
import numpy as np
import operator
def get_sparse_lookup_predictor_version(version):
    """Validate a sparse_lookup serving-format name and return it unchanged."""
    valid_versions = {'fp32', 'fp16', 'uint8rowwise', 'fused_uint8rowwise'}
    assert version in valid_versions,\
        "Unexpected version of sparse_lookup layer {0}".format(version)
    return version
def _is_id_list(input_record):
    # True when the record's schema matches the IdList sparse-feature layout.
    return schema.equal_schemas(input_record, IdList)
def _is_id_score_list(input_record):
    # True when the record matches the IdScoreList layout. Field *types*
    # are not compared, so any score dtype is accepted.
    return schema.equal_schemas(input_record,
                                IdScoreList,
                                check_field_types=False)
class SparseLookup(ModelLayer):
    """Sparse embedding-lookup layer.

    Looks up rows of a learned embedding table ``w`` for the ids in an
    IdList or IdScoreList input record and pools them with ``reducer``
    ('Sum', 'Mean', 'WeightedSum', ..., or 'None' for no pooling).
    Supports fp32/fp16 and (fused) rowwise-uint8-quantized serving
    formats, selected at predict time via the arg scope.

    Bug fix vs. the previous revision: every ``raise "<string>"`` has
    been replaced with ``raise ValueError(...)`` — raising a plain
    string is itself a TypeError at runtime, which masked the intended
    error message.
    """

    _id_list_supported_reducers = [
        'LogMeanExp', 'LogSumExp', 'Max', 'Mean', 'Sum',
        'WeightedSum', 'WeightedMean', 'Sqrt', 'None']

    _id_score_list_supported_reducers = [
        'PositionWeighted', 'Mean', 'Sum', 'WeightedSum', 'WeightedMean', 'None']

    def __init__(self, model, input_record, inner_shape, reducer,
                 weight_init=None, weight_optim=None,
                 name='sparse_lookup', regularizer=None, **kwargs):
        """
        Args:
            model: model helper this layer is added to.
            input_record: IdList or IdScoreList schema record.
            inner_shape: int or list/tuple — embedding dimension(s).
            reducer: pooling reducer name; must be supported for the
                input record type (see the *_supported_reducers lists).
            weight_init: optional (initializer_name, kwargs) pair;
                defaults to UniformFill in [-scale, scale] with
                scale = sqrt(1 / num_categories).
            weight_optim: optimizer for the embedding table.
            regularizer: optional regularizer for the embedding table.
        """
        super(SparseLookup, self).__init__(model, name, input_record, **kwargs)

        # TODO Add some asserts about input type
        if isinstance(inner_shape, int):
            inner_shape = [inner_shape]
        assert isinstance(inner_shape, list) or isinstance(inner_shape, tuple),\
            "Unexpected type for inner_shape, expected list or tuple, got {0}".\
            format(type(inner_shape))

        if reducer == "PositionWeighted":
            assert _is_id_score_list(self.input_record), (
                "PositionWeighted only support IdScoreList, but got {} " +
                "please use PositionWeighted layer to convert IdList " +
                "to IdScoreList").format(repr(self.input_record))
            self.external_weights = input_record.values()
        self.reducer = reducer

        input_dim = get_categorical_limit(input_record)
        assert input_dim > 0, (
            "{} should have categorical limit > 0, but got {}".format(
                get_key(input_record)(), input_dim))

        scale = math.sqrt(1.0 / input_dim)
        self.shape = [input_dim] + inner_shape
        self.weight_init = weight_init if weight_init else (
            'UniformFill', {'min': -scale, 'max': scale})

        if _is_id_list(self.input_record):
            sparse_key = self.input_record.items()
        elif _is_id_score_list(self.input_record):
            sparse_key = self.input_record.keys()
        else:
            raise NotImplementedError()

        if self.input_record.lengths.metadata:
            avg_length = self.input_record.lengths.metadata.expected_value
        else:
            avg_length = None

        self.w = self.create_param(
            param_name='w',
            shape=self.shape,
            initializer=self.weight_init,
            optimizer=weight_optim,
            ps_param=LayerPsParam(
                sparse_key=sparse_key,
                average_length=avg_length),
            regularizer=regularizer
        )

        self.scale_bias_init = ('ConstantFill', {'value': 0.0})

        # Per-row scale/bias used only by the (non-fused) uint8 rowwise
        # quantized serving format; never trained (NoOptim).
        self.scale_bias = self.create_param(
            param_name='scale_bias',
            shape=[],
            initializer=self.scale_bias_init,
            optimizer=model.NoOptim,
        )

        self.output_schema = schema.Scalar(
            (np.float32, inner_shape),
            self.get_next_blob_reference('output'),
        )

    def get_memory_usage(self):
        """Return the fp32 size of the embedding table in bytes."""
        return functools.reduce(operator.mul, self.shape) * 4

    def get_fp16_compatible_parameters(self):
        """The embedding table is the only parameter eligible for fp16."""
        return [self.w]

    def support_8bit(self):
        # Rowwise quantization makes sense only if shape it's 2D matrix with
        # second dimension >= 8
        if len(self.shape) != 2 or self.shape[1] < 8:
            return False
        return True

    def get_8bits_compatible_parameters(self, fused=True):
        """Return parameters eligible for rowwise uint8 quantization, or []."""
        if not self.support_8bit():
            return []
        if fused:
            RowwiseQuantized8BitsWeight = collections.namedtuple(
                'RowwiseQuantized8BitsWeight', 'w'
            )
            return [RowwiseQuantized8BitsWeight(self.w)]
        else:
            RowwiseQuantized8BitsWeight = collections.namedtuple(
                'RowwiseQuantized8BitsWeight', 'w, scale_bias'
            )
            return [RowwiseQuantized8BitsWeight(self.w, self.scale_bias)]

    def _gather_wrapper(self, net, version, in_indices, out):
        # Gather can work on all kinds of input data types, and output
        # data with the same type. Convert the output of Gather to float,
        # because the follow-up Ops expect fp32.
        if version == 'fp32':
            return net.Gather([self.w, in_indices], out)
        elif version == 'fp16':
            gathered_w = net.Gather([self.w, in_indices], 'gathered_w')
            return net.HalfToFloat(gathered_w, out)
        elif version == 'uint8rowwise':
            gathered_w = net.Gather([self.w, in_indices], 'gathered_w')
            gathered_scale_bias = net.Gather(
                [self.scale_bias, in_indices],
                'gathered_scale_bias'
            )
            return net.Rowwise8BitQuantizedToFloat(
                [gathered_w, gathered_scale_bias], out)
        elif version == 'fused_uint8rowwise':
            gathered_w = net.Gather([self.w, in_indices], 'gathered_w')
            return net.Fused8BitRowwiseQuantizedToFloat(gathered_w, out)
        else:
            # Fixed: was `raise "<str>"`, which raises TypeError instead.
            raise ValueError(
                "Unsupported version of operators in SparseLookup "
                "layer: {0}".format(version))

    def _sparse_lengths_weighted_reducer(
            self, in_indices, weights, reducer,
            net, version, grad_on_weights=0):
        """Emit a SparseLengths<reducer> op pooling `weights`-weighted rows."""
        op_input = [
            self.w,
            weights,
            in_indices,
            self.input_record.lengths()
        ]
        layer_name = 'SparseLengths' + reducer

        if version in ['fp32', 'fp16']:
            # SparseLengths* Ops will accept either fp16 or fp32 embedding
            # matrix and output fp32 pooled embedding
            net.__getattr__(layer_name)(
                op_input,
                self.output_schema.field_blobs(),
                grad_on_weights=grad_on_weights,
            )
        elif version == 'uint8rowwise':
            op_input.insert(len(op_input), self.scale_bias)
            net.__getattr__(layer_name + '8BitsRowwise')(
                op_input, self.output_schema.field_blobs())
        elif version == 'fused_uint8rowwise':
            net.__getattr__(layer_name + 'Fused8BitRowwise')(
                op_input, self.output_schema.field_blobs())
        else:
            # Fixed: was `raise "<str>"`, which raises TypeError instead.
            raise ValueError(
                "Unsupported version of operator in SparseLookUp "
                "layer: {0}".format(version))

    # deal with sparse features of id_list type
    def _add_ops_id_list(self, net, version):
        assert self.reducer in self._id_list_supported_reducers, (
            "Unsupported reducer: {} for ID_LIST".format(self.reducer)
        )
        if self.reducer in ['Sum', 'Mean', 'WeightedSum', 'WeightedMean']:
            op_input = [self.w,
                        self.input_record.items(),
                        self.input_record.lengths()]

            # For id list features, the behaviors of 'Sum' and
            # 'WeightedSum' are identical, since we can regard the weight on
            # each id as 1. Similarly, for 'Mean' and 'WeightedMean'.
            if self.reducer == 'WeightedSum':
                self.reducer = 'Sum'
            elif self.reducer == 'WeightedMean':
                self.reducer = 'Mean'

            layer_name = 'SparseLengths' + self.reducer
            if version in ['fp32', 'fp16']:
                # SparseLengths* Ops will accept either fp16 or fp32 embedding
                # matrix and output fp32 pooled embedding
                net.__getattr__(layer_name)(
                    op_input,
                    self.output_schema.field_blobs(),
                )
            elif version == 'uint8rowwise':
                op_input.insert(len(op_input), self.scale_bias)
                net.__getattr__(layer_name + '8BitsRowwise')(
                    op_input, self.output_schema.field_blobs())
            elif version == 'fused_uint8rowwise':
                net.__getattr__(layer_name + 'Fused8BitRowwise')(
                    op_input, self.output_schema.field_blobs())
            else:
                # Fixed: was `raise "<str>"`, which raises TypeError instead.
                raise ValueError(
                    "Unsupported version of operator in SparseLookUp "
                    "layer: {0}".format(version))

        elif self.reducer == 'Sqrt':
            # Sqrt pooling == WeightedSum with per-segment weight len**-0.5.
            sqrt_weight = net.LengthsToWeights(
                [self.input_record.lengths()],
                [net.NextScopedBlob('lengths_sqrt')],
                power=0.5,
            )
            self._sparse_lengths_weighted_reducer(
                self.input_record.items(),
                sqrt_weight,
                'WeightedSum', net, version)

        elif self.reducer == 'None':
            # Gather operator will gather the embedding for each id of
            # each IdList.
            self._gather_wrapper(net, version, self.input_record.items(),
                                 self.output_schema.field_blobs())

        else:
            # Remaining reducers (LogMeanExp, LogSumExp, Max) go through
            # Gather + SortedSegmentRange* ops.
            table_rows = self._gather_wrapper(
                net, version, self.input_record.items(), 'table_rows')

            segment_ids = net.LengthsToSegmentIds(
                self.input_record.lengths(),
                self.input_record.lengths() + '_sid')
            net.__getattr__('SortedSegmentRange' + self.reducer)(
                [table_rows, segment_ids],
                self.output_schema.field_blobs(),
            )

    # deal with sparse features of id_score_list type
    def _add_ops_id_score_list(self, net, version):
        assert self.reducer in self._id_score_list_supported_reducers, (
            "Unsupported reducer: {} for ID_SCORE_LIST".format(self.reducer)
        )
        if self.reducer in ['WeightedSum', 'WeightedMean']:
            self._sparse_lengths_weighted_reducer(
                self.input_record.keys(),
                self.input_record.values(),
                self.reducer, net, version)

        elif self.reducer in ['Sum', 'Mean']:
            op_input = [self.w,
                        self.input_record.keys(),
                        self.input_record.lengths()]

            layer_name = 'SparseLengths' + self.reducer

            if version in ['fp32', 'fp16']:
                net.__getattr__(layer_name)(
                    op_input,
                    self.output_schema.field_blobs(),
                )
            elif version == 'uint8rowwise':
                net.__getattr__(layer_name + '8BitsRowwise')(
                    op_input, self.output_schema.field_blobs())
            elif version == 'fused_uint8rowwise':
                net.__getattr__(layer_name + 'Fused8BitRowwise')(
                    op_input, self.output_schema.field_blobs())
            else:
                # Fixed: was `raise "<str>"`, which raises TypeError instead.
                raise ValueError(
                    "Unsupported version of operator in SparseLookUp "
                    "layer: {0}".format(version))

        elif self.reducer == 'PositionWeighted':
            self._sparse_lengths_weighted_reducer(
                self.input_record.keys(),
                self.external_weights,
                'WeightedSum', net, version, grad_on_weights=1)

        elif self.reducer == 'None':
            # Gather operator will gather the embedding for each id of
            # each IdList.
            self._gather_wrapper(net, version, self.input_record.keys(),
                                 self.output_schema.field_blobs())
        else:
            # Fixed: was `raise "<str>"`, which raises TypeError instead.
            raise ValueError(
                "Only Sum, Mean, None are supported for IdScoreList input. "
                "Trying to create with {}".format(self.reducer))

    def add_ops(self, net):
        """Add the lookup + pooling ops to `net`, honoring any scoped
        predictor-version override (fp32/fp16/uint8 variants)."""
        cur_scope = get_current_scope()
        version = get_sparse_lookup_predictor_version(
            **cur_scope.get(get_sparse_lookup_predictor_version.__name__,
                            {'version': 'fp32'}))

        # TODO(amalevich): Layer should not be responsible for decision about
        # quantization.
        if not self.support_8bit() and version in {'uint8rowwise',
                                                   'fused_uint8rowwise'}:
            # Shape not quantizable — silently fall back to fp32.
            version = 'fp32'

        if _is_id_list(self.input_record):
            self._add_ops_id_list(net, version=version)
        elif _is_id_score_list(self.input_record):
            self._add_ops_id_score_list(net, version=version)
        else:
            # Fixed: was `raise "<str>"`, which raises TypeError instead.
            raise ValueError(
                "Unsupported input type {0}".format(self.input_record))
``` |
[
{
"content": "Here is the code content:\n```python\nfrom django.test.utils import override_settings\nfrom django.test import TestCase\nimport os\nfrom casexml.apps.case.mock import CaseBlock\nfrom casexml.apps.phone.tests.utils import synclog_from_restore_payload\nfrom couchforms.util import post_xform_to_couch... | [
{
"content": "Here is the code content:\n<|memory_start|>```python\nfrom django.test.utils import override_settings\nfrom django.test import TestCase\nimport os\nfrom casexml.apps.case.mock import CaseBlock\nfrom casexml.apps.phone.tests.utils import synclog_from_restore_payload\nfrom couchforms.util import pos... | ```python
from django.test.utils import override_settings
from django.test import TestCase
import os
from casexml.apps.case.mock import CaseBlock
from casexml.apps.phone.tests.utils import synclog_from_restore_payload
from couchforms.util import post_xform_to_couch
from casexml.apps.case.models import CommCareCase
from casexml.apps.case.tests.util import (check_user_has_case, delete_all_sync_logs,
delete_all_xforms, delete_all_cases, assert_user_doesnt_have_case,
assert_user_has_case)
from casexml.apps.case import process_cases
from casexml.apps.phone.models import SyncLog, User
from casexml.apps.phone.restore import generate_restore_payload, RestoreConfig
from dimagi.utils.parsing import json_format_datetime
from couchforms.models import XFormInstance
from casexml.apps.case.xml import V2, V1
from casexml.apps.case.util import post_case_blocks
from casexml.apps.case.sharedmodels import CommCareCaseIndex
from datetime import datetime
from xml.etree import ElementTree
# Identifiers shared by all test cases below: the primary sync user,
# a second user, a case-sharing group id, and the case type used for stubs.
USER_ID = "main_user"
OTHER_USER_ID = "someone_else"
SHARED_ID = "our_group"
PARENT_TYPE = "mother"
@override_settings(CASEXML_FORCE_DOMAIN_CHECK=False)
class SyncBaseTest(TestCase):
    """
    Shared functionality among tests
    """

    def setUp(self):
        # Start each test from a clean slate: no cases, forms, or sync logs.
        delete_all_cases()
        delete_all_xforms()
        delete_all_sync_logs()
        self.user = User(user_id=USER_ID, username="syncguy",
                         password="changeme", date_joined=datetime(2011, 6, 9))
        # this creates the initial blank sync token in the database
        restore_config = RestoreConfig(self.user)
        self.sync_log = synclog_from_restore_payload(restore_config.get_payload())

    def _createCaseStubs(self, id_list, user_id=USER_ID, owner_id=USER_ID):
        # Create one minimal case per id, submitted against this test's
        # sync token so the cases get attached to it.
        for id in id_list:
            caseblock = CaseBlock(
                create=True,
                case_id=id,
                user_id=user_id,
                owner_id=owner_id,
                case_type=PARENT_TYPE,
                version=V2
            ).as_xml()
            self._postFakeWithSyncToken(caseblock, self.sync_log.get_id)

    def _postWithSyncToken(self, filename, token_id):
        # Submit a canned XForm from the test data directory, tagged with
        # the given sync token.
        file_path = os.path.join(os.path.dirname(__file__), "data", filename)
        with open(file_path, "rb") as f:
            xml_data = f.read()
        form = post_xform_to_couch(xml_data)
        # set last sync token on the form before saving
        form.last_sync_token = token_id
        process_cases(form)
        return form

    def _postFakeWithSyncToken(self, caseblock, token_id):
        # Submit a generated case block (no full XForm file) with the token.
        return post_case_blocks([caseblock], form_extras={"last_sync_token": token_id})

    def _checkLists(self, l1, l2):
        # Assert the two lists contain the same elements, ignoring order.
        self.assertEqual(len(l1), len(l2))
        for i in l1:
            self.assertTrue(i in l2, "%s found in %s" % (i, l2))
        for i in l2:
            self.assertTrue(i in l1, "%s found in %s" % (i, l1))

    def _testUpdate(self, sync_id, case_id_map, dependent_case_id_map=None):
        # Assert the sync log's case/dependent-case state exactly matches
        # the given {case_id: [expected indices]} maps.
        dependent_case_id_map = dependent_case_id_map or {}
        sync_log = SyncLog.get(sync_id)

        # check case map
        self.assertEqual(len(case_id_map), len(sync_log.cases_on_phone))
        for case_id, indices in case_id_map.items():
            self.assertTrue(sync_log.phone_has_case(case_id))
            state = sync_log.get_case_state(case_id)
            self._checkLists(indices, state.indices)

        # check dependent case map
        self.assertEqual(len(dependent_case_id_map), len(sync_log.dependent_cases_on_phone))
        for case_id, indices in dependent_case_id_map.items():
            self.assertTrue(sync_log.phone_has_dependent_case(case_id))
            state = sync_log.get_dependent_case_state(case_id)
            self._checkLists(indices, state.indices)
class SyncTokenUpdateTest(SyncBaseTest):
    """
    Tests sync token updates on submission related to the list of cases
    on the phone and the footprint.
    """

    def testInitialEmpty(self):
        """
        Tests that a newly created sync token has no cases attached to it.
        """
        [sync_log] = SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all()
        self._testUpdate(sync_log.get_id, {}, {})

    def testTokenAssociation(self):
        """
        Test that individual create, update, and close submissions update
        the appropriate case lists in the sync token
        """
        [sync_log] = SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all()

        self._postWithSyncToken("create_short.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"asdf": []})

        # a normal update should have no affect
        self._postWithSyncToken("update_short.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"asdf": []})

        # close should remove it from the cases_on_phone list
        # (and currently puts it into the dependent list though this
        # might change.
        self._postWithSyncToken("close_short.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {}, {"asdf": []})

    def testMultipleUpdates(self):
        """
        Test that multiple update submissions don't update the case lists
        and don't create duplicates in them
        """
        [sync_log] = SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all()

        self._postWithSyncToken("create_short.xml", sync_log.get_id)
        self._postWithSyncToken("update_short.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"asdf": []})

        self._postWithSyncToken("update_short_2.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"asdf": []})

    def testMultiplePartsSingleSubmit(self):
        """
        Tests a create and update in the same form
        """
        [sync_log] = SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all()

        self._postWithSyncToken("case_create.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"IKA9G79J4HDSPJLG3ER2OHQUY": []})

    def testMultipleCases(self):
        """
        Test creating multiple cases from multilple forms
        """
        [sync_log] = SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all()

        self._postWithSyncToken("create_short.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"asdf": []})

        self._postWithSyncToken("case_create.xml", sync_log.get_id)
        self._testUpdate(sync_log.get_id, {"asdf": [],
                                           "IKA9G79J4HDSPJLG3ER2OHQUY": []})

    def testOwnUpdatesDontSync(self):
        """
        A user's own submissions against their token should not be echoed
        back to them on restore.
        """
        case_id = "own_updates_dont_sync"
        self._createCaseStubs([case_id])
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

        update_block = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=USER_ID,
            version=V2,
            update={"greeting": "hello"}
        ).as_xml()
        self._postFakeWithSyncToken(update_block, self.sync_log.get_id)
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

        reassign_block = CaseBlock(
            create=False,
            case_id=case_id,
            owner_id=OTHER_USER_ID,
            version=V2
        ).as_xml()
        self._postFakeWithSyncToken(reassign_block, self.sync_log.get_id)
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

    def testIndexReferences(self):
        """
        Tests that indices properly get set in the sync log when created.
        """
        # first create the parent case
        parent_id = "mommy"
        updated_id = "updated_mommy_id"
        new_parent_id = "daddy"
        self._createCaseStubs([parent_id, updated_id, new_parent_id])
        self._testUpdate(self.sync_log.get_id, {parent_id: [], updated_id: [], new_parent_id: []})

        # create the child
        child_id = "baby"
        index_id = 'my_mom_is'
        child = CaseBlock(
            create=True,
            case_id=child_id,
            user_id=USER_ID,
            version=V2,
            index={index_id: (PARENT_TYPE, parent_id)},
        ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        index_ref = CommCareCaseIndex(identifier=index_id,
                                      referenced_type=PARENT_TYPE,
                                      referenced_id=parent_id)
        self._testUpdate(self.sync_log.get_id, {parent_id: [], updated_id: [], new_parent_id: [],
                                                child_id: [index_ref]})

        # update the child's index (parent type)
        updated_type = "updated_mother_type"
        child = CaseBlock(create=False, case_id=child_id, user_id=USER_ID, version=V2,
                          index={index_id: (updated_type, parent_id)},
                          ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        index_ref = CommCareCaseIndex(identifier=index_id,
                                      referenced_type=updated_type,
                                      referenced_id=parent_id)
        self._testUpdate(self.sync_log.get_id, {parent_id: [], updated_id: [], new_parent_id: [],
                                                child_id: [index_ref]})

        # update the child's index (parent id)
        child = CaseBlock(create=False, case_id=child_id, user_id=USER_ID, version=V2,
                          index={index_id: (updated_type, updated_id)},
                          ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        index_ref = CommCareCaseIndex(identifier=index_id,
                                      referenced_type=updated_type,
                                      referenced_id=updated_id)
        self._testUpdate(self.sync_log.get_id, {parent_id: [], updated_id: [], new_parent_id: [],
                                                child_id: [index_ref]})

        # add new index
        new_index_id = "my_daddy"
        new_index_type = "dad"
        child = CaseBlock(create=False, case_id=child_id, user_id=USER_ID, version=V2,
                          index={new_index_id: (new_index_type, new_parent_id)},
                          ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        new_index_ref = CommCareCaseIndex(identifier=new_index_id,
                                          referenced_type=new_index_type,
                                          referenced_id=new_parent_id)
        self._testUpdate(self.sync_log.get_id, {parent_id: [], updated_id: [], new_parent_id: [],
                                                child_id: [index_ref, new_index_ref]})

        # delete index
        child = CaseBlock(create=False, case_id=child_id, user_id=USER_ID, version=V2,
                          index={index_id: (updated_type, "")},
                          ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        self._testUpdate(self.sync_log.get_id, {parent_id: [], updated_id: [], new_parent_id: [],
                                                child_id: [new_index_ref]})

    def testClosedParentIndex(self):
        """
        Tests that things work properly when you have a reference to the parent
        case in a child, even if it's closed.
        """
        # first create the parent case
        parent_id = "mommy"
        self._createCaseStubs([parent_id])
        self._testUpdate(self.sync_log.get_id, {parent_id: []})

        # create the child
        child_id = "baby"
        index_id = 'my_mom_is'
        child = CaseBlock(
            create=True,
            case_id=child_id,
            user_id=USER_ID,
            version=V2,
            index={index_id: (PARENT_TYPE, parent_id)},
        ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        index_ref = CommCareCaseIndex(identifier=index_id,
                                      referenced_type=PARENT_TYPE,
                                      referenced_id=parent_id)
        self._testUpdate(self.sync_log.get_id, {parent_id: [],
                                                child_id: [index_ref]})

        # close the mother case
        close = CaseBlock(create=False, case_id=parent_id, user_id=USER_ID,
                          version=V2, close=True
                          ).as_xml()
        self._postFakeWithSyncToken(close, self.sync_log.get_id)
        self._testUpdate(self.sync_log.get_id, {child_id: [index_ref]},
                         {parent_id: []})

        # try a clean restore again
        assert_user_has_case(self, self.user, parent_id)
        assert_user_has_case(self, self.user, child_id)

    def testAssignToNewOwner(self):
        """
        Reassigning an indexed child case to a different owner should move
        it from the primary case list to the dependent list.
        """
        # first create the parent case
        parent_id = "mommy"
        self._createCaseStubs([parent_id])
        self._testUpdate(self.sync_log.get_id, {parent_id: []})

        # create the child
        child_id = "baby"
        index_id = 'my_mom_is'
        self._postFakeWithSyncToken(
            CaseBlock(create=True, case_id=child_id, user_id=USER_ID, version=V2,
                      index={index_id: (PARENT_TYPE, parent_id)},
                      ).as_xml(), self.sync_log.get_id)
        index_ref = CommCareCaseIndex(identifier=index_id,
                                      referenced_type=PARENT_TYPE,
                                      referenced_id=parent_id)
        # should be there
        self._testUpdate(self.sync_log.get_id, {parent_id: [],
                                                child_id: [index_ref]})

        # assign to new owner
        new_owner = "not_mine"
        self._postFakeWithSyncToken(
            CaseBlock(create=False, case_id=child_id, user_id=USER_ID, version=V2,
                      owner_id=new_owner
                      ).as_xml(), self.sync_log.get_id)

        # should be moved
        self._testUpdate(self.sync_log.get_id, {parent_id: []},
                         {child_id: [index_ref]})

    def testArchiveUpdates(self):
        """
        Tests that archiving a form (and changing a case) causes the
        case to be included in the next sync.
        """
        case_id = "archive_syncs"
        self._createCaseStubs([case_id])
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

        update_block = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=USER_ID,
            version=V2,
            update={"greeting": "hello"}
        ).as_xml()
        form = self._postFakeWithSyncToken(update_block, self.sync_log.get_id)
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

        form.archive()
        assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
class SyncTokenCachingTest(SyncBaseTest):
    """
    Tests for the cached restore payload attached to a sync token:
    population, reuse, invalidation on relevant submissions, and
    non-invalidation on unrelated ones.
    """

    def testCaching(self):
        self.assertFalse(self.sync_log.has_cached_payload(V2))
        # first request should populate the cache
        original_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        next_sync_log = synclog_from_restore_payload(original_payload)

        self.sync_log = SyncLog.get(self.sync_log._id)
        self.assertTrue(self.sync_log.has_cached_payload(V2))

        # a second request with the same config should be exactly the same
        cached_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.assertEqual(original_payload, cached_payload)

        # a second request without caching should be different (generate a new id)
        uncached_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=False,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.assertNotEqual(original_payload, uncached_payload)
        uncached_sync_log = synclog_from_restore_payload(uncached_payload)
        self.assertNotEqual(next_sync_log._id, uncached_sync_log._id)

        # caching a different version should also produce something new
        versioned_payload = RestoreConfig(
            self.user, version=V1, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.assertNotEqual(original_payload, versioned_payload)
        versioned_sync_log = synclog_from_restore_payload(versioned_payload)
        self.assertNotEqual(next_sync_log._id, versioned_sync_log._id)

    def testCacheInvalidation(self):
        original_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.sync_log = SyncLog.get(self.sync_log._id)
        self.assertTrue(self.sync_log.has_cached_payload(V2))

        # posting a case associated with this sync token should invalidate the cache
        case_id = "cache_invalidation"
        self._createCaseStubs([case_id])
        self.sync_log = SyncLog.get(self.sync_log._id)
        self.assertFalse(self.sync_log.has_cached_payload(V2))

        # resyncing should recreate the cache
        next_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.sync_log = SyncLog.get(self.sync_log._id)
        self.assertTrue(self.sync_log.has_cached_payload(V2))
        self.assertNotEqual(original_payload, next_payload)
        self.assertFalse(case_id in original_payload)
        self.assertTrue(case_id in next_payload)

    def testCacheNonInvalidation(self):
        original_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.sync_log = SyncLog.get(self.sync_log._id)
        self.assertTrue(self.sync_log.has_cached_payload(V2))

        # unlike the invalidation test above, submitting a case NOT tagged
        # with the token must not touch the cache for that token
        case_id = "cache_noninvalidation"
        post_case_blocks([CaseBlock(
            create=True,
            case_id=case_id,
            user_id=self.user.user_id,
            owner_id=self.user.user_id,
            case_type=PARENT_TYPE,
            version=V2,
        ).as_xml()])
        next_payload = RestoreConfig(
            self.user, version=V2, caching_enabled=True,
            restore_id=self.sync_log._id,
        ).get_payload()
        self.assertEqual(original_payload, next_payload)
        self.assertFalse(case_id in next_payload)
class MultiUserSyncTest(SyncBaseTest):
"""
Tests the interaction of two users in sync mode doing various things
"""
def setUp(self):
super(MultiUserSyncTest, self).setUp()
# the other user is an "owner" of the original users cases as well,
# for convenience
self.other_user = User(user_id=OTHER_USER_ID, username="ferrel",
password="changeme", date_joined=datetime(2011, 6, 9),
additional_owner_ids=[SHARED_ID])
# this creates the initial blank sync token in the database
self.other_sync_log = synclog_from_restore_payload(generate_restore_payload(self.other_user))
self.assertTrue(SHARED_ID in self.other_sync_log.owner_ids_on_phone)
self.assertTrue(OTHER_USER_ID in self.other_sync_log.owner_ids_on_phone)
self.user.additional_owner_ids = [SHARED_ID]
self.sync_log = synclog_from_restore_payload(generate_restore_payload(self.user))
self.assertTrue(SHARED_ID in self.sync_log.owner_ids_on_phone)
self.assertTrue(USER_ID in self.sync_log.owner_ids_on_phone)
def testSharedCase(self):
# create a case by one user
case_id = "shared_case"
self._createCaseStubs([case_id], owner_id=SHARED_ID)
# should sync to the other owner
assert_user_has_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id)
def testOtherUserEdits(self):
# create a case by one user
case_id = "other_user_edits"
self._createCaseStubs([case_id], owner_id=SHARED_ID)
# sync to the other's phone to be able to edit
assert_user_has_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id)
latest_sync = SyncLog.last_for_user(OTHER_USER_ID)
# update from another
self._postFakeWithSyncToken(
CaseBlock(create=False, case_id=case_id, user_id=OTHER_USER_ID,
version=V2, update={'greeting': "Hello!"}
).as_xml(), latest_sync.get_id)
# original user syncs again
# make sure updates take
match = assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
self.assertTrue("Hello!" in ElementTree.tostring(match))
def testOtherUserAddsIndex(self):
time = datetime.now()
# create a case from one user
case_id = "other_user_adds_index"
self._createCaseStubs([case_id], owner_id=SHARED_ID)
# sync to the other's phone to be able to edit
assert_user_has_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id)
latest_sync = SyncLog.last_for_user(OTHER_USER_ID)
mother_id = "other_user_adds_index_mother"
parent_case = CaseBlock(
create=True,
date_modified=time,
case_id=mother_id,
user_id=OTHER_USER_ID,
case_type=PARENT_TYPE,
version=V2,
).as_xml(format_datetime=json_format_datetime)
self._postFakeWithSyncToken(
parent_case,
latest_sync.get_id
)
# the original user should not get the parent case
assert_user_doesnt_have_case(self, self.user, mother_id, restore_id=self.sync_log.get_id)
# update the original case from another, adding an indexed case
self._postFakeWithSyncToken(
CaseBlock(
create=False,
case_id=case_id,
user_id=OTHER_USER_ID,
owner_id=USER_ID,
version=V2,
index={'mother': ('mother', mother_id)}
).as_xml(format_datetime=json_format_datetime),
latest_sync.get_id
)
# original user syncs again
# make sure index updates take and indexed case also syncs
expected_parent_case = CaseBlock(
create=True,
date_modified=time,
case_id=mother_id,
user_id=OTHER_USER_ID,
case_type=PARENT_TYPE,
owner_id=OTHER_USER_ID,
version=V2,
).as_xml(format_datetime=json_format_datetime)
check_user_has_case(self, self.user, expected_parent_case,
restore_id=self.sync_log.get_id, version=V2)
orig = assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
self.assertTrue("index" in ElementTree.tostring(orig))
def testMultiUserEdits(self):
time = datetime.now()
# create a case from one user
case_id = "multi_user_edits"
self._createCaseStubs([case_id], owner_id=SHARED_ID)
# both users syncs
self.sync_log = synclog_from_restore_payload(generate_restore_payload(self.user))
self.other_sync_log = synclog_from_restore_payload(generate_restore_payload(self.other_user))
# update case from same user
my_change = CaseBlock(
create=False,
date_modified=time,
case_id=case_id,
user_id=USER_ID,
version=V2,
update={'greeting': 'hello'}
).as_xml(format_datetime=json_format_datetime)
self._postFakeWithSyncToken(
my_change,
self.sync_log.get_id
)
# update from another user
their_change = CaseBlock(
create=False,
date_modified=time,
case_id=case_id,
user_id=USER_ID,
version=V2,
update={'greeting_2': 'hello'}
).as_xml(format_datetime=json_format_datetime)
self._postFakeWithSyncToken(
their_change,
self.other_sync_log.get_id
)
# original user syncs again
# make sure updates both appear (and merge?)
joint_change = CaseBlock(
create=False,
date_modified=time,
case_id=case_id,
user_id=USER_ID,
version=V2,
update={
'greeting': 'hello',
'greeting_2': 'hello'
},
owner_id=SHARED_ID,
case_name='',
case_type='mother',
).as_xml(format_datetime=json_format_datetime)
check_user_has_case(self, self.user, joint_change, restore_id=self.sync_log.get_id, version=V2)
check_user_has_case(self, self.other_user, joint_change, restore_id=self.other_sync_log.get_id, version=V2)
    def testOtherUserCloses(self):
        """A second user closes a shared case; the original user's next
        restore must include the case (so the close propagates).
        """
        # create a case from one user
        case_id = "other_user_closes"
        self._createCaseStubs([case_id], owner_id=SHARED_ID)
        # sync then close case from another user
        self.other_sync_log = synclog_from_restore_payload(generate_restore_payload(self.other_user))
        # NOTE(review): close carries user_id=USER_ID although it is posted
        # with the other user's token -- confirm this is intended.
        close_block = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=USER_ID,
            version=V2,
            close=True
        ).as_xml()
        self._postFakeWithSyncToken(
            close_block,
            self.other_sync_log.get_id
        )
        # original user syncs again
        # make sure close block appears
        assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
    def testOtherUserUpdatesUnowned(self):
        """An update to a case the first user does not own must not show
        up in the first user's restore.
        """
        # create a case from one user and assign ownership elsewhere
        case_id = "other_user_updates_unowned"
        self._createCaseStubs([case_id], owner_id=OTHER_USER_ID)
        # sync and update from another user
        assert_user_has_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id)
        self.other_sync_log = SyncLog.last_for_user(OTHER_USER_ID)
        update = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=OTHER_USER_ID,
            version=V2,
            update={'greeting': 'hello'}
        ).as_xml()
        self._postFakeWithSyncToken(
            update,
            self.other_sync_log.get_id
        )
        # original user syncs again
        # make sure there are no new changes
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
    def testIndexesSync(self):
        """Reassigning an indexed child case to a second user must also
        sync the referenced parent case to that user.
        """
        # create a parent and child case (with index) from one user
        parent_id = "indexes_sync_parent"
        case_id = "indexes_sync"
        self._createCaseStubs([parent_id])
        child = CaseBlock(
            create=True,
            case_id=case_id,
            user_id=USER_ID,
            owner_id=USER_ID,
            version=V2,
            index={'mother': ('mother', parent_id)}
        ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        # make sure the second user doesn't get either
        assert_user_doesnt_have_case(self, self.other_user, parent_id, restore_id=self.other_sync_log.get_id)
        assert_user_doesnt_have_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id)
        # assign just the child case to a second user
        child_update = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=USER_ID,
            owner_id=OTHER_USER_ID,
            version=V2,
            update={"greeting": "hello"}
        ).as_xml()
        self._postFakeWithSyncToken(child_update, self.sync_log.get_id)
        # second user syncs
        # make sure both cases restore (parent comes along for the index)
        assert_user_has_case(self, self.other_user, parent_id, restore_id=self.other_sync_log.get_id)
        assert_user_has_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id)
def testOtherUserUpdatesIndex(self):
# create a parent and child case (with index) from one user
parent_id = "other_updates_index_parent"
case_id = "other_updates_index_child"
self._createCaseStubs([parent_id])
parent = CaseBlock(case_id=parent_id, version=V2).as_xml()
child = CaseBlock(
create=True,
case_id=case_id,
user_id=USER_ID,
owner_id=USER_ID,
version=V2,
index={'mother': ('mother', parent_id)}
).as_xml()
self._postFakeWithSyncToken(child, self.sync_log.get_id)
assert_user_doesnt_have_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
# assign the parent case away from same user
parent_update = CaseBlock(
create=False,
case_id=parent_id,
user_id=USER_ID,
owner_id=OTHER_USER_ID,
update={"greeting": "hello"},
version=V2).as_xml()
self._postFakeWithSyncToken(parent_update, self.sync_log.get_id)
self.sync_log = SyncLog.get(self.sync_log.get_id)
# these tests added to debug another issue revealed by this test
self.assertTrue(self.sync_log.phone_has_case(case_id))
self.assertTrue(self.sync_log.phone_has_dependent_case(parent_id))
self.assertTrue(self.sync_log.phone_is_holding_case(case_id))
self.assertTrue(self.sync_log.phone_is_holding_case(parent_id))
# original user syncs again
# make sure there are no new changes
assert_user_doesnt_have_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
# update the parent case from another user
assert_user_has_case(self, self.other_user, parent_id, restore_id=self.other_sync_log.get_id)
self.other_sync_log = SyncLog.last_for_user(OTHER_USER_ID)
other_parent_update = CaseBlock(
create=False,
case_id=parent_id,
user_id=OTHER_USER_ID,
update={"greeting2": "hi"},
version=V2
).as_xml()
self._postFakeWithSyncToken(other_parent_update, self.other_sync_log.get_id)
# make sure the indexed case syncs again
self.sync_log = SyncLog.last_for_user(USER_ID)
assert_user_has_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
    def testOtherUserReassignsIndexed(self):
        """Ownership dance over an indexed parent/child pair: checks which
        edits sync back to the original user after each reassignment.
        """
        # create a parent and child case (with index) from one user
        parent_id = "other_reassigns_index_parent"
        case_id = "other_reassigns_index_child"
        self._createCaseStubs([parent_id])
        child = CaseBlock(
            create=True,
            case_id=case_id,
            user_id=USER_ID,
            owner_id=SHARED_ID,
            version=V2,
            index={'mother': ('mother', parent_id)}
        ).as_xml()
        self._postFakeWithSyncToken(child, self.sync_log.get_id)
        # assign the parent case away from the same user
        parent_update = CaseBlock(
            create=False,
            case_id=parent_id,
            user_id=USER_ID,
            owner_id=OTHER_USER_ID,
            update={"greeting": "hello"},
            version=V2).as_xml()
        self._postFakeWithSyncToken(parent_update, self.sync_log.get_id)
        # sync cases to second user
        self.other_sync_log = synclog_from_restore_payload(generate_restore_payload(self.other_user))
        # change the child's owner from another user
        child_reassignment = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=OTHER_USER_ID,
            owner_id=OTHER_USER_ID,
            version=V2,
            update={"childgreeting": "hi!"},
        ).as_xml()
        self._postFakeWithSyncToken(child_reassignment, self.other_sync_log.get_id)
        # also change the parent from the second user
        other_parent_update = CaseBlock(
            create=False,
            case_id=parent_id,
            user_id=OTHER_USER_ID,
            owner_id=OTHER_USER_ID,
            update={"other_greeting": "something new"},
            version=V2).as_xml()
        self._postFakeWithSyncToken(other_parent_update, self.other_sync_log.get_id)
        # original user syncs again
        self.sync_log = SyncLog.last_for_user(self.user.user_id)
        # both cases should sync to original user with updated ownership / edits
        assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
        assert_user_has_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
        # crude substring check on the raw restore payload
        payload = generate_restore_payload(self.user, self.sync_log.get_id,
                                           version=V2)
        self.assertTrue("something new" in payload)
        self.assertTrue("hi!" in payload)
        # change the parent again from the second user
        other_parent_update = CaseBlock(
            create=False,
            case_id=parent_id,
            user_id=OTHER_USER_ID,
            owner_id=OTHER_USER_ID,
            update={"other_greeting": "something different"},
            version=V2).as_xml()
        self._postFakeWithSyncToken(other_parent_update, self.other_sync_log.get_id)
        # original user syncs again
        self.sync_log = SyncLog.last_for_user(self.user.user_id)
        # should be no changes (neither case is owned by the original user now)
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
        assert_user_doesnt_have_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
        # change the child again from the second user
        other_child_update = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=OTHER_USER_ID,
            owner_id=OTHER_USER_ID,
            version=V2,
            update={"childgreeting": "hi changed!"},
        ).as_xml()
        self._postFakeWithSyncToken(other_child_update, self.other_sync_log.get_id)
        # original user syncs again
        self.sync_log = SyncLog.last_for_user(self.user.user_id)
        # should be no changes
        assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
        assert_user_doesnt_have_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
        # change owner of child back to original user from second user
        child_reassignment = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=OTHER_USER_ID,
            owner_id=USER_ID,
            version=V2
        ).as_xml()
        self._postFakeWithSyncToken(child_reassignment, self.other_sync_log.get_id)
        # original user syncs again
        self.sync_log = SyncLog.last_for_user(self.user.user_id)
        # both cases should now sync
        assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id)
        assert_user_has_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
        # crude substring check on the raw restore payload
        payload = generate_restore_payload(self.user, self.sync_log.get_id,
                                           version=V2)
        self.assertTrue("something different" in payload)
        self.assertTrue("hi changed!" in payload)
    def testComplicatedGatesBug(self):
        """Regression test replaying real (test) forms from a production
        bug; simply completing without crashing is the assertion that
        matters -- form processing used to fail hard here.
        """
        # found this bug in the wild, used the real (test) forms to fix it
        # just running through this test used to fail hard, even though there
        # are no asserts
        self.assertEqual(0, len(CommCareCase.view("case/by_user", reduce=False).all()))
        folder_path = os.path.join("bugs", "dependent_case_conflicts")
        files = ["reg1.xml", "reg2.xml", "cf.xml", "close.xml"]
        for f in files:
            form = self._postWithSyncToken(os.path.join(folder_path, f), self.sync_log.get_id)
            form = XFormInstance.get(form.get_id)
            self.assertFalse(hasattr(form, "problem"))
            # re-sync after every form, mirroring real phone behavior
            self.sync_log = synclog_from_restore_payload(generate_restore_payload(self.user, version="2.0"))
class SyncTokenReprocessingTest(SyncBaseTest):
    """
    Tests sync token logic for fixing itself when it gets into a bad state.
    """

    def testUpdateNonExisting(self):
        """Updating a case that was never created must raise."""
        case_id = 'non_existent'
        caseblock = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=USER_ID,
            owner_id=USER_ID,
            case_type=PARENT_TYPE,
            version=V2
        ).as_xml()
        # BUGFIX: self.fail() raises AssertionError, so it must live in the
        # `else` clause -- inside the `try` it was swallowed by the handler
        # and the test could never fail.
        try:
            self._postFakeWithSyncToken(caseblock, self.sync_log.get_id)
        except AssertionError:
            # expected: posting the bad update raises a true error
            pass
        else:
            self.fail('posting an update to a non-existant case should fail')

    def testShouldHaveCase(self):
        """A sync log missing a case it should have is repaired on update."""
        case_id = "should_have"
        self._createCaseStubs([case_id])
        sync_log = SyncLog.get(self.sync_log._id)
        self.assertEqual(1, len(sync_log.cases_on_phone))
        self.assertEqual(case_id, sync_log.cases_on_phone[0].case_id)
        # manually delete it and then try to update
        sync_log.cases_on_phone = []
        sync_log.save()
        update = CaseBlock(
            create=False,
            case_id=case_id,
            user_id=USER_ID,
            owner_id=USER_ID,
            case_type=PARENT_TYPE,
            version=V2,
            update={'something': "changed"},
        ).as_xml()
        # this should work because it should magically fix itself
        self._postFakeWithSyncToken(update, self.sync_log.get_id)
        sync_log = SyncLog.get(self.sync_log._id)
        self.assertFalse(getattr(sync_log, 'has_assert_errors', False))

    def testCodependencies(self):
        """Updates to cases the token doesn't know about fail in either order."""
        case_id1 = 'bad1'
        case_id2 = 'bad2'
        initial_caseblocks = [CaseBlock(
            create=True,
            case_id=case_id,
            user_id='not_me',
            owner_id='not_me',
            case_type=PARENT_TYPE,
            version=V2
        ).as_xml() for case_id in [case_id1, case_id2]]
        post_case_blocks(
            initial_caseblocks,
        )

        def _get_bad_caseblocks(ids):
            # updates against cases the sync token never saw
            return [CaseBlock(
                create=False,
                case_id=id,
                user_id=USER_ID,
                owner_id=USER_ID,
                case_type=PARENT_TYPE,
                version=V2
            ).as_xml() for id in ids]

        # BUGFIX (both blocks below): move self.fail() to the `else` clause
        # so its AssertionError is not swallowed by the except handler.
        try:
            post_case_blocks(
                _get_bad_caseblocks([case_id1, case_id2]),
                form_extras={ "last_sync_token": self.sync_log._id }
            )
        except AssertionError:
            # expected: posting the bad updates raises a true error
            pass
        else:
            self.fail('posting an update to non-existant cases should fail')
        try:
            post_case_blocks(
                _get_bad_caseblocks([case_id2, case_id1]),
                form_extras={ "last_sync_token": self.sync_log._id }
            )
        except AssertionError:
            # expected: posting the bad updates raises a true error
            pass
        else:
            self.fail('posting an update to non-existant cases should fail')
``` |
[
{
"content": "Repeat the full code snippet:\n```python\n\"\"\"\nCode to help with managing a TVTK data set in Pythonic ways.\n\"\"\"\n\n# Author: Prabhu Ramachandran <prabhu@aero.iitb.ac.in>\n# Copyright (c) 2008, Enthought, Inc.\n# License: BSD Style.\n\nfrom enthought.traits.api import (HasTraits, Instance, A... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\n\"\"\"\nCode to help with managing a TVTK data set in Pythonic ways.\n\"\"\"\n\n# Author: Prabhu Ramachandran <prabhu@aero.iitb.ac.in>\n# Copyright (c) 2008, Enthought, Inc.\n# License: BSD Style.\n\nfrom enthought.traits.api import (HasTra... | ```python
"""
Code to help with managing a TVTK data set in Pythonic ways.
"""
# Author: Prabhu Ramachandran <prabhu@aero.iitb.ac.in>
# Copyright (c) 2008, Enthought, Inc.
# License: BSD Style.
from enthought.traits.api import (HasTraits, Instance, Array, Str,
Property, Dict)
from enthought.tvtk.api import tvtk
from enthought.tvtk.array_handler import array2vtk
######################################################################
# Utility functions.
######################################################################
def get_array_type(arr):
    """Classify `arr` as 'scalars', 'vectors' or 'tensors' from its
    number of components (1 or 4 -> scalars, 3 -> vectors, 9 -> tensors).
    Any other component count yields the empty string.
    """
    kind_by_ncomp = {1: 'scalars', 3: 'vectors', 4: 'scalars', 9: 'tensors'}
    return kind_by_ncomp.get(arr.number_of_components, '')
def get_attribute_list(data):
    """ Gets scalar, vector and tensor information from the given data
    (either cell or point data).

    Returns a dict mapping 'scalars'/'vectors'/'tensors' to lists of
    array names, with each currently-active array (if any) moved to the
    front of its list.
    """
    attr = {'scalars':[], 'vectors':[], 'tensors':[]}
    if data is not None:
        n = data.number_of_arrays
        for i in range(n):
            name = data.get_array_name(i)
            t = get_array_type(data.get_array(i))
            # skip unnamed arrays and unsupported component counts
            if len(t) > 0 and name is not None:
                attr[t].extend([name])
        def _mk_first(lst, value):
            """Makes the specified `value` the first item in `lst`."""
            lst.remove(value)
            lst.insert(0, value)
        # NOTE: shallow copy -- attr1 shares the same list objects as
        # attr, so _mk_first below mutates both views; only the except
        # branch inserts into attr1 without touching attr's ordering.
        attr1 = attr.copy()
        for a in attr:
            v = getattr(data, a)
            if v is not None:
                name = v.name
                if name is not None:
                    try:
                        _mk_first(attr[a], v.name)
                    except ValueError:
                        # Sometimes we have a multi-component scalar.
                        attr1[a].insert(0, name)
        return attr1
def get_all_attributes(obj):
    """Return a (point_attrs, cell_attrs) pair for the given VTK data
    object, where each element is the dict built by `get_attribute_list`
    describing the available scalar, vector and tensor arrays.
    """
    return (get_attribute_list(obj.point_data),
            get_attribute_list(obj.cell_data))
################################################################################
# `DatasetManager` class.
################################################################################
class DatasetManager(HasTraits):
    """Manage the arrays of a TVTK dataset in a Pythonic way.

    Exposes the point/cell scalars, vectors and tensors of a
    `tvtk.DataSet` as dicts of numpy arrays, and routes attribute
    activation through a `tvtk.AssignAttribute` filter so that the VTK
    pipeline is notified of changes correctly.
    """

    # The TVTK dataset we manage.
    dataset = Instance(tvtk.DataSet)

    # Our output, this is the dataset modified by us with different
    # active arrays.
    output = Property(Instance(tvtk.DataSet))

    # The point scalars for the dataset. You may manipulate the arrays
    # in-place. However adding new keys in this dict will not set the
    # data in the `dataset` for that you must explicitly call
    # `add_array`.
    point_scalars = Dict(Str, Array)
    # Point vectors.
    point_vectors = Dict(Str, Array)
    # Point tensors.
    point_tensors = Dict(Str, Array)

    # The cell scalars for the dataset.
    cell_scalars = Dict(Str, Array)
    # Cell vectors.
    cell_vectors = Dict(Str, Array)
    # Cell tensors.
    cell_tensors = Dict(Str, Array)

    # This filter allows us to change the attributes of the data
    # object and will ensure that the pipeline is properly taken care
    # of. Directly setting the array in the VTK object will not do
    # this.
    _assign_attribute = Instance(tvtk.AssignAttribute, args=(),
                                 allow_none=False)

    ######################################################################
    # Public interface.
    ######################################################################
    def add_array(self, array, name, category='point'):
        """
        Add an array to the dataset to specified category ('point' or
        'cell').

        `array` must be 1D, or 2D with 1, 3, 4 or 9 columns
        (scalars / vectors / scalars / tensors respectively).
        """
        assert len(array.shape) <= 2, "Only 2D arrays can be added."
        data = getattr(self.dataset, '%s_data'%category)
        if len(array.shape) == 2:
            assert array.shape[1] in [1, 3, 4, 9], \
                "Only Nxm arrays where (m in [1,3,4,9]) are supported"
            va = tvtk.to_tvtk(array2vtk(array))
            va.name = name
            data.add_array(va)
            mapping = {1:'scalars', 3: 'vectors', 4: 'scalars',
                       9: 'tensors'}
            # NOTE(review): `dict` shadows the builtin; left untouched in
            # this documentation-only change.
            dict = getattr(self, '%s_%s'%(category,
                                          mapping[array.shape[1]]))
            dict[name] = array
        else:
            # 1D arrays are always treated as scalars.
            va = tvtk.to_tvtk(array2vtk(array))
            va.name = name
            data.add_array(va)
            dict = getattr(self, '%s_scalars'%(category))
            dict[name] = array

    def remove_array(self, name, category='point'):
        """Remove an array by its name and optional category (point and
        cell). Returns the removed array.
        """
        type = self._find_array(name, category)
        data = getattr(self.dataset, '%s_data'%category)
        data.remove_array(name)
        d = getattr(self, '%s_%s'%(category, type))
        return d.pop(name)

    def rename_array(self, name1, name2, category='point'):
        """Rename a particular array from `name1` to `name2`.
        """
        type = self._find_array(name1, category)
        data = getattr(self.dataset, '%s_data'%category)
        arr = data.get_array(name1)
        arr.name = name2
        d = getattr(self, '%s_%s'%(category, type))
        d[name2] = d.pop(name1)

    def activate(self, name, category='point'):
        """Make the specified array the active one.
        """
        type = self._find_array(name, category)
        self._activate_data_array(type, category, name)

    def update(self):
        """Update the dataset when the arrays are changed.
        """
        self.dataset.modified()
        self._assign_attribute.update()

    ######################################################################
    # Non-public interface.
    ######################################################################
    def _dataset_changed(self, value):
        # Static trait handler: rebuild our numpy views and re-wire the
        # AssignAttribute filter whenever the dataset trait is set.
        self._setup_data()
        self._assign_attribute.input = value

    def _get_output(self):
        # Property getter for `output`.
        return self._assign_attribute.output

    def _setup_data(self):
        """Updates the arrays from what is available in the input data.
        """
        input = self.dataset
        pnt_attr, cell_attr = get_all_attributes(input)
        self._setup_data_arrays(cell_attr, 'cell')
        self._setup_data_arrays(pnt_attr, 'point')

    def _setup_data_arrays(self, attributes, d_type):
        """Given the dict of the attributes from the
        `get_all_attributes` function and the data type (point/cell)
        data this will setup the object and the data.
        """
        attrs = ['scalars', 'vectors', 'tensors']
        aa = self._assign_attribute
        input = self.dataset
        data = getattr(input, '%s_data'%d_type)
        for attr in attrs:
            values = attributes[attr]
            # Get the arrays from VTK, create numpy arrays and setup our
            # traits.
            arrays = {}
            for name in values:
                va = data.get_array(name)
                npa = va.to_array()
                # Now test if changes to the numpy array are reflected
                # in the VTK array, if they are we are set, else we
                # have to set the VTK array back to the numpy array.
                if len(npa.shape) > 1:
                    old = npa[0,0]
                    npa[0][0] = old - 1
                    if abs(va[0][0] - npa[0,0]) > 1e-8:
                        va.from_array(npa)
                    # restore the probed value either way
                    npa[0][0] = old
                else:
                    old = npa[0]
                    npa[0] = old - 1
                    if abs(va[0] - npa[0]) > 1e-8:
                        va.from_array(npa)
                    npa[0] = old
                arrays[name] = npa
            setattr(self, '%s_%s'%(d_type, attr), arrays)

    def _activate_data_array(self, data_type, category, name):
        """Activate (or deactivate) a particular array.

        Given the nature of the data (scalars, vectors etc.) and the
        type of data (cell or points) it activates the array given by
        its name.

        Parameters:
        -----------

        data_type: one of 'scalars', 'vectors', 'tensors'
        category: one of 'cell', 'point'.
        name: string of array name to activate.
        """
        input = self.dataset
        data = None
        data = getattr(input, category + '_data')
        method = getattr(data, 'set_active_%s'%data_type)
        if len(name) == 0:
            # If the value is empty then we deactivate that attribute.
            method(None)
        else:
            aa = self._assign_attribute
            method(name)
            aa.assign(name, data_type.upper(), category.upper() +'_DATA')
            aa.update()

    def _find_array(self, name, category='point'):
        """Return information on which kind of attribute contains the
        specified named array in a particular category."""
        types = ['scalars', 'vectors', 'tensors']
        for type in types:
            attr = '%s_%s'%(category, type)
            d = getattr(self, attr)
            if name in d.keys():
                return type
        raise KeyError('No %s array named %s available in dataset'
                       %(category, name))
``` |
[
{
"content": "Output the full code verbatim (no extra comments):\n```python\n# Protocol Buffers - Google's data interchange format\n# Copyright 2008 Google Inc. All rights reserved.\n# http://code.google.com/p/protobuf/\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, a... | [
{
"content": "Output the full code verbatim (no extra comments):\n<|memory_start|>```python\n# Protocol Buffers - Google's data interchange format\n# Copyright 2008 Google Inc. All rights reserved.\n# http://code.google.com/p/protobuf/\n#\n# Redistribution and use in source and binary forms, with or without\n#... | ```python
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -*- coding: utf-8 -*-
#
# Copyright 2007 Google Inc. All Rights Reserved.
"""Unittest for reflection.py, which also indirectly tests the output of the
pure-Python protocol compiler.
"""
__author__ = 'robinson@google.com (Will Robinson)'
import operator
import unittest
# TODO(robinson): When we split this test in two, only some of these imports
# will be necessary in each test.
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf.internal import more_extensions_pb2
from google.protobuf.internal import more_messages_pb2
from google.protobuf.internal import wire_format
from google.protobuf.internal import test_util
from google.protobuf.internal import decoder
class RefectionTest(unittest.TestCase):
def testSimpleHasBits(self):
# Test a scalar.
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.HasField('optional_int32'))
self.assertEqual(0, proto.optional_int32)
# HasField() shouldn't be true if all we've done is
# read the default value.
self.assertTrue(not proto.HasField('optional_int32'))
proto.optional_int32 = 1
# Setting a value however *should* set the "has" bit.
self.assertTrue(proto.HasField('optional_int32'))
proto.ClearField('optional_int32')
# And clearing that value should unset the "has" bit.
self.assertTrue(not proto.HasField('optional_int32'))
def testHasBitsWithSinglyNestedScalar(self):
# Helper used to test foreign messages and groups.
#
# composite_field_name should be the name of a non-repeated
# composite (i.e., foreign or group) field in TestAllTypes,
# and scalar_field_name should be the name of an integer-valued
# scalar field within that composite.
#
# I never thought I'd miss C++ macros and templates so much. :(
# This helper is semantically just:
#
# assert proto.composite_field.scalar_field == 0
# assert not proto.composite_field.HasField('scalar_field')
# assert not proto.HasField('composite_field')
#
# proto.composite_field.scalar_field = 10
# old_composite_field = proto.composite_field
#
# assert proto.composite_field.scalar_field == 10
# assert proto.composite_field.HasField('scalar_field')
# assert proto.HasField('composite_field')
#
# proto.ClearField('composite_field')
#
# assert not proto.composite_field.HasField('scalar_field')
# assert not proto.HasField('composite_field')
# assert proto.composite_field.scalar_field == 0
#
# # Now ensure that ClearField('composite_field') disconnected
# # the old field object from the object tree...
# assert old_composite_field is not proto.composite_field
# old_composite_field.scalar_field = 20
# assert not proto.composite_field.HasField('scalar_field')
# assert not proto.HasField('composite_field')
def TestCompositeHasBits(composite_field_name, scalar_field_name):
proto = unittest_pb2.TestAllTypes()
# First, check that we can get the scalar value, and see that it's the
# default (0), but that proto.HasField('omposite') and
# proto.composite.HasField('scalar') will still return False.
composite_field = getattr(proto, composite_field_name)
original_scalar_value = getattr(composite_field, scalar_field_name)
self.assertEqual(0, original_scalar_value)
# Assert that the composite object does not "have" the scalar.
self.assertTrue(not composite_field.HasField(scalar_field_name))
# Assert that proto does not "have" the composite field.
self.assertTrue(not proto.HasField(composite_field_name))
# Now set the scalar within the composite field. Ensure that the setting
# is reflected, and that proto.HasField('composite') and
# proto.composite.HasField('scalar') now both return True.
new_val = 20
setattr(composite_field, scalar_field_name, new_val)
self.assertEqual(new_val, getattr(composite_field, scalar_field_name))
# Hold on to a reference to the current composite_field object.
old_composite_field = composite_field
# Assert that the has methods now return true.
self.assertTrue(composite_field.HasField(scalar_field_name))
self.assertTrue(proto.HasField(composite_field_name))
# Now call the clear method...
proto.ClearField(composite_field_name)
# ...and ensure that the "has" bits are all back to False...
composite_field = getattr(proto, composite_field_name)
self.assertTrue(not composite_field.HasField(scalar_field_name))
self.assertTrue(not proto.HasField(composite_field_name))
# ...and ensure that the scalar field has returned to its default.
self.assertEqual(0, getattr(composite_field, scalar_field_name))
# Finally, ensure that modifications to the old composite field object
# don't have any effect on the parent.
#
# (NOTE that when we clear the composite field in the parent, we actually
# don't recursively clear down the tree. Instead, we just disconnect the
# cleared composite from the tree.)
self.assertTrue(old_composite_field is not composite_field)
setattr(old_composite_field, scalar_field_name, new_val)
self.assertTrue(not composite_field.HasField(scalar_field_name))
self.assertTrue(not proto.HasField(composite_field_name))
self.assertEqual(0, getattr(composite_field, scalar_field_name))
# Test simple, single-level nesting when we set a scalar.
TestCompositeHasBits('optionalgroup', 'a')
TestCompositeHasBits('optional_nested_message', 'bb')
TestCompositeHasBits('optional_foreign_message', 'c')
TestCompositeHasBits('optional_import_message', 'd')
def testReferencesToNestedMessage(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
del proto
# A previous version had a bug where this would raise an exception when
# hitting a now-dead weak reference.
nested.bb = 23
def testDisconnectingNestedMessageBeforeSettingField(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
proto.ClearField('optional_nested_message') # Should disconnect from parent
self.assertTrue(nested is not proto.optional_nested_message)
nested.bb = 23
self.assertTrue(not proto.HasField('optional_nested_message'))
self.assertEqual(0, proto.optional_nested_message.bb)
def testHasBitsWhenModifyingRepeatedFields(self):
# Test nesting when we add an element to a repeated field in a submessage.
proto = unittest_pb2.TestNestedMessageHasBits()
proto.optional_nested_message.nestedmessage_repeated_int32.append(5)
self.assertEqual(
[5], proto.optional_nested_message.nestedmessage_repeated_int32)
self.assertTrue(proto.HasField('optional_nested_message'))
# Do the same test, but with a repeated composite field within the
# submessage.
proto.ClearField('optional_nested_message')
self.assertTrue(not proto.HasField('optional_nested_message'))
proto.optional_nested_message.nestedmessage_repeated_foreignmessage.add()
self.assertTrue(proto.HasField('optional_nested_message'))
def testHasBitsForManyLevelsOfNesting(self):
# Test nesting many levels deep.
recursive_proto = unittest_pb2.TestMutualRecursionA()
self.assertTrue(not recursive_proto.HasField('bb'))
self.assertEqual(0, recursive_proto.bb.a.bb.a.bb.optional_int32)
self.assertTrue(not recursive_proto.HasField('bb'))
recursive_proto.bb.a.bb.a.bb.optional_int32 = 5
self.assertEqual(5, recursive_proto.bb.a.bb.a.bb.optional_int32)
self.assertTrue(recursive_proto.HasField('bb'))
self.assertTrue(recursive_proto.bb.HasField('a'))
self.assertTrue(recursive_proto.bb.a.HasField('bb'))
self.assertTrue(recursive_proto.bb.a.bb.HasField('a'))
self.assertTrue(recursive_proto.bb.a.bb.a.HasField('bb'))
self.assertTrue(not recursive_proto.bb.a.bb.a.bb.HasField('a'))
self.assertTrue(recursive_proto.bb.a.bb.a.bb.HasField('optional_int32'))
  def testSingularListFields(self):
    """ListFields() returns only set singular fields, ordered by field
    number (not by assignment order)."""
    proto = unittest_pb2.TestAllTypes()
    proto.optional_fixed32 = 1
    proto.optional_int32 = 5
    proto.optional_string = 'foo'
    self.assertEqual(
      [ (proto.DESCRIPTOR.fields_by_name['optional_int32' ], 5),
        (proto.DESCRIPTOR.fields_by_name['optional_fixed32'], 1),
        (proto.DESCRIPTOR.fields_by_name['optional_string' ], 'foo') ],
      proto.ListFields())
  def testRepeatedListFields(self):
    """ListFields() groups repeated values into lists, ordered by field
    number, alongside set singular fields."""
    proto = unittest_pb2.TestAllTypes()
    proto.repeated_fixed32.append(1)
    proto.repeated_int32.append(5)
    proto.repeated_int32.append(11)
    proto.repeated_string.append('foo')
    proto.repeated_string.append('bar')
    proto.repeated_string.append('baz')
    proto.optional_int32 = 21
    self.assertEqual(
      [ (proto.DESCRIPTOR.fields_by_name['optional_int32' ], 21),
        (proto.DESCRIPTOR.fields_by_name['repeated_int32' ], [5, 11]),
        (proto.DESCRIPTOR.fields_by_name['repeated_fixed32'], [1]),
        (proto.DESCRIPTOR.fields_by_name['repeated_string' ],
          ['foo', 'bar', 'baz']) ],
      proto.ListFields())
  def testSingularListExtensions(self):
    """ListFields() includes set singular extensions, ordered by field
    number."""
    proto = unittest_pb2.TestAllExtensions()
    proto.Extensions[unittest_pb2.optional_fixed32_extension] = 1
    proto.Extensions[unittest_pb2.optional_int32_extension  ] = 5
    proto.Extensions[unittest_pb2.optional_string_extension ] = 'foo'
    self.assertEqual(
      [ (unittest_pb2.optional_int32_extension  , 5),
        (unittest_pb2.optional_fixed32_extension, 1),
        (unittest_pb2.optional_string_extension , 'foo') ],
      proto.ListFields())
  def testRepeatedListExtensions(self):
    """ListFields() includes repeated extensions as lists, ordered by
    field number, mixed with singular extensions."""
    proto = unittest_pb2.TestAllExtensions()
    proto.Extensions[unittest_pb2.repeated_fixed32_extension].append(1)
    proto.Extensions[unittest_pb2.repeated_int32_extension  ].append(5)
    proto.Extensions[unittest_pb2.repeated_int32_extension  ].append(11)
    proto.Extensions[unittest_pb2.repeated_string_extension ].append('foo')
    proto.Extensions[unittest_pb2.repeated_string_extension ].append('bar')
    proto.Extensions[unittest_pb2.repeated_string_extension ].append('baz')
    proto.Extensions[unittest_pb2.optional_int32_extension  ] = 21
    self.assertEqual(
      [ (unittest_pb2.optional_int32_extension  , 21),
        (unittest_pb2.repeated_int32_extension  , [5, 11]),
        (unittest_pb2.repeated_fixed32_extension, [1]),
        (unittest_pb2.repeated_string_extension , ['foo', 'bar', 'baz']) ],
      proto.ListFields())
def testListFieldsAndExtensions(self):
proto = unittest_pb2.TestFieldOrderings()
test_util.SetAllFieldsAndExtensions(proto)
unittest_pb2.my_extension_int
self.assertEqual(
[ (proto.DESCRIPTOR.fields_by_name['my_int' ], 1),
(unittest_pb2.my_extension_int , 23),
(proto.DESCRIPTOR.fields_by_name['my_string'], 'foo'),
(unittest_pb2.my_extension_string , 'bar'),
(proto.DESCRIPTOR.fields_by_name['my_float' ], 1.0) ],
proto.ListFields())
def testDefaultValues(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, proto.optional_int32)
self.assertEqual(0, proto.optional_int64)
self.assertEqual(0, proto.optional_uint32)
self.assertEqual(0, proto.optional_uint64)
self.assertEqual(0, proto.optional_sint32)
self.assertEqual(0, proto.optional_sint64)
self.assertEqual(0, proto.optional_fixed32)
self.assertEqual(0, proto.optional_fixed64)
self.assertEqual(0, proto.optional_sfixed32)
self.assertEqual(0, proto.optional_sfixed64)
self.assertEqual(0.0, proto.optional_float)
self.assertEqual(0.0, proto.optional_double)
self.assertEqual(False, proto.optional_bool)
self.assertEqual('', proto.optional_string)
self.assertEqual('', proto.optional_bytes)
self.assertEqual(41, proto.default_int32)
self.assertEqual(42, proto.default_int64)
self.assertEqual(43, proto.default_uint32)
self.assertEqual(44, proto.default_uint64)
self.assertEqual(-45, proto.default_sint32)
self.assertEqual(46, proto.default_sint64)
self.assertEqual(47, proto.default_fixed32)
self.assertEqual(48, proto.default_fixed64)
self.assertEqual(49, proto.default_sfixed32)
self.assertEqual(-50, proto.default_sfixed64)
self.assertEqual(51.5, proto.default_float)
self.assertEqual(52e3, proto.default_double)
self.assertEqual(True, proto.default_bool)
self.assertEqual('hello', proto.default_string)
self.assertEqual('world', proto.default_bytes)
self.assertEqual(unittest_pb2.TestAllTypes.BAR, proto.default_nested_enum)
self.assertEqual(unittest_pb2.FOREIGN_BAR, proto.default_foreign_enum)
self.assertEqual(unittest_import_pb2.IMPORT_BAR,
proto.default_import_enum)
proto = unittest_pb2.TestExtremeDefaultValues()
self.assertEqual(u'\u1234', proto.utf8_string)
def testHasFieldWithUnknownFieldName(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(ValueError, proto.HasField, 'nonexistent_field')
def testClearFieldWithUnknownFieldName(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(ValueError, proto.ClearField, 'nonexistent_field')
def testDisallowedAssignments(self):
# It's illegal to assign values directly to repeated fields
# or to nonrepeated composite fields. Ensure that this fails.
proto = unittest_pb2.TestAllTypes()
# Repeated fields.
self.assertRaises(AttributeError, setattr, proto, 'repeated_int32', 10)
# Lists shouldn't work, either.
self.assertRaises(AttributeError, setattr, proto, 'repeated_int32', [10])
# Composite fields.
self.assertRaises(AttributeError, setattr, proto,
'optional_nested_message', 23)
# Assignment to a repeated nested message field without specifying
# the index in the array of nested messages.
self.assertRaises(AttributeError, setattr, proto.repeated_nested_message,
'bb', 34)
# Assignment to an attribute of a repeated field.
self.assertRaises(AttributeError, setattr, proto.repeated_float,
'some_attribute', 34)
# proto.nonexistent_field = 23 should fail as well.
self.assertRaises(AttributeError, setattr, proto, 'nonexistent_field', 23)
# TODO(robinson): Add type-safety check for enums.
def testSingleScalarTypeSafety(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(TypeError, setattr, proto, 'optional_int32', 1.1)
self.assertRaises(TypeError, setattr, proto, 'optional_int32', 'foo')
self.assertRaises(TypeError, setattr, proto, 'optional_string', 10)
self.assertRaises(TypeError, setattr, proto, 'optional_bytes', 10)
def testSingleScalarBoundsChecking(self):
def TestMinAndMaxIntegers(field_name, expected_min, expected_max):
pb = unittest_pb2.TestAllTypes()
setattr(pb, field_name, expected_min)
setattr(pb, field_name, expected_max)
self.assertRaises(ValueError, setattr, pb, field_name, expected_min - 1)
self.assertRaises(ValueError, setattr, pb, field_name, expected_max + 1)
TestMinAndMaxIntegers('optional_int32', -(1 << 31), (1 << 31) - 1)
TestMinAndMaxIntegers('optional_uint32', 0, 0xffffffff)
TestMinAndMaxIntegers('optional_int64', -(1 << 63), (1 << 63) - 1)
TestMinAndMaxIntegers('optional_uint64', 0, 0xffffffffffffffff)
TestMinAndMaxIntegers('optional_nested_enum', -(1 << 31), (1 << 31) - 1)
def testRepeatedScalarTypeSafety(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(TypeError, proto.repeated_int32.append, 1.1)
self.assertRaises(TypeError, proto.repeated_int32.append, 'foo')
self.assertRaises(TypeError, proto.repeated_string, 10)
self.assertRaises(TypeError, proto.repeated_bytes, 10)
proto.repeated_int32.append(10)
proto.repeated_int32[0] = 23
self.assertRaises(IndexError, proto.repeated_int32.__setitem__, 500, 23)
self.assertRaises(TypeError, proto.repeated_int32.__setitem__, 0, 'abc')
def testSingleScalarGettersAndSetters(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, proto.optional_int32)
proto.optional_int32 = 1
self.assertEqual(1, proto.optional_int32)
# TODO(robinson): Test all other scalar field types.
def testSingleScalarClearField(self):
proto = unittest_pb2.TestAllTypes()
# Should be allowed to clear something that's not there (a no-op).
proto.ClearField('optional_int32')
proto.optional_int32 = 1
self.assertTrue(proto.HasField('optional_int32'))
proto.ClearField('optional_int32')
self.assertEqual(0, proto.optional_int32)
self.assertTrue(not proto.HasField('optional_int32'))
# TODO(robinson): Test all other scalar field types.
def testEnums(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(1, proto.FOO)
self.assertEqual(1, unittest_pb2.TestAllTypes.FOO)
self.assertEqual(2, proto.BAR)
self.assertEqual(2, unittest_pb2.TestAllTypes.BAR)
self.assertEqual(3, proto.BAZ)
self.assertEqual(3, unittest_pb2.TestAllTypes.BAZ)
def testRepeatedScalars(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.repeated_int32)
self.assertEqual(0, len(proto.repeated_int32))
proto.repeated_int32.append(5);
proto.repeated_int32.append(10);
self.assertTrue(proto.repeated_int32)
self.assertEqual(2, len(proto.repeated_int32))
self.assertEqual([5, 10], proto.repeated_int32)
self.assertEqual(5, proto.repeated_int32[0])
self.assertEqual(10, proto.repeated_int32[-1])
# Test out-of-bounds indices.
self.assertRaises(IndexError, proto.repeated_int32.__getitem__, 1234)
self.assertRaises(IndexError, proto.repeated_int32.__getitem__, -1234)
# Test incorrect types passed to __getitem__.
self.assertRaises(TypeError, proto.repeated_int32.__getitem__, 'foo')
self.assertRaises(TypeError, proto.repeated_int32.__getitem__, None)
# Test that we can use the field as an iterator.
result = []
for i in proto.repeated_int32:
result.append(i)
self.assertEqual([5, 10], result)
# Test clearing.
proto.ClearField('repeated_int32')
self.assertTrue(not proto.repeated_int32)
self.assertEqual(0, len(proto.repeated_int32))
def testRepeatedScalarsRemove(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.repeated_int32)
self.assertEqual(0, len(proto.repeated_int32))
proto.repeated_int32.append(5)
proto.repeated_int32.append(10)
proto.repeated_int32.append(5)
proto.repeated_int32.append(5)
self.assertEqual(4, len(proto.repeated_int32))
proto.repeated_int32.remove(5)
self.assertEqual(3, len(proto.repeated_int32))
self.assertEqual(10, proto.repeated_int32[0])
self.assertEqual(5, proto.repeated_int32[1])
self.assertEqual(5, proto.repeated_int32[2])
proto.repeated_int32.remove(5)
self.assertEqual(2, len(proto.repeated_int32))
self.assertEqual(10, proto.repeated_int32[0])
self.assertEqual(5, proto.repeated_int32[1])
proto.repeated_int32.remove(10)
self.assertEqual(1, len(proto.repeated_int32))
self.assertEqual(5, proto.repeated_int32[0])
# Remove a non-existent element.
self.assertRaises(ValueError, proto.repeated_int32.remove, 123)
def testRepeatedComposites(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.repeated_nested_message)
self.assertEqual(0, len(proto.repeated_nested_message))
m0 = proto.repeated_nested_message.add()
m1 = proto.repeated_nested_message.add()
self.assertTrue(proto.repeated_nested_message)
self.assertEqual(2, len(proto.repeated_nested_message))
self.assertTrue(m0 is proto.repeated_nested_message[0])
self.assertTrue(m1 is proto.repeated_nested_message[1])
self.assertTrue(isinstance(m0, unittest_pb2.TestAllTypes.NestedMessage))
# Test out-of-bounds indices.
self.assertRaises(IndexError, proto.repeated_nested_message.__getitem__,
1234)
self.assertRaises(IndexError, proto.repeated_nested_message.__getitem__,
-1234)
# Test incorrect types passed to __getitem__.
self.assertRaises(TypeError, proto.repeated_nested_message.__getitem__,
'foo')
self.assertRaises(TypeError, proto.repeated_nested_message.__getitem__,
None)
# Test that we can use the field as an iterator.
result = []
for i in proto.repeated_nested_message:
result.append(i)
self.assertEqual(2, len(result))
self.assertTrue(m0 is result[0])
self.assertTrue(m1 is result[1])
# Test item deletion.
del proto.repeated_nested_message[0]
self.assertEqual(1, len(proto.repeated_nested_message))
self.assertTrue(m1 is proto.repeated_nested_message[0])
# Test clearing.
proto.ClearField('repeated_nested_message')
self.assertTrue(not proto.repeated_nested_message)
self.assertEqual(0, len(proto.repeated_nested_message))
  def testHandWrittenReflection(self):
    """Builds a working message class from hand-written descriptors.

    Verifies that reflection.GeneratedProtocolMessageType can turn a
    manually constructed Descriptor into a usable Message subclass,
    without any generated _pb2 module involved.
    """
    # TODO(robinson): We probably need a better way to specify
    # protocol types by hand. But then again, this isn't something
    # we expect many people to do. Hmm.
    FieldDescriptor = descriptor.FieldDescriptor
    # A single optional int64 field, field number 1.
    foo_field_descriptor = FieldDescriptor(
        name='foo_field', full_name='MyProto.foo_field',
        index=0, number=1, type=FieldDescriptor.TYPE_INT64,
        cpp_type=FieldDescriptor.CPPTYPE_INT64,
        label=FieldDescriptor.LABEL_OPTIONAL, default_value=0,
        containing_type=None, message_type=None, enum_type=None,
        is_extension=False, extension_scope=None,
        options=descriptor_pb2.FieldOptions())
    mydescriptor = descriptor.Descriptor(
        name='MyProto', full_name='MyProto', filename='ignored',
        containing_type=None, nested_types=[], enum_types=[],
        fields=[foo_field_descriptor], extensions=[],
        options=descriptor_pb2.MessageOptions())
    # The metaclass fleshes out the class from DESCRIPTOR.
    class MyProtoClass(message.Message):
      DESCRIPTOR = mydescriptor
      __metaclass__ = reflection.GeneratedProtocolMessageType
    myproto_instance = MyProtoClass()
    # The hand-built class behaves like a generated one: default value,
    # "has" bit, and round-trip of an assigned value.
    self.assertEqual(0, myproto_instance.foo_field)
    self.assertTrue(not myproto_instance.HasField('foo_field'))
    myproto_instance.foo_field = 23
    self.assertEqual(23, myproto_instance.foo_field)
    self.assertTrue(myproto_instance.HasField('foo_field'))
def testTopLevelExtensionsForOptionalScalar(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.optional_int32_extension
self.assertTrue(not extendee_proto.HasExtension(extension))
self.assertEqual(0, extendee_proto.Extensions[extension])
# As with normal scalar fields, just doing a read doesn't actually set the
# "has" bit.
self.assertTrue(not extendee_proto.HasExtension(extension))
# Actually set the thing.
extendee_proto.Extensions[extension] = 23
self.assertEqual(23, extendee_proto.Extensions[extension])
self.assertTrue(extendee_proto.HasExtension(extension))
# Ensure that clearing works as well.
extendee_proto.ClearExtension(extension)
self.assertEqual(0, extendee_proto.Extensions[extension])
self.assertTrue(not extendee_proto.HasExtension(extension))
def testTopLevelExtensionsForRepeatedScalar(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.repeated_string_extension
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
extendee_proto.Extensions[extension].append('foo')
self.assertEqual(['foo'], extendee_proto.Extensions[extension])
string_list = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
self.assertTrue(string_list is not extendee_proto.Extensions[extension])
# Shouldn't be allowed to do Extensions[extension] = 'a'
self.assertRaises(TypeError, operator.setitem, extendee_proto.Extensions,
extension, 'a')
def testTopLevelExtensionsForOptionalMessage(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.optional_foreign_message_extension
self.assertTrue(not extendee_proto.HasExtension(extension))
self.assertEqual(0, extendee_proto.Extensions[extension].c)
# As with normal (non-extension) fields, merely reading from the
# thing shouldn't set the "has" bit.
self.assertTrue(not extendee_proto.HasExtension(extension))
extendee_proto.Extensions[extension].c = 23
self.assertEqual(23, extendee_proto.Extensions[extension].c)
self.assertTrue(extendee_proto.HasExtension(extension))
# Save a reference here.
foreign_message = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
self.assertTrue(foreign_message is not extendee_proto.Extensions[extension])
# Setting a field on foreign_message now shouldn't set
# any "has" bits on extendee_proto.
foreign_message.c = 42
self.assertEqual(42, foreign_message.c)
self.assertTrue(foreign_message.HasField('c'))
self.assertTrue(not extendee_proto.HasExtension(extension))
# Shouldn't be allowed to do Extensions[extension] = 'a'
self.assertRaises(TypeError, operator.setitem, extendee_proto.Extensions,
extension, 'a')
def testTopLevelExtensionsForRepeatedMessage(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.repeatedgroup_extension
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
group = extendee_proto.Extensions[extension].add()
group.a = 23
self.assertEqual(23, extendee_proto.Extensions[extension][0].a)
group.a = 42
self.assertEqual(42, extendee_proto.Extensions[extension][0].a)
group_list = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
self.assertTrue(group_list is not extendee_proto.Extensions[extension])
# Shouldn't be allowed to do Extensions[extension] = 'a'
self.assertRaises(TypeError, operator.setitem, extendee_proto.Extensions,
extension, 'a')
def testNestedExtensions(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.TestRequired.single
# We just test the non-repeated case.
self.assertTrue(not extendee_proto.HasExtension(extension))
required = extendee_proto.Extensions[extension]
self.assertEqual(0, required.a)
self.assertTrue(not extendee_proto.HasExtension(extension))
required.a = 23
self.assertEqual(23, extendee_proto.Extensions[extension].a)
self.assertTrue(extendee_proto.HasExtension(extension))
extendee_proto.ClearExtension(extension)
self.assertTrue(required is not extendee_proto.Extensions[extension])
self.assertTrue(not extendee_proto.HasExtension(extension))
  # If message A directly contains message B, and
  # a.HasField('b') is currently False, then mutating any
  # extension in B should change a.HasField('b') to True
  # (and so on up the object tree).
  def testHasBitsForAncestorsOfExtendedMessage(self):
    """Mutating an extension inside a submessage sets ancestor "has" bits.

    Covers all four extension kinds (optional/repeated x scalar/message).
    In every case a bare read leaves HasField('submessage') False, while a
    real mutation flips it to True.
    """
    # Optional scalar extension: reading the default is not a mutation;
    # assigning a value is.
    toplevel = more_extensions_pb2.TopLevelMessage()
    self.assertTrue(not toplevel.HasField('submessage'))
    self.assertEqual(0, toplevel.submessage.Extensions[
        more_extensions_pb2.optional_int_extension])
    self.assertTrue(not toplevel.HasField('submessage'))
    toplevel.submessage.Extensions[
        more_extensions_pb2.optional_int_extension] = 23
    self.assertEqual(23, toplevel.submessage.Extensions[
        more_extensions_pb2.optional_int_extension])
    self.assertTrue(toplevel.HasField('submessage'))
    # Repeated scalar extension: reading the empty list is not a mutation;
    # appending is.
    toplevel = more_extensions_pb2.TopLevelMessage()
    self.assertTrue(not toplevel.HasField('submessage'))
    self.assertEqual([], toplevel.submessage.Extensions[
        more_extensions_pb2.repeated_int_extension])
    self.assertTrue(not toplevel.HasField('submessage'))
    toplevel.submessage.Extensions[
        more_extensions_pb2.repeated_int_extension].append(23)
    self.assertEqual([23], toplevel.submessage.Extensions[
        more_extensions_pb2.repeated_int_extension])
    self.assertTrue(toplevel.HasField('submessage'))
    # Optional message extension: reading a field of the nested message is
    # not a mutation; setting one is.
    toplevel = more_extensions_pb2.TopLevelMessage()
    self.assertTrue(not toplevel.HasField('submessage'))
    self.assertEqual(0, toplevel.submessage.Extensions[
        more_extensions_pb2.optional_message_extension].foreign_message_int)
    self.assertTrue(not toplevel.HasField('submessage'))
    toplevel.submessage.Extensions[
        more_extensions_pb2.optional_message_extension].foreign_message_int = 23
    self.assertEqual(23, toplevel.submessage.Extensions[
        more_extensions_pb2.optional_message_extension].foreign_message_int)
    self.assertTrue(toplevel.HasField('submessage'))
    # Repeated message extension: len() of the empty container is not a
    # mutation; add() is.
    toplevel = more_extensions_pb2.TopLevelMessage()
    self.assertTrue(not toplevel.HasField('submessage'))
    self.assertEqual(0, len(toplevel.submessage.Extensions[
        more_extensions_pb2.repeated_message_extension]))
    self.assertTrue(not toplevel.HasField('submessage'))
    foreign = toplevel.submessage.Extensions[
        more_extensions_pb2.repeated_message_extension].add()
    self.assertTrue(foreign is toplevel.submessage.Extensions[
        more_extensions_pb2.repeated_message_extension][0])
    self.assertTrue(toplevel.HasField('submessage'))
def testDisconnectionAfterClearingEmptyMessage(self):
toplevel = more_extensions_pb2.TopLevelMessage()
extendee_proto = toplevel.submessage
extension = more_extensions_pb2.optional_message_extension
extension_proto = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
extension_proto.foreign_message_int = 23
self.assertTrue(not toplevel.HasField('submessage'))
self.assertTrue(extension_proto is not extendee_proto.Extensions[extension])
  def testExtensionFailureModes(self):
    """Invalid extension handles raise KeyError from every extension API."""
    extendee_proto = unittest_pb2.TestAllExtensions()
    # Try non-extension-handle arguments to HasExtension,
    # ClearExtension(), and Extensions[]...
    self.assertRaises(KeyError, extendee_proto.HasExtension, 1234)
    self.assertRaises(KeyError, extendee_proto.ClearExtension, 1234)
    self.assertRaises(KeyError, extendee_proto.Extensions.__getitem__, 1234)
    self.assertRaises(KeyError, extendee_proto.Extensions.__setitem__, 1234, 5)
    # Try something that *is* an extension handle, just not for
    # this message...
    unknown_handle = more_extensions_pb2.optional_int_extension
    self.assertRaises(KeyError, extendee_proto.HasExtension,
                      unknown_handle)
    self.assertRaises(KeyError, extendee_proto.ClearExtension,
                      unknown_handle)
    self.assertRaises(KeyError, extendee_proto.Extensions.__getitem__,
                      unknown_handle)
    self.assertRaises(KeyError, extendee_proto.Extensions.__setitem__,
                      unknown_handle, 5)
    # Try call HasExtension() with a valid handle, but for a
    # *repeated* field. (Just as with non-extension repeated
    # fields, Has*() isn't supported for extension repeated fields).
    self.assertRaises(KeyError, extendee_proto.HasExtension,
                      unittest_pb2.repeated_string_extension)
def testMergeFromSingularField(self):
# Test merge with just a singular field.
proto1 = unittest_pb2.TestAllTypes()
proto1.optional_int32 = 1
proto2 = unittest_pb2.TestAllTypes()
# This shouldn't get overwritten.
proto2.optional_string = 'value'
proto2.MergeFrom(proto1)
self.assertEqual(1, proto2.optional_int32)
self.assertEqual('value', proto2.optional_string)
def testMergeFromRepeatedField(self):
# Test merge with just a repeated field.
proto1 = unittest_pb2.TestAllTypes()
proto1.repeated_int32.append(1)
proto1.repeated_int32.append(2)
proto2 = unittest_pb2.TestAllTypes()
proto2.repeated_int32.append(0)
proto2.MergeFrom(proto1)
self.assertEqual(0, proto2.repeated_int32[0])
self.assertEqual(1, proto2.repeated_int32[1])
self.assertEqual(2, proto2.repeated_int32[2])
def testMergeFromOptionalGroup(self):
# Test merge with an optional group.
proto1 = unittest_pb2.TestAllTypes()
proto1.optionalgroup.a = 12
proto2 = unittest_pb2.TestAllTypes()
proto2.MergeFrom(proto1)
self.assertEqual(12, proto2.optionalgroup.a)
def testMergeFromRepeatedNestedMessage(self):
# Test merge with a repeated nested message.
proto1 = unittest_pb2.TestAllTypes()
m = proto1.repeated_nested_message.add()
m.bb = 123
m = proto1.repeated_nested_message.add()
m.bb = 321
proto2 = unittest_pb2.TestAllTypes()
m = proto2.repeated_nested_message.add()
m.bb = 999
proto2.MergeFrom(proto1)
self.assertEqual(999, proto2.repeated_nested_message[0].bb)
self.assertEqual(123, proto2.repeated_nested_message[1].bb)
self.assertEqual(321, proto2.repeated_nested_message[2].bb)
def testMergeFromAllFields(self):
# With all fields set.
proto1 = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto1)
proto2 = unittest_pb2.TestAllTypes()
proto2.MergeFrom(proto1)
# Messages should be equal.
self.assertEqual(proto2, proto1)
# Serialized string should be equal too.
string1 = proto1.SerializeToString()
string2 = proto2.SerializeToString()
self.assertEqual(string1, string2)
def testMergeFromExtensionsSingular(self):
proto1 = unittest_pb2.TestAllExtensions()
proto1.Extensions[unittest_pb2.optional_int32_extension] = 1
proto2 = unittest_pb2.TestAllExtensions()
proto2.MergeFrom(proto1)
self.assertEqual(
1, proto2.Extensions[unittest_pb2.optional_int32_extension])
def testMergeFromExtensionsRepeated(self):
proto1 = unittest_pb2.TestAllExtensions()
proto1.Extensions[unittest_pb2.repeated_int32_extension].append(1)
proto1.Extensions[unittest_pb2.repeated_int32_extension].append(2)
proto2 = unittest_pb2.TestAllExtensions()
proto2.Extensions[unittest_pb2.repeated_int32_extension].append(0)
proto2.MergeFrom(proto1)
self.assertEqual(
3, len(proto2.Extensions[unittest_pb2.repeated_int32_extension]))
self.assertEqual(
0, proto2.Extensions[unittest_pb2.repeated_int32_extension][0])
self.assertEqual(
1, proto2.Extensions[unittest_pb2.repeated_int32_extension][1])
self.assertEqual(
2, proto2.Extensions[unittest_pb2.repeated_int32_extension][2])
def testMergeFromExtensionsNestedMessage(self):
proto1 = unittest_pb2.TestAllExtensions()
ext1 = proto1.Extensions[
unittest_pb2.repeated_nested_message_extension]
m = ext1.add()
m.bb = 222
m = ext1.add()
m.bb = 333
proto2 = unittest_pb2.TestAllExtensions()
ext2 = proto2.Extensions[
unittest_pb2.repeated_nested_message_extension]
m = ext2.add()
m.bb = 111
proto2.MergeFrom(proto1)
ext2 = proto2.Extensions[
unittest_pb2.repeated_nested_message_extension]
self.assertEqual(3, len(ext2))
self.assertEqual(111, ext2[0].bb)
self.assertEqual(222, ext2[1].bb)
self.assertEqual(333, ext2[2].bb)
def testCopyFromSingularField(self):
# Test copy with just a singular field.
proto1 = unittest_pb2.TestAllTypes()
proto1.optional_int32 = 1
proto1.optional_string = 'important-text'
proto2 = unittest_pb2.TestAllTypes()
proto2.optional_string = 'value'
proto2.CopyFrom(proto1)
self.assertEqual(1, proto2.optional_int32)
self.assertEqual('important-text', proto2.optional_string)
def testCopyFromRepeatedField(self):
# Test copy with a repeated field.
proto1 = unittest_pb2.TestAllTypes()
proto1.repeated_int32.append(1)
proto1.repeated_int32.append(2)
proto2 = unittest_pb2.TestAllTypes()
proto2.repeated_int32.append(0)
proto2.CopyFrom(proto1)
self.assertEqual(1, proto2.repeated_int32[0])
self.assertEqual(2, proto2.repeated_int32[1])
def testCopyFromAllFields(self):
# With all fields set.
proto1 = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto1)
proto2 = unittest_pb2.TestAllTypes()
proto2.CopyFrom(proto1)
# Messages should be equal.
self.assertEqual(proto2, proto1)
# Serialized string should be equal too.
string1 = proto1.SerializeToString()
string2 = proto2.SerializeToString()
self.assertEqual(string1, string2)
def testCopyFromSelf(self):
proto1 = unittest_pb2.TestAllTypes()
proto1.repeated_int32.append(1)
proto1.optional_int32 = 2
proto1.optional_string = 'important-text'
proto1.CopyFrom(proto1)
self.assertEqual(1, proto1.repeated_int32[0])
self.assertEqual(2, proto1.optional_int32)
self.assertEqual('important-text', proto1.optional_string)
def testClear(self):
proto = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto)
# Clear the message.
proto.Clear()
self.assertEquals(proto.ByteSize(), 0)
empty_proto = unittest_pb2.TestAllTypes()
self.assertEquals(proto, empty_proto)
# Test if extensions which were set are cleared.
proto = unittest_pb2.TestAllExtensions()
test_util.SetAllExtensions(proto)
# Clear the message.
proto.Clear()
self.assertEquals(proto.ByteSize(), 0)
empty_proto = unittest_pb2.TestAllExtensions()
self.assertEquals(proto, empty_proto)
  def testIsInitialized(self):
    """IsInitialized() is False until every required field, recursively, is set."""
    # Trivial cases - all optional fields and extensions.
    proto = unittest_pb2.TestAllTypes()
    self.assertTrue(proto.IsInitialized())
    proto = unittest_pb2.TestAllExtensions()
    self.assertTrue(proto.IsInitialized())
    # The case of uninitialized required fields.
    proto = unittest_pb2.TestRequired()
    self.assertFalse(proto.IsInitialized())
    proto.a = proto.b = proto.c = 2
    self.assertTrue(proto.IsInitialized())
    # The case of uninitialized submessage.
    proto = unittest_pb2.TestRequiredForeign()
    self.assertTrue(proto.IsInitialized())
    # Setting one required field of the submessage makes the whole message
    # uninitialized until the submessage's other required fields are set too.
    proto.optional_message.a = 1
    self.assertFalse(proto.IsInitialized())
    proto.optional_message.b = 0
    proto.optional_message.c = 0
    self.assertTrue(proto.IsInitialized())
    # Uninitialized repeated submessage.
    message1 = proto.repeated_message.add()
    self.assertFalse(proto.IsInitialized())
    message1.a = message1.b = message1.c = 0
    self.assertTrue(proto.IsInitialized())
    # Uninitialized repeated group in an extension.
    proto = unittest_pb2.TestAllExtensions()
    extension = unittest_pb2.TestRequired.multi
    message1 = proto.Extensions[extension].add()
    message2 = proto.Extensions[extension].add()
    self.assertFalse(proto.IsInitialized())
    # Every element of the repeated extension must itself be initialized.
    message1.a = 1
    message1.b = 1
    message1.c = 1
    self.assertFalse(proto.IsInitialized())
    message2.a = 2
    message2.b = 2
    message2.c = 2
    self.assertTrue(proto.IsInitialized())
    # Uninitialized nonrepeated message in an extension.
    proto = unittest_pb2.TestAllExtensions()
    extension = unittest_pb2.TestRequired.single
    proto.Extensions[extension].a = 1
    self.assertFalse(proto.IsInitialized())
    proto.Extensions[extension].b = 2
    proto.Extensions[extension].c = 3
    self.assertTrue(proto.IsInitialized())
  def testStringUTF8Encoding(self):
    """Python-2 str/unicode acceptance rules for string- and bytes-typed fields."""
    proto = unittest_pb2.TestAllTypes()
    # Assignment of a unicode object to a field of type 'bytes' is not allowed.
    self.assertRaises(TypeError,
                      setattr, proto, 'optional_bytes', u'unicode object')
    # Check that the default value is of python's 'unicode' type.
    self.assertEqual(type(proto.optional_string), unicode)
    proto.optional_string = unicode('Testing')
    self.assertEqual(proto.optional_string, str('Testing'))
    # Assign a value of type 'str' which can be encoded in UTF-8.
    proto.optional_string = str('Testing')
    self.assertEqual(proto.optional_string, unicode('Testing'))
    # Values of type 'str' are also accepted as long as they can be encoded in
    # UTF-8.  (NOTE(review): the stored value then reads back as 'str', not
    # 'unicode', per the assertion below.)
    self.assertEqual(type(proto.optional_string), str)
    # Try to assign a 'str' value which contains bytes that aren't 7-bit ASCII.
    self.assertRaises(ValueError,
                      setattr, proto, 'optional_string', str('a\x80a'))
    # Assign a 'str' object which contains a UTF-8 encoded string.
    self.assertRaises(ValueError,
                      setattr, proto, 'optional_string', 'Тест')
    # No exception thrown.
    proto.optional_string = 'abc'
def testStringUTF8Serialization(self):
proto = unittest_mset_pb2.TestMessageSet()
extension_message = unittest_mset_pb2.TestMessageSetExtension2
extension = extension_message.message_set_extension
test_utf8 = u'Тест'
test_utf8_bytes = test_utf8.encode('utf-8')
# 'Test' in another language, using UTF-8 charset.
proto.Extensions[extension].str = test_utf8
# Serialize using the MessageSet wire format (this is specified in the
# .proto file).
serialized = proto.SerializeToString()
# Check byte size.
self.assertEqual(proto.ByteSize(), len(serialized))
raw = unittest_mset_pb2.RawMessageSet()
raw.MergeFromString(serialized)
message2 = unittest_mset_pb2.TestMessageSetExtension2()
self.assertEqual(1, len(raw.item))
# Check that the type_id is the same as the tag ID in the .proto file.
self.assertEqual(raw.item[0].type_id, 1547769)
# Check the actually bytes on the wire.
self.assertTrue(
raw.item[0].message.endswith(test_utf8_bytes))
message2.MergeFromString(raw.item[0].message)
self.assertEqual(type(message2.str), unicode)
self.assertEqual(message2.str, test_utf8)
# How about if the bytes on the wire aren't a valid UTF-8 encoded string.
bytes = raw.item[0].message.replace(
test_utf8_bytes, len(test_utf8_bytes) * '\xff')
self.assertRaises(UnicodeDecodeError, message2.MergeFromString, bytes)
# Since we had so many tests for protocol buffer equality, we broke these out
# into separate TestCase classes.
class TestAllTypesEqualityTest(unittest.TestCase):
  """Sanity equality checks on default-constructed TestAllTypes messages."""

  def setUp(self):
    self.first_proto = unittest_pb2.TestAllTypes()
    self.second_proto = unittest_pb2.TestAllTypes()

  def testSelfEquality(self):
    """A message compares equal to itself."""
    self.assertEqual(self.first_proto, self.first_proto)

  def testEmptyProtosEqual(self):
    """Two default-constructed messages compare equal."""
    self.assertEqual(self.first_proto, self.second_proto)
class FullProtosEqualityTest(unittest.TestCase):
  """Equality tests using completely-full protos as a starting point."""
  def setUp(self):
    # Two independently built, identically populated messages.
    self.first_proto = unittest_pb2.TestAllTypes()
    self.second_proto = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.first_proto)
    test_util.SetAllFields(self.second_proto)
  def testAllFieldsFilledEquality(self):
    """Two identically populated messages compare equal."""
    self.assertEqual(self.first_proto, self.second_proto)
  def testNonRepeatedScalar(self):
    """Changing or clearing a singular scalar breaks equality."""
    # Nonrepeated scalar field change should cause inequality.
    self.first_proto.optional_int32 += 1
    self.assertNotEqual(self.first_proto, self.second_proto)
    # ...as should clearing a field.
    self.first_proto.ClearField('optional_int32')
    self.assertNotEqual(self.first_proto, self.second_proto)
  def testNonRepeatedComposite(self):
    """Changes inside a singular submessage are reflected in equality."""
    # Change a nonrepeated composite field.
    self.first_proto.optional_nested_message.bb += 1
    self.assertNotEqual(self.first_proto, self.second_proto)
    self.first_proto.optional_nested_message.bb -= 1
    self.assertEqual(self.first_proto, self.second_proto)
    # Clear a field in the nested message.
    self.first_proto.optional_nested_message.ClearField('bb')
    self.assertNotEqual(self.first_proto, self.second_proto)
    self.first_proto.optional_nested_message.bb = (
        self.second_proto.optional_nested_message.bb)
    self.assertEqual(self.first_proto, self.second_proto)
    # Remove the nested message entirely.
    self.first_proto.ClearField('optional_nested_message')
    self.assertNotEqual(self.first_proto, self.second_proto)
  def testRepeatedScalar(self):
    """Appending to or clearing a repeated scalar breaks equality."""
    # Change a repeated scalar field.
    self.first_proto.repeated_int32.append(5)
    self.assertNotEqual(self.first_proto, self.second_proto)
    self.first_proto.ClearField('repeated_int32')
    self.assertNotEqual(self.first_proto, self.second_proto)
  def testRepeatedComposite(self):
    """Element-level changes and length changes both affect equality."""
    # Change value within a repeated composite field.
    self.first_proto.repeated_nested_message[0].bb += 1
    self.assertNotEqual(self.first_proto, self.second_proto)
    self.first_proto.repeated_nested_message[0].bb -= 1
    self.assertEqual(self.first_proto, self.second_proto)
    # Add a value to a repeated composite field.
    self.first_proto.repeated_nested_message.add()
    self.assertNotEqual(self.first_proto, self.second_proto)
    self.second_proto.repeated_nested_message.add()
    self.assertEqual(self.first_proto, self.second_proto)
  def testNonRepeatedScalarHasBits(self):
    """A cleared field differs from one explicitly set to its default."""
    # Ensure that we test "has" bits as well as value for
    # nonrepeated scalar field.
    self.first_proto.ClearField('optional_int32')
    self.second_proto.optional_int32 = 0
    self.assertNotEqual(self.first_proto, self.second_proto)
  def testNonRepeatedCompositeHasBits(self):
    """Submessage "has" bits participate in equality, not just values."""
    # Ensure that we test "has" bits as well as value for
    # nonrepeated composite field.
    self.first_proto.ClearField('optional_nested_message')
    self.second_proto.optional_nested_message.ClearField('bb')
    self.assertNotEqual(self.first_proto, self.second_proto)
    # TODO(robinson): Replace next two lines with method
    # to set the "has" bit without changing the value,
    # if/when such a method exists.
    self.first_proto.optional_nested_message.bb = 0
    self.first_proto.optional_nested_message.ClearField('bb')
    self.assertEqual(self.first_proto, self.second_proto)
class ExtensionEqualityTest(unittest.TestCase):
  """Equality semantics for messages that carry extensions."""

  def testExtensionEquality(self):
    """Extensions participate in equality, including their "has" bits."""
    lhs = unittest_pb2.TestAllExtensions()
    rhs = unittest_pb2.TestAllExtensions()
    self.assertEqual(lhs, rhs)
    test_util.SetAllExtensions(lhs)
    self.assertNotEqual(lhs, rhs)
    test_util.SetAllExtensions(rhs)
    self.assertEqual(lhs, rhs)
    # Value differences are detected...
    ext = unittest_pb2.optional_int32_extension
    lhs.Extensions[ext] += 1
    self.assertNotEqual(lhs, rhs)
    lhs.Extensions[ext] -= 1
    self.assertEqual(lhs, rhs)
    # ...and so are differences in "has" bits, even when values agree.
    lhs.ClearExtension(ext)
    rhs.Extensions[ext] = 0
    self.assertNotEqual(lhs, rhs)
    lhs.Extensions[ext] = 0
    self.assertEqual(lhs, rhs)
    # Cached default values do not affect equality while "has" is False.
    lhs = unittest_pb2.TestAllExtensions()
    rhs = unittest_pb2.TestAllExtensions()
    self.assertEqual(
        0, lhs.Extensions[unittest_pb2.optional_int32_extension])
    self.assertEqual(lhs, rhs)
class MutualRecursionEqualityTest(unittest.TestCase):
    """Equality must work through mutually recursive message types."""

    def testEqualityWithMutualRecursion(self):
        """A change several levels down the recursive chain must show in ==."""
        lhs = unittest_pb2.TestMutualRecursionA()
        rhs = unittest_pb2.TestMutualRecursionA()
        self.assertEqual(lhs, rhs)
        # Touch a deeply nested field on one side only.
        lhs.bb.a.bb.optional_int32 = 23
        self.assertNotEqual(lhs, rhs)
        # Mirror the change; equality must be restored.
        rhs.bb.a.bb.optional_int32 = 23
        self.assertEqual(lhs, rhs)
class ByteSizeTest(unittest.TestCase):
    """Pins ByteSize() to exact wire-format sizes for each field kind, and
    checks that the cached size is invalidated on every mutation path
    (set, clear, repeated add/remove, nested and extension fields)."""

    def setUp(self):
        self.proto = unittest_pb2.TestAllTypes()
        self.extended_proto = more_extensions_pb2.ExtendedMessage()

    def Size(self):
        # Convenience shorthand; not a test (doesn't start with "test").
        return self.proto.ByteSize()

    def testEmptyMessage(self):
        self.assertEqual(0, self.proto.ByteSize())

    def testVarints(self):
        """Varint sizes: 7 payload bits per byte; negatives take 10 bytes."""
        def Test(i, expected_varint_size):
            self.proto.Clear()
            self.proto.optional_int64 = i
            # Add one to the varint size for the tag info
            # for tag 1.
            self.assertEqual(expected_varint_size + 1, self.Size())
        Test(0, 1)
        Test(1, 1)
        # (1 << 7k) - 1 is the largest value encodable in k varint bytes;
        # the oversized second range just lets zip() stop at the first.
        for i, num_bytes in zip(range(7, 63, 7), range(1, 10000)):
            Test((1 << i) - 1, num_bytes)
        Test(-1, 10)
        Test(-2, 10)
        Test(-(1 << 63), 10)

    def testStrings(self):
        self.proto.optional_string = ''
        # Need one byte for tag info (tag #14), and one byte for length.
        self.assertEqual(2, self.Size())
        self.proto.optional_string = 'abc'
        # Need one byte for tag info (tag #14), and one byte for length.
        self.assertEqual(2 + len(self.proto.optional_string), self.Size())
        self.proto.optional_string = 'x' * 128
        # Need one byte for tag info (tag #14), and TWO bytes for length
        # (128 exceeds the one-byte varint range).
        self.assertEqual(3 + len(self.proto.optional_string), self.Size())

    def testOtherNumerics(self):
        """Fixed-width and zig-zag encodings; proto is reset between cases."""
        self.proto.optional_fixed32 = 1234
        # One byte for tag and 4 bytes for fixed32.
        self.assertEqual(5, self.Size())
        self.proto = unittest_pb2.TestAllTypes()
        self.proto.optional_fixed64 = 1234
        # One byte for tag and 8 bytes for fixed64.
        self.assertEqual(9, self.Size())
        self.proto = unittest_pb2.TestAllTypes()
        self.proto.optional_float = 1.234
        # One byte for tag and 4 bytes for float.
        self.assertEqual(5, self.Size())
        self.proto = unittest_pb2.TestAllTypes()
        self.proto.optional_double = 1.234
        # One byte for tag and 8 bytes for float.
        self.assertEqual(9, self.Size())
        self.proto = unittest_pb2.TestAllTypes()
        self.proto.optional_sint32 = 64
        # One byte for tag and 2 bytes for zig-zag-encoded 64.
        self.assertEqual(3, self.Size())
        self.proto = unittest_pb2.TestAllTypes()

    def testComposites(self):
        # 3 bytes.
        self.proto.optional_nested_message.bb = (1 << 14)
        # Plus one byte for bb tag.
        # Plus 1 byte for optional_nested_message serialized size.
        # Plus two bytes for optional_nested_message tag.
        self.assertEqual(3 + 1 + 1 + 2, self.Size())

    def testGroups(self):
        # 4 bytes.
        self.proto.optionalgroup.a = (1 << 21)
        # Plus two bytes for |a| tag.
        # Plus 2 * two bytes for START_GROUP and END_GROUP tags.
        self.assertEqual(4 + 2 + 2*2, self.Size())

    def testRepeatedScalars(self):
        self.proto.repeated_int32.append(10)  # 1 byte.
        self.proto.repeated_int32.append(128)  # 2 bytes.
        # Also need 2 bytes for each entry for tag.
        self.assertEqual(1 + 2 + 2*2, self.Size())

    def testRepeatedScalarsRemove(self):
        self.proto.repeated_int32.append(10)  # 1 byte.
        self.proto.repeated_int32.append(128)  # 2 bytes.
        # Also need 2 bytes for each entry for tag.
        self.assertEqual(1 + 2 + 2*2, self.Size())
        # Removing an element must shrink the cached size accordingly.
        self.proto.repeated_int32.remove(128)
        self.assertEqual(1 + 2, self.Size())

    def testRepeatedComposites(self):
        # Empty message. 2 bytes tag plus 1 byte length.
        foreign_message_0 = self.proto.repeated_nested_message.add()
        # 2 bytes tag plus 1 byte length plus 1 byte bb tag 1 byte int.
        foreign_message_1 = self.proto.repeated_nested_message.add()
        foreign_message_1.bb = 7
        self.assertEqual(2 + 1 + 2 + 1 + 1 + 1, self.Size())

    def testRepeatedCompositesDelete(self):
        """del on a repeated composite must update ByteSize() each time."""
        # Empty message. 2 bytes tag plus 1 byte length.
        foreign_message_0 = self.proto.repeated_nested_message.add()
        # 2 bytes tag plus 1 byte length plus 1 byte bb tag 1 byte int.
        foreign_message_1 = self.proto.repeated_nested_message.add()
        foreign_message_1.bb = 9
        self.assertEqual(2 + 1 + 2 + 1 + 1 + 1, self.Size())
        # 2 bytes tag plus 1 byte length plus 1 byte bb tag 1 byte int.
        del self.proto.repeated_nested_message[0]
        self.assertEqual(2 + 1 + 1 + 1, self.Size())
        # Now add a new message.
        foreign_message_2 = self.proto.repeated_nested_message.add()
        foreign_message_2.bb = 12
        # 2 bytes tag plus 1 byte length plus 1 byte bb tag 1 byte int.
        # 2 bytes tag plus 1 byte length plus 1 byte bb tag 1 byte int.
        self.assertEqual(2 + 1 + 1 + 1 + 2 + 1 + 1 + 1, self.Size())
        # 2 bytes tag plus 1 byte length plus 1 byte bb tag 1 byte int.
        del self.proto.repeated_nested_message[1]
        self.assertEqual(2 + 1 + 1 + 1, self.Size())
        del self.proto.repeated_nested_message[0]
        self.assertEqual(0, self.Size())

    def testRepeatedGroups(self):
        # 2-byte START_GROUP plus 2-byte END_GROUP.
        group_0 = self.proto.repeatedgroup.add()
        # 2-byte START_GROUP plus 2-byte |a| tag + 1-byte |a|
        # plus 2-byte END_GROUP.
        group_1 = self.proto.repeatedgroup.add()
        group_1.a = 7
        self.assertEqual(2 + 2 + 2 + 2 + 1 + 2, self.Size())

    def testExtensions(self):
        proto = unittest_pb2.TestAllExtensions()
        self.assertEqual(0, proto.ByteSize())
        extension = unittest_pb2.optional_int32_extension  # Field #1, 1 byte.
        proto.Extensions[extension] = 23
        # 1 byte for tag, 1 byte for value.
        self.assertEqual(2, proto.ByteSize())

    def testCacheInvalidationForNonrepeatedScalar(self):
        # Test non-extension.
        self.proto.optional_int32 = 1
        self.assertEqual(2, self.proto.ByteSize())
        self.proto.optional_int32 = 128
        self.assertEqual(3, self.proto.ByteSize())
        self.proto.ClearField('optional_int32')
        self.assertEqual(0, self.proto.ByteSize())
        # Test within extension.
        extension = more_extensions_pb2.optional_int_extension
        self.extended_proto.Extensions[extension] = 1
        self.assertEqual(2, self.extended_proto.ByteSize())
        self.extended_proto.Extensions[extension] = 128
        self.assertEqual(3, self.extended_proto.ByteSize())
        self.extended_proto.ClearExtension(extension)
        self.assertEqual(0, self.extended_proto.ByteSize())

    def testCacheInvalidationForRepeatedScalar(self):
        # Test non-extension.
        self.proto.repeated_int32.append(1)
        self.assertEqual(3, self.proto.ByteSize())
        self.proto.repeated_int32.append(1)
        self.assertEqual(6, self.proto.ByteSize())
        self.proto.repeated_int32[1] = 128
        self.assertEqual(7, self.proto.ByteSize())
        self.proto.ClearField('repeated_int32')
        self.assertEqual(0, self.proto.ByteSize())
        # Test within extension.
        extension = more_extensions_pb2.repeated_int_extension
        repeated = self.extended_proto.Extensions[extension]
        repeated.append(1)
        self.assertEqual(2, self.extended_proto.ByteSize())
        repeated.append(1)
        self.assertEqual(4, self.extended_proto.ByteSize())
        repeated[1] = 128
        self.assertEqual(5, self.extended_proto.ByteSize())
        self.extended_proto.ClearExtension(extension)
        self.assertEqual(0, self.extended_proto.ByteSize())

    def testCacheInvalidationForNonrepeatedMessage(self):
        # Test non-extension.
        self.proto.optional_foreign_message.c = 1
        self.assertEqual(5, self.proto.ByteSize())
        self.proto.optional_foreign_message.c = 128
        self.assertEqual(6, self.proto.ByteSize())
        self.proto.optional_foreign_message.ClearField('c')
        self.assertEqual(3, self.proto.ByteSize())
        self.proto.ClearField('optional_foreign_message')
        self.assertEqual(0, self.proto.ByteSize())
        # A detached child must no longer affect the parent's size.
        child = self.proto.optional_foreign_message
        self.proto.ClearField('optional_foreign_message')
        child.c = 128
        self.assertEqual(0, self.proto.ByteSize())
        # Test within extension.
        extension = more_extensions_pb2.optional_message_extension
        child = self.extended_proto.Extensions[extension]
        self.assertEqual(0, self.extended_proto.ByteSize())
        child.foreign_message_int = 1
        self.assertEqual(4, self.extended_proto.ByteSize())
        child.foreign_message_int = 128
        self.assertEqual(5, self.extended_proto.ByteSize())
        self.extended_proto.ClearExtension(extension)
        self.assertEqual(0, self.extended_proto.ByteSize())

    def testCacheInvalidationForRepeatedMessage(self):
        # Test non-extension.
        child0 = self.proto.repeated_foreign_message.add()
        self.assertEqual(3, self.proto.ByteSize())
        self.proto.repeated_foreign_message.add()
        self.assertEqual(6, self.proto.ByteSize())
        child0.c = 1
        self.assertEqual(8, self.proto.ByteSize())
        self.proto.ClearField('repeated_foreign_message')
        self.assertEqual(0, self.proto.ByteSize())
        # Test within extension.
        extension = more_extensions_pb2.repeated_message_extension
        child_list = self.extended_proto.Extensions[extension]
        child0 = child_list.add()
        self.assertEqual(2, self.extended_proto.ByteSize())
        child_list.add()
        self.assertEqual(4, self.extended_proto.ByteSize())
        child0.foreign_message_int = 1
        self.assertEqual(6, self.extended_proto.ByteSize())
        child0.ClearField('foreign_message_int')
        self.assertEqual(4, self.extended_proto.ByteSize())
        self.extended_proto.ClearExtension(extension)
        self.assertEqual(0, self.extended_proto.ByteSize())
# TODO(robinson): We need cross-language serialization consistency tests.
# Issues to be sure to cover include:
# * Handling of unrecognized tags ("uninterpreted_bytes").
# * Handling of MessageSets.
# * Consistent ordering of tags in the wire format,
# including ordering between extensions and non-extension
# fields.
# * Consistent serialization of negative numbers, especially
# negative int32s.
# * Handling of empty submessages (with and without "has"
# bits set).
class SerializationTest(unittest.TestCase):
    """Round-trip serialization tests: SerializeToString/MergeFromString,
    canonical tag ordering, MessageSet wire format, unknown fields, and
    required-field enforcement.  Python 2 only (see _CheckRaises)."""

    def testSerializeEmtpyMessage(self):
        # NOTE(review): "Emtpy" is a typo for "Empty"; left unchanged so the
        # test's discovery name stays stable.
        first_proto = unittest_pb2.TestAllTypes()
        second_proto = unittest_pb2.TestAllTypes()
        serialized = first_proto.SerializeToString()
        self.assertEqual(first_proto.ByteSize(), len(serialized))
        second_proto.MergeFromString(serialized)
        self.assertEqual(first_proto, second_proto)

    def testSerializeAllFields(self):
        first_proto = unittest_pb2.TestAllTypes()
        second_proto = unittest_pb2.TestAllTypes()
        test_util.SetAllFields(first_proto)
        serialized = first_proto.SerializeToString()
        self.assertEqual(first_proto.ByteSize(), len(serialized))
        second_proto.MergeFromString(serialized)
        self.assertEqual(first_proto, second_proto)

    def testSerializeAllExtensions(self):
        first_proto = unittest_pb2.TestAllExtensions()
        second_proto = unittest_pb2.TestAllExtensions()
        test_util.SetAllExtensions(first_proto)
        serialized = first_proto.SerializeToString()
        second_proto.MergeFromString(serialized)
        self.assertEqual(first_proto, second_proto)

    def testCanonicalSerializationOrder(self):
        proto = more_messages_pb2.OutOfOrderFields()
        # These are also their tag numbers.  Even though we're setting these in
        # reverse-tag order AND they're listed in reverse tag-order in the .proto
        # file, they should nonetheless be serialized in tag order.
        proto.optional_sint32 = 5
        proto.Extensions[more_messages_pb2.optional_uint64] = 4
        proto.optional_uint32 = 3
        proto.Extensions[more_messages_pb2.optional_int64] = 2
        proto.optional_int32 = 1
        serialized = proto.SerializeToString()
        self.assertEqual(proto.ByteSize(), len(serialized))
        # Walk the wire bytes directly and check tag order 1..5.
        d = decoder.Decoder(serialized)
        ReadTag = d.ReadFieldNumberAndWireType
        self.assertEqual((1, wire_format.WIRETYPE_VARINT), ReadTag())
        self.assertEqual(1, d.ReadInt32())
        self.assertEqual((2, wire_format.WIRETYPE_VARINT), ReadTag())
        self.assertEqual(2, d.ReadInt64())
        self.assertEqual((3, wire_format.WIRETYPE_VARINT), ReadTag())
        self.assertEqual(3, d.ReadUInt32())
        self.assertEqual((4, wire_format.WIRETYPE_VARINT), ReadTag())
        self.assertEqual(4, d.ReadUInt64())
        self.assertEqual((5, wire_format.WIRETYPE_VARINT), ReadTag())
        self.assertEqual(5, d.ReadSInt32())

    def testCanonicalSerializationOrderSameAsCpp(self):
        # Copy of the same test we use for C++.
        proto = unittest_pb2.TestFieldOrderings()
        test_util.SetAllFieldsAndExtensions(proto)
        serialized = proto.SerializeToString()
        test_util.ExpectAllFieldsAndExtensionsInOrder(serialized)

    def testMergeFromStringWhenFieldsAlreadySet(self):
        first_proto = unittest_pb2.TestAllTypes()
        first_proto.repeated_string.append('foobar')
        first_proto.optional_int32 = 23
        first_proto.optional_nested_message.bb = 42
        serialized = first_proto.SerializeToString()
        second_proto = unittest_pb2.TestAllTypes()
        second_proto.repeated_string.append('baz')
        second_proto.optional_int32 = 100
        second_proto.optional_nested_message.bb = 999
        second_proto.MergeFromString(serialized)
        # Ensure that we append to repeated fields.
        self.assertEqual(['baz', 'foobar'], list(second_proto.repeated_string))
        # Ensure that we overwrite nonrepeated scalars.
        self.assertEqual(23, second_proto.optional_int32)
        # Ensure that we recursively call MergeFromString() on
        # submessages.
        self.assertEqual(42, second_proto.optional_nested_message.bb)

    def testMessageSetWireFormat(self):
        proto = unittest_mset_pb2.TestMessageSet()
        extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
        extension_message2 = unittest_mset_pb2.TestMessageSetExtension2
        extension1 = extension_message1.message_set_extension
        extension2 = extension_message2.message_set_extension
        proto.Extensions[extension1].i = 123
        proto.Extensions[extension2].str = 'foo'
        # Serialize using the MessageSet wire format (this is specified in the
        # .proto file).
        serialized = proto.SerializeToString()
        # Re-read the MessageSet bytes through RawMessageSet, which shares the
        # wire layout but is declared WITHOUT message_set_wire_format.
        raw = unittest_mset_pb2.RawMessageSet()
        self.assertEqual(False,
                         raw.DESCRIPTOR.GetOptions().message_set_wire_format)
        raw.MergeFromString(serialized)
        self.assertEqual(2, len(raw.item))
        message1 = unittest_mset_pb2.TestMessageSetExtension1()
        message1.MergeFromString(raw.item[0].message)
        self.assertEqual(123, message1.i)
        message2 = unittest_mset_pb2.TestMessageSetExtension2()
        message2.MergeFromString(raw.item[1].message)
        self.assertEqual('foo', message2.str)
        # Deserialize using the MessageSet wire format.
        proto2 = unittest_mset_pb2.TestMessageSet()
        proto2.MergeFromString(serialized)
        self.assertEqual(123, proto2.Extensions[extension1].i)
        self.assertEqual('foo', proto2.Extensions[extension2].str)
        # Check byte size.
        self.assertEqual(proto2.ByteSize(), len(serialized))
        self.assertEqual(proto.ByteSize(), len(serialized))

    def testMessageSetWireFormatUnknownExtension(self):
        # Create a message using the message set wire format with an unknown
        # message.
        raw = unittest_mset_pb2.RawMessageSet()
        # Add an item.
        item = raw.item.add()
        item.type_id = 1545008
        extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
        message1 = unittest_mset_pb2.TestMessageSetExtension1()
        message1.i = 12345
        item.message = message1.SerializeToString()
        # Add a second, unknown extension.
        item = raw.item.add()
        item.type_id = 1545009
        extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
        message1 = unittest_mset_pb2.TestMessageSetExtension1()
        message1.i = 12346
        item.message = message1.SerializeToString()
        # Add another unknown extension.
        item = raw.item.add()
        item.type_id = 1545010
        message1 = unittest_mset_pb2.TestMessageSetExtension2()
        message1.str = 'foo'
        item.message = message1.SerializeToString()
        serialized = raw.SerializeToString()
        # Parse message using the message set wire format.
        proto = unittest_mset_pb2.TestMessageSet()
        proto.MergeFromString(serialized)
        # Check that the message parsed well.
        extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
        extension1 = extension_message1.message_set_extension
        # NOTE(review): assertEquals is the deprecated alias of assertEqual.
        self.assertEquals(12345, proto.Extensions[extension1].i)

    def testUnknownFields(self):
        proto = unittest_pb2.TestAllTypes()
        test_util.SetAllFields(proto)
        serialized = proto.SerializeToString()
        # The empty message should be parsable with all of the fields
        # unknown.
        proto2 = unittest_pb2.TestEmptyMessage()
        # Parsing this message should succeed.
        proto2.MergeFromString(serialized)
        # Now test with a int64 field set.
        proto = unittest_pb2.TestAllTypes()
        proto.optional_int64 = 0x0fffffffffffffff
        serialized = proto.SerializeToString()
        # The empty message should be parsable with all of the fields
        # unknown.
        proto2 = unittest_pb2.TestEmptyMessage()
        # Parsing this message should succeed.
        proto2.MergeFromString(serialized)

    def _CheckRaises(self, exc_class, callable_obj, exception):
        """This method checks if the exception type and message are as expected."""
        try:
            callable_obj()
        # Python 2 except syntax; this module will not parse under Python 3.
        except exc_class, ex:
            # Check if the exception message is the right one.
            self.assertEqual(exception, str(ex))
            return
        else:
            raise self.failureException('%s not raised' % str(exc_class))

    def testSerializeUninitialized(self):
        """SerializeToString must raise until every required field is set;
        SerializePartialToString must never raise."""
        proto = unittest_pb2.TestRequired()
        self._CheckRaises(
            message.EncodeError,
            proto.SerializeToString,
            'Required field protobuf_unittest.TestRequired.a is not set.')
        # Shouldn't raise exceptions.
        partial = proto.SerializePartialToString()
        proto.a = 1
        self._CheckRaises(
            message.EncodeError,
            proto.SerializeToString,
            'Required field protobuf_unittest.TestRequired.b is not set.')
        # Shouldn't raise exceptions.
        partial = proto.SerializePartialToString()
        proto.b = 2
        self._CheckRaises(
            message.EncodeError,
            proto.SerializeToString,
            'Required field protobuf_unittest.TestRequired.c is not set.')
        # Shouldn't raise exceptions.
        partial = proto.SerializePartialToString()
        proto.c = 3
        serialized = proto.SerializeToString()
        # Shouldn't raise exceptions.
        partial = proto.SerializePartialToString()
        proto2 = unittest_pb2.TestRequired()
        proto2.MergeFromString(serialized)
        self.assertEqual(1, proto2.a)
        self.assertEqual(2, proto2.b)
        self.assertEqual(3, proto2.c)
        proto2.ParseFromString(partial)
        self.assertEqual(1, proto2.a)
        self.assertEqual(2, proto2.b)
        self.assertEqual(3, proto2.c)
class OptionsTest(unittest.TestCase):
    """Reads message-level options back through the descriptor API."""

    def testMessageOptions(self):
        """message_set_wire_format must mirror each .proto declaration."""
        cases = [
            (unittest_mset_pb2.TestMessageSet(), True),
            (unittest_pb2.TestAllTypes(), False),
        ]
        for instance, expected in cases:
            options = instance.DESCRIPTOR.GetOptions()
            self.assertEqual(expected, options.message_set_wire_format)
class UtilityTest(unittest.TestCase):
    """Tests private helpers in the reflection module."""

    def testImergeSorted(self):
        """_ImergeSorted must lazily merge already-sorted iterables,
        preserving duplicates both across and within inputs."""
        ImergeSorted = reflection._ImergeSorted
        # Various types of emptiness.
        self.assertEqual([], list(ImergeSorted()))
        self.assertEqual([], list(ImergeSorted([])))
        self.assertEqual([], list(ImergeSorted([], [])))
        # One nonempty list.
        self.assertEqual([1, 2, 3], list(ImergeSorted([1, 2, 3])))
        self.assertEqual([1, 2, 3], list(ImergeSorted([1, 2, 3], [])))
        self.assertEqual([1, 2, 3], list(ImergeSorted([], [1, 2, 3])))
        # Merging some nonempty lists together.
        self.assertEqual([1, 2, 3], list(ImergeSorted([1, 3], [2])))
        self.assertEqual([1, 2, 3], list(ImergeSorted([1], [3], [2])))
        self.assertEqual([1, 2, 3], list(ImergeSorted([1], [3], [2], [])))
        # Elements repeated across component iterators.
        self.assertEqual([1, 2, 2, 3, 3],
                         list(ImergeSorted([1, 2], [3], [2, 3])))
        # Elements repeated within an iterator.
        self.assertEqual([1, 2, 2, 3, 3],
                         list(ImergeSorted([1, 2, 2], [3], [3])))
# Standard unittest entry point: discover and run the TestCase classes above.
if __name__ == '__main__':
    unittest.main()
``` |
[
{
"content": "Here is the code block:\n```python\n# coding: utf-8\n\n\"\"\"\n Gene Feature Enumeration Service\n\n The Gene Feature Enumeration (GFE) Submission service provides an API for converting raw sequence data to GFE. It provides both a RESTful API and a simple user interface for converting raw se... | [
{
"content": "Here is the code block:\n<|memory_start|>```python\n# coding: utf-8\n\n\"\"\"\n Gene Feature Enumeration Service\n\n The Gene Feature Enumeration (GFE) Submission service provides an API for converting raw sequence data to GFE. It provides both a RESTful API and a simple user interface for c... | ```python
# coding: utf-8
"""
Gene Feature Enumeration Service
The Gene Feature Enumeration (GFE) Submission service provides an API for converting raw sequence data to GFE. It provides both a RESTful API and a simple user interface for converting raw sequence data to GFE results. Sequences can be submitted one at a time or as a fasta file. This service uses <a href=\"https://github.com/nmdp-bioinformatics/service-feature\">nmdp-bioinformatics/service-feature</a> for encoding the raw sequence data and <a href=\"https://github.com/nmdp-bioinformatics/HSA\">nmdp-bioinformatics/HSA</a> for aligning the raw sequence data. The code is open source, and available on <a href=\"https://github.com/nmdp-bioinformatics/service-gfe-submission\">GitHub</a>.<br><br>Go to <a href=\"http://service-gfe-submission.readthedocs.io\">service-gfe-submission.readthedocs.io</a> for more information
OpenAPI spec version: 1.0.7
Contact: mhalagan@nmdp.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import sys
from setuptools import setup, find_packages
NAME = "swagger_client"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"]
setup(
name=NAME,
version=VERSION,
description="Gene Feature Enumeration Service",
author_email="mhalagan@nmdp.org",
url="",
keywords=["Swagger", "Gene Feature Enumeration Service"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
The Gene Feature Enumeration (GFE) Submission service provides an API for converting raw sequence data to GFE. It provides both a RESTful API and a simple user interface for converting raw sequence data to GFE results. Sequences can be submitted one at a time or as a fasta file. This service uses <a href=\"https://github.com/nmdp-bioinformatics/service-feature\">nmdp-bioinformatics/service-feature</a> for encoding the raw sequence data and <a href=\"https://github.com/nmdp-bioinformatics/HSA\">nmdp-bioinformatics/HSA</a> for aligning the raw sequence data. The code is open source, and available on <a href=\"https://github.com/nmdp-bioinformatics/service-gfe-submission\">GitHub</a>.<br><br>Go to <a href=\"http://service-gfe-submission.readthedocs.io\">service-gfe-submission.readthedocs.io</a> for more information
"""
)
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\n# -*- coding: utf-8 -*-\n# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4\n\n# Disabling the below pylint warnings in order to use long names convention in the tests\n# and because some entities are used seamlessly instead of being directl... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4\n\n# Disabling the below pylint warnings in order to use long names convention in the tests\n# and because some entities are used seamlessly instead ... | ```python
# -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
# Disabling the below pylint warnings in order to use long names convention in the tests
# and because some entities are used seamlessly instead of being directly called.
# pylint: disable=invalid-name
# pylint: disable=unused-import
"""
tests.TestIntegration
------------------
The integration test set for functions in pypoabus.__main__
"""
import pytest
import pytest_mock
import requests
from pypoabus import __main__, __title__, __version__
from pypoabus.pypoabus import BusLine
def test_get_version(mock, capsys):
    """ Check if -v returns the correct application version """
    mock.patch('sys.argv', ['', '-v'])
    expected = '{} {}\n'.format(__title__, __version__)
    # argparse's version action exits the process; swallow SystemExit so
    # the captured output can still be asserted.
    try:
        __main__.main()
    except SystemExit:
        pass
    capture_result = capsys.readouterr()
    assert capture_result.out == expected
def test_get_line_list_from_valid_zone(mock, capsys):
    """ Checks if cli returns the correct bus list in unformatted json
    for correct zone
    """
    expected = '{ "list": ["l1", "l2"] }\n'
    mock.patch('sys.argv', ['', '-l', 'south'])
    # Stub out the network-backed lookup with a canned payload.
    mock.patch('pypoabus.pypoabus.list_bus_lines', return_value='["l1", "l2"]')
    try:
        __main__.main()
    except SystemExit:
        pass
    capture_result = capsys.readouterr()
    assert capture_result.out == expected
def test_get_line_list_from_invalid_zone(mock, capsys):
    """ Checks if cli returns the correct error message
    for incorrect zone argument
    """
    zone = 'NOT_VALID_ZONE'
    mock.patch('sys.argv', ['', '-l', zone])
    # Expected text mirrors argparse's usage/error output verbatim.
    expected = "usage: {} [-h] [-v] [-l zone | -t line_code] [-f format]" \
               " [-d]\npypoabus: error: argument -l/--list: " \
               "invalid choice: '{}' (choose from 'north', " \
               "'south', 'east', 'public')\n".format(__title__, zone)
    try:
        __main__.main()
    except SystemExit:
        pass
    capture_result = capsys.readouterr()
    assert capture_result.err == expected
def test_get_timetable_from_valid_line(mock, capsys):
    """ Checks if cli returns the correct bus timetable in unformatted json
    for the correct busline
    """
    expected = '{"code": "bar", "name": "foo", "schedules": []}\n'
    mock.patch('sys.argv', ['', '-t', 'non_existing_line'])
    # Return a minimal BusLine so only the CLI's JSON rendering is tested.
    mock.patch('pypoabus.pypoabus.get_bus_timetable', return_value=BusLine('foo', 'bar'))
    try:
        __main__.main()
    except SystemExit:
        pass
    capture_result = capsys.readouterr()
    assert capture_result.out == expected
def test_get_timetable_from_invalid_line(mock, capsys):
    """ Checks if cli returns the correct error message
    for the incorrect busline argument
    """
    expected = 'pypoabus: Error to connect to the server: ' \
               'Unable to get EPTC page content. HTTP code: 500, reason: ' \
               'Internal Server Error\n\n'
    # Fake a 500 response at the requests layer so no network is touched.
    mocked_response = requests.Response()
    mocked_response.status_code = 500
    mocked_response.reason = 'Internal Server Error'
    mock.patch('sys.argv', ['', '-t', 'non_existing_line'])
    mock.patch('requests.get', return_value=mocked_response)
    try:
        __main__.main()
    except SystemExit:
        pass
    capture_result = capsys.readouterr()
    assert capture_result.err == expected
``` |
[
{
"content": "```python\n# -*- encoding: utf-8 -*-\r\n# This file is distributed under the same license as the Django package.\r\n#\r\nfrom __future__ import unicode_literals\r\n\r\n# The *_FORMAT strings use the Django date format syntax,\r\n# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#da... | [
{
"content": "<|memory_start|>```python\n# -*- encoding: utf-8 -*-\r\n# This file is distributed under the same license as the Django package.\r\n#\r\nfrom __future__ import unicode_literals\r\n\r\n# The *_FORMAT strings use the Django date format syntax,\r\n# see http://docs.djangoproject.com/en/dev/ref/templa... | ```python
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j N Y'
DATETIME_FORMAT = "j N Y, G.i"
TIME_FORMAT = 'G.i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y G.i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d-%m-%y', '%d/%m/%y',  # '25-10-09', '25/10/09'
    '%d-%m-%Y', '%d/%m/%Y',  # '25-10-2009', '25/10/2009'
    '%d %b %Y',              # '25 Oct 2006',
    '%d %B %Y',              # '25 October 2006'
)
TIME_INPUT_FORMATS = (
    '%H.%M.%S',  # '14.30.59'
    '%H.%M',     # '14.30'
)
DATETIME_INPUT_FORMATS = (
    '%d-%m-%Y %H.%M.%S',     # '25-10-2009 14.30.59'
    '%d-%m-%Y %H.%M.%S.%f',  # '25-10-2009 14.30.59.000200'
    '%d-%m-%Y %H.%M',        # '25-10-2009 14.30'
    '%d-%m-%Y',              # '25-10-2009'
    '%d-%m-%y %H.%M.%S',     # '25-10-09 14.30.59'
    '%d-%m-%y %H.%M.%S.%f',  # '25-10-09 14.30.59.000200'
    '%d-%m-%y %H.%M',        # '25-10-09 14.30'
    '%d-%m-%y',              # '25-10-09'
    # NOTE(review): the remaining month-first patterns look inconsistent with
    # the day-first patterns above for an Indonesian locale -- confirm intent.
    '%m/%d/%y %H.%M.%S',     # '10/25/06 14.30.59'
    '%m/%d/%y %H.%M.%S.%f',  # '10/25/06 14.30.59.000200'
    '%m/%d/%y %H.%M',        # '10/25/06 14.30'
    '%m/%d/%y',              # '10/25/06'
    '%m/%d/%Y %H.%M.%S',     # '10/25/2009 14.30.59'
    '%m/%d/%Y %H.%M.%S.%f',  # '10/25/2009 14.30.59.000200'
    '%m/%d/%Y %H.%M',        # '10/25/2009 14.30'
    '%m/%d/%Y',              # '10/25/2009'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
``` |
[
{
"content": "```python\n# coding=UTF-8\n\"\"\"\nSMS Plus GSM MultiInfo gateway\n\"\"\"\nfrom __future__ import division\nfrom hashlib import sha1\nimport unicodedata, httplib, urllib, urlparse, json, BaseHTTPServer, time\nfrom satella.threads import BaseThread\nfrom cyrkus.redaction.plugins.httpifc.format impo... | [
{
"content": "<|memory_start|>```python\n# coding=UTF-8\n\"\"\"\nSMS Plus GSM MultiInfo gateway\n\"\"\"\nfrom __future__ import division\nfrom hashlib import sha1\nimport unicodedata, httplib, urllib, urlparse, json, BaseHTTPServer, time\nfrom satella.threads import BaseThread\nfrom cyrkus.redaction.plugins.htt... | ```python
# coding=UTF-8
"""
SMS Plus GSM MultiInfo gateway
"""
from __future__ import division
from hashlib import sha1
import unicodedata, httplib, urllib, urlparse, json, BaseHTTPServer, time
from satella.threads import BaseThread
from cyrkus.redaction.plugins.httpifc.format import fformat
class HttpServerThread(BaseThread):
    """Background thread running a small HTTP status server for the plugin.

    GET /          -> human-readable status page (fformat of all node data)
    POST /<node>   -> JSON with that node's last report and its age in seconds
    """

    class HTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def __init__(self, request, client_address, server):
            # NOTE(review): BaseHTTPRequestHandler.__init__ processes the
            # request and sends the response before returning, so the
            # assignment below runs after the fact and looks like dead code
            # -- confirm before relying on self.request here.
            BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request, client_address, server)
            self.request = request

        def do_GET(self):
            # Only the root path serves the status page.
            if self.path == '/':
                self.send_response(200)
                self.send_header('Content-Type', 'text/html; charset=UTF-8')
                self.end_headers()
                self.wfile.write(fformat(self.server.plugin.last_data, self.server.plugin.last_records))
            else:
                # NOTE(review): no end_headers() on this branch, so the 404
                # response is technically unterminated -- confirm.
                self.send_response(404)

        def do_POST(self):
            # Path is '/<nodename>'; strip the leading slash.
            nodename = self.path[1:]
            try:
                ld = self.server.plugin.last_data[nodename]
                lr = self.server.plugin.last_records[nodename]
            except KeyError:
                # Unknown node: reply 404 and stop.
                self.send_error(404)
                return
            self.send_response(200)
            self.send_header('Access-Control-Allow-Origin', '*')
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps(
                {
                    'node': ld,
                    'secs_ago': int(time.time() - lr)
                }
            ))

    def __init__(self, calling_plugin, listening_ifc):
        BaseThread.__init__(self)
        self.plugin = calling_plugin
        # (host, port) pair expected by BaseHTTPServer.HTTPServer.
        self.listen_ifc = tuple(listening_ifc)

    def run(self):
        # serve_forever() never returns; the thread lives for the process.
        httpd = BaseHTTPServer.HTTPServer(self.listen_ifc, HttpServerThread.HTTPRequestHandler)
        # Expose the owning plugin to request handlers via the server object.
        httpd.plugin = self.plugin
        httpd.serve_forever()
class Plugin(object):
    """Caches the most recent report from each node and serves them over
    the embedded HTTP status server started at construction time."""

    def __init__(self, config, plugins):
        """@param config: configuration, as seen in config.json"""
        self.config = config
        self.plugins = plugins
        self.last_data = {}  # nodename => data dict
        self.last_records = {}  # nodename => last transmit time (timestamp)
        # Start the status server immediately; it runs for process lifetime.
        self.http_server = HttpServerThread(self, config['listen_interface'])
        self.http_server.start()

    def on_received_report(self, data):
        """Received a report from other node"""
        # Index by the reporting node's name and remember when it arrived.
        self.last_data[data['nodename']] = data
        self.last_records[data['nodename']] = time.time()
``` |
[
{
"content": "```python\n# -*- coding: utf-8 -*-\n# Copyright (C) 2013 Stéphane Caron <caron@ynl.t.u-tokyo.ac.jp>\n#\n# This file is part of the Time-Optimal Path Parameterization (TOPP) library.\n# TOPP is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Publ... | [
{
"content": "<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# Copyright (C) 2013 Stéphane Caron <caron@ynl.t.u-tokyo.ac.jp>\n#\n# This file is part of the Time-Optimal Path Parameterization (TOPP) library.\n# TOPP is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Les... | ```python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Stéphane Caron <caron@ynl.t.u-tokyo.ac.jp>
#
# This file is part of the Time-Optimal Path Parameterization (TOPP) library.
# TOPP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# at your option, any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Integer status codes returned by the TOPP solver.
TOPP_UNSPEC = 0
TOPP_OK = 1
TOPP_CANNOT_PREPROCESS = 2
TOPP_SHORT_TRAJ = 3
TOPP_MVC_HIT_ZERO = 4
TOPP_CLC_ERROR = 5
TOPP_SDBEGMIN_TOO_HIGH = 6
TOPP_SDENDMIN_TOO_HIGH = 7
TOPP_FWD_HIT_ZERO = 8
TOPP_BWD_HIT_ZERO = 9
TOPP_FWD_FAIL = 10
TOPP_BWD_FAIL = 11
# Human-readable description for each status code above
# (used by NoTrajectoryFound.__str__).
MESSAGES = {
    TOPP_UNSPEC: "unspecified error",
    TOPP_OK: "everything OK",
    TOPP_CANNOT_PREPROCESS: "cannot preprocess trajectory",
    TOPP_SHORT_TRAJ: "trajectory too short",
    TOPP_MVC_HIT_ZERO: "MVC hit the sd=0 axis",
    TOPP_CLC_ERROR: "some CLC error",
    TOPP_SDBEGMIN_TOO_HIGH: "sdbegmin is too high",
    TOPP_SDENDMIN_TOO_HIGH: "sdendmin is too high",
    TOPP_FWD_HIT_ZERO: "forward integration hit the sd=0 axis",
    TOPP_BWD_HIT_ZERO: "backward integration hit the sd=0 axis",
    TOPP_FWD_FAIL: "forward integration failed",
    TOPP_BWD_FAIL: "backward integration failed"
}
class NoTrajectoryFound(Exception):
    """Raised when TOPP cannot produce a time-optimal parameterization.

    The numeric TOPP status code is kept in ``error_code``; ``str()`` maps
    it to a human-readable message via the module-level MESSAGES table.
    """

    def __init__(self, error_code):
        # Initialize the base Exception so .args is populated as usual
        # (the original override left args empty).
        Exception.__init__(self, error_code)
        self.error_code = error_code

    def __str__(self):
        # Fall back to a generic message for unknown codes instead of
        # letting __str__ raise KeyError.
        return MESSAGES.get(self.error_code,
                            "unknown error code %r" % (self.error_code,))
``` |
[
{
"content": "Repeat the following code:\n```python\nTORCH_START_VALUE = 15\nTHING_START_VALUE = 20\nINVISIBLE_START_VALUE = 21\nFANTASTIC_START_VALUE = 3\n\n\nclass Story:\n heros = [\"Heidi\", \"Fantastic\", \"Tourch\", \"Thing\", \"Invisible\"]\n\n heidi = 0\n fantastic = FANTASTIC_START_VALUE\n ... | [
{
"content": "Repeat the following code:\n<|memory_start|>```python\nTORCH_START_VALUE = 15\nTHING_START_VALUE = 20\nINVISIBLE_START_VALUE = 21\nFANTASTIC_START_VALUE = 3\n\n\nclass Story:\n heros = [\"Heidi\", \"Fantastic\", \"Tourch\", \"Thing\", \"Invisible\"]\n\n heidi = 0\n fantastic = FANTASTIC_S... | ```python
# Starting values for the four tracked characters.
TORCH_START_VALUE = 15
THING_START_VALUE = 20
INVISIBLE_START_VALUE = 21
FANTASTIC_START_VALUE = 3
class Story:
    """A small program written as a play: mutually recursive "scene" methods
    mutate the characters' numeric values and print intermediate results.
    Only act1_scene1 is invoked (module-level call below); the act2 scenes
    are defined but not reached from that entry point.
    """
    # Character roster (descriptive only; not used by the logic).
    heros = ["Heidi", "Fantastic", "Tourch", "Thing", "Invisible"]
    # Shared class-level state mutated by the scenes.
    heidi = 0
    fantastic = FANTASTIC_START_VALUE
    torch = TORCH_START_VALUE
    thing = THING_START_VALUE
    invisible = INVISIBLE_START_VALUE
    def act1_scene1(self):
        self.fantastic = 1
        self.invisible = INVISIBLE_START_VALUE
        # 1 == 21 is always false here, so the else branch is what runs.
        if self.fantastic == self.invisible:
            self.act1_scene2()
        else:
            self.torch = 4
            print(self.fantastic)
            self.act1_scene2()
    def act1_scene2(self):
        self.thing = THING_START_VALUE
        self.fantastic = 2
        self.act1_scene3()
    def act1_scene3(self):
        # Recursively counts self.thing down to 1, then moves to scene 4.
        if self.thing <= 1:
            self.act1_scene4()
        else:
            self.fantastic = 4
            self.thing -= 1
            self.act1_scene3()
    def act1_scene4(self):
        # NOTE(review): "/" is true division on Python 3 (invisible becomes a
        # float) but floor division on Python 2 -- confirm the intended
        # interpreter, as it changes the printed output.
        self.invisible += self.fantastic / 2
        self.torch -= 1
        if self.thing <= self.torch:
            self.act1_scene2()
        else:
            print(self.invisible)
            self.act1_scene3()
    def act2_scene1(self):
        self.torch = 0
        print(self.torch)
        self.torch = TORCH_START_VALUE
        self.act2_scene2()
    def act2_scene2(self):
        # Halve torch repeatedly while it stays even, bounded by fantastic <= 32.
        if self.torch % 2 == 1:
            print(self.fantastic)
        else:
            self.thing = self.torch / 2
            self.fantastic += 1
            self.torch = self.thing
            if self.fantastic <= 32:
                self.act2_scene2()
# Entry point: the whole "play" runs at import time.
Story().act1_scene1()
``` |
[
{
"content": "Repeat the full code snippet:\n```python\n# coding=utf-8\n\n__author__ = \"Gina Häußge <osd@foosel.net>\"\n__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'\n\nimport logging\nimport os\nimport threading\nimport urllib\nimport time\nimport subprocess\nimport f... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\n# coding=utf-8\n\n__author__ = \"Gina Häußge <osd@foosel.net>\"\n__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'\n\nimport logging\nimport os\nimport threading\nimport urllib\nimport time\nimport subp... | ```python
# coding=utf-8
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import logging
import os
import threading
import urllib
import time
import subprocess
import fnmatch
import datetime
import sys
import shutil
import octoprint.util as util
from octoprint.settings import settings
from octoprint.events import eventManager, Events
import sarge
# Currently configured timelapse instance (None while timelapse is "off");
# managed by configureTimelapse().
current = None
def getFinishedTimelapses():
    """Return metadata dicts for every rendered timelapse movie (*.mpg)
    found in the configured timelapse folder."""
    basedir = settings().getBaseFolder("timelapse")
    movies = []
    for entry in os.listdir(basedir):
        if not fnmatch.fnmatch(entry, "*.mpg"):
            continue
        stat = os.stat(os.path.join(basedir, entry))
        movies.append({
            "name": entry,
            "size": util.getFormattedSize(stat.st_size),
            "bytes": stat.st_size,
            "date": util.getFormattedDateTime(datetime.datetime.fromtimestamp(stat.st_ctime))
        })
    return movies
# Timelapse types the settings UI may select from.
validTimelapseTypes = ["off", "timed", "zchange"]

# Listeners that get pushed the current timelapse configuration.
updateCallbacks = []


def registerCallback(callback):
    """Subscribe *callback* to timelapse configuration updates (idempotent)."""
    if callback not in updateCallbacks:
        updateCallbacks.append(callback)


def unregisterCallback(callback):
    """Unsubscribe *callback*; a no-op if it was never registered."""
    if callback in updateCallbacks:
        updateCallbacks.remove(callback)


def notifyCallbacks(timelapse):
    """Push the configuration of *timelapse* (or None when unset) to all
    registered callbacks, logging -- but not propagating -- any failure."""
    config = None if timelapse is None else timelapse.configData()
    for callback in updateCallbacks:
        try:
            callback.sendTimelapseConfig(config)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are no longer swallowed; one broken callback must not stop
            # the others from being notified.
            logging.getLogger(__name__).exception("Exception while pushing timelapse configuration")
def configureTimelapse(config=None, persist=False):
    """Replace the module-global `current` timelapse according to *config*.

    config: dict of the form stored under settings ["webcam"]["timelapse"]
        (keys: "type", optional "postRoll", optional "options"); when None
        the persisted settings value is used.
    persist: when True the given config is also written back to settings.
    """
    global current
    if config is None:
        config = settings().get(["webcam", "timelapse"])
    # Tear down the previous instance (stops capture, unsubscribes events).
    if current is not None:
        current.unload()
    type = config["type"]
    postRoll = 0
    if "postRoll" in config:
        postRoll = config["postRoll"]
    # Instantiate the matching timelapse flavour; anything unknown or "off"
    # leaves `current` as None.
    if type is None or "off" == type:
        current = None
    elif "zchange" == type:
        current = ZTimelapse(postRoll=postRoll)
    elif "timed" == type:
        interval = 10
        if "options" in config and "interval" in config["options"]:
            interval = config["options"]["interval"]
        current = TimedTimelapse(postRoll=postRoll, interval=interval)
    # Tell registered listeners about the new configuration.
    notifyCallbacks(current)
    if persist:
        settings().set(["webcam", "timelapse"], config)
        settings().save()
class Timelapse(object):
    """Base class for print-job timelapses.

    Collects webcam snapshots into a temporary capture directory during a
    print job and renders them into an .mpg movie with ffmpeg once the job
    ends.  Subclasses decide *when* frames are captured (see ZTimelapse and
    TimedTimelapse) by overriding eventSubscriptions()/processPostRoll().
    """
    def __init__(self, postRoll=0):
        # postRoll: seconds of extra still footage appended after the print.
        self._logger = logging.getLogger(__name__)
        self._imageNumber = None     # next frame index; None while idle
        self._inTimelapse = False    # True while a print is being captured
        self._gcodeFile = None       # basename of the file being printed
        self._postRoll = postRoll
        self._postRollStart = None   # timestamp when the post roll began
        self._onPostRollDone = None  # deferred work to run after post roll
        self._captureDir = settings().getBaseFolder("timelapse_tmp")
        self._movieDir = settings().getBaseFolder("timelapse")
        self._snapshotUrl = settings().get(["webcam", "snapshot"])
        self._fps = 25
        self._renderThread = None
        self._captureMutex = threading.Lock()
        # subscribe events
        eventManager().subscribe(Events.PRINT_STARTED, self.onPrintStarted)
        eventManager().subscribe(Events.PRINT_FAILED, self.onPrintDone)
        eventManager().subscribe(Events.PRINT_DONE, self.onPrintDone)
        eventManager().subscribe(Events.PRINT_RESUMED, self.onPrintResumed)
        for (event, callback) in self.eventSubscriptions():
            eventManager().subscribe(event, callback)
    def postRoll(self):
        """Return the configured post-roll duration in seconds."""
        return self._postRoll
    def unload(self):
        """Abort any running capture (discarding footage) and detach from all events."""
        if self._inTimelapse:
            self.stopTimelapse(doCreateMovie=False)
        # unsubscribe events
        eventManager().unsubscribe(Events.PRINT_STARTED, self.onPrintStarted)
        eventManager().unsubscribe(Events.PRINT_FAILED, self.onPrintDone)
        eventManager().unsubscribe(Events.PRINT_DONE, self.onPrintDone)
        eventManager().unsubscribe(Events.PRINT_RESUMED, self.onPrintResumed)
        for (event, callback) in self.eventSubscriptions():
            eventManager().unsubscribe(event, callback)
    def onPrintStarted(self, event, payload):
        """
        Override this to perform additional actions upon start of a print job.
        """
        self.startTimelapse(payload["file"])
    def onPrintDone(self, event, payload):
        """
        Override this to perform additional actions upon the stop of a print job.
        """
        self.stopTimelapse(success=(event==Events.PRINT_DONE))
    def onPrintResumed(self, event, payload):
        """
        Override this to perform additional actions upon the pausing of a print job.
        """
        if not self._inTimelapse:
            self.startTimelapse(payload["file"])
    def eventSubscriptions(self):
        """
        Override this method to subscribe to additional events by returning an array of (event, callback) tuples.
        Events that are already subscribed:
        * PrintStarted - self.onPrintStarted
        * PrintResumed - self.onPrintResumed
        * PrintFailed - self.onPrintDone
        * PrintDone - self.onPrintDone
        """
        return []
    def configData(self):
        """
        Override this method to return the current timelapse configuration data. The data should have the following
        form:
        type: "<type of timelapse>",
        options: { <additional options> }
        """
        return None
    def startTimelapse(self, gcodeFile):
        """Reset capture state and begin collecting frames for *gcodeFile*."""
        self._logger.debug("Starting timelapse for %s" % gcodeFile)
        self.cleanCaptureDir()
        self._imageNumber = 0
        self._inTimelapse = True
        self._gcodeFile = os.path.basename(gcodeFile)
    def stopTimelapse(self, doCreateMovie=True, success=True):
        """End the capture; optionally run the post roll and render the movie.

        success selects the normal vs. "-failed" movie file name suffix.
        """
        self._logger.debug("Stopping timelapse")
        self._inTimelapse = False
        # Small closures so the post-roll machinery can defer this work
        # until processPostRoll() signals completion via _onPostRollDone.
        def resetImageNumber():
            self._imageNumber = None
        def createMovie():
            self._renderThread = threading.Thread(target=self._createMovie, kwargs={"success": success})
            self._renderThread.daemon = True
            self._renderThread.start()
        def resetAndCreate():
            resetImageNumber()
            createMovie()
        if self._postRoll > 0:
            self._postRollStart = time.time()
            if doCreateMovie:
                self._onPostRollDone = resetAndCreate
            else:
                self._onPostRollDone = resetImageNumber
            self.processPostRoll()
        else:
            self._postRollStart = None
            if doCreateMovie:
                resetAndCreate()
            else:
                resetImageNumber()
    def processPostRoll(self):
        """Hook: subclasses produce the post-roll frames here and must invoke
        self._onPostRollDone when finished."""
        pass
    def captureImage(self):
        """Asynchronously grab one webcam snapshot into the capture directory;
        returns the target file name (or None when capture is not possible)."""
        if self._captureDir is None:
            self._logger.warn("Cannot capture image, capture directory is unset")
            return
        if self._imageNumber is None:
            self._logger.warn("Cannot capture image, image number is unset")
            return
        # The mutex guards the frame counter against concurrent captures.
        with self._captureMutex:
            filename = os.path.join(self._captureDir, "tmp_%05d.jpg" % self._imageNumber)
            self._imageNumber += 1
        self._logger.debug("Capturing image to %s" % filename)
        captureThread = threading.Thread(target=self._captureWorker, kwargs={"filename": filename})
        captureThread.daemon = True
        captureThread.start()
        return filename
    def _captureWorker(self, filename):
        """Thread body: download one snapshot from the webcam URL to *filename*,
        rolling the frame counter back on failure."""
        eventManager().fire(Events.CAPTURE_START, {"file": filename})
        try:
            urllib.urlretrieve(self._snapshotUrl, filename)
            self._logger.debug("Image %s captured from %s" % (filename, self._snapshotUrl))
        except:
            self._logger.exception("Could not capture image %s from %s, decreasing image counter again" % (filename, self._snapshotUrl))
            with self._captureMutex:
                if self._imageNumber is not None and self._imageNumber > 0:
                    self._imageNumber -= 1
        eventManager().fire(Events.CAPTURE_DONE, {"file": filename})
    def _createMovie(self, success=True):
        """Render the captured frames to an .mpg via ffmpeg, applying the
        configured flips and optional watermark; fires MOVIE_RENDERING and
        then MOVIE_DONE or MOVIE_FAILED."""
        ffmpeg = settings().get(["webcam", "ffmpeg"])
        bitrate = settings().get(["webcam", "bitrate"])
        if ffmpeg is None or bitrate is None:
            self._logger.warn("Cannot create movie, path to ffmpeg or desired bitrate is unset")
            return
        # %05d pattern matches the frames written by captureImage().
        input = os.path.join(self._captureDir, "tmp_%05d.jpg")
        if success:
            output = os.path.join(self._movieDir, "%s_%s.mpg" % (os.path.splitext(self._gcodeFile)[0], time.strftime("%Y%m%d%H%M%S")))
        else:
            output = os.path.join(self._movieDir, "%s_%s-failed.mpg" % (os.path.splitext(self._gcodeFile)[0], time.strftime("%Y%m%d%H%M%S")))
        # prepare ffmpeg command
        command = [
            ffmpeg, '-loglevel', 'error', '-i', input, '-vcodec', 'mpeg2video', '-pix_fmt', 'yuv420p', '-r', str(self._fps), '-y', '-b:v', bitrate,
            '-f', 'vob']
        filters = []
        # flip video if configured
        if settings().getBoolean(["webcam", "flipH"]):
            filters.append('hflip')
        if settings().getBoolean(["webcam", "flipV"]):
            filters.append('vflip')
        # add watermark if configured
        watermarkFilter = None
        if settings().getBoolean(["webcam", "watermark"]):
            watermark = os.path.join(os.path.dirname(__file__), "static", "img", "watermark.png")
            if sys.platform == "win32":
                # Because ffmpeg hiccups on windows' drive letters and backslashes we have to give the watermark
                # path a special treatment. Yeah, I couldn't believe it either...
                watermark = watermark.replace("\\", "/").replace(":", "\\\\:")
            watermarkFilter = "movie=%s [wm]; [%%(inputName)s][wm] overlay=10:main_h-overlay_h-10" % watermark
        filterstring = None
        if len(filters) > 0:
            if watermarkFilter is not None:
                filterstring = "[in] %s [postprocessed]; %s [out]" % (",".join(filters), watermarkFilter % {"inputName": "postprocessed"})
            else:
                filterstring = "[in] %s [out]" % ",".join(filters)
        elif watermarkFilter is not None:
            filterstring = watermarkFilter % {"inputName": "in"} + " [out]"
        if filterstring is not None:
            self._logger.debug("Applying videofilter chain: %s" % filterstring)
            command.extend(["-vf", sarge.shell_quote(filterstring)])
        # finalize command with output file
        self._logger.debug("Rendering movie to %s" % output)
        command.append("\"" + output + "\"")
        eventManager().fire(Events.MOVIE_RENDERING, {"gcode": self._gcodeFile, "movie": output, "movie_basename": os.path.basename(output)})
        # NOTE(review): the argument list is flattened to a single shell string
        # with hand-rolled quoting of the output path only -- verify that no
        # other argument (ffmpeg path, bitrate) can contain spaces or quotes.
        command_str = " ".join(command)
        self._logger.debug("Executing command: %s" % command_str)
        try:
            p = sarge.run(command_str, stderr=sarge.Capture())
            if p.returncode == 0:
                eventManager().fire(Events.MOVIE_DONE, {"gcode": self._gcodeFile, "movie": output, "movie_basename": os.path.basename(output)})
            else:
                returncode = p.returncode
                stderr_text = p.stderr.text
                self._logger.warn("Could not render movie, got return code %r: %s" % (returncode, stderr_text))
                eventManager().fire(Events.MOVIE_FAILED, {"gcode": self._gcodeFile, "movie": output, "movie_basename": os.path.basename(output), "returncode": returncode, "error": stderr_text})
        except:
            self._logger.exception("Could not render movie due to unknown error")
            eventManager().fire(Events.MOVIE_FAILED, {"gcode": self._gcodeFile, "movie": output, "movie_basename": os.path.basename(output), "returncode": 255, "error": "Unknown error"})
    def cleanCaptureDir(self):
        """Remove all leftover frame files (*.jpg) from the capture directory."""
        if not os.path.isdir(self._captureDir):
            self._logger.warn("Cannot clean capture directory, it is unset")
            return
        for filename in os.listdir(self._captureDir):
            if not fnmatch.fnmatch(filename, "*.jpg"):
                continue
            os.remove(os.path.join(self._captureDir, filename))
class ZTimelapse(Timelapse):
    """Timelapse that captures one frame on every Z layer change."""
    def __init__(self, postRoll=0):
        Timelapse.__init__(self, postRoll=postRoll)
        self._logger.debug("ZTimelapse initialized")
    def eventSubscriptions(self):
        # Capture on every ZChange event in addition to the base subscriptions.
        return [
            ("ZChange", self._onZChange)
        ]
    def configData(self):
        return {
            "type": "zchange"
        }
    def processPostRoll(self):
        """Fake the post roll by capturing one final frame and duplicating it
        postRoll * fps times."""
        Timelapse.processPostRoll(self)
        filename = os.path.join(self._captureDir, "tmp_%05d.jpg" % self._imageNumber)
        self._imageNumber += 1
        # NOTE(review): _captureWorker is called while _captureMutex is held,
        # and its error path re-acquires the same non-reentrant lock -- a
        # failed snapshot here would deadlock. Confirm and consider capturing
        # outside the lock.
        with self._captureMutex:
            self._captureWorker(filename)
        for i in range(self._postRoll * self._fps):
            newFile = os.path.join(self._captureDir, "tmp_%05d.jpg" % (self._imageNumber))
            self._imageNumber += 1
            shutil.copyfile(filename, newFile)
        if self._onPostRollDone is not None:
            self._onPostRollDone()
    def _onZChange(self, event, payload):
        # Z height changed -> grab a frame.
        self.captureImage()
class TimedTimelapse(Timelapse):
    """Timelapse that captures a frame every *interval* seconds on a
    dedicated timer thread."""
    def __init__(self, postRoll=0, interval=1):
        Timelapse.__init__(self, postRoll=postRoll)
        self._interval = interval
        if self._interval < 1:
            self._interval = 1 # force minimum interval of 1s
        self._timerThread = None
        self._logger.debug("TimedTimelapse initialized")
    def interval(self):
        """Return the capture interval in seconds."""
        return self._interval
    def configData(self):
        return {
            "type": "timed",
            "options": {
                "interval": self._interval
            }
        }
    def onPrintStarted(self, event, payload):
        """Start the capture timer thread (at most one) on top of the base handling."""
        Timelapse.onPrintStarted(self, event, payload)
        if self._timerThread is not None:
            return
        self._timerThread = threading.Thread(target=self._timerWorker)
        self._timerThread.daemon = True
        self._timerThread.start()
    def onPrintDone(self, event, payload):
        # Drop the thread reference; the worker loop exits on its own once
        # _inTimelapse is cleared and the post roll window has elapsed.
        Timelapse.onPrintDone(self, event, payload)
        self._timerThread = None
    def _timerWorker(self):
        """Thread body: capture one frame per interval while printing and
        during the post-roll window, then trigger the post-roll callback."""
        self._logger.debug("Starting timer for interval based timelapse")
        # NOTE(review): elapsed *seconds* are compared against
        # postRoll * fps (a frame count) -- confirm whether the post roll is
        # really meant to last postRoll*fps seconds here.
        while self._inTimelapse or (self._postRollStart and time.time() - self._postRollStart <= self._postRoll * self._fps):
            self.captureImage()
            time.sleep(self._interval)
        if self._postRollStart is not None and self._onPostRollDone is not None:
            self._onPostRollDone()
        self._postRollStart = None
``` |
[
{
"content": "Here is the source code:\n```python\n#\n# Gramps - a GTK+/GNOME based genealogy program\n#\n# Copyright (C) 2002-2006 Donald N. Allingham\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free ... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\n#\n# Gramps - a GTK+/GNOME based genealogy program\n#\n# Copyright (C) 2002-2006 Donald N. Allingham\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published... | ```python
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....ggettext import gettext as _
# Python 2.3 compatibility: fall back to sets.Set when the built-in
# set type (new in Python 2.4) is unavailable.
try:
    set()
except NameError:
    from sets import Set as set
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# IsDescendantFamilyOf
#
#-------------------------------------------------------------------------
class IsDescendantFamilyOf(Rule):
    """Rule that checks for a person that is a descendant or the spouse
    of a descendant of a specified person"""

    labels = [ _('ID:'), _('Inclusive:') ]
    name = _('Descendant family members of <person>')
    category = _('Descendant filters')
    description = _("Matches people that are descendants or the spouse "
                    "of a descendant of a specified person")

    def prepare(self, db):
        """Build the set of matching person handles for *db*."""
        self.db = db
        self.matches = set()
        self.root_person = db.get_person_from_gramps_id(self.list[0])
        self.add_matches(self.root_person)
        # The second rule argument ("Inclusive:") decides whether the root
        # person and his/her spouses stay in the result; default: inclusive.
        try:
            inclusive = bool(int(self.list[1]))
        except IndexError:
            inclusive = True
        if not inclusive:
            self.exclude()

    def reset(self):
        """Clear the cached matches between filter runs."""
        self.matches = set()

    def apply(self, db, person):
        """Return True when *person* is among the computed matches."""
        return person.handle in self.matches

    def add_matches(self, person):
        """Recursively add *person*, all descendants, and the spouses of
        those descendants to self.matches."""
        if not person:
            return
        # Add self
        self.matches.add(person.handle)
        for family_handle in person.get_family_handle_list():
            family = self.db.get_family_from_handle(family_handle)
            if not family:
                continue
            # Add every child recursively
            for child_ref in family.get_child_ref_list():
                if child_ref:
                    self.add_matches(self.db.get_person_from_handle(child_ref.ref))
            # Add spouse. Guard against single-parent families: the original
            # code added a None handle to the matches set.
            if person.handle == family.get_father_handle():
                spouse_handle = family.get_mother_handle()
            else:
                spouse_handle = family.get_father_handle()
            if spouse_handle:
                self.matches.add(spouse_handle)

    def exclude(self):
        # This removes root person and his/her spouses from the matches set.
        if not self.root_person:
            return
        # discard() instead of remove(): a handle may legitimately be absent
        # (e.g. a missing spouse), which previously raised KeyError.
        self.matches.discard(self.root_person.handle)
        for family_handle in self.root_person.get_family_handle_list():
            family = self.db.get_family_from_handle(family_handle)
            if not family:
                continue
            if self.root_person.handle == family.get_father_handle():
                spouse_handle = family.get_mother_handle()
            else:
                spouse_handle = family.get_father_handle()
            self.matches.discard(spouse_handle)
``` |
[
{
"content": "Write the code verbatim:\n```python\n#-------------------------------------------------------------------------------\n# Name: FRC\n# Purpose: Module to simplify getting standings, match results, and OPRs\n# from the FIRST web pages\n# Author: BaselA\n#---------------... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n#-------------------------------------------------------------------------------\n# Name: FRC\n# Purpose: Module to simplify getting standings, match results, and OPRs\n# from the FIRST web pages\n# Author: BaselA\n... | ```python
#-------------------------------------------------------------------------------
# Name: FRC
# Purpose: Module to simplify getting standings, match results, and OPRs
# from the FIRST web pages
# Author: BaselA
#-------------------------------------------------------------------------------
from BeautifulSoup import BeautifulSoup
import urllib2
from re import search
from re import sub
from re import compile
from numpy.linalg import solve
from numpy.linalg import cholesky
import numpy
# Static 2014 FRC event table: official event code -> week, code and
# display name, scraped/maintained by hand.
eventInfo = {'code': {'week': '6', 'code': 'code', 'name': 'Colorado'},
             'mdba': {'week': '6', 'code': 'mdba', 'name': 'Chesapeake'},
             'ctgro': {'week': '2', 'code': 'ctgro', 'name': 'Groton'},
             'waamv': {'week': '1', 'code': 'waamv', 'name': 'Auburn Mountainview'},
             'wamou': {'week': '3', 'code': 'wamou', 'name': 'Mt. Vernon'},
             'washo': {'week': '4', 'code': 'washo', 'name': 'Shorewood'},
             'vari': {'week': '4', 'code': 'vari', 'name': 'Virginia'},
             'mitry': {'week': '6', 'code': 'mitry', 'name': 'Troy'},
             'lake': {'week': '6', 'code': 'lake', 'name': 'Bayou'},
             'njbri': {'week': '5', 'code': 'njbri', 'name': 'Bridgewater-Raritan'},
             'nytr': {'week': '3', 'code': 'nytr', 'name': 'New York Tech Valley'},
             'nyli': {'week': '5', 'code': 'nyli', 'name': 'SBPLI Long Island'},
             'okok': {'week': '5', 'code': 'okok', 'name': 'Oklahoma'},
             'onwi': {'week': '6', 'code': 'onwi', 'name': 'Windsor Essex Great Lakes'},
             'azch': {'week': '4', 'code': 'azch', 'name': 'Arizona'},
             'ilch': {'week': '6', 'code': 'ilch', 'name': 'Midwest'},
             'txho': {'week': '6', 'code': 'txho', 'name': 'Lone Star'},
             'mndu': {'week': '2', 'code': 'mndu', 'name': 'Lake Superior'},
             'gadu': {'week': '5', 'code': 'gadu', 'name': 'Peachtree'},
             'ncre': {'week': '3', 'code': 'ncre', 'name': 'North Carolina'},
             'onwa': {'week': '4', 'code': 'onwa', 'name': 'Waterloo'},
             'misou': {'week': '1', 'code': 'misou', 'name': 'Southfield'},
             'mrcmp': {'week': '7', 'code': 'mrcmp', 'name': 'Mid-Atlantic Robotics FRC Region Championship'},
             'melew': {'week': '6', 'code': 'melew', 'name': 'Pine Tree'},
             'milan': {'week': '6', 'code': 'milan', 'name': 'Lansing'},
             'mxmc': {'week': '3', 'code': 'mxmc', 'name': 'Mexico City'},
             'nyny': {'week': '6', 'code': 'nyny', 'name': 'New York City'},
             'arfa': {'week': '2', 'code': 'arfa', 'name': 'Arkansas'},
             'qcmo': {'week': '4', 'code': 'qcmo', 'name': 'Festival de Robotique FRC a Montreal'},
             'miwmi': {'week': '4', 'code': 'miwmi', 'name': 'West Michigan'},
             'scmb': {'week': '1', 'code': 'scmb', 'name': 'Palmetto'},
             'onnb': {'week': '5', 'code': 'onnb', 'name': 'North Bay'},
             'mosl': {'week': '3', 'code': 'mosl', 'name': 'St. Louis'},
             'orosu': {'week': '6', 'code': 'orosu', 'name': 'Oregon State University'},
             'pahat': {'week': '1', 'code': 'pahat', 'name': 'Hatboro-Horsham'},
             'dcwa': {'week': '5', 'code': 'dcwa', 'name': 'Greater DC'},
             'wache': {'week': '3', 'code': 'wache', 'name': 'Eastern Washington University'},
             'ctsou': {'week': '4', 'code': 'ctsou', 'name': 'Southington'},
             'nhnas': {'week': '1', 'code': 'nhnas', 'name': 'Granite State'},
             'onto2': {'week': '1', 'code': 'onto2', 'name': 'Greater Toronto West'},
             'orore': {'week': '2', 'code': 'orore', 'name': 'Oregon City'},
             'nhdur': {'week': '2', 'code': 'nhdur', 'name': 'UNH'},
             'inwl': {'week': '4', 'code': 'inwl', 'name': 'Boilermaker'},
             'misjo': {'week': '5', 'code': 'misjo', 'name': 'St. Joseph'},
             'rismi': {'week': '4', 'code': 'rismi', 'name': 'Rhode Island'},
             'onto': {'week': '2', 'code': 'onto', 'name': 'Greater Toronto East'},
             'necmp': {'week': '7', 'code': 'necmp', 'name': 'New England FRC Region Championship'},
             'mitvc': {'week': '4', 'code': 'mitvc', 'name': 'Traverse City'},
             'mawor': {'week': '3', 'code': 'mawor', 'name': 'WPI'},
             'inth': {'week': '2', 'code': 'inth', 'name': 'Crossroads'},
             'mndu2': {'week': '2', 'code': 'mndu2', 'name': 'Northern Lights'},
             'flfo': {'week': '6', 'code': 'flfo', 'name': 'South Florida'},
             'miket': {'week': '2', 'code': 'miket', 'name': 'Kettering University'},
             'mihow': {'week': '3', 'code': 'mihow', 'name': 'Howell'},
             'waell': {'week': '5', 'code': 'waell', 'name': 'Central Washington University'},
             'wimi': {'week': '4', 'code': 'wimi', 'name': 'Wisconsin'},
             'calb': {'week': '4', 'code': 'calb', 'name': 'Los Angeles'},
             'casd': {'week': '2', 'code': 'casd', 'name': 'San Diego'},
             'miliv': {'week': '5', 'code': 'miliv', 'name': 'Livonia'},
             'casa': {'week': '3', 'code': 'casa', 'name': 'Sacramento'},
             'casb': {'week': '1', 'code': 'casb', 'name': 'Inland Empire'},
             'mabos': {'week': '5', 'code': 'mabos', 'name': 'Northeastern University'},
             'casj': {'week': '6', 'code': 'casj', 'name': 'Silicon Valley'},
             'txlu': {'week': '2', 'code': 'txlu', 'name': 'Hub City'},
             'mibed': {'week': '6', 'code': 'mibed', 'name': 'Bedford'},
             'txsa': {'week': '1', 'code': 'txsa', 'name': 'Alamo'},
             'nvlv': {'week': '6', 'code': 'nvlv', 'name': 'Las Vegas'},
             'txda': {'week': '3', 'code': 'txda', 'name': 'Dallas'},
             'migul': {'week': '2', 'code': 'migul', 'name': 'Gull Lake'},
             'abca': {'week': '6', 'code': 'abca', 'name': 'Western Canada'},
             'pncmp': {'week': '7', 'code': 'pncmp', 'name': 'Autodesk FRC Championship'},
             'orwil': {'week': '4', 'code': 'orwil', 'name': 'Wilsonville'},
             'utwv': {'week': '3', 'code': 'utwv', 'name': 'Utah'},
             'wasno': {'week': '2', 'code': 'wasno', 'name': 'Glacier Peak'},
             'njfla': {'week': '1', 'code': 'njfla', 'name': 'Mt. Olive'},
             'ista': {'week': '6', 'code': 'ista', 'name': 'Israel'},
             'nyro': {'week': '5', 'code': 'nyro', 'name': 'Finger Lakes'},
             'ilil': {'week': '1', 'code': 'ilil', 'name': 'Central Illinois'},
             'mnmi': {'week': '5', 'code': 'mnmi', 'name': 'Minnesota 10000 Lakes'},
             'njtab': {'week': '4', 'code': 'njtab', 'name': 'Lenape-Seneca'},
             'miwat': {'week': '5', 'code': 'miwat', 'name': 'Waterford'},
             'hiho': {'week': '5', 'code': 'hiho', 'name': 'Hawaii'},
             'njcli': {'week': '3', 'code': 'njcli', 'name': 'Clifton'},
             'papi': {'week': '5', 'code': 'papi', 'name': 'Greater Pittsburgh'},
             'ohci': {'week': '5', 'code': 'ohci', 'name': 'Queen City'},
             'ohcl': {'week': '4', 'code': 'ohcl', 'name': 'Buckeye'},
             'miesc': {'week': '3', 'code': 'miesc', 'name': 'Escanaba'},
             'tnkn': {'week': '5', 'code': 'tnkn', 'name': 'Smoky Mountains'},
             'mokc': {'week': '3', 'code': 'mokc', 'name': 'Greater Kansas City'},
             'cthar': {'week': '5', 'code': 'cthar', 'name': 'Hartford'},
             'flor': {'week': '3', 'code': 'flor', 'name': 'Orlando'},
             'paphi': {'week': '3', 'code': 'paphi', 'name': 'Springside Chestnut Hill'},
             'micen': {'week': '1', 'code': 'micen', 'name': 'Center Line'},
             'mimid': {'week': '4', 'code': 'mimid', 'name': 'Great Lakes Bay Region'},
             'mnmi2': {'week': '5', 'code': 'mnmi2', 'name': 'Minnesota North Star'},
             'micmp': {'week': '7', 'code': 'micmp', 'name': 'Michigan FRC State Championship'},
             'cama': {'week': '2', 'code': 'cama', 'name': 'Central Valley'}}
# Flat list of every recognized event code (including championship fields).
# NOTE(review): 'ochl' below looks like a typo for 'ohcl' (Buckeye) -- the
# abbreviations table and eventInfo both use 'ohcl'; confirm before fixing.
codes =['abca','arc','arfa','azch','calb','cama','casa','casb','casd','casj',
        'cmp','code','ctgro','cthar','ctsou','cur','dcwa','flfo','flor','gadu','gal',
        'hiho','ilch','ilil','inth','inwl','ista','lake','mabos','mawor','mdba','melew',
        'mibed','micen','micmp','miesc','migul','mihow','miket','milan','miliv','mimid',
        'misjo','misou','mitry','mitvc','miwat','miwmi','mndu','mndu2','mnmi','mnmi2',
        'mokc','mosl','mrcmp','mxmc','ncre','necmp','new','nhdur','nhnas','njbri','njcli',
        'njfla','njtab','nvlv','nyli','nyny','nyro','nytr','ochl','ohci','okok','onnb',
        'onto','onto2','onwa','onwi','orore','orosu','orwil','pahat','paphi','papi',
        'pncmp','qcmo','rismi','scmb','tnkn','txda','txho','txlu','txsa','utwv','vari',
        'waahs','waamv','wache','waell','wamou','washo','wasno','wimi']
# Normalized nickname/alias (lowercase, separators stripped) -> official code,
# consumed by getCode() after its own normalization pass.
abbreviations = {'abca':'abca','wcr':'abca','westcanada':'abca','westerncanada':'abca',
    'westerncanadian':'abca','arc':'arc','archimedes':'arc','arfa':'arfa','razorback':'arfa',
    'arkansas':'arfa','azch':'azch','phoenix':'azch','arizona':'azch','calb':'calb',
    'losangeles':'calb','LA':'calb','cama':'cama','centralvalley':'cama','cvr':'cama',
    'casa':'casa','sacramento':'casa','casb':'casb','inlandempire':'casb','casd':'casd',
    'sandiego':'casd','casj':'casj','siliconvalley':'casj','svr':'casj','championship':'cmp',
    'cmp':'cmp','einstein':'cmp','code':'code','colorado':'code','groton':'ctgro','ctgro':'ctgro',
    'connecticut':'cthar','cthar':'cthar','hartford':'cthar','ctha':'cthar','southington':'ctsou',
    'ctsou':'ctsou','cur':'cur','curie':'cur','dc':'dcwa','dcwa':'dcwa','washington':'dcwa',
    'washingtondc':'dcwa','flbr':'flfo','southflorida':'flfo','flor':'flor','orlando':'flor',
    'gadu':'gadu','peachtree':'gadu','gal':'gal','galileo':'gal','hawaii':'hiho','hiho':'hiho',
    'ilch':'ilch','midwest':'ilch','ilil':'ilil','centralillinois':'ilil','centralil':'ilil',
    'centillinois':'ilil','centil':'ilil','crossroads':'inth','inth':'inth','bmr':'inwl',
    'boilermaker':'inwl','inwl':'inwl','israel':'ista','isreal':'ista','ista':'ista',
    'bayou':'lake','lake':'lake','boston':'mabos','mabos':'mabos','mabo':'mabos',
    'northeastern':'mabos','mawo':'mawor','wpi':'mawor','mawor':'mawor','chesapeake':'mdba',
    'mdba':'mdba','mele':'melew','pine tree':'melew','ptr':'melew','melew':'melew',
    'bedford':'mibed','mibed':'mibed','centerline':'micen','micen':'micen',
    'michiganstatechampionship':'micmp','micmp':'micmp','msc':'micmp','escanaba':'miesc',
    'miesc':'miesc','gulllake':'migul','migul':'migul','howell':'mihow','mihow':'mihow',
    'kettering':'miket','ketteringuniversity':'miket','miket':'miket','lansing':'milan',
    'milan':'milan','livonia':'miliv','miliv':'miliv','mimid':'mimid','greatlakesbay':'mimid',
    'greatlakesbayregion':'mimid','greatlakes':'mimid','misou':'misou','Southfield':'misou',
    'misjo':'misjo','stjoe':'misjo','stjoseph':'misjo','mitry':'mitry','troy':'mitry',
    'mitvc':'mitvc','tc':'mitvc','traversecity':'mitvc','miwfd':'miwat','waterford':'miwat',
    'miwat':'miwat','miwmi':'miwmi','westmichigan':'miwmi','wmr':'miwmi','lakesuperior':'mndu',
    'mndu':'mndu','mndu2':'mndu2','northernlights':'mndu2','10000lakes':'mnmi',
    '10klakes':'mnmi','mnmi':'mnmi','minnesotanorthstar':'mnmi2','mnmi2':'mnmi2',
    'northstar':'mnmi2','greaterkansascity':'mokc','kansascity':'mokc','kc':'mokc',
    'kcr':'mokc','mokc':'mokc','mosl':'mosl','stlouis':'mosl','lehigh':'mrcmp',
    'mar':'mrcmp','marcmp':'mrcmp','mrcmp':'mrcmp','mexico':'mxmc','mexicocity':'mxmc',
    'mxmc':'mxmc','ncr':'ncre','ncre':'ncre','northcarolina':'ncre','new':'new',
    'newton':'new','newenglandcmp':'necmp','newenglandchampionship':'necmp',
    'necmp':'necmp','nechampionship':'necmp','ne':'necmp','nhdur':'nhdur','unh':'nhdur',
    'bae':'nhnas','baegranitestate':'nhnas','granitestate':'nhnas','gsr':'nhnas',
    'nhma':'nhnas','nhnas':'nhnas','br':'njbri','bridgewater':'njbri',
    'bridgewaterraritan':'njbri','njbrg':'njbri','njbri':'njbri','clifton':'njcli',
    'njcli':'njcli','flanders':'njfla','mountolive':'njfla','mtolive':'njfla',
    'njfla':'njfla','lenape':'njtab','lenapeseneca':'njtab','njlen':'njtab',
    'njtab':'njtab','lasvegas':'nvlv','lvr':'nvlv','nvlv':'nvlv','vegas':'nvlv',
    'longisland':'nyli','nyli':'nyli','sbplilongisland':'nyli','sbpli':'nyli',
    'newyorkcity':'nyny','nyc':'nyny','nyny':'nyny','fingerlakes':'nyro','flr':'nyro',
    'nyro':'nyro','newyorktechvalley':'nytr','techvalley':'nytr','nytr':'nytr',
    'ohic':'ohci','qcr':'ohci','queencity':'ohci','ohci':'ohci','buckeye':'ohcl',
    'ohcl':'ohcl','oklahoma':'okok','okok':'okok','okc':'okok','northbay':'onnb',
    'onnb':'onnb','greatertorontoeast':'onto','gtre':'onto','onto':'onto',
    'torontoeast':'onto','greatertorontowest':'onto2','gtrw':'onto2','onto2':'onto2',
    'torontowest':'onto2','onwa':'onwa','waterloo':'onwa','onwi':'onwi','windsor':'onwi',
    'windsoressex':'onwi','oregoncity':'orore','orore':'orore','oregonstate':'orosu',
    'orosu':'orosu','wilsonville':'orwil','orwil':'orwil','hatborohorsham':'pahat',
    'hh':'pahat','pahat':'pahat','chestnuthill':'paphi','paphi':'paphi','springside':'paphi',
    'springsidechestnuthill':'paphi','papi':'papi','pittsburgh':'papi','pnw':'pncmp',
    'pacificcmp':'pncmp','pacificnorthwestcmp':'pncmp','pnwcmp':'pncmp',
    'pncmp':'pncmp','montreal':'qcmo','qcmo':'qcmo','rhodeisland':'rismi',
    'rismi':'rismi','palmetto':'scmb','scmb':'scmb','smokymountains':'tnkn',
    'smr':'tnkn','tnkn':'tnkn','dallas':'txda','txda':'txda','lonestar':'txho',
    'lsr':'txho','txho':'txho','hubcity':'txlu','txlu':'txlu','alamo':'txsa',
    'txsa':'txsa','utah':'utwv','utwv':'utwv','vari':'vari','virginia':'vari',
    'auburn':'waahs','waahs':'waahs','auburnmtn':'waamv','auburnmountainview':'waamv',
    'waamv':'waamv','centralwash':'waell','centralwashington':'waell','waell':'waell',
    'mtvernon':'wamou','wamou':'wamou','spokane':'wache','wach':'wache','wasche':'wache',
    'eastwash':'wache','eastwashington':'wache','easternwash':'wache','easternwashington':'wache',
    'wache':'wache','shorewood':'washo','washo':'washo','glacierpeak':'wasno',
    'wasno':'wasno','wimi':'wimi','wisconsin':'wimi'}
def getCode(phrase):
    """Normalize an event name or abbreviation to its canonical event code.

    Returns None (implicitly) when nothing matches.
    """
    phrase = phrase.lower()
    # Exact match against the known event codes wins immediately.
    if phrase in codes:
        return phrase
    # Strip separator characters, then the generic words, then look the
    # result up in the abbreviation table.
    # NOTE(review): '[ -\.]' is a character *range* from space to '.', so it
    # also strips !"#$%&'()*+, -- confirm that is intended.
    phrase = sub("[ -\.]", "", phrase)
    phrase = sub("district", "", sub("regional", "", phrase))
    return abbreviations.get(phrase)
def frclinksTo(code, whatDoYouWant, year):
    """Resolve a frclinks.com shortcut to the real FIRST results URL.

    whatDoYouWant: "None" for the event page, "m" for matches, "r" for
    rankings.
    NOTE(review): any other value leaves `url` unbound and raises NameError.
    """
    code = getCode(code)
    if whatDoYouWant == "None":
        url = "http://frclinks.frclinks.com/e/"+str(code)
    elif whatDoYouWant == "m" or whatDoYouWant == "r":
        url = "http://frclinks.frclinks.com/e/"+str(whatDoYouWant)+"/"+str(code)
    # 2014 is the default season on frclinks; other years are appended.
    if year != 2014:
        url = url + "/"+str(year)
    soup = BeautifulSoup(urllib2.urlopen(url))
    # frclinks redirects via javascript; the destination URL lives inside
    # the third <script> tag -- slice off the surrounding redirect code.
    return soup.findAll('script')[2].getText()[19:-2]
def getTeamlist(code):
    """Scrape the list of team numbers registered for an event."""
    code = getCode(code)
    soup = BeautifulSoup(urllib2.urlopen(frclinksTo(code, "None", 2014)))
    teams = []
    # The first 5 anchors in this deeply nested table are navigation
    # links, not team numbers.
    for team in soup.body.center.table.tr.td.p.center.table.tr.td.findAll('a')[5:]:
        teams.append(team.getText())
    return teams
def getTeamStandings(code):
    """Scrape the rankings page and return team numbers sorted numerically."""
    code = getCode(code)
    soup = BeautifulSoup(urllib2.urlopen(frclinksTo(code, "r", 2014)))
    teams = []
    for team in soup.findAll('tr', {"style":"background-color:#FFFFFF;"}):
        for i,value in enumerate(team):
            # Child index 3 of each ranking row holds the team number.
            if i == 3:
                try:
                    teams.append(value.getText())
                except AttributeError:
                    # Bare strings between cells have no getText(); skip them.
                    pass
    # Sort numerically, then convert back to strings for display.
    teams = list(str(z) for z in sorted(int(x) for x in teams))
    return teams
def removeBlanks(array):
    """Drop trailing rows whose tenth column is blank (or missing).

    Mutates `array` in place.  Returns `array` itself as soon as a
    non-blank row is seen at the end, or a fresh empty list when every
    row was stripped.
    """
    while array:
        try:
            tenth = array[-1][9]
        except IndexError:
            # Row too short to have a tenth column: treat it as blank.
            array.pop()
            continue
        if tenth == "" or tenth == " ":
            array.pop()
        else:
            return array
    return []
def getMatches(code):
    """Scrape qualification and elimination match results for an event.

    Returns (quals, elims): two lists of per-match cell-text lists.
    """
    code = getCode(code)
    soup = BeautifulSoup(urllib2.urlopen(frclinksTo(code,"m", 2014)))
    values = []
    quals= []
    # White rows form the normal match table; the mso-yfti rows appear on
    # championship-style pages (their 6 header / 3 footer rows are skipped).
    for match in soup.findAll('tr', {"style":"background-color:#FFFFFF;"}) + soup.findAll('tr', {"style": compile('mso-yfti-irow:[0-9]')})[6:-3]:
        # Rows whose first cell starts with E/D/T are section headers.
        if search('(E|D|T)', match.td.getText()[0]):
            pass
        else:
            for value in match:
                # Skip the bare strings BeautifulSoup yields between cells.
                if isinstance(value, basestring):
                    pass
                else:
                    values.append(value.getText())
            quals.append(values)
            values = []
    elims = []
    # Drop a trailing footer row that has no second column.
    try:
        quals[-1][1]
    except IndexError:
        del(quals[-1])
    if code == "cmp":
        # Championship pages mark elimination matches with 'i'/'t' in column 1.
        # NOTE(review): if a row never matches '(i|t)' this loop cannot
        # advance (nothing is popped) -- possible infinite loop; confirm cmp
        # pages only contain elimination rows.
        while len(quals)>0:
            try:
                if search('(i|t)', quals[-1][1]):
                    elims.append(quals.pop())
            except:
                pass
    else:
        while search('(i|t)', quals[-1][1]):
            elims.append(quals.pop())
    elims.reverse()
    for match in elims:
        # Remove the elimination-round label column so rows align with quals.
        del match[1]
    quals = removeBlanks(quals)
    elims = removeBlanks(elims)
    return quals, elims
def getStandings(code):
    """Scrape the full ranking table; returns one list of cell texts per team.

    Column 6 is rewritten in place from a total into a derived score by
    subtracting columns 5 and 3.
    NOTE(review): confirm the column layout (3=assist, 5=truss, 6=total)
    against the 2014 rankings page.
    """
    code = getCode(code)
    soup = BeautifulSoup(urllib2.urlopen(frclinksTo(code, "r", 2014)))
    standings = []
    for team in soup.findAll('tr', {"style":"background-color:#FFFFFF;"}):
        values = []
        for value in team:
            try:
                values.append(value.getText())
            except AttributeError:
                # Bare strings between cells have no getText(); skip them.
                pass
        values[6]=float(values[6])-float(values[5])-float(values[3])
        standings.append(values)
    return standings
def getOprMatrix(code):
    """Build the symmetric co-appearance matrix and score vector for OPR.

    Returns (oprMatrix, teamScores) where oprMatrix[i][j] counts how often
    team i and team j played on the same alliance and teamScores[i] is the
    sum of the alliance scores of team i's matches.
    """
    code = getCode(code)
    teamList = getTeamStandings(code)
    teamDict={}
    # One co-appearance counter per (team, otherTeam) pair plus a score sum.
    for team in teamList:
        otherTeamList = {"totalScore":0}
        for otherTeam in teamList:
            otherTeamList[otherTeam] = 0
        teamDict[team] = otherTeamList
    alliances=[]
    scores =[]
    # Each qualification match contributes a red and a blue alliance;
    # scores[] is kept in the same interleaved (red, blue) order.
    for matchNum, match in enumerate(getMatches(code)[0]):
        redData=match[2:5]
        scores.append(match[8])
        blueData=match[5:8]
        scores.append(match[9])
        alliances.append(redData)
        alliances.append(blueData)
    for allianceNum, alliance in enumerate(alliances):
        for team in alliance:
            # A team co-appears with every alliance member, itself included.
            teamDict[team][alliance[0]] = teamDict[team][alliance[0]] + 1
            teamDict[team][alliance[1]] = teamDict[team][alliance[1]] + 1
            teamDict[team][alliance[2]] = teamDict[team][alliance[2]] + 1
            teamDict[team]["totalScore"] = teamDict[team]["totalScore"] + int(scores[allianceNum])
    # Flatten the nested dict into matrix/vector form, ordered by teamList.
    oprMatrix =[]
    teamScores = []
    for team in teamList:
        oprMatrixLine = []
        for otherTeam in teamList:
            oprMatrixLine.append(teamDict[team][otherTeam])
        oprMatrix.append(oprMatrixLine)
        teamScores.append(teamDict[team]["totalScore"])
    return oprMatrix, teamScores
def calcOPR(oprMatrix, scores):
    """Solve the OPR least-squares system via Cholesky factorization.

    Solves oprMatrix * x = scores by forward then backward substitution
    on the Cholesky factor.  Returns [] when the matrix is not positive
    definite (e.g. too few matches played).
    """
    try:
        factor = cholesky(oprMatrix)
        forward = solve(factor, scores)
        result = solve(factor.T.conj(), forward)
    except numpy.linalg.LinAlgError:
        return []
    return result
def getRegOpr(code):
    """Return each team's overall OPR for an event, rounded to 2 decimals.

    Returns [] when the OPR system could not be solved.
    """
    code = getCode(code)
    oprMatrix, scores = getOprMatrix(code)
    OPR = calcOPR(oprMatrix, scores)
    # BUG FIX: calcOPR returns a plain [] on failure but a numpy array on
    # success, and comparing a numpy array with `== []` is unreliable
    # (elementwise semantics).  Test emptiness by length instead.
    if len(OPR) == 0:
        return OPR
    for i in range(len(OPR)):
        OPR[i] = round(float(OPR[i]), 2)
    return OPR
def getAllOprs(code):
    """Compute overall OPR plus its auto/assist/truss/foul components.

    Returns {team: {"OPR":..., "autoOPR":..., "assistOPR":...,
    "trussOPR":..., "foulOPR":...}} rounded to 2 decimals, or {} when the
    OPR system could not be solved.
    """
    code = getCode(code)
    oprMatrix, totalScores = getOprMatrix(code)
    teamDict={}
    autoScores = []
    assistScores = []
    trussScores = []
    foulScores = []
    teamlist = getTeamStandings(code)
    # Index the per-component season totals by team number.
    for teamNum, team in enumerate(getStandings(code)):
        teamDict[team[1]] = {"autoScore":team[4], "assistScore":team[3], "trussScore":team[5], "foulScore":team[6]}
    # Re-order the component scores to match teamlist/oprMatrix ordering.
    for team in teamlist:
        autoScores.append(teamDict[team]["autoScore"])
        assistScores.append(teamDict[team]["assistScore"])
        trussScores.append(teamDict[team]["trussScore"])
        foulScores.append(teamDict[team]["foulScore"])
    totOPR = calcOPR(oprMatrix, totalScores)
    autoOPR = calcOPR(oprMatrix, autoScores)
    assistOPR = calcOPR(oprMatrix, assistScores)
    trussOPR = calcOPR(oprMatrix, trussScores)
    foulOPR = calcOPR(oprMatrix, foulScores)
    oprDict={}
    try:
        for teamNum, team in enumerate(getTeamStandings(code)):
            oprDict[team] = {"OPR":totOPR[teamNum], "autoOPR":autoOPR[teamNum], "assistOPR":assistOPR[teamNum], "trussOPR":trussOPR[teamNum], "foulOPR":foulOPR[teamNum]}
        # Distribute the gap between the overall OPR and the component sum
        # across the components, proportionally to their magnitudes.
        for team in oprDict:
            shift = oprDict[team]["OPR"]-(oprDict[team]["autoOPR"]+oprDict[team]["assistOPR"]+oprDict[team]["trussOPR"]+oprDict[team]["foulOPR"])
            oprSum = abs(oprDict[team]["autoOPR"])+abs(oprDict[team]["assistOPR"])+abs(oprDict[team]["trussOPR"])+abs(oprDict[team]["foulOPR"])
            # NOTE(review): "trussOPR" is missing from this redistribution
            # list even though it contributes to oprSum -- confirm whether
            # that is intentional.
            for oprType in ["autoOPR", "assistOPR", "foulOPR"]:
                oprDict[team][oprType] +=(shift/oprSum)*abs(oprDict[team][oprType])
        for team in oprDict:
            for value in oprDict[team]:
                oprDict[team][value] = round(float(oprDict[team][value]),2)
    except IndexError:
        # calcOPR returned [] (unsolvable system); fall through with
        # whatever was built so far (possibly {}).
        pass
    return oprDict
def main():
while True:
print "To get the teamlist for an event, type 'teams'"
print "To get the team OPRs for an event, type 'opr'"
print "To get all the team OPR subtypes for an event, type 'allopr'"
print "To get the standings for an event, type 'standings'"
want = raw_input("What do you want? ").lower()
while search('[ -\.]', want):
want = sub("[ -\.]", "", want)
if search("allopr", want):
code = getCode(raw_input("What event? "))
opr = getAllOprs(code)
teams = getTeamStandings(code)
print "\t".join(["Team", "OPR ", "autoOPR", "assistOPR", "trussOPR", "foulOPR"])
for team in teams:
print "\t".join([team+int(4-len(team))*" ", str(opr[team]["OPR"]), str(opr[team]["autoOPR"]), str(opr[team]["teleOPR"]), str(opr[team]["climbOPR"])])
elif search("opr", want):
code = getCode(raw_input("What event? "))
opr = getRegOpr(code)
for i,team in enumerate(getTeamStandings(code)):
print team, opr[i]
elif search("team(s|list)", want):
code = getCode(raw_input("What event? "))
teams = getTeamlist(code)
for team in teams:
print team
elif search("(standing|ranking)", want):
code = getCode(raw_input("What event? "))
standings = getStandings(code)
print "\t".join(["Rank", "Team", "QP ","AP ", "CP ", "TP ", "Record", "DQ", "Played"])
for team in standings:
team[0] += int(4-len(team[0]))*" "
if len(team[1]) < 4:
team[1] += int(4-len(team[1]))*" "
print "\t".join(team)
else:
print "I'm not sure what you mean. Try again?"
if __name__ == "__main__":
main()
``` |
[
{
"content": "```python\n# $Id: PlotItems.py 299 2007-03-30 12:52:17Z mhagger $\n\n# Copyright (C) 1998-2003 Michael Haggerty <mhagger@alum.mit.edu>\n#\n# This file is licensed under the GNU Lesser General Public License\n# (LGPL). See LICENSE.txt for details.\n\n\"\"\"PlotItems.py -- Objects that can be plott... | [
{
"content": "<|memory_start|>```python\n# $Id: PlotItems.py 299 2007-03-30 12:52:17Z mhagger $\n\n# Copyright (C) 1998-2003 Michael Haggerty <mhagger@alum.mit.edu>\n#\n# This file is licensed under the GNU Lesser General Public License\n# (LGPL). See LICENSE.txt for details.\n\n\"\"\"PlotItems.py -- Objects t... | ```python
# $Id: PlotItems.py 299 2007-03-30 12:52:17Z mhagger $
# Copyright (C) 1998-2003 Michael Haggerty <mhagger@alum.mit.edu>
#
# This file is licensed under the GNU Lesser General Public License
# (LGPL). See LICENSE.txt for details.
"""PlotItems.py -- Objects that can be plotted by Gnuplot.
This module contains several types of PlotItems. PlotItems can be
plotted by passing them to a Gnuplot.Gnuplot object. You can derive
your own classes from the PlotItem hierarchy to customize their
behavior.
"""
import os, string, tempfile, types
from io import StringIO
import numpy
import gp, utils, Errors
class _unset:
    """Sentinel used to represent unset keyword arguments.

    Used instead of None so that None itself remains a legal value.
    """
    pass
class PlotItem:
    """Plotitem represents an item that can be plotted by gnuplot.

    For the finest control over the output, you can create 'PlotItems'
    yourself with additional keyword options, or derive new classes
    from 'PlotItem'.

    Option handling: '_option_list' is a class-wide mapping
    '{ <option> : <setter> }' from option name to the function object
    used to set or change that option.  <setter> takes two parameters:
    'self' (the 'PlotItem' instance) and the new value requested for
    the option.  If <setter> is 'None', the option may only be set at
    construction time and an exception is raised on later changes.
    Derived classes may add entries to the dictionary (see the examples
    in this file) or override 'set_option' for wilder things.

    Members:

      '_options' -- a dictionary of (<option>,<string>) tuples
          corresponding to the plot options that have been set for
          this instance of the PlotItem.  <option> is the option as
          specified by the user; <string> is the string that needs to
          be set in the command line to set that option (or None if no
          string is needed).  Example::

              {'title' : ('Data', 'title "Data"'),
               'with' : ('linespoints', 'with linespoints')}
    """

    # For _option_list explanation, see the class docstring.
    _option_list = {
        'axes' : lambda self, axes: self.set_string_option(
            'axes', axes, None, 'axes %s'),
        'with' : lambda self, with_: self.set_string_option(
            'with', with_, None, 'with %s'),
        'title' : lambda self, title: self.set_string_option(
            'title', title, 'notitle', 'title "%s"'),
        }
    # 'with' is a Python keyword, so 'with_' is accepted as an alias:
    _option_list['with_'] = _option_list['with']

    # order in which options need to be passed to gnuplot:
    _option_sequence = [
        'binary',
        'index', 'every', 'thru', 'using', 'smooth',
        'axes', 'title', 'with'
        ]

    def __init__(self, **keyw):
        """Construct a 'PlotItem'.

        Keyword options:

          'with_=<string>' -- choose how item will be plotted, e.g.,
              with_='points 3 3'.

          'title=<string>' -- set the title to be associated with the item
              in the plot legend.

          'title=None' -- choose 'notitle' option (omit item from legend).

        Note that omitting the title option is different than setting
        'title=None'; the former chooses gnuplot's default whereas the
        latter chooses 'notitle'.
        """
        self._options = {}
        self.set_option(**keyw)

    def get_option(self, name):
        """Return the setting of an option.  May be overridden."""
        try:
            return self._options[name][0]
        # Narrowed from a bare 'except:' -- a missing option is the only
        # expected failure here, and bare excepts hide real bugs.
        except KeyError:
            raise KeyError('option %s is not set!' % name)

    def set_option(self, **keyw):
        """Set or change a plot option for this PlotItem.

        See documentation for '__init__' for information about allowed
        options.  This function can be overridden by derived classes to
        allow additional options, but it is usually easier to define a
        new '_option_list' variable for the derived class.

        Raises Errors.OptionError for unknown or immutable options.
        """
        for (option, value) in keyw.items():
            try:
                setter = self._option_list[option]
            except KeyError:
                raise Errors.OptionError('%s=%s' % (option,value))
            if setter is None:
                raise Errors.OptionError(
                    'Cannot modify %s option after construction!', option)
            else:
                setter(self, value)

    def set_string_option(self, option, value, default, fmt):
        """Set an option that takes a string value.

        None selects <default>; a string is formatted via <fmt>; anything
        else is an error.
        """
        if value is None:
            self._options[option] = (value, default)
        elif type(value) is str:
            self._options[option] = (value, fmt % value)
        else:
            # BUG FIX: the exception was previously constructed but never
            # raised, so invalid values were silently ignored.
            raise Errors.OptionError('%s=%s' % (option, value,))

    def clear_option(self, name):
        """Clear (unset) a plot option.  No error if option was not set."""
        try:
            del self._options[name]
        except KeyError:
            pass

    def get_base_command_string(self):
        """Return the elementary argument for gnuplot's plot command.

        Must be provided by subclasses (e.g., 'sin(x)' or '"file.dat"').
        """
        raise NotImplementedError()

    def get_command_option_string(self):
        """Render the currently-set options in gnuplot's required order."""
        cmd = []
        for opt in self._option_sequence:
            # Renamed the second element from 'str', which shadowed the
            # builtin of the same name.
            (value, text) = self._options.get(opt, (None, None))
            if text is not None:
                cmd.append(text)
        return " ".join(cmd)

    def command(self):
        """Build the plot command to be sent to gnuplot.

        Build and return the plot command, with options, necessary to
        display this item.
        """
        return " ".join([
            self.get_base_command_string(),
            self.get_command_option_string(),
            ])

    def pipein(self, f):
        """Pipe necessary inline data to gnuplot.

        If the plot command requires data to be put on stdin (i.e.,
        'plot "-"'), this method should put that data there.  Can be
        overridden in derived classes.
        """
        pass
class Func(PlotItem):
    """Represents a mathematical expression to plot.

    Func represents a mathematical expression that is to be computed by
    gnuplot itself, as if you would type for example::

        gnuplot> plot sin(x)

    into gnuplot itself.  The argument to the constructor is a string
    that should be a mathematical expression.  Example::

        g.plot(Func('sin(x)', with_='line 3'))

    As shorthand, a string passed to the plot method of a Gnuplot
    object is also treated as a Func::

        g.plot('sin(x)')
    """

    def __init__(self, function, **keyw):
        # <function> is the gnuplot expression string, stored verbatim.
        PlotItem.__init__(self, **keyw)
        self.function = function

    def get_base_command_string(self):
        # The expression itself is the argument to 'plot'.
        return self.function
class _FileItem(PlotItem):
    """Base class for PlotItems that gnuplot treats as file input.

    Not meant for direct use: concrete subclasses decide where the data
    actually live (existing file, inline data, temporary file, FIFO).
    """

    _option_list = PlotItem._option_list.copy()
    _option_list.update({
        'binary' : lambda self, binary: self.set_option_binary(binary),
        'index' : lambda self, value: self.set_option_colonsep('index', value),
        'every' : lambda self, value: self.set_option_colonsep('every', value),
        'using' : lambda self, value: self.set_option_colonsep('using', value),
        'smooth' : lambda self, smooth: self.set_string_option(
            'smooth', smooth, None, 'smooth %s'
            ),
        })

    def __init__(self, filename, **keyw):
        """Create a file-backed PlotItem for <filename>.

        <filename> is passed to gnuplot within double quotes.  It may be
        the name of an existing file, '-' for inline data, or the name
        of a named pipe.

        Keyword arguments:

          'using=<int>' -- plot that column against line number
          'using=<tuple>' -- plot using a:b:c:d etc.; None elements are
              output as the empty string
          'using=<string>' -- plot "using <string>" (allows gnuplot's
              arbitrary column arithmetic)
          'every=<value>' -- plot 'every <value>' (formatted like 'using')
          'index=<value>' -- plot 'index <value>' (formatted like 'using')
          'binary=<boolean>' -- data in the file is in binary format
              (only allowed for grid data for splot)
          'smooth=<string>' -- smooth the data; one of 'unique',
              'csplines', 'acsplines', 'bezier', or 'sbezier'

        Keywords recognized by 'PlotItem' can also be used.  Note that
        'using' columns are numbered by gnuplot, i.e. starting with 1.
        By default gnuplot titles the dataset after the filename plus
        any 'using' option; set 'title' explicitly to override.
        """
        self.filename = filename
        PlotItem.__init__(self, **keyw)

    def get_base_command_string(self):
        """Quote the filename for use in the plot command."""
        return gp.double_quote_string(self.filename)

    def set_option_colonsep(self, name, value):
        """Set a colon-separated option ('using', 'every', or 'index')."""
        if value is None:
            self.clear_option(name)
        elif type(value) in [str, int]:
            self._options[name] = (value, '%s %s' % (name, value))
        elif type(value) is tuple:
            # None elements are rendered as empty fields between colons.
            pieces = ['' if piece is None else str(piece) for piece in value]
            self._options[name] = (value, '%s %s' % (name, ':'.join(pieces)))
        else:
            raise Errors.OptionError('%s=%s' % (name, value,))

    def set_option_binary(self, binary):
        """Enable or disable the 'binary' option."""
        if not binary:
            self._options['binary'] = (0, None)
            return
        if not gp.GnuplotOpts.recognizes_binary_splot:
            raise Errors.OptionError(
                'Gnuplot.py is currently configured to reject binary data')
        self._options['binary'] = (1, 'binary')
class _NewFileItem(_FileItem):
    """A _FileItem whose contents are written out by the constructor.

    Writes <content> either to a caller-named permanent file or to a
    freshly-created temporary file, which is deleted again when this
    item is garbage-collected.
    """

    def __init__(self, content, filename=None, **keyw):
        binary = keyw.get('binary', 0)
        mode = 'wb' if binary else 'w'
        if filename:
            # Caller chose the name: this is a permanent file.
            self.temp = False
            f = open(filename, mode)
        else:
            self.temp = True
            if hasattr(tempfile, 'mkstemp'):
                # Use the new secure method of creating temporary files:
                (fd, filename,) = tempfile.mkstemp(
                    suffix='.gnuplot', text=(not binary)
                    )
                f = os.fdopen(fd, mode)
            else:
                # for backwards compatibility to pre-2.3:
                filename = tempfile.mktemp()
                f = open(filename, mode)
        f.write(content)
        f.close()
        # For anonymous temporary files, suppress the filename-derived
        # default title unless the caller provided one:
        if self.temp and 'title' not in keyw:
            keyw['title'] = None
        _FileItem.__init__(self, filename, **keyw)

    def __del__(self):
        # Temporary files are owned by this object; remove on collection.
        if self.temp:
            os.unlink(self.filename)
class _InlineFileItem(_FileItem):
    """A _FileItem whose data are fed to gnuplot inline (plot '-')."""

    def __init__(self, content, **keyw):
        # Suppress the default title so the item is not labelled '-':
        keyw.setdefault('title', None)
        if keyw.get('binary', 0):
            raise Errors.OptionError('binary inline data is not supported')
        _FileItem.__init__(self, '-', **keyw)
        # Inline data must end with a newline:
        self.content = content if content[-1] == '\n' else content + '\n'

    def pipein(self, f):
        # Terminate the inline data block with gnuplot's 'e' marker:
        f.write(self.content + 'e\n')
if gp.GnuplotOpts.support_fifo:
    import threading

    class _FIFOWriter(threading.Thread):
        """Create a FIFO (named pipe), write to it, then delete it.

        The writing takes place in a separate thread so that the main
        thread is not blocked.  The idea is that once the writing is
        finished we know that gnuplot is done with the data that were in
        the file so we can delete the file.  This technique removes the
        ambiguity about when the temporary files should be deleted.

        Since the tempfile module does not provide an easy, secure way
        to create a FIFO without race conditions, we instead create a
        temporary directory using mkdtemp() then create the FIFO
        within that directory.  When the writer thread has written the
        full information to the FIFO, it deletes both the FIFO and the
        temporary directory that contained it.
        """

        def __init__(self, content, mode='w'):
            # content: full text/bytes to feed gnuplot.
            # mode: open mode for the FIFO ('w' or 'wb').
            self.content = content
            self.mode = mode
            if hasattr(tempfile, 'mkdtemp'):
                # Make the file within a temporary directory that is
                # created securely:
                self.dirname = tempfile.mkdtemp(suffix='.gnuplot')
                self.filename = os.path.join(self.dirname, 'fifo')
            else:
                # For backwards compatibility pre-2.3, just use
                # mktemp() to create filename:
                self.dirname = None
                self.filename = tempfile.mktemp()
            threading.Thread.__init__(
                self,
                name=('FIFO Writer for %s' % (self.filename,)),
                )
            # The FIFO must exist before gnuplot tries to open it; the
            # actual write blocks in run() until gnuplot starts reading.
            os.mkfifo(self.filename)
            self.start()

        def run(self):
            # Opening a FIFO for writing blocks until a reader appears.
            f = open(self.filename, self.mode)
            f.write(self.content)
            f.close()
            # gnuplot has consumed the data; clean up the FIFO (and the
            # private directory that held it, if one was created).
            os.unlink(self.filename)
            if self.dirname is not None:
                os.rmdir(self.dirname)

    class _FIFOFileItem(_FileItem):
        """A _FileItem based on a FIFO (named pipe).

        This class depends on the availablity of os.mkfifo(), which only
        exists under Unix.
        """

        def __init__(self, content, **keyw):
            # If the user hasn't specified a title, set it to None so that
            # the name of the temporary FIFO is not used:
            if 'title' not in keyw:
                keyw['title'] = None
            _FileItem.__init__(self, '', **keyw)
            self.content = content
            if keyw.get('binary', 0):
                self.mode = 'wb'
            else:
                self.mode = 'w'

        def get_base_command_string(self):
            """Create the gnuplot command for plotting this item.

            The basecommand is different each time because each FIFOWriter
            creates a new FIFO.
            """
            # Create a new FIFO and a thread to write to it.  Retrieve the
            # filename of the FIFO to be used in the basecommand.
            fifo = _FIFOWriter(self.content, self.mode)
            return gp.double_quote_string(fifo.filename)
def File(filename, **keyw):
    """Wrap an existing data file in a _FileItem.

    <filename> must be a string naming an existing file; all keyword
    arguments are forwarded to the _FileItem constructor.
    """
    if type(filename) is str:
        return _FileItem(filename, **keyw)
    raise Errors.OptionError(
        'Argument (%s) must be a filename' % (filename,)
        )
def Data(*data, **keyw):
    """Create and return a _FileItem representing the data from *data.

    Accepts either a single array (whose last axis ranges over the
    values of one data point; a single 1-d array means one value per
    point, plotted against its index) or several identically-shaped
    arrays that are combined point-wise, e.g. 'Data(x, x**2)'.  Inputs
    are converted via utils.float_array.  The data are written to
    gnuplot inline or through a file depending on the options below and
    the platform preferences.

    Keyword arguments:

      'cols=<tuple>' -- write only the specified columns of each data
          point (python-style numbering, starting from 0).

      'inline=<bool>' -- transmit the data to gnuplot 'inline' rather
          than through a temporary file.  Defaults to
          gp.GnuplotOpts.prefer_inline_data.

      'filename=<string>' -- save data to a permanent file.

    Keywords recognized by '_FileItem' can also be used here.
    """
    if len(data) == 1:
        # A single data structure was passed.
        data = utils.float_array(data[0])
        # Special case: a 1-d array means one value per point, plotted
        # against its index.
        if len(data.shape) == 1:
            data = data[:,numpy.newaxis]
    else:
        # Column-by-column arrays (e.g. Data(x, y)); stacking them also
        # verifies that the shapes agree ...
        data = utils.float_array(data)
        # ... then transpose so that the last index selects x vs. y:
        dims = len(data.shape)
        data = numpy.transpose(data, (dims-1,) + tuple(range(dims-1)))
    if 'cols' in keyw:
        cols = keyw.pop('cols')
        if isinstance(cols, int):
            cols = (cols,)
        data = numpy.take(data, cols, -1)

    filename = keyw.pop('filename', None) or None

    if 'inline' in keyw:
        inline = keyw.pop('inline')
        if inline and filename:
            raise Errors.OptionError(
                'cannot pass data both inline and via a file'
                )
    else:
        inline = (not filename) and gp.GnuplotOpts.prefer_inline_data

    # Render the array into gnuplot's text format:
    buf = StringIO()
    utils.write_array(buf, data)
    content = buf.getvalue()
    if inline:
        return _InlineFileItem(content, **keyw)
    elif filename:
        return _NewFileItem(content, filename=filename, **keyw)
    elif gp.GnuplotOpts.prefer_fifo_data:
        return _FIFOFileItem(content, **keyw)
    else:
        return _NewFileItem(content, **keyw)
def GridData(
    data, xvals=None, yvals=None, inline=_unset, filename=None, **keyw
):
    """Return a _FileItem representing a function of two variables.

    'GridData' represents a function that has been tabulated on a
    rectangular grid.  The data are written to a file; no copy is kept
    in memory.

    Arguments:

      'data' -- the data to plot: a 2-d array with dimensions (numx,numy).
      'xvals' -- a 1-d array with dimension 'numx'
      'yvals' -- a 1-d array with dimension 'numy'
      'binary=<bool>' -- send data to gnuplot in binary format?
      'inline=<bool>' -- send data to gnuplot "inline"?
      'filename=<string>' -- save data to a permanent file.

    Note the unusual argument order!  The data are specified *before*
    the x and y values.

    'data' must satisfy 'data[i,j] == f(xvals[i], yvals[j])'.  If
    'xvals' and/or 'yvals' are omitted, integers (starting with 0) are
    used for that coordinate.

    If 'binary=0' the data are written as 'x y f(x,y)' triplets (y
    changes most rapidly), with blank lines between x-blocks so that
    gnuplot's 'splot' can connect the points into a surface.

    If 'binary=1' the data are written in the binary format that
    'splot' understands -- faster and smaller, but not human-readable.
    If your version of gnuplot doesn't support binary format, set
    'gp.GnuplotOpts.recognizes_binary_splot=0' in the appropriate
    gp*.py file.
    """
    # Try to interpret data as an array:
    data = utils.float_array(data)
    try:
        (numx, numy) = data.shape
    except ValueError:
        raise Errors.DataError('data array must be two-dimensional')

    if xvals is None:
        xvals = numpy.arange(numx)
    else:
        xvals = utils.float_array(xvals)
        if xvals.shape != (numx,):
            raise Errors.DataError(
                'The size of xvals must be the same as the size of '
                'the first dimension of the data array')

    if yvals is None:
        yvals = numpy.arange(numy)
    else:
        yvals = utils.float_array(yvals)
        if yvals.shape != (numy,):
            raise Errors.DataError(
                'The size of yvals must be the same as the size of '
                'the second dimension of the data array')

    # Binary defaults to true if recognizes_binary_plot is set;
    # otherwise it is forced to false.
    binary = keyw.get('binary', 1) and gp.GnuplotOpts.recognizes_binary_splot
    keyw['binary'] = binary

    if inline is _unset:
        inline = (
            (not binary) and (not filename)
            and gp.GnuplotOpts.prefer_inline_data
        )
    elif inline and filename:
        raise Errors.OptionError(
            'cannot pass data both inline and via a file'
        )

    # xvals, yvals, and data are now all filled with arrays of data.
    if binary:
        if inline:
            raise Errors.OptionError('binary inline data not supported')

        # write file in binary format

        # It seems that the gnuplot documentation for binary mode
        # disagrees with its actual behavior (as of v. 3.7).  The
        # documentation has the roles of x and y exchanged.  We ignore
        # the documentation and go with the code.

        mout = numpy.zeros((numy + 1, numx + 1), numpy.float32)
        mout[0,0] = numx
        mout[0,1:] = xvals.astype(numpy.float32)
        mout[1:,0] = yvals.astype(numpy.float32)
        try:
            # try copying without the additional copy implied by astype():
            mout[1:,1:] = numpy.transpose(data)
        except:
            # if that didn't work then downcasting from double
            # must be necessary:
            mout[1:,1:] = numpy.transpose(data.astype(numpy.float32))

        # BUG FIX: ndarray.tostring() was deprecated and removed in modern
        # numpy; tobytes() is the byte-identical replacement.
        content = mout.tobytes()
        if (not filename) and gp.GnuplotOpts.prefer_fifo_data:
            return _FIFOFileItem(content, **keyw)
        else:
            return _NewFileItem(content, filename=filename, **keyw)
    else:
        # output data to file as "x y f(x)" triplets.  This requires
        # numy copies of each x value and numx copies of each y value.
        # First reformat the data (local renamed from 'set', which
        # shadowed the builtin):
        triplets = numpy.transpose(
            numpy.array(
                (numpy.transpose(numpy.resize(xvals, (numy, numx))),
                 numpy.resize(yvals, (numx, numy)),
                 data)), (1,2,0))

        # Now output the data with the usual routine.  This will
        # produce data properly formatted in blocks separated by blank
        # lines so that gnuplot can connect the points into a grid.
        f = StringIO()
        utils.write_array(f, triplets)
        content = f.getvalue()

        if inline:
            return _InlineFileItem(content, **keyw)
        elif filename:
            return _NewFileItem(content, filename=filename, **keyw)
        elif gp.GnuplotOpts.prefer_fifo_data:
            return _FIFOFileItem(content, **keyw)
        else:
            return _NewFileItem(content, **keyw)
``` |
[
{
"content": "```python\nfrom collections import OrderedDict\nimport logging\nfrom optparse import make_option\nfrom pprint import pprint\nfrom os import listdir\nfrom os.path import isfile, join\nfrom django.conf import settings\nfrom django.core.management import BaseCommand, call_command\nfrom django.db impo... | [
{
"content": "<|memory_start|>```python\nfrom collections import OrderedDict\nimport logging\nfrom optparse import make_option\nfrom pprint import pprint\nfrom os import listdir\nfrom os.path import isfile, join\nfrom django.conf import settings\nfrom django.core.management import BaseCommand, call_command\nfro... | ```python
from collections import OrderedDict
import logging
from optparse import make_option
from pprint import pprint
from os import listdir
from os.path import isfile, join
from django.conf import settings
from django.core.management import BaseCommand, call_command
from django.db import connection
from django.db.transaction import set_autocommit, commit
from django.utils.text import slugify
from bilanci import tree_models
from bilanci.models import Voce, ValoreBilancio, ImportXmlBilancio
from bilanci.utils import couch, gdocs, email_utils
from bilanci.utils.comuni import FLMapper
from territori.models import Territorio, ObjectDoesNotExist
from .somma_funzioni import SommaFunzioniMixin
class Command(BaseCommand):
    """Import values from the simplified couchdb database into Postgres.

    See `help` and the option definitions below for the accepted flags.
    """

    # Command-line options accepted by this management command.
    option_list = BaseCommand.option_list + (
        make_option('--dry-run',
                    dest='dryrun',
                    action='store_true',
                    default=False,
                    help='Set the dry-run command mode: nothing is written in the couchdb'),
        make_option('--complete',
                    dest='complete',
                    action='store_true',
                    default=False,
                    help='After data import calculate indicators and updates opendata zip file'),
        make_option('--years',
                    dest='years',
                    default='',
                    help='Years to fetch. From 2002 to 2012. Use one of this formats: 2012 or 2003-2006 or 2002,2004,2006'),
        make_option('--cities',
                    dest='cities',
                    default='',
                    help='Cities codes or slugs. Use comma to separate values: Roma,Napoli,Torino or "All"'),
        make_option('--start-from',
                    dest='start_from',
                    default='',
                    help='Start importing cities from such city. Use codfinloc: GARAGUSO--4170470090'),
        make_option('--couchdb-server',
                    dest='couchdb_server',
                    default=settings.COUCHDB_DEFAULT_SERVER,
                    help='CouchDB server alias to connect to (staging | localhost). Defaults to staging.'),
        make_option('--create-tree',
                    dest='create_tree',
                    action='store_true',
                    default=False,
                    help='Force recreating simplified tree leaves from csv file or gdocs (remove all values)'),
        make_option('--force-google',
                    dest='force_google',
                    action='store_true',
                    default=False,
                    help='Force reloading mapping file and simplified subtrees leaves from gdocs (invalidate the csv cache)'),
        make_option('--tree-node-slug',
                    dest='tree_node_slug',
                    default=None,
                    help='Voce slug of the tree model to start the import from. Example: consuntivo-entrate-imposte-e-tasse'),
        make_option('--couch-path',
                    dest='couch_path_string',
                    default=None,
                    help='CouchDB keys sequence (CSV) to identify the import starting point. '
                         'Must be specified together with the treee-node-slug option. '
                         'Example: consuntivo,entrate,imposte'),
        make_option('--append',
                    dest='append',
                    action='store_true',
                    default=False,
                    help='Use the log file appending instead of overwriting (used when launching shell scripts)'),
    )

    help = 'Import values from the simplified couchdb database into a Postgresql server'

    # Shared runtime state, populated by option parsing / handle():
    dryrun = False
    logger = logging.getLogger('management')
    partial_import = False
    couch_path = None
    accepted_bilanci_types = ['preventivo', 'consuntivo']
    somma_funzioni_branches = [
        'preventivo-spese-spese-somma-funzioni',
        'consuntivo-spese-cassa-spese-somma-funzioni',
        'consuntivo-spese-impegni-spese-somma-funzioni',
    ]
    considered_tipo_bilancio = accepted_bilanci_types
    considered_somma_funzioni = somma_funzioni_branches
    # somma_funzioni_slug_baseset: dict that stores the slugs needed to compute somma funzioni branches
    somma_funzioni_slug_baseset = {}
    #if the import is partial root_treenode is the root node of the sub-tree to be imported
    root_treenode = None
    root_descendants = None
    import_set = OrderedDict()
    imported_xml = None
    years = None
    cities_param = None
    cities = None
    voci_dict = None
    couchdb = None
    comuni_dicts = {}
def apply_somma_funzioni_patch(self, voce_sum, vb_filters, vb_dict):
    """
    Compute spese correnti and spese per investimenti for funzioni, and write into spese-somma
    Overwrite values if found.

    NOTE(review): despite the docstring, the code always creates a new
    ValoreBilancio row; it relies on prepare_for_import() having deleted
    the previous values — confirm before reusing elsewhere.
    """
    # collect the two addend value-dicts, one per component voce
    addends = []
    for component in voce_sum.get_components_somma_funzioni():
        if component.slug not in vb_dict:
            self.logger.error("Somma funz: cannot find slug: {} in vb_dict".format(component.slug))
            return
        addends.append(vb_dict[component.slug])
    first, second = addends[0], addends[1]
    ValoreBilancio.objects.create(
        territorio=vb_filters['territorio'],
        anno=vb_filters['anno'],
        voce=voce_sum,
        valore=first['valore'] + second['valore'],
        valore_procapite=first['valore_procapite'] + second['valore_procapite'],
    )
def create_voci_tree(self, force_google):
    """
    Create a Voci tree. If the tree exists, then it is deleted.
    """
    # drop any pre-existing tree before rebuilding it from scratch
    if Voce.objects.count():
        Voce.objects.all().delete()
    # simplified leaves come either from the csv cache or from gdocs
    leaves = gdocs.get_simplified_leaves(force_google=force_google)
    self.create_voci_preventivo_tree(leaves)
    self.create_voci_consuntivo_tree(leaves)
    # finally attach the computed somma-funzioni branches
    SommaFunzioniMixin().create_somma_funzioni()
def create_voci_preventivo_tree(self, simplified_leaves):
    """Build the 'preventivo' subtree of the Voce tree from the simplified leaves."""
    # root node of the whole preventivo subtree
    root = Voce(denominazione='Preventivo', slug='preventivo')
    root.insert_at(None, save=True, position='last-child')
    # the preventivo subsections (slug -> label); entrate and spese are
    # already part of the leaves' breadcrumbs
    sections = OrderedDict([
        ('preventivo-entrate', 'Preventivo entrate'),
        ('preventivo-spese', 'Preventivo spese'),
    ])
    for section_slug in sections:
        # attach every leaf, creating intermediate nodes as needed
        for breadcrumbs in simplified_leaves[section_slug]:
            self.add_leaf(breadcrumbs, root)
def create_voci_consuntivo_tree(self, simplified_leaves):
    """Build the 'consuntivo' subtree, repeating each leaf once per accounting phase."""
    # root node of the consuntivo subtree
    root = Voce(denominazione='Consuntivo', slug='consuntivo')
    root.insert_at(None, save=True, position='last-child')
    sections = OrderedDict([
        ('consuntivo-entrate', {
            'denominazione': u'Consuntivo entrate',
            'sections': [u'Accertamenti', u'Riscossioni in conto competenza', u'Riscossioni in conto residui',
                         u'Cassa']
        }),
        ('consuntivo-spese', {
            'denominazione': u'Consuntivo spese',
            'sections': [u'Impegni', u'Pagamenti in conto competenza', u'Pagamenti in conto residui', u'Cassa']
        }),
    ])
    for section_slug, structure in sections.items():
        for phase_name in structure['sections']:
            for leaf_bc in simplified_leaves[section_slug]:
                # inject the phase right below the subtree root in the breadcrumbs
                crumbs = list(leaf_bc)
                crumbs.insert(1, phase_name)
                self.add_leaf(crumbs, root, section_slug=slugify(phase_name))
def add_leaf(self, breadcrumbs, subtree_node, section_slug=''):
    """
    Add a leaf to the subtree, given the breadcrumbs list.
    Creates the needed nodes in the process.

    NOTE(review): section_slug is currently unused in the body — kept for
    caller compatibility.
    """
    self.logger.info(u"adding leaf {}".format(",".join(breadcrumbs)))
    # skip 'totale' leaves (as totals values are attached to non-leaf nodes)
    if 'totale' in [bc.lower() for bc in breadcrumbs]:
        self.logger.info(u"skipped leaf {}".format(",".join(breadcrumbs)))
        return
    # copy breadcrumbs and remove last elements if empty
    bc = breadcrumbs[:]
    while not bc[-1]:
        bc.pop()
    prefix_slug = subtree_node.slug
    current_node = subtree_node
    # walk down the tree following the breadcrumbs, creating missing nodes;
    # the slug of a new node joins all slugified crumbs up to the current one
    for item in bc:
        if current_node.get_children().filter(denominazione__iexact=item).count() == 0:
            # NOTE(review): bc.index(item) finds the FIRST occurrence of the
            # value, so a duplicated crumb name would yield a truncated slug
            slug = u"{0}-{1}".format(prefix_slug, u"-".join(slugify(unicode(i)) for i in bc[0:bc.index(item) + 1]))
            node = Voce(denominazione=item, slug=slug)
            node.insert_at(current_node, save=True, position='last-child')
            # stop as soon as the created node is the leaf itself
            if bc[-1] == item:
                return
        else:
            node = current_node.get_children().get(denominazione__iexact=item)
        current_node = node
def couch_connect(self, couchdb_server):
    """Open the connection to the simplified couchdb database and store it on self."""
    alias = couchdb_server
    # reject aliases not listed in the project settings
    if alias not in settings.COUCHDB_SERVERS:
        raise Exception("Unknown couchdb server alias.")
    self.couchdb = couch.connect(
        settings.COUCHDB_SIMPLIFIED_NAME,
        couchdb_server_settings=settings.COUCHDB_SERVERS[alias]
    )
def set_years(self, years):
    """
    Parse the years option and set self.years.

    Accepts either a range ("2003-2006", inclusive) or a CSV list
    ("2003,2004,2010"). In both cases only years falling within
    settings.APP_START_YEAR .. settings.APP_END_YEAR are kept.

    Raises Exception when the parameter is missing or no valid year remains.
    """
    if not years:
        raise Exception("Missing years parameter")
    if "-" in years:
        # range form: start-end (inclusive on both sides)
        (start_year, end_year) = years.split("-")
        candidates = range(int(start_year), int(end_year) + 1)
    else:
        # CSV form: one or more years separated by commas
        candidates = [int(y.strip()) for y in years.split(",")]
    # keep only the years the application actually covers; the original code
    # applied this bound check to the CSV form only, letting out-of-range
    # years slip through the range form
    years_list = [y for y in candidates
                  if settings.APP_START_YEAR <= y <= settings.APP_END_YEAR]
    if not years_list:
        raise Exception("No suitable year found in {0}".format(years))
    self.years = years_list
def set_cities(self, cities_codes, start_from):
    """Resolve the cities / start-from options into self.cities (a Territorio queryset)."""
    mapper = FLMapper()
    if cities_codes:
        # explicit list of codes/slugs (or "all")
        cities_finloc = mapper.get_cities(cities_codes, logger=self.logger)
    elif start_from:
        # no explicit list: take the complete list and start from a given city
        complete_list = mapper.get_cities('all', logger=self.logger)
        try:
            position = complete_list.index(start_from)
        except ValueError:
            raise Exception("Start-from city not found in cities complete list, use name--cod_finloc. "
                            "Example: ZUNGRI--4181030500")
        self.logger.info("Processing cities starting from: {0}".format(start_from))
        cities_finloc = complete_list[position:]
    else:
        raise Exception("Missing cities parameter or start-from parameter")
    # the last 10 chars of the finloc code identify the city; map them to slugs
    slug_list = [
        Territorio.objects.get(territorio="C", cod_finloc__endswith=code[-10:]).slug
        for code in cities_finloc
    ]
    self.cities = Territorio.objects.filter(territorio="C", slug__in=slug_list)
def checks_partial_import(self, tree_node_slug, couch_path_string):
    """
    Configure the command for a partial (subtree) import: validates the
    tree node slug, stores the couchdb path and narrows down the
    considered tipo bilancio and somma-funzioni branches.
    """
    # based on the type of import set the type of bilancio that is considered
    # sets branches of somma funzioni considered by the import
    self.partial_import = True
    #depending on tree node slug, couch path string sets considered tipo bilancio
    self.couch_path = [unicode(x) for x in couch_path_string.split(",")]
    # check that tree_node_slug exists in the Voce tree
    try:
        self.root_treenode = Voce.objects.get(slug=tree_node_slug)
    except ObjectDoesNotExist:
        self.logger.error(
            "Voce with slug:{0} not present in Voce table. "
            "Run update_bilancio_tree before running couch2pg".format(
                tree_node_slug))
        exit()
    self.root_descendants = self.root_treenode.get_descendants(include_self=True)
    # the ancestor whose slug is 'preventivo' or 'consuntivo' tells which
    # tipo bilancio the subtree belongs to; note this yields a single slug
    # string, not a list like the class-level default
    self.considered_tipo_bilancio = self.root_treenode. \
        get_ancestors(include_self=True, ascending=False). \
        get(slug__in=self.accepted_bilanci_types).slug
    # checks which branches of somma-funzioni are interested by the import
    self.considered_somma_funzioni = self.root_descendants. \
        filter(slug__in=self.somma_funzioni_branches). \
        values_list('slug', flat=True)
def prepare_for_import(self):
    """
    Build the import plan and clear the target rows before importing.
    """
    ##
    # prepare_for_import
    # 1) creates the import_set: the complete dict of cities, years and tipo bilancio that will be imported by the
    # task
    # 2) creates values_to_delete: a queryset that includes all ValoriBilancio
    # that correspond to the bilancio selected by the import
    # 3) gets the info about Xml import and removes the keys relative to cities, years and tipo_bilancio
    # that have been imported via Xml
    # 4) excludes from values_to_delete the values of bilancio imported via XML: they won't be deleted
    # 5) fills somma_funzioni_slug_baseset with a dict that associates the slug of the root node of a
    # somma-funzioni branch with the set of slugs needed to create it
    # creates a dict with year as a key and value: a list of considered_bilancio_type(s)
    years_dict = OrderedDict((year, self.considered_tipo_bilancio) for year in self.years)
    # creates a dict in which for each city considered the value is the previous dict
    # NOTE(review): the same years_dict object is shared by every city — fine
    # as long as it is only read downstream
    self.import_set = OrderedDict((territorio, years_dict) for territorio in self.cities)
    # construct values_to_delete
    values_to_delete = ValoreBilancio.objects.filter(territorio__in=self.cities, anno__in=self.years)
    if self.partial_import:
        values_to_delete = values_to_delete.filter(voce__in=self.root_descendants)
    # get data about ImportXml: if there is data that has been imported from XML for a city/ year
    # then the couch import must NOT overwrite that data
    self.imported_xml = ImportXmlBilancio.objects. \
        filter(territorio__in=self.cities, anno__in=self.years, tipologia__in=self.considered_tipo_bilancio). \
        order_by('territorio', 'anno')
    if len(self.imported_xml) > 0:
        for i in self.imported_xml:
            self.logger.warning(
                "BILANCIO:{} YEAR:{} CITY:{} will have to be reimported again: it was imported with xml". \
                format(i.tipologia.title(), i.anno, i.territorio.denominazione))
    # deletes ValoriBilanci that will be imported afterwards: this speeds up the import
    if self.partial_import:
        self.logger.info("Deleting values for selected cities, years and subtree")
    else:
        self.logger.info("Deleting values for selected cities, years")
    if not self.dryrun and ValoreBilancio.objects.all().count() > 0:
        if self.partial_import is False and self.cities_param.lower() == 'all':
            # sql query to delete all values in ValoreBilancio table: this should cut the time
            cursor = connection.cursor()
            cursor.execute("TRUNCATE bilanci_valorebilancio", )
        else:
            values_to_delete.delete()
        self.logger.info("Done deleting")
    # creates somma_funzioni_slug_baseset
    for slug in self.considered_somma_funzioni:
        components = Voce.objects.get(slug=slug).get_components_somma_funzioni()
        descendants = []
        for c in components:
            descendants.extend(c.get_descendants(include_self=True))
        self.somma_funzioni_slug_baseset[slug] = descendants
def handle(self, *args, **options):
    """
    Entry point of the couch2pg management command: reads the options,
    connects to couchdb, prepares the import set, copies the budget values
    for the selected cities/years into postgres (applying the somma-funzioni
    patch), then deals with XML-sourced bilanci and optional completion.
    """
    # NOTE(review): verbosity is compared as a string; depending on the
    # Django version it may arrive as an int — confirm before upgrading
    verbosity = options['verbosity']
    if verbosity == '0':
        self.logger.setLevel(logging.ERROR)
    elif verbosity == '1':
        self.logger.setLevel(logging.WARNING)
    elif verbosity == '2':
        self.logger.setLevel(logging.INFO)
    elif verbosity == '3':
        self.logger.setLevel(logging.DEBUG)
    self.dryrun = options['dryrun']
    complete = options['complete']
    force_google = options['force_google']
    create_tree = options['create_tree']
    tree_node_slug = options['tree_node_slug']
    couch_path_string = options['couch_path_string']
    # the two partial-import options must be given together (either both or none)
    if tree_node_slug and couch_path_string is None or couch_path_string and tree_node_slug is None:
        self.logger.error("Couch path and tree node must be both specified. Quitting")
        exit()
    if options['append'] is True:
        self.logger = logging.getLogger('management_append')
    ###
    # connect to couchdb
    ###
    self.couch_connect(options['couchdb_server'])
    ###
    # cities
    ###
    self.cities_param = options['cities']
    start_from = options['start_from']
    self.set_cities(self.cities_param, start_from)
    if len(self.cities) == 0:
        self.logger.info("No cities to process. Quit")
        return
    # check if debug is active: the task may fail
    if settings.DEBUG is True and settings.INSTANCE_TYPE != 'development' and len(self.cities) > 4000:
        self.logger.error("DEBUG settings is True, task will fail. Disable DEBUG and retry")
        exit()
    ###
    # set considered years
    ###
    self.set_years(options['years'])
    # if it's a partial import
    # * checks which kind of bilancio is considered
    # * checks which branch of somma-funzioni has to be calculated
    if tree_node_slug and couch_path_string:
        tree_node_slug = unicode(tree_node_slug)
        couch_path_string = unicode(couch_path_string)
        self.checks_partial_import(tree_node_slug, couch_path_string)
    # create the tree if it does not exist or if forced to do so
    if create_tree or Voce.objects.count() == 0:
        if not self.dryrun:
            self.create_voci_tree(force_google=force_google)
    # build the map of slug to pk for the Voce tree
    self.voci_dict = Voce.objects.get_dict_by_slug()
    # considering years,cities and limitations set creates a comprehensive map of all bilancio to be imported,
    # deletes old values before import
    self.prepare_for_import()
    # counter only drives the periodic "Reached city ..." progress log below
    counter = 100
    for territorio, city_years in self.import_set.iteritems():
        city_finloc = territorio.cod_finloc
        # get all budgets data for the city
        city_budget = self.couchdb.get(city_finloc)
        if city_budget is None:
            # if city budget is not found, try again taking out apostrophe and re-slugging, this deals with
            # slug name changes from finanza locale
            if "'" in territorio.nome:
                nome_senza_apostrofo = territorio.nome.replace("'", "")
                finloc_number = city_finloc[-10:]
                city_finloc_noapostrophe = u"{}--{}".format(slugify(nome_senza_apostrofo), finloc_number).upper()
                city_budget = self.couchdb.get(city_finloc_noapostrophe)
                if city_budget is None:
                    self.logger.warning(u"Document '{}' or '{}' not found in couchdb instance. Skipping.".format(city_finloc, city_finloc_noapostrophe))
                    continue
            else:
                self.logger.warning(u"Document '{}' not found in couchdb instance. Skipping.".format(city_finloc))
                continue
        self.logger.debug(u"City of {0}".format(city_finloc))
        # log progress every 100 processed cities
        if counter == 100:
            self.logger.info(u"Reached city of '{0}', continuing...".format(city_finloc))
            counter = 0
        else:
            counter += 1
        for year, certificati_to_import in city_years.iteritems():
            if str(year) not in city_budget:
                self.logger.warning(u" {} - {} not found. Skip".format(city_finloc, year))
                continue
            # POPULATION
            # fetch valid population, starting from this year
            # if no population found, set it to None, as not to break things
            try:
                (pop_year, population) = territorio.nearest_valid_population(year)
            except TypeError:
                population = None
            # self.logger.debug("::Population: {0}".format(population))
            # build a BilancioItem tree, out of the couch-extracted dict
            # for the given city and year
            # add the totals by extracting them from the dict, or by computing
            city_year_budget_dict = city_budget[str(year)]
            if self.partial_import is True:
                self.logger.info(u"- Processing year: {}, subtree: {}".format(year, tree_node_slug))
                # start from a custom node
                path_not_found = False
                city_year_budget_node_dict = city_year_budget_dict.copy()
                # get the starting node in couchdb data
                for k in self.couch_path:
                    try:
                        city_year_budget_node_dict = city_year_budget_node_dict[k]
                    except KeyError:
                        self.logger.warning(
                            "Couch path:{0} not present for {1}, anno:{2}".format(self.couch_path,
                                                                                  territorio.cod_finloc,
                                                                                  str(year)))
                        path_not_found = True
                        break
                # if data path is found in the couch document, write data into postgres db
                if path_not_found is False:
                    city_year_node_tree_patch = tree_models.make_tree_from_dict(
                        city_year_budget_node_dict, self.voci_dict, path=[tree_node_slug],
                        population=population
                    )
                    # writes new sub-tree
                    if not self.dryrun:
                        tree_models.write_tree_to_vb_db(territorio, year, city_year_node_tree_patch, self.voci_dict)
            else:
                # import tipo_bilancio considered
                # normally is preventivo and consuntivo
                # otherwise only one of them
                for tipo_bilancio in certificati_to_import:
                    certificato_tree = tree_models.make_tree_from_dict(
                        city_year_budget_dict[tipo_bilancio], self.voci_dict, path=[unicode(tipo_bilancio)],
                        population=population
                    )
                    if len(certificato_tree.children) == 0:
                        continue
                    self.logger.debug(u"- Processing year: {} bilancio: {}".format(year, tipo_bilancio))
                    if not self.dryrun:
                        tree_models.write_tree_to_vb_db(territorio, year, certificato_tree, self.voci_dict)
            # applies somma-funzioni patch only to the interested somma-funzioni branches (if any)
            if len(self.considered_somma_funzioni) > 0:
                self.logger.debug("Somma funzioni patch")
                vb_filters = {
                    'territorio': territorio,
                    'anno': year,
                }
                for somma_funzioni_branch in self.considered_somma_funzioni:
                    # get data for somma-funzioni patch, getting only the needed ValoreBilancio using the
                    # somma_funzioni_slug_baseset
                    needed_slugs = self.somma_funzioni_slug_baseset[somma_funzioni_branch]
                    vb = ValoreBilancio.objects. \
                        filter(**vb_filters). \
                        filter(voce__slug__in=needed_slugs). \
                        values_list('voce__slug', 'valore', 'valore_procapite')
                    if len(vb) == 0:
                        self.logger.debug("Skipping {} branch: no values in db".format(somma_funzioni_branch))
                        continue
                    vb_dict = dict((v[0], {'valore': v[1], 'valore_procapite': v[2]}) for v in vb)
                    if not self.dryrun:
                        for voce_slug in Voce.objects.get(slug=somma_funzioni_branch).get_descendants(
                                include_self=True):
                            self.apply_somma_funzioni_patch(voce_slug, vb_filters, vb_dict)
                    del vb_dict
            # actually save data into posgres
            self.logger.debug("Write valori bilancio to postgres")
    self.logger.info("Done importing couchDB values into postgres")
    if self.cities_param.lower() != 'all':
        for bilancio_xml in self.imported_xml:
            self.logger.info(
                "IMPORTANT: Re-import XML bilancio {},{},{}".format(bilancio_xml.territorio, bilancio_xml.anno,
                                                                    bilancio_xml.tipologia))
    else:
        # directly import xml files in default folder for bilancio XML
        xml_path = settings.OPENDATA_XML_ROOT
        xml_files = [f for f in listdir(xml_path) if isfile(join(xml_path, f))]
        for f in xml_files:
            self.logger.info(u"Import XML bilancio file:'{}'".format(f))
            call_command('xml2pg', verbosity=1, file=f, interactive=False)
        if len(xml_files) != len(self.imported_xml):
            self.logger.error(
                "Found {} Xml files compared to {} objs in ImportXML table in DB!!".format(len(xml_files),
                                                                                           len(self.imported_xml)))
    if complete and not self.dryrun and not self.partial_import:
        ##
        # complete the import with medians, indicators and update opendata (zip files)
        ##
        self.logger.info(u"Update indicators medians")
        call_command('data_completion', verbosity=2, years=options['years'], cities=options['cities'],
                     interactive=False)
        email_utils.send_notification_email(
            msg_string="Couch2pg, update opendata, indicators and medians has finished.")
    else:
        email_utils.send_notification_email(msg_string="Couch2pg has finished.")
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\n# coding: utf-8\nimport sys\n\nfrom setuptools import setup, find_packages\n\ninstall_requires = [\n 'Flask',\n 'PyYAML',\n 'cairocffi',\n 'pyparsing>=1.5.7',\n 'pytz',\n 'six',\n 'tzlocal',\n]\n\nif sys.version_... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\n# coding: utf-8\nimport sys\n\nfrom setuptools import setup, find_packages\n\ninstall_requires = [\n 'Flask',\n 'PyYAML',\n 'cairocffi',\n 'pyparsing>=1.5.7',\n 'pytz',\n 'six',\n 'tzlocal',\n]\n\... | ```python
# coding: utf-8
"""Packaging script for graphite-api (the rendering HTTP API without the web UI)."""
import sys

from setuptools import setup, find_packages

# Runtime dependencies common to all supported interpreters.
install_requires = [
    'Flask',
    'PyYAML',
    'cairocffi',
    'pyparsing>=1.5.7',
    'pytz',
    'six',
    'tzlocal',
]

# Python 2.6 needs stdlib backports and the last structlog release that
# still supported it.
if sys.version_info < (2, 7):
    install_requires.append('importlib')
    install_requires.append('logutils')
    install_requires.append('ordereddict')
    install_requires.append('structlog<=16.0.0')
else:
    install_requires.append('structlog')

with open('README.rst') as f:
    long_description = f.read()

setup(
    name='graphite-api',
    version='1.1.3',
    url='https://github.com/brutasse/graphite-api',
    author="Bruno Renié, based on Chris Davis's graphite-web",
    author_email='bruno@renie.fr',
    license='Apache Software License 2.0',
    description=('Graphite-web, without the interface. '
                 'Just the rendering HTTP API.'),
    long_description=long_description,
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    install_requires=install_requires,
    extras_require={
        'sentry': ['raven[flask]'],
        'cyanite': ['cyanite'],
        'cache': ['Flask-Cache'],
        'statsd': ['statsd'],
    },
    zip_safe=False,
    platforms='any',
    # setuptools requires ``classifiers`` to be a list of strings; the
    # original tuple triggers a warning (an error on recent setuptools).
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Flask',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Scientific/Engineering :: Information Analysis',
        'Topic :: Scientific/Engineering :: Visualization',
        'Topic :: System :: Monitoring',
    ],
    test_suite='tests',
)
``` |
[
{
"content": "```python\n'''\nMarkDown format generator\n'''\n\nclass MarkDown:\n\n 'convert raw text to markdown syntax'\n\n def __init__(self):\n self.escape_table = {\"\\\\\": \"\\\\\\\\\", \"`\": \"\\`\", \n \"*\": \"\\*\", \"_\": \"\\_\",\n ... | [
{
"content": "<|memory_start|>```python\n'''\nMarkDown format generator\n'''\n\nclass MarkDown:\n\n 'convert raw text to markdown syntax'\n\n def __init__(self):\n self.escape_table = {\"\\\\\": \"\\\\\\\\\", \"`\": \"\\`\", \n \"*\": \"\\*\", \"_\": \"\\_\",\n ... | ```python
'''
MarkDown format generator
'''
class MarkDown:

    """Convert raw text to MarkDown syntax."""

    def __init__(self):
        # characters that must be backslash-escaped in markdown output
        self.escape_table = {"\\": "\\\\", "`": "\`",
                             "*": "\*", "_": "\_",
                             "{": "\{", "}": "\}",
                             "[": "\[", "]": "\]",
                             "(": "\(", ")": "\)",
                             "#": "\#", "+": "\+",
                             "-": "\-", ".": "\.",
                             "|": "\|"
                             }

    def __escape(self, data):
        """Escape every markdown special character in *data*."""
        table = self.escape_table
        return "".join(table.get(ch, ch) for ch in data)

    def __convert_lines(self, text='', prefix='', suffix='', olist=False):
        """Decorate a string or each element of a list (mutated in place)."""
        def decorate(raw, index):
            if olist:
                return str(index + 1) + '. ' + self.__escape(raw)
            return prefix + self.__escape(raw) + suffix

        if type(text) is str:
            return decorate(text, 0)
        elif type(text) is list:
            # NOTE: the caller's list is intentionally updated in place and
            # returned, matching the historical behaviour of this class
            for pos in range(len(text)):
                text[pos] = decorate(text[pos], pos)
            return text
        return ''

    def text(self, text):
        return self.__convert_lines(text)

    def error(self, text):
        return self.__convert_lines(text)

    def title(self, text):
        return self.__convert_lines(text, '##')

    def subtitle(self, text):
        return self.__convert_lines(text, '###')

    def ssubtitle(self, text):
        return self.__convert_lines(text, '####')

    def bold(self, text):
        return self.__convert_lines(text, '**', '**')

    def line_breaker(self, count=1):
        if count > 1:
            return ["-------------" for _ in range(count)]
        return "-------------"

    def reference(self, text):
        return self.__convert_lines(text, '>')

    def ordered_list(self, data):
        return self.__convert_lines(data, olist=True)

    def unordered_list(self, data):
        return self.__convert_lines(data, '- ')
``` |
[
{
"content": "```python\n#!/usr/bin/env vpython\n#\n# Copyright 2018 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\nimport argparse\nimport collections\nimport json\nimport logging\nimport os\nimport pipes\nim... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/env vpython\n#\n# Copyright 2018 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\nimport argparse\nimport collections\nimport json\nimport logging\nimport os\n... | ```python
#!/usr/bin/env vpython
#
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import collections
import json
import logging
import os
import pipes
import re
import signal
import socket
import sys
import tempfile
# The following non-std imports are fetched via vpython. See the list at
# //.vpython
import dateutil.parser # pylint: disable=import-error
import jsonlines # pylint: disable=import-error
import psutil # pylint: disable=import-error
# Absolute path of the Chromium checkout root (two levels above this file).
CHROMIUM_SRC_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..', '..'))

# Use the android test-runner's gtest results support library for generating
# output json ourselves.
sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'android'))
from pylib.base import base_test_result  # pylint: disable=import-error
from pylib.results import json_results  # pylint: disable=import-error

# Use luci-py's subprocess42.py
sys.path.insert(
    0, os.path.join(CHROMIUM_SRC_PATH, 'tools', 'swarming_client', 'utils'))
import subprocess42  # pylint: disable=import-error

# Default location of the ChromeOS SDK cache consumed by cros_run_test.
DEFAULT_CROS_CACHE = os.path.abspath(
    os.path.join(CHROMIUM_SRC_PATH, 'build', 'cros_cache'))
# Chromite checkout and the cros_run_test wrapper it provides.
CHROMITE_PATH = os.path.abspath(
    os.path.join(CHROMIUM_SRC_PATH, 'third_party', 'chromite'))
CROS_RUN_TEST_PATH = os.path.abspath(
    os.path.join(CHROMITE_PATH, 'bin', 'cros_run_test'))

# GN target that corresponds to the cros browser sanity test.
SANITY_TEST_TARGET = 'cros_browser_sanity_test'

# This is a special hostname that resolves to a different DUT in the lab
# depending on which lab machine you're on.
LAB_DUT_HOSTNAME = 'variable_chromeos_device_hostname'

# Device/VM paths that get copied back into --logs-dir after a run.
SYSTEM_LOG_LOCATIONS = [
    '/var/log/chrome/',
    # Note that journal/ will contain journald's serialized logs, which aren't
    # human-readable. To inspect them, download the logs locally and run
    # `journalctl -D ...`.
    '/var/log/journal/',
    '/var/log/messages',
    '/var/log/power_manager/',
    '/var/log/ui/',
]

# Short link to documentation on debugging Tast failures.
TAST_DEBUG_DOC = 'https://bit.ly/2LgvIXz'
class TestFormatError(Exception):
  """Raised when the requested test options form an invalid combination."""
  pass
class RemoteTest(object):
  """Base class for tests launched on a ChromeOS device or VM via cros_run_test."""

  # This is a basic shell script that can be appended to in order to invoke the
  # test on the device.
  BASIC_SHELL_SCRIPT = [
      '#!/bin/sh',

      # /home and /tmp are mounted with "noexec" in the device, but some of our
      # tools and tests use those dirs as a workspace (eg: vpython downloads
      # python binaries to ~/.vpython-root and /tmp/vpython_bootstrap).
      # /usr/local/tmp doesn't have this restriction, so change the location of
      # the home and temp dirs for the duration of the test.
      'export HOME=/usr/local/tmp',
      'export TMPDIR=/usr/local/tmp',
  ]

  def __init__(self, args, unknown_args):
    """Builds the base cros_run_test command line from parsed args."""
    self._additional_args = unknown_args
    self._path_to_outdir = args.path_to_outdir
    self._test_launcher_summary_output = args.test_launcher_summary_output
    self._logs_dir = args.logs_dir
    self._use_vm = args.use_vm

    # Subclasses may override retry/timeout behavior.
    self._retries = 0
    self._timeout = None

    # The location on disk of a shell script that can be optionally used to
    # invoke the test on the device. If it's not set, we assume self._test_cmd
    # contains the test invocation.
    self._on_device_script = None

    self._test_cmd = [
        CROS_RUN_TEST_PATH,
        '--board',
        args.board,
        '--cache-dir',
        args.cros_cache,
    ]
    if args.use_vm:
      self._test_cmd += [
          '--start',
          # Don't persist any filesystem changes after the VM shutsdown.
          '--copy-on-write',
          '--device',
          'localhost'
      ]
    else:
      self._test_cmd += [
          '--device', args.device if args.device else LAB_DUT_HOSTNAME
      ]
    if args.logs_dir:
      for log in SYSTEM_LOG_LOCATIONS:
        self._test_cmd += ['--results-src', log]
      self._test_cmd += ['--results-dest-dir', args.logs_dir]

    # This environment variable is set for tests that have been instrumented
    # for code coverage. Its incoming value is expected to be a location
    # inside a subdirectory of result_dir above. This is converted to an
    # absolute path that the vm is able to write to, and passed in the
    # --results-src flag to cros_run_vm_test for copying out of the vm before
    # its termination.
    self._llvm_profile_var = None
    if os.environ.get('LLVM_PROFILE_FILE'):
      _, llvm_profile_file = os.path.split(os.environ['LLVM_PROFILE_FILE'])
      self._llvm_profile_var = '/tmp/profraw/%s' % llvm_profile_file

      # This should make the vm test runner exfil the profiling data.
      self._test_cmd += ['--results-src', '/tmp/profraw']

    self._test_env = setup_env()

  @property
  def suite_name(self):
    raise NotImplementedError('Child classes need to define suite name.')

  @property
  def test_cmd(self):
    # Full cros_run_test invocation built so far.
    return self._test_cmd

  def write_test_script_to_disk(self, script_contents):
    """Writes the given shell lines to an executable temp file in the outdir."""
    # Since we're using an on_device_script to invoke the test, we'll need to
    # set cwd.
    self._test_cmd += [
        '--remote-cmd',
        '--cwd',
        os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
    ]
    logging.info('Running the following command on the device:')
    logging.info('\n' + '\n'.join(script_contents))
    fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir)
    os.fchmod(fd, 0755)
    with os.fdopen(fd, 'wb') as f:
      f.write('\n'.join(script_contents) + '\n')
    return tmp_path

  def run_test(self):
    """Runs the test (with retries), returning the final exit code."""
    # Traps SIGTERM and kills all child processes of cros_run_test when it's
    # caught. This will allow us to capture logs from the device if a test hangs
    # and gets timeout-killed by swarming. See also:
    # https://chromium.googlesource.com/infra/luci/luci-py/+/master/appengine/swarming/doc/Bot.md#graceful-termination_aka-the-sigterm-and-sigkill-dance
    test_proc = None

    def _kill_child_procs(trapped_signal, _):
      logging.warning('Received signal %d. Killing child processes of test.',
                      trapped_signal)
      if not test_proc or not test_proc.pid:
        # This shouldn't happen?
        logging.error('Test process not running.')
        return
      for child in psutil.Process(test_proc.pid).children():
        logging.warning('Killing process %s', child)
        child.kill()

    signal.signal(signal.SIGTERM, _kill_child_procs)

    for i in xrange(self._retries + 1):
      logging.info('########################################')
      logging.info('Test attempt #%d', i)
      logging.info('########################################')
      test_proc = subprocess42.Popen(
          self._test_cmd,
          stdout=sys.stdout,
          stderr=sys.stderr,
          env=self._test_env)
      try:
        test_proc.wait(timeout=self._timeout)
      except subprocess42.TimeoutExpired:
        logging.error('Test timed out. Sending SIGTERM.')
        # SIGTERM the proc and wait 10s for it to close.
        test_proc.terminate()
        try:
          test_proc.wait(timeout=10)
        except subprocess42.TimeoutExpired:
          # If it hasn't closed in 10s, SIGKILL it.
          logging.error('Test did not exit in time. Sending SIGKILL.')
          test_proc.kill()
          test_proc.wait()
      logging.info('Test exitted with %d.', test_proc.returncode)
      if test_proc.returncode == 0:
        break

    ret = self.post_run(test_proc.returncode)
    # Allow post_run to override test proc return code. (Useful when the host
    # side Tast bin returns 0 even for failed tests.)
    if ret is not None:
      return ret

    return test_proc.returncode

  def post_run(self, return_code):
    """Cleans up the on-device script and writes a one-test summary json."""
    if self._on_device_script:
      os.remove(self._on_device_script)
    # Create a simple json results file for a test run. The results will contain
    # only one test (suite_name), and will either be a PASS or FAIL depending on
    # return_code.
    if self._test_launcher_summary_output:
      result = (
          base_test_result.ResultType.FAIL
          if return_code else base_test_result.ResultType.PASS)
      suite_result = base_test_result.BaseTestResult(self.suite_name, result)
      run_results = base_test_result.TestRunResults()
      run_results.AddResult(suite_result)
      with open(self._test_launcher_summary_output, 'w') as f:
        json.dump(json_results.GenerateResultsDict([run_results]), f)
class TastTest(RemoteTest):
def __init__(self, args, unknown_args):
  """Stores Tast-specific options; requires --logs-dir unless collecting coverage."""
  super(TastTest, self).__init__(args, unknown_args)

  self._suite_name = args.suite_name
  self._tests = args.tests
  self._conditional = args.conditional
  if not self._llvm_profile_var and not self._logs_dir:
    # The host-side Tast bin returns 0 when tests fail, so we need to capture
    # and parse its json results to reliably determine if tests fail.
    raise TestFormatError(
        'When using the host-side Tast bin, "--logs-dir" must be passed in '
        'order to parse its results.')
@property
def suite_name(self):
  """Name of the Tast suite as passed via --suite-name."""
  return self._suite_name
def build_test_command(self):
if '--gtest_filter=%s' % self.suite_name in self._additional_args:
logging.info('GTest filtering not supported for tast tests. The '
'--gtest_filter arg will be ignored.')
self._additional_args.remove('--gtest_filter=%s' % self.suite_name)
if any(arg.startswith('--gtest_repeat') for arg in self._additional_args):
logging.info(
'--gtest_repeat not supported for tast tests. The arg will be '
'ignored.')
self._additional_args = [
arg for arg in self._additional_args
if not arg.startswith('--gtest_repeat')
]
if self._additional_args:
logging.error(
'Tast tests should not have additional args. These will be '
'ignored: %s', self._additional_args)
self._test_cmd += [
'--deploy',
'--mount',
'--build-dir',
os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
]
# Coverage tests require some special pre-test setup, so use an
# on_device_script in that case. For all other tests, use cros_run_test's
# built-in '--tast' option. This gives us much better results reporting.
if self._llvm_profile_var:
# Build the shell script that will be used on the device to invoke the
# test.
device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
device_test_script_contents += [
'echo "LLVM_PROFILE_FILE=%s" >> /etc/chrome_dev.conf' %
(self._llvm_profile_var)
]
local_test_runner_cmd = ['local_test_runner', '-waituntilready']
if self._use_vm:
# If we're running tests in VMs, tell the test runner to skip tests that
# aren't compatible.
local_test_runner_cmd.append('-extrauseflags=tast_vm')
if self._conditional:
local_test_runner_cmd.append(pipes.quote(self._conditional))
else:
local_test_runner_cmd.extend(self._tests)
device_test_script_contents.append(' '.join(local_test_runner_cmd))
self._on_device_script = self.write_test_script_to_disk(
device_test_script_contents)
self._test_cmd += [
'--files',
os.path.relpath(self._on_device_script), '--',
'./' + os.path.relpath(self._on_device_script, self._path_to_outdir)
]
else:
# Mounting the browser gives it enough disk space to not need stripping,
# but only for browsers not instrumented with code coverage.
self._test_cmd.append('--nostrip')
# Capture tast's results in the logs dir as well.
if self._logs_dir:
self._test_cmd += [
'--results-dir',
self._logs_dir,
]
if self._conditional:
# Don't use pipes.quote() here. Something funky happens with the arg
# as it gets passed down from cros_run_test to tast. (Tast picks up the
# escaping single quotes and complains that the conditional "must be
# within parentheses".)
self._test_cmd.append('--tast=%s' % self._conditional)
else:
self._test_cmd.append('--tast')
self._test_cmd.extend(self._tests)
def post_run(self, return_code):
# If we don't need to parse the host-side Tast tool's results, fall back to
# the parent method's default behavior.
if self._llvm_profile_var:
return super(TastTest, self).post_run(return_code)
tast_results_path = os.path.join(self._logs_dir, 'streamed_results.jsonl')
if not os.path.exists(tast_results_path):
logging.error(
'Tast results not found at %s. Falling back to generic result '
'reporting.', tast_results_path)
return super(TastTest, self).post_run(return_code)
# See the link below for the format of the results:
# https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/cmd/tast/run#TestResult
with jsonlines.open(tast_results_path) as reader:
tast_results = collections.deque(reader)
suite_results = base_test_result.TestRunResults()
for test in tast_results:
errors = test['errors']
start, end = test['start'], test['end']
# Use dateutil to parse the timestamps since datetime can't handle
# nanosecond precision.
duration = dateutil.parser.parse(end) - dateutil.parser.parse(start)
duration_ms = duration.total_seconds() * 1000
if bool(test['skipReason']):
result = base_test_result.ResultType.SKIP
elif errors:
result = base_test_result.ResultType.FAIL
else:
result = base_test_result.ResultType.PASS
error_log = ''
if errors:
# See the link below for the format of these errors:
# https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/tast/testing#Error
for err in errors:
error_log += err['stack'].encode('utf-8') + '\n'
error_log += (
"\nIf you're unsure why this test failed, consult the steps "
'outlined in\n%s\n' % TAST_DEBUG_DOC)
base_result = base_test_result.BaseTestResult(
test['name'], result, duration=duration_ms, log=error_log)
suite_results.AddResult(base_result)
if self._test_launcher_summary_output:
with open(self._test_launcher_summary_output, 'w') as f:
json.dump(json_results.GenerateResultsDict([suite_results]), f)
if not suite_results.DidRunPass():
return 1
elif return_code:
logging.warning(
'No failed tests found, but exit code of %d was returned from '
'cros_run_test.', return_code)
return return_code
return 0
class GTestTest(RemoteTest):
  """Runs a device-side gtest binary via a generated on-device shell script.

  The script exports coverage/vpython env setup, optionally stops the UI
  service, and finally execs the test binary with sharding flags. Runtime
  data deps (minus host-only paths) are pushed with repeated --files args.
  """

  # The following list corresponds to paths that should not be copied over to
  # the device during tests. In other words, these files are only ever used on
  # the host.
  _FILE_IGNORELIST = [
      re.compile(r'.*build/android.*'),
      re.compile(r'.*build/chromeos.*'),
      re.compile(r'.*build/cros_cache.*'),
      # The following matches anything under //testing/ that isn't under
      # //testing/buildbot/filters/.
      re.compile(r'.*testing/(?!buildbot/filters).*'),
      re.compile(r'.*third_party/chromite.*'),
      re.compile(r'.*tools/swarming_client.*'),
  ]

  def __init__(self, args, unknown_args):
    super(GTestTest, self).__init__(args, unknown_args)
    # Path (relative to the out dir) of the test executable on the device.
    self._test_exe = args.test_exe
    self._runtime_deps_path = args.runtime_deps_path
    self._vpython_dir = args.vpython_dir
    self._test_launcher_shard_index = args.test_launcher_shard_index
    self._test_launcher_total_shards = args.test_launcher_total_shards
    self._on_device_script = None
    self._stop_ui = args.stop_ui
    self._trace_dir = args.trace_dir

  @property
  def suite_name(self):
    return self._test_exe

  def build_test_command(self):
    """Assembles self._test_cmd and writes the on-device wrapper script."""
    # To keep things easy for us, ensure both types of output locations are
    # the same.
    if self._test_launcher_summary_output and self._logs_dir:
      json_out_dir = os.path.dirname(self._test_launcher_summary_output) or '.'
      if os.path.abspath(json_out_dir) != os.path.abspath(self._logs_dir):
        raise TestFormatError(
            '--test-launcher-summary-output and --logs-dir must point to '
            'the same directory.')
    if self._test_launcher_summary_output:
      result_dir, result_file = os.path.split(
          self._test_launcher_summary_output)
      # If args.test_launcher_summary_output is a file in cwd, result_dir will
      # be an empty string, so replace it with '.' when this is the case so
      # cros_run_test can correctly handle it.
      if not result_dir:
        result_dir = '.'
      device_result_file = '/tmp/%s' % result_file
      self._test_cmd += [
          '--results-src',
          device_result_file,
          '--results-dest-dir',
          result_dir,
      ]
    if self._trace_dir and self._logs_dir:
      trace_path = os.path.dirname(self._trace_dir) or '.'
      if os.path.abspath(trace_path) != os.path.abspath(self._logs_dir):
        raise TestFormatError(
            '--trace-dir and --logs-dir must point to the same directory.')
    if self._trace_dir:
      trace_path, trace_dirname = os.path.split(self._trace_dir)
      device_trace_dir = '/tmp/%s' % trace_dirname
      self._test_cmd += [
          '--results-src',
          device_trace_dir,
          '--results-dest-dir',
          trace_path,
      ]
    # Build the shell script that will be used on the device to invoke the test.
    device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
    if self._llvm_profile_var:
      device_test_script_contents += [
          'export LLVM_PROFILE_FILE=%s' % self._llvm_profile_var,
      ]
    if self._vpython_dir:
      vpython_spec_path = os.path.relpath(
          os.path.join(CHROMIUM_SRC_PATH, '.vpython'), self._path_to_outdir)
      # Initialize the vpython cache. This can take 10-20s, and some tests
      # can't afford to wait that long on the first invocation.
      device_test_script_contents.extend([
          'export PATH=$PATH:$PWD/%s' % (self._vpython_dir),
          'vpython -vpython-spec %s -vpython-tool install' %
          (vpython_spec_path),
      ])
    test_invocation = ('LD_LIBRARY_PATH=./ ./%s --test-launcher-shard-index=%d '
                       '--test-launcher-total-shards=%d' %
                       (self._test_exe, self._test_launcher_shard_index,
                        self._test_launcher_total_shards))
    if self._test_launcher_summary_output:
      test_invocation += ' --test-launcher-summary-output=%s' % (
          device_result_file)
    if self._trace_dir:
      device_test_script_contents.extend([
          'rm -rf %s' % device_trace_dir,
          'su chronos -c -- "mkdir -p %s"' % device_trace_dir,
      ])
      test_invocation += ' --trace-dir=%s' % device_trace_dir
    if self._additional_args:
      test_invocation += ' %s' % ' '.join(self._additional_args)
    if self._stop_ui:
      device_test_script_contents += [
          'stop ui',
      ]
      # The UI service on the device owns the chronos user session, so shutting
      # it down as chronos kills the entire execution of the test. So we'll have
      # to run as root up until the test invocation.
      test_invocation = 'su chronos -c -- "%s"' % test_invocation
      # And we'll need to chown everything since cros_run_test's "--as-chronos"
      # option normally does that for us.
      device_test_script_contents.append('chown -R chronos: ../..')
    else:
      self._test_cmd += [
          # Some tests fail as root, so run as the less privileged user
          # 'chronos'.
          '--as-chronos',
      ]
    device_test_script_contents.append(test_invocation)
    self._on_device_script = self.write_test_script_to_disk(
        device_test_script_contents)
    runtime_files = [os.path.relpath(self._on_device_script)]
    runtime_files += self._read_runtime_files()
    if self._vpython_dir:
      # --vpython-dir is relative to the out dir, but --files expects paths
      # relative to src dir, so fix the path up a bit.
      runtime_files.append(
          os.path.relpath(
              os.path.abspath(
                  os.path.join(self._path_to_outdir, self._vpython_dir)),
              CHROMIUM_SRC_PATH))
      # TODO(bpastene): Add the vpython spec to the test's runtime deps instead
      # of handling it here.
      runtime_files.append('.vpython')
    for f in runtime_files:
      self._test_cmd.extend(['--files', f])
    self._test_cmd += [
        '--',
        './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
    ]

  def _read_runtime_files(self):
    """Returns the GN runtime deps as cwd-relative paths, minus host-only
    files matched by _FILE_IGNORELIST. Empty when no deps file was given."""
    if not self._runtime_deps_path:
      return []
    abs_runtime_deps_path = os.path.abspath(
        os.path.join(self._path_to_outdir, self._runtime_deps_path))
    with open(abs_runtime_deps_path) as runtime_deps_file:
      files = [l.strip() for l in runtime_deps_file if l]
    rel_file_paths = []
    for f in files:
      rel_file_path = os.path.relpath(
          os.path.abspath(os.path.join(self._path_to_outdir, f)))
      if not any(regex.match(rel_file_path) for regex in self._FILE_IGNORELIST):
        rel_file_paths.append(rel_file_path)
    return rel_file_paths

  def post_run(self, _):
    """Removes the generated on-device script; results handling is done by
    cros_run_test's --results-src/--results-dest-dir flags instead."""
    if self._on_device_script:
      os.remove(self._on_device_script)
class BrowserSanityTest(RemoteTest):
  """Runs the sanity test (vm_sanity.py) baked into the device image.

  The sanity test smoke-checks the system browser, so the locally built
  Chrome is deployed and mounted onto the device first.
  """

  def __init__(self, args, unknown_args):
    super(BrowserSanityTest, self).__init__(args, unknown_args)
    # 10 min should be enough time for the sanity test to pass.
    self._retries = 1
    self._timeout = 600

  @property
  def suite_name(self):
    return SANITY_TEST_TARGET

  def build_test_command(self):
    """Assembles self._test_cmd: deploy Chrome, then run vm_sanity.py."""
    # GTest-style filtering/repeat flags don't apply here; strip them.
    if '--gtest_filter=%s' % SANITY_TEST_TARGET in self._additional_args:
      logging.info('GTest filtering not supported for the sanity test. The '
                   '--gtest_filter arg will be ignored.')
      self._additional_args.remove('--gtest_filter=%s' % SANITY_TEST_TARGET)
    if any(arg.startswith('--gtest_repeat') for arg in self._additional_args):
      logging.info(
          '--gtest_repeat not supported for sanity test. The arg will be '
          'ignored.')
      self._additional_args = [
          arg for arg in self._additional_args
          if not arg.startswith('--gtest_repeat')
      ]
    if self._additional_args:
      logging.error(
          'Sanity test should not have additional args: These will be '
          'ignored: %s', self._additional_args)
    # VMs don't have the disk space for an unstripped version of Chrome
    # instrumented for code coverage, so only strip in that case.
    if not self._use_vm or not os.environ.get('LLVM_PROFILE_FILE'):
      self._test_cmd.append('--nostrip')
    device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
    if self._llvm_profile_var:
      device_test_script_contents += [
          'echo "LLVM_PROFILE_FILE=%s" >> /etc/chrome_dev.conf' %
          (self._llvm_profile_var)
      ]
    # vm_sanity.py is the sanity test, which is baked into the device image.
    device_test_script_contents.append('/usr/local/autotest/bin/vm_sanity.py')
    self._on_device_script = self.write_test_script_to_disk(
        device_test_script_contents)
    self._test_cmd += [
        '--files',
        os.path.relpath(self._on_device_script),
        # The sanity test smoke-checks the system browser, so deploy our
        # locally-built chrome to the device before testing.
        '--deploy',
        '--mount',
        '--build-dir',
        os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
        '--',
        './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
    ]
def device_test(args, unknown_args):
  """Builds and runs the appropriate device-side test wrapper.

  Picks TastTest for the 'tast' test type, BrowserSanityTest when the
  requested executable is the sanity target, and GTestTest otherwise.
  Returns the wrapper's exit code.
  """
  # cros_run_test has trouble with relative paths that go up directories,
  # so cd to src/, which should be the root of all data deps.
  os.chdir(CHROMIUM_SRC_PATH)
  # pylint: disable=redefined-variable-type
  # TODO: Remove the above when depot_tool's pylint is updated to include the
  # fix to https://github.com/PyCQA/pylint/issues/710.
  if args.test_type == 'tast':
    test_cls = TastTest
  elif args.test_exe == SANITY_TEST_TARGET:
    test_cls = BrowserSanityTest
  else:
    test_cls = GTestTest
  test = test_cls(args, unknown_args)
  test.build_test_command()
  logging.info('Running the following command on the device:')
  logging.info(' '.join(test.test_cmd))
  return test.run_test()
def host_cmd(args, unknown_args):
  """Runs an arbitrary host-side command through cros_run_test's --host-cmd.

  Optionally deploys a locally built Chrome to the device first. Returns
  cros_run_test's exit code. Raises TestFormatError on malformed args.
  """
  if not args.cmd:
    raise TestFormatError('Must specify command to run on the host.')
  elif unknown_args:
    raise TestFormatError(
        'Args "%s" unsupported. Is your host command correctly formatted?' %
        (' '.join(unknown_args)))
  elif args.deploy_chrome and not args.path_to_outdir:
    raise TestFormatError(
        '--path-to-outdir must be specified if --deploy-chrome is passed.')

  cmd = [
      CROS_RUN_TEST_PATH,
      '--board',
      args.board,
      '--cache-dir',
      args.cros_cache,
  ]
  if args.use_vm:
    cmd.extend([
        '--start',
        # Don't persist any filesystem changes after the VM shutsdown.
        '--copy-on-write',
        '--device',
        'localhost',
    ])
  else:
    cmd.extend(['--device', args.device if args.device else LAB_DUT_HOSTNAME])
  if args.verbose:
    cmd.append('--debug')
  if args.logs_dir:
    for log in SYSTEM_LOG_LOCATIONS:
      cmd.extend(['--results-src', log])
    cmd.extend(['--results-dest-dir', args.logs_dir])

  test_env = setup_env()
  if args.deploy_chrome:
    cmd.extend([
        '--deploy',
        # Mounting the browser gives it enough disk space to not need stripping.
        '--mount',
        '--nostrip',
        '--build-dir',
        os.path.abspath(args.path_to_outdir),
    ])

  cmd.extend(['--host-cmd', '--'])
  cmd.extend(args.cmd)

  logging.info('Running the following command:')
  logging.info(' '.join(cmd))
  return subprocess42.call(
      cmd, stdout=sys.stdout, stderr=sys.stderr, env=test_env)
def setup_env():
  """Returns a copy of the current env with some needed vars added."""
  env = dict(os.environ)
  # Some chromite scripts expect chromite/bin to be on PATH.
  env['PATH'] = '%s:%s' % (env['PATH'], os.path.join(CHROMITE_PATH, 'bin'))
  # deploy_chrome needs a set of GN args used to build chrome to determine if
  # certain libraries need to be pushed to the device. It looks for the args via
  # an env var. To trigger the default deploying behavior, give it a dummy set
  # of args.
  # TODO(crbug.com/823996): Make the GN-dependent deps controllable via cmd
  # line args.
  if not env.get('GN_ARGS'):
    env['GN_ARGS'] = 'enable_nacl = true'
  if not env.get('USE'):
    env['USE'] = 'highdpi'
  return env
def add_common_args(*parsers):
  """Installs the args shared by every subcommand on each given parser."""
  for p in parsers:
    p.add_argument('--verbose', '-v', action='store_true')
    p.add_argument(
        '--board', type=str, required=True, help='Type of CrOS device.')
    p.add_argument(
        '--cros-cache',
        type=str,
        default=DEFAULT_CROS_CACHE,
        help='Path to cros cache.')
    p.add_argument(
        '--path-to-outdir',
        type=str,
        required=True,
        help='Path to output directory, all of whose contents will be '
             'deployed to the device.')
    p.add_argument(
        '--runtime-deps-path',
        type=str,
        help='Runtime data dependency file from GN.')
    p.add_argument(
        '--vpython-dir',
        type=str,
        help='Location on host of a directory containing a vpython binary to '
             'deploy to the device before the test starts. The location of '
             'this dir will be added onto PATH in the device. WARNING: The '
             'arch of the device might not match the arch of the host, so '
             'avoid using "${platform}" when downloading vpython via CIPD.')
    # TODO(bpastene): Switch all uses of "--vm-logs-dir" to "--logs-dir".
    p.add_argument(
        '--vm-logs-dir',
        '--logs-dir',
        type=str,
        dest='logs_dir',
        help='Will copy everything under /var/log/ from the device after the '
             'test into the specified dir.')
    # Exactly one of --use-vm / --device may be given.
    target_group = p.add_mutually_exclusive_group()
    target_group.add_argument(
        '--use-vm',
        action='store_true',
        help='Will run the test in the VM instead of a device.')
    target_group.add_argument(
        '--device',
        type=str,
        help='Hostname (or IP) of device to run the test on. This arg is not '
             'required if --use-vm is set.')
def main():
  """Parses the command line and dispatches to the chosen subcommand.

  Subcommands: 'host-cmd' (host-side command), 'vm-test' (device-side
  gtest/sanity test), 'tast' (device-side Tast tests). Validates the lab
  DUT / KVM environment before running. Returns the handler's exit code.
  """
  parser = argparse.ArgumentParser()
  subparsers = parser.add_subparsers(dest='test_type')
  # Host-side test args.
  host_cmd_parser = subparsers.add_parser(
      'host-cmd',
      help='Runs a host-side test. Pass the host-side command to run after '
           '"--". If --use-vm is passed, hostname and port for the device '
           'will be 127.0.0.1:9222.')
  host_cmd_parser.set_defaults(func=host_cmd)
  host_cmd_parser.add_argument(
      '--deploy-chrome',
      action='store_true',
      help='Will deploy a locally built Chrome binary to the device before '
           'running the host-cmd.')
  host_cmd_parser.add_argument('cmd', nargs=argparse.REMAINDER)
  # GTest args.
  # TODO(bpastene): Rename 'vm-test' arg to 'gtest'.
  gtest_parser = subparsers.add_parser(
      'vm-test', help='Runs a device-side gtest.')
  gtest_parser.set_defaults(func=device_test)
  gtest_parser.add_argument(
      '--test-exe',
      type=str,
      required=True,
      help='Path to test executable to run inside the device. If the value is '
           '%s, the sanity test that ships with the device image runs instead. '
           'This test smokes-check the system browser (eg: loads a simple '
           'webpage, executes some javascript), so a fully-built Chrome binary '
           'that can get deployed to the device is expected to be available in '
           'the out-dir.' % SANITY_TEST_TARGET)
  # GTest args. Some are passed down to the test binary in the device. Others
  # are parsed here since they might need tweaking or special handling.
  gtest_parser.add_argument(
      '--test-launcher-summary-output',
      type=str,
      help='When set, will pass the same option down to the test and retrieve '
           'its result file at the specified location.')
  # Shard args are parsed here since we might also specify them via env vars.
  gtest_parser.add_argument(
      '--test-launcher-shard-index',
      type=int,
      default=os.environ.get('GTEST_SHARD_INDEX', 0),
      help='Index of the external shard to run.')
  gtest_parser.add_argument(
      '--test-launcher-total-shards',
      type=int,
      default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
      help='Total number of external shards.')
  gtest_parser.add_argument(
      '--stop-ui',
      action='store_true',
      help='Will stop the UI service in the device before running the test.')
  gtest_parser.add_argument(
      '--trace-dir',
      type=str,
      help='When set, will pass down to the test to generate the trace and '
           'retrieve the trace files to the specified location.')
  # Tast test args.
  # pylint: disable=line-too-long
  tast_test_parser = subparsers.add_parser(
      'tast',
      help='Runs a device-side set of Tast tests. For more details, see: '
      'https://chromium.googlesource.com/chromiumos/platform/tast/+/master/docs/running_tests.md'
  )
  tast_test_parser.set_defaults(func=device_test)
  tast_test_parser.add_argument(
      '--suite-name',
      type=str,
      required=True,
      help='Name to apply to the set of Tast tests to run. This has no effect '
           'on what is executed, but is used mainly for test results reporting '
           'and tracking (eg: flakiness dashboard).')
  tast_test_parser.add_argument(
      '--test-launcher-summary-output',
      type=str,
      help='Generates a simple GTest-style JSON result file for the test run.')
  # TODO(bpastene): Change all uses of "--conditional" to use "--attr-expr".
  tast_test_parser.add_argument(
      '--conditional',
      '--attr-expr',
      type=str,
      dest='conditional',
      help='A boolean expression whose matching tests will run '
           '(eg: ("dep:chrome")).')
  tast_test_parser.add_argument(
      '--test',
      '-t',
      action='append',
      dest='tests',
      help='A Tast test to run in the device (eg: "ui.ChromeLogin").')
  add_common_args(gtest_parser, tast_test_parser, host_cmd_parser)
  args, unknown_args = parser.parse_known_args()
  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
  if not args.use_vm and not args.device:
    # If we're not running on a VM, but haven't specified a hostname, assume
    # we're on a lab bot and are trying to run a test on a lab DUT. See if the
    # magic lab DUT hostname resolves to anything. (It will in the lab and will
    # not on dev machines.)
    try:
      socket.getaddrinfo(LAB_DUT_HOSTNAME, None)
    except socket.gaierror:
      logging.error('The default DUT hostname of %s is unreachable.',
                    LAB_DUT_HOSTNAME)
      return 1
  if args.use_vm:
    if not os.path.exists('/dev/kvm'):
      logging.error('/dev/kvm is missing. Is KVM installed on this machine?')
      return 1
    elif not os.access('/dev/kvm', os.W_OK):
      logging.error(
          '/dev/kvm is not writable as current user. Perhaps you should be '
          'root?')
      return 1
  args.cros_cache = os.path.abspath(args.cros_cache)
  return args.func(args, unknown_args)
# Script entry point: propagate main()'s return code as the process exit
# status so callers (e.g. the build infrastructure) see failures.
if __name__ == '__main__':
  sys.exit(main())
``` |
[
{
"content": "Provide a verbatim copy of the code:\n```python\n# Copyright (c) 2019 Dassault Systemes. All rights reserved.\r\n\r\nimport h5py\r\nimport sdf\r\nimport numpy as np\r\nimport os\r\nimport sys\r\n\r\n\r\ndef _to_python_str(s):\r\n \"\"\" Convert to Python string \"\"\"\r\n\r\n if isinstance(s... | [
{
"content": "Provide a verbatim copy of the code:\n<|memory_start|>```python\n# Copyright (c) 2019 Dassault Systemes. All rights reserved.\r\n\r\nimport h5py\r\nimport sdf\r\nimport numpy as np\r\nimport os\r\nimport sys\r\n\r\n\r\ndef _to_python_str(s):\r\n \"\"\" Convert to Python string \"\"\"\r\n\r\n ... | ```python
# Copyright (c) 2019 Dassault Systemes. All rights reserved.
import h5py
import sdf
import numpy as np
import os
import sys
def _to_python_str(s):
""" Convert to Python string """
if isinstance(s, bytes):
return s.decode('utf-8')
else:
return s
def load(filename, objectname):
    """Load one object from an HDF5 file as an sdf object.

    filename   -- path of the HDF5 file
    objectname -- HDF5 path of the group or dataset to load

    Returns an sdf.Group (with dimension scales re-attached via
    _restore_scales) for HDF5 groups, or an sdf.Dataset (with its scales
    loaded alongside) for HDF5 datasets. Raises Exception for any other
    HDF5 object type.
    """
    with h5py.File(filename, 'r') as f:
        # Maps every visited h5py dataset to its created sdf.Dataset so
        # scale references can be resolved afterwards.
        datasets = {}
        dsobj = f[objectname]
        class_name = dsobj.__class__.__name__
        if class_name == 'Group':
            group = _create_group(dsobj, datasets)
            _restore_scales(datasets)
            return group
        elif class_name == 'Dataset':
            dataset = _create_dataset(dsobj, datasets)
            # Load each dimension's first attached scale (if any) as a
            # standalone dataset and mark it as a scale.
            for ri in range(dsobj.ndim):
                if dsobj.dims[ri]:
                    sobj = dsobj.dims[ri][0]
                    s = _create_dataset(sobj, dict())
                    s.is_scale = True
                    dataset.scales[ri] = s
            return dataset
        else:
            raise Exception('Unexpected object')
def save(filename, group):
    """Write the sdf *group* tree to *filename* as a new HDF5 file.

    All groups and datasets are written first; dimension scales are then
    attached in a second pass. A scale that is referenced but not itself
    stored in the file cannot be attached and is reported on stdout.
    """
    with h5py.File(filename, 'w') as f:
        # Maps each written sdf.Dataset to its h5py dataset for the
        # scale-attachment pass below.
        datasets = dict()
        _write_group(f, group, '/', datasets)
        # attach the scales
        for ds, h5ds in datasets.items():
            for i, s in enumerate(ds.scales):
                if s is None:
                    continue
                elif s in datasets:
                    h5s = datasets[s]
                    dimname = s._display_name
                    if dimname is None:
                        dimname = ''
                    h5s.make_scale(_str(dimname))
                    h5ds.dims[i].attach_scale(h5s)
                else:
                    print("Cannot attach scale for '" + h5ds.name +
                          "' because the referenced scale for dimension " + str(i) + " is not part of the file")
def _create_group(gobj, datasets):
    """Create an sdf.Group (recursively) from an h5py group.

    Every dataset encountered is also recorded in *datasets*
    (h5py dataset -> sdf.Dataset) for later scale restoration.
    """
    ds_obj_list = []
    g_obj_list = []

    # COMMENT maps to the group's comment field, not a generic attribute.
    group_attrs = {key: gobj.attrs[key] for key in gobj.attrs.keys() if key != 'COMMENT'}
    comment = gobj.attrs.get('COMMENT')

    for ds_name in gobj.keys():
        # Use h5py's public Dataset/Group classes instead of the private
        # h5py._hl module paths (same classes, stable import location).
        if isinstance(gobj[ds_name], h5py.Dataset):
            ds_obj_list.append(gobj[ds_name])
        elif isinstance(gobj[ds_name], h5py.Group):
            g_obj_list.append(gobj[ds_name])

    child_groups = []
    for cgobj in g_obj_list:
        child_groups.append(_create_group(cgobj, datasets))

    ds_list = [_create_dataset(dsobj, datasets) for dsobj in ds_obj_list]

    name = gobj.name.split('/')[-1]

    return sdf.Group(name=name, comment=comment, attributes=group_attrs, groups=child_groups, datasets=ds_list)
def _create_dataset(dsobj, datasets):
    """Create an sdf.Dataset from an h5py dataset, mapping the well-known
    HDF5 attributes (COMMENT, NAME, UNIT, ...) onto sdf fields and storing
    everything else in ds.attributes. Also records dsobj -> ds in *datasets*.
    """
    _, name = os.path.split(dsobj.name)
    ds = sdf.Dataset(name, data=dsobj[()])

    for attr in dsobj.attrs:
        if attr == 'COMMENT':
            ds.comment = _to_python_str(dsobj.attrs[attr])
        elif attr == 'NAME':
            ds.display_name = _to_python_str(dsobj.attrs[attr])
        elif attr == 'RELATIVE_QUANTITY' and _to_python_str(dsobj.attrs[attr]) == 'TRUE':
            ds.relative_quantity = True
        elif attr == 'UNIT':
            ds.unit = _to_python_str(dsobj.attrs[attr])
        elif attr == 'DISPLAY_UNIT':
            ds.display_unit = _to_python_str(dsobj.attrs[attr])
        elif attr == 'CLASS' and _to_python_str(dsobj.attrs[attr]) == 'DIMENSION_SCALE':
            ds.is_scale = True
        elif attr == 'REFERENCE_LIST':
            # A dataset that others reference as a dimension scale carries a
            # REFERENCE_LIST attribute, so it must itself be a scale.
            ds.is_scale = True
        elif attr == 'DIMENSION_LIST':
            # Scale attachments are restored separately (see _restore_scales),
            # so the raw attribute is deliberately ignored here. (The previous
            # membership test also listed REFERENCE_LIST, but that entry was
            # unreachable -- it is already handled above.)
            pass
        else:
            ds.attributes[attr] = _to_python_str(dsobj.attrs[attr])

    ds.scales = [None] * ds.data.ndim

    datasets[dsobj] = ds

    return ds
def _restore_scales(datasets):
for dsobj, ds in datasets.items():
for i in range(ds.data.ndim):
if dsobj.dims[i]:
sobj = dsobj.dims[i][0]
scale = datasets[sobj]
scale.is_scale = True
ds.scales[i] = scale
pass
def _str(s):
""" Convert to byte string """
if sys.version_info.major >= 3 and isinstance(s, bytes):
return s
else:
# convert the string to an fixed-length utf-8 byte string
return np.string_(s.encode('utf-8'))
def _write_group(f, g, path, datasets):
    """Recursively write sdf group *g* and its datasets under *path* in *f*,
    recording written datasets in *datasets* for scale attachment later."""
    gobj = f if path == '/' else f.create_group(path)

    # Recurse into child groups first.
    for child in g.groups:
        _write_group(f, child, path + child.name + '/', datasets)

    if g.comment is not None:
        gobj.attrs['COMMENT'] = _str(g.comment)
    for attr_name, attr_value in g.attributes.items():
        gobj.attrs[attr_name] = _str(attr_value)

    # Write this group's own datasets.
    for dataset in g.datasets:
        _write_dataset(f, dataset, path, datasets)
def _write_dataset(f, ds, path, datasets):
    """Write sdf dataset *ds* under *path* in *f*, mirror its metadata to
    HDF5 attributes, and record ds -> h5py dataset in *datasets*."""
    f[path + ds.name] = ds.data
    dsobj = f[path + ds.name]

    datasets[ds] = dsobj

    if ds.comment:
        dsobj.attrs['COMMENT'] = _str(ds.comment)

    # NOTE(review): guards on the private ds._display_name but writes the
    # public ds.display_name -- presumably the property falls back to the
    # dataset name; confirm against sdf.Dataset.
    if ds._display_name:
        dsobj.attrs['NAME'] = _str(ds.display_name)

    if ds.relative_quantity:
        dsobj.attrs['RELATIVE_QUANTITY'] = _str('TRUE')

    if ds.unit:
        dsobj.attrs['UNIT'] = _str(ds.unit)

    # Only store DISPLAY_UNIT when it differs from the base unit.
    if ds.display_unit != ds.unit:
        dsobj.attrs['DISPLAY_UNIT'] = _str(ds.display_unit)

    if ds.is_scale:
        dimname = ds.display_name

        if dimname is None:
            dimname = ''

        # Low-level h5ds call marks the dataset as a dimension scale.
        h5py.h5ds.set_scale(dsobj.id, _str(dimname))

    return dsobj
``` |
[
{
"content": "Here is the code block:\n```python\nimport sys\nimport datetime\n\n\nclass ProgressIndicator(object):\n ENABLED = True\n RECORDS = list()\n\n def __init__(self, prompt, frequency):\n self._display(prompt)\n self._record(prompt + 'start')\n self.prompt = prompt\n ... | [
{
"content": "Here is the code block:\n<|memory_start|>```python\nimport sys\nimport datetime\n\n\nclass ProgressIndicator(object):\n ENABLED = True\n RECORDS = list()\n\n def __init__(self, prompt, frequency):\n self._display(prompt)\n self._record(prompt + 'start')\n self.prompt ... | ```python
import sys
import datetime
class ProgressIndicator(object):
    """Console progress reporter that also records timestamped events.

    Class-level state:
      ENABLED -- when False, all console output is suppressed.
      RECORDS -- list of (datetime, message) tuples shared by all instances.
    """

    ENABLED = True
    RECORDS = list()

    def __init__(self, prompt, frequency):
        """Start a progress run; one dot is printed every *frequency* clicks."""
        self._display(prompt)
        self._record(prompt + 'start')
        self.prompt = prompt
        self.frequency = frequency
        self.count = 0

    def click(self):
        """Count one unit of work, printing a dot every *frequency* units."""
        if (self.count % self.frequency) == 0:
            self._display('.')
        self.count += 1

    def done(self):
        """Finish the run: newline on screen, summary line in RECORDS."""
        self._display('\n')
        self._record(self.prompt + 'done (%d entries)' % self.count)

    @classmethod
    def display(cls, fmt, *args):
        """Show and record *fmt* (optionally %-formatted with *args*)."""
        if cls.ENABLED:
            if len(args) == 0:
                output = fmt
            else:
                output = fmt % tuple(args)
            cls._display(output)
            cls._record(output)

    @classmethod
    def _display(cls, output):
        # Write without a trailing newline and flush so dots appear promptly.
        if cls.ENABLED:
            sys.stdout.write(output)
            sys.stdout.flush()

    @classmethod
    def _record(cls, event):
        cls.RECORDS.append((datetime.datetime.now(), event))

    @classmethod
    def clear(cls):
        """Drop all recorded events."""
        cls.RECORDS = list()

    @classmethod
    def dump_records(cls):
        """Print every recorded event with its timestamp."""
        for (timestamp, event) in cls.RECORDS:
            # print() as a function call is valid on both Python 2 and 3;
            # the previous Python 2 print statement was a Py3 syntax error.
            print('%s: %s' % (str(timestamp), event))
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\nimport chainer.functions as F\nimport chainer.links as L\nfrom chainer import Variable\nfrom chainer.links import caffe\nfrom chainer import computational_graph as c\nfrom deel.tensor import *\nfrom deel.network import *\nimport chai... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\nimport chainer.functions as F\nimport chainer.links as L\nfrom chainer import Variable\nfrom chainer.links import caffe\nfrom chainer import computational_graph as c\nfrom deel.tensor import *\nfrom deel.network impor... | ```python
import chainer.functions as F
import chainer.links as L
from chainer import Variable
from chainer.links import caffe
from chainer import computational_graph as c
from deel.tensor import *
from deel.network import *
import chainer.serializers as cs
import copy
from deel.deel import *
import chainer
import json
import os
import multiprocessing
import threading
import time
import six
import numpy as np
import os.path
from PIL import Image
from six.moves import queue
import pickle
import hashlib
import datetime
import sys
import random
'''
Network in Network by Chainer
'''
import deel.model.rnin
class RegionalNetworkInNetwork(ImageNet):
    """Deel wrapper around the regional Network-in-Network Chainer model
    (deel.model.rnin.RNIN), providing classify/backprop/save helpers."""

    def __init__(self,modelpath=None,mean='misc/ilsvrc_2012_mean.npy',labels='data/labels.txt',optimizer=None):
        """Set up the RNIN model, per-channel mean image, labels, and optimizer.

        modelpath -- optional HDF5 snapshot (loaded from misc/<modelpath>)
        mean      -- unused here: a constant BGR-style per-channel mean
                     (104/117/123) is used instead of loading this file
        labels    -- tab-separated label file loaded with np.loadtxt
        optimizer -- NOTE(review): a caller-supplied optimizer is never
                     assigned to self.optimizer, so passing one would raise
                     AttributeError at the setup() call below; confirm intent.
        """
        super(RegionalNetworkInNetwork,self).__init__('RegionalNetworkInNetwork',in_size=227)
        self.func = deel.model.rnin.RNIN()
        if modelpath is not None:
            cs.load_hdf5("misc/"+modelpath,self.func)
        self.graph_generated=None
        xp = Deel.xp
        #ImageNet.mean_image = pickle.load(open(mean, 'rb'))
        # Constant per-channel mean instead of the pickled mean file above.
        ImageNet.mean_image = np.ndarray((3, 256, 256), dtype=xp.float32)
        ImageNet.mean_image[0] = 104
        ImageNet.mean_image[1] = 117
        ImageNet.mean_image[2] = 123
        ImageNet.in_size = self.func.insize
        self.labels = np.loadtxt(labels, str, delimiter="\t")
        self.t = ChainerTensor(Variable(Deel.xp.asarray([1.0])))
        if Deel.gpu>=0:
            self.func.to_gpu()
        if optimizer is None:
            self.optimizer = optimizers.Adam()
        self.optimizer.setup(self.func)

    def forward(self,x):
        """Run the underlying RNIN forward pass on chainer variable *x*."""
        y = self.func.forward(x)
        return y

    def classify(self,x=None,train=False):
        """Classify tensor *x* (defaults to Tensor.context).

        Returns a ChainerTensor wrapping the softmax scores, detached from
        the computation graph, with its owner set to this network.
        """
        if x is None:
            x=Tensor.context
        image = x.value
        self.x_batch = image
        xp = Deel.xp
        x_data = xp.asarray(self.x_batch,dtype=Deel.xp.float32)
        # NOTE(review): 'volatile' is the old (pre-2.0) chainer Variable
        # API -- this code requires a matching chainer version.
        x = chainer.Variable(x_data, volatile='off' if train else 'on')
        score = self.forward(x)
        score = F.softmax(score)
        score = Variable(score.data) #Unchain
        t = ChainerTensor(score)
        t.owner=self
        t.use()
        return t

    def save(self,filename):
        """Save the model weights to *filename* as HDF5."""
        #cs.save_hdf5(filename,self.func.copy().to_cpu())
        cs.save_hdf5(filename,self.func.copy())

    def backprop(self,t,distill=False):
        """One optimization step against target *t* (input from Tensor.context).

        distill=True uses the distillation loss (accuracy reported as 0.0);
        otherwise the regular loss/accuracy pair. Also dumps the
        computational graph to graph.wo_split.dot on the first call.
        Returns (loss value, accuracy).
        """
        x=Tensor.context
        self.optimizer.zero_grads()
        if distill:
            t = chainer.Variable(Deel.xp.asarray([t.content.data],dtype=Deel.xp.float32), volatile='off')
            loss = self.func.getLossDistill(x.content,t)
            accuracy = 0.0
        else:
            loss,accuracy = self.func.getLoss(x.content,t.content)
            accuracy = accuracy.data
        loss.backward()
        self.optimizer.update()
        if not self.graph_generated:
            #with open('graph.dot', 'w') as o:
            #    o.write(c.build_computational_graph((loss,), False).dump())
            with open('graph.wo_split.dot', 'w') as o:
                o.write(c.build_computational_graph((loss,), True).dump())
            print('generated graph')
            self.graph_generated = True
        return loss.data,accuracy
``` |
[
{
"content": "Repeat the full code snippet:\n```python\n### Copyright (C) 2007-2015 Peter Williams <pwil3058@gmail.com>\n###\n### This program is free software; you can redistribute it and/or modify\n### it under the terms of the GNU General Public License as published by\n### the Free Software Foundation; vers... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\n### Copyright (C) 2007-2015 Peter Williams <pwil3058@gmail.com>\n###\n### This program is free software; you can redistribute it and/or modify\n### it under the terms of the GNU General Public License as published by\n### the Free Software ... | ```python
### Copyright (C) 2007-2015 Peter Williams <pwil3058@gmail.com>
###
### This program is free software; you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation; version 2 of the License only.
###
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# A dummy console facilitate code sharing in othe modules
from .bab.decorators import singleton
@singleton
class ConsoleLogWidget(object):
def start_cmd(self, cmd):
return
def append_stdin(self, msg):
return
def append_stdout(self, msg):
return
def append_stderr(self, msg):
return
def end_cmd(self, result=None):
return
def append_entry(self, msg):
return
LOG = ConsoleLogWidget()
``` |
[
{
"content": "Here is the code content:\n```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.9.7 on 2017-05-30 05:35\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migrat... | [
{
"content": "Here is the code content:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.9.7 on 2017-05-30 05:35\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(m... | ```python
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-05-30 05:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('republica', '0001_initial'),
('financas', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Caixa',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('saldo', models.DecimalField(decimal_places=2, max_digits=8)),
('republica', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='republica.Republica')),
],
),
migrations.CreateModel(
name='Transacao',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('valor', models.DecimalField(decimal_places=2, max_digits=6)),
('data', models.DateField(default=django.utils.timezone.now)),
('descricao', models.CharField(max_length=100)),
('caixa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='transacoes', to='financas.Caixa')),
],
),
]
``` |
[
{
"content": "Repeat the full code snippet:\n```python\n# -*- coding: utf-8 -*-\n\n# Copyright 2013 Mirantis, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\n# Copyright 2013 Mirantis, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of t... | ```python
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nailgun.objects.serializers.base import BasicSerializer
class ReleaseSerializer(BasicSerializer):
fields = (
"id",
"name",
"version",
"can_update_from_versions",
"description",
"operating_system",
"modes_metadata",
"roles",
"roles_metadata",
"wizard_metadata",
"state",
"attributes_metadata"
)
@classmethod
def serialize(cls, instance, fields=None):
from nailgun.objects.release import Release
release_dict = \
super(ReleaseSerializer, cls).serialize(instance, fields)
release_dict["is_deployable"] = Release.is_deployable(instance)
# we always want to get orchestrator data even it's a default one
release_dict["orchestrator_data"] = \
Release.get_orchestrator_data_dict(instance)
return release_dict
class ReleaseOrchestratorDataSerializer(BasicSerializer):
fields = (
"repo_metadata",
"puppet_manifests_source",
"puppet_modules_source"
)
``` |
[
{
"content": "Here is the script:\n```python\n# This Gaia Sky script showcases a constant camera turn\n\n# Created by Toni Sagrista\n\nfrom py4j.clientserver import ClientServer, JavaParameters, PythonParameters\nimport time\n\nclass CameraUpdateRunnable(object):\n def __init__(self, gs, rotation_rate):\n ... | [
{
"content": "Here is the script:\n<|memory_start|>```python\n# This Gaia Sky script showcases a constant camera turn\n\n# Created by Toni Sagrista\n\nfrom py4j.clientserver import ClientServer, JavaParameters, PythonParameters\nimport time\n\nclass CameraUpdateRunnable(object):\n def __init__(self, gs, rota... | ```python
# This Gaia Sky script showcases a constant camera turn
# Created by Toni Sagrista
from py4j.clientserver import ClientServer, JavaParameters, PythonParameters
import time
class CameraUpdateRunnable(object):
def __init__(self, gs, rotation_rate):
self.gs = gs
self.rotation_rate = rotation_rate
self.prev_time = time.perf_counter()
self.direction = [0.0, 0.0, 1.0]
self.up = [0.0, 1.0, 0.0]
self.prev_time = time.time()
# Set the direction and up
self.gs.setCameraDirection(self.direction)
self.gs.setCameraUp(self.up)
def run(self):
self.time = time.time()
# This is the number of seconds since the last frame
dt = self.time - self.prev_time
# Actual degrees to rotate this frame
rot_deg = dt * self.rotation_rate
# Rotate the direction angle around up by rot_deg degrees
self.direction = self.gs.rotate3([self.direction[0], self.direction[1], self.direction[2]], [0.0, 1.0, 0.0], rot_deg)
# Set it
self.gs.setCameraDirection(self.direction)
# We do not need to set the up vector, since it never changes
# Store prev_time for use in next frame
self.prev_time = self.time
def toString():
return "camera-update-runnable"
class Java:
implements = ["java.lang.Runnable"]
gateway = ClientServer(java_parameters=JavaParameters(auto_convert=True),
python_parameters=PythonParameters())
gs = gateway.entry_point
gs.cameraStop()
gs.setCameraFree()
gs.stopSimulationTime()
gs.setVisibility("element.orbits", True)
gs.setCameraLock(True)
gs.setCameraOrientationLock(False)
gs.setFov(49)
# Rotation rate in deg/s
rotation_rate = 15.0
# park the camera updater
gs.parkRunnable("cam-updater", CameraUpdateRunnable(gs, rotation_rate))
gs.sleep(20)
# clean up and finish
print("Cleaning up and ending")
gs.unparkRunnable("cam-updater")
gs.cameraStop()
gs.maximizeInterfaceWindow()
gs.enableInput()
# close connection
gateway.shutdown()
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\nfrom check_information_version import *\nfrom check_information_db2system import *\nfrom check_configuration_audit_buffer import *\nfrom check_configuration_authentication_mechanism import *\nfrom check_configuration_catalog_noauth import *\nfr... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\nfrom check_information_version import *\nfrom check_information_db2system import *\nfrom check_configuration_audit_buffer import *\nfrom check_configuration_authentication_mechanism import *\nfrom check_configuration_catalog_noa... | ```python
from check_information_version import *
from check_information_db2system import *
from check_configuration_audit_buffer import *
from check_configuration_authentication_mechanism import *
from check_configuration_catalog_noauth import *
from check_configuration_datalinks import *
from check_configuration_dftdbpath import *
from check_configuration_diaglevel import *
from check_configuration_diagpath import *
from check_configuration_discover import *
from check_configuration_discover_inst import *
from check_configuration_discover_db import *
from check_configuration_fed_noauth import *
from check_configuration_health_mon import *
from check_configuration_keepfenced import *
from check_configuration_notifylevel import *
from check_configuration_srvcon_auth import *
from check_configuration_archretrydelay import *
from check_configuration_numarchretry import *
from check_configuration_logarchmeth import *
from check_configuration_failarchpath import *
from check_configuration_num_db_backups import *
from check_configuration_autorestart import *
from check_configuration_mirrorlogpath import *
from check_configuration_dasadm_group import *
from check_configuration_exec_exp_task import *
from check_configuration_sched_enable import *
from check_configuration_max_connection_limits import *
from check_configuration_auto_maint import *
from check_privilege_syscat_views import *
from check_privilege_tablespaces import *
from check_privilege_group_entitlements import *
from check_privilege_secadm import *
from check_privilege_dbadm import *
from check_privilege_createtab import *
from check_privilege_bindadd import *
from check_privilege_connect import *
from check_privilege_nofence import *
from check_privilege_implschema import *
from check_privilege_load import *
from check_privilege_external_routine import *
from check_privilege_external_quiesceconnect import *
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\nfrom django.core.cache import cache\nfrom rest_framework import viewsets, views\nfrom rest_framework.response import Response\nfrom stationspinner.corporation.serializers import CorporationSheetSerializer, \\\n CorporationSheetLis... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\nfrom django.core.cache import cache\nfrom rest_framework import viewsets, views\nfrom rest_framework.response import Response\nfrom stationspinner.corporation.serializers import CorporationSheetSerializer, \\\n Cor... | ```python
from django.core.cache import cache
from rest_framework import viewsets, views
from rest_framework.response import Response
from stationspinner.corporation.serializers import CorporationSheetSerializer, \
CorporationSheetListSerializer
from stationspinner.corporation.models import CorporationSheet, Asset
from stationspinner.libs.drf_extensions import CapsulerPermission
class CorporationSheetViewset(viewsets.ReadOnlyModelViewSet):
serializer_class = CorporationSheetSerializer
model = CorporationSheet
permission_classes = [CapsulerPermission]
def list(self, request):
serializer = CorporationSheetListSerializer(
self.get_queryset(),
many=True,
context={'request': request}
)
return Response(serializer.data)
def get_queryset(self):
return CorporationSheet.objects.filter(owner=self.request.user, enabled=True)
class AssetLocationsView(views.APIView):
permission_classes = [CapsulerPermission]
def get(self, request, format=None):
corporationID = request.query_params.get('corporationID', None)
regionID = request.query_params.get('regionID', None)
if not corporationID:
return Response([])
else:
try:
corporation = CorporationSheet.objects.get(owner=request.user,
pk=corporationID)
except CorporationSheet.DoesNotExist:
return Response([])
key = hash(('asset_locations', corporation.pk.__hash__, regionID))
asset_locations = cache.get(key, None)
if not asset_locations:
asset_locations = Asset.objects.get_top_level_locations(corporation.pk, regionID)
cache.set(key, asset_locations, 1800)
return Response(asset_locations)
``` |
[
{
"content": "```python\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable ... | [
{
"content": "<|memory_start|>```python\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless require... | ```python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from conveyor.conveyorheat.common import exception
from conveyor.conveyorheat.engine import properties
from conveyor.conveyorheat.engine import resource
from conveyor.conveyorheat.engine.resources.aws.ec2 import route_table
from conveyor.i18n import _
class InternetGateway(resource.Resource):
PROPERTIES = (
TAGS,
) = (
'Tags',
)
_TAG_KEYS = (
TAG_KEY, TAG_VALUE,
) = (
'Key', 'Value',
)
properties_schema = {
TAGS: properties.Schema(
properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
schema={
TAG_KEY: properties.Schema(
properties.Schema.STRING,
required=True
),
TAG_VALUE: properties.Schema(
properties.Schema.STRING,
required=True
),
},
implemented=False,
)
),
}
def handle_create(self):
self.resource_id_set(self.physical_resource_name())
def handle_delete(self):
pass
@staticmethod
def get_external_network_id(client):
ext_filter = {'router:external': True}
ext_nets = client.list_networks(**ext_filter)['networks']
if len(ext_nets) != 1:
# TODO(sbaker) if there is more than one external network
# add a heat configuration variable to set the ID of
# the default one
raise exception.Error(
_('Expected 1 external network, found %d') % len(ext_nets))
external_network_id = ext_nets[0]['id']
return external_network_id
class VPCGatewayAttachment(resource.Resource):
PROPERTIES = (
VPC_ID, INTERNET_GATEWAY_ID, VPN_GATEWAY_ID,
) = (
'VpcId', 'InternetGatewayId', 'VpnGatewayId',
)
properties_schema = {
VPC_ID: properties.Schema(
properties.Schema.STRING,
_('VPC ID for this gateway association.'),
required=True
),
INTERNET_GATEWAY_ID: properties.Schema(
properties.Schema.STRING,
_('ID of the InternetGateway.')
),
VPN_GATEWAY_ID: properties.Schema(
properties.Schema.STRING,
_('ID of the VPNGateway to attach to the VPC.'),
implemented=False
),
}
default_client_name = 'neutron'
def _vpc_route_tables(self):
for res in six.itervalues(self.stack):
if (res.has_interface('AWS::EC2::RouteTable') and
res.properties.get(route_table.RouteTable.VPC_ID) ==
self.properties.get(self.VPC_ID)):
yield res
def add_dependencies(self, deps):
super(VPCGatewayAttachment, self).add_dependencies(deps)
# Depend on any route table in this template with the same
# VpcId as this VpcId.
# All route tables must exist before gateway attachment
# as attachment happens to routers (not VPCs)
for route_tbl in self._vpc_route_tables():
deps += (self, route_tbl)
def handle_create(self):
client = self.client()
external_network_id = InternetGateway.get_external_network_id(client)
for router in self._vpc_route_tables():
client.add_gateway_router(router.resource_id, {
'network_id': external_network_id})
def handle_delete(self):
for router in self._vpc_route_tables():
with self.client_plugin().ignore_not_found:
self.client().remove_gateway_router(router.resource_id)
def resource_mapping():
return {
'AWS::EC2::InternetGateway': InternetGateway,
'AWS::EC2::VPCGatewayAttachment': VPCGatewayAttachment,
}
``` |
[
{
"content": "Repeat the following code:\n```python\n#!/usr/bin/env python\n#\n# Electrum - lightweight Bitcoin client\n# Copyright (C) 2014 Thomas Voegtlin\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the F... | [
{
"content": "Repeat the following code:\n<|memory_start|>```python\n#!/usr/bin/env python\n#\n# Electrum - lightweight Bitcoin client\n# Copyright (C) 2014 Thomas Voegtlin\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as publi... | ```python
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2014 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import os.path
import re
import sys
import threading
import time
import traceback
import urlparse
import json
import requests
try:
import paymentrequest_pb2 as pb2
except ImportError:
sys.exit("Error: could not find paymentrequest_pb2.py. Create it with 'protoc --proto_path=lib/ --python_out=lib/ lib/paymentrequest.proto'")
import bitcoin
import util
from util import print_error
import transaction
import x509
import rsakey
REQUEST_HEADERS = {'Accept': 'application/bitcoin-paymentrequest', 'User-Agent': 'Electrum'}
ACK_HEADERS = {'Content-Type':'application/bitcoin-payment','Accept':'application/bitcoin-paymentack','User-Agent':'Electrum'}
ca_path = requests.certs.where()
ca_list, ca_keyID = x509.load_certificates(ca_path)
# status of payment requests
PR_UNPAID = 0
PR_EXPIRED = 1
PR_UNKNOWN = 2 # sent but not propagated
PR_PAID = 3 # send and propagated
PR_ERROR = 4 # could not parse
def get_payment_request(url):
u = urlparse.urlparse(url)
if u.scheme in ['http', 'https']:
response = requests.request('GET', url, headers=REQUEST_HEADERS)
data = response.content
print_error('fetched payment request', url, len(data))
elif u.scheme == 'file':
with open(u.path, 'r') as f:
data = f.read()
else:
raise BaseException("unknown scheme", url)
pr = PaymentRequest(data)
return pr
class PaymentRequest:
def __init__(self, data):
self.raw = data
self.parse(data)
self.requestor = None # known after verify
self.tx = None
def __str__(self):
return self.raw
def parse(self, r):
self.id = bitcoin.sha256(r)[0:16].encode('hex')
try:
self.data = pb2.PaymentRequest()
self.data.ParseFromString(r)
except:
self.error = "cannot parse payment request"
return
self.details = pb2.PaymentDetails()
self.details.ParseFromString(self.data.serialized_payment_details)
self.outputs = []
for o in self.details.outputs:
addr = transaction.get_address_from_output_script(o.script)[1]
self.outputs.append(('address', addr, o.amount))
self.memo = self.details.memo
self.payment_url = self.details.payment_url
def verify(self, contacts):
if not self.raw:
self.error = "Empty request"
return
pr = pb2.PaymentRequest()
pr.ParseFromString(self.raw)
if not pr.signature:
self.error = "No signature"
return
if pr.pki_type in ["x509+sha256", "x509+sha1"]:
return self.verify_x509(pr)
elif pr.pki_type in ["dnssec+btc", "dnssec+ecdsa"]:
return self.verify_dnssec(pr, contacts)
else:
self.error = "ERROR: Unsupported PKI Type for Message Signature"
return False
def verify_x509(self, paymntreq):
if not ca_list:
self.error = "Trusted certificate authorities list not found"
return False
cert = pb2.X509Certificates()
cert.ParseFromString(paymntreq.pki_data)
# verify the chain of certificates
try:
x, ca = verify_cert_chain(cert.certificate)
except BaseException as e:
self.error = str(e)
return False
# get requestor name
self.requestor = x.get_common_name()
if self.requestor.startswith('*.'):
self.requestor = self.requestor[2:]
# verify the BIP70 signature
pubkey0 = rsakey.RSAKey(x.modulus, x.exponent)
sig = paymntreq.signature
paymntreq.signature = ''
s = paymntreq.SerializeToString()
sigBytes = bytearray(sig)
msgBytes = bytearray(s)
if paymntreq.pki_type == "x509+sha256":
hashBytes = bytearray(hashlib.sha256(msgBytes).digest())
verify = pubkey0.verify(sigBytes, x509.PREFIX_RSA_SHA256 + hashBytes)
elif paymntreq.pki_type == "x509+sha1":
verify = pubkey0.hashAndVerify(sigBytes, msgBytes)
if not verify:
self.error = "ERROR: Invalid Signature for Payment Request Data"
return False
### SIG Verified
self.error = 'Signed by Trusted CA: ' + ca.get_common_name()
return True
def verify_dnssec(self, pr, contacts):
sig = pr.signature
alias = pr.pki_data
info = contacts.resolve(alias)
if info.get('validated') is not True:
self.error = "Alias verification failed (DNSSEC)"
return False
if pr.pki_type == "dnssec+btc":
self.requestor = alias
address = info.get('address')
pr.signature = ''
message = pr.SerializeToString()
if bitcoin.verify_message(address, sig, message):
self.error = 'Verified with DNSSEC'
return True
else:
self.error = "verify failed"
return False
else:
self.error = "unknown algo"
return False
def has_expired(self):
return self.details.expires and self.details.expires < int(time.time())
def get_expiration_date(self):
return self.details.expires
def get_amount(self):
return sum(map(lambda x:x[2], self.outputs))
def get_requestor(self):
return self.requestor if self.requestor else 'unknown'
def get_verify_status(self):
return self.error
def get_memo(self):
return self.memo
def get_id(self):
return self.id
def get_outputs(self):
return self.outputs[:]
def send_ack(self, raw_tx, refund_addr):
pay_det = self.details
if not self.details.payment_url:
return False, "no url"
paymnt = pb2.Payment()
paymnt.merchant_data = pay_det.merchant_data
paymnt.transactions.append(raw_tx)
ref_out = paymnt.refund_to.add()
ref_out.script = transaction.Transaction.pay_script('address', refund_addr)
paymnt.memo = "Paid using Electrum"
pm = paymnt.SerializeToString()
payurl = urlparse.urlparse(pay_det.payment_url)
try:
r = requests.post(payurl.geturl(), data=pm, headers=ACK_HEADERS, verify=ca_path)
except requests.exceptions.SSLError:
print "Payment Message/PaymentACK verify Failed"
try:
r = requests.post(payurl.geturl(), data=pm, headers=ACK_HEADERS, verify=False)
except Exception as e:
print e
return False, "Payment Message/PaymentACK Failed"
if r.status_code >= 500:
return False, r.reason
try:
paymntack = pb2.PaymentACK()
paymntack.ParseFromString(r.content)
except Exception:
return False, "PaymentACK could not be processed. Payment was sent; please manually verify that payment was received."
print "PaymentACK message received: %s" % paymntack.memo
return True, paymntack.memo
def make_unsigned_request(req):
from transaction import Transaction
addr = req['address']
time = req.get('time', 0)
exp = req.get('exp', 0)
if time and type(time) != int:
time = 0
if exp and type(exp) != int:
exp = 0
amount = req['amount']
if amount is None:
amount = 0
memo = req['memo']
script = Transaction.pay_script('address', addr).decode('hex')
outputs = [(script, amount)]
pd = pb2.PaymentDetails()
for script, amount in outputs:
pd.outputs.add(amount=amount, script=script)
pd.time = time
pd.expires = time + exp if exp else 0
pd.memo = memo
pr = pb2.PaymentRequest()
pr.serialized_payment_details = pd.SerializeToString()
pr.signature = ''
return pr
def sign_request_with_alias(pr, alias, alias_privkey):
pr.pki_type = 'dnssec+btc'
pr.pki_data = str(alias)
message = pr.SerializeToString()
ec_key = bitcoin.regenerate_key(alias_privkey)
address = bitcoin.address_from_private_key(alias_privkey)
compressed = bitcoin.is_compressed(alias_privkey)
pr.signature = ec_key.sign_message(message, compressed, address)
def verify_cert_chain(chain):
""" Verify a chain of certificates. The last certificate is the CA"""
# parse the chain
cert_num = len(chain)
x509_chain = []
for i in range(cert_num):
x = x509.X509(bytearray(chain[i]))
x509_chain.append(x)
if i == 0:
x.check_date()
else:
if not x.check_ca():
raise BaseException("ERROR: Supplied CA Certificate Error")
if not cert_num > 1:
raise BaseException("ERROR: CA Certificate Chain Not Provided by Payment Processor")
# if the root CA is not supplied, add it to the chain
ca = x509_chain[cert_num-1]
if ca.getFingerprint() not in ca_list:
keyID = ca.get_issuer_keyID()
f = ca_keyID.get(keyID)
if f:
root = ca_list[f]
x509_chain.append(root)
else:
raise BaseException("Supplied CA Not Found in Trusted CA Store.")
# verify the chain of signatures
cert_num = len(x509_chain)
for i in range(1, cert_num):
x = x509_chain[i]
prev_x = x509_chain[i-1]
algo, sig, data = prev_x.get_signature()
sig = bytearray(sig)
pubkey = rsakey.RSAKey(x.modulus, x.exponent)
if algo == x509.ALGO_RSA_SHA1:
verify = pubkey.hashAndVerify(sig, data)
elif algo == x509.ALGO_RSA_SHA256:
hashBytes = bytearray(hashlib.sha256(data).digest())
verify = pubkey.verify(sig, x509.PREFIX_RSA_SHA256 + hashBytes)
elif algo == x509.ALGO_RSA_SHA384:
hashBytes = bytearray(hashlib.sha384(data).digest())
verify = pubkey.verify(sig, x509.PREFIX_RSA_SHA384 + hashBytes)
elif algo == x509.ALGO_RSA_SHA512:
hashBytes = bytearray(hashlib.sha512(data).digest())
verify = pubkey.verify(sig, x509.PREFIX_RSA_SHA512 + hashBytes)
else:
raise BaseException("Algorithm not supported")
util.print_error(self.error, algo.getComponentByName('algorithm'))
if not verify:
raise BaseException("Certificate not Signed by Provided CA Certificate Chain")
return x509_chain[0], ca
def check_ssl_config(config):
import pem
key_path = config.get('ssl_privkey')
cert_path = config.get('ssl_chain')
with open(key_path, 'r') as f:
params = pem.parse_private_key(f.read())
with open(cert_path, 'r') as f:
s = f.read()
bList = pem.dePemList(s, "CERTIFICATE")
# verify chain
x, ca = verify_cert_chain(bList)
# verify that privkey and pubkey match
privkey = rsakey.RSAKey(*params)
pubkey = rsakey.RSAKey(x.modulus, x.exponent)
assert x.modulus == params[0]
assert x.exponent == params[1]
# return requestor
requestor = x.get_common_name()
if requestor.startswith('*.'):
requestor = requestor[2:]
return requestor
def sign_request_with_x509(pr, key_path, cert_path):
import pem
with open(key_path, 'r') as f:
params = pem.parse_private_key(f.read())
privkey = rsakey.RSAKey(*params)
with open(cert_path, 'r') as f:
s = f.read()
bList = pem.dePemList(s, "CERTIFICATE")
certificates = pb2.X509Certificates()
certificates.certificate.extend(map(str, bList))
pr.pki_type = 'x509+sha256'
pr.pki_data = certificates.SerializeToString()
msgBytes = bytearray(pr.SerializeToString())
hashBytes = bytearray(hashlib.sha256(msgBytes).digest())
sig = privkey.sign(x509.PREFIX_RSA_SHA256 + hashBytes)
pr.signature = bytes(sig)
def serialize_request(req):
pr = make_unsigned_request(req)
signature = req.get('sig')
requestor = req.get('name')
if requestor and signature:
pr.signature = signature.decode('hex')
pr.pki_type = 'dnssec+btc'
pr.pki_data = str(requestor)
return pr
def make_request(config, req):
pr = make_unsigned_request(req)
key_path = config.get('ssl_privkey')
cert_path = config.get('ssl_chain')
if key_path and cert_path:
sign_request_with_x509(pr, key_path, cert_path)
return pr
class InvoiceStore(object):
def __init__(self, config):
self.config = config
self.invoices = {}
self.load_invoices()
def load_invoices(self):
path = os.path.join(self.config.path, 'invoices')
try:
with open(path, 'r') as f:
d = json.loads(f.read())
except:
return
for k, v in d.items():
try:
pr = PaymentRequest(v.get('hex').decode('hex'))
pr.tx = v.get('txid')
pr.requestor = v.get('requestor')
self.invoices[k] = pr
except:
continue
def save(self):
l = {}
for k, pr in self.invoices.items():
l[k] = {
'hex': str(pr).encode('hex'),
'requestor': pr.get_requestor(),
'txid': pr.tx
}
path = os.path.join(self.config.path, 'invoices')
with open(path, 'w') as f:
s = json.dumps(l, indent=4, sort_keys=True)
r = f.write(s)
def get_status(self, key):
pr = self.get(key)
if pr.tx is not None:
return PR_PAID
if pr.has_expired():
return PR_EXPIRED
return PR_UNPAID
def add(self, pr):
key = pr.get_id()
if key in self.invoices:
print_error('invoice already in list')
return key
self.invoices[key] = pr
self.save()
return key
def remove(self, key):
self.invoices.pop(key)
self.save()
def get(self, k):
return self.invoices.get(k)
def set_paid(self, key, tx_hash):
self.invoices[key].tx = tx_hash
self.save()
def sorted_list(self):
# sort
return self.invoices.values()
``` |
[
{
"content": "Here is a code snippet:\n```python\nimport operator\nimport re\n\nimport cv2\nfrom PIL import Image\n\nimport pytesser\nimport reading_white_text\nimport smooth_image\n\nINVERT_COLOR_THRESHOLD = 128\n\n\ndef make_string_alphanmeric(lines):\n s = re.sub('[^0-9a-zA-Z\\n]+', ' ', lines)\n retur... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\nimport operator\nimport re\n\nimport cv2\nfrom PIL import Image\n\nimport pytesser\nimport reading_white_text\nimport smooth_image\n\nINVERT_COLOR_THRESHOLD = 128\n\n\ndef make_string_alphanmeric(lines):\n s = re.sub('[^0-9a-zA-Z\\n]+', ' ', l... | ```python
import operator
import re
import cv2
from PIL import Image
import pytesser
import reading_white_text
import smooth_image
INVERT_COLOR_THRESHOLD = 128
def make_string_alphanmeric(lines):
s = re.sub('[^0-9a-zA-Z\n]+', ' ', lines)
return s
def greyscale_image_mean(file_name):
size = 128, 128
im = Image.open(file_name)
im.thumbnail(size)
im.save('thumbnail.jpg', "JPEG")
img = cv2.imread('thumbnail.jpg', 0)
avg = 0
x, y = im.size
for i in range(y):
for j in range(x):
avg += img[i][j]
return float(avg * 1.0 / (128 * 128))
D = {}
def remove_too_many_small_words_dish(text):
new_text = []
for lines in text:
word_count = 0.0
small_word_count = 0.0
line = lines.split(' ')
for word in line:
if len(word) <= 2:
small_word_count += 1
word_count += 1
try:
small_word_proportion = small_word_count / word_count
except:
small_word_proportion = 0.0
# print 'small_word_proportion: ' , small_word_proportion
if small_word_proportion <= 0.4:
new_text.append(line)
return new_text
def fact(l):
if l >= 500:
return 1
else:
f = 500.0 / l
if int(f) <= 0:
return 1
return int(f)
def image_process_extract_string(s, mask, x, y, w, h):
im = mask[y: y + h, x: x + w]
cv2.imwrite(s, im)
size = 2 * w, 2 * h
im = Image.open(s)
im_resized = im.resize(size, Image.ANTIALIAS)
im_resized.save(s, dpi=(100, 100))
return pytesser.image_to_string(s, 6)
def extract_image(file_name):
img = cv2.imread(file_name)
img_final = cv2.imread(file_name)
img2gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
inv_img = (255 - img2gray)
kernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (5, 2))
dilated = cv2.dilate(inv_img, kernel, iterations=7) # dilate
type_image = dilated
_, contours, hierarchy = cv2.findContours(type_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE) # get contours
ind = 0
pix = {}
value_at = {}
index = 0
P = {}
image_2_text = smooth_image.smooth2(file_name)
for contour in contours:
# get rectangle bounding contour
[x, y, w, h] = cv2.boundingRect(contour)
# draw rectangle around contour on original image
if w < 20 or h < 20:
continue
if w > 500 and h > 500:
continue
cv2.rectangle(img, (x, y), (x + w + 10, y + h + 10), (255, 0, 255), 2)
s = '/tmp/' + str(ind) + '.tif'
box_read = image_process_extract_string(s, image_2_text, x, y, w, h)
# print box_read
D[(x, y)] = box_read
ind += 1
box_read_to_lines = box_read.split('\n')
for lines in box_read_to_lines:
P[(x, y)] = lines;
value_at[index] = (x, y)
index += 1
x1 = x / 50
x1 = x1 * 50
tup = [[x, lines]]
for key, val in tup:
pix.setdefault(key, []).append(val)
cv2.imwrite('boxed_image.jpg', img)
# print D
final_list2 = []
sorted_x = sorted(D.items(), key=operator.itemgetter(0))
# print sorted_x
for k, v in sorted(D.items()):
# print v
list_new = str(v).split('\n')
for l in list_new:
final_list2.append(l)
'''final_list = []
for val in pix:
for dish in pix[val]:
if len(dish) > 1:
final_list.append(dish)
'''
return final_list2
def pre_process_image(file_path):
norm2dp_image_path = 'norm2dp.jpg'
final_image_path = 'final_image_processed.jpg'
im = Image.open(file_path)
l, w = im.size
factor = fact(l)
size = int(factor * l), int(factor * w)
im_resized = im.resize(size, Image.ANTIALIAS)
im_resized.save(norm2dp_image_path, dpi=(200, 200))
im_new = smooth_image.smooth2(norm2dp_image_path)
cv2.imwrite(final_image_path, im_new)
return final_image_path
def remove_numeric_part(s):
no_digits = []
for i in s:
if not i.isdigit():
no_digits.append(i)
# Now join all elements of the list with '',
# which puts all of the characters together.
result = ''.join(no_digits)
return result
def main(file_path):
mean_grey_scale_value = greyscale_image_mean(file_path)
print(mean_grey_scale_value)
if not mean_grey_scale_value > INVERT_COLOR_THRESHOLD:
file_path = reading_white_text.read_image_white_text(file_path)
file_path = pre_process_image(file_path)
x = list(extract_image(file_path))
x = remove_too_many_small_words_dish(x)
for line in x:
line = make_string_alphanmeric(str(line))
line = remove_numeric_part(line)
line = line.strip()
if len(line) > 0:
print (line)
main('/Users/Amit/Projects/menu_parser/test.jpg')
``` |
[
{
"content": "Write out the code verbatim, preserving indentation and whitespace:\n```python\n# Copyright (c) 2015 Uber Technologies, Inc.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in t... | [
{
"content": "Write out the code verbatim, preserving indentation and whitespace:\n<|memory_start|>```python\n# Copyright (c) 2015 Uber Technologies, Inc.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"),... | ```python
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .call_request import CallRequestMessage, call_req_rw
from .call_request_continue import call_req_c_rw
from .call_response import CallResponseMessage, call_res_rw
from .call_response_continue import call_res_c_rw
from .cancel import CancelMessage, cancel_rw
from .claim import ClaimMessage, claim_rw
from .common import Tracing, ChecksumType
from .error import ErrorMessage, ErrorCode, error_rw
from .init_request import InitRequestMessage, init_req_rw
from .init_response import InitResponseMessage, init_res_rw
from .ping_request import PingRequestMessage, ping_req_rw
from .ping_response import PingResponseMessage, ping_res_rw
from .types import Types
RW = {
Types.CALL_REQ: call_req_rw,
Types.CALL_REQ_CONTINUE: call_req_c_rw,
Types.CALL_RES: call_res_rw,
Types.CALL_RES_CONTINUE: call_res_c_rw,
Types.CANCEL: cancel_rw,
Types.CLAIM: claim_rw,
Types.ERROR: error_rw,
Types.INIT_REQ: init_req_rw,
Types.INIT_RES: init_res_rw,
Types.PING_REQ: ping_req_rw,
Types.PING_RES: ping_res_rw,
}
__all__ = [
"RW",
"ChecksumType",
"CallRequestMessage",
"CallRequestContinueMessage",
"CallResponseMessage",
"CallResponseContinueMessage",
"CancelMessage",
"ClaimMessage",
"ErrorMessage",
"ErrorCode",
"InitRequestMessage",
"InitResponseMessage",
"PingRequestMessage",
"PingResponseMessage",
"Tracing",
]
``` |
[
{
"content": "```python\n# tests.checks\n# Performs checking that visualizers adhere to Yellowbrick conventions.\n#\n# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>\n# Created: Mon May 22 11:18:06 2017 -0700\n#\n# Copyright (C) 2017 District Data Labs\n# For license information, see LICENSE.txt\... | [
{
"content": "<|memory_start|>```python\n# tests.checks\n# Performs checking that visualizers adhere to Yellowbrick conventions.\n#\n# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>\n# Created: Mon May 22 11:18:06 2017 -0700\n#\n# Copyright (C) 2017 District Data Labs\n# For license information, ... | ```python
# tests.checks
# Performs checking that visualizers adhere to Yellowbrick conventions.
#
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Mon May 22 11:18:06 2017 -0700
#
# Copyright (C) 2017 District Data Labs
# For license information, see LICENSE.txt
#
# ID: checks.py [4131cb1] benjamin@bengfort.com $
"""
Performs checking that visualizers adhere to Yellowbrick conventions.
"""
##########################################################################
## Imports
##########################################################################
import sys
sys.path.append("..")
import numpy as np
import matplotlib.pyplot as plt
from yellowbrick.base import ModelVisualizer, ScoreVisualizer
from yellowbrick.classifier.base import ClassificationScoreVisualizer
from yellowbrick.cluster.base import ClusteringScoreVisualizer
from yellowbrick.features.base import FeatureVisualizer, DataVisualizer
from yellowbrick.regressor.base import RegressionScoreVisualizer
from yellowbrick.text.base import TextVisualizer
##########################################################################
## Checking runable
##########################################################################
def check_visualizer(Visualizer):
"""
Check if visualizer adheres to Yellowbrick conventions.
This function runs an extensive test-suite for input validation, return
values, exception handling, and more. Additional tests for scoring or
tuning visualizers will be run if the Visualizer clss inherits from the
corresponding object.
"""
name = Visualizer.__name__
for check in _yield_all_checks(name, Visualizer):
check(name, Visualizer)
##########################################################################
## Generate the specific per-visualizer checking
##########################################################################
def _yield_all_checks(name, Visualizer):
"""
Composes the checks required for the specific visualizer.
"""
# Global Checks
yield check_instantiation
yield check_estimator_api
# Visualizer Type Checks
if issubclass(Visualizer, RegressionScoreVisualizer):
for check in _yield_regressor_checks(name, Visualizer):
yield check
if issubclass(Visualizer, ClassificationScoreVisualizer):
for check in _yield_classifier_checks(name, Visualizer):
yield check
if issubclass(Visualizer, ClusteringScoreVisualizer):
for check in _yield_clustering_checks(name, Visualizer):
yield check
if issubclass(Visualizer, FeatureVisualizer):
for check in _yield_feature_checks(name, Visualizer):
yield check
if issubclass(Visualizer, TextVisualizer):
for check in _yield_text_checks(name, Visualizer):
yield check
# Other checks
def _yield_regressor_checks(name, Visualizer):
"""
Checks for regressor visualizers
"""
pass
def _yield_classifier_checks(name, Visualizer):
"""
Checks for classifier visualizers
"""
pass
def _yield_clustering_checks(name, Visualizer):
"""
Checks for clustering visualizers
"""
pass
def _yield_feature_checks(name, Visualizer):
"""
Checks for feature visualizers
"""
pass
def _yield_text_checks(name, Visualizer):
"""
Checks for text visualizers
"""
pass
##########################################################################
## Checking Functions
##########################################################################
def check_instantiation(name, Visualizer, args, kwargs):
# assert that visualizers can be passed an axes object.
ax = plt.gca()
viz = Visualizer(*args, **kwargs)
assert viz.ax == ax
def check_estimator_api(name, Visualizer):
X = np.random.rand((5, 10))
y = np.random.randint(0,2, 10)
# Ensure fit returns self.
viz = Visualizer()
self = viz.fit(X, y)
assert viz == self
if __name__ == '__main__':
import sys
sys.path.append("..")
from yellowbrick.classifier import *
from yellowbrick.cluster import *
from yellowbrick.features import *
from yellowbrick.regressor import *
from yellowbrick.text import *
visualizers = [
ClassBalance, ClassificationReport, ConfusionMatrix, ROCAUC,
KElbowVisualizer, SilhouetteVisualizer,
ScatterVisualizer, JointPlotVisualizer, Rank2D, RadViz, ParallelCoordinates,
AlphaSelection, ManualAlphaSelection,
PredictionError, ResidualsPlot,
TSNEVisualizer, FreqDistVisualizer, PosTagVisualizer
]
for visualizer in visualizers:
check_visualizer(visualizer)
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\nimport pytest\nfrom collections import OrderedDict\n\nfrom arctic.mixins import LayoutMixin\nfrom articles.forms import ArticleForm\n\nfrom tests.conftest import get_form\nfrom tests.factories import ArticleFactory\n\n\n@pytest.fixture\ndef lay... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\nimport pytest\nfrom collections import OrderedDict\n\nfrom arctic.mixins import LayoutMixin\nfrom articles.forms import ArticleForm\n\nfrom tests.conftest import get_form\nfrom tests.factories import ArticleFactory\n\n\n@pytest.... | ```python
import pytest
from collections import OrderedDict
from arctic.mixins import LayoutMixin
from articles.forms import ArticleForm
from tests.conftest import get_form
from tests.factories import ArticleFactory
@pytest.fixture
def layout():
class Layout(LayoutMixin):
layout = None
def __init__(self):
self.object = ArticleFactory()
self.form = ArticleForm(instance=self.object)
self.get_form = get_form(self.form)
return Layout()
pytestmark = pytest.mark.django_db
def test_layout_example_1(layout):
layout.layout = ['title|8']
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] == '8'
def test_layout_example_2(layout):
layout.layout = [['title|3', 'title', 'title']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0][0]['name'] == 'title'
assert layout[0]['rows'][0][0]['column'] == '3'
assert layout[0]['rows'][0][1]['name'] == 'title'
assert layout[0]['rows'][0][1]['column'] == '4'
assert layout[0]['rows'][0][2]['name'] == 'title'
assert layout[0]['rows'][0][2]['column'] == '5'
def test_layout_example_3a(layout):
layout.layout = [['title|3', 'title', 'title', 'category', 'category']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0][0]['name'] == 'title'
assert layout[0]['rows'][0][0]['column'] == '3'
assert layout[0]['rows'][0][1]['name'] == 'title'
assert layout[0]['rows'][0][1]['column'] == '2'
assert layout[0]['rows'][0][2]['name'] == 'title'
assert layout[0]['rows'][0][2]['column'] == '2'
assert layout[0]['rows'][0][3]['name'] == 'category'
assert layout[0]['rows'][0][3]['column'] == '2'
assert layout[0]['rows'][0][4]['name'] == 'category'
assert layout[0]['rows'][0][4]['column'] == '3'
def test_layout_example_3b(layout):
layout.layout = ['title|3', 'title', 'title', ['category', 'category']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] == '3'
assert layout[0]['rows'][1]['name'] == 'title'
assert layout[0]['rows'][1]['column'] is None
assert layout[0]['rows'][2]['name'] == 'title'
assert layout[0]['rows'][2]['column'] is None
assert layout[0]['rows'][3][0]['name'] == 'category'
assert layout[0]['rows'][3][0]['column'] == '6'
assert layout[0]['rows'][3][1]['name'] == 'category'
assert layout[0]['rows'][3][1]['column'] == '6'
def test_layout_example_4(layout):
layout.layout = OrderedDict([('-fieldset',
['title',
'title',
['category', 'updated_at|4']]),
('fieldset2|test description',
[['title|7', 'category']]),
('fieldset3',
['published'])])
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] == 'fieldset'
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is True
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] is None
assert layout[0]['rows'][1]['name'] == 'title'
assert layout[0]['rows'][1]['column'] is None
assert layout[0]['rows'][2][0]['name'] == 'category'
assert layout[0]['rows'][2][0]['column'] == '8'
assert layout[0]['rows'][2][1]['name'] == 'updated_at'
assert layout[0]['rows'][2][1]['column'] == '4'
assert layout[1]['fieldset']['title'] == 'fieldset2'
assert layout[1]['fieldset']['description'] == 'test description'
assert layout[1]['fieldset']['collapsible'] is False
assert layout[1]['rows'][0][0]['name'] == 'title'
assert layout[1]['rows'][0][0]['column'] == '7'
assert layout[1]['rows'][0][1]['name'] == 'category'
assert layout[1]['rows'][0][1]['column'] == '5'
assert layout[2]['fieldset']['title'] == 'fieldset3'
assert layout[2]['fieldset']['description'] is None
assert layout[2]['fieldset']['collapsible'] is False
assert layout[2]['rows'][0]['name'] == 'published'
assert layout[2]['rows'][0]['column'] is None
``` |
[
{
"content": "Return the code unaltered:\n```python\nfrom django.conf import settings\nfrom django.core.urlresolvers import reverse\nfrom django.db import transaction\nfrom django.http import HttpResponseRedirect, HttpResponseForbidden\nfrom django.shortcuts import render, get_object_or_404\nfrom django.utils i... | [
{
"content": "Return the code unaltered:\n<|memory_start|>```python\nfrom django.conf import settings\nfrom django.core.urlresolvers import reverse\nfrom django.db import transaction\nfrom django.http import HttpResponseRedirect, HttpResponseForbidden\nfrom django.shortcuts import render, get_object_or_404\nfro... | ```python
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render, get_object_or_404
from django.utils import simplejson
from django.views.decorators.csrf import csrf_protect, csrf_exempt
from functools import wraps
from zumanji.forms import UploadJsonForm
from zumanji.helpers import get_trace_data, get_changes, get_git_changes
from zumanji.models import Project, Build, BuildTag, Test
from zumanji.importer import import_build
NOTSET = object()
def api_auth(func):
@wraps(func)
def wrapped(request, *args, **kwargs):
if request.REQUEST.get('api_key'):
if request.REQUEST['api_key'] != settings.ZUMANJI_CONFIG.get('API_KEY', NOTSET):
return HttpResponseForbidden('Invalid api_key')
return func(request, *args, **kwargs)
return csrf_protect(func)(request, *args, **kwargs)
return csrf_exempt(wrapped)
def index(request):
build_qs = Build.objects.order_by('-revision__datetime', '-datetime').select_related('revision')
project_list = []
# lol O(N)
for project in Project.objects.all():
try:
latest_build = build_qs.filter(project=project)[0]
except IndexError:
latest_build = None
project_list.append((project, latest_build))
return render(request, 'zumanji/index.html', {
'project_list': project_list,
})
def view_project(request, project_label):
project = get_object_or_404(Project, label=project_label)
build_list = list(Build.objects
.filter(project=project)
.order_by('-revision__datetime', '-datetime')
.select_related('revision', 'project'))
return render(request, 'zumanji/project.html', {
'project': project,
'build_list': build_list,
})
def view_tag(request, project_label, tag_id):
project = get_object_or_404(Project, label=project_label)
tag = get_object_or_404(BuildTag, pk=tag_id)
build_list = list(Build.objects
.filter(project=project, tags=tag)
.order_by('-datetime')
.select_related('revision', 'project'))
return render(request, 'zumanji/tag.html', {
'project': project,
'tag': tag,
'build_list': build_list,
})
def view_build(request, project_label, build_id, tag_id=None):
filter_args = dict(project__label=project_label, id=build_id)
tag = None
if tag_id:
tag = get_object_or_404(BuildTag, id=tag_id)
filter_args["tags"] = tag
build = get_object_or_404(Build, **filter_args)
project = build.project
previous_build = build.get_previous_build(tag=tag)
next_build = build.get_next_build(tag=tag)
test_list = list(build.test_set
.filter(parent__isnull=True)
.order_by('-upper90_duration'))
compare_with = request.GET.get('compare_with')
if compare_with:
try:
compare_build = Build.objects.get(project__label=project_label, id=compare_with)
except Build.DoesNotExist:
compare_build = None
else:
compare_build = previous_build
changes = get_changes(compare_build, test_list)
if compare_build:
git_changes = get_git_changes(build, compare_build)
else:
git_changes = None
return render(request, 'zumanji/build.html', {
'project': project,
'tag': tag,
'build': build,
'previous_build': previous_build,
'compare_build': compare_build,
'next_build': next_build,
'test_list': test_list,
'changes': changes,
'git_changes': git_changes,
})
def view_test(request, project_label, build_id, test_label):
test = get_object_or_404(Test, project__label=project_label, build=build_id, label=test_label)
project = test.project
build = test.build
test_list = list(Test.objects.filter(parent=test)
.order_by('-upper90_duration')
.select_related('parent'))
# this is actually a <Test>
previous_test_by_build = test.get_test_in_previous_build()
next_test_by_build = test.get_test_in_next_build()
breadcrumbs = [
(reverse('zumanji:view_build', kwargs={'project_label': project.label, 'build_id': build.id}), 'Build #%s' % build.id)
]
last = ''
for node in test.get_context():
node_label = node.label[len(last):]
breadcrumbs.append(
(reverse('zumanji:view_test', kwargs={
'project_label': project.label,
'build_id': build.id,
'test_label': node.label,
}), node_label)
)
last = node.label + '.' # include the dot
previous_builds = test.get_previous_builds(50)
compare_with = request.GET.get('compare_with')
if compare_with:
try:
compare_build = Build.objects.get(project__label=project_label, id=compare_with)
except Build.DoesNotExist:
compare_build = None
else:
compare_build = previous_test_by_build.build if previous_test_by_build else None
if compare_build:
try:
compare_test = compare_build.test_set.get(label=test.label)
except Test.DoesNotExist:
compare_test = None
git_changes = get_git_changes(build, compare_build)
else:
compare_test = None
git_changes = None
trace_results = get_trace_data(test, compare_test)
if previous_test_by_build:
tests_to_check = test_list
changes = get_changes(compare_build, tests_to_check)
else:
changes = []
return render(request, 'zumanji/test.html', {
'breadcrumbs': breadcrumbs,
'project': project,
'build': build,
'previous_test_by_build': previous_test_by_build,
'next_test_by_build': next_test_by_build,
'previous_builds': previous_builds,
'test': test,
'test_list': test_list,
'changes': changes,
'compare_build': compare_build,
'trace_results': trace_results,
'git_changes': git_changes,
})
@api_auth
@transaction.commit_on_success
def upload_project_build(request, project_label):
project = get_object_or_404(Project, label=project_label)
form = UploadJsonForm(request.POST or None, request.FILES or None)
if form.is_valid():
data = simplejson.loads(request.FILES['json_file'].read())
try:
build = import_build(data, project=project.label, revision=form.cleaned_data.get('revision'))
except Exception, e:
form.errors['json_file'] = unicode(e)
else:
return HttpResponseRedirect(reverse('zumanji:view_build', kwargs={
'project_label': project.label, 'build_id': build.id}))
return render(request, 'zumanji/upload_build.html', {
'project': project,
'form': form,
})
``` |
[
{
"content": "Write the code verbatim:\n```python\n# Copyright 2015 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.o... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n# Copyright 2015 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# htt... | ```python
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module for the Cloud SDK CLI tree external representation."""
import argparse
import textwrap
from googlecloudsdk.core.console import console_io
def _NormalizeDescription(description):
"""Normalizes description text.
argparse.SUPPRESS normalizes to None.
Args:
description: str, The text to be normalized.
Returns:
str, The normalized text.
"""
if description == argparse.SUPPRESS:
description = None
elif description:
description = textwrap.dedent(description)
return description or ''
class Flag(object):
"""Flag info.
Attributes:
type: str, The flag value type name {'bool', 'int', 'float', 'string'}.
name: str, The normalized flag name ('_' => '-').
hidden: bool, True if the flag is hidden.
value: str, The flag value documentation name.
countmin: int, The minimum number of flag values.
countmax: int, The maximum number of flag values, 0 for unlimited.
required: int, 1 if the flag must be specified, 0 otherwise.
description: str, The help text.
choices: list, The list of static choices.
default: (self.type), The default flag value or None if no default.
group: int, Mutually exclusive flag group id counting from 1, 0 if none.
resource: str, Flag value resource identifier.
"""
def __init__(self, name, description='', default=None):
self.type = 'string'
self.name = name
self.hidden = description == argparse.SUPPRESS
self.value = ''
self.countmin = 0
self.countmax = 0
self.required = 0
self.choices = []
self.default = default
self.description = _NormalizeDescription(description)
self.group = 0
self.resource = ''
class Positional(object):
"""Positional info.
Attributes:
name: str, The normalized name ('_' => '-').
value: str, The positional value documentation name.
countmin: int, The minimum number of positional values.
countmax: int, The maximum number of positional values.
required: int, 1 if the positional must be specified, 0 otherwise.
description: str, The help text.
resource: str, Positional value resource identifier.
"""
def __init__(self, name, description):
self.name = name
self.value = ''
self.countmin = 0
self.countmax = 0
self.capsule = ''
self.description = description
self.resource = ''
class Command(object):
"""Command and group info.
Attributes:
release: str, The command release name {'internal', 'alpha', 'beta', 'ga'}.
name: str, The normalized name ('_' => '-').
hidden: bool, True if the command is hidden.
capsule: str, The first line of the command docstring.
description: str, The second and following lines of the command docstring.
flags: {str:str}, Command flag dict, indexed by normalized flag name.
positionals: [str], Command positionals list.
sections: {str:str}, Optional section help dict, indexed by section name.
"""
def __init__(self, command, parent):
self.release = command.ReleaseTrack().id
self.name = command.name.replace('_', '-')
self.hidden = command.IsHidden()
self.flags = {}
self.positionals = []
self.sections = {}
parent_command = parent.name.replace('_', '-') if parent else ''
self.release, capsule = self.__Release(
command, self.release, getattr(command, 'short_help', ''))
self.capsule = console_io.LazyFormat(
_NormalizeDescription(capsule),
command=self.name,
parent_command=parent_command)
self.release, description = self.__Release(
command, self.release, getattr(command, 'long_help', ''))
self.description = console_io.LazyFormat(
_NormalizeDescription(description),
command=self.name,
index=self.capsule,
parent_command=parent_command)
sections = getattr(command, 'detailed_help', None)
if sections:
for s in sections:
if s == 'brief':
self.release, self.capsule = self.__Release(
command, self.release, sections[s])
else:
self.sections[s] = console_io.LazyFormat(
_NormalizeDescription(sections[s]),
command=self.name,
index=self.capsule,
description=self.description,
parent_command=parent_command)
self.commands = {}
# _parent is explicitly private so it won't appear in serialized output.
self._parent = parent
if parent:
parent.commands[self.name] = self
args = command.ai
# Initialize the mutually exclusive flag groups.
group_count = {}
group_name = {}
for arg in args.flag_args:
for name in arg.option_strings:
if name.startswith('--'):
name = name.replace('_', '-')
if not self.__Ancestor(name):
g = args.mutex_groups.get(arg.dest, None)
if g:
group_name[name] = g
if g in group_count:
group_count[g] += 1
else:
group_count[g] = 1
group_id_count = 0
group_id = {}
# Sorted iteration preserves group_id[] indices across separate invocations
# where the mutex groups do not change.
for _, g in sorted(group_name.iteritems()):
if group_count[g] > 1:
group_count[g] = 0 # Don't check this group again!
group_id_count += 1
group_id[g] = group_id_count
# Collect the flags.
for arg in sorted(args.flag_args):
for name in arg.option_strings:
if name.startswith('--'):
name = name.replace('_', '-')
# Don't include ancestor flags.
if not self.__Ancestor(name):
flag = Flag(name, description=_NormalizeDescription(arg.help),
default=arg.default)
# ArgParse does not have an explicit Boolean flag type. By
# convention a flag with arg.nargs=0 and action='store_true' or
# action='store_false' is a Boolean flag. arg.type gives no hint
# (arg.type=bool would have been so easy) and we don't have access
# to args.action here. Even then the flag can take on non-Boolean
# values. If arg.default is not specified then it will be None, but
# it can be set to anything. So we do a conservative 'truthiness'
# test here.
if arg.nargs == 0:
flag.type = 'bool'
flag.default = True if arg.default else False
else:
if arg.type == int:
flag.type = 'int'
elif arg.type == float:
flag.type = 'float'
if arg.nargs == '*':
pass
elif arg.nargs == '?':
flag.countmax = 1
elif arg.nargs == '+':
flag.countmin = 1
elif type(arg.nargs) in (int, long):
flag.countmin = arg.nargs
flag.countmax = arg.nargs
if arg.metavar:
flag.value = arg.metavar
else:
flag.value = name[2:].upper()
if arg.choices:
choices = sorted(arg.choices)
if choices == ['false', 'true']:
flag.type = 'bool'
else:
flag.choices = choices
if arg.required:
flag.required = 1
flag.resource = getattr(arg, 'completion_resource', '')
if name in group_name and group_name[name] in group_id:
flag.group = group_id[group_name[name]]
self.flags[flag.name] = flag
# Collect the positionals.
for arg in args.positional_args:
name = arg.dest.replace('_', '-')
positional = Positional(name, description=_NormalizeDescription(arg.help))
if arg.metavar:
positional.value = arg.metavar
if arg.nargs != 0:
if arg.nargs == '*':
pass
elif arg.nargs == '?':
positional.countmax = 1
elif arg.nargs == '+':
positional.countmin = 1
elif type(arg.nargs) in (int, long):
positional.countmin = arg.nargs
positional.countmax = arg.nargs
positional.resource = getattr(arg, 'completion_resource', '')
self.positionals.append(positional)
def __Ancestor(self, flag):
"""Determines if flag is provided by an ancestor command.
Args:
flag: str, The flag name (no leading '-').
Returns:
bool, True if flag provided by an ancestor command, false if not.
"""
command = self._parent
while command:
if flag in command.flags:
return True
command = command._parent # pylint: disable=protected-access
return False
def __Release(self, command, release, description):
"""Determines the release type from the description text.
Args:
command: Command, The CLI command/group description.
release: int, The default release type.
description: str, The command description markdown.
Returns:
(release, description): (int, str), The actual release and description
with release prefix omitted.
"""
description = _NormalizeDescription(description)
path = command.GetPath()
if len(path) >= 2 and path[1] == 'internal':
release = 'INTERNAL'
return release, description
``` |
[
{
"content": "Return the code exactly, with no changes:\n```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.10.1 on 2016-12-28 04:48\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('wechat', '0009... | [
{
"content": "Return the code exactly, with no changes:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.10.1 on 2016-12-28 04:48\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ... | ```python
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-28 04:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wechat', '0009_auto_20161223_1920'),
]
operations = [
migrations.RenameModel(
old_name='UserSignUpExtendAccomodation',
new_name='UserSignUpExtendAccommodation',
),
migrations.RenameModel(
old_name='UserSignUpExtendMailcode',
new_name='UserSignUpExtendPostcode',
),
migrations.RenameField(
model_name='priceinfo',
old_name='price_discription',
new_name='price_description',
),
migrations.RenameField(
model_name='usersignupdetail',
old_name='telphone',
new_name='telephone',
),
migrations.RenameField(
model_name='usersignupextendaccommodation',
old_name='accomodation_type',
new_name='accommodation_type',
),
migrations.RenameField(
model_name='usersignupextendextrainfo',
old_name='extra_discription',
new_name='extra_description',
),
]
``` |
[
{
"content": "Write out the code verbatim, preserving indentation and whitespace:\n```python\nimport json\n\nfrom PyQt5.QtCore import pyqtSlot, pyqtSignal, QModelIndex, Qt\nfrom PyQt5.QtWidgets import QWidget, QSpacerItem, QSizePolicy, QShortcut\nfrom PyQt5.QtGui import QKeySequence\n\nfrom txplayagui.ui.librar... | [
{
"content": "Write out the code verbatim, preserving indentation and whitespace:\n<|memory_start|>```python\nimport json\n\nfrom PyQt5.QtCore import pyqtSlot, pyqtSignal, QModelIndex, Qt\nfrom PyQt5.QtWidgets import QWidget, QSpacerItem, QSizePolicy, QShortcut\nfrom PyQt5.QtGui import QKeySequence\n\nfrom txpl... | ```python
import json
from PyQt5.QtCore import pyqtSlot, pyqtSignal, QModelIndex, Qt
from PyQt5.QtWidgets import QWidget, QSpacerItem, QSizePolicy, QShortcut
from PyQt5.QtGui import QKeySequence
from txplayagui.ui.library import Ui_LibraryWidget
from txplayagui.library import LibraryModel
from txplayagui.utilities import unwrapMime
class LibraryWidget(Ui_LibraryWidget, QWidget):
    """Music-library browser pane: tree view plus rescan and search controls."""
    # Emitted right after a rescan request has been sent to the backend.
    rescanStarted = pyqtSignal()
    # Emitted with a list of track hashes the user wants activated/enqueued.
    itemsActivated = pyqtSignal(list)
    def __init__(self, parent=None):
        QWidget.__init__(self, parent)
        Ui_LibraryWidget.setupUi(self, self)
        self.rescanButton.show()
        self.scanProgressBar.hide()
        self.libraryModel = LibraryModel()
        self.treeView.setModel(self.libraryModel)
        self.libraryModel.toggleRow.connect(self.onToggleRow)
        self.rescanButton.clicked.connect(self.rescanClicked)
        self.treeView.doubleClicked.connect(self.onTreeViewDoubleClicked)
        self.querySearchBox.textChanged.connect(self.onQueryChanged)
        self.clearSearchButton.clicked.connect(self.onQueryClear)
        # shortcuts
        # Esc releases keyboard focus from the search box.
        releaseSearchboxShortcut = QShortcut(QKeySequence('Esc'), self.querySearchBox)
        releaseSearchboxShortcut.setContext(Qt.WidgetShortcut)
        releaseSearchboxShortcut.activated.connect(self.onReleaseSearchbox)
        # Down-arrow jumps from the search box into the library tree.
        scrollLibraryShortcut = QShortcut(QKeySequence(Qt.Key_Down), self.querySearchBox)
        scrollLibraryShortcut.setContext(Qt.WidgetShortcut)
        scrollLibraryShortcut.activated.connect(self.onScrollLibrary)
        # Return activates the tracks currently selected in the tree.
        activateTracksShortcut = QShortcut(QKeySequence(Qt.Key_Return), self.treeView)
        activateTracksShortcut.setContext(Qt.WidgetShortcut)
        activateTracksShortcut.activated.connect(self.onActivateTracks)
    @pyqtSlot()
    def rescanClicked(self):
        """Start a backend library rescan; swap the button for a progress bar."""
        from txplayagui.client import rescanLibrary
        self.rescanButton.hide()
        # Remove the spacer (layout index 2) so the progress bar can stretch;
        # rescanFinished() re-inserts it.
        self.scanControlsLayout.removeItem(self.scanControlsLayout.itemAt(2))
        self.scanProgressBar.show()
        self.scanResponse = rescanLibrary()
        self.scanResponse.lineReceived.connect(self.scanProgress)
        self.rescanStarted.emit()
    @pyqtSlot(str)
    def scanProgress(self, progress):
        """Handle one line of streamed rescan output (one JSON object per line)."""
        data = json.loads(progress.rstrip())
        if 'scanprogress' in data:
            progress = data['scanprogress']
            self.setProgress(progress)
        else:
            # Final message: carries the complete library payload instead.
            self.scanResponse.close()
            self.scanResponse.deleteLater()
            self.rescanFinished(data['library'])
    @pyqtSlot(int, QModelIndex, bool)
    def onToggleRow(self, row, parentIndex, isShown):
        """Show/hide a model row in the tree (used by search filtering)."""
        self.treeView.setRowHidden(row, parentIndex, not isShown)
    @pyqtSlot(QModelIndex)
    def onTreeViewDoubleClicked(self, index):
        """Activate the double-clicked item if it resolves to any track hashes."""
        hashes = self._getHashes(index)
        if len(hashes) == 0:
            return
        self.itemsActivated.emit(hashes)
    # NOTE(review): 'unicode' implies this runs under Python 2; a Python 3
    # port would need the slot signature changed to str.
    @pyqtSlot(unicode)
    def onQueryChanged(self, query):
        """Filter the library once the query reaches 3 chars; reset when cleared.

        Queries of length 1-2 deliberately leave the current filter untouched.
        """
        if len(query) > 2:
            self.libraryModel.filter(query)
        elif query == '':
            return self.libraryModel.showAll()
    @pyqtSlot()
    def onQueryClear(self):
        """Clear the search box and return focus to it."""
        self.querySearchBox.setText('')
        self.querySearchBox.setFocus()
    def onReleaseSearchbox(self):
        # Esc handler: move keyboard focus away from the search box.
        self.setFocus()
    def onScrollLibrary(self):
        # Down-arrow handler: jump selection to the first library row.
        self.treeView.setCurrentIndex(self.libraryModel.headIndex())
        self.treeView.setFocus()
    def onActivateTracks(self):
        """Emit the de-duplicated hashes of all currently selected items."""
        collectedHashes = []
        for index in self.treeView.selectedIndexes():
            for hash_ in self._getHashes(index):
                if hash_ not in collectedHashes:
                    collectedHashes.append(hash_)
        if len(collectedHashes) == 0:
            return
        self.itemsActivated.emit(collectedHashes)
    def _getHashes(self, index):
        """Resolve a model index to a list of track hashes.

        A track item yields its own hash; an album item yields its tracks'
        hashes; anything else yields an empty list.
        """
        mimeData = unwrapMime(self.libraryModel.mimeData([index]))
        item = mimeData['items'][0]
        try:
            return [item['hash']]
        except KeyError:
            # No 'hash' key: presumably an album-level node.
            if 'album' in item:
                return self.libraryModel.albumHashes(index)
        return []
    def setProgress(self, value):
        """Update the rescan progress bar (value appears to be a percentage)."""
        self.scanProgressBar.setValue(value)
    def rescanFinished(self, data):
        """Load the fresh library data and restore the rescan controls."""
        self.libraryModel.loadData(data)
        self.rescanButton.show()
        # Re-insert the spacer removed in rescanClicked().
        spacerItem = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.scanControlsLayout.addItem(spacerItem)
        self.scanProgressBar.hide()
        self.scanProgressBar.setValue(0)
        # apply filter if active
        query = self.querySearchBox.text().lower()
        if len(query) > 2:
            self.libraryModel.filter(query)
``` |
[
{
"content": "Reproduce the code exactly as provided (keep formatting):\n```python\nfrom __future__ import unicode_literals\nfrom enum import Enum\n\nfrom django.db import models\nfrom django.contrib.auth.models import User\n\n\nclass DataType(Enum):\n \"\"\"\n Enumeration of valid file types\n \"\"\"\... | [
{
"content": "Reproduce the code exactly as provided (keep formatting):\n<|memory_start|>```python\nfrom __future__ import unicode_literals\nfrom enum import Enum\n\nfrom django.db import models\nfrom django.contrib.auth.models import User\n\n\nclass DataType(Enum):\n \"\"\"\n Enumeration of valid file ty... | ```python
from __future__ import unicode_literals
from enum import Enum
from django.db import models
from django.contrib.auth.models import User
class DataType(Enum):
    """
    Enumeration of valid file types
    """
    # NOTE(review): these integer codes appear to be persisted via
    # DataFile.data_type below, so existing values must not be renumbered
    # once rows exist -- confirm before changing.
    NETCDF = 1
    TEXT = 2
    JSON = 3
    NAMELIST = 4
    IMAGE = 5
    XML = 6
XML = 6
class BaseModel(models.Model):
    """
    A simple base model to subclass from when we want to keep track of create and modify dates
    """
    # Set once, when the row is first inserted.
    created_date = models.DateTimeField(auto_now_add=True)
    # BUG FIX: was auto_now_add=True, which froze this field at creation time
    # so it never reflected modifications. auto_now=True refreshes it on every
    # save(). NOTE: this change requires generating a schema migration.
    modified_date = models.DateTimeField(auto_now=True)
    class Meta:
        """
        Declare this class abstract
        """
        abstract = True
        ordering = ["created_date"]
class DataFile(BaseModel):
    """
    Model of data stored by the users
    Inherit date fields from BaseModel
    """
    # Filesystem location of the stored file.
    path = models.CharField(max_length=255)
    # Human-readable name shown in listings.
    display_name = models.CharField(max_length=255)
    owner = models.ForeignKey(User, related_name='data_owner_user')
    # Users (besides the owner) who may access this file.
    allowed_access = models.ManyToManyField(User, related_name='file_allowed_access_users')
    # NOTE(review): IntegerField's first positional argument is verbose_name,
    # so passing DataType here only uses the enum class as a label -- it does
    # NOT restrict values. choices=[(t.value, t.name) for t in DataType] was
    # probably intended; confirm before changing.
    data_type = models.IntegerField(DataType)
    def toDict(self):
        """
        Dump contents to dict

        Returns a JSON-serialisable summary; related users are flattened
        to their usernames.
        """
        return {
            'id': self.id,
            'path': self.path,
            'display_name': self.display_name,
            'owner': self.owner.username,
            'allowed_access': [user.username for user in self.allowed_access.all()],
            'data_type': self.data_type
        }
class DataSet(BaseModel):
    """
    A container for data files, to group them into sets of data

    Inherits created/modified timestamps from BaseModel.
    """
    name = models.CharField(max_length=255)
    # Users (besides the owner) who may access this set.
    allowed_access = models.ManyToManyField(User, related_name='data_set_allowed_access_users')
    # The DataFile rows belonging to this set.
    file_list = models.ManyToManyField(DataFile, related_name='data_set_contents')
    metadata = models.CharField(max_length=1023)
    owner = models.ForeignKey(User, related_name='dataset_owner')
    def toDict(self):
        """
        Dump contents to a dict

        Returns a JSON-serialisable summary; related users and files are
        flattened to usernames / display names.
        """
        return {
            'id': self.id,
            'name': self.name,
            'metadata': self.metadata,
            'allowed_access': [user.username for user in self.allowed_access.all()],
            # Loop variable renamed from 'file' so the builtin is not shadowed.
            'file_list': [data_file.display_name for data_file in self.file_list.all()],
            'owner': str(self.owner)
        }
``` |
[
{
"content": "```python\n#!/usr/bin/env python\n# coding=utf-8\n\n# Copyright (C) 2015 by Serge Poltavski #\n# serge.poltavski@gmail.com #\n# #\n# This progra... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/env python\n# coding=utf-8\n\n# Copyright (C) 2015 by Serge Poltavski #\n# serge.poltavski@gmail.com #\n# #\... | ```python
#!/usr/bin/env python
# coding=utf-8
# Copyright (C) 2015 by Serge Poltavski #
# serge.poltavski@gmail.com #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/> #
__author__ = 'Serge Poltavski'
import imp
import logging
from .xletdatabase import XletDatabase
class XletCalcDatabase(XletDatabase):
    """Xlet database backed by a loadable Python plugin module.

    The plugin file at *path* is imported and queries are delegated to its
    ``inlets(name, args)``, ``outlets(name, args)`` and ``has_object(name)``
    functions.
    """
    # Class-wide counter used to give each loaded plugin module a unique name.
    _counter = 0
    def __init__(self, path, extname):
        """Load the plugin module at *path* for extension *extname*.

        Raises IOError when the plugin file cannot be loaded.
        """
        super(XletCalcDatabase, self).__init__(extname)
        self._extname = extname
        XletCalcDatabase._counter += 1
        self._module = None
        try:
            # NOTE(review): the 'imp' module is deprecated since Python 3.4;
            # importlib would be the modern replacement.
            self._module = imp.load_source("plugin{0:d}".format(XletCalcDatabase._counter), path)
        except IOError as e:
            logging.error("Plugin not found: {0:s}".format(path))
            raise e
    def outlets(self, name, args=None):
        """Return the plugin's outlet spec for *name* (args defaults to [])."""
        if args is None:
            args = []
        return self._module.outlets(name, args)
    def inlets(self, name, args=None):
        """Return the plugin's inlet spec for *name* (args defaults to [])."""
        if args is None:
            args = []
        return self._module.inlets(name, args)
    def has_object(self, name):
        """Return whether the plugin knows an object called *name*."""
        return self._module.has_object(name)
``` |
[
{
"content": "```python\n# -*- coding: utf-8 -*-\n\n# Copyright (c) 2015, Michael Droettboom All rights reserved.\n\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions\n# are met:\n\n# 1. Redistributions of source code must ... | [
{
"content": "<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\n# Copyright (c) 2015, Michael Droettboom All rights reserved.\n\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions\n# are met:\n\n# 1. Redistributions of s... | ```python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Michael Droettboom All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be interpreted
# as representing official policies, either expressed or implied, of
# the FreeBSD Project.
from __future__ import print_function, unicode_literals, absolute_import
TT_PLATFORM = """
Platform identifier codes.
- `APPLE_UNICODE`: Used by Apple to indicate a Unicode character map
and/or name entry. See `TT_APPLE_ID` for corresponding ‘encoding_id’
values. Note that name entries in this format are coded as
big-endian UCS-2 character codes only.
- `MACINTOSH`: Used by Apple to indicate a MacOS-specific charmap
and/or name entry. See `TT_MAC_ID` for corresponding ‘encoding_id’
values. Note that most TrueType fonts contain an Apple roman charmap
to be usable on MacOS systems (even if they contain a Microsoft
charmap as well).
- `ISO`: This value was used to specify ISO/IEC 10646 charmaps. It is
however now deprecated. See `TT_ISO_ID` for a list of
corresponding ‘encoding_id’ values.
- `MICROSOFT`: Used by Microsoft to indicate Windows-specific
charmaps. See `TT_MS_ID` for a list of corresponding ‘encoding_id’
values. Note that most fonts contain a Unicode charmap using
(`TT_PLATFORM.MICROSOFT`, `TT_MS_ID.UNICODE_CS`).
- `CUSTOM`: Used to indicate application-specific charmaps.
- `ADOBE`: This value isn't part of any font format specification, but
is used by FreeType to report Adobe-specific charmaps in an
`CharMap` object. See `TT_ADOBE_ID`.
"""
TT_APPLE_ID = """
Apple-specific encoding values.
- `DEFAULT`: Unicode version 1.0.
- `UNICODE_1_1`: Unicode 1.1; specifies Hangul characters starting at
U+34xx.
- `ISO_10646`: Deprecated (identical to preceding).
- `UNICODE_2_0`: Unicode 2.0 and beyond (UTF-16 BMP only).
- `UNICODE_32`: Unicode 3.1 and beyond, using UTF-32.
- `VARIANT_SELECTOR`: From Adobe, not Apple. Not a normal
cmap. Specifies variations on a real cmap.
"""
TT_ADOBE_ID = """
Adobe-specific encoding values.
- `STANDARD`: Adobe standard encoding.
- `EXPERT`: Adobe expert encoding.
- `CUSTOM`: Adobe custom encoding.
- `LATIN_1`: Adobe Latin 1 encoding.
"""
TT_ISO_ID = """
Standard ISO encodings.
- `ISO_7BIT_ASCII`: ASCII.
- `ISO_10646`: ISO/10646.
- `ISO_8859_1`: Also known as Latin-1.
"""
TT_MAC_ID = """
Macintosh-specific encoding values.
- `ROMAN`
- `JAPANESE`
- `TRADITIONAL_CHINESE`
- `KOREAN`
- `ARABIC`
- `HEBREW`
- `GREEK`
- `RUSSIAN`
- `RSYMBOL`
- `DEVANAGARI`
- `GURMUKHI`
- `GUJARATI`
- `ORIYA`
- `BENGALI`
- `TAMIL`
- `TELUGU`
- `KANNADA`
- `MALAYALAM`
- `SINHALESE`
- `BURMESE`
- `KHMER`
- `THAI`
- `LAOTIAN`
- `GEORGIAN`
- `ARMENIAN`
- `MALDIVIAN`
- `SIMPLIFIED_CHINESE`
- `TIBETAN`
- `MONGOLIAN`
- `GEEZ`
- `SLAVIC`
- `VIETNAMESE`
- `SINDHI`
- `UNINTERP`
"""
TT_MAC_LANGID = """
Language identifier.
Used in the name records of the TTF "name" table if the "platform"
identifier code is `TT_PLATFORM.MACINTOSH`.
"""
TT_MS_ID = """
Microsoft-specific encoding values.
- `SYMBOL_CS`: Corresponds to Microsoft symbol encoding. See
`ENCODING.MS_SYMBOL`.
- `UNICODE_CS`: Corresponds to a Microsoft WGL4 charmap, matching
Unicode. See `ENCODING.UNICODE`.
- `SJIS`: Corresponds to SJIS Japanese encoding. See `ENCODING.SJIS`.
- `GB2312`: Corresponds to Simplified Chinese as used in Mainland
China. See `ENCODING.GB2312`.
- `BIG_5`: Corresponds to Traditional Chinese as used in Taiwan and
Hong Kong. See `ENCODING.BIG5`.
- `WANSUNG`: Corresponds to Korean Wansung encoding. See
`ENCODING.WANSUNG`.
- `JOHAB`: Corresponds to Johab encoding. See `ENCODING.JOHAB`.
- `UCS_4`: Corresponds to UCS-4 or UTF-32 charmaps. This has been
added to the OpenType specification version 1.4 (mid-2001.)
"""
TT_MS_LANGID = """
Language identifier.
Used in in the name records of the TTF "name" table if the "platform"
identifier code is `TT_PLATFORM.MICROSOFT`.
"""
TT_NAME_ID = """
The type of value stored in a `SfntName` record.
- `COPYRIGHT`
- `FONT_FAMILY`
- `FONT_SUBFAMILY`
- `UNIQUE_ID`
- `FULL_NAME`
- `VERSION_STRING`
- `PS_NAME`
- `TRADEMARK`
The following values are from the OpenType spec:
- `MANUFACTURER`
- `DESIGNER`
- `DESCRIPTION`
- `VENDOR_URL`
- `DESIGNER_URL`
- `LICENSE`
- `LICENSE_URL`
- `PREFERRED_FAMILY`
- `PREFERRED_SUBFAMILY`
- `MAC_FULL_NAME`
- `SAMPLE_TEXT`
This is new in OpenType 1.3:
- `CID_FINDFONT_NAME`
This is new in OpenType 1.5:
- `WWS_FAMILY`
- `WWS_SUBFAMILY`
"""
``` |
[
{
"content": "```python\n# -*- coding: utf-8 -*-\n\"\"\"\n pyClanSphere.plugins.bulletin_board.database\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n Our needed tables are declared here (now)\n\n :copyright: (c) 2009 - 2010 by the pyClanSphere Team,\n see AUTHORS for more details... | [
{
"content": "<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\"\"\"\n pyClanSphere.plugins.bulletin_board.database\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n Our needed tables are declared here (now)\n\n :copyright: (c) 2009 - 2010 by the pyClanSphere Team,\n see AUTHORS ... | ```python
# -*- coding: utf-8 -*-
"""
pyClanSphere.plugins.bulletin_board.database
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our needed tables are declared here (now)
:copyright: (c) 2009 - 2010 by the pyClanSphere Team,
see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from pyClanSphere.database import db, metadata
# Pull the SQLAlchemy constructors we need out of the db module into this
# module's namespace (keeps the table definitions below readable).
for var in ['Table', 'Column', 'String', 'Integer', 'Boolean', 'DateTime', 'ForeignKey', 'Text']:
    globals()[var] = getattr(db,var)

# Top-level grouping for forums.
board_categories = Table('board_categories', metadata,
    Column('category_id', Integer, primary_key=True),
    Column('name', String(50)),
    Column('ordering', Integer)
)

board_forums = Table('board_forums', metadata,
    Column('forum_id', Integer, primary_key=True),
    Column('category_id', ForeignKey('board_categories.category_id')),
    Column('name', String(50)),
    Column('description', String(255)),
    Column('ordering', Integer),
    Column('allow_anonymous', Boolean),
    # BUG FIX: 'is_public' was declared twice; the duplicate is removed.
    Column('is_public', Boolean),
    # Denormalised counters, maintained by the application code.
    Column('topiccount', Integer),
    Column('postcount', Integer),
    Column('modification_date', DateTime),
    # use_alter breaks the circular FK dependency forums <-> topics/posts.
    Column('lasttopic_id', Integer, ForeignKey('board_topics.topic_id', name="forum_lasttopic", use_alter=True)),
    Column('lastpost_id', Integer, ForeignKey('board_posts.post_id', name="forum_lastpost", use_alter=True))
)

board_topics = Table('board_topics', metadata,
    Column('topic_id', Integer, primary_key=True),
    Column('forum_id', ForeignKey('board_forums.forum_id')),
    Column('name', String(255)),
    # BUG FIX: the default must be the *callable* datetime.utcnow, not
    # datetime.utcnow() -- calling it at import time would freeze that one
    # timestamp as the default for every row ever inserted.
    Column('date', DateTime, default=datetime.utcnow),
    Column('author_id', ForeignKey('users.user_id')),
    # Fallback display name for anonymous/external authors.
    Column('author_str', String(40)),
    Column('is_sticky', Boolean),
    Column('is_locked', Boolean),
    Column('is_global', Boolean),
    Column('is_solved', Boolean),
    Column('is_external', Boolean),
    Column('lastpost_id', Integer, ForeignKey('board_posts.post_id', name="topic_lastpost", use_alter=True)),
    Column('postcount', Integer),
    Column('modification_date', DateTime)
)

board_posts = Table('board_posts', metadata,
    Column('post_id', Integer, primary_key=True),
    Column('topic_id', ForeignKey('board_topics.topic_id')),
    Column('text', Text),
    Column('author_id', ForeignKey('users.user_id')),
    Column('author_str', String(40)),
    # BUG FIX: pass the callable so each insert gets its own timestamp.
    Column('date', DateTime, default=datetime.utcnow),
    Column('ip', String(40)),
)

# Per-user "mark everything read" timestamp.
board_global_lastread = Table('board_global_lastread', metadata,
    Column('user_id', ForeignKey('users.user_id'), primary_key=True),
    Column('date', DateTime, default=datetime.utcnow)
)

# Per-user, per-topic last-read timestamp.
board_local_lastread = Table('board_local_lastread', metadata,
    Column('user_id', ForeignKey('users.user_id'), primary_key=True),
    Column('topic_id', ForeignKey('board_topics.topic_id'), primary_key=True),
    Column('date', DateTime, default=datetime.utcnow)
)
def init_database(app):
    """Create any missing bulletin-board tables on the application's engine."""
    metadata.create_all(app.database_engine)
# Public API of this module: the table objects other plugin modules import.
__all__ = ['board_categories', 'board_forums', 'board_topics', 'board_posts',
           'board_local_lastread', 'board_global_lastread']
``` |
[
{
"content": "Recreate the original code text:\n```python\n\"\"\"Compatibility fixes for older version of python, numpy and scipy\n\nIf you add content to this file, please give the version of the package\nat which the fixe is no longer needed.\n\"\"\"\n# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@norm... | [
{
"content": "Recreate the original code text:\n<|memory_start|>```python\n\"\"\"Compatibility fixes for older version of python, numpy and scipy\n\nIf you add content to this file, please give the version of the package\nat which the fixe is no longer needed.\n\"\"\"\n# Authors: Emmanuelle Gouillart <emmanuell... | ```python
"""Compatibility fixes for older version of python, numpy and scipy
If you add content to this file, please give the version of the package
at which the fixe is no longer needed.
"""
# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Fabian Pedregosa <fpedregosa@acm.org>
# Lars Buitinck
#
# License: BSD 3 clause
import warnings
import sys
import functools
import os
import errno
import numpy as np
import scipy.sparse as sp
import scipy
try:
from inspect import signature
except ImportError:
from ..externals.funcsigs import signature
def _parse_version(version_string):
version = []
for x in version_string.split('.'):
try:
version.append(int(x))
except ValueError:
# x may be of the form dev-1ea1592
version.append(x)
return tuple(version)
# Parsed library versions used below for feature gating.
np_version = _parse_version(np.__version__)
sp_version = _parse_version(scipy.__version__)
# Use scipy's C implementation of the logistic sigmoid when it is
# numerically stable; otherwise fall back to a tanh-based formulation.
try:
    from scipy.special import expit # SciPy >= 0.10
    with np.errstate(invalid='ignore', over='ignore'):
        if np.isnan(expit(1000)): # SciPy < 0.14
            raise ImportError("no stable expit in scipy.special")
except ImportError:
    def expit(x, out=None):
        """Logistic sigmoid function, ``1 / (1 + exp(-x))``.
        See sklearn.utils.extmath.log_logistic for the log of this function.
        """
        if out is None:
            out = np.empty(np.atleast_1d(x).shape, dtype=np.float64)
        out[:] = x
        # 1 / (1 + exp(-x)) = (1 + tanh(x / 2)) / 2
        # This way of computing the logistic is both fast and stable.
        out *= .5
        np.tanh(out, out)
        out += 1
        out *= .5
        return out.reshape(np.shape(x))
# little danse to see if np.copy has an 'order' keyword argument
# Supported since numpy 1.7.0
if 'order' in signature(np.copy).parameters:
    def safe_copy(X):
        # Copy, but keep the order
        return np.copy(X, order='K')
else:
    # Before an 'order' argument was introduced, numpy wouldn't muck with
    # the ordering
    safe_copy = np.copy
# np.divide gained dtype/casting support late; probe for it and fall back
# to an emulation when missing.
try:
    if (not np.allclose(np.divide(.4, 1, casting="unsafe"),
                        np.divide(.4, 1, casting="unsafe", dtype=np.float64))
            or not np.allclose(np.divide(.4, 1), .4)):
        raise TypeError('Divide not working with dtype: '
                        'https://github.com/numpy/numpy/issues/3484')
    divide = np.divide
except TypeError:
    # Compat for old versions of np.divide that do not provide support for
    # the dtype args
    def divide(x1, x2, out=None, dtype=None):
        out_orig = out
        if out is None:
            out = np.asarray(x1, dtype=dtype)
            if out is x1:
                # asarray returned x1 itself; copy so x1 is not mutated below.
                out = x1.copy()
        else:
            if out is not x1:
                out[:] = x1
        if dtype is not None and out.dtype != dtype:
            out = out.astype(dtype)
        out /= x2
        if out_orig is None and np.isscalar(x1):
            out = np.asscalar(out)
        return out
# ndarray.astype gained a 'copy' keyword in numpy 1.7.0; emulate it before.
try:
    np.array(5).astype(float, copy=False)
except TypeError:
    # Compat where astype accepted no copy argument (numpy < 1.7.0)
    def astype(array, dtype, copy=True):
        if not copy and array.dtype == dtype:
            return array
        return array.astype(dtype)
else:
    astype = np.ndarray.astype
# scipy < 0.14 sparse matrices have no axis-aware min/max; probe for the
# feature and otherwise use code taken from the scipy 0.14 codebase.
try:
    with warnings.catch_warnings(record=True):
        # Don't raise the numpy deprecation warnings that appear in
        # 1.9, but avoid Python bug due to simplefilter('ignore')
        warnings.simplefilter('always')
        sp.csr_matrix([1.0, 2.0, 3.0]).max(axis=0)
except (TypeError, AttributeError):
    # in scipy < 14.0, sparse matrix min/max doesn't accept an `axis` argument
    # the following code is taken from the scipy 0.14 codebase
    def _minor_reduce(X, ufunc):
        # Reduce over each row (CSR) / column (CSC), skipping empty ones.
        major_index = np.flatnonzero(np.diff(X.indptr))
        if X.data.size == 0 and major_index.size == 0:
            # Numpy < 1.8.0 don't handle empty arrays in reduceat
            value = np.zeros_like(X.data)
        else:
            value = ufunc.reduceat(X.data, X.indptr[major_index])
        return major_index, value
    def _min_or_max_axis(X, axis, min_or_max):
        # Axis-wise reduction; implicit zeros participate via the
        # min_or_max(..., 0) comparison for rows/columns that are not full.
        N = X.shape[axis]
        if N == 0:
            raise ValueError("zero-size array to reduction operation")
        M = X.shape[1 - axis]
        mat = X.tocsc() if axis == 0 else X.tocsr()
        mat.sum_duplicates()
        major_index, value = _minor_reduce(mat, min_or_max)
        not_full = np.diff(mat.indptr)[major_index] < N
        value[not_full] = min_or_max(value[not_full], 0)
        mask = value != 0
        major_index = np.compress(mask, major_index)
        value = np.compress(mask, value)
        from scipy.sparse import coo_matrix
        if axis == 0:
            res = coo_matrix((value, (np.zeros(len(value)), major_index)),
                             dtype=X.dtype, shape=(1, M))
        else:
            res = coo_matrix((value, (major_index, np.zeros(len(value)))),
                             dtype=X.dtype, shape=(M, 1))
        return res.A.ravel()
    def _sparse_min_or_max(X, axis, min_or_max):
        if axis is None:
            # Global reduction over all entries, including implicit zeros.
            if 0 in X.shape:
                raise ValueError("zero-size array to reduction operation")
            zero = X.dtype.type(0)
            if X.nnz == 0:
                return zero
            m = min_or_max.reduce(X.data.ravel())
            if X.nnz != np.product(X.shape):
                m = min_or_max(zero, m)
            return m
        if axis < 0:
            axis += 2
        if (axis == 0) or (axis == 1):
            return _min_or_max_axis(X, axis, min_or_max)
        else:
            raise ValueError("invalid axis, use 0 for rows, or 1 for columns")
    def sparse_min_max(X, axis):
        # Returns (per-axis minima, per-axis maxima) as dense 1-D arrays.
        return (_sparse_min_or_max(X, axis, np.minimum),
                _sparse_min_or_max(X, axis, np.maximum))
else:
    def sparse_min_max(X, axis):
        # Modern scipy: delegate directly to the sparse matrix methods.
        return (X.min(axis=axis).toarray().ravel(),
                X.max(axis=axis).toarray().ravel())
# numpy.argpartition / partition appeared in 1.8.0; degrade to full sorts.
try:
    from numpy import argpartition
except ImportError:
    # numpy.argpartition was introduced in v 1.8.0
    def argpartition(a, kth, axis=-1, kind='introselect', order=None):
        return np.argsort(a, axis=axis, order=order)
try:
    from numpy import partition
except ImportError:
    warnings.warn('Using `sort` instead of partition.'
                  'Upgrade numpy to 1.8 for better performace on large number'
                  'of clusters')
    def partition(a, kth, axis=-1, kind='introselect', order=None):
        return np.sort(a, axis=axis, order=order)
try:
    from itertools import combinations_with_replacement
except ImportError:
    # Backport of itertools.combinations_with_replacement for Python 2.6,
    # from Python 3.4 documentation (http://tinyurl.com/comb-w-r), copyright
    # Python Software Foundation (https://docs.python.org/3/license.html)
    def combinations_with_replacement(iterable, r):
        # combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC
        pool = tuple(iterable)
        n = len(pool)
        if not n and r:
            return
        indices = [0] * r
        yield tuple(pool[i] for i in indices)
        while True:
            for i in reversed(range(r)):
                if indices[i] != n - 1:
                    break
            else:
                return
            indices[i:] = [indices[i] + 1] * (r - i)
            yield tuple(pool[i] for i in indices)
if np_version < (1, 7):
    # Prior to 1.7.0, np.frombuffer wouldn't work for empty first arg.
    def frombuffer_empty(buf, dtype):
        if len(buf) == 0:
            return np.empty(0, dtype=dtype)
        else:
            return np.frombuffer(buf, dtype=dtype)
else:
    frombuffer_empty = np.frombuffer
# numpy.in1d backport for numpy < 1.8.
if np_version < (1, 8):
    def in1d(ar1, ar2, assume_unique=False, invert=False):
        # Backport of numpy function in1d 1.8.1 to support numpy 1.6.2
        # Ravel both arrays, behavior for the first array could be different
        ar1 = np.asarray(ar1).ravel()
        ar2 = np.asarray(ar2).ravel()
        # This code is significantly faster when the condition is satisfied.
        if len(ar2) < 10 * len(ar1) ** 0.145:
            # Small ar2: plain O(len(ar1) * len(ar2)) comparisons win.
            if invert:
                mask = np.ones(len(ar1), dtype=np.bool)
                for a in ar2:
                    mask &= (ar1 != a)
            else:
                mask = np.zeros(len(ar1), dtype=np.bool)
                for a in ar2:
                    mask |= (ar1 == a)
            return mask
        # Otherwise use sorting
        if not assume_unique:
            ar1, rev_idx = np.unique(ar1, return_inverse=True)
            ar2 = np.unique(ar2)
        ar = np.concatenate((ar1, ar2))
        # We need this to be a stable sort, so always use 'mergesort'
        # here. The values from the first array should always come before
        # the values from the second array.
        order = ar.argsort(kind='mergesort')
        sar = ar[order]
        if invert:
            bool_ar = (sar[1:] != sar[:-1])
        else:
            bool_ar = (sar[1:] == sar[:-1])
        flag = np.concatenate((bool_ar, [invert]))
        indx = order.argsort(kind='mergesort')[:len(ar1)]
        if assume_unique:
            return flag[indx]
        else:
            return flag[indx][rev_idx]
else:
    from numpy import in1d
# scipy's sparse lsqr had a bug fixed in 0.15; use a vendored copy before that.
if sp_version < (0, 15):
    # Backport fix for scikit-learn/scikit-learn#2986 / scipy/scipy#4142
    from ._scipy_sparse_lsqr_backport import lsqr as sparse_lsqr
else:
    from scipy.sparse.linalg import lsqr as sparse_lsqr
# Picklable replacement for functools.partial on Python 2.6.
if sys.version_info < (2, 7, 0):
    # partial cannot be pickled in Python 2.6
    # http://bugs.python.org/issue1398
    class partial(object):
        def __init__(self, func, *args, **keywords):
            functools.update_wrapper(self, func)
            self.func = func
            self.args = args
            self.keywords = keywords
        def __call__(self, *args, **keywords):
            # Positional args are appended; keyword args override stored ones.
            args = self.args + args
            kwargs = self.keywords.copy()
            kwargs.update(keywords)
            return self.func(*args, **kwargs)
else:
    from functools import partial
def parallel_helper(obj, methodname, *args, **kwargs):
    """Invoke ``obj.methodname(*args, **kwargs)`` by name.

    Works around Python 2's inability to pickle bound methods: parallel
    workers receive the object plus the method *name* rather than the
    bound method itself.
    """
    bound_method = getattr(obj, methodname)
    return bound_method(*args, **kwargs)
if np_version < (1, 6, 2):
    # Allow bincount to accept empty arrays
    # https://github.com/numpy/numpy/commit/40f0844846a9d7665616b142407a3d74cb65a040
    def bincount(x, weights=None, minlength=None):
        if len(x) > 0:
            return np.bincount(x, weights, minlength)
        else:
            # Empty input: return an all-zero array of length minlength.
            if minlength is None:
                minlength = 0
            minlength = np.asscalar(np.asarray(minlength, dtype=np.intp))
            return np.zeros(minlength, dtype=np.intp)
else:
    from numpy import bincount
# os.makedirs gained 'exist_ok' in Python 3.2; emulate it for older versions.
if 'exist_ok' in signature(os.makedirs).parameters:
    makedirs = os.makedirs
else:
    def makedirs(name, mode=0o777, exist_ok=False):
        """makedirs(name [, mode=0o777][, exist_ok=False])
        Super-mkdir; create a leaf directory and all intermediate ones. Works
        like mkdir, except that any intermediate path segment (not just the
        rightmost) will be created if it does not exist. If the target
        directory already exists, raise an OSError if exist_ok is False.
        Otherwise no exception is raised. This is recursive.
        """
        try:
            os.makedirs(name, mode=mode)
        except OSError as e:
            # Swallow EEXIST only when requested and the path is a directory.
            if (not exist_ok or e.errno != errno.EEXIST
                    or not os.path.isdir(name)):
                raise
# numpy.array_equal before 1.8.1 choked on inputs it could not asarray.
if np_version < (1, 8, 1):
    def array_equal(a1, a2):
        # copy-paste from numpy 1.8.1
        try:
            a1, a2 = np.asarray(a1), np.asarray(a2)
        except:
            return False
        if a1.shape != a2.shape:
            return False
        return bool(np.asarray(a1 == a2).all())
else:
    from numpy import array_equal
# scipy.stats.rankdata gained the 'method' argument in 0.13; backport it.
if sp_version < (0, 13, 0):
    def rankdata(a, method='average'):
        if method not in ('average', 'min', 'max', 'dense', 'ordinal'):
            raise ValueError('unknown method "{0}"'.format(method))
        arr = np.ravel(np.asarray(a))
        # 'ordinal' needs a stable sort so equal values keep input order.
        algo = 'mergesort' if method == 'ordinal' else 'quicksort'
        sorter = np.argsort(arr, kind=algo)
        inv = np.empty(sorter.size, dtype=np.intp)
        inv[sorter] = np.arange(sorter.size, dtype=np.intp)
        if method == 'ordinal':
            return inv + 1
        arr = arr[sorter]
        # obs marks the first occurrence of each distinct value.
        obs = np.r_[True, arr[1:] != arr[:-1]]
        dense = obs.cumsum()[inv]
        if method == 'dense':
            return dense
        # cumulative counts of each unique value
        count = np.r_[np.nonzero(obs)[0], len(obs)]
        if method == 'max':
            return count[dense]
        if method == 'min':
            return count[dense - 1] + 1
        # average method
        return .5 * (count[dense] + count[dense - 1] + 1)
else:
    from scipy.stats import rankdata
``` |
[
{
"content": "Reproduce the code exactly as provided (keep formatting):\n```python\n# This file is part of DevParrot.\n#\n# Author: Matthieu Gautier <matthieu.gautier@devparrot.org>\n#\n# DevParrot is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Publi... | [
{
"content": "Reproduce the code exactly as provided (keep formatting):\n<|memory_start|>```python\n# This file is part of DevParrot.\n#\n# Author: Matthieu Gautier <matthieu.gautier@devparrot.org>\n#\n# DevParrot is free software: you can redistribute it and/or modify\n# it under the terms of the G... | ```python
# This file is part of DevParrot.
#
# Author: Matthieu Gautier <matthieu.gautier@devparrot.org>
#
# DevParrot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DevParrot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DevParrot. If not, see <http://www.gnu.org/licenses/>.
#
#
# Copyright 2011-2013 Matthieu Gautier
from devparrot.core.command import Command
from devparrot.core.constraints import Boolean
# Registers the 'split' user command; 'vertical' defaults to False
# (horizontal split) when not given on the command line.
@Command(
    _section='core',
    vertical = Boolean(default= lambda : False)
)
def split(vertical):
    """Split the current view into two separate panes."""
    # Imports deferred to call time to avoid import cycles at registration.
    from devparrot.core import session
    from devparrot.core.ui import viewContainer
    viewContainer.split(session.get_currentContainer().get_documentView(), vertical, True)
# Registers the 'unsplit' user command.
@Command(_section='core')
def unsplit():
    """Unsplit (merge) two separate panes back into one."""
    from devparrot.core import session
    from devparrot.core.ui import viewContainer
    container = session.get_currentContainer()
    viewContainer.unsplit(container)
``` |
[
{
"content": "```python\n#!/usr/bin/env python\n# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com\n#\n# Python tests originally created or extracted from other peoples work. The\n# parts were too small to be protected.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/env python\n# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com\n#\n# Python tests originally created or extracted from other peoples work. The\n# parts were too small to be protected.\n#\n# Licensed under the Apache License, Version 2.0 (t... | ```python
#!/usr/bin/env python
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os, sys
# Find common code relative in file system. Not using packages for test stuff.
sys.path.insert(
0,
os.path.normpath(
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
".."
)
)
)
from test_common import (
my_print,
setup,
createSearchMode,
compareWithCPython,
withPythonPathChange
)
python_version = setup(needs_io_encoding = True)
search_mode = createSearchMode()
extra_options = os.environ.get("NUITKA_EXTRA_OPTIONS","")
for filename in sorted(os.listdir('.')):
if not os.path.isdir(filename) or \
filename.endswith(".build") or \
filename.endswith(".dist"):
continue
filename = os.path.relpath(filename)
# For these, we expect that they will fail.
expected_errors = [
"module_exits",
"main_raises",
"main_raises2",
"package_contains_main"
]
# Allowed after Python3, packages need no more "__init__.py"
if python_version < "3.3":
expected_errors.append("package_missing_init")
if filename not in expected_errors:
extra_flags = ["expect_success"]
else:
extra_flags = ["expect_failure"]
if filename in ("reimport_main_static", "package_missing_init",
"dash_import", "package_contains_main", "case_imports3",
"import_variants", "package_init_import"):
extra_flags.append("ignore_warnings")
extra_flags.append("remove_output")
extra_flags.append("recurse_all")
# Use the original __file__ value, at least one case warns about things
# with filename included.
extra_flags.append("original_file")
# Cannot include the files with syntax errors, these would then become
# ImportError, but that's not the test. In all other cases, use two
# step execution, which will not add the program original source to
# PYTHONPATH.
if filename != "syntax_errors":
extra_flags.append("two_step_execution")
else:
extra_flags.append("binary_python_path")
if filename == "plugin_import":
os.environ["NUITKA_EXTRA_OPTIONS"] = extra_options + \
" --recurse-directory=%s/some_package" % (
os.path.abspath(filename)
)
elif filename == "reimport_main_dynamic":
if python_version < "3":
os.environ["NUITKA_EXTRA_OPTIONS"] = extra_options + \
" --recurse-directory=%s" % (
os.path.abspath(filename)
)
else:
os.environ["NUITKA_EXTRA_OPTIONS"] = extra_options + \
" --recurse-pattern=%s/*.py" % (
os.path.abspath(filename)
)
extra_flags.append("ignore_warnings")
else:
os.environ["NUITKA_EXTRA_OPTIONS"] = extra_options
active = search_mode.consider(
dirname = None,
filename = filename
)
if active:
my_print("Consider output of recursively compiled program:", filename)
for filename_main in os.listdir(filename):
if filename_main.endswith("Main.py"):
break
if filename_main.endswith("Main"):
break
else:
sys.exit(
"""\
Error, no file ends with 'Main.py' or 'Main' in %s, incomplete test case.""" % (
filename
)
)
extra_python_path = [
os.path.abspath(os.path.join(filename,entry))
for entry in
os.listdir(filename)
if entry.startswith("path")
]
with withPythonPathChange(extra_python_path):
compareWithCPython(
dirname = filename,
filename = filename_main,
extra_flags = extra_flags,
search_mode = search_mode,
needs_2to3 = False
)
else:
my_print("Skipping", filename)
search_mode.finish()
``` |
[
{
"content": "Repeat the code precisely:\n```python\n# rFactor Remote LCD\n# Copyright (C) 2014 Ingo Ruhnke <grumbel@gmail.com>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, eith... | [
{
"content": "Repeat the code precisely:\n<|memory_start|>```python\n# rFactor Remote LCD\n# Copyright (C) 2014 Ingo Ruhnke <grumbel@gmail.com>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software ... | ```python
# rFactor Remote LCD
# Copyright (C) 2014 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import rfactorlcd
class LapTime:
def __init__(self):
self.sector1 = 0
self.sector2 = 0
self.sector3 = 0
@property
def total(self):
return self.sector1 + self.sector2 + self.sector3
class LapTimes(object):
"""Lap time history for a vehicle in a single session"""
def __init__(self):
self.laps = {}
self.current_sector = None
@property
def best_time(self):
if self.laps == []:
return 0
else:
return min([lap.total for lap in self.laps])
def last_lap(self):
last_lap = -1
last_times = None
for lap, times in self.laps.items():
if lap > last_lap:
last_lap = lap
last_times = times
return last_times
def update(self, state):
"""Update current LapTime history with info from VehicleState"""
if state.sector == 0 and state.total_laps == 0:
pass
elif self.current_sector != state.sector:
self.current_sector = state.sector
if state.sector == 0:
lap = state.total_laps - 1
else:
lap = state.total_laps
if lap in self.laps:
lap_time = self.laps[lap]
else:
lap_time = LapTime()
self.laps[lap] = lap_time
# set the sector time in the LapTime object
if state.sector == 1:
lap_time.sector1 = state.cur_sector1
elif state.sector == 2:
lap_time.sector2 = state.cur_sector2 - state.cur_sector1
elif state.sector == 0:
lap_time.sector3 = state.last_lap_time - state.cur_sector2
else:
logging.error("unknown sector: %d" % state.sector)
class WheelState(object):
def __init__(self):
self.rotation = 0.0
self.suspension_deflection = 0.0
self.ride_height = 0.0
self.tire_load = 0.0
self.lateral_force = 0.0
self.grip_fract = 0.0
self.brake_temp = 0.0
self.pressure = 0.0
self.temperature = [0.0, 0.0, 0.0]
self.wear = 0.0
self.surface_type = 0
self.flat = 0
self.detached = 0
class VehicleState(object):
def __init__(self):
self.is_player = 0
self.control = 0
self.driver_name = ""
self.vehicle_name = ""
self.vehicle_class = ""
self.total_laps = 0
self.sector = 0
self.finish_status = 0
self.lap_dist = 0
self.path_lateral = 0.0
self.track_edge = 0.0
self.in_pits = 0
self.place = 0
self.time_behind_next = 0.0
self.laps_behind_next = 0
self.time_behind_leader = 0.0
self.laps_behind_leader = 0
self.best_sector1 = 0.0
self.best_sector2 = 0.0
self.best_lap_time = 0.0
self.last_sector1 = 0.0
self.last_sector2 = 0.0
self.last_lap_time = 0.0
self.cur_sector1 = 0.0
self.cur_sector2 = 0.0
self.num_pitstops = 0
self.num_penalties = 0
self.lap_start_et = 0.0
self.lap_times = LapTimes()
class rFactorState(object):
def __init__(self):
self.session_id = 0
# telemetry defaults
self.lap_number = 0
self.lap_start_et = 0.0
self.pos = (0.0, 0.0, 0.0)
self.local_vel = (0.0, 0.0, 0.0)
self.local_accel = (0.0, 0.0, 0.0)
self.ori_x = (0.0, 0.0, 0.0)
self.ori_y = (0.0, 0.0, 0.0)
self.ori_z = (0.0, 0.0, 0.0)
self.local_rot = (0.0, 0.0, 0.0)
self.local_rot_accel = (0.0, 0.0, 0.0)
self.gear = 0
self.rpm = 0.0
self.max_rpm = 0.0
self.clutch_rpm = 0.0
self.fuel = 0.0
self.water_temp = 0.0
self.oil_temp = 0.0
self.throttle = 0.0
self.brake = 0.0
self.steering = 0.0
self.clutch = 0.0
self.steering_arm_force = 0.0
self.scheduled_stops = 0
self.overheating = 0
self.detached = 0
self.dent_severity = [0, 0, 0, 0, 0, 0, 0, 0]
self.wheels = [WheelState(), WheelState(), WheelState(), WheelState()]
self.num_vehicles = 0
self.player = None
self.vehicles = []
# info
self.track_name = ""
self.player_name = ""
self.plr_file_name = ""
self.end_e_t = 0.0
self.max_laps = 0
self.lap_dist = 1.0
# score
self.game_phase = 0
self.yellow_flag_state = 0
self.sector_flag = [0, 0, 0]
self.start_light = 0
self.num_red_lights = 0
self.session = 0
self.current_e_t = 0.0
self.ambient_temp = 0.0
self.track_temp = 0.0
# Backward compatibility hacks:
self.speed = 0
self.laptime = "1:23:45"
self.best_lap_driver = ""
@property
def best_lap_time(self):
if self.vehicles != []:
best = self.vehicles[0].best_lap_time
for veh in self.vehicles[1:]:
if veh.best_lap_time < best:
best = veh.best_lap_time
self.best_lap_driver = veh.driver_name # FIXME: hack
return best
else:
return 0
def on_telemetry(self, msg):
self.delta_time = msg.read_float()
self.lap_number = msg.read_int()
self.lap_start_et = msg.read_float()
# missing: mVehicleName[64]
# missing: mTrackName[64]
self.pos = msg.read_vect()
self.local_vel = msg.read_vect()
self.local_accel = msg.read_vect()
self.ori_x = msg.read_vect()
self.ori_y = msg.read_vect()
self.ori_z = msg.read_vect()
self.local_rot = msg.read_vect()
self.local_rot_accel = msg.read_vect()
self.gear = msg.read_int()
self.rpm = msg.read_float()
self.max_rpm = msg.read_float()
self.clutch_rpm = msg.read_float()
self.fuel = msg.read_float()
self.water_temp = msg.read_float()
self.oil_temp = msg.read_float()
self.throttle = msg.read_float()
self.brake = msg.read_float()
self.steering = msg.read_float()
self.clutch = msg.read_float()
self.steering_arm_force = msg.read_float()
self.scheduled_stops = msg.read_char()
self.overheating = msg.read_char()
self.detached = msg.read_char()
self.dent_severity = msg.read_multi_char(8)
self.last_impact_e_t = msg.read_float()
self.last_impact_magnitude = msg.read_float()
self.last_impact_pos = msg.read_vect()
# give speed in km/h
self.speed = -self.local_vel[2] * 3.6
for i in range(0, 4):
self.wheels[i].rotation = msg.read_float()
self.wheels[i].suspension_deflection = msg.read_float()
self.wheels[i].ride_height = msg.read_float()
self.wheels[i].tire_load = msg.read_float()
self.wheels[i].lateral_force = msg.read_float()
self.wheels[i].grip_fract = msg.read_float()
self.wheels[i].brake_temp = msg.read_float()
self.wheels[i].pressure = msg.read_float()
self.wheels[i].temperature = [msg.read_float(),
msg.read_float(),
msg.read_float()]
self.wheels[i].wear = msg.read_float()
# missing: mTerrainName[16]
self.wheels[i].surface_type = msg.read_char()
self.wheels[i].flat = msg.read_char()
self.wheels[i].detached = msg.read_char()
def on_vehicle(self, msg):
self.num_vehicles = msg.read_int()
if self.num_vehicles != len(self.vehicles):
self.vehicles = []
for i in range(self.num_vehicles):
self.vehicles.append(VehicleState())
for i in range(0, self.num_vehicles):
self.vehicles[i].is_player = msg.read_char()
self.vehicles[i].control = msg.read_char()
self.vehicles[i].driver_name = msg.read_string()
self.vehicles[i].vehicle_name = msg.read_string()
self.vehicles[i].vehicle_class = msg.read_string()
self.vehicles[i].total_laps = msg.read_short()
# rFactor numbers sectors 1, 2, 0, convert them to 0, 1, 2
self.vehicles[i].sector = (msg.read_char() + 2) % 3
self.vehicles[i].finish_status = msg.read_char()
self.vehicles[i].lap_dist = msg.read_float()
self.vehicles[i].path_lateral = msg.read_float()
self.vehicles[i].track_edge = msg.read_float()
self.vehicles[i].in_pits = msg.read_char()
self.vehicles[i].place = msg.read_char()
self.vehicles[i].time_behind_next = msg.read_float()
self.vehicles[i].laps_behind_next = msg.read_int()
self.vehicles[i].time_behind_leader = msg.read_float()
self.vehicles[i].laps_behind_leader = msg.read_int()
self.vehicles[i].best_sector1 = msg.read_float()
self.vehicles[i].best_sector2 = msg.read_float()
self.vehicles[i].best_lap_time = msg.read_float()
# these times are only updated going into a new lap
self.vehicles[i].last_sector1 = msg.read_float()
self.vehicles[i].last_sector2 = msg.read_float()
self.vehicles[i].last_lap_time = msg.read_float()
self.vehicles[i].cur_sector1 = msg.read_float()
self.vehicles[i].cur_sector2 = msg.read_float()
self.vehicles[i].num_pitstops = msg.read_short()
self.vehicles[i].num_penalties = msg.read_short()
self.vehicles[i].lap_start_et = msg.read_float()
self.vehicles[i].pos = msg.read_vect()
self.vehicles[i].local_vel = msg.read_vect()
self.vehicles[i].local_accel = msg.read_vect()
self.vehicles[i].ori_x = msg.read_vect()
self.vehicles[i].ori_y = msg.read_vect()
self.vehicles[i].ori_z = msg.read_vect()
self.vehicles[i].local_rot = msg.read_vect()
self.vehicles[i].local_rot_accel = msg.read_vect()
if self.vehicles[i].is_player:
self.player = self.vehicles[i]
self.vehicles[i].lap_times.update(self.vehicles[i])
def on_score(self, msg):
self.game_phase = msg.read_char()
self.yellow_flag_state = msg.read_char()
self.sector_flag = msg.read_multi_char(3)
self.start_light = msg.read_char()
self.num_red_lights = msg.read_char()
self.in_realtime = msg.read_char()
self.session = msg.read_int()
self.current_e_t = msg.read_float()
self.ambient_temp = msg.read_float()
self.track_temp = msg.read_float()
self.dark_cloud = msg.read_float()
self.raining = msg.read_float()
self.wind = msg.read_vect()
self.on_path_wetness = msg.read_float()
self.off_path_wetness = msg.read_float()
def on_info(self, msg):
self.track_name = msg.read_string()
self.player_name = msg.read_string()
self.plr_file_name = msg.read_string()
self.end_e_t = msg.read_float()
self.max_laps = msg.read_int()
self.lap_dist = msg.read_float()
# missing mResultsStream
def on_start_realtime(self, msg):
pass
def on_end_realtime(self, msg):
pass
def on_start_session(self, msg):
self.session_id += 1
self.vehicles = []
logging.info("on_start_session")
def on_end_session(self, msg):
logging.info("on_end_session")
def dispatch_message(self, tag, payload):
msg = rfactorlcd.BinaryDecoder(payload)
if tag == "STSS":
self.on_start_session(msg)
elif tag == "EDSS":
self.on_end_session(msg)
elif tag == "STRT":
self.on_start_realtime(msg)
elif tag == "EDRT":
self.on_end_realtime(msg)
elif tag == "VHCL":
self.on_vehicle(msg)
elif tag == "TLMT":
self.on_telemetry(msg)
elif tag == "SCOR":
self.on_score(msg)
elif tag == "INFO":
self.on_info(msg)
else:
print "error: unhandled tag: %s" % tag
# EOF #
``` |
[
{
"content": "Reconstruct the code exactly:\n```python\nimport re\nfrom math import cos, sqrt, tan, sin, atan, trunc, radians, degrees\n\n\ndef getFloat(value):\n try:\n return float(value)\n except ValueError:\n value = str(value).replace('B', '8').replace('O', '0').replace(',', '.')\n ... | [
{
"content": "Reconstruct the code exactly:\n<|memory_start|>```python\nimport re\nfrom math import cos, sqrt, tan, sin, atan, trunc, radians, degrees\n\n\ndef getFloat(value):\n try:\n return float(value)\n except ValueError:\n value = str(value).replace('B', '8').replace('O', '0').replace(... | ```python
import re
from math import cos, sqrt, tan, sin, atan, trunc, radians, degrees
def getFloat(value):
try:
return float(value)
except ValueError:
value = str(value).replace('B', '8').replace('O', '0').replace(',', '.')
_errcount = 0
for i in value:
if not re.match(r'[0-9\.]]', i):
_errcount += 1
if _errcount == 0:
return float(value)
return None
class Location:
"""
Location class for position calculation and conversion, can handle GK and Wgs84 notation - default Wgs84
"""
earthRadius = 6378137.0 # Earth radius in m
aBessel = 6377397.155
eeBessel = 0.0066743722296294277832
ScaleFactor = 0.00000982
RotXRad = -7.16069806998785E-06
RotYRad = 3.56822869296619E-07
RotZRad = 7.06858347057704E-06
ShiftXMeters = 591.28
ShiftYMeters = 81.35
ShiftZMeters = 396.39
def __init__(self, x, y, geotype='wgs84'): # wgs84 (default), gk
self.x = getFloat(x)
self.y = getFloat(y)
self.geotype = geotype.lower()
def __repr__(self):
return u"<location: {}, {} ({})>".format(self.x, self.y, self.geotype)
def getLatLng(self, use_wgs84=None):
if self.geotype == 'gk': # gauss kruger
(x, y) = self._gk_transformation()
return Location.seven_parameter_helmert_transf(x, y, use_wgs84)
else:
return self.x, self.y
def getDistance(self, lat, lng):
"""
get distance in meters
"""
(lat1, lng1) = self.getLatLng()
x = ((radians(lng - lng1)) * cos(0.5 * (radians(lat + lat1))))**2
return Location.earthRadius * sqrt(x + (radians(lat - lat1))**2)
def _gk_transformation(self): # transformation for gauss kruger
# Check for invalid Parameters
if not ((self.x > 1000000) and (self.y > 1000000)) and self.geotype != 'gk':
raise ValueError("No valid Gauss-Kruger-Code.")
# Variables to prepare the geovalues
bii = (self.y / 10000855.7646)**2
bf = (325632.08677 * (self.y / 10000855.7646) * (((((0.00000562025 * bii + 0.00022976983) * bii - 0.00113566119) * bii + 0.00424914906) * bii - 0.00831729565) * bii + 1)) / degrees(3600)
g2 = 0.0067192188 * cos(bf)**2
fa = (self.x - trunc(self.x / 1000000) * 1000000 - 500000) / (6398786.849 / sqrt(1 + g2))
geo_dez_right = degrees(bf - fa**2 * tan(bf) * (1 + g2) / 2 + fa**4 * tan(bf) * (5 + 3 * tan(bf)**2 + 6 * g2 - 6 * g2 * tan(bf)**2) / 24)
geo_dez_height = degrees(fa - fa**3 * (1 + 2 * tan(bf)**2 + g2) / 6 + fa**5 * (1 + 28 * tan(bf)**2 + 24 * tan(bf)**4) / 120) / cos(bf) + trunc(self.x / 1000000) * 3
return geo_dez_right, geo_dez_height
@staticmethod
def seven_parameter_helmert_transf(x, y, use_wgs84=False):
# calculate coordinates with helmert transformation
latitudeit = 99999999
if use_wgs84:
ee = 0.0066943799
else:
ee = 0.00669438002290
n = Location.aBessel / sqrt(1 - (Location.eeBessel * sin(radians(x))**2))
cartesian_x_meters = n * cos(radians(x)) * cos(radians(y))
cartesian_y_meters = n * cos(radians(x)) * sin(radians(y))
cartesian_z_meters = n * (1 - Location.eeBessel) * sin(radians(x))
cart_output_x_meters = (1 + Location.ScaleFactor) * cartesian_x_meters + Location.RotZRad * cartesian_y_meters - Location.RotYRad * cartesian_z_meters + Location.ShiftXMeters
cart_output_y_meters = -1 * Location.RotZRad * cartesian_x_meters + (1 + Location.ScaleFactor) * cartesian_y_meters + Location.RotXRad * cartesian_z_meters + Location.ShiftYMeters
cart_output_z_meters = Location.RotYRad * cartesian_x_meters - Location.RotXRad * cartesian_y_meters + (1 + Location.ScaleFactor) * cartesian_z_meters + Location.ShiftZMeters
geo_dez_height = atan(cart_output_y_meters / cart_output_x_meters)
latitude = atan(cart_output_z_meters / sqrt((cart_output_x_meters * cart_output_x_meters) + (cart_output_y_meters * cart_output_y_meters)))
while abs(latitude - latitudeit) >= 0.000000000000001:
latitudeit = latitude
n = Location.earthRadius / sqrt(1 - ee * sin(latitude)**2)
latitude = atan((cart_output_z_meters + ee * n * sin(latitudeit)) / sqrt(cart_output_x_meters**2 + cart_output_y_meters * cart_output_y_meters))
return degrees(latitude), degrees(geo_dez_height)
if __name__ == "__main__":
# test values
# location1 (48.124570, 11.582328)
lkx1 = 4469012.74
lky1 = 5331920.84
# location2 (48.1103206, 11.7233732)
lkx2 = 4479507.160
lky2 = "53302B9,O32" # test value with error
l1 = Location(lkx1, lky1, geotype='gk')
l2 = Location(lkx2, lky2, geotype='gk')
l3 = Location(48.1103206, 11.7233732) # test coordinates (imprecision)
print "l1: {}\nl2: {}\nl3: {}".format(l1, l2, l3)
print "\nl2->l3 {:8.2f} m (precision)".format(l2.getDistance(*l3.getLatLng()))
print "l2->l1 {:8.2f} m".format(l2.getDistance(*l1.getLatLng()))
``` |
[
{
"content": "Write the code verbatim:\n```python\nfrom django.conf.urls import patterns, url\nfrom django.views.generic import TemplateView\nfrom .views import StudentListLectureView, InstructorListLectureView, RegisteredLectureCreateView, CreateLectureView, \\\n\tLectureDetailView, LectureListView, UserRegist... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\nfrom django.conf.urls import patterns, url\nfrom django.views.generic import TemplateView\nfrom .views import StudentListLectureView, InstructorListLectureView, RegisteredLectureCreateView, CreateLectureView, \\\n\tLectureDetailView, LectureList... | ```python
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import StudentListLectureView, InstructorListLectureView, RegisteredLectureCreateView, CreateLectureView, \
LectureDetailView, LectureListView, UserRegisteredLectureView, UserRegisteredLectureApproveView, \
CreateAttendanceView, RosterDetailView, RegistrationDeleteView, StudentLectureDetailView
urlpatterns = patterns('',
url(r'^instructor/$', InstructorListLectureView.as_view(), name='instructor-class-list'),
url(r'^student/$', StudentListLectureView.as_view(), name='student-class-list'),
url(r'^lecture/register/$',RegisteredLectureCreateView.as_view(), name='register-lecture'),
url(r'^lecture/registration/(?P<pk>\d+)/delete/$', RegistrationDeleteView.as_view(), name='delete-registration'),
url(r'^lecture/pending/(?P<pk>\d+)/(?P<approved>[-_\w]+)/$', UserRegisteredLectureApproveView.as_view(), name='approve-deny-lecture'),
url(r'^lecture/create/$', CreateLectureView.as_view(), name='create-lecture'),
url(r'^lecture/(?P<pk>[-_\w]+)/$', LectureDetailView.as_view(), name='detail-lecture'),
url(r'^lecture/(?P<pk>[-_\w]+)/student/$', StudentLectureDetailView.as_view(), name='student-detail-lecture'),
url(r'^lecture/(?P<pk>[-_\w]+)/create/attendance/$', CreateAttendanceView.as_view() , name='create-attendance'),
url(r'^lecture/(?P<pk>[-_\w]+)/roster/$', RosterDetailView.as_view() , name='roster-detail'),
url(r'^lectures/$', LectureListView.as_view(), name='list-all-lecture'),
url(r'^lectures/user/$', UserRegisteredLectureView.as_view(), name='users-registered-classes'),
)
``` |
[
{
"content": "Here is the source code:\n```python\n# Copyright (c) 2016 EMC Corporation.\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# ... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\n# Copyright (c) 2016 EMC Corporation.\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the Li... | ```python
# Copyright (c) 2016 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from oslo_utils import units
from manila.share.drivers.dell_emc.plugins.unity import utils
from manila import test
class MockSP(object):
def __init__(self, sp_id):
self.sp_id = sp_id
def get_id(self):
return self.sp_id
SPA = MockSP('spa')
SPB = MockSP('spb')
class MockPort(object):
def __init__(self, sp, port_id, mtu):
self._sp = sp
self.port_id = port_id
self.mtu = mtu
def get_id(self):
return self.port_id
@property
def parent_storage_processor(self):
return self._sp
SPA_ETH0 = MockPort(SPA, 'spa_eth0', 1500)
SPA_ETH1 = MockPort(SPA, 'spa_eth1', 9000)
SPB_ETH0 = MockPort(SPB, 'spb_eth0', 1500)
SPB_ETH1 = MockPort(SPB, 'spb_eth1', 9000)
SPA_LA1 = MockPort(SPA, 'spa_la_1', 1500)
SPB_LA1 = MockPort(SPB, 'spb_la_1', 1500)
@ddt.ddt
class TestUtils(test.TestCase):
@ddt.data({'matcher': None,
'matched': {'pool_1', 'pool_2', 'nas_server_pool'},
'not_matched': set()},
{'matcher': ['*'],
'matched': {'pool_1', 'pool_2', 'nas_server_pool'},
'not_matched': set()},
{'matcher': ['pool_*'],
'matched': {'pool_1', 'pool_2'},
'not_matched': {'nas_server_pool'}},
{'matcher': ['*pool'],
'matched': {'nas_server_pool'},
'not_matched': {'pool_1', 'pool_2'}},
{'matcher': ['nas_server_pool'],
'matched': {'nas_server_pool'},
'not_matched': {'pool_1', 'pool_2'}},
{'matcher': ['nas_*', 'pool_*'],
'matched': {'pool_1', 'pool_2', 'nas_server_pool'},
'not_matched': set()})
def test_do_match(self, data):
full = ['pool_1 ', ' pool_2', ' nas_server_pool ']
matcher = data['matcher']
expected_matched = data['matched']
expected_not_matched = data['not_matched']
matched, not_matched = utils.do_match(full, matcher)
self.assertEqual(expected_matched, matched)
self.assertEqual(expected_not_matched, not_matched)
@ddt.data({'ports': [SPA_ETH0, SPB_ETH0],
'ids_conf': None,
'port_map': {'spa': {'spa_eth0'}, 'spb': {'spb_eth0'}},
'unmanaged': set()},
{'ports': [SPA_ETH0, SPB_ETH0],
'ids_conf': [' '],
'port_map': {'spa': {'spa_eth0'}, 'spb': {'spb_eth0'}},
'unmanaged': set()},
{'ports': [SPA_ETH0, SPB_ETH0, SPA_ETH1],
'ids_conf': ['spa*'],
'port_map': {'spa': {'spa_eth0', 'spa_eth1'}},
'unmanaged': {'spb_eth0'}},
)
@ddt.unpack
def test_match_ports(self, ports, ids_conf, port_map, unmanaged):
sp_ports_map, unmanaged_port_ids = utils.match_ports(ports,
ids_conf)
self.assertEqual(port_map, sp_ports_map)
self.assertEqual(unmanaged, unmanaged_port_ids)
def test_find_ports_by_mtu(self):
all_ports = [SPA_ETH0, SPB_ETH0, SPA_ETH1, SPB_ETH1, SPA_LA1,
SPB_LA1]
port_ids_conf = '*'
port_map = utils.find_ports_by_mtu(all_ports, port_ids_conf, 1500)
self.assertEqual({'spa': {'spa_eth0', 'spa_la_1'},
'spb': {'spb_eth0', 'spb_la_1'}},
port_map)
def test_gb_to_byte(self):
self.assertEqual(3 * units.Gi, utils.gib_to_byte(3))
def test_get_snapshot_id(self):
snapshot = {'provider_location': '23047-ef2344-4563cvw-r4323cwed',
'id': 'test_id'}
result = utils.get_snapshot_id(snapshot)
expected = '23047-ef2344-4563cvw-r4323cwed'
self.assertEqual(expected, result)
def test_get_snapshot_id_without_pl(self):
snapshot = {'provider_location': '', 'id': 'test_id'}
result = utils.get_snapshot_id(snapshot)
expected = 'test_id'
self.assertEqual(expected, result)
def test_get_nfs_share_id(self):
nfs_share = {'export_locations':
[{'path': '10.10.1.12:/addf-97e-46c-8ac6-55922f',
'share_instance_id': 'e24-457e-47-12c6-gf345'}],
'share_proto': 'NFS', 'id': 'test_nfs_id'}
result = utils.get_share_backend_id(nfs_share)
expected = 'addf-97e-46c-8ac6-55922f'
self.assertEqual(expected, result)
def test_get_nfs_share_id_without_path(self):
nfs_share = {'export_locations':
[{'path': '',
'share_instance_id': 'ev24-7e-4-12c6-g45245'}],
'share_proto': 'NFS', 'id': 'test_nfs_id'}
result = utils.get_share_backend_id(nfs_share)
expected = 'test_nfs_id'
self.assertEqual(expected, result)
def test_get_cifs_share_id(self):
cifs_share = {'export_locations':
[{'path': '\\\\17.66.5.3\\bdf-h4e-42c-122c5-b212',
'share_instance_id': 'ev4-47e-48-126-gfbh452'}],
'share_proto': 'CIFS', 'id': 'test_cifs_id'}
result = utils.get_share_backend_id(cifs_share)
expected = 'bdf-h4e-42c-122c5-b212'
self.assertEqual(expected, result)
def test_get_cifs_share_id_without_path(self):
cifs_share = {'export_locations':
[{'path': '',
'share_instance_id': 'ef4-47e-48-12c6-gf452'}],
'share_proto': 'CIFS', 'id': 'test_cifs_id'}
result = utils.get_share_backend_id(cifs_share)
expected = 'test_cifs_id'
self.assertEqual(expected, result)
``` |
[
{
"content": "```python\nclass InvalidIndexNameError(ValueError):\n pass\n\n\nclass MockValidationException(ValueError):\n def __init__(self, message):\n self.exception_msg = message\n\n\nclass InvalidUpdateExpressionInvalidDocumentPath(MockValidationException):\n invalid_update_expression_msg =... | [
{
"content": "<|memory_start|>```python\nclass InvalidIndexNameError(ValueError):\n pass\n\n\nclass MockValidationException(ValueError):\n def __init__(self, message):\n self.exception_msg = message\n\n\nclass InvalidUpdateExpressionInvalidDocumentPath(MockValidationException):\n invalid_update_... | ```python
class InvalidIndexNameError(ValueError):
pass
class MockValidationException(ValueError):
def __init__(self, message):
self.exception_msg = message
class InvalidUpdateExpressionInvalidDocumentPath(MockValidationException):
invalid_update_expression_msg = (
"The document path provided in the update expression is invalid for update"
)
def __init__(self):
super(InvalidUpdateExpressionInvalidDocumentPath, self).__init__(
self.invalid_update_expression_msg
)
class InvalidUpdateExpression(MockValidationException):
invalid_update_expr_msg = "Invalid UpdateExpression: {update_expression_error}"
def __init__(self, update_expression_error):
self.update_expression_error = update_expression_error
super(InvalidUpdateExpression, self).__init__(
self.invalid_update_expr_msg.format(
update_expression_error=update_expression_error
)
)
class AttributeDoesNotExist(MockValidationException):
attr_does_not_exist_msg = (
"The provided expression refers to an attribute that does not exist in the item"
)
def __init__(self):
super(AttributeDoesNotExist, self).__init__(self.attr_does_not_exist_msg)
class ProvidedKeyDoesNotExist(MockValidationException):
provided_key_does_not_exist_msg = (
"The provided key element does not match the schema"
)
def __init__(self):
super(ProvidedKeyDoesNotExist, self).__init__(
self.provided_key_does_not_exist_msg
)
class ExpressionAttributeNameNotDefined(InvalidUpdateExpression):
name_not_defined_msg = "An expression attribute name used in the document path is not defined; attribute name: {n}"
def __init__(self, attribute_name):
self.not_defined_attribute_name = attribute_name
super(ExpressionAttributeNameNotDefined, self).__init__(
self.name_not_defined_msg.format(n=attribute_name)
)
class AttributeIsReservedKeyword(InvalidUpdateExpression):
attribute_is_keyword_msg = (
"Attribute name is a reserved keyword; reserved keyword: {keyword}"
)
def __init__(self, keyword):
self.keyword = keyword
super(AttributeIsReservedKeyword, self).__init__(
self.attribute_is_keyword_msg.format(keyword=keyword)
)
class ExpressionAttributeValueNotDefined(InvalidUpdateExpression):
attr_value_not_defined_msg = "An expression attribute value used in expression is not defined; attribute value: {attribute_value}"
def __init__(self, attribute_value):
self.attribute_value = attribute_value
super(ExpressionAttributeValueNotDefined, self).__init__(
self.attr_value_not_defined_msg.format(attribute_value=attribute_value)
)
class UpdateExprSyntaxError(InvalidUpdateExpression):
update_expr_syntax_error_msg = "Syntax error; {error_detail}"
def __init__(self, error_detail):
self.error_detail = error_detail
super(UpdateExprSyntaxError, self).__init__(
self.update_expr_syntax_error_msg.format(error_detail=error_detail)
)
class InvalidTokenException(UpdateExprSyntaxError):
token_detail_msg = 'token: "{token}", near: "{near}"'
def __init__(self, token, near):
self.token = token
self.near = near
super(InvalidTokenException, self).__init__(
self.token_detail_msg.format(token=token, near=near)
)
class InvalidExpressionAttributeNameKey(MockValidationException):
invalid_expr_attr_name_msg = (
'ExpressionAttributeNames contains invalid key: Syntax error; key: "{key}"'
)
def __init__(self, key):
self.key = key
super(InvalidExpressionAttributeNameKey, self).__init__(
self.invalid_expr_attr_name_msg.format(key=key)
)
class ItemSizeTooLarge(MockValidationException):
item_size_too_large_msg = "Item size has exceeded the maximum allowed size"
def __init__(self):
super(ItemSizeTooLarge, self).__init__(self.item_size_too_large_msg)
class ItemSizeToUpdateTooLarge(MockValidationException):
item_size_to_update_too_large_msg = (
"Item size to update has exceeded the maximum allowed size"
)
def __init__(self):
super(ItemSizeToUpdateTooLarge, self).__init__(
self.item_size_to_update_too_large_msg
)
class IncorrectOperandType(InvalidUpdateExpression):
inv_operand_msg = "Incorrect operand type for operator or function; operator or function: {f}, operand type: {t}"
def __init__(self, operator_or_function, operand_type):
self.operator_or_function = operator_or_function
self.operand_type = operand_type
super(IncorrectOperandType, self).__init__(
self.inv_operand_msg.format(f=operator_or_function, t=operand_type)
)
class IncorrectDataType(MockValidationException):
inc_data_type_msg = "An operand in the update expression has an incorrect data type"
def __init__(self):
super(IncorrectDataType, self).__init__(self.inc_data_type_msg)
class ConditionalCheckFailed(ValueError):
msg = "The conditional request failed"
def __init__(self):
super(ConditionalCheckFailed, self).__init__(self.msg)
class TransactionCanceledException(ValueError):
cancel_reason_msg = "Transaction cancelled, please refer cancellation reasons for specific reasons [{}]"
def __init__(self, errors):
msg = self.cancel_reason_msg.format(", ".join([str(err) for err in errors]))
super(TransactionCanceledException, self).__init__(msg)
class EmptyKeyAttributeException(MockValidationException):
empty_str_msg = "One or more parameter values were invalid: An AttributeValue may not contain an empty string"
def __init__(self):
super(EmptyKeyAttributeException, self).__init__(self.empty_str_msg)
``` |
[
{
"content": "Reconstruct the code exactly:\n```python\n#\n# Gramps - a GTK+/GNOME based genealogy program\n#\n# Copyright (C) 2017-2018 Nick Hall\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Softwa... | [
{
"content": "Reconstruct the code exactly:\n<|memory_start|>```python\n#\n# Gramps - a GTK+/GNOME based genealogy program\n#\n# Copyright (C) 2017-2018 Nick Hall\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n#... | ```python
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2017-2018 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
LaTeX Genealogy Tree descendant report
"""
#------------------------------------------------------------------------
#
# python modules
#
#------------------------------------------------------------------------
from functools import partial
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".Tree")
#------------------------------------------------------------------------
#
# Gramps module
#
#------------------------------------------------------------------------
from gramps.gen.errors import ReportError
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import MenuReportOptions
from gramps.gen.plug.menu import PersonOption, NumberOption, BooleanOption
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#------------------------------------------------------------------------
#
# DescendantTree
#
#------------------------------------------------------------------------
class DescendantTree(Report):
""" Descendant Tree report """
def __init__(self, database, options, user):
"""
Create LaTeX Genealogy Tree descendant report.
"""
Report.__init__(self, database, options, user)
menu = options.menu
get_option_by_name = menu.get_option_by_name
get_value = lambda name: get_option_by_name(name).get_value()
self._db = self.database
self._pid = get_value('pid')
self.max_generations = menu.get_option_by_name('maxgen').get_value()
self.include_images = menu.get_option_by_name('images').get_value()
def write_report(self):
"""
Inherited method; called by report() in _ReportDialog.py
"""
if self._pid:
person = self._db.get_person_from_gramps_id(self._pid)
if person is None:
raise ReportError(_("Person %s is not in the Database") %
self._pid)
family_handles = person.get_family_handle_list()
if len(family_handles) > 0:
options = ['pref code={\\underline{#1}}',
'list separators hang',
'place text={\\newline}{}']
if self.include_images:
images = ('if image defined={'
'add to width=25mm,right=25mm,\n'
'underlay={\\begin{tcbclipinterior}'
'\\path[fill overzoom image=\\gtrDBimage]\n'
'([xshift=-24mm]interior.south east) '
'rectangle (interior.north east);\n'
'\\end{tcbclipinterior}},\n'
'}{},')
box = 'box={halign=left,\\gtrDBsex,%s\n}' % images
else:
box = 'box={halign=left,\\gtrDBsex}'
options.append(box)
self.doc.start_tree(options)
self.write_subgraph(0, 'child', family_handles, person.handle)
self.doc.end_tree()
def write_subgraph(self, level, subgraph_type, family_handles, ghandle):
if level >= self.max_generations:
return
family = self._db.get_family_from_handle(family_handles[0])
self.doc.start_subgraph(level, subgraph_type, family)
for handle in family_handles[1:]:
self.write_subgraph(level+1, 'union', [handle], ghandle)
for handle in (family.get_father_handle(), family.get_mother_handle()):
if handle:
parent = self._db.get_person_from_handle(handle)
if handle == ghandle:
if subgraph_type == 'child':
self.doc.write_node(self._db, level+1, 'g', parent,
False)
else:
self.doc.write_node(self._db, level+1, 'p', parent, True)
for childref in family.get_child_ref_list():
child = self._db.get_person_from_handle(childref.ref)
family_handles = child.get_family_handle_list()
if len(family_handles) > 0:
family_handles = child.get_family_handle_list()
if level+1 >= self.max_generations:
self.doc.write_node(self._db, level+1, 'c', child, True)
else:
self.write_subgraph(level+1, 'child', family_handles,
childref.ref)
else:
self.doc.write_node(self._db, level+1, 'c', child, True)
self.doc.end_subgraph(level)
#------------------------------------------------------------------------
#
# DescendantTreeOptions
#
#------------------------------------------------------------------------
class DescendantTreeOptions(MenuReportOptions):
"""
Defines all of the controls necessary
to configure the Descendant Tree report.
"""
def __init__(self, name, dbase):
self.__pid = None
MenuReportOptions.__init__(self, name, dbase)
def add_menu_options(self, menu):
category_name = _("Report Options")
self.__pid = PersonOption(_("Center Person"))
self.__pid.set_help(_("The center person for the report"))
menu.add_option(category_name, "pid", self.__pid)
maxgen = NumberOption(_("Generations"), 10, 1, 100)
maxgen.set_help(_("The number of generations to include in the tree"))
menu.add_option(category_name, "maxgen", maxgen)
images = BooleanOption(_("Include images"), False)
images.set_help(_("Include images of people in the nodes."))
menu.add_option(category_name, "images", images)
``` |
[
{
"content": "Repeat the code exactly:\n```python\n################################################################################\n#\n# RMG - Reaction Mechanism Generator\n#\n# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu), \n# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\n################################################################################\n#\n# RMG - Reaction Mechanism Generator\n#\n# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu), \n# Prof. Richard H. West (r.west@neu.edu) an... | ```python
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu),
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module provides functions for searching paths within a molecule.
The paths generally consist of alternating atoms and bonds.
"""
import cython
import itertools
from Queue import Queue
def find_butadiene(start, end):
"""
Search for a path between start and end atom that consists of
alternating non-single and single bonds.
Returns a list with atom and bond elements from start to end, or
None if nothing was found.
"""
q = Queue()#FIFO queue of paths that need to be analyzed
q.put([start])
while not q.empty():
path = q.get()
# search for end atom among the neighbors of the terminal atom of the path:
terminal = path[-1]
assert isinstance(terminal, Atom)
for atom4, bond34 in terminal.bonds.iteritems():
if atom4 == end and not bond34.isSingle():# we have found the path we are looking for
#add the final bond and atom and return
path.append(bond34)
path.append(atom4)
return path
else:#none of the neighbors is the end atom.
# Add a new allyl path and try again:
new_paths = add_allyls(path)
[q.put(p) if p else '' for p in new_paths]
# Could not find a resonance path from start atom to end atom
return None
def find_butadiene_end_with_charge(start):
"""
Search for a (4-atom, 3-bond) path between start and end atom that consists of
alternating non-single and single bonds and ends with a charged atom.
Returns a list with atom and bond elements from start to end, or
None if nothing was found.
"""
q = Queue()#FIFO queue of paths that need to be analyzed
q.put([start])
while not q.empty():
path = q.get()
# search for end atom among the neighbors of the terminal atom of the path:
terminal = path[-1]
assert isinstance(terminal, Atom)
for atom4, bond34 in terminal.bonds.iteritems():
if atom4.charge != 0 and not bond34.isSingle() and not atom4 in path:# we have found the path we are looking for
#add the final bond and atom and return
path.append(bond34)
path.append(atom4)
return path
else:#none of the neighbors is the end atom.
# Add a new allyl path and try again:
new_paths = add_allyls(path)
[q.put(p) if p else '' for p in new_paths]
# Could not find a resonance path from start atom to end atom
return None
def find_allyl_end_with_charge(start):
"""
Search for a (3-atom, 2-bond) path between start and end atom that consists of
alternating non-single and single bonds and ends with a charged atom.
Returns a list with atom and bond elements from start to end, or
an empty list if nothing was found.
"""
paths = []
q = Queue()#FIFO queue of paths that need to be analyzed
unsaturated_bonds = add_unsaturated_bonds([start])
if not unsaturated_bonds:
return []
[q.put(path) for path in unsaturated_bonds]
while not q.empty():
path = q.get()
# search for end atom among the neighbors of the terminal atom of the path:
terminal = path[-1]
assert isinstance(terminal, Atom)
path_copy = path[:]
for atom3, bond23 in terminal.bonds.iteritems():
if atom3.charge != 0 and not atom3 in path_copy:# we have found the path we are looking for
#add the final bond and atom and return
path_copy_copy = path_copy[:]
path_copy_copy.extend([bond23, atom3])
paths.append(path_copy_copy)
else:#none of the neighbors is the end atom.
# Add a new inverse allyl path and try again:
new_paths = add_inverse_allyls(path)
[q.put(p) if p else '' for p in new_paths]
# Could not find a resonance path from start atom to end atom
return paths
def find_shortest_path(start, end, path=None):
path = path if path else []
path = path + [start]
if start == end:
return path
shortest = None
for node,_ in start.edges.iteritems():
if node not in path:
newpath = find_shortest_path(node, end, path)
if newpath:
if not shortest or len(newpath) < len(shortest):
shortest = newpath
return shortest
def add_unsaturated_bonds(path):
"""
Find all the (2-atom, 1-bond) patterns "X=X" starting from the
last atom of the existing path.
The bond attached to the starting atom should be non single.
"""
paths = []
start = path[-1]
assert isinstance(start, Atom)
for atom2, bond12 in start.bonds.iteritems():
if not bond12.isSingle() and not atom2 in path and atom2.number!= 1:
new_path = path[:]
new_path.extend((bond12, atom2))
paths.append(new_path)
return paths
def add_allyls(path):
"""
Find all the (3-atom, 2-bond) patterns "X=X-X" starting from the
last atom of the existing path.
The bond attached to the starting atom should be non single.
The second bond should be single.
"""
paths = []
start = path[-1]
assert isinstance(start, Atom)
for atom2, bond12 in start.bonds.iteritems():
if not bond12.isSingle() and not atom2 in path:
for atom3, bond23 in atom2.bonds.iteritems():
if start is not atom3 and atom3.number!= 1:
new_path = path[:]
new_path.extend((bond12, atom2, bond23, atom3))
paths.append(new_path)
return paths
def add_inverse_allyls(path):
"""
Find all the (3-atom, 2-bond) patterns "start~atom2=atom3" starting from the
last atom of the existing path.
The second bond should be non-single.
"""
paths = []
start = path[-1]
assert isinstance(start, Atom)
for atom2, bond12 in start.bonds.iteritems():
if not atom2 in path:
for atom3, bond23 in atom2.bonds.iteritems():
if not atom3 in path and atom3.number!= 1 and not bond23.isSingle():
new_path = path[:]
new_path.extend((bond12, atom2, bond23, atom3))
paths.append(new_path)
return paths
def compute_atom_distance(atom_indices, mol):
"""
Compute the distances between each pair of atoms in the atom_indices.
The distance between two atoms is defined as the length of the shortest path
between the two atoms minus 1, because the start atom is part of the path.
The distance between multiple atoms is defined by generating all possible
combinations between two atoms and storing the distance between each combination
of atoms in a dictionary.
The parameter 'atom_indices' is a list of 1-based atom indices.
"""
if len(atom_indices) == 1: return {(atom_indices[0],): 0}
distances = {}
combos = [sorted(tup) for tup in itertools.combinations(atom_indices, 2)]
for i1, i2 in combos:
start, end = mol.atoms[i1 - 1], mol.atoms[i2 - 1]
path = find_shortest_path(start, end)
distances[(i1, i2)] = len(path) - 1
return distances
def findAllDelocalizationPaths(atom1):
"""
Find all the delocalization paths allyl to the radical center indicated
by `atom1`. Used to generate resonance isomers.
"""
cython.declare(paths=list)
cython.declare(atom2=Atom, atom3=Atom, bond12=Bond, bond23=Bond)
# No paths if atom1 is not a radical
if atom1.radicalElectrons <= 0:
return []
# Find all delocalization paths
paths = []
for atom2, bond12 in atom1.edges.items():
# Vinyl bond must be capable of gaining an order
if (bond12.isSingle() or bond12.isDouble()) and (atom1.radicalElectrons == 1 or atom1.radicalElectrons == 2):
for atom3, bond23 in atom2.edges.items():
# Allyl bond must be capable of losing an order without breaking
if atom1 is not atom3 and (bond23.isDouble() or bond23.isTriple()):
paths.append([atom1, atom2, atom3, bond12, bond23])
return paths
def findAllDelocalizationPathsLonePairRadical(atom1):
"""
Find all the delocalization paths of lone electron pairs next to the radical center indicated
by `atom1`. Used to generate resonance isomers in adjacent N and O as in NO2.
"""
cython.declare(paths=list)
cython.declare(atom2=Atom, bond12=Bond)
paths = []
if atom1.isNitrogen() and atom1.radicalElectrons >= 1 and atom1.lonePairs == 0:
for atom2, bond12 in atom1.edges.items():
if atom2.isOxygen() and atom2.radicalElectrons == 0 and atom2.lonePairs == 3 and bond12.isSingle():
paths.append([atom1, atom2])
elif atom1.isOxygen() and atom1.radicalElectrons >= 1 and atom1.lonePairs == 2:
for atom2, bond12 in atom1.edges.items():
if atom2.isNitrogen() and atom2.radicalElectrons == 0 and atom2.lonePairs == 1 and bond12.isSingle():
paths.append([atom1, atom2])
return paths
def findAllDelocalizationPathsN5dd_N5ts(atom1):
"""
Find all the resonance structures of nitrogen atoms with two double bonds (N5dd)
and nitrogen atoms with one triple and one single bond (N5ts)
"""
cython.declare(paths=list)
cython.declare(atom2=Atom, bond12=Bond)
# No paths if atom1 is not nitrogen
if not (atom1.isNitrogen()):
return []
# Find all delocalization paths
paths = []
index_atom_2 = 0
index_atom_3 = 0
for atom2, bond12 in atom1.edges.items():
index_atom_2 = index_atom_2 + 1
# Only double bonds are considered
if bond12.isDouble():
for atom3, bond13 in atom1.edges.items():
index_atom_3 = index_atom_3 + 1
# Only double bonds are considered, at the moment we only consider non-radical nitrogen and oxygen atoms
if (bond13.isDouble() and atom3.radicalElectrons == 0 and atom3.lonePairs > 0 and not atom3.isOxygen() and not atom3.isCarbon() and (index_atom_2 != index_atom_3)):
paths.append([atom1, atom2, atom3, bond12, bond13, 1])
for atom2, bond12 in atom1.edges.items():
# Only triple bonds are considered
if bond12.isTriple():
for atom3, bond13 in atom1.edges.items():
# Only single bonds are considered, at the moment we only consider negatively charged nitrogen and oxygen
if (bond13.isSingle() and ((atom3.isNitrogen() and atom3.lonePairs >= 2) or (atom3.isOxygen() and atom3.lonePairs >= 3))):
paths.append([atom1, atom2, atom3, bond12, bond13, 2])
return paths
``` |
[
{
"content": "Here is the source code:\n```python\n\"\"\"\nRipped from https://www.smsoft.ru/en/pifdoc.htm\n\"\"\"\n\nimport ptypes\nfrom ptypes import *\n\nclass Heading(pstruct.type):\n def __Next_section_offset(self):\n return dyn.pointer(Section, pint.uint16_t)\n\n def __Section_data_offset(sel... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\n\"\"\"\nRipped from https://www.smsoft.ru/en/pifdoc.htm\n\"\"\"\n\nimport ptypes\nfrom ptypes import *\n\nclass Heading(pstruct.type):\n def __Next_section_offset(self):\n return dyn.pointer(Section, pint.uint16_t)\n\n def __Section... | ```python
"""
Ripped from https://www.smsoft.ru/en/pifdoc.htm
"""
import ptypes
from ptypes import *
class Heading(pstruct.type):
def __Next_section_offset(self):
return dyn.pointer(Section, pint.uint16_t)
def __Section_data_offset(self):
def _object_(_, self=self):
length = self['Length'].li
return SectionData.withdefault(length.int(), length=length.int())
return dyn.pointer(_object_, pint.uint16_t)
_fields_ = [
(dyn.clone(pstr.string, length=0x10), 'Name'),
(__Next_section_offset, 'NextOffset'),
(__Section_data_offset, 'Offset'),
(pint.uint16_t, 'Length'),
]
class SectionData(ptype.definition):
cache = {}
class default(pstr.string):
pass
class Section(pstruct.type):
def __data(self):
res = self['heading'].li
length = res['Length']
return SectionData.withdefault(length.int(), length=length.int())
def __padding_section(self):
res = self['heading'].li
if res['NextOffset'].int() < 0xffff:
length, fields = res['NextOffset'].int() - self.getoffset(), ['heading', 'data']
return dyn.block(max(0, length - sum(self[fld].li.size() for fld in fields)))
return dyn.block(0)
_fields_ = [
(Heading, 'heading'),
(__data, 'data'),
(__padding_section, 'padding(data)'),
]
class MaximumRequired(pstruct.type):
_fields_ = [
(pint.uint16_t, 'maximum'),
(pint.uint16_t, 'required'),
]
def summary(self):
return "required={:#x} maximum={:#x}".format(self['required'].int(), self['maximum'].int())
@SectionData.define
class BasicSection(pstruct.type):
type = 0x171
@pbinary.littleendian
class _Flags(pbinary.flags):
_fields_ = [
(1, 'COM2'),
(1, 'COM1'),
(1, 'Reserved'),
(1, 'Close on exit'),
(1, 'No screen exchange'),
(1, 'Prevent switch'),
(1, 'Graphics mode'),
(1, 'Direct memory'),
]
@pbinary.littleendian
class _Program_flags(pbinary.flags):
_fields_ = [
(1, 'Unused'),
(1, 'Has parameters'),
(1, 'Exchange interrupt vectors'),
(5, 'Reserved'),
(1, 'Direct screen'),
(1, 'Stop in background mode'),
(1, 'Use coprocessor'),
(1, 'Direct keyboard'),
(4, 'Unknown'),
]
_fields_ = [
(pint.uint8_t, 'Reserved'),
(pint.uint8_t, 'Checksum'),
(dyn.clone(pstr.string, length=30), 'Window title'),
(MaximumRequired, 'Reserved memory'),
(dyn.clone(pstr.string, length=63), 'Path'),
(_Flags, 'Flags'),
(pint.uint8_t, 'Drive index'),
(dyn.clone(pstr.string, length=64), 'Directory'),
(dyn.clone(pstr.string, length=64), 'Parameters'),
(pint.uint8_t, 'Video mode'),
(pint.uint8_t, 'Text video pages quantity'),
(pint.uint8_t, 'First used interrupt'),
(pint.uint8_t, 'Last used interrupt'),
(pint.uint8_t, 'Rows'),
(pint.uint8_t, 'Columns'),
(pint.uint8_t, 'X position'),
(pint.uint8_t, 'Y position'),
(pint.uint16_t, 'Number of last video page'),
(dyn.clone(pstr.string, length=64), 'Shared program path'),
(dyn.clone(pstr.string, length=64), 'Shared program data'),
(_Program_flags, 'Program flags'),
]
@SectionData.define
class Windows386Section(pstruct.type):
type = 0x68
@pbinary.littleendian
class _Bit_mask1(pbinary.flags):
_fields_ = [
(3, 'Unused'),
(1, 'No MS-DOS transition warning'),
(1, 'Unused'),
(1, 'No MS-DOS automatic transition'),
(1, 'Unused'),
(1, 'Prevent Windows detection'),
(1, 'MS-DOS mode'),
(1, 'Unused'),
(1, 'Maximized window'),
(1, 'Minimized window'),
(1, 'Memory protection'),
(1, 'Lock application memory'),
(1, 'Fast paste'),
(1, 'XMS memory locked'),
(1, 'EMS memory locked'),
(1, 'Use shortcut key'),
(1, 'Do not use HMA'),
(1, 'Detect idle time'),
(1, 'No Ctrl+Esc'),
(1, 'No PrtSc'),
(1, 'No Alt+PrtSc'),
(1, 'No Alt+Enter'),
(1, 'No Alt+Space'),
(1, 'No Alt+Esc'),
(1, 'No Alt+Tab'),
(1, 'Unused'),
(1, 'Full-screen mode'),
(1, 'Exclusive run mode'),
(1, 'Background continuation'),
(1, 'Permit exit'),
]
@pbinary.littleendian
class _Bit_mask2(pbinary.flags):
_fields_ = [
(8, 'Unused'),
(1, 'Retain video memory'),
(1, 'Memory: High graphics'),
(1, 'Memory: Low graphics'),
(1, 'Memory: Text graphics'),
(1, 'Ports: High graphics'),
(1, 'Ports: Low graphics'),
(1, 'Ports: Text graphics'),
(1, 'Video ROM emulation'),
]
@pbinary.littleendian
class _Shortcut_modifier(pbinary.flags):
_fields_ = [
(12, 'Unused'),
(1, 'Alt'),
(1, 'Ctrl'),
(2, 'Shift'),
]
_fields_ = [
(MaximumRequired, 'Conventional memory'),
(pint.uint16_t, 'Active priority'),
(pint.uint16_t, 'Background priority'),
(MaximumRequired, 'EMS memory'),
(MaximumRequired, 'XMS memory'),
(_Bit_mask1, 'Bit mask 1'),
(_Bit_mask2, 'Bit mask 2'),
(pint.uint16_t, 'Unknown_16'),
(pint.uint16_t, 'Shortcut key scan code'),
(_Shortcut_modifier, 'Shortcut key modifier'),
(pint.uint16_t, 'Use shortcut key'),
(pint.uint16_t, 'Extended shortcut key'),
(pint.uint16_t, 'Unknown_20'),
(pint.uint16_t, 'Unknown_22'),
(pint.uint32_t, 'Unknown_24'),
(dyn.clone(pstr.string, length=64), 'Parameters'),
]
@SectionData.define
class Windows286Section(pstruct.type):
type = 0x6
@pbinary.littleendian
class _Flags(pbinary.flags):
_fields_ = [
(1, 'COM4'),
(1, 'COM3'),
(8, 'Unused'),
(1, 'No screen retain'),
(1, 'No Ctrl+Esc'),
(1, 'No PrtSc'),
(1, 'No Alt+PrtSc'),
(1, 'No Alt+Esc'),
(1, 'No Alt+Tab'),
]
_fields_ = [
(MaximumRequired, 'XMS memory'),
(_Flags, 'Flags'),
]
@SectionData.define
class WindowsVMM40Section(pstruct.type):
type = 0x1ac
class _Dimensions(pstruct.type):
_fields_ = [
(pint.uint16_t, 'horizontal size'),
(pint.uint16_t, 'vertical size'),
]
@pbinary.littleendian
class _Bit_mask1(pbinary.flags):
_fields_ = [
(10, 'Unknown'),
(1, 'No screensaver'),
(1, 'No exit warning'),
(2, 'Unused'),
(1, 'Continue in background'),
(1, 'Reserved'),
]
@pbinary.littleendian
class _Bit_mask2(pbinary.flags):
_fields_ = [
(7, 'Unknown'),
(1, 'Full-screen mode'),
(1, 'No dynamic video memory'),
(6, 'Unused'),
(1, 'Video-ROM emulation'),
]
@pbinary.littleendian
class _Bit_mask3(pbinary.flags):
_fields_ = [
(4, 'Unknown'),
(1, 'No Ctrl+Esc'),
(1, 'No PrtSc'),
(1, 'No Alt+PrtSc'),
(1, 'No Alt+Enter'),
(1, 'No Alt+Space'),
(1, 'No Alt+Esc'),
(1, 'No Alt+Tab'),
(4, 'Unused'),
(1, 'Fast paste'),
]
@pbinary.littleendian
class _Mouse_flags(pbinary.flags):
_fields_ = [
(14, 'Unused'),
(1, 'Exclusive'),
(1, 'No selection'),
]
@pbinary.littleendian
class _Font_flags(pbinary.flags):
_fields_ = [
(4, 'Unused'),
(1, 'Current TrueType'),
(1, 'Current Raster'),
(5, 'Unknown'),
(1, 'Automatic size'),
(1, 'Use TrueType'),
(1, 'Use Raster'),
(2, 'Reserved'),
]
@pbinary.littleendian
class _Bit_mask4(pbinary.flags):
_fields_ = [
(14, 'Unused'),
(1, 'Show toolbar'),
(1, 'Unknown'),
]
@pbinary.littleendian
class _Last_maximized_flags(pbinary.flags):
_fields_ = [
(14, 'Unknown'),
(1, 'Last maximized'),
(1, 'Reserved'),
]
class _Last_window_state(pint.enum, pint.uint16_t):
_values_ = [
('Normal', 1),
('Minimized', 2),
('Maximized', 3),
]
class _Border_position(pstruct.type):
_fields_ = [
(pint.uint16_t, 'left'),
(pint.uint16_t, 'top'),
(pint.uint16_t, 'right'),
(pint.uint16_t, 'bottom'),
]
_fields_ = [
(dyn.block(88), 'Unknown_0'),
(dyn.clone(pstr.string, length=80), 'Icon filename'),
(pint.uint16_t, 'Icon number'),
(_Bit_mask1, 'Bit mask 1'),
(dyn.block(10), 'Unknown_ac'),
(pint.uint16_t, 'Priority'),
(_Bit_mask2, 'Bit mask 2'),
(dyn.block(8), 'Unknown_ba'),
(pint.uint16_t, 'Number of lines'),
(_Bit_mask3, 'Bit mask 3'),
(pint.uint16_t, 'Unknown_c6'),
(pint.uint16_t, 'Unknown_c8'),
(pint.uint16_t, 'Unknown_ca'),
(pint.uint16_t, 'Unknown_cc'),
(pint.uint16_t, 'Unknown_ce'),
(pint.uint16_t, 'Unknown_d0'),
(pint.uint16_t, 'Unknown_c2'),
(pint.uint16_t, 'Unknown_c4'),
(_Mouse_flags, 'Mouse flags'),
(dyn.block(6), 'Unknown_d8'),
(_Font_flags, 'Font flags'),
(pint.uint16_t, 'Unknown_e0'),
(_Dimensions, 'Raster font size'),
(_Dimensions, 'Current font size'),
(dyn.clone(pstr.string, length=32), 'Raster font name'),
(dyn.clone(pstr.string, length=32), 'TrueType font name'),
(pint.uint16_t, 'Unknown_12a'),
(_Bit_mask4, 'Bit mask 4'),
(pint.uint16_t, 'No restore settings'),
(_Dimensions, 'Screen symbol size'),
(_Dimensions, 'Client area size'),
(_Dimensions, 'Window size'),
(pint.uint16_t, 'Unknown_13c'),
(_Last_maximized_flags, 'Last maximized'),
(_Last_window_state, 'Last start'),
(_Border_position, 'Maximized border position'),
(_Border_position, 'Normal border position'),
(pint.uint32_t, 'Unknown_152'),
(dyn.clone(pstr.string, length=80), 'BAT file name'),
(pint.uint16_t, 'Environment size'),
(pint.uint16_t, 'DPMI memory volume'),
(pint.uint16_t, 'Unknown_1aa'),
]
@SectionData.define
class WindowsNT31Section(pstruct.type):
type = 0x8c
_fields_ = [
(pint.uint16_t, 'Hardware timer emulation'),
(dyn.block(10), 'Unknown_2'),
(dyn.clone(pstr.string, length=64), 'CONFIG.NT filename'),
(dyn.clone(pstr.string, length=64), 'AUTOEXEC.NT filename'),
]
@SectionData.define
class WindowsNT40Section(pstruct.type):
type = 0x68c
_fields_ = [
(pint.uint32_t, 'Unknown_0'),
(dyn.clone(pstr.wstring, length=128), 'Unicode parameters'),
(dyn.clone(pstr.string, length=128), 'Ascii parameters'),
(dyn.block(240), 'Unknown_184'),
(dyn.clone(pstr.wstring, length=80), 'Unicode PIF filename'),
(dyn.clone(pstr.string, length=80), 'Ascii PIF filename'),
(dyn.clone(pstr.wstring, length=30), 'Unicode window title'),
(dyn.clone(pstr.string, length=30), 'Ascii window title'),
(dyn.clone(pstr.wstring, length=80), 'Unicode icon filename'),
(dyn.clone(pstr.string, length=80), 'Ascii icon filename'),
(dyn.clone(pstr.wstring, length=64), 'Unicode working directory'),
(dyn.clone(pstr.string, length=64), 'Ascii working directory'),
(dyn.block(286), 'Unknown_56e'),
]
class Sections(parray.terminated):
_object_ = Section
def isTerminator(self, item):
res = item['heading']
return res['NextOffset'].int() == 0xffff
class File(pstruct.type):
_fields_ = [
(BasicSection, 'basicSection'),
(Heading, 'basicHeading'),
(Sections, 'sections'),
]
def enumerate(self):
item = self['basicHeading']
yield item['Name'].str(), item['Offset'].d.li
while item['NextOffset'].int() < 0xffff:
res = item['NextOffset'].d.li
item = res['heading']
yield item['Name'].str(), item['Offset'].d.li
return
def iterate(self):
for _, item in self.enumerate():
yield item
return
if __name__ == '__main__':
import ptypes, local.pif as PIF
ptypes.setsource(ptypes.prov.file('/home/user/work/syringe/template/samples/_default.pif','rb'))
z = PIF.File()
z=z.l
for name, item in z.enumerate():
print(name)
print(item)
for item in z.iterate():
print(item)
``` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.