"""
This file contains a minimal set of tests for compliance with the extension
array interface test suite, and should contain no other tests.
The test suite for the full functionality of the array is located in
`pandas/tests/arrays/`.
The tests in this file are inherited from the BaseExtensionTests, and only
minimal tweaks should be applied to get the tests passing (by overwriting a
parent method).
Additional tests should either be added to one of the BaseExtensionTests
classes (if they are relevant for the extension interface for all dtypes), or
be added to the array-specific tests in `pandas/tests/arrays/`.
"""
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
from pandas.core.arrays.boolean import BooleanDtype
from pandas.tests.extension import base
def make_data():
return [True, False] * 4 + [np.nan] + [True, False] * 44 + [np.nan] + [True, False]
@pytest.fixture
def dtype():
return BooleanDtype()
@pytest.fixture
def data(dtype):
return pd.array(make_data(), dtype=dtype)
@pytest.fixture
def data_for_twos(dtype):
return pd.array(np.ones(100), dtype=dtype)
@pytest.fixture
def data_missing(dtype):
return pd.array([np.nan, True], dtype=dtype)
@pytest.fixture
def data_for_sorting(dtype):
return pd.array([True, True, False], dtype=dtype)
@pytest.fixture
def data_missing_for_sorting(dtype):
return pd.array([True, np.nan, False], dtype=dtype)
@pytest.fixture
def na_cmp():
    # the NA value used by BooleanDtype is the pd.NA singleton
return lambda x, y: x is pd.NA and y is pd.NA
@pytest.fixture
def na_value():
return pd.NA
@pytest.fixture
def data_for_grouping(dtype):
b = True
a = False
na = np.nan
return pd.array([b, b, na, na, a, a, b], dtype=dtype)
class TestDtype(base.BaseDtypeTests):
pass
class TestInterface(base.BaseInterfaceTests):
pass
class TestConstructors(base.BaseConstructorsTests):
pass
class TestGetitem(base.BaseGetitemTests):
pass
class TestSetitem(base.BaseSetitemTests):
pass
class TestMissing(base.BaseMissingTests):
pass
class TestArithmeticOps(base.BaseArithmeticOpsTests):
implements = {"__sub__", "__rsub__"}
def check_opname(self, s, op_name, other, exc=None):
# overwriting to indicate ops don't raise an error
super().check_opname(s, op_name, other, exc=None)
def _check_op(self, s, op, other, op_name, exc=NotImplementedError):
if exc is None:
if op_name in self.implements:
msg = r"numpy boolean subtract"
with pytest.raises(TypeError, match=msg):
op(s, other)
return
result = op(s, other)
expected = s.combine(other, op)
if op_name in (
"__floordiv__",
"__rfloordiv__",
"__pow__",
"__rpow__",
"__mod__",
"__rmod__",
):
# combine keeps boolean type
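                # (numpy promotes bools to int8 for these ops, e.g.
                #  np.floor_divide(True, True) gives 1 with dtype int8,
                #  hence the Int8 cast below)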
expected = expected.astype("Int8")
elif op_name in ("__truediv__", "__rtruediv__"):
# combine with bools does not generate the correct result
# (numpy behaviour for div is to regard the bools as numeric)
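                # (e.g. np.true_divide(True, True) evaluates to 1.0 with
                #  dtype float64)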
expected = s.astype(float).combine(other, op).astype("Float64")
if op_name == "__rpow__":
# for rpow, combine does not propagate NaN
expected[result.isna()] = np.nan
self.assert_series_equal(result, expected)
else:
with pytest.raises(exc):
op(s, other)
def _check_divmod_op(self, s, op, other, exc=None):
# override to not raise an error
super()._check_divmod_op(s, op, other, None)
@pytest.mark.skip(reason="BooleanArray does not error on ops")
def test_error(self, data, all_arithmetic_operators):
# other specific errors tested in the boolean array specific tests
pass
def test_arith_frame_with_scalar(self, data, all_arithmetic_operators, request):
# frame & scalar
op_name = all_arithmetic_operators
if op_name not in self.implements:
mark = pytest.mark.xfail(reason="_reduce needs implementation")
request.node.add_marker(mark)
super().test_arith_frame_with_scalar(data, all_arithmetic_operators)
class TestComparisonOps(base.BaseComparisonOpsTests):
def check_opname(self, s, op_name, other, exc=None):
# overwriting to indicate ops don't raise an error
super().check_opname(s, op_name, other, exc=None)
def _compare_other(self, s, data, op_name, other):
self.check_opname(s, op_name, other)
@pytest.mark.skip(reason="Tested in tests/arrays/test_boolean.py")
def test_compare_scalar(self, data, all_compare_operators):
pass
@pytest.mark.skip(reason="Tested in tests/arrays/test_boolean.py")
def test_compare_array(self, data, all_compare_operators):
pass
class TestReshaping(base.BaseReshapingTests):
pass
class TestMethods(base.BaseMethodsTests):
@pytest.mark.parametrize("na_sentinel", [-1, -2])
def test_factorize(self, data_for_grouping, na_sentinel):
# override because we only have 2 unique values
labels, uniques = pd.factorize(data_for_grouping, na_sentinel=na_sentinel)
expected_labels = np.array(
[0, 0, na_sentinel, na_sentinel, 1, 1, 0], dtype=np.intp
)
expected_uniques = data_for_grouping.take([0, 4])
tm.assert_numpy_array_equal(labels, expected_labels)
self.assert_extension_array_equal(uniques, expected_uniques)
def test_combine_le(self, data_repeated):
# override because expected needs to be boolean instead of bool dtype
orig_data1, orig_data2 = data_repeated(2)
s1 = pd.Series(orig_data1)
s2 = pd.Series(orig_data2)
result = s1.combine(s2, lambda x1, x2: x1 <= x2)
expected = pd.Series(
[a <= b for (a, b) in zip(list(orig_data1), list(orig_data2))],
dtype="boolean",
)
self.assert_series_equal(result, expected)
val = s1.iloc[0]
result = s1.combine(val, lambda x1, x2: x1 <= x2)
expected = pd.Series([a <= val for a in list(orig_data1)], dtype="boolean")
self.assert_series_equal(result, expected)
def test_searchsorted(self, data_for_sorting, as_series):
# override because we only have 2 unique values
data_for_sorting = pd.array([True, False], dtype="boolean")
b, a = data_for_sorting
arr = type(data_for_sorting)._from_sequence([a, b])
if as_series:
arr = pd.Series(arr)
assert arr.searchsorted(a) == 0
assert arr.searchsorted(a, side="right") == 1
assert arr.searchsorted(b) == 1
assert arr.searchsorted(b, side="right") == 2
result = arr.searchsorted(arr.take([0, 1]))
expected = np.array([0, 1], dtype=np.intp)
tm.assert_numpy_array_equal(result, expected)
# sorter
sorter = np.array([1, 0])
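        # sorter [1, 0] presents data_for_sorting ([True, False]) in
        # sorted order ([False, True]), so a (False) is found at index 0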
assert data_for_sorting.searchsorted(a, sorter=sorter) == 0
@pytest.mark.skip(reason="uses nullable integer")
def test_value_counts(self, all_data, dropna):
return super().test_value_counts(all_data, dropna)
@pytest.mark.skip(reason="uses nullable integer")
def test_value_counts_with_normalize(self, data):
pass
def test_argmin_argmax(self, data_for_sorting, data_missing_for_sorting):
# override because there are only 2 unique values
# data_for_sorting -> [B, C, A] with A < B < C -> here True, True, False
assert data_for_sorting.argmax() == 0
assert data_for_sorting.argmin() == 2
        # with repeated values -> first occurrence
data = data_for_sorting.take([2, 0, 0, 1, 1, 2])
assert data.argmax() == 1
assert data.argmin() == 0
# with missing values
# data_missing_for_sorting -> [B, NA, A] with A < B and NA missing.
assert data_missing_for_sorting.argmax() == 0
assert data_missing_for_sorting.argmin() == 2
class TestCasting(base.BaseCastingTests):
pass
class TestGroupby(base.BaseGroupbyTests):
"""
    Groupby-specific tests are overridden because boolean only has 2
    unique values, while the base tests use 3 groups.
"""
def test_grouping_grouper(self, data_for_grouping):
df = pd.DataFrame(
{"A": ["B", "B", None, None, "A", "A", "B"], "B": data_for_grouping}
)
gr1 = df.groupby("A").grouper.groupings[0]
gr2 = df.groupby("B").grouper.groupings[0]
tm.assert_numpy_array_equal(gr1.grouper, df.A.values)
tm.assert_extension_array_equal(gr2.grouper, data_for_grouping)
@pytest.mark.parametrize("as_index", [True, False])
def test_groupby_extension_agg(self, as_index, data_for_grouping):
df = pd.DataFrame({"A": [1, 1, 2, 2, 3, 3, 1], "B": data_for_grouping})
result = df.groupby("B", as_index=as_index).A.mean()
_, index = pd.factorize(data_for_grouping, sort=True)
index = pd.Index(index, name="B")
expected = pd.Series([3, 1], index=index, name="A")
if as_index:
self.assert_series_equal(result, expected)
else:
expected = expected.reset_index()
self.assert_frame_equal(result, expected)
def test_groupby_extension_no_sort(self, data_for_grouping):
df = pd.DataFrame({"A": [1, 1, 2, 2, 3, 3, 1], "B": data_for_grouping})
result = df.groupby("B", sort=False).A.mean()
_, index = pd.factorize(data_for_grouping, sort=False)
index = pd.Index(index, name="B")
expected = pd.Series([1, 3], index=index, name="A")
self.assert_series_equal(result, expected)
def test_groupby_extension_transform(self, data_for_grouping):
valid = data_for_grouping[~data_for_grouping.isna()]
df = pd.DataFrame({"A": [1, 1, 3, 3, 1], "B": valid})
result = df.groupby("B").A.transform(len)
expected = pd.Series([3, 3, 2, 2, 3], name="A")
self.assert_series_equal(result, expected)
def test_groupby_extension_apply(self, data_for_grouping, groupby_apply_op):
df = pd.DataFrame({"A": [1, 1, 2, 2, 3, 3, 1], "B": data_for_grouping})
df.groupby("B").apply(groupby_apply_op)
df.groupby("B").A.apply(groupby_apply_op)
df.groupby("A").apply(groupby_apply_op)
df.groupby("A").B.apply(groupby_apply_op)
def test_groupby_apply_identity(self, data_for_grouping):
df = pd.DataFrame({"A": [1, 1, 2, 2, 3, 3, 1], "B": data_for_grouping})
result = df.groupby("A").B.apply(lambda x: x.array)
expected = pd.Series(
[
df.B.iloc[[0, 1, 6]].array,
df.B.iloc[[2, 3]].array,
df.B.iloc[[4, 5]].array,
],
index=pd.Index([1, 2, 3], name="A"),
name="B",
)
self.assert_series_equal(result, expected)
def test_in_numeric_groupby(self, data_for_grouping):
df = pd.DataFrame(
{
"A": [1, 1, 2, 2, 3, 3, 1],
"B": data_for_grouping,
"C": [1, 1, 1, 1, 1, 1, 1],
}
)
result = df.groupby("A").sum().columns
if data_for_grouping.dtype._is_numeric:
expected = pd.Index(["B", "C"])
else:
expected = pd.Index(["C"])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("min_count", [0, 10])
def test_groupby_sum_mincount(self, data_for_grouping, min_count):
df = pd.DataFrame({"A": [1, 1, 2, 2, 3, 3, 1], "B": data_for_grouping})
result = df.groupby("A").sum(min_count=min_count)
if min_count == 0:
expected = pd.DataFrame(
{"B": pd.array([3, 0, 0], dtype="Int64")},
index=pd.Index([1, 2, 3], name="A"),
)
tm.assert_frame_equal(result, expected)
else:
expected = pd.DataFrame(
{"B": pd.array([pd.NA] * 3, dtype="Int64")},
index=pd.Index([1, 2, 3], name="A"),
)
tm.assert_frame_equal(result, expected)
class TestNumericReduce(base.BaseNumericReduceTests):
def check_reduce(self, s, op_name, skipna):
result = getattr(s, op_name)(skipna=skipna)
expected = getattr(s.astype("float64"), op_name)(skipna=skipna)
# override parent function to cast to bool for min/max
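        # (e.g. astype("float64").min() yields 0.0 where the boolean
        #  result is False, so min/max must be cast back before comparing)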
if np.isnan(expected):
expected = pd.NA
elif op_name in ("min", "max"):
expected = bool(expected)
tm.assert_almost_equal(result, expected)
class TestBooleanReduce(base.BaseBooleanReduceTests):
pass
class TestPrinting(base.BasePrintingTests):
pass
class TestUnaryOps(base.BaseUnaryOpsTests):
pass
class TestParsing(base.BaseParsingTests):
pass
|
{
"content_hash": "e40987a9f1baf11ba1bdeeb56b84b32c",
"timestamp": "",
"source": "github",
"line_count": 393,
"max_line_length": 87,
"avg_line_length": 33.10178117048346,
"alnum_prop": 0.603044046429395,
"repo_name": "jreback/pandas",
"id": "ced7ea92613105784cbc96b2aef490ff313bdd52",
"size": "13009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/tests/extension/test_boolean.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4879"
},
{
"name": "C",
"bytes": "406353"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "529"
},
{
"name": "Python",
"bytes": "14930989"
},
{
"name": "Shell",
"bytes": "29317"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
}
|
import bpy
import bl_ui
from . import render
from . import scene
from . import world
from . import camera
from . import objects
from . import materials
from . import lamps
import bl_ui.properties_texture as properties_texture
INCLUDE_TEXTURE = ['TEXTURE_MT_specials', 'TEXTURE_PT_context_texture', 'TEXTURE_PT_image', 'TEXTURE_UL_texslots', 'Panel',
'Object', 'Material', 'Texture', 'TextureSlotPanel', 'TextureButtonsPanel', 'UIList', 'id_tex_datablock', 'context_tex_datablock']
for member in dir(properties_texture):
if member in INCLUDE_TEXTURE:
subclass = getattr(properties_texture, member)
        try:
            subclass.COMPAT_ENGINES.add('APPLESEED_RENDER')
        except AttributeError:
            # not every whitelisted member exposes a COMPAT_ENGINES set
            pass
del properties_texture
# Enable all existing panels for these contexts
import bl_ui.properties_data_mesh as properties_data_mesh
for member in dir(properties_data_mesh):
subclass = getattr(properties_data_mesh, member)
    try:
        subclass.COMPAT_ENGINES.add('APPLESEED_RENDER')
    except AttributeError:
        # skip members without a COMPAT_ENGINES set
        pass
del properties_data_mesh
import bl_ui.properties_particle as properties_particle
for member in dir(properties_particle):
subclass = getattr(properties_particle, member)
    try:
        subclass.COMPAT_ENGINES.add('APPLESEED_RENDER')
    except AttributeError:
        # skip members without a COMPAT_ENGINES set
        pass
del properties_particle
def register():
render.register()
scene.register()
world.register()
materials.register()
camera.register()
objects.register()
# particles.register()
lamps.register()
def unregister():
render.unregister()
scene.unregister()
world.unregister()
materials.unregister()
camera.unregister()
objects.unregister()
# particles.unregister()
lamps.unregister()
|
{
"content_hash": "28b4a44b502c4886454b856122e85ccb",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 149,
"avg_line_length": 27.841269841269842,
"alnum_prop": 0.6972633979475484,
"repo_name": "jasperges/blenderseed",
"id": "2846fe658dea4b556d720f8216657e519bd977f0",
"size": "3042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ui/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "202"
},
{
"name": "Python",
"bytes": "406324"
}
],
"symlink_target": ""
}
|
from twisted.internet import defer
from buildbot.clients import usersclient
from buildbot.process.users import users
from buildbot.util import in_reactor
@in_reactor
@defer.inlineCallbacks
def user(config):
master = config.get('master')
op = config.get('op')
username = config.get('username')
passwd = config.get('passwd')
master, port = master.split(":")
port = int(port)
bb_username = config.get('bb_username')
bb_password = config.get('bb_password')
if bb_username or bb_password:
bb_password = users.encrypt(bb_password)
info = config.get('info')
ids = config.get('ids')
# find identifier if op == add
if info and op == 'add':
for user in info:
user['identifier'] = sorted(user.values())[0]
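            # e.g. (hypothetical) info == [{'email': 'jdoe@example.org'}]
            # sets identifier to 'jdoe@example.org', the first value in
            # sorted order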
uc = usersclient.UsersClient(master, username, passwd, port)
output = yield uc.send(op, bb_username, bb_password, ids, info)
if output:
print output
defer.returnValue(0)
|
{
"content_hash": "a73f0ca8445b7e451918d3bb629d68d8",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 67,
"avg_line_length": 29.515151515151516,
"alnum_prop": 0.6581108829568788,
"repo_name": "denny820909/builder",
"id": "4f231e769d0d6c5ce86af091620fe52cec019135",
"size": "1680",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/scripts/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "106"
},
{
"name": "C",
"bytes": "68706"
},
{
"name": "CSS",
"bytes": "18630"
},
{
"name": "D",
"bytes": "532"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "HTML",
"bytes": "69377"
},
{
"name": "Makefile",
"bytes": "1220"
},
{
"name": "Objective-C",
"bytes": "1291"
},
{
"name": "Python",
"bytes": "21088388"
},
{
"name": "Shell",
"bytes": "2766"
},
{
"name": "Smarty",
"bytes": "393"
}
],
"symlink_target": ""
}
|
from installed_clients.DataFileUtilClient import DataFileUtil
from .utils import report_utils
from .utils.TemplateUtil import TemplateUtil
from .utils.validation_utils import validate_simple_report_params, validate_extended_report_params
import os
from configparser import ConfigParser
#END_HEADER
class KBaseReport:
'''
Module Name:
KBaseReport
Module Description:
Module for workspace data object reports, which show the results of running a job in an SDK app.
'''
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "3.2.0"
GIT_URL = "https://github.com/ialarmedalien/KBaseReport"
GIT_COMMIT_HASH = "f5bc602a97236420844d03782549055d9ecbf2f0"
#BEGIN_CLASS_HEADER
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
#BEGIN_CONSTRUCTOR
self.config = config
self.callback_url = os.environ['SDK_CALLBACK_URL']
self.dfu = DataFileUtil(self.callback_url)
config_parser = ConfigParser()
config_file = os.environ.get('KB_DEPLOYMENT_CONFIG', None)
if not config_file:
raise ValueError('No config file found. Cannot initialise Template Toolkit')
template_toolkit_config = {}
config_parser.read(config_file)
for config_item in config_parser.items('TemplateToolkitPython'):
template_toolkit_config[config_item[0]] = config_item[1]
self.config['template_toolkit'] = template_toolkit_config
self.templater = TemplateUtil(self.config)
self.scratch = config['scratch']
#END_CONSTRUCTOR
pass
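    # A hypothetical sketch of the deploy config section read in the
    # constructor (the key shown is an example, not the real contents):
    #
    #   [TemplateToolkitPython]
    #   scratch = /kb/module/work/tmp
    #
    # Every key/value pair in that section is copied into
    # self.config['template_toolkit'] before TemplateUtil is built.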
def create(self, ctx, params):
"""
Function signature for the create() method -- generate a simple,
text-based report for an app run.
@deprecated KBaseReport.create_extended_report
:param params: instance of type "CreateParams" (* Parameters for the
create() method * * Pass in *either* workspace_name or
workspace_id -- only one is needed. * Note that workspace_id is
            preferred over workspace_name because workspace_id is immutable. If *
both are provided, the workspace_id will be used. * * Required
arguments: * SimpleReport report - See the structure above *
string workspace_name - Workspace name of the running app.
Required * if workspace_id is absent * int
workspace_id - Workspace ID of the running app. Required if *
workspace_name is absent) -> structure: parameter "report" of type
"SimpleReport" (* A simple report for use in create() * Optional
arguments: * string text_message - Readable plain-text report
message * string direct_html - Simple HTML text that will be
rendered within the report widget * TemplateParams template -
a template file and template data to be rendered and displayed *
as HTML. Use in place of 'direct_html' * list<string> warnings
- A list of plain-text warning messages *
list<WorkspaceObject> objects_created - List of result workspace
objects that this app * has created. They will get linked
in the report view) -> structure: parameter "text_message" of
String, parameter "direct_html" of String, parameter "template" of
type "TemplateParams" (* Structure representing a template to be
rendered. 'template_file' must be provided, * 'template_data_json'
is optional) -> structure: parameter "template_file" of String,
parameter "template_data_json" of String, parameter "warnings" of
list of String, parameter "objects_created" of list of type
"WorkspaceObject" (* Represents a Workspace object with some brief
description text * that can be associated with the object. *
Required arguments: * ws_id ref - workspace ID in the format
'workspace_id/object_id/version' * Optional arguments: *
string description - A plaintext, human-readable description of
the * object created) -> structure: parameter "ref" of
type "ws_id" (* Workspace ID reference in the format
'workspace_id/object_id/version' * @id ws), parameter
"description" of String, parameter "workspace_name" of String,
parameter "workspace_id" of Long
:returns: instance of type "ReportInfo" (* The reference to the saved
KBaseReport. This is the return object for * both create() and
create_extended() * Returned data: * ws_id ref - reference to a
workspace object in the form of *
'workspace_id/object_id/version'. This is a reference to a saved *
Report object (see KBaseReportWorkspace.spec) * string name -
Plaintext unique name for the report. In * create_extended,
this can optionally be set in a parameter) -> structure: parameter
"ref" of type "ws_id" (* Workspace ID reference in the format
'workspace_id/object_id/version' * @id ws), parameter "name" of
String
"""
# ctx is the context object
# return variables are: info
#BEGIN create
params = validate_simple_report_params(params)
if 'template' in params['report']:
# render template and set content as 'direct_html'
params['report'] = self.templater.render_template_to_direct_html(params['report'])
info = report_utils.create_report(params, self.dfu)
#END create
# At some point might do deeper type checking...
if not isinstance(info, dict):
raise ValueError('Method create return value ' +
'info is not type dict as required.')
# return the results
return [info]
def create_extended_report(self, ctx, params):
"""
Create a report for the results of an app run. This method handles file
and HTML zipping, uploading, and linking as well as HTML rendering.
:param params: instance of type "CreateExtendedReportParams" (*
Parameters used to create a more complex report with file and HTML
links * * Pass in *either* workspace_name or workspace_id -- only
one is needed. * Note that workspace_id is preferred over
            workspace_name because workspace_id is immutable. * * Note that it is
possible to pass both 'html_links'/'direct_html_link_index' and
'direct_html' * as parameters for an extended report; in such
            cases, the file specified by the * 'html_links'/'direct_html_link_index'
            parameters is used for the report and the 'direct_html' is ignored.
* * Required arguments: * string workspace_name - Name of the
workspace where the report * should be saved. Required if
workspace_id is absent * int workspace_id - ID of workspace
where the report should be saved. * Required if
workspace_name is absent * Optional arguments: * string
message - Simple text message to store in the report object *
list<WorkspaceObject> objects_created - List of result workspace
objects that this app * has created. They will be linked
in the report view * list<string> warnings - A list of
plain-text warning messages * string direct_html - Simple HTML
text content to be rendered within the report widget. *
Set only one of 'direct_html', 'template', and
'html_links'/'direct_html_link_index'. * Setting both
'template' and 'direct_html' will generate an error. *
TemplateParams template - render a template to produce HTML text
content that will be * rendered within the report widget.
Setting 'template' and 'direct_html' or *
'html_links'/'direct_html_link_index' will generate an error. *
list<File> html_links - A list of paths, shock IDs, or template
specs pointing to HTML files or directories. * If you pass
in paths to directories, they will be zipped and uploaded *
int direct_html_link_index - Index in html_links to set the
direct/default view in the report. * Set only one of
'direct_html', 'template', and
'html_links'/'direct_html_link_index'. * Setting both
'template' and 'html_links'/'direct_html_link_index' will generate
an error. * list<File> file_links - Allows the user to specify
files that the report widget * should link for download.
If you pass in paths to directories, they will be zipped. *
Each entry should be a path, shock ID, or template specification.
* string report_object_name - Name to use for the report
object (will * be auto-generated if unspecified) *
html_window_height - Fixed height in pixels of the HTML window for
the report * summary_window_height - Fixed height in pixels of
the summary window for the report) -> structure: parameter
"message" of String, parameter "objects_created" of list of type
"WorkspaceObject" (* Represents a Workspace object with some brief
description text * that can be associated with the object. *
Required arguments: * ws_id ref - workspace ID in the format
'workspace_id/object_id/version' * Optional arguments: *
string description - A plaintext, human-readable description of
the * object created) -> structure: parameter "ref" of
type "ws_id" (* Workspace ID reference in the format
'workspace_id/object_id/version' * @id ws), parameter
"description" of String, parameter "warnings" of list of String,
parameter "html_links" of list of type "File" (* A file to be
linked in the report. Pass in *either* a shock_id or a * path. If
a path to a file is given, then the file will be uploaded. If a *
path to a directory is given, then it will be zipped and uploaded.
* Required arguments: * string name - Plain-text filename (eg.
"results.zip") -- shown to the user * One of the following
identifiers is required: * string path - Can be a file or
directory path. * string shock_id - Shock node ID. *
TemplateParams template - template to be rendered and saved as a
file. * Optional arguments: * string label - A short
description for the file (eg. "Filter results") * string
description - A more detailed, human-readable description of the
file) -> structure: parameter "path" of String, parameter
"shock_id" of String, parameter "template" of type
"TemplateParams" (* Structure representing a template to be
rendered. 'template_file' must be provided, * 'template_data_json'
is optional) -> structure: parameter "template_file" of String,
parameter "template_data_json" of String, parameter "name" of
String, parameter "label" of String, parameter "description" of
String, parameter "template" of type "TemplateParams" (* Structure
representing a template to be rendered. 'template_file' must be
provided, * 'template_data_json' is optional) -> structure:
parameter "template_file" of String, parameter
"template_data_json" of String, parameter "direct_html" of String,
parameter "direct_html_link_index" of Long, parameter "file_links"
of list of type "File" (* A file to be linked in the report. Pass
in *either* a shock_id or a * path. If a path to a file is given,
then the file will be uploaded. If a * path to a directory is
given, then it will be zipped and uploaded. * Required arguments:
* string name - Plain-text filename (eg. "results.zip") --
shown to the user * One of the following identifiers is required:
* string path - Can be a file or directory path. * string
shock_id - Shock node ID. * TemplateParams template - template
to be rendered and saved as a file. * Optional arguments: *
string label - A short description for the file (eg. "Filter
results") * string description - A more detailed,
human-readable description of the file) -> structure: parameter
"path" of String, parameter "shock_id" of String, parameter
"template" of type "TemplateParams" (* Structure representing a
template to be rendered. 'template_file' must be provided, *
'template_data_json' is optional) -> structure: parameter
"template_file" of String, parameter "template_data_json" of
String, parameter "name" of String, parameter "label" of String,
parameter "description" of String, parameter "report_object_name"
of String, parameter "html_window_height" of Double, parameter
"summary_window_height" of Double, parameter "workspace_name" of
String, parameter "workspace_id" of Long
:returns: instance of type "ReportInfo" (* The reference to the saved
KBaseReport. This is the return object for * both create() and
create_extended() * Returned data: * ws_id ref - reference to a
workspace object in the form of *
'workspace_id/object_id/version'. This is a reference to a saved *
Report object (see KBaseReportWorkspace.spec) * string name -
Plaintext unique name for the report. In * create_extended,
this can optionally be set in a parameter) -> structure: parameter
"ref" of type "ws_id" (* Workspace ID reference in the format
'workspace_id/object_id/version' * @id ws), parameter "name" of
String
"""
# ctx is the context object
# return variables are: info
#BEGIN create_extended_report
params = validate_extended_report_params(params)
if 'template' in params:
# render template and set content as 'direct_html'
params = self.templater.render_template_to_direct_html(params)
info = report_utils.create_extended(params, self.dfu, self.templater)
#END create_extended_report
# At some point might do deeper type checking...
if not isinstance(info, dict):
raise ValueError('Method create_extended_report return value ' +
'info is not type dict as required.')
# return the results
return [info]
def render_template(self, ctx, params):
"""
Render a file from a template. This method takes a template file and
a data structure, renders the template, and saves the results to a file.
It returns the output file path in the form
{ 'path': '/path/to/file' }
To ensure that the template and the output file are accessible to both
the KBaseReport service and the app requesting the template rendering, the
template file should be copied into the shared `scratch` directory and the
output_file location should also be in `scratch`.
See https://github.com/kbaseIncubator/kbase_report_templates for sample
page templates, standard includes, and instructions on creating your own
templates.
:param params: instance of type "RenderTemplateParams" (* Render a
template using the supplied data, saving the results to an output
* file in the scratch directory. * * Required arguments: *
string template_file - Path to the template file to be rendered.
* string output_file - Path to the file where the rendered
template * should be saved. Must be
in the scratch directory. * Optional: * string
template_data_json - Data for rendering in the template.) ->
structure: parameter "template_file" of String, parameter
"output_file" of String, parameter "template_data_json" of String
:returns: instance of type "File" (* A file to be linked in the
report. Pass in *either* a shock_id or a * path. If a path to a
file is given, then the file will be uploaded. If a * path to a
directory is given, then it will be zipped and uploaded. *
Required arguments: * string name - Plain-text filename (eg.
"results.zip") -- shown to the user * One of the following
identifiers is required: * string path - Can be a file or
directory path. * string shock_id - Shock node ID. *
TemplateParams template - template to be rendered and saved as a
file. * Optional arguments: * string label - A short
description for the file (eg. "Filter results") * string
description - A more detailed, human-readable description of the
file) -> structure: parameter "path" of String, parameter
"shock_id" of String, parameter "template" of type
"TemplateParams" (* Structure representing a template to be
rendered. 'template_file' must be provided, * 'template_data_json'
is optional) -> structure: parameter "template_file" of String,
parameter "template_data_json" of String, parameter "name" of
String, parameter "label" of String, parameter "description" of
String
"""
# ctx is the context object
# return variables are: output_file_path
#BEGIN render_template
output_file_path = self.templater.render_template_to_file(params)
#END render_template
# At some point might do deeper type checking...
if not isinstance(output_file_path, dict):
raise ValueError('Method render_template return value ' +
'output_file_path is not type dict as required.')
# return the results
return [output_file_path]
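    # A minimal usage sketch for render_template (paths and data are
    # hypothetical), following the docstring above:
    #
    #   params = {'template_file': '/kb/module/work/tmp/view.tt',
    #             'output_file': '/kb/module/work/tmp/view.html',
    #             'template_data_json': '{"title": "My report"}'}
    #   impl.render_template(ctx, params)
    #   # -> [{'path': '/kb/module/work/tmp/view.html'}]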
def render_templates(self, ctx, params):
"""
Render files from a set of template specifications.
It returns the output file paths in the order they were supplied in the form
[{ 'path': '/path/to/file' }, { 'path': '/path/to/file2' }, ....]
If any template fails to render, the endpoint will return an error.
:param params: instance of type "RenderTemplateListParams" ->
structure: parameter "params" of list of type
"RenderTemplateParams" (* Render a template using the supplied
data, saving the results to an output * file in the scratch
directory. * * Required arguments: * string template_file -
Path to the template file to be rendered. * string output_file
- Path to the file where the rendered template *
should be saved. Must be in the scratch directory. * Optional: *
string template_data_json - Data for rendering in the template.)
-> structure: parameter "template_file" of String, parameter
"output_file" of String, parameter "template_data_json" of String
:returns: instance of type "FileList" -> structure: parameter
"file_list" of list of type "File" (* A file to be linked in the
report. Pass in *either* a shock_id or a * path. If a path to a
file is given, then the file will be uploaded. If a * path to a
directory is given, then it will be zipped and uploaded. *
Required arguments: * string name - Plain-text filename (eg.
"results.zip") -- shown to the user * One of the following
identifiers is required: * string path - Can be a file or
directory path. * string shock_id - Shock node ID. *
TemplateParams template - template to be rendered and saved as a
file. * Optional arguments: * string label - A short
description for the file (eg. "Filter results") * string
description - A more detailed, human-readable description of the
file) -> structure: parameter "path" of String, parameter
"shock_id" of String, parameter "template" of type
"TemplateParams" (* Structure representing a template to be
rendered. 'template_file' must be provided, * 'template_data_json'
is optional) -> structure: parameter "template_file" of String,
parameter "template_data_json" of String, parameter "name" of
String, parameter "label" of String, parameter "description" of
String
"""
# ctx is the context object
# return variables are: output_paths
#BEGIN render_templates
output_paths = self.templater.render_template_list_to_files(params)
#END render_templates
# At some point might do deeper type checking...
if not isinstance(output_paths, list):
raise ValueError('Method render_templates return value ' +
                             'output_paths is not type list as required.')
# return the results
return [output_paths]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
|
{
"content_hash": "cb1ce1d2a588c665c518a9eae432d205",
"timestamp": "",
"source": "github",
"line_count": 382,
"max_line_length": 100,
"avg_line_length": 58.397905759162306,
"alnum_prop": 0.6270844540075309,
"repo_name": "msneddon/KBaseReport",
"id": "784f8eb93c7567a3df4963f626bd672a59ca5bbe",
"size": "22346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/KBaseReport/KBaseReportImpl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3020"
},
{
"name": "Java",
"bytes": "16444"
},
{
"name": "JavaScript",
"bytes": "3825"
},
{
"name": "Makefile",
"bytes": "2718"
},
{
"name": "Perl",
"bytes": "12359"
},
{
"name": "Python",
"bytes": "40360"
},
{
"name": "Ruby",
"bytes": "1974"
},
{
"name": "Shell",
"bytes": "1209"
}
],
"symlink_target": ""
}
|
"""
Test suite for codestream oddities
"""
# Standard library imports ...
import importlib.resources as ir
from io import BytesIO
import struct
import unittest
import warnings
# Local imports ...
import glymur
from glymur import Jp2k
from . import fixtures, data
class TestSuite(unittest.TestCase):
"""Test suite for ICC Profile code."""
def setUp(self):
self.p0_03 = ir.path(data, 'p0_03.j2k')
self.p0_06 = ir.path(data, 'p0_06.j2k')
self.p1_06 = ir.path(data, 'p1_06.j2k')
self.issue142 = ir.path(data, 'issue142.j2k')
self.edf_c2_1178956 = ir.path(data, 'edf_c2_1178956.jp2')
def test_tlm_segment(self):
"""
Verify parsing of the TLM segment.
In this case there's only a single tile.
"""
with ir.path(data, 'p0_06.j2k') as path:
j2k = Jp2k(path)
buffer = b'\xffU\x00\x08\x00@\x00\x00YW'
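        # Decoding the buffer: 0xFF55 is the TLM marker, 0x0008 the
        # segment length, Ztlm is 0x00, Stlm is 0x40 (ST=0, so no Ttlm
        # entries; SP=1, so 32-bit Ptlm values), and the single Ptlm
        # value is 0x00005957 == 22871.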
b = BytesIO(buffer[2:])
segment = j2k.codestream._parse_tlm_segment(b)
self.assertEqual(segment.ztlm, 0)
        # ttlm is returned as an array whose single element is None
self.assertIsNone(segment.ttlm.item())
self.assertEqual(segment.ptlm, (22871,))
def test_ppt_segment(self):
"""
Verify parsing of the PPT segment
"""
with ir.path(data, 'p1_06.j2k') as path:
j2k = Jp2k(path)
c = j2k.get_codestream(header_only=False)
self.assertEqual(c.segment[6].zppt, 0)
def test_plt_segment(self):
"""
Verify parsing of the PLT segment
"""
with ir.path(data, 'issue142.j2k') as path:
c = Jp2k(path).get_codestream(header_only=False)
self.assertEqual(c.segment[7].zplt, 0)
self.assertEqual(len(c.segment[7].iplt), 59)
def test_ppm_segment(self):
"""
Verify parsing of the PPM segment
"""
with ir.path(data, 'edf_c2_1178956.jp2') as path:
with warnings.catch_warnings():
# Lots of things wrong with this file.
warnings.simplefilter('ignore')
jp2 = Jp2k(path)
c = jp2.get_codestream()
self.assertEqual(c.segment[2].zppm, 0)
self.assertEqual(len(c.segment[2].data), 9)
def test_crg_segment(self):
"""
Verify parsing of the CRG segment
"""
with ir.path(data, 'p0_03.j2k') as path:
j2k = Jp2k(path)
c = j2k.get_codestream()
self.assertEqual(c.segment[6].xcrg, (65424,))
self.assertEqual(c.segment[6].ycrg, (32558,))
def test_rgn_segment(self):
"""
Verify parsing of the RGN segment
"""
with ir.path(data, 'p0_06.j2k') as path:
j2k = Jp2k(path)
c = j2k.get_codestream()
self.assertEqual(c.segment[-1].crgn, 0)
self.assertEqual(c.segment[-1].srgn, 0)
self.assertEqual(c.segment[-1].sprgn, 11)
class TestCodestreamRepr(unittest.TestCase):
def setUp(self):
self.jp2file = glymur.data.nemo()
def tearDown(self):
pass
def test_soc(self):
"""Test SOC segment repr"""
segment = glymur.codestream.SOCsegment()
newseg = eval(repr(segment))
self.assertEqual(newseg.marker_id, 'SOC')
def test_siz(self):
"""Test SIZ segment repr"""
kwargs = {
'rsiz': 0,
'xysiz': (2592, 1456),
'xyosiz': (0, 0),
'xytsiz': (2592, 1456),
'xytosiz': (0, 0),
'Csiz': 3,
'bitdepth': (8, 8, 8),
'signed': (False, False, False),
'xyrsiz': ((1, 1, 1), (1, 1, 1))
}
segment = glymur.codestream.SIZsegment(**kwargs)
newseg = eval(repr(segment))
self.assertEqual(newseg.marker_id, 'SIZ')
self.assertEqual(newseg.xsiz, 2592)
self.assertEqual(newseg.ysiz, 1456)
self.assertEqual(newseg.xosiz, 0)
self.assertEqual(newseg.yosiz, 0)
self.assertEqual(newseg.xtsiz, 2592)
self.assertEqual(newseg.ytsiz, 1456)
self.assertEqual(newseg.xtosiz, 0)
self.assertEqual(newseg.ytosiz, 0)
self.assertEqual(newseg.xrsiz, (1, 1, 1))
self.assertEqual(newseg.yrsiz, (1, 1, 1))
self.assertEqual(newseg.bitdepth, (8, 8, 8))
self.assertEqual(newseg.signed, (False, False, False))
class TestCodestream(fixtures.TestCommon):
"""Test suite for unusual codestream cases."""
def test_reserved_marker_segment(self):
"""
SCENARIO: Rewrite a J2K file to include a marker segment with a
reserved marker 0xff6f (65391).
EXPECTED RESULT: The marker segment should be properly parsed.
"""
with open(self.temp_j2k_filename, 'wb') as tfile:
with open(self.j2kfile, 'rb') as ifile:
# Everything up until the first QCD marker.
read_buffer = ifile.read(65)
tfile.write(read_buffer)
# Write the new marker segment, 0xff6f = 65391
read_buffer = struct.pack('>HHB', int(65391), int(3), int(0))
tfile.write(read_buffer)
# Get the rest of the input file.
read_buffer = ifile.read()
tfile.write(read_buffer)
tfile.flush()
codestream = Jp2k(tfile.name).get_codestream()
self.assertEqual(codestream.segment[3].marker_id, '0xff6f')
self.assertEqual(codestream.segment[3].length, 3)
self.assertEqual(codestream.segment[3].data, b'\x00')
def test_siz_segment_ssiz_unsigned(self):
"""ssiz attribute to be removed in future release"""
j = Jp2k(self.jp2file)
codestream = j.get_codestream()
# The ssiz attribute was simply a tuple of raw bytes.
# The first 7 bits are interpreted as the bitdepth, the MSB determines
# whether or not it is signed.
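        # For example, an Ssiz byte of 0x07 decodes as unsigned (MSB
        # clear) with bitdepth (0x07 & 0x7f) + 1 == 8, while 0x87 would
        # decode as signed with the same bitdepth.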
self.assertEqual(codestream.segment[1].ssiz, (7, 7, 7))
|
{
"content_hash": "b18d35c7005b1c90725e3212bc98c7a7",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 78,
"avg_line_length": 32.26881720430107,
"alnum_prop": 0.577307564145285,
"repo_name": "quintusdias/glymur",
"id": "cf84698309ebd3f4cfdfb60f1f337b535f191de9",
"size": "6027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_codestream.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3600"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "872299"
},
{
"name": "Shell",
"bytes": "2095"
}
],
"symlink_target": ""
}
|
"""predictions_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
urlpatterns = [
url(r'^', include('predictions.urls')),
url(r'^admin/', admin.site.urls)
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
{
"content_hash": "64c5555325af08263bf7687161b66aee",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 81,
"avg_line_length": 42,
"alnum_prop": 0.6865079365079365,
"repo_name": "Thing342/FRCPredictionsApp",
"id": "616a33b2beb9f5a5b37b4e02ef042144c6fe15cd",
"size": "1008",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "predictions_app/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1731"
},
{
"name": "HTML",
"bytes": "2917"
},
{
"name": "Python",
"bytes": "32737"
}
],
"symlink_target": ""
}
|
"""
A node and corresponding command to constrain the first twist joint in a
shoulder. The node also outputs the shoulder's spine-space elevation angle for
other nodes to use.
\b Creation \b Info:
\b Donations: http://adammechtley.com/donations/
\b License: The MIT License
Copyright (c) 2011 Adam Mechtley (http://adammechtley.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the 'Software'), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
\b Usage:
Add this file to your plug-in path and load it in Maya from the Plug-in
Manager (Window -> Settings/Preferences -> Plug-in Manager).
\namespace AM_ShoulderConstraint
"""
import math, re, sys
import maya.OpenMaya as OM
import maya.OpenMayaMPx as OMMPx
## current version of the plug-in
kVersionNumber = '1.06'
# ----------------------------------------------------------------
# Command Definition
# ----------------------------------------------------------------
class AM_ShoulderConstraintCmd(OMMPx.MPxCommand):
"""
A command to quickly create, edit, or query an am_shoulderConstraint node.
"""
## the name of the command
kPluginCmdName = 'am_shoulderConstraint'
## specifies a name for the am_shoulderConstraint node (CE)
kNameFlag = '-n'
kNameFlagLong = '-name'
## the shoulder object (CEQ)
kShoulderObjectFlag = '-sh'
kShoulderObjectFlagLong = '-shoulderObject'
## the spine object (CEQ)
kSpineObjectFlag = '-sp'
kSpineObjectFlagLong = '-spineObject'
## twist offset when the arm is raised (CEQ)
kRaisedAngleFlag = '-rao'
kRaisedAngleFlagLong = '-raisedAngleOffset'
## local axis of the shoulder that aims at the elbow (CEQ)
kShoulderAimFlag = '-sha'
kShoulderAimFlagLong = '-shoulderAim'
## local axis of the shoulder that points toward the character's front (CEQ)
kShoulderFrontFlag = '-shf'
kShoulderFrontFlagLong = '-shoulderFront'
## local axis of the spine that points up toward the head (CEQ)
kSpineAimFlag = '-spa'
kSpineAimFlagLong = '-spineAim'
## local axis of the spine that points toward the character's front (CEQ)
kSpineFrontFlag = '-spf'
kSpineFrontFlagLong = '-spineFront'
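    # A hedged usage sketch (node and object names are hypothetical),
    # combining the flags declared above:
    #
    #   import maya.cmds as cmds
    #   # create mode: constrain a named helper joint
    #   cmds.am_shoulderConstraint('twist_helper', sh='L_shoulder',
    #                              sp='ribcage', rao=45.0,
    #                              sha=(1, 0, 0), shf=(0, 0, 1))
    #   # query mode: ask an existing node for its shoulder object
    #   cmds.am_shoulderConstraint('twist_helper_shoulderConstraint',
    #                              q=True, sh=True)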
def __init__(self):
OMMPx.MPxCommand.__init__(self)
self.__isQueryUsed = True # initialize to True so command is not added to queue if argument parsing fails
self.__isEditUsed = False # if the edit flag has been set, then undo will be enabled
self.__shoulderConstraintNodeArg = OM.MObject() # the am_exposeTransform node selected for edit and query modes
self.__shoulderConstraintNodeFn = OM.MFnDependencyNode()
self.__shoulderConstraintNodeName = ''
self.__constrainedObjectArg = OM.MDagPath() # the helper joint whose rotation is constrained
self.__shoulderObjectArg = OM.MDagPath() # the shoulder object to which the helper joint is constrained
self.__spineObjectArg = OM.MDagPath() # the spine object (ribcage)
self.__raisedAngleArg = 45.0 # amount up-vector rotates back from lateral direction when arm is raised
self.__shoulderAimAxisArg = OM.MVector(1, 0, 0)
self.__shoulderFrontAxisArg = OM.MVector(0, 0, 1)
self.__spineAimAxisArg = OM.MVector(1, 0, 0)
self.__spineFrontAxisArg = OM.MVector(0, 0, 1)
self.__dgModify = OM.MDGModifier() # DG modifier used to create and modify nodes
def doIt(self, args):
# parse the arguments
try:
argData = OM.MArgDatabase(self.syntax(), args) # if this fails, it will raise its own exception...
except:
pass # ...so we can just pass here
else:
# read all of the arguments and store them to the appropriate data attributes
# manually confirm the object list
sel = OM.MSelectionList()
argData.getObjects(sel)
# ordinarily, the command is designed to only operate on a single object at a time
if not sel.length() == 1:
# edit and query mode require one object
if argData.isEdit() or argData.isQuery():
raise Exception(
'This command requires exactly 1 argument to be specified or selected; found %i.' % sel.length())
# create mode supports a variety of patterns
else:
isShoulderSpecified = argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderObjectFlag)
isSpineSpecified = argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineObjectFlag)
# selection list is either [spine, constraintObject] with shoulder flag, or [shoulder, constrainedObject] with spine flag
if sel.length() == 2:
if not (isShoulderSpecified or isSpineSpecified):
raise Exception(
'Selection list of length 2 requires either a spine or a shoulder be specified with a flag.')
else:
if isSpineSpecified:
sel.add(argData.flagArgumentString(AM_ShoulderConstraintCmd.kSpineObjectFlag, 0))
sel.getDagPath(sel.length() - 1, self.__spineObjectArg)
else:
sel.getDagPath(0, self.__spineObjectArg)
if isShoulderSpecified:
sel.add(argData.flagArgumentString(AM_ShoulderConstraintCmd.kShoulderObjectFlag, 0))
sel.getDagPath(sel.length() - 1, self.__shoulderObjectArg)
else:
sel.getDagPath(0, self.__shoulderObjectArg)
sel.getDagPath(1, self.__constrainedObjectArg)
# selection list is either [spine, shoulder, constraintObject], or has spine or shoulder specified with flags
elif sel.length() == 3:
if isSpineSpecified:
sel.add(argData.flagArgumentString(AM_ShoulderConstraintCmd.kSpineObjectFlag, 0))
sel.getDagPath(sel.length() - 1, self.__spineObjectArg)
else:
sel.getDagPath(0, self.__spineObjectArg)
if isShoulderSpecified:
                            sel.add(argData.flagArgumentString(AM_ShoulderConstraintCmd.kShoulderObjectFlag, 0))
sel.getDagPath(sel.length() - 1, self.__shoulderObjectArg)
else:
sel.getDagPath(1, self.__shoulderObjectArg)
sel.getDagPath(2, self.__constrainedObjectArg)
else:
raise Exception(
'This command requires 3 or fewer arguments to be specified or selected; found %i.' % sel.length())
else:
iter = OM.MItSelectionList(sel, OM.MFn.kDependencyNode)
while not iter.isDone():
# in edit or query mode, the object must be an am_shoulderConstraint node
if argData.isEdit() or argData.isQuery():
iter.getDependNode(self.__shoulderConstraintNodeArg)
self.__shoulderConstraintNodeFn.setObject(self.__shoulderConstraintNodeArg)
if not (self.__shoulderConstraintNodeFn.typeId() == AM_ShoulderConstraintNode.kPluginNodeId):
raise Exception('The provided dependency node %s is not of type %s.' % (
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kPluginNodeTypeName))
# in create mode, only a transform node is accepted
else:
selectedObject = OM.MObject()
iter.getDependNode(selectedObject)
if selectedObject.hasFn(OM.MFn.kTransform):
OM.MDagPath.getAPathTo(selectedObject, self.__constrainedObjectArg)
else:
selectedObjectFn = OM.MFnDependencyNode(selectedObject)
raise Exception('%s is not a valid transform node.' % selectedObjectFn.name())
# verify that a shoulder and spine have been specified
if not argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderObjectFlag) or not argData.isFlagSet(
AM_ShoulderConstraintCmd.kSpineObjectFlag):
raise Exception(
'When only one argument is specified in create mode, shoulder and spine flags must be present.')
iter.next()
# perform the query
if argData.isQuery():
self.__shoulderObjectArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderObjectFlag)
self.__spineObjectArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineObjectFlag)
self.__raisedAngleArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kRaisedAngleFlag)
self.__shoulderAimAxisArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderAimFlag)
self.__shoulderFrontAxisArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderFrontFlag)
self.__spineAimAxisArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineAimFlag)
self.__spineFrontAxisArg = argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineFrontFlag)
self.doItQuery()
# set up other arguments and call redoIt() for create or edit mode
else:
# validate the name flag
if argData.isFlagSet(AM_ShoulderConstraintCmd.kNameFlag):
self.__shoulderConstraintNodeName = argData.flagArgumentString(AM_ShoulderConstraintCmd.kNameFlag,
0)
                        if (len(self.__shoulderConstraintNodeName) < 1 or self.__shoulderConstraintNodeName[
                                0].isalpha() is False or len(re.split(r'\W+', self.__shoulderConstraintNodeName)) > 1):
raise Exception(
'%s is not a valid node name. Did you type it correctly?' % self.__shoulderConstraintNodeName)
# validate dagpaths
sel = OM.MSelectionList()
if argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderObjectFlag):
sel.clear()
try:
sel.add(argData.flagArgumentString(AM_ShoulderConstraintCmd.kShoulderObjectFlag, 0))
sel.getDagPath(0, self.__shoulderObjectArg)
except:
raise Exception(
'%s is not a valid node name. Did you type it correctly?' % argData.flagArgumentString(
AM_ShoulderConstraintCmd.kShoulderObjectFlag, 0))
if argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineObjectFlag):
sel.clear()
try:
sel.add(argData.flagArgumentString(AM_ShoulderConstraintCmd.kSpineObjectFlag, 0))
sel.getDagPath(0, self.__spineObjectArg)
except:
raise Exception(
'%s is not a valid node name. Did you type it correctly?' % argData.flagArgumentString(
AM_ShoulderConstraintCmd.kSpineObjectFlag, 0))
# get raised angle arg
if argData.isFlagSet(AM_ShoulderConstraintCmd.kRaisedAngleFlag):
                        self.__raisedAngleArg = argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kRaisedAngleFlag, 0)
else:
self.__raisedAngleArg = None
# get vector args
if argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderAimFlag):
self.__shoulderAimAxisArg = OM.MVector(
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kShoulderAimFlag, 0),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kShoulderAimFlag, 1),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kShoulderAimFlag, 2))
if argData.isFlagSet(AM_ShoulderConstraintCmd.kShoulderFrontFlag):
self.__shoulderFrontAxisArg = OM.MVector(
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kShoulderFrontFlag, 0),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kShoulderFrontFlag, 1),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kShoulderFrontFlag, 2))
if argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineAimFlag):
self.__spineAimAxisArg = OM.MVector(
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kSpineAimFlag, 0),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kSpineAimFlag, 1),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kSpineAimFlag, 2))
if argData.isFlagSet(AM_ShoulderConstraintCmd.kSpineFrontFlag):
self.__spineFrontAxisArg = OM.MVector(
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kSpineFrontFlag, 0),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kSpineFrontFlag, 1),
argData.flagArgumentDouble(AM_ShoulderConstraintCmd.kSpineFrontFlag, 2))
# set the isEditUsed flag only after all arguments have been stored to ensure command is not added to queue before it has done anything
self.__isEditUsed = argData.isEdit()
self.__isQueryUsed = False
self.redoIt()
def doItQuery(self):
# query mode typically only supports one argument at a time
# this principle ensures that the result will be given in a way that can be stored in a variable
path = OM.MDagPath()
doubleArray = OM.MScriptUtil()
if self.__shoulderObjectArg:
plug = OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug(AM_ShoulderConstraintNode.kShoulderObjectAttrName, True))
iter = OM.MItDependencyGraph(plug, OM.MFn.kTransform, OM.MItDependencyGraph.kUpstream)
while not iter.isDone():
OM.MDagPath.getAPathTo(iter.currentItem(), path)
iter.next()
self.setResult(path.partialPathName())
elif self.__spineObjectArg:
plug = OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug(AM_ShoulderConstraintNode.kSpineObjectAttrName, True))
iter = OM.MItDependencyGraph(plug, OM.MFn.kTransform, OM.MItDependencyGraph.kUpstream)
while not iter.isDone():
OM.MDagPath.getAPathTo(iter.currentItem(), path)
iter.next()
self.setResult(path.partialPathName())
elif self.__raisedAngleArg:
self.setResult(OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug(AM_ShoulderConstraintNode.kRaisedAngleAttrName,
True)).asDouble())
elif self.__shoulderAimAxisArg:
doubleArray.createFromDouble(
OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug('%s0' % AM_ShoulderConstraintNode.kShoulderAimAttrName,
True)).asDouble(),
OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug('%s1' % AM_ShoulderConstraintNode.kShoulderAimAttrName,
True)).asDouble(),
OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug('%s2' % AM_ShoulderConstraintNode.kShoulderAimAttrName,
True)).asDouble())
self.setResult(OM.MDoubleArray(doubleArray.asDoublePtr(), 3))
elif self.__shoulderFrontAxisArg:
doubleArray.createFromDouble(
OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug('%s0' % AM_ShoulderConstraintNode.kShoulderFrontAttrName,
True)).asDouble(),
OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug('%s1' % AM_ShoulderConstraintNode.kShoulderFrontAttrName,
True)).asDouble(),
OM.MPlug(
self.__shoulderConstraintNodeFn.findPlug('%s2' % AM_ShoulderConstraintNode.kShoulderFrontAttrName,
True)).asDouble())
self.setResult(OM.MDoubleArray(doubleArray.asDoublePtr(), 3))
elif self.__spineAimAxisArg:
doubleArray.createFromDouble(
OM.MPlug(self.__shoulderConstraintNodeFn.findPlug('%s0' % AM_ShoulderConstraintNode.kSpineAimAttrName,
True)).asDouble(),
OM.MPlug(self.__shoulderConstraintNodeFn.findPlug('%s1' % AM_ShoulderConstraintNode.kSpineAimAttrName,
True)).asDouble(),
OM.MPlug(self.__shoulderConstraintNodeFn.findPlug('%s2' % AM_ShoulderConstraintNode.kSpineAimAttrName,
True)).asDouble())
self.setResult(OM.MDoubleArray(doubleArray.asDoublePtr(), 3))
elif self.__spineFrontAxisArg:
doubleArray.createFromDouble(
OM.MPlug(self.__shoulderConstraintNodeFn.findPlug('%s0' % AM_ShoulderConstraintNode.kSpineFrontAttrName,
True)).asDouble(),
OM.MPlug(self.__shoulderConstraintNodeFn.findPlug('%s1' % AM_ShoulderConstraintNode.kSpineFrontAttrName,
True)).asDouble(),
OM.MPlug(self.__shoulderConstraintNodeFn.findPlug('%s2' % AM_ShoulderConstraintNode.kSpineFrontAttrName,
True)).asDouble())
self.setResult(OM.MDoubleArray(doubleArray.asDoublePtr(), 3))
def redoIt(self):
# clear out the modifier so it doesn't store old object names
self.__dgModify = OM.MDGModifier()
# create a new node if the command is in create mode
if not self.__isEditUsed:
self.__shoulderConstraintNodeArg = OM.MObject(
self.__dgModify.createNode(AM_ShoulderConstraintNode.kPluginNodeId))
self.__shoulderConstraintNodeFn.setObject(self.__shoulderConstraintNodeArg)
fn = OM.MFnDagNode(self.__constrainedObjectArg)
self.__dgModify.renameNode(self.__shoulderConstraintNodeArg, '%s_shoulderConstraint' % fn.name())
# assign the -name argument if provided
if len(self.__shoulderConstraintNodeName) > 0:
self.__dgModify.renameNode(self.__shoulderConstraintNodeArg, self.__shoulderConstraintNodeName)
# WARNING: must tell the DGModifier to doIt() now in order to let Maya's auto-rename kick in and ensure the name is unique
# otherwise attempts to use commandToExecute below may end up using some other object
self.__dgModify.doIt()
# set the attributes on the node
plug = OM.MPlug()
if self.__constrainedObjectArg.isValid():
self.__dgModify.commandToExecute('connectAttr -f %s.%s %s.rotate' % (
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kRotateAttrName,
self.__constrainedObjectArg.partialPathName()))
self.__dgModify.commandToExecute('connectAttr -f %s.rotateOrder %s.%s' % (
self.__constrainedObjectArg.partialPathName(),
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kRotateOrderAttrName))
self.__dgModify.commandToExecute('connectAttr -f %s.parentInverseMatrix %s.%s' % (
self.__constrainedObjectArg.partialPathName(),
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kParentInvMatrixAttrName))
if self.__shoulderObjectArg.isValid():
self.__dgModify.commandToExecute('connectAttr -f %s.worldMatrix %s.%s' % (
self.__shoulderObjectArg.partialPathName(),
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kShoulderObjectAttrName))
if not self.__shoulderObjectArg.hasFn(OM.MFn.kJoint):
self.__dgModify.commandToExecute('connectAttr -f %s.rotatePivot %s.%s' % (
self.__shoulderObjectArg.partialPathName(),
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kShoulderRotatePivotAttrName))
if self.__spineObjectArg.isValid():
self.__dgModify.commandToExecute('connectAttr -f %s.worldMatrix %s.%s' % (
self.__spineObjectArg.partialPathName(),
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kSpineObjectAttrName))
if not self.__spineObjectArg.hasFn(OM.MFn.kJoint):
self.__dgModify.commandToExecute('connectAttr -f %s.rotatePivot %s.%s' % (
self.__spineObjectArg.partialPathName(),
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kSpineRotatePivotAttrName))
if self.__raisedAngleArg is not None:
# set the raisedAngleOffset attribute if it is not connected
plug = self.__shoulderConstraintNodeFn.findPlug(AM_ShoulderConstraintNode.kRaisedAngleAttrName, True)
if not plug.isConnected():
self.__dgModify.commandToExecute('setAttr %s.%s %s' % (
self.__shoulderConstraintNodeFn.name(), AM_ShoulderConstraintNode.kRaisedAngleAttrName,
self.__raisedAngleArg))
# set vector attribute values
vectorArgs = {
self.__shoulderAimAxisArg: AM_ShoulderConstraintNode.kShoulderAimAttrName,
self.__shoulderFrontAxisArg: AM_ShoulderConstraintNode.kShoulderFrontAttrName,
self.__spineAimAxisArg: AM_ShoulderConstraintNode.kSpineAimAttrName,
self.__spineFrontAxisArg: AM_ShoulderConstraintNode.kSpineFrontAttrName
}
for vArg in vectorArgs:
if not vArg:
continue
# set the vector attribute if it is not connected
plug = self.__shoulderConstraintNodeFn.findPlug(vectorArgs[vArg], True)
if not plug.isConnected():
self.__dgModify.commandToExecute('setAttr %s.%s %s %s %s' % (
self.__shoulderConstraintNodeFn.name(),
vectorArgs[vArg], vArg.x, vArg.y, vArg.z))
# following Maya convention, select the newly created node if the command is in create mode
if not self.__isEditUsed:
self.__dgModify.commandToExecute('select %s' % self.__shoulderConstraintNodeFn.name())
self.__dgModify.doIt()
self.setResult(self.__shoulderConstraintNodeFn.name())
def undoIt(self):
self.__dgModify.undoIt()
def isUndoable(self):
# the command should only be undoable if edit or create mode was used
return not self.__isQueryUsed
@classmethod
def cmdCreator(cls):
return OMMPx.asMPxPtr(cls())
@classmethod
def syntaxCreator(cls):
syntax = OM.MSyntax()
syntax.enableQuery() # BUG: including these modes has benefits, but it also breaks built-in object parsing
syntax.enableEdit()
syntax.addFlag(cls.kNameFlag, cls.kNameFlagLong, OM.MSyntax.kString)
syntax.addFlag(cls.kSpineObjectFlag, cls.kSpineObjectFlagLong, OM.MSyntax.kSelectionItem)
syntax.addFlag(cls.kShoulderObjectFlag, cls.kShoulderObjectFlagLong, OM.MSyntax.kSelectionItem)
syntax.addFlag(cls.kRaisedAngleFlag, cls.kRaisedAngleFlagLong, OM.MSyntax.kDouble)
syntax.addFlag(cls.kShoulderAimFlag, cls.kShoulderAimFlagLong, OM.MSyntax.kDouble, OM.MSyntax.kDouble,
OM.MSyntax.kDouble)
syntax.addFlag(cls.kShoulderFrontFlag, cls.kShoulderFrontFlagLong, OM.MSyntax.kDouble, OM.MSyntax.kDouble,
OM.MSyntax.kDouble)
syntax.addFlag(cls.kSpineAimFlag, cls.kSpineAimFlagLong, OM.MSyntax.kDouble, OM.MSyntax.kDouble,
OM.MSyntax.kDouble)
syntax.addFlag(cls.kSpineFrontFlag, cls.kSpineFrontFlagLong, OM.MSyntax.kDouble, OM.MSyntax.kDouble,
OM.MSyntax.kDouble)
syntax.useSelectionAsDefault(True)
syntax.setObjectType(OM.MSyntax.kSelectionList, 1, 3)
return syntax
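# A usage sketch for this command (assuming it registers under the same name
# as the node type; the object names and flag strings below are hypothetical,
# the real short/long flags are defined by the class constants):
#
# import maya.cmds as cmds
# cmds.loadPlugin('AM_ShoulderConstraint.py')
# # create mode: constrain a twist joint given a shoulder and a spine object
# cmds.am_shoulderConstraint('twist_jnt', shoulderObject='l_shoulder_jnt', spineObject='spine_top_jnt')
# # query mode: read a single attribute back into a variable
# angle = cmds.am_shoulderConstraint('twist_jnt_shoulderConstraint', query=True, raisedAngle=True)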
# ----------------------------------------------------------------
# Node definition
# ----------------------------------------------------------------
class AM_ShoulderConstraintNode(OMMPx.MPxNode):
"""
A node to constrain the first twist joint in a shoulder. The node also
outputs the shoulder's spine-space elevation angle for other nodes to use.
\par Input Attributes:
- \em rotateOrder: The rotateOrder attribute of the constrained object.
- \em raisedAngleOffset: The amount that the constrained object's
up-vector rotates back when the shoulder is raised. A value between
0 and 90 is ideal and should eliminate flipping in a normal human
range of motion. A value of 45 is recommended in most cases.
- \em shoulderAimAxis: Axis in the upper arm's local space that aims
toward the elbow joint.
- \em shoulderFrontAxis: Axis in the upper arm's local space that
points toward the character's front.
- \em spineAimAxis: Axis in the specified spine joint's local space
that aims toward the next vertebra (up).
- \em spineFrontAxis: Axis in the specified spine joint's local space
that aims toward the character's front.
- \em rotatePivot: The rotatePivot attribute of the upper arm.
- \em jointOrient: The jointOrient attribute of the constrained object
if it is a joint. If there is no connection (i.e., the constrained
object is not a joint), then it is initialized to identity and has
no effect.
- \em spineWorldMatrix: The worldMatrix attribute of the object to use
for computing the shoulder's elevation angle. The shoulder
constraint is designed with the expectation that this is the
first common parent of both upper arms (usually the ribcage).
Although this joint will produce perfectly valid values even when
intermediate joints (collar bone, scapula) exist, such an
intermediate joint could be used instead, provided that the axes
given for the spine node (above) are transformed into the
intermediate joint's local space.
- \em shoulderWorldMatrix: The worldMatrix attribute of the upper arm
object along whose aim axis (above) the constrained object is
twisting.
- \em parentInverseMatrix: The parentInverseMatrix attribute of the
constrained object.
\par Output Attributes:
- \em rotate: The local Euler rotation piped to the constrained object.
- \em elevationAngle: The elevation angle of the shoulder in spine
space.
"""
## the name of the nodeType
kPluginNodeTypeName = 'am_shoulderConstraint'
## the unique MTypeId for the node
kPluginNodeId = OM.MTypeId(0x001138C0)
# input attributes
## the constrained node's rotateOrder attribute
rotateOrder = OM.MObject()
kRotateOrderAttrName = 'rotateOrder'
kRotateOrderAttrLongName = 'constraintRotateOrder'
## twist offset when the arm is raised
raisedAngleOffset = OM.MObject()
kRaisedAngleAttrName = 'raisedOffset'
kRaisedAngleAttrLongName = 'raisedAngleOffset'
## local axis of the shoulder that aims at the elbow
shoulderAim = OM.MObject()
kShoulderAimAttrName = 'shoulderAim'
kShoulderAimAttrLongName = 'shoulderAimAxis'
## local axis of the shoulder that points toward the character's front
shoulderFront = OM.MObject()
kShoulderFrontAttrName = 'shoulderFront'
kShoulderFrontAttrLongName = 'shoulderFrontAxis'
## local axis of the spine that points up toward the head
spineAim = OM.MObject()
kSpineAimAttrName = 'spineAim'
kSpineAimAttrLongName = 'spineAimAxis'
## local axis of the spine that points toward the character's front
spineFront = OM.MObject()
kSpineFrontAttrName = 'spineFront'
kSpineFrontAttrLongName = 'spineFrontAxis'
## the rotatePivot attribute of the shoulder object
shoulderPivot = OM.MObject()
kShoulderRotatePivotAttrName = 'shoulderPivot'
kShoulderRotatePivotAttrLongName = 'shoulderRotatePivot'
## the rotatePivot attribute of the spine object
spinePivot = OM.MObject()
kSpineRotatePivotAttrName = 'spinePivot'
kSpineRotatePivotAttrLongName = 'spineRotatePivot'
## the jointOrient (XYZ Euler offset) attribute of the constrained object
jointOrient = OM.MObject()
kJointOrientAttrName = 'orient'
kJointOrientAttrLongName = 'jointOrient'
## the shoulder object
shoulder = OM.MObject()
kShoulderObjectAttrName = 'shoulder'
kShoulderObjectAttrLongName = 'shoulderWorldMatrix'
## the spine object
spine = OM.MObject()
kSpineObjectAttrName = 'spine'
kSpineObjectAttrLongName = 'spineWorldMatrix'
## parentInverseMatrix of the constrained transform
parentInverseMatrix = OM.MObject()
kParentInvMatrixAttrName = 'parent'
kParentInvMatrixAttrLongName = 'parentInverseMatrix'
# output attributes
# local rotation for constrained object
rotate = OM.MObject()
kRotateAttrName = 'rotate'
kRotateAttrLongName = 'constraintRotate'
# elevation angle of the shoulder
angle = OM.MObject()
kElevationAngleAttrName = 'angle'
kElevationAngleAttrLongName = 'elevationAngle'
def __init__(self):
OMMPx.MPxNode.__init__(self)
def compute(self, plug, dataBlock):
"""Compute the upper arm's elevation angle as well as the orientation for the first twist joint in the shoulder."""
if (plug == AM_ShoulderConstraintNode.rotate or plug == AM_ShoulderConstraintNode.angle):
# ----------------------------------------------------------------
# Get the incoming data
# ----------------------------------------------------------------
# the constrained node's rotateOrder attribute
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.rotateOrder)
eRotateOrder = dataHandle.asShort()
# angular offset for the raised orientation
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.raisedAngleOffset)
dRaisedAngleOffset = dataHandle.asDouble()
dRaisedAngleOffset = math.radians(dRaisedAngleOffset)
# axis pointing down the length of the upper arm
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.shoulderAim)
vShoulderForward = (dataHandle.asVector()).normal()
# axis pointing to the character's front
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.shoulderFront)
vShoulderRight = (dataHandle.asVector()).normal()
# axis pointing up toward the character's head
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.spineAim)
vSpineForward = (dataHandle.asVector()).normal()
# axis pointing to the character's front
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.spineFront)
vSpineUp = (dataHandle.asVector()).normal()
# the rotatePivot attribute of the upper arm
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.shoulderPivot)
vShoulderPivot = dataHandle.asVector()
# the rotatePivot attribute of the spine
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.spinePivot)
vSpinePivot = dataHandle.asVector()
# the jointOrient attribute (Euler XYZ offset) of the constrained node
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.jointOrient)
vJointOrient = dataHandle.asVector()
eJointOrient = OM.MEulerRotation(math.radians(vJointOrient.x), math.radians(vJointOrient.y),
math.radians(vJointOrient.z), 0)
qJointOrient = eJointOrient.asQuaternion()
qJointOrient.conjugateIt()
# the spine's worldMatrix attribute
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.spine)
mSpine = dataHandle.asMatrix()
mSpine = OM.MTransformationMatrix(mSpine)
qSpine = mSpine.rotation()
# the shoulder's worldMatrix attribute
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.shoulder)
mShoulder = dataHandle.asMatrix()
mShoulder = OM.MTransformationMatrix(mShoulder)
qShoulder = mShoulder.rotation()
# the constrained node's parentInverseMatrix attribute
dataHandle = dataBlock.inputValue(AM_ShoulderConstraintNode.parentInverseMatrix)
mParent = dataHandle.asMatrix()
mParent = OM.MTransformationMatrix(mParent)
qParent = mParent.rotation()
# ----------------------------------------------------------------
# Orthonormalize provided axes for the shoulder and spine
# ----------------------------------------------------------------
vShoulderUp = OM.MVector(vShoulderForward ^ vShoulderRight).normal()
vShoulderRight = vShoulderUp ^ vShoulderForward
vSpineRight = OM.MVector(vSpineUp ^ vSpineForward).normal()
vSpineUp = vSpineForward ^ vSpineRight
# ----------------------------------------------------------------
# Determine the side of the body the shoulder is on: right is negative, left is positive
# (assumes that the shoulder's pivot point will never cross over the spine object's central axis)
# ----------------------------------------------------------------
mSpine.addTranslation(-vSpinePivot, OM.MSpace.kObject)
mShoulder.addTranslation(-vShoulderPivot, OM.MSpace.kObject)
vSpinePosition = mSpine.getTranslation(OM.MSpace.kWorld)
vShoulderPosition = mShoulder.getTranslation(OM.MSpace.kWorld)
dBodySideScalar = cmp((vSpinePosition - vShoulderPosition) * vSpineRight.rotateBy(qSpine), 0.0)
# ----------------------------------------------------------------
# Define each of the target up-vectors corresponding to different elevations in spine space
# ----------------------------------------------------------------
# target up vector at rest
vRest = vSpineForward * -dBodySideScalar
# target up vector when lowered
vLowered = -(vSpineForward ^ vSpineUp)
# target up vector when raised
vRaised = -vSpineRight.rotateBy(OM.MQuaternion(dRaisedAngleOffset, vSpineForward * -dBodySideScalar))
# ----------------------------------------------------------------
# Interpolate the target up-vector
# ----------------------------------------------------------------
# rotate the spine and shoulder vectors into their respective spaces to get the current elevation angle
vSpineForwardRotated = vSpineForward.rotateBy(qSpine)
vShoulderForwardRotated = vShoulderForward.rotateBy(qShoulder)
dAngle = math.degrees(math.pi - vSpineForwardRotated.angle(vShoulderForwardRotated))
# interpolate the up-vector based on the shoulder's angle of elevation
dDot = vSpineForwardRotated * vShoulderForwardRotated
vTargetUp = OM.MVector()
if (dDot < 0):
vTargetUp = vRest + (vLowered - vRest) * -dDot
else:
vTargetUp = vRest + (vRaised - vRest) * dDot
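# e.g. dDot = -1 (fully lowered) yields vLowered, dDot = 0 (at rest) yields
# vRest, and dDot = 1 (fully raised) yields vRaised; intermediate values
# blend linearly toward the nearer target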
# rotate the target up-vector into world space
vTargetUp = vTargetUp.rotateBy(qSpine).normal()
# ----------------------------------------------------------------
# Compute the local orientation for the constrained node
# ----------------------------------------------------------------
# orthonormalize target axes and get target normal orientation for constrained object
vTargetRight = vTargetUp ^ vShoulderForwardRotated
vTargetUp = vShoulderForwardRotated ^ vTargetRight
mList = [vTargetRight.x, vTargetRight.y, vTargetRight.z, 0.0,
vTargetUp.x, vTargetUp.y, vTargetUp.z, 0.0,
vShoulderForwardRotated.x, vShoulderForwardRotated.y, vShoulderForwardRotated.z, 0.0,
0.0, 0.0, 0.0, 1.0]
mTarget = OM.MMatrix()
OM.MScriptUtil.createMatrixFromList(mList, mTarget)
qTarget = OM.MQuaternion(OM.MTransformationMatrix(mTarget).rotation())
# get offset orientation of the constrained object (from z-forward, y-up)
mList = [vShoulderRight.x, vShoulderRight.y, vShoulderRight.z, 0.0,
vShoulderUp.x, vShoulderUp.y, vShoulderUp.z, 0.0,
vShoulderForward.x, vShoulderForward.y, vShoulderForward.z, 0.0,
0.0, 0.0, 0.0, 1.0]
mFromNormal = OM.MMatrix()
OM.MScriptUtil.createMatrixFromList(mList, mFromNormal)
qFromNormal = OM.MQuaternion(OM.MTransformationMatrix(mFromNormal).rotation())
if (qFromNormal.w > 0.0):
qFromNormal.invertIt()
# composite final quaternion to get local-space value
qFinal = OM.MQuaternion(qFromNormal * qTarget * qParent * qJointOrient)
# output as Euler angles for the constrained node
eFinal = qFinal.asEulerRotation()
eFinal.reorderIt(eRotateOrder)
# ----------------------------------------------------------------
# Set the outgoing plugs
# ----------------------------------------------------------------
outputHandle = dataBlock.outputValue(AM_ShoulderConstraintNode.rotate)
outputHandle.set3Double(math.degrees(eFinal.x), math.degrees(eFinal.y), math.degrees(eFinal.z))
outputHandle = dataBlock.outputValue(AM_ShoulderConstraintNode.angle)
outputHandle.setDouble(dAngle)
dataBlock.setClean(plug)
else:
return OM.kUnknownParameter
@classmethod
def nodeCreator(cls):
return OMMPx.asMPxPtr(cls())
@classmethod
def nodeInitializer(cls):
# ----------------------------------------------------------------
# Input
# ----------------------------------------------------------------
# the constrained node's rotateOrder attribute
eAttr = OM.MFnEnumAttribute()
cls.rotateOrder = eAttr.create(cls.kRotateOrderAttrLongName, cls.kRotateOrderAttrName, 0)
eAttr.addField('xyz', 0)
eAttr.addField('yzx', 1)
eAttr.addField('zxy', 2)
eAttr.addField('xzy', 3)
eAttr.addField('yxz', 4)
eAttr.addField('zyx', 5)
eAttr.setWritable(1)
eAttr.setStorable(1)
eAttr.setReadable(1)
eAttr.setKeyable(0)
eAttr.setHidden(1)
# twist offset when the arm is raised
nAttr = OM.MFnNumericAttribute()
cls.raisedAngleOffset = nAttr.create(cls.kRaisedAngleAttrLongName, cls.kRaisedAngleAttrName,
OM.MFnNumericData.kDouble, 45.0)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(1)
# local axis of the shoulder that aims at the elbow
cls.shoulderAim = nAttr.create(cls.kShoulderAimAttrLongName, cls.kShoulderAimAttrName,
OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
# local axis of the shoulder that points toward the character's front
cls.shoulderFront = nAttr.create(cls.kShoulderFrontAttrLongName, cls.kShoulderFrontAttrName,
OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
# axis on the spine joint pointing toward the head
cls.spineAim = nAttr.create(cls.kSpineAimAttrLongName, cls.kSpineAimAttrName, OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
# axis on the spine joint pointing toward the front
cls.spineFront = nAttr.create(cls.kSpineFrontAttrLongName, cls.kSpineFrontAttrName, OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
# rotatePivot attribute of the upper arm
cls.shoulderPivot = nAttr.create(cls.kShoulderRotatePivotAttrLongName, cls.kShoulderRotatePivotAttrName,
OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
nAttr.setHidden(1)
# rotatePivot attribute of the spine
cls.spinePivot = nAttr.create(cls.kSpineRotatePivotAttrLongName, cls.kSpineRotatePivotAttrName,
OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
nAttr.setHidden(1)
# joint orient (Euler XYZ offset) of the constrained transform
cls.jointOrient = nAttr.create(cls.kJointOrientAttrLongName, cls.kJointOrientAttrName,
OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(1)
nAttr.setReadable(1)
nAttr.setKeyable(0)
nAttr.setHidden(1)
# worldMatrix of the spine joint
mAttr = OM.MFnMatrixAttribute()
cls.spine = mAttr.create(cls.kSpineObjectAttrLongName, cls.kSpineObjectAttrName, OM.MFnMatrixAttribute.kDouble)
mAttr.setWritable(1)
mAttr.setStorable(1)
mAttr.setReadable(1)
mAttr.setKeyable(0)
mAttr.setHidden(1)
# worldMatrix of the shoulder joint
cls.shoulder = mAttr.create(cls.kShoulderObjectAttrLongName, cls.kShoulderObjectAttrName,
OM.MFnMatrixAttribute.kDouble)
mAttr.setWritable(1)
mAttr.setStorable(1)
mAttr.setReadable(1)
mAttr.setKeyable(0)
mAttr.setHidden(1)
# parentInverseMatrix of the constrained transform
cls.parentInverseMatrix = mAttr.create(cls.kParentInvMatrixAttrLongName, cls.kParentInvMatrixAttrName,
OM.MFnMatrixAttribute.kDouble)
mAttr.setWritable(1)
mAttr.setStorable(1)
mAttr.setReadable(1)
mAttr.setKeyable(0)
mAttr.setHidden(1)
# ----------------------------------------------------------------
# Output
# ----------------------------------------------------------------
nAttr = OM.MFnNumericAttribute()
# local rotation for constrained object
cls.rotate = nAttr.create(cls.kRotateAttrLongName, cls.kRotateAttrName, OM.MFnNumericData.k3Double)
nAttr.setWritable(1)
nAttr.setStorable(0)
nAttr.setReadable(1)
# elevation angle of the shoulder
cls.angle = nAttr.create(cls.kElevationAngleAttrLongName, cls.kElevationAngleAttrName,
OM.MFnNumericData.kDouble)
nAttr.setWritable(1)
nAttr.setStorable(0)
nAttr.setReadable(1)
# ----------------------------------------------------------------
# Add attributes
# ----------------------------------------------------------------
cls.addAttribute(cls.rotateOrder)
cls.addAttribute(cls.raisedAngleOffset)
cls.addAttribute(cls.shoulderAim)
cls.addAttribute(cls.shoulderFront)
cls.addAttribute(cls.spineAim)
cls.addAttribute(cls.spineFront)
cls.addAttribute(cls.shoulderPivot)
cls.addAttribute(cls.spinePivot)
cls.addAttribute(cls.jointOrient)
cls.addAttribute(cls.spine)
cls.addAttribute(cls.shoulder)
cls.addAttribute(cls.parentInverseMatrix)
cls.addAttribute(cls.rotate)
cls.addAttribute(cls.angle)
# establish effect upon rotate output
cls.attributeAffects(cls.rotateOrder, cls.rotate)
cls.attributeAffects(cls.raisedAngleOffset, cls.rotate)
cls.attributeAffects(cls.shoulderAim, cls.rotate)
cls.attributeAffects(cls.shoulderFront, cls.rotate)
cls.attributeAffects(cls.spineAim, cls.rotate)
cls.attributeAffects(cls.spineFront, cls.rotate)
cls.attributeAffects(cls.shoulderPivot, cls.rotate)
cls.attributeAffects(cls.spinePivot, cls.rotate)
cls.attributeAffects(cls.jointOrient, cls.rotate)
cls.attributeAffects(cls.spine, cls.rotate)
cls.attributeAffects(cls.shoulder, cls.rotate)
cls.attributeAffects(cls.parentInverseMatrix, cls.rotate)
# establish effect upon angle output
cls.attributeAffects(cls.rotateOrder, cls.angle)
cls.attributeAffects(cls.raisedAngleOffset, cls.angle)
cls.attributeAffects(cls.shoulderAim, cls.angle)
cls.attributeAffects(cls.shoulderFront, cls.angle)
cls.attributeAffects(cls.spineAim, cls.angle)
cls.attributeAffects(cls.spineFront, cls.angle)
cls.attributeAffects(cls.shoulderPivot, cls.angle)
cls.attributeAffects(cls.spinePivot, cls.angle)
cls.attributeAffects(cls.jointOrient, cls.angle)
cls.attributeAffects(cls.spine, cls.angle)
cls.attributeAffects(cls.shoulder, cls.angle)
cls.attributeAffects(cls.parentInverseMatrix, cls.angle)
# ----------------------------------------------------------------
# Initialize the plug-in
# ----------------------------------------------------------------
def initializePlugin(mobject):
plugin = OMMPx.MFnPlugin(mobject, 'Adam Mechtley', kVersionNumber, 'Any')
# dependency node
try:
plugin.registerNode(AM_ShoulderConstraintNode.kPluginNodeTypeName, AM_ShoulderConstraintNode.kPluginNodeId,
AM_ShoulderConstraintNode.nodeCreator, AM_ShoulderConstraintNode.nodeInitializer)
except:
sys.stderr.write('Failed to register node: %s\n' % AM_ShoulderConstraintNode.kPluginNodeTypeName)
raise
# command
try:
plugin.registerCommand(AM_ShoulderConstraintCmd.kPluginCmdName, AM_ShoulderConstraintCmd.cmdCreator,
AM_ShoulderConstraintCmd.syntaxCreator)
except:
sys.stderr.write('Failed to register command: %s\n' % AM_ShoulderConstraintCmd.kPluginCmdName)
raise
# ----------------------------------------------------------------
# Uninitialize the plug-in
# ----------------------------------------------------------------
def uninitializePlugin(mobject):
plugin = OMMPx.MFnPlugin(mobject)
# dependency node
try:
plugin.deregisterNode(AM_ShoulderConstraintNode.kPluginNodeId)
except:
sys.stderr.write('Failed to deregister node: %s\n' % AM_ShoulderConstraintNode.kPluginNodeTypeName)
raise
# command
try:
plugin.deregisterCommand(AM_ShoulderConstraintCmd.kPluginCmdName)
except:
sys.stderr.write('Failed to unregister command: %s\n' % AM_ShoulderConstraintCmd.kPluginCmdName)
raise
|
{
"content_hash": "8d583200b21487169161d519cc1ea1c6",
"timestamp": "",
"source": "github",
"line_count": 895,
"max_line_length": 151,
"avg_line_length": 55.270391061452514,
"alnum_prop": 0.6146724078678715,
"repo_name": "CountZer0/PipelineConstructionSet",
"id": "ff94f8daba148380dff86641987f219fe3f3601a",
"size": "49467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "maya/plugins/2014/win64/AM_ShoulderConstraint.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "49130"
},
{
"name": "JavaScript",
"bytes": "21455"
},
{
"name": "Python",
"bytes": "24534027"
},
{
"name": "Shell",
"bytes": "784"
}
],
"symlink_target": ""
}
|
import distutils.spawn
import logging
import os
import re
import stat
import subprocess
from telemetry.core.platform import desktop_platform_backend
from telemetry.core.platform import ps_util
class PosixPlatformBackend(desktop_platform_backend.DesktopPlatformBackend):
# This is an abstract class. It is OK to have abstract methods.
# pylint: disable=W0223
def _RunCommand(self, args):
return subprocess.Popen(args, stdout=subprocess.PIPE).communicate()[0]
def _GetFileContents(self, path):
with open(path, 'r') as f:
return f.read()
def _GetPsOutput(self, columns, pid=None):
"""Returns output of the 'ps' command as a list of lines.
Subclasses should override this function.
Args:
columns: A list of required columns, e.g., ['pid', 'pss'].
pid: If not None, returns only the information for the process
with that pid.
"""
args = ['ps']
args.extend(['-p', str(pid)] if pid is not None else ['-e'])
for c in columns:
args.extend(['-o', c + '='])
return self._RunCommand(args).splitlines()
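# For example, _GetPsOutput(['pid', 'pss'], pid=1234) runs roughly:
# ps -p 1234 -o pid= -o pss=
# on a typical POSIX ps; the trailing '=' suppresses each column header, so
# the return value is just the data rows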
def GetChildPids(self, pid):
"""Returns a list of child pids of |pid|."""
ps_output = self._GetPsOutput(['pid', 'ppid', 'state'])
ps_line_re = re.compile(
r'\s*(?P<pid>\d+)\s*(?P<ppid>\d+)\s*(?P<state>\S*)\s*')
processes = []
for pid_ppid_state in ps_output:
m = ps_line_re.match(pid_ppid_state)
assert m, 'Did not understand ps output: %s' % pid_ppid_state
processes.append((m.group('pid'), m.group('ppid'), m.group('state')))
return ps_util.GetChildPids(processes, pid)
def GetCommandLine(self, pid):
command = self._GetPsOutput(['command'], pid)
return command[0] if command else None
def GetFlushUtilityName(self):
return 'clear_system_cache'
def CanLaunchApplication(self, application):
return bool(distutils.spawn.find_executable(application))
def LaunchApplication(
self, application, parameters=None, elevate_privilege=False):
assert application, 'Must specify application to launch'
if os.path.sep not in application:
application = distutils.spawn.find_executable(application)
assert application, 'Failed to find application in path'
args = [application]
if parameters:
assert isinstance(parameters, list), 'parameters must be a list'
args += parameters
def IsSetUID(path):
return (os.stat(path).st_mode & stat.S_ISUID) == stat.S_ISUID
def IsElevated():
p = subprocess.Popen(
['sudo', '-nv'], stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout = p.communicate()[0]
# Some versions of sudo set the returncode based on whether sudo requires
# a password currently. Other versions return output when password is
# required and no output when the user is already authenticated.
return not p.returncode and not stdout
if elevate_privilege and not IsSetUID(application):
args = ['sudo'] + args
if not IsElevated():
print ('Telemetry needs to run %s under sudo. Please authenticate.' %
application)
subprocess.check_call(['sudo', '-v']) # Synchronously authenticate.
prompt = ('Would you like to always allow %s to be run as the current '
'user without sudo? If so, Telemetry will '
'`sudo chmod +s %s`. (y/N)' % (application, application))
if raw_input(prompt).lower() == 'y':
subprocess.check_call(['sudo', 'chmod', '+s', application])
stderror_destination = subprocess.PIPE
if logging.getLogger().isEnabledFor(logging.DEBUG):
stderror_destination = None
return subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=stderror_destination)
|
{
"content_hash": "5c4e522527324bd181b8fff92e180f2d",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 79,
"avg_line_length": 35.72641509433962,
"alnum_prop": 0.6601531555320834,
"repo_name": "anirudhSK/chromium",
"id": "9f76b5205a49b6920024cff52d0da61ee1d6a6b1",
"size": "3950",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/core/platform/posix_platform_backend.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "52960"
},
{
"name": "Awk",
"bytes": "8660"
},
{
"name": "C",
"bytes": "42502191"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "201859263"
},
{
"name": "CSS",
"bytes": "946557"
},
{
"name": "DOT",
"bytes": "2984"
},
{
"name": "Java",
"bytes": "5687122"
},
{
"name": "JavaScript",
"bytes": "22163714"
},
{
"name": "M",
"bytes": "2190"
},
{
"name": "Matlab",
"bytes": "2496"
},
{
"name": "Objective-C",
"bytes": "7670589"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "672770"
},
{
"name": "Python",
"bytes": "10873885"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "1315894"
},
{
"name": "Tcl",
"bytes": "277091"
},
{
"name": "TypeScript",
"bytes": "1560024"
},
{
"name": "XSLT",
"bytes": "13493"
},
{
"name": "nesC",
"bytes": "15206"
}
],
"symlink_target": ""
}
|
from fonction_py.tools import *
from fonction_py.preprocess import *
from scipy.optimize import minimize
from sklearn import linear_model
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from pandas.tools.plotting import scatter_matrix
import matplotlib.pyplot as plt
from sklearn import cross_validation
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn import decomposition
import time
# predict that Gestion Renault = 0 since February 2011
#
#
def fun_to_min(x,xTrain,yTrain):
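# x packs the affine model into one vector: the first entries are the
# weights a and the last entry is the intercept b, so the function returns
# the LinExp loss of the prediction xTrain . a^T + b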
a=x[:-1]
b=x[-1]
return LinExp(np.dot(xTrain,np.transpose(a))+b,yTrain)
def linearLinexpMinimization(x, y):
xTrain, xTest, yTrain, yTest = faireSplitting(x, y, 0.8) # split into train/test sets (80/20)
del x
del y
print("ok")
print("AVEC")
pca = decomposition.PCA(n_components=65)
pca.fit(xTrain)
PCAxTrain = pca.transform(xTrain)
nbLines,nbFeatures = PCAxTrain.shape
res = minimize(fun_to_min,np.zeros(nbFeatures+1),args=(PCAxTrain,yTrain))
PCAxTest = pca.transform(xTest)
x = res.x # x has length nbFeatures+1
a=x[:-1] # a.T is a column of length nbFeatures
b=x[-1] # b is a scalar
print("a : \n",a)
print("b : \n",b)
pred = np.dot(PCAxTest,np.transpose(a))+b
pred = np.round(pred)
check(pred, yTest)
bins = np.linspace(-10, 10, 40)
plt.hist(pred-yTest, bins, normed=1)
def telephoniePred(x,y,xTest):
pca = decomposition.PCA(n_components=65)
pca.fit(x)
PCAxTrain = pca.transform(x)
nbLines,nbFeatures = PCAxTrain.shape
res = minimize(fun_to_min,np.zeros(nbFeatures+1),args=(PCAxTrain,y))
PCAxTest = pca.transform(xTest)
x = res.x
a=x[:-1]
b=x[-1]
pred = np.dot(PCAxTest,np.transpose(a))+b
pred = np.round(pred)
return pred
def submit():
start_time = time.time()
fields = ['DATE', 'DAY_OFF', 'WEEK_END', 'DAY_WE_DS', 'ASS_ASSIGNMENT', 'CSPL_RECEIVED_CALLS' ] # select the columns to read
data=pd.read_csv("data/trainPure.csv", sep=";", usecols=fields) # READ
resultat = pd.read_csv("data/submission.txt", sep="\t") # READ
categoryList = ['CAT','CMS','Crises','Domicile','Gestion','Gestion - Accueil Telephonique','Gestion Assurances','Gestion Clients','Gestion DZ','Gestion Relation Clienteles','Gestion Renault','Japon','Manager','Mécanicien','Médical','Nuit','Prestataires','RENAULT','RTC','Regulation Medicale','SAP','Services','Tech. Axa','Tech. Inter','Tech. Total','Téléphonie']
for category in categoryList :
start_time = time.time()
print(category)
xTrain,yTrain = preprocess(data.copy(), category) # add the features
xTest,xTrain,souvenir=preprocessFINAL(xTrain,category)
prediction = telephoniePred(xTrain,yTrain,xTest)
prediction =np.round(prediction).astype(int)
souvenir['prediction']=prediction
end_time = time.time()
print('prediction\'s length : ',len(prediction))
print('Time : ',end_time - start_time)
resultat=pd.merge(resultat, souvenir, how='left',on=['DATE', 'ASS_ASSIGNMENT'])
print('DONE')
resultat=resultat.fillna(0)
resultat['prediction'] = resultat['prediction_x']+resultat['prediction_y']
del resultat['prediction_x']
del resultat['prediction_y']
# pd.DataFrame(res).to_csv("reslist.csv", sep=";", decimal=",") # disabled: 'res' is undefined in this scope
resultat.to_csv("vraipred.txt", sep="\t", index =False)
return resultat
|
{
"content_hash": "6e65452d01e7fc99206b4ebdd64ec45a",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 366,
"avg_line_length": 38.5,
"alnum_prop": 0.6901298701298701,
"repo_name": "LaRiffle/axa_challenge",
"id": "63dd50bcec5944165ea1a9f032b00ec7298b5764",
"size": "3855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fonction_py/tim.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45941"
}
],
"symlink_target": ""
}
|
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
credential_list_mappings = client.sip \
.domains("SD32a3c49700934481addd5ce1659f04d2") \
.credential_list_mappings \
.list()
# Loop over the list of credential_list_mappings and print
# a property for each one
for credential_list_mapping in credential_list_mappings:
print(credential_list_mapping.friendly_name)
|
{
"content_hash": "be79b695cb67f2fb682723a1c7811485",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 62,
"avg_line_length": 34.25,
"alnum_prop": 0.7718978102189781,
"repo_name": "teoreteetik/api-snippets",
"id": "caeb5e3af2f5a40ba1112b016a5ccc979dea2442",
"size": "621",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest/sip-in/get-credential-list-mappings/get-credential-list-mappings.6.x.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "643369"
},
{
"name": "HTML",
"bytes": "335"
},
{
"name": "Java",
"bytes": "943336"
},
{
"name": "JavaScript",
"bytes": "539577"
},
{
"name": "M",
"bytes": "117"
},
{
"name": "Mathematica",
"bytes": "93"
},
{
"name": "Objective-C",
"bytes": "46198"
},
{
"name": "PHP",
"bytes": "538312"
},
{
"name": "Python",
"bytes": "467248"
},
{
"name": "Ruby",
"bytes": "470316"
},
{
"name": "Shell",
"bytes": "1564"
},
{
"name": "Swift",
"bytes": "36563"
}
],
"symlink_target": ""
}
|
import m5
from m5.objects import *
class MyCache(BaseCache):
assoc = 2
block_size = 64
latency = '1ns'
mshrs = 10
tgts_per_mshr = 5
cpu = TimingSimpleCPU(cpu_id=0)
cpu.addTwoLevelCacheHierarchy(MyCache(size = '128kB'), MyCache(size = '256kB'),
MyCache(size = '2MB', latency='10ns'))
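# presumably, per m5's addTwoLevelCacheHierarchy(ic, dc, l2c) argument order,
# the 128kB cache becomes the L1 instruction cache, the 256kB cache the L1
# data cache, and the 2MB cache the shared L2 with its longer 10ns latency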
system = System(cpu = cpu,
physmem = PhysicalMemory(),
membus = Bus())
system.physmem.port = system.membus.port
cpu.connectMemPorts(system.membus)
cpu.clock = '2GHz'
root = Root(system = system)
|
{
"content_hash": "c36b856394b6dc351f418cfcc815bdf8",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 79,
"avg_line_length": 26.80952380952381,
"alnum_prop": 0.6145648312611013,
"repo_name": "liangwang/m5",
"id": "0ed985a17dbcbdf8b13189282f7509bb159ad832",
"size": "2145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/configs/simple-timing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "490228"
},
{
"name": "C++",
"bytes": "8617145"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "Python",
"bytes": "2567844"
},
{
"name": "Shell",
"bytes": "6722"
},
{
"name": "Visual Basic",
"bytes": "2884"
}
],
"symlink_target": ""
}
|
import os.path
import sys
import string
import xbmc
import xbmcgui
import resources.lib.common as common
import database_tv as tv_db
from bs4 import BeautifulSoup
import listtv
pluginhandle = common.pluginHandle
if common.get_setting('libraryfolder') == '0':
MOVIE_PATH = os.path.join(xbmc.translatePath(common.__addonprofile__), 'Movies')
TV_SHOWS_PATH = os.path.join(xbmc.translatePath(common.__addonprofile__), 'TV')
else: # == 1
if common.get_setting('customlibraryfolder') != '':
MOVIE_PATH = os.path.join(xbmc.translatePath(common.get_setting('customlibraryfolder')), 'Movies')
TV_SHOWS_PATH = os.path.join(xbmc.translatePath(common.get_setting('customlibraryfolder')), 'TV')
else:
xbmcgui.Dialog().ok("Error", "Set location of custom folder or use built in folder")
common.open_settings()
def setup_library():
if common.get_setting('autoaddfolders') != 'yes':
common.notification('Starting Export', 10000)
return False
source_path = os.path.join(xbmc.translatePath('special://profile/'), 'sources.xml')
dialog = xbmcgui.Dialog()
# ensure the directories exist
_create_directory(MOVIE_PATH)
_create_directory(TV_SHOWS_PATH)
try:
file = open(source_path, 'r')
content = file.read()
file.close()
except:
# TODO Provide a Yes/No option here
dialog.ok("Error adding new sources ", "Could not read from sources.xml, does it really exist?")
return False
soup = BeautifulSoup(content)
video = soup.find("video")
added_new_paths = False
if len(soup.find_all('name', text='PFTV Movies')) < 1:
movie_source_tag = soup.new_tag('source')
movie_name_tag = soup.new_tag('name')
movie_name_tag.string = 'PFTV Movies'
movie_source_tag.insert(0, movie_name_tag)
movie_path_tag = soup.new_tag('path', pathversion='1')
movie_path_tag.string = MOVIE_PATH
movie_source_tag.insert(1, movie_path_tag)
movie_sharing = soup.new_tag('allowsharing')
movie_sharing.string = 'true'
movie_source_tag.insert(2, movie_sharing)
video.append(movie_source_tag)
added_new_paths = True
if len(soup.find_all('name', text='PFTV TV')) < 1:
tv_source_tag = soup.new_tag('source')
tvshow_name_tag = soup.new_tag('name')
tvshow_name_tag.string = 'PFTV TV'
tv_source_tag.insert(0, tvshow_name_tag)
tvshow_path_tag = soup.new_tag('path', pathversion='1')
tvshow_path_tag.string = TV_SHOWS_PATH
tv_source_tag.insert(1, tvshow_path_tag)
tvshow_sharing = soup.new_tag('allowsharing')
tvshow_sharing.string = 'true'
tv_source_tag.insert(2, tvshow_sharing)
video.append(tv_source_tag)
added_new_paths = True
if added_new_paths:
file = open(source_path, 'w')
file.write(str(soup))
file.close()
common.notification('Starting Export', 10000)
return added_new_paths
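# Each branch above appends a <source> entry of roughly this shape to
# sources.xml (indentation illustrative):
# <source>
# <name>PFTV TV</name>
# <path pathversion="1">/path/to/addon_data/TV</path>
# <allowsharing>true</allowsharing>
# </source>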
def update_xbmc_library():
xbmc.executebuiltin("UpdateLibrary(video)")
def complete_export(added_folders):
if added_folders:
xbmcgui.Dialog() \
.ok("Added PFTV Folders to Video Sources",
"Two steps are required to complete the process:",
"1. Kodi must be restarted",
"2. After restarting, you must configure the content type of the PFTV folders in the File section"
)
else:
common.notification('Export Complete')
if common.get_setting('updatelibraryafterexport') == 'true':
update_xbmc_library()
def export_movie(data):
if data['year']:
filename = _clean_filename(data['title'] + ' (' + str(data['year']) + ')')
else:
filename = _clean_filename(data['title'])
strm_file = filename + ".strm"
u = sys.argv[0] + '?url={0}&mode=movies&sitemode=play_movie&content_id={1}'.format(data['url'], data['content_id'])
_save_file(strm_file, u, MOVIE_PATH)
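# The .strm file written above holds a single add-on URL (sys.argv[0] is the
# add-on's plugin:// base) of the form:
# <base>?url=<url>&mode=movies&sitemode=play_movie&content_id=<id>
# which Kodi resolves back through this add-on when the library entry plays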
def export_series(series):
tv_db.update_series(series)
seasons = tv_db.get_seasons(series['series_id'], True)
dirname = _get_series_dir(series)
for season in seasons:
tv_db.update_season(season)
export_season(season, dirname)
def export_season(season, series_dir=None):
episodes = tv_db.get_episodes(season['season_id'])
dirname = _get_season_dir(season, series_dir)
for episode in episodes:
export_episode(episode, dirname)
def export_episode(episode, season_dir=None):
if season_dir is None:
season = tv_db.lookup_season(episode['season_id']).fetchone()
season_dir = _get_season_dir(season)
filename = 'S{0:02d}E{1:02d} - {2}'.format(episode['season_no'], episode['episode_no'], _clean_filename(episode['title']))
strm_file = filename + ".strm"
u = listtv.create_play_link(episode)
_save_file(strm_file, u, season_dir)
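# e.g. season_no=1, episode_no=2, title 'Pilot' yields 'S01E02 - Pilot.strm'
# inside the season directory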
def _get_series_dir(series):
dirname = os.path.join(TV_SHOWS_PATH, _clean_filename(series['title']))
_create_directory(dirname)
return dirname
def _get_season_dir(season, series_dir=None):
if series_dir is None:
series = tv_db.lookup_series(season['series_id']).fetchone()
series_dir = _get_series_dir(series)
dirname = os.path.join(series_dir, 'Season {0}'.format(season['season_no']))
_create_directory(dirname)
return dirname
def _save_file(filename, data, dir):
path = os.path.join(dir, filename)
file = open(path, 'w')
file.write(data)
file.close()
def _create_directory(dir_path):
dir_path = dir_path.strip()
if not os.path.exists(dir_path):
os.makedirs(dir_path)
def _clean_filename(name):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
return ''.join(c for c in name if c in valid_chars)
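# e.g. _clean_filename('S01E02: Pilot?') returns 'S01E02 Pilot'; any
# character outside letters, digits, and "-_.() " is dropped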
|
{
"content_hash": "09993ee5ca8720552fb1b9f26381cffd",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 126,
"avg_line_length": 31.06878306878307,
"alnum_prop": 0.6374318801089919,
"repo_name": "bradyemerson/plugin.video.pftv",
"id": "7be5e84ab69d5e38be8fb72bddb24d7f298e2357",
"size": "5918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/lib/xbmclibrary.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "64120"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import sys
from twisted.internet import reactor, defer
from twittytwister import twitter
fetchCount = 0
@defer.deferredGenerator
def getSome(tw, user):
global fetchCount
fetchCount = 0
def gotEntry(msg):
global fetchCount
fetchCount += 1
sys.stdout.write(msg.text.encode("utf8") + "\n")
page = 1
while True:
fetchCount = 0
sys.stderr.write("Fetching page %d for %s\n" % (page, user))
d = tw.user_timeline(gotEntry, user, {'count': '200', 'page': str(page)})
page += 1
wfd = defer.waitForDeferred(d)
yield wfd
wfd.getResult()
if fetchCount == 0:
reactor.stop()
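# Command-line usage implied by the argv handling below:
# python user_timeline.py <username> <password> [target_user]
# if target_user is omitted, the authenticated user's own timeline is dumped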
user = sys.argv[1]
if len(sys.argv) > 3:
user = sys.argv[3]
tw = twitter.Twitter(sys.argv[1], sys.argv[2])
defer.maybeDeferred(getSome, tw, user)
reactor.run()
|
{
"content_hash": "2973aafbd30b58d8d534c17d2c0498eb",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 81,
"avg_line_length": 20.4,
"alnum_prop": 0.6089324618736384,
"repo_name": "praekelt/twitty-twister",
"id": "86c084ea29d1e2a51efead05fb6894029416f64a",
"size": "940",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "example/user_timeline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "72574"
}
],
"symlink_target": ""
}
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 30, transform = "Difference", sigma = 0.0, exog_count = 100, ar_order = 0);
|
{
"content_hash": "732fd302d499d2ae918f8947b37ded1b",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 172,
"avg_line_length": 38.857142857142854,
"alnum_prop": 0.7132352941176471,
"repo_name": "antoinecarme/pyaf",
"id": "b54a58f78fa8a731c47941416f786e12a425d2b0",
"size": "272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_Difference/trend_ConstantTrend/cycle_30/ar_/test_artificial_1024_Difference_ConstantTrend_30__100.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
import copy
import os
import urlparse
import uuid
from keystone.common import cms
from keystone.common.sql import migration
from keystone import config
from keystone import contrib
from keystone.contrib import oauth1
from keystone.contrib.oauth1 import controllers
from keystone.openstack.common import importutils
from keystone.tests import test_v3
CONF = config.CONF
class OAuth1Tests(test_v3.RestfulTestCase):
EXTENSION_NAME = 'oauth1'
EXTENSION_TO_ADD = 'oauth_extension'
def setup_database(self):
super(OAuth1Tests, self).setup_database()
package_name = "%s.%s.migrate_repo" % (contrib.__name__,
self.EXTENSION_NAME)
package = importutils.import_module(package_name)
self.repo_path = os.path.abspath(os.path.dirname(package.__file__))
migration.db_version_control(version=None, repo_path=self.repo_path)
migration.db_sync(version=None, repo_path=self.repo_path)
def setUp(self):
super(OAuth1Tests, self).setUp()
# Now that the app has been served, we can query CONF values
self.base_url = (CONF.public_endpoint % CONF) + "v3"
self.controller = controllers.OAuthControllerV3()
def _create_single_consumer(self):
ref = {'description': uuid.uuid4().hex}
resp = self.post(
'/OS-OAUTH1/consumers',
body={'consumer': ref})
return resp.result.get('consumer')
def _oauth_request(self, consumer, token=None, **kw):
return oauth1.Request.from_consumer_and_token(consumer=consumer,
token=token,
**kw)
def _create_request_token(self, consumer, project_id):
params = {'requested_project_id': project_id}
headers = {'Content-Type': 'application/json'}
url = '/OS-OAUTH1/request_token'
oreq = self._oauth_request(
consumer=consumer,
http_url=self.base_url + url,
http_method='POST',
parameters=params)
hmac = oauth1.SignatureMethod_HMAC_SHA1()
oreq.sign_request(hmac, consumer, None)
headers.update(oreq.to_header())
headers.update(params)
return url, headers
def _create_access_token(self, consumer, token):
headers = {'Content-Type': 'application/json'}
url = '/OS-OAUTH1/access_token'
oreq = self._oauth_request(
consumer=consumer, token=token,
http_method='POST',
http_url=self.base_url + url)
hmac = oauth1.SignatureMethod_HMAC_SHA1()
oreq.sign_request(hmac, consumer, token)
headers.update(oreq.to_header())
return url, headers
def _get_oauth_token(self, consumer, token):
headers = {'Content-Type': 'application/json'}
body = {'auth': {'identity': {'methods': ['oauth1'], 'oauth1': {}}}}
url = '/auth/tokens'
oreq = self._oauth_request(
consumer=consumer, token=token,
http_method='POST',
http_url=self.base_url + url)
hmac = oauth1.SignatureMethod_HMAC_SHA1()
oreq.sign_request(hmac, consumer, token)
headers.update(oreq.to_header())
return url, headers, body
def _authorize_request_token(self, request_id):
return '/OS-OAUTH1/authorize/%s' % (request_id)
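# Together these helpers model the three-legged OAuth1 flow exercised below:
# POST /OS-OAUTH1/request_token (signed with the consumer secret only),
# PUT /OS-OAUTH1/authorize/<request_id> to collect a verifier,
# POST /OS-OAUTH1/access_token (signed with consumer plus request token),
# and finally POST /auth/tokens using the 'oauth1' auth method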
class ConsumerCRUDTests(OAuth1Tests):
def _consumer_create(self, description=None, description_flag=True):
if description_flag:
ref = {'description': description}
else:
ref = {}
resp = self.post(
'/OS-OAUTH1/consumers',
body={'consumer': ref})
consumer = resp.result.get('consumer')
consumer_id = consumer.get('id')
self.assertEqual(consumer['description'], description)
self.assertIsNotNone(consumer_id)
self.assertIsNotNone(consumer.get('secret'))
def test_consumer_create(self):
description = uuid.uuid4().hex
self._consumer_create(description=description)
def test_consumer_create_none_desc_1(self):
self._consumer_create()
def test_consumer_create_none_desc_2(self):
self._consumer_create(description_flag=False)
def test_consumer_delete(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': consumer_id})
self.assertResponseStatus(resp, 204)
def test_consumer_get(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
resp = self.get('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': consumer_id})
self.assertEqual(resp.result.get('consumer').get('id'), consumer_id)
def test_consumer_list(self):
resp = self.get('/OS-OAUTH1/consumers')
entities = resp.result.get('consumers')
self.assertIsNotNone(entities)
self.assertValidListLinks(resp.result.get('links'))
def test_consumer_update(self):
consumer = self._create_single_consumer()
original_id = consumer.get('id')
original_description = consumer.get('description')
update_description = original_description + "_new"
update_ref = {'description': update_description}
update_resp = self.patch('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': original_id},
body={'consumer': update_ref})
consumer = update_resp.result.get('consumer')
self.assertEqual(consumer.get('description'), update_description)
self.assertEqual(consumer.get('id'), original_id)
def test_consumer_update_bad_secret(self):
consumer = self._create_single_consumer()
original_id = consumer.get('id')
update_ref = copy.deepcopy(consumer)
update_ref['description'] = uuid.uuid4().hex
update_ref['secret'] = uuid.uuid4().hex
self.patch('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': original_id},
body={'consumer': update_ref},
expected_status=400)
def test_consumer_update_bad_id(self):
consumer = self._create_single_consumer()
original_id = consumer.get('id')
original_description = consumer.get('description')
update_description = original_description + "_new"
update_ref = copy.deepcopy(consumer)
update_ref['description'] = update_description
update_ref['id'] = update_description
self.patch('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': original_id},
body={'consumer': update_ref},
expected_status=400)
def test_consumer_create_no_description(self):
resp = self.post('/OS-OAUTH1/consumers', body={'consumer': {}})
consumer = resp.result.get('consumer')
consumer_id = consumer.get('id')
self.assertEqual(consumer.get('description'), None)
self.assertIsNotNone(consumer_id)
self.assertIsNotNone(consumer.get('secret'))
def test_consumer_get_bad_id(self):
self.get('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': uuid.uuid4().hex},
expected_status=404)
class OAuthFlowTests(OAuth1Tests):
def test_oauth_flow(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer_secret = consumer.get('secret')
self.consumer = oauth1.Consumer(consumer_id, consumer_secret)
self.assertIsNotNone(self.consumer.key)
url, headers = self._create_request_token(self.consumer,
self.project_id)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
request_key = credentials.get('oauth_token')[0]
request_secret = credentials.get('oauth_token_secret')[0]
self.request_token = oauth1.Token(request_key, request_secret)
self.assertIsNotNone(self.request_token.key)
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
resp = self.put(url, body=body, expected_status=200)
self.verifier = resp.result['token']['oauth_verifier']
self.request_token.set_verifier(self.verifier)
url, headers = self._create_access_token(self.consumer,
self.request_token)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
access_key = credentials.get('oauth_token')[0]
access_secret = credentials.get('oauth_token_secret')[0]
self.access_token = oauth1.Token(access_key, access_secret)
self.assertIsNotNone(self.access_token.key)
url, headers, body = self._get_oauth_token(self.consumer,
self.access_token)
content = self.post(url, headers=headers, body=body)
self.keystone_token_id = content.headers.get('X-Subject-Token')
self.keystone_token = content.result.get('token')
self.assertIsNotNone(self.keystone_token_id)
class AccessTokenCRUDTests(OAuthFlowTests):
def test_delete_access_token_dne(self):
self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
% {'user': self.user_id,
'auth': uuid.uuid4().hex},
expected_status=404)
def test_list_no_access_tokens(self):
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
% {'user_id': self.user_id})
entities = resp.result.get('access_tokens')
self.assertTrue(len(entities) == 0)
self.assertValidListLinks(resp.result.get('links'))
def test_get_single_access_token(self):
self.test_oauth_flow()
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s'
% {'user_id': self.user_id,
'key': self.access_token.key})
entity = resp.result.get('access_token')
self.assertEqual(entity['id'], self.access_token.key)
self.assertEqual(entity['consumer_id'], self.consumer.key)
def test_get_access_token_dne(self):
self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s'
% {'user_id': self.user_id,
'key': uuid.uuid4().hex},
expected_status=404)
def test_list_all_roles_in_access_token(self):
self.test_oauth_flow()
resp = self.get('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles'
% {'id': self.user_id,
'key': self.access_token.key})
entities = resp.result.get('roles')
self.assertTrue(len(entities) > 0)
self.assertValidListLinks(resp.result.get('links'))
def test_get_role_in_access_token(self):
self.test_oauth_flow()
url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
% {'id': self.user_id, 'key': self.access_token.key,
'role': self.role_id})
resp = self.get(url)
entity = resp.result.get('role')
self.assertEqual(entity['id'], self.role_id)
def test_get_role_in_access_token_dne(self):
self.test_oauth_flow()
url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
% {'id': self.user_id, 'key': self.access_token.key,
'role': uuid.uuid4().hex})
self.get(url, expected_status=404)
def test_list_and_delete_access_tokens(self):
self.test_oauth_flow()
# List access_tokens should be > 0
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
% {'user_id': self.user_id})
entities = resp.result.get('access_tokens')
self.assertTrue(len(entities) > 0)
self.assertValidListLinks(resp.result.get('links'))
# Delete access_token
resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
% {'user': self.user_id,
'auth': self.access_token.key})
self.assertResponseStatus(resp, 204)
# List access_token should be 0
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
% {'user_id': self.user_id})
entities = resp.result.get('access_tokens')
self.assertTrue(len(entities) == 0)
self.assertValidListLinks(resp.result.get('links'))
class AuthTokenTests(OAuthFlowTests):
def test_keystone_token_is_valid(self):
self.test_oauth_flow()
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
r = self.get('/auth/tokens', headers=headers)
self.assertValidTokenResponse(r, self.user)
# now verify the oauth section
oauth_section = r.result['token']['OS-OAUTH1']
self.assertEqual(oauth_section['access_token_id'],
self.access_token.key)
self.assertEqual(oauth_section['consumer_id'], self.consumer.key)
# verify the roles section
roles_list = r.result['token']['roles']
# we can just verify the 0th role since we are only assigning one role
self.assertEqual(roles_list[0]['id'], self.role_id)
# verify that the token can perform delegated tasks
ref = self.new_user_ref(domain_id=self.domain_id)
r = self.admin_request(path='/v3/users', headers=headers,
method='POST', body={'user': ref})
self.assertValidUserResponse(r, ref)
def test_delete_access_token_also_revokes_token(self):
self.test_oauth_flow()
# Delete access token
resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
% {'user': self.user_id,
'auth': self.access_token.key})
self.assertResponseStatus(resp, 204)
# Check Keystone Token no longer exists
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
self.get('/auth/tokens', headers=headers,
expected_status=404)
def test_deleting_consumer_also_deletes_tokens(self):
self.test_oauth_flow()
# Delete consumer
consumer_id = self.consumer.key
resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': consumer_id})
self.assertResponseStatus(resp, 204)
# List access_token should be 0
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
% {'user_id': self.user_id})
entities = resp.result.get('access_tokens')
self.assertEqual(len(entities), 0)
# Check Keystone Token no longer exists
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
self.head('/auth/tokens', headers=headers,
expected_status=404)
def test_change_user_password_also_deletes_tokens(self):
self.test_oauth_flow()
# delegated keystone token exists
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
r = self.get('/auth/tokens', headers=headers)
self.assertValidTokenResponse(r, self.user)
user = {'password': uuid.uuid4().hex}
r = self.patch('/users/%(user_id)s' % {
'user_id': self.user['id']},
body={'user': user})
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
self.admin_request(path='/auth/tokens', headers=headers,
method='GET', expected_status=404)
def test_deleting_project_also_invalidates_tokens(self):
self.test_oauth_flow()
# delegated keystone token exists
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
r = self.get('/auth/tokens', headers=headers)
self.assertValidTokenResponse(r, self.user)
r = self.delete('/projects/%(project_id)s' % {
'project_id': self.project_id})
headers = {'X-Subject-Token': self.keystone_token_id,
'X-Auth-Token': self.keystone_token_id}
self.admin_request(path='/auth/tokens', headers=headers,
method='GET', expected_status=404)
def test_token_chaining_is_not_allowed(self):
self.test_oauth_flow()
        # attempt to re-authenticate (token chaining) with the given token
path = '/v3/auth/tokens/'
auth_data = self.build_authentication_request(
token=self.keystone_token_id)
self.admin_request(
path=path,
body=auth_data,
token=self.keystone_token_id,
method='POST',
expected_status=403)
def test_list_keystone_tokens_by_consumer(self):
self.test_oauth_flow()
tokens = self.token_api.list_tokens(self.user_id,
consumer_id=self.consumer.key)
keystone_token_uuid = cms.cms_hash_token(self.keystone_token_id)
self.assertTrue(len(tokens) > 0)
self.assertTrue(keystone_token_uuid in tokens)
class MaliciousOAuth1Tests(OAuth1Tests):
def test_bad_consumer_secret(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer = oauth1.Consumer(consumer_id, "bad_secret")
url, headers = self._create_request_token(consumer,
self.project_id)
self.post(url, headers=headers, expected_status=500)
def test_bad_request_token_key(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer_secret = consumer.get('secret')
consumer = oauth1.Consumer(consumer_id, consumer_secret)
url, headers = self._create_request_token(consumer,
self.project_id)
self.post(url, headers=headers)
url = self._authorize_request_token("bad_key")
body = {'roles': [{'id': self.role_id}]}
self.put(url, body=body, expected_status=404)
def test_bad_verifier(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer_secret = consumer.get('secret')
consumer = oauth1.Consumer(consumer_id, consumer_secret)
url, headers = self._create_request_token(consumer,
self.project_id)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
request_key = credentials.get('oauth_token')[0]
request_secret = credentials.get('oauth_token_secret')[0]
request_token = oauth1.Token(request_key, request_secret)
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
resp = self.put(url, body=body, expected_status=200)
verifier = resp.result['token']['oauth_verifier']
self.assertIsNotNone(verifier)
request_token.set_verifier("bad verifier")
url, headers = self._create_access_token(consumer,
request_token)
self.post(url, headers=headers, expected_status=401)
def test_bad_authorizing_roles(self):
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer_secret = consumer.get('secret')
consumer = oauth1.Consumer(consumer_id, consumer_secret)
url, headers = self._create_request_token(consumer,
self.project_id)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
request_key = credentials.get('oauth_token')[0]
self.identity_api.remove_role_from_user_and_project(self.user_id,
self.project_id,
self.role_id)
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
self.admin_request(path=url, method='PUT',
body=body, expected_status=404)
def test_expired_authorizing_request_token(self):
CONF.oauth1.request_token_duration = -1
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer_secret = consumer.get('secret')
self.consumer = oauth1.Consumer(consumer_id, consumer_secret)
self.assertIsNotNone(self.consumer.key)
url, headers = self._create_request_token(self.consumer,
self.project_id)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
request_key = credentials.get('oauth_token')[0]
request_secret = credentials.get('oauth_token_secret')[0]
self.request_token = oauth1.Token(request_key, request_secret)
self.assertIsNotNone(self.request_token.key)
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
self.put(url, body=body, expected_status=401)
def test_expired_creating_keystone_token(self):
CONF.oauth1.access_token_duration = -1
consumer = self._create_single_consumer()
consumer_id = consumer.get('id')
consumer_secret = consumer.get('secret')
self.consumer = oauth1.Consumer(consumer_id, consumer_secret)
self.assertIsNotNone(self.consumer.key)
url, headers = self._create_request_token(self.consumer,
self.project_id)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
request_key = credentials.get('oauth_token')[0]
request_secret = credentials.get('oauth_token_secret')[0]
self.request_token = oauth1.Token(request_key, request_secret)
self.assertIsNotNone(self.request_token.key)
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
resp = self.put(url, body=body, expected_status=200)
self.verifier = resp.result['token']['oauth_verifier']
self.request_token.set_verifier(self.verifier)
url, headers = self._create_access_token(self.consumer,
self.request_token)
content = self.post(url, headers=headers)
credentials = urlparse.parse_qs(content.result)
access_key = credentials.get('oauth_token')[0]
access_secret = credentials.get('oauth_token_secret')[0]
self.access_token = oauth1.Token(access_key, access_secret)
self.assertIsNotNone(self.access_token.key)
url, headers, body = self._get_oauth_token(self.consumer,
self.access_token)
self.post(url, headers=headers, body=body, expected_status=401)
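# A minimal sketch (not part of the original suite) of the three-legged
# OAuth1 flow that the tests above exercise; the helper names mirror the
# OAuth1Tests methods used throughout this file, and the attribute names
# (consumer, project_id, role_id) are illustrative assumptions.
def _sketch_three_legged_flow(test):
    # 1. the consumer requests a request token scoped to a project
    url, headers = test._create_request_token(test.consumer, test.project_id)
    content = test.post(url, headers=headers)
    credentials = urlparse.parse_qs(content.result)
    request_token = oauth1.Token(credentials['oauth_token'][0],
                                 credentials['oauth_token_secret'][0])
    # 2. an authorizing user grants roles and receives a verifier
    url = test._authorize_request_token(request_token.key)
    resp = test.put(url, body={'roles': [{'id': test.role_id}]},
                    expected_status=200)
    request_token.set_verifier(resp.result['token']['oauth_verifier'])
    # 3. the verified request token is exchanged for an access token
    url, headers = test._create_access_token(test.consumer, request_token)
    return test.post(url, headers=headers)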
|
{
"content_hash": "6431fe5427c23ebef194136460b8165d",
"timestamp": "",
"source": "github",
"line_count": 558,
"max_line_length": 78,
"avg_line_length": 42.57885304659498,
"alnum_prop": 0.5901763542236627,
"repo_name": "raildo/keystone",
"id": "a398cffbed20d921d7629f5f40394b9629aed5d3",
"size": "24390",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keystone/tests/test_v3_oauth1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "2403891"
},
{
"name": "Shell",
"bytes": "11206"
}
],
"symlink_target": ""
}
|
'''This script runs all tests in a directory.
It does not need to know about the tests ahead of time.
It recursively descends from the current directory and
automatically builds up a list of tests to run.
Only directories named 'tests' are processed.
The path to each 'tests' directory is added to the PYTHONPATH.
Only python scripts that start with 'test_' are added to
the list of scripts in the test suite.
Noah Spurrier
'''
import unittest
import os, os.path
import sys
import pexpect
print "Testing pexpect version:", pexpect.__version__
print "Testing pexpect revision:", pexpect.__revision__
def add_tests_to_list (import_list, dirname, names):
# Only check directories named 'tests'.
if os.path.basename(dirname) != 'tests':
return
# Add any files that start with 'test_' and end with '.py'.
for f in names:
filename, ext = os.path.splitext(f)
if ext != '.py':
continue
if filename.find('test_') == 0:
import_list.append (os.path.join(dirname, filename))
def find_modules_and_add_paths (root_path):
import_list = []
module_list = []
os.path.walk (root_path, add_tests_to_list, import_list)
for module_file in import_list:
path, module = os.path.split(module_file)
module_list.append (module)
print 'Adding:', module_file
if not path in sys.path:
sys.path.append (path)
if not os.path.dirname(path) in sys.path:
sys.path.append (os.path.dirname(path))
module_list.sort()
return module_list
def suite():
modules_to_test = find_modules_and_add_paths (os.getcwd())
alltests = unittest.TestSuite()
for module in map(__import__, modules_to_test):
alltests.addTest(unittest.findTestCases(module))
return alltests
if __name__ == '__main__':
unittest.main(defaultTest='suite')
# s = all()
# runner = unittest.TextTestRunner()
# runner.run (s)
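# Note: os.path.walk() used above exists only in Python 2 (it was removed
# in Python 3).  A rough, illustrative equivalent of the discovery step
# using os.walk() would be:
def find_modules_py3_sketch(root_path):
    import_list = []
    for dirname, dirs, names in os.walk(root_path):
        add_tests_to_list(import_list, dirname, names)
    return import_list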
|
{
"content_hash": "468dd207318b6e31999ae4435a67b53c",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 64,
"avg_line_length": 33.101694915254235,
"alnum_prop": 0.65847414234511,
"repo_name": "rockaboxmedia/pexpect",
"id": "68b2a2bcee5e3816afb31a7eec2a7297c216e2a9",
"size": "1975",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tools/testall.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2453"
},
{
"name": "Python",
"bytes": "346736"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import os
import logging
import re
from hamster_bridge.listeners import (
HamsterListener,
ConfigValue,
)
logger = logging.getLogger(__name__)
class RedmineHamsterListener(HamsterListener):
"""
    Redmine listener for hamster tasks.
Tested with Redmine 2.5.1.stable
    Important: will only work with a German or English installation!
INFO: Unfortunately the Redmine API returns issue statuses in the currently set language.
There is only the id and the name of the status.
f.e. "New" has usually ID 1, but its name would be "Neu" in a German installation.
"""
short_name = 'redmine'
config_values = [
ConfigValue(
key='server_url',
            setup_func=lambda: raw_input('Root URL to the Redmine server [e.g. "http://redmine.example.org/"]\n'),
sensitive=False,
),
ConfigValue(
key='api_key',
setup_func=lambda: raw_input('Your Redmine API access key.\n'),
sensitive=False,
),
ConfigValue(
key='version',
setup_func=lambda: raw_input('The Redmine version number, e.g. 2.5.1\n'),
sensitive=False,
),
ConfigValue(
key='auto_start',
setup_func=lambda: raw_input('Automatically start the issue when you start the task in hamster? [y/n]\n'),
sensitive=False,
),
ConfigValue(
key='verify_ssl',
setup_func=lambda: raw_input('Verify HTTPS/SSL connections? '
'You can also specify the path to a CA certificate bundle. [y/n/PATH]\n'),
sensitive=False,
),
]
# Redmine issue key is just a number
    issue_from_title = re.compile(r'([0-9]+) ')
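    # Illustrative behavior (assumed fact titles): any digit run followed
    # by a space is treated as a candidate issue id, e.g.
    #   issue_from_title.findall('1234 fix the login bug')  ->  ['1234']
    #   issue_from_title.findall('no issue here')           ->  []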
def __init__(self):
"""
        Sets up the class by defining some internal variables.
"""
# issue status dict for the default issue status
self.__issue_status_default = None
# issue status dict for the "in Work" status
self.__issue_status_in_work = None
# the redmine instance
self.redmine = None
# will store the activities
self.__activities = {}
def __get_issue_from_fact(self, fact):
"""
Tries to find an issue matching the given fact.
:param fact: the currently stopped fact
:type fact: hamster.lib.stuff.Fact
:returns: the issue or None if not found
:rtype:
"""
from redmine.exceptions import ResourceNotFoundError
# iterate the possible issues, normally this should match exactly one...
for possible_issue in self.issue_from_title.findall(fact.activity):
try:
return self.redmine.issue.get(possible_issue)
except ResourceNotFoundError:
return None
return None
def __filter_issue_statuses(self):
"""
Filters the issue statuses for the relevant ones: the default and the status "In Work".
"""
def find_default(element):
"""
Filter function to find the default issue status.
"""
return hasattr(element, 'is_default') and getattr(element, 'is_default', False)
def find_in_work(element):
"""
Filter function to find the in work status.
"""
return element.name in [u'In Bearbeitung', u'In Work']
# get the issue statuses
issue_statuses = self.redmine.issue_status.all()
        # if none are found, log an error (logger.exception is only
        # meaningful inside an exception handler)
        if len(issue_statuses) == 0:
            logger.error('Unable to fetch issue statuses! Not possible to proceed!')
try:
self.__issue_status_default = [item for item in issue_statuses if find_default(item)][0]
except IndexError:
logger.exception('Unable to find a single default issue status!')
try:
self.__issue_status_in_work = [item for item in issue_statuses if find_in_work(item)][0]
except IndexError:
logger.exception('Unable to find a single "In Work" issue status!')
def __get_first_activity_id(self):
"""
Returns the first activity that was retrieved.
The first activity was marked with a True value.
:return: the id of the first activity
:rtype: int
"""
return [key for key, value in self.__activities.items() if value[1]][0]
def __get_activity_id(self, tags):
"""
Returns an activity id if it can be resolved from the list of given tags.
Otherwise the first found activity id is returned.
:param tags: list of tags
:type tags: list
:return: the activity id
:rtype: int
"""
if len(tags) == 0:
# this grabs the first activity from the dict
return self.__get_first_activity_id()
else:
for activity_id, activity_value in self.__activities.viewitems():
try:
next(tag for tag in tags if tag == activity_value[0])
return activity_id
except StopIteration:
# if not found
continue
# fallback if no tag matches
return self.__get_first_activity_id()
def prepare(self):
"""
Prepares the listener by checking connectivity to configured Redmine instance.
While doing so, grabs the issue statuses, too, used for on_fact_stopped.
"""
from redmine import Redmine
from redmine.exceptions import BaseRedmineError
verify_ssl = self.get_from_config('verify_ssl')
requests_dict = {}
if verify_ssl.lower() in ('y', 'true'):
logger.info("Enabling SSL/TLS certificate verification (default CA path)")
requests_dict['verify'] = True
elif verify_ssl.lower() in ('n', 'false'):
logger.warn("Disabling SSL/TLS certificate verification")
requests_dict['verify'] = False
elif os.path.isfile(verify_ssl):
logger.info("Enabling SSL/TLS certificate verification (custom CA "
"path) '%s'", verify_ssl)
requests_dict['verify'] = verify_ssl
else:
logger.error("verify_ssl = '%s' is not a valid CA cert path nor a "
"valid option. Falling back to enabling SSL/TLS verification "
"with default CA path", verify_ssl)
requests_dict['verify'] = True
# setup the redmine instance
self.redmine = Redmine(
self.get_from_config('server_url'),
key=self.get_from_config('api_key'),
version=self.get_from_config('version'),
requests=requests_dict,
)
# fetch the possible activities for time entries
time_entry_activities = self.redmine.enumeration.filter(resource='time_entry_activities')
# only now the real http request is made, use this as connectivity check
try:
logger.info('### Available Redmine activities for using as tag value:')
is_first = True
for tea in time_entry_activities:
self.__activities[tea.id] = (tea.name, is_first)
is_first = False
logger.info('### ' + tea.name)
except (BaseRedmineError, IOError):
logger.exception('Unable to communicate with redmine server. See error in the following output:')
# fetch all available issue statuses and filter the default and in work ones as they are the only relevant statuses here
self.__filter_issue_statuses()
def on_fact_started(self, fact):
"""
Called by HamsterBridge if a fact is started.
Will try to start the appropriate Redmine issue if there is one.
Uses the first found issue.
:param fact: the currently stopped fact
:type fact: hamster.lib.stuff.Fact
"""
# if issue shall be auto started...
auto_start = self.get_from_config('auto_start')
if auto_start == 'y':
# fetch the issue from the hamster fact
issue = self.__get_issue_from_fact(fact)
# abort if no issue was found
if not issue:
logger.error('Unable to query issue for starting of hamster fact %s', fact.original_activity)
return
# if the issue is in the default state (aka the initial state), put it into work state
if issue.status.id == self.__issue_status_default.id:
logger.info('setting status to "In Work" for issue %d', issue.id)
issue.status_id = self.__issue_status_in_work.id
issue.save()
def on_fact_stopped(self, fact):
"""
Called by HamsterBridge if a fact is stopped.
Will try to log the time to the appropriate Redmine issue if there is one.
Uses the first found issue.
:param fact: the currently stopped fact
:type fact: hamster.lib.stuff.Fact
"""
# fetch the issue from the hamster fact
issue = self.__get_issue_from_fact(fact)
# abort if no issue was found
if not issue:
logger.error('Unable to query issue for stopping of hamster fact %s', fact.original_activity)
return
# create the time entry
self.redmine.time_entry.create(
issue_id=issue.id,
hours='%0.2f' % (fact.delta.total_seconds() / 3600.0),
            # grab the tags, convert to string (the values are dbus.String) and find an activity
activity_id=self.__get_activity_id([str(tag) for tag in fact.tags]),
comments=fact.description,
)
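# A minimal usage sketch (illustrative; hamster-bridge normally drives the
# listener itself): after prepare() has connected and cached statuses and
# activities, a stopped fact whose title contains an issue number is
# booked as a Redmine time entry:
#
#   listener = RedmineHamsterListener()
#   listener.prepare()                # connect, fetch activities/statuses
#   listener.on_fact_started(fact)    # optionally moves issue to "In Work"
#   listener.on_fact_stopped(fact)    # books fact.delta hours on the issue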
|
{
"content_hash": "ff25cfce83e0fd73cfb6ec0b563cd84c",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 128,
"avg_line_length": 37.18490566037736,
"alnum_prop": 0.5862593870509438,
"repo_name": "kraiz/hamster-bridge",
"id": "c1d2b95279e413b5236983dbb45206af58d53740",
"size": "9854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hamster_bridge/listeners/redmine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25489"
}
],
"symlink_target": ""
}
|
import sys
import os.path
import math
import random
import time
import numpy as np
from ginga.misc import Bunch, Callback
from ginga.fonts import font_asst
import ginga.icons
import ginga.toolkit
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk # noqa
from gi.repository import Gdk # noqa
from gi.repository import GdkPixbuf # noqa
from gi.repository import GObject # noqa
from gi.repository import Pango # noqa
import cairo
ginga.toolkit.use('gtk3')
# path to our icons
icondir = os.path.split(ginga.icons.__file__)[0]
DND_TARGET_TYPE_TEXT = 0
DND_TARGET_TYPE_URIS = 1
class WidgetMask(object):
def __init__(self, *args):
self.cb_fn = None
self.cb_args = []
self.cb_kwdargs = {}
self.connected = False
self.changed = False
def sconnect(self, signal, cb_fn, *args, **kwdargs):
self.cb_fn = cb_fn
self.cb_args = args
self.cb_kwdargs = kwdargs
self.connect(signal, self.cb)
self.connected = True
def change(self):
if self.connected:
self.changed = True
def cb(self, *args):
if self.changed:
self.changed = False
return
newargs = list(args)
newargs.extend(self.cb_args)
kwdargs = self.cb_kwdargs.copy()
return self.cb_fn(*newargs, **kwdargs)
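# The WidgetMask mixin above suppresses the next signal callback whenever a
# value is changed programmatically through the overridden setters, so only
# user-driven changes reach the handler.  A minimal illustrative sketch
# (widget and handler names are assumptions):
def _widget_mask_sketch():
    def on_toggled(widget):
        pass  # runs for user clicks, but not for the set_active() below
    btn = CheckButton(label="demo")
    btn.sconnect('toggled', on_toggled)
    btn.set_active(True)   # flags change(); the masked cb() swallows it
    return btn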
class TopLevel(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self)
class CheckButton(WidgetMask, Gtk.CheckButton):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.CheckButton.__init__(self, *args, **kwdargs)
def set_active(self, newval):
oldval = self.get_active()
if oldval != newval:
self.change()
super(CheckButton, self).set_active(newval)
class ToggleButton(WidgetMask, Gtk.ToggleButton):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.ToggleButton.__init__(self, *args, **kwdargs)
def set_active(self, newval):
oldval = self.get_active()
if oldval != newval:
self.change()
super(ToggleButton, self).set_active(newval)
def toggle(self):
oldval = self.get_active()
newval = not oldval
super(ToggleButton, self).set_active(newval)
class RadioButton(WidgetMask, Gtk.RadioButton):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.RadioButton.__init__(self, *args, **kwdargs)
def set_active(self, newval):
oldval = self.get_active()
if oldval != newval:
self.change()
super(RadioButton, self).set_active(newval)
def toggle(self):
oldval = self.get_active()
newval = not oldval
super(RadioButton, self).set_active(newval)
class CheckMenuItem(WidgetMask, Gtk.CheckMenuItem):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.CheckMenuItem.__init__(self, *args, **kwdargs)
def set_active(self, newval):
oldval = self.get_active()
if oldval != newval:
self.change()
super(CheckMenuItem, self).set_active(newval)
class SpinButton(WidgetMask, Gtk.SpinButton):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.SpinButton.__init__(self, *args, **kwdargs)
def set_value(self, newval):
oldval = self.get_value()
if oldval != newval:
self.change()
super(SpinButton, self).set_value(newval)
class HScale(WidgetMask, Gtk.HScale):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.HScale.__init__(self, *args, **kwdargs)
def set_value(self, newval):
oldval = self.get_value()
if oldval != newval:
self.change()
super(HScale, self).set_value(newval)
class VScale(WidgetMask, Gtk.VScale):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.VScale.__init__(self, *args, **kwdargs)
def set_value(self, newval):
oldval = self.get_value()
if oldval != newval:
self.change()
super(VScale, self).set_value(newval)
class ComboBox(WidgetMask, Gtk.ComboBox):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.ComboBox.__init__(self, *args, **kwdargs)
def set_active(self, newval):
oldval = self.get_active()
if oldval != newval:
self.change()
super(ComboBox, self).set_active(newval)
def insert_alpha(self, text):
model = self.get_model()
tup = (text, )
j = 0
for i in range(len(model)):
j = i
if model[i][0] > text:
model.insert(j, tup)
return
model.insert(j + 1, tup)
def insert_text(self, idx, text):
model = self.get_model()
tup = (text, )
model.insert(idx, tup)
def delete_alpha(self, text):
model = self.get_model()
for i in range(len(model)):
if model[i][0] == text:
del model[i]
return
def clear(self):
model = self.get_model()
model.clear()
def show_text(self, text):
model = self.get_model()
for i in range(len(model)):
if model[i][0] == text:
self.set_active(i)
return
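# Minimal usage sketch for the ComboBox helpers above (illustrative), built
# with combo_box_new_text() defined near the end of this module:
def _combo_sketch():
    cbox = combo_box_new_text()
    for name in ("banana", "apple", "cherry"):
        cbox.insert_alpha(name)    # keeps the text model sorted
    cbox.show_text("cherry")       # selects the row whose text matches
    return cbox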
class Notebook(WidgetMask, Gtk.Notebook):
def __init__(self, *args, **kwdargs):
WidgetMask.__init__(self)
Gtk.Notebook.__init__(self, *args, **kwdargs)
def set_group_id(self, id):
super(Notebook, self).set_group_name(str(id))
def set_current_page(self, new_idx):
old_idx = self.get_current_page()
if old_idx != new_idx:
self.change()
super(Notebook, self).set_current_page(new_idx)
class MultiDragDropTreeView(Gtk.TreeView):
'''TreeView that captures mouse events to make drag and drop work
properly
See: https://gist.github.com/kevinmehall/278480#file-multiple-selection-dnd-class-py
'''
def __init__(self):
super(MultiDragDropTreeView, self).__init__()
self.connect('button_press_event', self.on_button_press)
self.connect('button_release_event', self.on_button_release)
self.defer_select = False
def on_button_press(self, widget, event):
# Here we intercept mouse clicks on selected items so that we can
# drag multiple items without the click selecting only one
target = self.get_path_at_pos(int(event.x), int(event.y))
if (target and
event.type == Gdk.EventType.BUTTON_PRESS and
not (event.state &
(Gdk.ModifierType.CONTROL_MASK |
Gdk.ModifierType.SHIFT_MASK)) and
self.get_selection().path_is_selected(target[0])):
# disable selection
self.get_selection().set_select_function(lambda *ignore: False)
self.defer_select = target[0]
def on_button_release(self, widget, event):
# re-enable selection
self.get_selection().set_select_function(lambda *ignore: True)
target = self.get_path_at_pos(int(event.x), int(event.y))
if (self.defer_select and target and
self.defer_select == target[0] and
not (event.x == 0 and event.y == 0)): # certain drag and drop
self.set_cursor(target[0], target[1], False)
self.defer_select = False
class MDISubWindow(Callback.Callbacks):
def __init__(self, widget, label):
super(MDISubWindow, self).__init__()
self.widget = widget
vbox = Gtk.VBox()
vbox.set_border_width(4)
hbox = Gtk.HBox()
close = Gtk.Button("x")
maxim = Gtk.Button("^")
minim = Gtk.Button("v")
hbox.pack_start(close, False, False, 0)
hbox.pack_start(minim, False, False, 0)
hbox.pack_start(maxim, False, False, 0)
evbox = Gtk.EventBox()
evbox.add(label)
modify_bg(evbox, "gray90")
self.label = label
self.evbox = evbox
hbox.pack_start(evbox, True, True, 2)
vbox.pack_start(hbox, False, False, 0)
vbox.pack_start(widget, True, True, 4)
# what size does the widget want to be?
rect = widget.get_allocation()
self.x, self.y, wd, ht = rect.x, rect.y, rect.width, rect.height
## wd = widget.get_preferred_width()
## ht = widget.get_preferred_height()
## wd, ht = widget.get_size_request()
self.width, self.height = max(wd, 300), max(ht, 300)
frame = Gtk.EventBox()
frame.set_size_request(self.width, self.height)
frame.props.visible_window = True
frame.set_border_width(0)
modify_bg(frame, "gray70")
self.frame = frame
frame.add(vbox)
frame.show_all()
for name in ('close', 'maximize', 'minimize'):
self.enable_callback(name)
maxim.connect('clicked', lambda *args: self.make_callback('maximize'))
minim.connect('clicked', lambda *args: self.make_callback('minimize'))
close.connect('clicked', lambda *args: self.make_callback('close'))
def raise_(self):
window = self.frame.get_window()
if window is not None:
window.raise_()
def lower(self):
window = self.frame.get_window()
if window is not None:
window.lower()
def focus(self):
self.frame.grab_focus()
class MDIWidget(Gtk.Layout):
"""
Multiple Document Interface type widget for Gtk.
"""
def __init__(self):
Gtk.Layout.__init__(self)
self.children = []
self.cur_index = -1
self.selected_child = None
self.kbdmouse_mask = 0
self.cascade_offset = 50
self.minimized_width = 150
self.delta_px = 50
mask = self.get_events()
self.set_events(mask |
Gdk.EventMask.ENTER_NOTIFY_MASK |
Gdk.EventMask.LEAVE_NOTIFY_MASK |
Gdk.EventMask.FOCUS_CHANGE_MASK |
Gdk.EventMask.STRUCTURE_MASK |
Gdk.EventMask.BUTTON_PRESS_MASK |
Gdk.EventMask.BUTTON_RELEASE_MASK |
Gdk.EventMask.KEY_PRESS_MASK |
Gdk.EventMask.KEY_RELEASE_MASK |
Gdk.EventMask.POINTER_MOTION_MASK |
Gdk.EventMask.POINTER_MOTION_HINT_MASK |
Gdk.EventMask.SCROLL_MASK)
self.connect("motion_notify_event", self.motion_notify_event)
self.connect("button_press_event", self.button_press_event)
self.connect("button_release_event", self.button_release_event)
modify_bg(self, "gray50")
def add_subwin(self, subwin):
self.children.append(subwin)
subwin.evbox.connect("button_press_event", self.select_child_cb, subwin)
subwin.frame.connect("button_press_event", self.start_resize_cb, subwin)
subwin.add_callback('maximize', lambda *args: self.maximize_page(subwin))
subwin.add_callback('minimize', lambda *args: self.minimize_page(subwin))
self.put(subwin.frame, subwin.x, subwin.y)
        # note: we seem to need a slight delay to let the widget be mapped
# in order to accurately determine its position and size
#self.update_subwin_position(subwin)
#self.update_subwin_size(subwin)
GObject.timeout_add(1000, self.update_subwin_position, subwin)
GObject.timeout_add(1500, self.update_subwin_size, subwin)
self._update_area_size()
def append_page(self, widget, label):
subwin = MDISubWindow(widget, label)
# pick a random spot to place the window initially
rect = self.get_allocation()
wd, ht = rect.width, rect.height
x = random.randint(self.cascade_offset, # nosec
max(self.cascade_offset + 10, wd // 2))
y = random.randint(self.cascade_offset, # nosec
max(self.cascade_offset + 10, ht // 2))
subwin.x, subwin.y = x, y
self.add_subwin(subwin)
return subwin
def set_tab_reorderable(self, w, tf):
pass
def set_tab_detachable(self, w, tf):
pass
def get_tab_label(self, w):
return None
def page_num(self, widget):
index, subwin = self._widget_to_index(widget)
return index
def get_nth_page(self, idx):
if 0 <= idx < len(self.children):
subwin = self.children[idx]
return subwin.widget
return None
def set_current_page(self, idx):
subwin = self.children[idx]
subwin.raise_()
self.cur_index = idx
def get_current_page(self):
return self.cur_index
def _widget_to_index(self, widget):
index = 0
for subwin in self.children:
if subwin.widget == widget:
return index, subwin
index += 1
return -1, None
def remove_page(self, idx):
subwin = self.children[idx]
self.remove(subwin.widget)
def remove(self, widget):
idx, subwin = self._widget_to_index(widget)
if subwin is not None:
self.children.remove(subwin)
self.cur_index = -1
frame = subwin.frame
super(MDIWidget, self).remove(frame)
widget.unparent()
self._update_area_size()
def get_widget_position(self, widget):
rect = widget.get_allocation()
x, y = rect.x, rect.y
return x, y
def get_widget_size(self, widget):
rect = widget.get_allocation()
width, height = rect.width, rect.height
return width, height
def update_subwin_position(self, subwin):
rect = subwin.frame.get_allocation()
        x, y = rect.x, rect.y
subwin.x, subwin.y = x, y
def update_subwin_size(self, subwin):
rect = subwin.frame.get_allocation()
wd, ht = rect.width, rect.height
subwin.width, subwin.height = wd, ht
def raise_widget(self, subwin):
subwin.raise_()
def select_child_cb(self, layout, event, subwin):
x_root, y_root = event.x_root, event.y_root
x, y = self.get_widget_position(subwin.frame)
subwin.x, subwin.y = x, y
# make this the selected widget
idx = self.page_num(subwin.widget)
if idx >= 0:
self.set_current_page(idx)
self.selected_child = Bunch.Bunch(subwin=subwin, action='move',
x_origin=x, y_origin=y,
x_root=x_root, y_root=y_root)
return True
def start_resize_cb(self, widget, event, subwin):
self.update_subwin_size(subwin)
x_root, y_root = event.x_root, event.y_root
x, y = widget.translate_coordinates(self, event.x, event.y)
rect = subwin.frame.get_allocation()
x1, y1, wd, ht = rect.x, rect.y, rect.width, rect.height
x2, y2 = x1 + wd, y1 + ht
subwin.x, subwin.y = x1, y1
subwin.width, subwin.height = wd, ht
updates = set([])
if abs(x - x2) < self.delta_px:
# right side
if abs(y - y2) < self.delta_px:
# lower right corner
origin = 'lr'
updates = set(['w', 'h'])
elif abs(y - y1) < self.delta_px:
origin = 'ur'
updates = set(['w', 'h', 'y'])
else:
origin = 'r'
updates = set(['w'])
elif abs(x - x1) < self.delta_px:
# left side
if abs(y - y2) < self.delta_px:
# lower left corner
origin = 'll'
updates = set(['w', 'h', 'x'])
elif abs(y - y1) < self.delta_px:
origin = 'ul'
updates = set(['w', 'h', 'x', 'y'])
else:
origin = 'l'
updates = set(['w', 'x'])
elif abs(y - y2) < self.delta_px:
# bottom
origin = 'b'
updates = set(['h'])
else:
origin = 't'
updates = set(['h', 'y'])
self.selected_child = Bunch.Bunch(subwin=subwin, action='resize',
x_origin=x1, y_origin=y1,
wd=wd, ht=ht,
x_root=x_root, y_root=y_root,
origin=origin, updates=updates)
return True
def button_press_event(self, widget, event):
button = self.kbdmouse_mask
if event.button != 0:
button |= 0x1 << (event.button - 1)
return True
def _update_area_size(self):
rect = self.get_allocation()
mx_wd, mx_ht = rect.width, rect.height
for subwin in self.children:
rect = subwin.frame.get_allocation()
x, y, wd, ht = rect.x, rect.y, rect.width, rect.height
mx_wd, mx_ht = max(mx_wd, x + wd), max(mx_ht, y + ht)
self.set_size(mx_wd, mx_ht)
def _resize(self, bnch, x_root, y_root):
subwin = bnch.subwin
updates = bnch.updates
dx, dy = x_root - bnch.x_root, y_root - bnch.y_root
wd = bnch.wd
if 'w' in updates:
wd = int(wd + dx)
ht = bnch.ht
if 'h' in updates:
ht = int(ht + dy)
if 'x' in updates or 'y' in updates:
x = bnch.x_origin
if 'x' in updates:
x = int(x + dx)
if x < bnch.x_origin:
wd = bnch.wd + abs(dx)
else:
wd = bnch.wd + -abs(dx)
y = bnch.y_origin
if 'y' in updates:
y = int(y + dy)
if y < bnch.y_origin:
ht = bnch.ht + abs(dy)
else:
ht = bnch.ht + -abs(dy)
# this works better if it is not self.move_page()
self.move(subwin.frame, x, y)
if 'w' in updates or 'h' in updates:
# this works better if it is not self.resize_page()
subwin.frame.set_size_request(wd, ht)
self._update_area_size()
def button_release_event(self, widget, event):
x_root, y_root = event.x_root, event.y_root
button = self.kbdmouse_mask
if event.button != 0:
button |= 0x1 << (event.button - 1)
if self.selected_child is not None:
bnch = self.selected_child
subwin = bnch.subwin
if bnch.action == 'move':
x = int(subwin.x + (x_root - bnch.x_root))
y = int(subwin.y + (y_root - bnch.y_root))
self.move_page(subwin, x, y)
elif bnch.action == 'resize':
self._resize(bnch, x_root, y_root)
self.update_subwin_position(subwin)
# NOTE: necessary for wrapped widget to remember position
self.move_page(subwin, subwin.x, subwin.y)
self.update_subwin_size(subwin)
# NOTE: necessary for wrapped widget to remember size
self.resize_page(subwin, subwin.width, subwin.height)
self.selected_child = None
self._update_area_size()
return True
def motion_notify_event(self, widget, event):
button = self.kbdmouse_mask
x_root, y_root, state = event.x_root, event.y_root, event.state
if state & Gdk.ModifierType.BUTTON1_MASK:
button |= 0x1
elif state & Gdk.ModifierType.BUTTON2_MASK:
button |= 0x2
elif state & Gdk.ModifierType.BUTTON3_MASK:
button |= 0x4
if (button & 0x1) and (self.selected_child is not None):
bnch = self.selected_child
subwin = bnch.subwin
if bnch.action == 'move':
x = int(subwin.x + (x_root - bnch.x_root))
y = int(subwin.y + (y_root - bnch.y_root))
# this works better if it is not self.move_page()
self.move(subwin.frame, x, y)
elif bnch.action == 'resize':
self._resize(bnch, x_root, y_root)
self._update_area_size()
return True
def tile_pages(self):
# calculate number of rows and cols, try to maintain a square
# TODO: take into account the window geometry
num_widgets = len(self.children)
rows = int(round(math.sqrt(num_widgets)))
cols = rows
if rows**2 < num_widgets:
cols += 1
# find out how big each window should be
rect = self.get_allocation()
width, height = rect.width, rect.height
wd, ht = width // cols, height // rows
# and move and resize them into place
for i in range(0, rows):
for j in range(0, cols):
index = i * cols + j
if index < num_widgets:
subwin = self.children[index]
self.resize_page(subwin, wd, ht)
x, y = j * wd, i * ht
self.move_page(subwin, x, y)
subwin.raise_()
self._update_area_size()
def cascade_pages(self):
x, y = 0, 0
for subwin in self.children:
self.move_page(subwin, x, y)
subwin.raise_()
x += self.cascade_offset
y += self.cascade_offset
self._update_area_size()
def use_tabs(self, tf):
pass
def move_page(self, subwin, x, y):
self.move(subwin.frame, x, y)
subwin.x, subwin.y = x, y
def resize_page(self, subwin, wd, ht):
subwin.frame.set_size_request(wd, ht)
subwin.width, subwin.height = wd, ht
def maximize_page(self, subwin):
rect = self.get_allocation()
wd, ht = rect.width, rect.height
subwin.raise_()
self.resize_page(subwin, wd, ht)
self.move_page(subwin, 0, 0)
self._update_area_size()
def minimize_page(self, subwin):
rect = self.get_allocation()
height = rect.height
rect = subwin.frame.get_allocation()
x = rect.x
rect = subwin.label.get_allocation()
ht = rect.height
self.resize_page(subwin, self.minimized_width, ht)
self.move_page(subwin, x, height - ht)
subwin.lower()
self._update_area_size()
def close_page(self, subwin):
self._update_area_size()
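# Minimal usage sketch for MDIWidget above (illustrative; the child widget
# and label are assumptions): each page gets a movable, resizable frame
# with minimize/maximize/close buttons, and pages can be tiled or cascaded.
def _mdi_sketch():
    mdi = MDIWidget()
    subwin = mdi.append_page(Gtk.Label(label="content"),
                             Gtk.Label(label="title"))
    mdi.tile_pages()    # or mdi.cascade_pages()
    return mdi, subwin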
class Splitter(Gtk.Layout):
"""
Splitter type widget for Gtk.
"""
def __init__(self, orientation='horizontal', thumb_px=8):
Gtk.Layout.__init__(self)
self.orientation = orientation
self._sizes = []
self._dims = (0, 0)
self.children = []
self.thumbs = []
self.thumb_px = thumb_px
self.thumb_aspect = 3.25
self.kbdmouse_mask = 0
mask = self.get_events()
self.set_events(mask |
Gdk.EventMask.ENTER_NOTIFY_MASK |
Gdk.EventMask.LEAVE_NOTIFY_MASK
)
self.connect("size-allocate", self._size_allocate_cb)
modify_bg(self, "gray50")
def add_widget(self, widget):
rect = self.get_allocation()
wd, ht = rect.width, rect.height
self.children.append(widget)
if len(self.children) == 1:
widget.set_size_request(wd, ht)
self.put(widget, 0, 0)
sizes = self._sizes
if len(sizes) == 0:
pos = wd if self.orientation == 'horizontal' else ht
sizes = [pos]
self.set_sizes(sizes)
else:
if self.orientation == 'horizontal':
thumbfile, _w, _h = ('vdots.png', self.thumb_px,
int(self.thumb_px * self.thumb_aspect))
else:
thumbfile, _w, _h = ('hdots.png',
int(self.thumb_px * self.thumb_aspect),
self.thumb_px)
iconfile = os.path.join(icondir, thumbfile)
pixbuf = pixbuf_new_from_file_at_size(iconfile, _w, _h)
image = Gtk.Image.new_from_pixbuf(pixbuf)
thumb = Gtk.EventBox()
thumb.set_visible_window(True)
thumb.add(image)
modify_bg(thumb, "gray90")
i = len(self.thumbs)
self.thumbs.append(thumb)
thumb.connect("button_press_event", self._start_resize_cb, i)
thumb.connect("button_release_event", self._stop_resize_cb, i)
thumb.connect("motion_notify_event", self._do_resize_cb, i)
thumb.connect("enter_notify_event", self._thumb_enter_cb)
thumb.connect("leave_notify_event", self._thumb_leave_cb)
self.put(thumb, 0, 0)
self.put(widget, 0, 0)
sizes = self._sizes
if len(sizes) < len(self.children):
pos = wd if self.orientation == 'horizontal' else ht
sizes.append(pos)
self.set_sizes(sizes)
self.show_all()
def _thumb_enter_cb(self, widget, event):
# change the cursor to a resize one when we enter the thumb area
display = self.get_display()
cur_name = ('ew-resize' if self.orientation == 'horizontal'
else 'ns-resize')
cursor = Gdk.Cursor.new_from_name(display, cur_name)
win = self.get_window()
if win is not None:
win.set_cursor(cursor)
def _thumb_leave_cb(self, widget, event):
# change the cursor to the normal one when we leave the thumb area
display = self.get_display()
cursor = Gdk.Cursor.new_from_name(display, 'default')
win = self.get_window()
if win is not None:
win.set_cursor(cursor)
def get_sizes(self):
return list(self._sizes)
def set_sizes(self, sizes):
sizes = list(sizes)
## if sizes == self._sizes:
## return
if self.get_realized():
rect = self.get_allocation()
wd, ht = rect.width, rect.height
else:
min_req, nat_req = self.get_preferred_size()
wd, ht = nat_req.width, nat_req.height
x, y = 0, 0
# calc space needed by all necessary thumbs
remaining_thumb_space = max(0, len(self.children) - 1) * self.thumb_px
new_sizes = []
thumbs, widgets = [], []
for num, child in enumerate(self.children):
off = sizes[num]
if self.orientation == 'horizontal':
if num == 0:
widgets.append((child, 0, 0, off, ht))
new_sizes.append(off)
x += off
else:
thumb = self.thumbs[num - 1]
thumbs.append((thumb, x, y, self.thumb_px, ht))
x += self.thumb_px
remaining_thumb_space -= self.thumb_px
rest = max(0, wd - (x + remaining_thumb_space))
if num < len(self.children) - 1:
rest = min(off, rest)
widgets.append((child, x, y, rest, ht))
new_sizes.append(rest)
x += rest
else:
if num == 0:
widgets.append((child, 0, 0, wd, off))
new_sizes.append(off)
y += off
else:
thumb = self.thumbs[num - 1]
thumbs.append((thumb, x, y, wd, self.thumb_px))
y += self.thumb_px
remaining_thumb_space -= self.thumb_px
rest = max(0, ht - (y + remaining_thumb_space))
if num < len(self.children) - 1:
rest = min(off, rest)
widgets.append((child, x, y, wd, rest))
new_sizes.append(rest)
y += rest
self._sizes = new_sizes
assert len(self._sizes) == len(self.children)
for child, x, y, wd, ht in widgets:
self._move_resize_child(child, x, y, wd, ht)
for thumb, x, y, wd, ht in thumbs:
self._move_resize_child(thumb, x, y, wd, ht)
def remove(self, child):
if child not in self.children:
raise ValueError("widget is not one of our children")
idx = self.children.index(child)
if len(self.children) > 1:
if idx > 0:
# not first child
thumb = self.thumbs.pop(idx - 1)
else:
thumb = self.thumbs.pop(0)
super(Splitter, self).remove(thumb)
self._sizes.pop(idx)
self.children.remove(child)
super(Splitter, self).remove(child)
self.set_sizes(self._sizes)
def _move_resize_child(self, child, x, y, wd, ht):
rect = child.get_allocation()
modified = False
if (rect.x, rect.y) != (x, y):
modified = True
self.move(child, x, y)
if (rect.width, rect.height) != (wd, ht):
modified = True
child.set_size_request(wd, ht)
alloc = Gdk.Rectangle()
alloc.x, alloc.y, alloc.width, alloc.height = x, y, wd, ht
child.size_allocate(alloc)
#child.set_clip(alloc)
win = child.get_window()
if win is not None:
win.invalidate_rect(None, True)
win.resize(wd, ht)
if modified:
# don't think this should be necessary, but just in case
child.queue_draw()
#child.queue_resize()
child.queue_allocate()
def _calc_size(self, i, pos):
sizes = list(self._sizes)
n = sum([sizes[j] for j in range(0, i)])
n += max(0, i - 1) * self.thumb_px
return max(0, pos - n)
def _start_resize_cb(self, widget, event, i):
x_root, y_root = event.x_root, event.y_root
x, y = widget.translate_coordinates(self, event.x, event.y)
pos = x if self.orientation == 'horizontal' else y
sizes = list(self._sizes)
sizes[i] = self._calc_size(i, pos)
self.set_sizes(sizes)
return True
def _stop_resize_cb(self, widget, event, i):
x_root, y_root = event.x_root, event.y_root
x, y = widget.translate_coordinates(self, event.x, event.y)
pos = x if self.orientation == 'horizontal' else y
sizes = list(self._sizes)
sizes[i] = self._calc_size(i, pos)
self.set_sizes(sizes)
return True
def _do_resize_cb(self, widget, event, i):
button = self.kbdmouse_mask
x_root, y_root, state = event.x_root, event.y_root, event.state
x, y = widget.translate_coordinates(self, event.x, event.y)
if state & Gdk.ModifierType.BUTTON1_MASK:
button |= 0x1
elif state & Gdk.ModifierType.BUTTON2_MASK:
button |= 0x2
elif state & Gdk.ModifierType.BUTTON3_MASK:
button |= 0x4
if button == 0x1:
pos = x if self.orientation == 'horizontal' else y
sizes = list(self._sizes)
sizes[i] = self._calc_size(i, pos)
self.set_sizes(sizes)
return True
def _size_allocate_cb(self, widget, rect):
x, y, wd, ht = rect.x, rect.y, rect.width, rect.height
dims = (wd, ht)
if dims == self._dims:
return
super(Splitter, self).set_size(wd, ht)
self._dims = dims
self.set_sizes(self._sizes)
return True
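# Minimal usage sketch for Splitter above (illustrative): children are laid
# out along the chosen orientation with draggable thumbs between them, and
# pane sizes are managed as a list of pixel extents.
def _splitter_sketch():
    split = Splitter(orientation='horizontal')
    split.add_widget(Gtk.Label(label="left"))
    split.add_widget(Gtk.Label(label="right"))
    split.set_sizes([200, 400])    # pixel widths of the two panes
    return split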
class Dial(Gtk.DrawingArea):
__gtype_name__ = "Dial"
__gsignals__ = {
"value-changed": (GObject.SignalFlags.RUN_FIRST, GObject.TYPE_NONE,
(GObject.TYPE_FLOAT,)),
}
def __init__(self):
Gtk.DrawingArea.__init__(self)
self.set_can_focus(True)
self.dims = np.array((0, 0))
self.center = np.array((0.0, 0.0))
self.bg = (0.94, 0.94, 0.94)
self.fg = (0.4, 0.4, 0.4)
self.knob_fg = (1.0, 1.0, 1.0)
self.knob_fill = (0.2, 0.2, 0.2)
self.focus_fg = (0.2, 0.6, 0.9)
self.fontname = 'Sans Serif'
self.fontsize = 10.0
self._has_focus = False
self.surface = None
        # whether to draw the scale labels
self.draw_scale = True
# how to rotate the labels
self.label_style = 1
self.values = []
self.draw_value_pos = 0
self.value = 0.0
self.value_text = str(self.value)
# internal state
self._dragging = False
self.tracking = False
self.wrap = False
self.angle = 0.0
self.ang_offset = 0.0
self.ang_invert = False
self.turn_delta = 6.0
self.min_ang_deg = 0.0
self.max_ang_deg = 360.0
self.connect("draw", self.draw_event)
self.connect("configure-event", self.configure_event)
self.set_app_paintable(True)
# prevents extra redraws, because we manually redraw on a size
# change
self.set_redraw_on_allocate(False)
self.connect('button-press-event', self.button_press_event)
self.connect('button-release-event', self.button_release_event)
self.connect('motion-notify-event', self.motion_notify_event)
self.connect('scroll-event', self.scroll_event)
self.connect('focus_in_event', self.focus_event, True)
self.connect('focus_out_event', self.focus_event, False)
mask = self.get_events()
self.set_events(mask |
Gdk.EventMask.BUTTON_PRESS_MASK |
Gdk.EventMask.BUTTON_RELEASE_MASK |
Gdk.EventMask.POINTER_MOTION_MASK |
Gdk.EventMask.SCROLL_MASK |
Gdk.EventMask.FOCUS_CHANGE_MASK |
Gdk.EventMask.EXPOSURE_MASK)
def button_press_event(self, widget, event):
if event.button == 1:
self._dragging = True
self._calc_action(event.x, event.y)
return True
def button_release_event(self, widget, event):
self._dragging = False
self._calc_action(event.x, event.y)
return True
def motion_notify_event(self, widget, event):
# Are we holding down the left mouse button?
if not self._dragging:
return False
self._calc_action(event.x, event.y)
return True
def scroll_event(self, widget, event):
degrees, direction = get_scroll_info(event)
        if direction is None:
            # no usable scroll direction reported; ignore the event
            return True
        if direction < 180.0:
self.turn_ccw()
else:
self.turn_cw()
self.draw()
return True
def focus_event(self, widget, event, tf):
self._has_focus = tf
self.draw()
return True
def _calc_action(self, x, y):
ang_deg = np.degrees(np.arctan2(x - self.center[0],
y - self.center[1]) + np.pi * 1.5)
ang_deg = self.normalize_angle(ang_deg + self.ang_offset)
if self.ang_invert:
ang_deg = 360.0 - ang_deg
self.angle_action(x, y, ang_deg)
def draw(self):
if self.surface is None:
return
cr = cairo.Context(self.surface)
cr.select_font_face(self.fontname)
cr.set_font_size(self.fontsize)
# fill background
wd, ht = self.dims
cr.rectangle(0, 0, wd, ht)
r, g, b = self.bg
cr.set_source_rgba(r, g, b)
cr.fill()
r, g, b = self.fg
cr.set_source_rgba(r, g, b)
cr.set_line_width(2.0)
cr.save()
cx, cy = self.center
cr.translate(cx, cy)
cr.move_to(0, 0)
# draw circle
cradius = min(cx, cy)
cradius *= 0.66
cr.arc(0, 0, cradius, 0, 2 * np.pi)
cr.fill()
if self._has_focus:
r, g, b = self.focus_fg
else:
r, g, b = (0.0, 0.0, 0.0)
cr.set_source_rgba(r, g, b)
cr.new_path()
cr.set_line_width(2)
cr.arc(0, 0, cradius, 0, 2 * np.pi)
cr.stroke()
cr.new_path()
cr.set_line_width(1)
if self.draw_scale:
cr.new_path()
cr.set_source_rgba(0.0, 0.0, 0.0)
for tup in self.values:
if len(tup) == 3:
label, value, theta = tup
else:
value, theta = tup
label = str(value)
if self.ang_invert:
theta = 360.0 - theta
theta_pos = self.normalize_angle(theta + 90.0 - self.ang_offset)
theta_rad = np.radians(theta_pos)
a, b, wd, ht, i, j = cr.text_extents(label)
crad2 = cradius + ht / 2.0
if self.label_style == 0:
crad2 += wd
# draw small filled dot as position marker
cx, cy = (np.sin(theta_rad) * cradius,
np.cos(theta_rad) * cradius)
cr.move_to(cx, cy)
r, g, b = self.knob_fill
cr.set_source_rgba(r, g, b)
cr.arc(cx, cy, 2, 0, 2 * np.pi)
cr.stroke_preserve()
cr.fill()
# draw label
cx, cy = np.sin(theta_rad) * crad2, np.cos(theta_rad) * crad2
cr.move_to(cx, cy)
text_rad = np.arctan2(cx, cy)
if self.label_style == 0:
text_rad = 0.0
elif self.label_style == 1:
text_rad += np.pi
elif self.label_style == 2:
text_rad += - np.pi / 2
cr.save()
cr.translate(cx, cy)
cr.rotate(-text_rad)
if self.label_style == 1:
cr.move_to(-wd / 2, 0)
cr.show_text(label)
#cr.rotate(text_rad)
cr.restore()
cr.new_path()
cr.move_to(0, 0)
theta = self.angle
if self.ang_invert:
theta = 360.0 - theta
theta = self.normalize_angle(theta - self.ang_offset)
cr.rotate(-np.radians(theta))
# draw knob (pointer)
r, g, b = self.knob_fg
cr.set_source_rgba(r, g, b)
crad2 = cradius
cr.new_path()
x1, y1, x2, y2 = -crad2, 0, crad2, 0
cx1, cy1, cx2, cy2 = self.calc_vertexes(x1, y1, x2, y2,
arrow_length=crad2)
cr.move_to(x2, y2)
cr.line_to(cx1, cy1)
#cr.line_to(0, 0)
cr.line_to(cx2, cy2)
cr.close_path()
r, g, b = self.knob_fg
cr.set_source_rgba(r, g, b)
cr.stroke_preserve()
r, g, b = self.knob_fill
cr.set_source_rgba(r, g, b)
cr.fill()
cr.move_to(0, 0)
cr.arc(0, 0, abs(cx1 + cx2) * 2.1, 0, 2 * np.pi)
cr.stroke_preserve()
cr.fill()
text = self.value_text
if self.draw_value_pos == 1:
r, g, b = self.bg
cr.set_source_rgba(r, g, b)
cr.move_to(0, 0)
cr.show_text(text)
cr.restore()
if self.draw_value_pos == 2:
a, b, wd, ht, i, j = cr.text_extents(text)
r, g, b = self.fg
cr.set_source_rgba(r, g, b)
x, y = self.center
cr.move_to(x - wd / 2, (y - cradius) * 0.5 + ht)
cr.show_text(text)
cr.move_to(0, 0)
self.update_widget()
def normalize_angle(self, ang_deg):
ang_deg = np.fmod(ang_deg + 360.0, 360.0)
return ang_deg
def finalize_angle(self, ang_deg):
self.angle = ang_deg
self.draw()
def get_angle(self):
return self.angle
def set_labels(self, val_ang_pairs):
self.values = val_ang_pairs
self.draw()
def set_tracking(self, tf):
self.tracking = tf
def configure_event(self, widget, event):
rect = widget.get_allocation()
x, y, width, height = rect.x, rect.y, rect.width, rect.height
self.dims = np.array((width, height))
self.center = np.array((width / 2, height / 2))
self.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
self.draw()
return True
def update_widget(self):
if self.surface is None:
# window is not mapped/configured yet
return
win = self.get_window()
if win is not None and self.surface is not None:
wd, ht = self.dims
self.queue_draw_area(0, 0, wd, ht)
def draw_event(self, widget, cr):
# redraw the screen from backing surface
cr.set_source_surface(self.surface, 0, 0)
cr.set_operator(cairo.OPERATOR_SOURCE)
cr.paint()
return False
def calc_vertexes(self, start_cx, start_cy, end_cx, end_cy,
arrow_length=10, arrow_degrees=0.35):
angle = np.arctan2(end_cy - start_cy, end_cx - start_cx) + np.pi
cx1 = end_cx + arrow_length * np.cos(angle - arrow_degrees)
cy1 = end_cy + arrow_length * np.sin(angle - arrow_degrees)
cx2 = end_cx + arrow_length * np.cos(angle + arrow_degrees)
cy2 = end_cy + arrow_length * np.sin(angle + arrow_degrees)
return (cx1, cy1, cx2, cy2)
def angle_action(self, x, y, ang_deg):
"""Subclass overrides to provide custom behavior"""
self._set_value(ang_deg)
def turn_ccw(self):
"""Subclass overrides to provide custom behavior"""
self._set_value(self.angle + self.turn_delta)
def turn_cw(self):
"""Subclass overrides to provide custom behavior"""
self._set_value(self.angle - self.turn_delta)
def _set_value(self, ang_deg):
"""Subclass overrides to provide custom behavior"""
ang_deg = self.normalize_angle(ang_deg)
ang_deg = np.clip(ang_deg, self.min_ang_deg, self.max_ang_deg)
self.value = ang_deg
self.finalize_angle(ang_deg)
if not self._dragging or self.tracking:
self.emit("value-changed", self.value)
def set_value(self, ang_deg):
"""Subclass overrides to provide custom behavior"""
self._set_value(ang_deg)
def get_value(self):
"""Subclass overrides to provide custom behavior"""
return self.value
class ValueDial(Dial):
__gtype_name__ = "ValueDial"
def __init__(self):
Dial.__init__(self)
# for drawing value
self.label_style = 1
self.draw_value_pos = 2
# setup axis orientation to match value
self.ang_offset = 140.0
self.ang_invert = True
self.min_ang_deg = 0.0
self.max_ang_deg = 260.0
self.set_labels([("min", 0.0), ("max", 260.0)])
self.min_val = 0.0
self.max_val = 0.0
self.inc_val = 0.0
def angle_action(self, x, y, ang_deg):
value = self._angle_to_value(ang_deg)
self._set_value(value)
def turn_ccw(self):
ang_deg = np.clip(self.angle + self.turn_delta,
0.0, self.max_ang_deg)
value = self._angle_to_value(ang_deg)
self._set_value(value)
def turn_cw(self):
ang_deg = np.clip(self.angle - self.turn_delta,
0.0, self.max_ang_deg)
value = self._angle_to_value(ang_deg)
self._set_value(value)
def _set_value(self, value):
if value < self.min_val or value > self.max_val:
raise ValueError("value '{}' is out of range".format(value))
self.value = value
self.value_text = "%.2f" % self.value
ang_deg = self._value_to_angle(value)
self.finalize_angle(ang_deg)
if not self._dragging or self.tracking:
self.emit("value-changed", self.value)
def get_value(self):
return self.value
def _value_to_angle(self, value):
# make angle match value
rng = self.max_val - self.min_val
pct = (value - self.min_val) / rng
ang_deg = pct * self.max_ang_deg
ang_deg = np.clip(ang_deg, 0.0, self.max_ang_deg)
return ang_deg
def _angle_to_value(self, ang_deg):
# make value match angle
pct = ang_deg / self.max_ang_deg
rng = self.max_val - self.min_val
value = self.min_val + pct * rng
value = np.clip(value, self.min_val, self.max_val)
return value
def set_limits(self, min_val, max_val, inc_val):
self.min_val = min_val
self.max_val = max_val
self.inc_val = inc_val
pct = inc_val / (max_val - min_val)
self.turn_delta = pct * self.max_ang_deg
class IndexDial(Dial):
__gtype_name__ = "IndexDial"
def __init__(self):
Dial.__init__(self)
self.idx = 0
self.label_style = 1
def angle_action(self, x, y, ang_deg):
idx = self.best_index(ang_deg)
self.set_index(idx)
def turn_ccw(self):
idx = self.idx - 1
if idx < 0:
if self.wrap:
self.set_index(len(self.values) - 1)
else:
self.set_index(idx)
def turn_cw(self):
idx = self.idx + 1
if idx >= len(self.values):
if self.wrap:
self.set_index(0)
else:
self.set_index(idx)
def set_index(self, idx):
idx = int(idx)
if idx < 0 or idx >= len(self.values):
raise ValueError("index '{}' is outside range 0-{}".format(idx,
len(self.values)))
self.idx = idx
tup = self.values[idx]
self.value = tup[0] if len(tup) == 2 else tup[1]
self.value_text = str(self.value)
self.angle = tup[-1]
self.draw()
if not self._dragging or self.tracking:
self.emit("value-changed", idx)
def get_index(self):
return self.idx
def get_value(self):
return self.value
def best_index(self, ang_deg):
# find the index that is closest to the angle ang_deg
angles = np.array([tup[-1] for tup in self.values])
ang_deg = self.normalize_angle(ang_deg)
angles = np.abs(angles - ang_deg)
idx = np.argmin(angles)
return idx
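# Minimal usage sketch for the dials above (illustrative values): ValueDial
# maps a continuous range onto a 260-degree sweep, while IndexDial snaps to
# discrete labeled angles.
def _dial_sketch():
    vdial = ValueDial()
    vdial.set_limits(0.0, 100.0, 5.0)    # min, max, increment per turn step
    idial = IndexDial()
    # (label, value, angle-in-degrees) triples; dragging snaps the knob to
    # the labeled angle closest to the pointer
    idial.set_labels([('low', 0, 0.0), ('mid', 1, 120.0), ('high', 2, 240.0)])
    return vdial, idial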
class FileSelection(object):
def __init__(self, parent_w, action=Gtk.FileChooserAction.OPEN,
title="Select a file", all_at_once=False):
# TODO: deprecate the functionality when all_at_once == False
        # and make the default True
self.parent = parent_w
self.all_at_once = all_at_once
# Create a new file selection widget
self.filew = Gtk.FileChooserDialog(title=title, action=action)
self.filew.connect("destroy", self.close)
if action == Gtk.FileChooserAction.SAVE:
self.filew.add_buttons(Gtk.STOCK_SAVE, 1, Gtk.STOCK_CANCEL, 0)
else:
self.filew.add_buttons(Gtk.STOCK_OPEN, 1, Gtk.STOCK_CANCEL, 0)
self.filew.set_default_response(1)
self.filew.set_select_multiple(True)
self.filew.connect("response", self.file_ok_sel)
# Connect the cancel_button to destroy the widget
#self.filew.cancel_button.connect("clicked", self.close)
def popup(self, title, callfn, initialdir=None, filename=None):
"""Let user select and load file."""
self.cb = callfn
self.filew.set_title(title)
if initialdir:
self.filew.set_current_folder(initialdir)
if filename:
#self.filew.set_filename(filename)
self.filew.set_current_name(filename)
self.filew.show()
# default size can be enormous
self.filew.resize(800, 600)
# Get the selected filename
def file_ok_sel(self, w, rsp):
self.close(w)
if rsp == 0:
return
paths = self.filew.get_filenames()
if self.all_at_once:
self.cb(paths)
else:
for path in paths:
self.cb(path)
def close(self, widget):
self.filew.hide()
class DirectorySelection(FileSelection):
"""Handle directory selection dialog."""
def __init__(self, parent_w):
super(DirectorySelection, self).__init__(
parent_w, action=Gtk.FileChooserAction.SELECT_FOLDER,
title="Select a directory")
def popup(self, title, callfn, initialdir=None):
"""Let user select a directory."""
super(DirectorySelection, self).popup(title, callfn, initialdir)
class Timer(Callback.Callbacks):
"""Abstraction of a GUI-toolkit implemented timer."""
def __init__(self, duration=0.0):
"""Create a timer set to expire after `duration` sec.
"""
super(Timer, self).__init__()
self.duration = duration
        # For storing arbitrary data with timers
self.data = Bunch.Bunch()
self._timer = None
self.start_time = 0.0
self.deadline = 0.0
for name in ('expired', 'canceled'):
self.enable_callback(name)
def start(self, duration=None):
"""Start the timer. If `duration` is not None, it should
specify the time to expiration in seconds.
"""
if duration is None:
duration = self.duration
self.set(duration)
def set(self, duration):
self.stop()
self.start_time = time.time()
self.deadline = self.start_time + duration
# Gtk timer set in milliseconds
time_ms = int(duration * 1000.0)
self._timer = GObject.timeout_add(time_ms, self._redirect_cb)
def _redirect_cb(self):
self._timer = None
self.make_callback('expired')
def is_set(self):
return self._timer is not None
def cond_set(self, time_sec):
if not self.is_set():
# TODO: probably a race condition here
self.set(time_sec)
def elapsed_time(self):
return time.time() - self.start_time
def time_left(self):
return max(0.0, self.deadline - time.time())
def get_deadline(self):
return self.deadline
def stop(self):
try:
if self._timer is not None:
GObject.source_remove(self._timer)
except Exception:
pass
self._timer = None
def cancel(self):
"""Cancel this timer. If the timer is not running, there
is no error.
"""
self.stop()
self.make_callback('canceled')
clear = cancel
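# Minimal usage sketch for Timer above (illustrative): durations are given
# in seconds and the underlying GObject timeout fires the 'expired'
# callback on the GLib main loop.
def _timer_sketch():
    def on_expired(timer):
        timer.start()    # re-arm for another interval
    t = Timer(duration=2.0)
    t.add_callback('expired', on_expired)
    t.start()    # 'expired' fires after ~2.0 seconds
    return t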
def combo_box_new_text():
liststore = Gtk.ListStore(GObject.TYPE_STRING)
combobox = ComboBox()
combobox.set_model(liststore)
cell = Gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 0)
return combobox
def get_scroll_info(event):
"""
Returns the (degrees, direction) of a scroll motion Gtk event.
"""
valid, dx, dy = event.get_scroll_deltas()
if valid:
# we have a trackpad or some device that reports pixel deltas
delta = math.sqrt(dx ** 2 + dy ** 2)
if dy < 0:
delta = -delta
ang_rad = math.atan2(dy, dx)
direction = math.degrees(ang_rad) - 90.0
direction = math.fmod(direction + 360.0, 360.0)
# TODO: is this accurate?--NOT TESTED
num_degrees = delta / 8.0
else:
valid, direction = event.get_scroll_direction()
if valid:
if event.direction == Gdk.ScrollDirection.UP:
direction = 0.0
elif event.direction == Gdk.ScrollDirection.DOWN:
direction = 180.0
elif event.direction == Gdk.ScrollDirection.LEFT:
direction = 270.0
elif event.direction == Gdk.ScrollDirection.RIGHT:
direction = 90.0
else:
direction = None
# TODO: does Gtk encode the amount of scroll?
# 15 deg is standard 1-click turn for a wheel mouse
num_degrees = 15.0
return (num_degrees, direction)
def get_icon(iconpath, size=None):
if size is not None:
wd, ht = size
else:
wd, ht = 24, 24
pixbuf = pixbuf_new_from_file_at_size(iconpath, wd, ht)
return pixbuf
def get_font(font_family, point_size):
font_family = font_asst.resolve_alias(font_family, font_family)
font = Pango.FontDescription('%s %d' % (font_family, point_size))
return font
def load_font(font_name, font_file):
# TODO!
## raise ValueError("Loading fonts dynamically is an unimplemented"
## " feature for gtk3 back end")
return font_name
def pixbuf_new_from_xpm_data(xpm_data):
    # join the XPM rows and encode, since bytes() needs an encoding in py3
    xpm_data = '\n'.join(xpm_data).encode('utf-8')
return GdkPixbuf.Pixbuf.new_from_xpm_data(xpm_data)
def pixbuf_new_from_array(data, rgbtype, bpp):
# NOTE: there is a bug in gtk3 with pixbuf_new_from_array()
# See: http://stackoverflow.com/questions/24062779/how-to-correctly-covert-3d-array-into-continguous-rgb-bytes/24070152#24070152
#return GdkPixbuf.Pixbuf.new_from_array(data, rgbtype, bpp)
height, width, depth = data.shape
pixl = GdkPixbuf.PixbufLoader.new_with_type('pnm')
# P6 is the magic number of PNM format,
# and 255 is the max color allowed
pixl.write((b"P6 %d %d 255 " % (width, height)) + data.tobytes(order='C'))
pix = pixl.get_pixbuf()
pixl.close()
return pix
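# For example (a sketch; assumes a numpy uint8 array shaped (height, width, 3)):
#
#     import numpy as np
#     pb = pixbuf_new_from_array(np.zeros((32, 32, 3), dtype=np.uint8),
#                                GdkPixbuf.Colorspace.RGB, 8)
#
# The rgbtype/bpp arguments are kept for API compatibility; the PNM loader
# path above only consults the array itself.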
def pixbuf_new_from_data(rgb_buf, rgbtype, hasAlpha, bpp, dawd, daht, stride):
return GdkPixbuf.Pixbuf.new_from_data(rgb_buf, rgbtype, hasAlpha, bpp,
dawd, daht, stride, None, None)
def pixbuf_new_from_file_at_size(foldericon, width, height):
return GdkPixbuf.Pixbuf.new_from_file_at_size(foldericon, width, height)
def pixbuf_new_from_file(file_path):
return GdkPixbuf.Pixbuf.new_from_file(file_path)
def make_cursor(widget, iconpath, x, y):
image = Gtk.Image()
image.set_from_file(iconpath)
pixbuf = image.get_pixbuf()
screen = widget.get_screen()
display = screen.get_display()
return Gdk.Cursor(display, pixbuf, x, y)
def modify_bg(widget, color):
context = widget.get_style_context()
if color is not None:
context.add_class("custom_bg")
css_data = "*.custom_bg { background-image: none; background-color: %s; }" % (color)
css_provider = Gtk.CssProvider()
css_provider.load_from_data(css_data.encode())
context.add_provider(css_provider, Gtk.STYLE_PROVIDER_PRIORITY_USER)
else:
context.remove_class("custom_bg")
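# For example (a sketch): modify_bg(widget, "#404040") installs a one-off CSS
# provider giving the widget a dark background, while modify_bg(widget, None)
# removes the class again and restores the theme default.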
def set_default_style():
style_provider = Gtk.CssProvider()
module_home = os.path.split(sys.modules[__name__].__file__)[0]
gtk_css = os.path.join(module_home, 'gtk_css')
with open(gtk_css, 'rb') as css_f:
css_data = css_f.read()
try:
style_provider.load_from_data(css_data)
Gtk.StyleContext.add_provider_for_screen(
Gdk.Screen.get_default(), style_provider,
Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION
)
except Exception:
pass
# END
|
{
"content_hash": "7536517451531de58e5ca1005ae0dea2",
"timestamp": "",
"source": "github",
"line_count": 1788,
"max_line_length": 132,
"avg_line_length": 31.095078299776286,
"alnum_prop": 0.540217274002662,
"repo_name": "pllim/ginga",
"id": "f9adde7ecc876c2f551a9cf339a755bdb78d4250",
"size": "55752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ginga/gtk3w/GtkHelp.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2781"
},
{
"name": "GLSL",
"bytes": "7344"
},
{
"name": "HTML",
"bytes": "2129"
},
{
"name": "JavaScript",
"bytes": "87198"
},
{
"name": "Jupyter Notebook",
"bytes": "2691970"
},
{
"name": "Makefile",
"bytes": "85"
},
{
"name": "Python",
"bytes": "4359761"
}
],
"symlink_target": ""
}
|
from setuptools import setup
setup(
name='slacker',
version='0.7.3',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak',
author_email='oktaysancak@gmail.com',
url='http://github.com/os/slacker/',
install_requires=['requests >= 2.2.1'],
license='http://www.apache.org/licenses/LICENSE-2.0',
test_suite='tests',
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
),
keywords='slack api'
)
|
{
"content_hash": "6feb75885c63b977fa5433aa8a3ef6b7",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 61,
"avg_line_length": 31.74074074074074,
"alnum_prop": 0.6021003500583431,
"repo_name": "kashyap32/slacker",
"id": "b80d2b4e8d1f00937a9f28e257d8dbcbe5f80385",
"size": "857",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21783"
}
],
"symlink_target": ""
}
|
import os
import thread
import admin
try:
virtenv = os.environ.get('OPENSHIFT_PYTHON_DIR', '.') + '/virtenv/'
virtualenv = os.path.join(virtenv, 'bin/activate_this.py')
execfile(virtualenv, dict(__file__=virtualenv))
except IOError:
pass
#
# IMPORTANT: Put any additional includes below this line. If placed above this
# line, it's possible required libraries won't be in your searchable path
#
import so
from sdk.mcn import occi_ext
class MyBackend(occi_ext.Backend):
def create(self, entity, extras):
token = extras['token']
tenant = extras['tenant_name']
self.so = so.ServiceOrchestrator(token, tenant)
self.so.so_e.design()
entity.actions = [occi_ext.DEPLOY_ACTION]
def retrieve(self, entity, extras):
if occi_ext.DEPLOY_ACTION in entity.actions:
entity.attributes['occi.mcn.stack.state'] = 'uninitialized'
entity.attributes['occi.mcn.stack.id'] = 'N/A'
else:
state, stack_id, stack_output = self.so.so_e.state()
entity.attributes['occi.mcn.stack.state'] = state
entity.attributes['occi.mcn.stack.id'] = stack_id
if stack_output is not None:
for kv in stack_output:
entity.attributes[kv['output_key']] = kv['output_value']
def update(self, old, new, extras):
# TODO: attributes would need to be defined by a mixin.
old.attributes.update(new.attributes)
self.so.so_e.update(old, new, extras)
def delete(self, entity, extras):
self.so.so_e.dispose()
# Actions
def deploy_me(self, entity, attributes, extras):
self.so.so_e.deploy()
entity.actions = [occi_ext.PROVISION_ACTION]
def provision_me(self, entity, attributes, extras):
self.so.so_e.provision()
entity.actions = []
application = occi_ext.Application(MyBackend())
def server():
thread.start_new_thread(admin.server, ('0.0.0.0', 8081))
from wsgiref.simple_server import make_server
httpd = make_server('0.0.0.0', 8080, application)
httpd.serve_forever()
if __name__ == '__main__':
server()
# import requests
#
# token = ''
#
# heads = {'X-Auth-Token':token, 'X-Tenant-Name':'edmo', 'Content-Type':'text/occi', 'Accept':'text/occi'}
#
# r = requests.get('http://127.0.0.1:8051/-/', headers=heads)
#
# heads['Category']='orchestrator; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"'
# r = requests.put('http://127.0.0.1:8051/orchestrator/default', headers=heads)
#
# r = requests.get('http://127.0.0.1:8051/orchestrator/default', headers=heads)
#
# heads['Category']='deploy; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"'
# r = requests.post('http://127.0.0.1:8051/orchestrator/default?action=deploy', headers=heads)
#
# r = requests.get('http://127.0.0.1:8051/orchestrator/default', headers=heads); r.headers
#
# heads['Category']='provision; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"'
# r = requests.post('http://127.0.0.1:8051/orchestrator/default?action=provision', headers=heads)
#
# heads['X-OCCI-Attribute']='mcn.endpoint.mme-pgwc-sgwc="123.123.123.123"'
# r = requests.post('http://127.0.0.1:8051/orchestrator/default', headers=heads)
#
# r = requests.delete('http://127.0.0.1:8051/orchestrator/default', headers=heads)
|
{
"content_hash": "4693f50b001584d9b3c83374a46a4aaf",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 106,
"avg_line_length": 34.947916666666664,
"alnum_prop": 0.6560357675111773,
"repo_name": "icclab/hurtle_sample_so",
"id": "238da830cda384750a6a5a15ca69efbba3dc718d",
"size": "4100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wsgi/application.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "31379"
},
{
"name": "Shell",
"bytes": "1052"
}
],
"symlink_target": ""
}
|
import os
import tempfile
from kripodb.hdf5 import SimilarityMatrix
from kripodb.frozen import FrozenSimilarityMatrix
def tmpname():
tmpf = tempfile.NamedTemporaryFile()
out_file = tmpf.name
tmpf.close()
return out_file
class SimilarityMatrixInMemory(object):
def __init__(self):
self.matrix_fn = tmpname()
self.matrix = SimilarityMatrix(self.matrix_fn, 'a', driver='H5FD_CORE', driver_core_backing_store=0)
def __enter__(self):
return self.matrix
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.matrix.close()
if os.path.isfile(self.matrix_fn):
os.remove(self.matrix_fn)
class FrozenSimilarityMatrixInMemory(object):
def __init__(self):
self.matrix_fn = tmpname()
self.matrix = FrozenSimilarityMatrix(self.matrix_fn, 'a', driver='H5FD_CORE', driver_core_backing_store=0)
def __enter__(self):
return self.matrix
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.matrix.close()
if os.path.isfile(self.matrix_fn):
os.remove(self.matrix_fn)
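# A minimal usage sketch (the matrix API itself comes from kripodb.hdf5):
#
#     with SimilarityMatrixInMemory() as matrix:
#         ...  # exercise code that expects an open SimilarityMatrix
#
# Both helpers are context managers: on exit the HDF5 handle is closed and
# the temporary backing file, if one was written, is removed.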
|
{
"content_hash": "73de736ae47921b68d8cd82ea9b508e1",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 114,
"avg_line_length": 25.76086956521739,
"alnum_prop": 0.6354430379746835,
"repo_name": "3D-e-Chem/python-modified-tanimoto",
"id": "d2e258c924fce10b29b3519000d28285a5225e61",
"size": "1185",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "107247"
}
],
"symlink_target": ""
}
|
"""The data layer used during training to train a Fast R-CNN network.
RoIDataLayerPi implements a Caffe Python layer for reading in multiple images
into the blobs. This is useful for datasets like NYUD2 and UCF 101.
"""
import caffe
from fast_rcnn.config import cfg
from roi_data_layer.minibatch import get_minibatch
import numpy as np
import argparse, os
from multiprocessing import Process, Queue
from IPython.core.debugger import Tracer
class RoIDataLayerPi(caffe.Layer):
"""Fast R-CNN data layer used for training."""
def _shuffle_roidb_inds(self):
"""Randomly permute the training roidb."""
valid = []
for i,r in enumerate(self._roidb):
ov = r['max_overlaps'][:, np.newaxis]
has_fg = np.any(np.all(ov > cfg.TRAIN.FG_THRESH, axis = 1), axis = 0)
has_bg = np.any(np.all(np.hstack((ov > cfg.TRAIN.BG_THRESH_LO, ov < cfg.TRAIN.BG_THRESH_HI)), axis = 1), axis = 0)
if has_fg and has_bg:
valid.append(i)
pp = np.random.permutation(np.arange(len(self._roidb)))
pp = [a for a in pp if a in valid]
self._perm = pp
self._cur = 0
def _get_next_minibatch_inds(self):
"""Return the roidb indices for the next minibatch."""
if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._perm):
self._shuffle_roidb_inds()
db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH]
self._cur += cfg.TRAIN.IMS_PER_BATCH
return db_inds
def _get_next_minibatch(self):
"""Return the blobs to be used for the next minibatch.
If cfg.TRAIN.USE_PREFETCH is True, then blobs will be computed in a
separate process and made available through self._blob_queue.
"""
if cfg.TRAIN.USE_PREFETCH:
return self._blob_queue.get()
else:
db_inds = self._get_next_minibatch_inds()
minibatch_db = [self._roidb[i] for i in db_inds]
return get_minibatch(minibatch_db, self._num_classes, self._num_data)
def set_roidb(self, roidb):
"""Set the roidb to be used by this layer during training."""
self._roidb = roidb
self._shuffle_roidb_inds()
if cfg.TRAIN.USE_PREFETCH:
self._blob_queue = Queue(10)
self._prefetch_process = BlobFetcher(self._blob_queue,
self._roidb,
self._num_classes)
self._prefetch_process.start()
            # Terminate the child process when the parent exits
def cleanup():
print 'Terminating BlobFetcher'
self._prefetch_process.terminate()
self._prefetch_process.join()
import atexit
atexit.register(cleanup)
def _parse_args(self, str_arg):
parser = argparse.ArgumentParser(description='Python Layer Parameters Pi')
parser.add_argument('--num_classes', default=None, type=int)
parser.add_argument('--num_data', default=None, type=int)
args = parser.parse_args(str_arg.split())
return args
def setup(self, bottom, top):
"""Setup the RoIDataLayerPi."""
# parse the layer parameter string, which must be valid YAML
layer_params = self._parse_args(self.param_str)
self._num_classes = layer_params.num_classes
self._num_data = layer_params.num_data
        self._name_to_top_map = {'data': 0}
# data blob: holds a batch of N images, each with 3 channels
# The height and width (100 x 100) are dummy values
top[0].reshape(1, 6, 100, 100) # change to 6
for i in xrange(1, self._num_data):
            self._name_to_top_map['data_{:d}'.format(i)] = i
top[i].reshape(1, 3, 100, 100)
        self._name_to_top_map['rois'] = self._num_data
# rois blob: holds R regions of interest, each is a 5-tuple
# (n, x1, y1, x2, y2) specifying an image batch index n and a
# rectangle (x1, y1, x2, y2)
top[self._name_to_top_map['rois']].reshape(1, 5)
        self._name_to_top_map['labels'] = self._num_data + 1
# labels blob: R categorical labels in [0, ..., K] for K foreground
# classes plus background
top[self._name_to_top_map['labels']].reshape(1)
if cfg.TRAIN.BBOX_REG:
self._name_to_top_map['bbox_targets'] = self._num_data + 2
# bbox_targets blob: R bounding-box regression targets with 4
# targets per class
top[self._name_to_top_map['bbox_targets']].reshape(1, self._num_classes * 4)
self._name_to_top_map['bbox_loss_weights'] = self._num_data + 3
# bbox_loss_weights blob: At most 4 targets per roi are active;
            # this binary vector specifies the subset of active targets
top[self._name_to_top_map['bbox_loss_weights']].reshape(1, self._num_classes * 4)
def forward(self, bottom, top):
"""Get blobs and copy them into this layer's top blob vector."""
blobs = self._get_next_minibatch()
for blob_name, blob in blobs.iteritems():
top_ind = self._name_to_top_map[blob_name]
# Reshape net's input blobs
top[top_ind].reshape(*(blob.shape))
# Copy data into net's input blobs
top[top_ind].data[...] = blob.astype(np.float32, copy=False)
def backward(self, top, propagate_down, bottom):
"""This layer does not propagate gradients."""
pass
def reshape(self, bottom, top):
"""Reshaping happens during the call to forward."""
pass
class BlobFetcher(Process):
"""Experimental class for prefetching blobs in a separate process."""
def __init__(self, queue, roidb, num_classes):
super(BlobFetcher, self).__init__()
self._queue = queue
self._roidb = roidb
self._num_classes = num_classes
self._perm = None
self._cur = 0
self._shuffle_roidb_inds()
# fix the random seed for reproducibility
np.random.seed(cfg.RNG_SEED)
def _shuffle_roidb_inds(self):
"""Randomly permute the training roidb."""
# TODO(rbg): remove duplicated code
self._perm = np.random.permutation(np.arange(len(self._roidb)))
self._cur = 0
def _get_next_minibatch_inds(self):
"""Return the roidb indices for the next minibatch."""
# TODO(rbg): remove duplicated code
if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb):
self._shuffle_roidb_inds()
db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH]
self._cur += cfg.TRAIN.IMS_PER_BATCH
return db_inds
def run(self):
print 'BlobFetcher started'
while True:
db_inds = self._get_next_minibatch_inds()
minibatch_db = [self._roidb[i] for i in db_inds]
blobs = get_minibatch(minibatch_db, self._num_classes)
self._queue.put(blobs)
|
{
"content_hash": "7df940d81ee883d450097362997e480b",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 126,
"avg_line_length": 40.38857142857143,
"alnum_prop": 0.5922467458970005,
"repo_name": "xiaolonw/fast-rcnn-distillation",
"id": "0caf1ab2d7d59b2842eb2d0632bbb463ab843a95",
"size": "7317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/roi_data_layer/layer_pi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "56"
},
{
"name": "Matlab",
"bytes": "10660"
},
{
"name": "Python",
"bytes": "112630"
},
{
"name": "Shell",
"bytes": "9009"
}
],
"symlink_target": ""
}
|
import os
import MySQLdb
import subprocess
from time import sleep
from celery.task import task
from MySQLdb import escape_string
from config import COMPILE_DIR
from config import TESTDATA_DIR
from config import CARETAKER_PATH
from config import MYSQL_DB
from config import MYSQL_HOST
from config import MYSQL_PORT
from config import MYSQL_USER
from config import MYSQL_PASS
STATUS_DICT = {
"A" : 1,
"W" : 2,
"T" : 3,
"M" : 4,
"R" : 5,
"C" : 6,
"N" : 7,
}
_e = lambda a: escape_string(str(a))
def _clean(dir_path):
for root, dirs, files in os.walk(dir_path):
for name in files:
os.remove(os.path.join(root, name))
def _compile_cmd(lang, filename):
lang = int(lang)
if lang == 1:
return "fpc %s -So -XS -v0 -O1 -o\"a.out\"" % filename
elif lang == 2:
return "gcc %s -lm -w -static -o a.out" % filename
elif lang == 3:
return "g++ %s -lm -static -o a.out" % filename
def _compile(result, query):
_clean(COMPILE_DIR)
os.chdir(COMPILE_DIR)
with open(COMPILE_DIR + query['filename'], "w+") as code:
code.write(query['code'].encode("utf-8"))
cmd = " ".join(["timeout 30", _compile_cmd(query['lang'], query['filename'])])
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(stdoutput,erroutput) = proc.communicate()
result['msg'] = "".join(["STDOUT:\n--------\n", stdoutput, "\n--------\nSTDERR\n--------\n", erroutput])
result['cmd'] = cmd
if proc.returncode != 0:
result["compilesucc"] = 0
else:
result["compilesucc"] = 1
return result
def _get_input_file(tp, shortname):
with open(os.path.join(TESTDATA_DIR, shortname, shortname + str(tp+1) + ".in")) as input_data_p:
input_data = input_data_p.read()
with open(os.path.join(COMPILE_DIR, shortname + ".in"), "w") as input_data_p:
input_data_p.write(input_data)
def _run(result, query, tp):
shortname = query["shortname"]
os.chdir(COMPILE_DIR)
cmd = " ".join([CARETAKER_PATH, "--input=%s.in" % query["shortname"], \
"--output=%s.out" % query["shortname"], \
"--time=%s" % str(query["timelimit"]), \
"--memory=%s" % str(query["memlimit"] * 1024), \
"a.out"])
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(stdoutput,erroutput) = proc.communicate()
testresult = ""
wexit = 0
time = 0
memory = 0
if proc.returncode:
if proc.returncode == 251:
testresult = "T"
elif proc.returncode == 252:
testresult = "M"
elif proc.returncode == 253:
testresult = "R"
else:
wexit, time, memory = stdoutput.split("\n")[-2].split(" ")
wexit = int(wexit)
time = int(time)
memory = int(memory)
if wexit != 0:
testresult = "W"
else:
# cmp output
with open(os.path.join(TESTDATA_DIR, shortname, shortname + str(tp+1) + ".ans")) as answer_fp:
answer = answer_fp.read()
try:
with open(os.path.join(COMPILE_DIR, shortname + ".out")) as prg_answer_fp:
prg_answer = prg_answer_fp.read()
except IOError:
testresult = "N"
else:
if _compare(answer, prg_answer):
testresult = "A"
result["score"] = result["score"] + 10
else:
testresult = "W"
result["testpoint"].append((testresult, time, memory))
def _compare(fout, fans):
    # compare line by line; iterating a str directly would yield characters
    out = [line.strip() for line in fout.splitlines() if line.strip()]
    ans = [line.strip() for line in fans.splitlines() if line.strip()]
    return ans == out
def _get_status(testpoint):
for tp in testpoint:
if tp != "A":
return STATUS_DICT[tp]
return STATUS_DICT["A"]
def _return_result(result, query):
# connect to server
conn = MySQLdb.connect(host = MYSQL_HOST, user = MYSQL_USER, passwd = MYSQL_PASS, db = MYSQL_DB)
cur = conn.cursor()
testpoint = ""
timecost = ""
memorycost = ""
totaltime = 0
totalmemory = 0
if result["compilesucc"] == 0:
status = 6
else:
# update
testpoint = []
timecost = []
memorycost = []
totaltime = 0
totalmemory = 0
for tp in result["testpoint"]:
testpoint.append(tp[0])
timecost.append(str(tp[1]))
memorycost.append(str(tp[2]))
totaltime = totaltime + tp[1]
totalmemory = totalmemory + tp[2]
testpoint = "".join(testpoint)
timecost = ",".join(timecost)
memorycost = ",".join(memorycost)
status = _get_status(testpoint)
cur.execute("""UPDATE `submit` SET `status` = %s,
`testpoint` = %s,
`testpoint_time` = %s,
`testpoint_memory` = %s,
`score` = %s,
`costtime` = %s,
`costmemory` = %s,
`msg` = %s
WHERE `id` = %s """, (_e(status), _e(testpoint), _e(timecost), _e(memorycost), \
_e(result["score"]), _e(totaltime), _e(totalmemory), \
_e(result["msg"]), _e(query["id"])))
conn.commit()
cur.close()
'''
Query String Format:
id - Required. Submit id.
code - Required. Code file content.
lang - Required. Code language. ( 1 - Pascal, 2 - C, 3 - C++ )
filename - Required. Code file filename.
shortname - Required. Problem shortname.
timelimit - Required. Problem time limit. (ms)
memlimit - Required. Problem memory limit. (MB)
testpoint - Required. Problem testpoint number.
sign - Signature.
time - For signature.
'''
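# An illustrative query dict (a sketch; the values are hypothetical):
#
#     query = {
#         "id": 42, "lang": 3, "filename": "main.cpp",
#         "code": "int main() { return 0; }",
#         "shortname": "aplusb", "timelimit": 1000, "memlimit": 128,
#         "testpoint": 10,
#     }
#     judge.delay(query)  # enqueue on the celery worker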
@task
def judge(query):
result = {}
_compile(result, query)
if result["compilesucc"] == 0:
result["score"] = 0
_return_result(result, query)
result["testpoint"] = []
result["score"] = 0
if result["compilesucc"]:
for tp in range(query["testpoint"]):
_get_input_file(tp, query["shortname"])
_run(result, query, tp)
_return_result(result, query)
return 0
|
{
"content_hash": "2bec0558a39828301b0d4c585641cd86",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 115,
"avg_line_length": 33.891752577319586,
"alnum_prop": 0.524106463878327,
"repo_name": "ptphp/PyLib",
"id": "c2393146ba5969398475c37fc354da318f3af05c",
"size": "6726",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/tornado/demos/Vulpix-master/daemons/tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1523"
},
{
"name": "C++",
"bytes": "7541"
},
{
"name": "CSS",
"bytes": "625731"
},
{
"name": "JavaScript",
"bytes": "4811257"
},
{
"name": "PHP",
"bytes": "34868"
},
{
"name": "Python",
"bytes": "3824172"
},
{
"name": "Ruby",
"bytes": "322"
},
{
"name": "SQL",
"bytes": "685656"
},
{
"name": "Shell",
"bytes": "4143"
}
],
"symlink_target": ""
}
|
"""A non-blocking, single-threaded HTTP server.
Typical applications have little direct interaction with the `HTTPConnection`
class, which is the HTTP parser executed on incoming connections.
It is a protocol class that inherits Twisted's `LineReceiver
<http://twistedmatrix.com/documents/current/api/
twisted.protocols.basic.LineReceiver.html>`_, and is usually created by
`cyclone.web.Application`, our connection factory.
This module also defines the `HTTPRequest` class which is exposed via
`cyclone.web.RequestHandler.request`.
"""
from __future__ import absolute_import, division, with_statement
from http import cookies as http_cookies
import socket
import time
from io import BytesIO as StringIO
from tempfile import TemporaryFile
from twisted.python import log
from twisted.protocols import basic
from twisted.internet import address
from twisted.internet import defer
from twisted.internet import interfaces
from cyclone.escape import utf8, native_str, parse_qs_bytes, to_unicode
from cyclone import httputil
from cyclone.util import bytes_type
class _BadRequestException(Exception):
"""Exception class for malformed HTTP requests."""
pass
class HTTPConnection(basic.LineReceiver):
"""Handles a connection to an HTTP client, executing HTTP requests.
We parse HTTP headers and bodies, and execute the request callback
until the HTTP connection is closed.
If ``xheaders`` is ``True``, we support the ``X-Real-Ip`` and ``X-Scheme``
headers, which override the remote IP and HTTP scheme for all requests.
These headers are useful when running Tornado behind a reverse proxy or
load balancer.
"""
def connectionMade(self):
self._headersbuffer = []
self._contentbuffer = None
self._finish_callback = None
self.no_keep_alive = False
self.content_length = None
self.request_callback = self.factory
self.xheaders = self.factory.settings.get('xheaders', False)
self._request = None
self._request_finished = False
def connectionLost(self, reason):
if self._finish_callback:
self._finish_callback.callback(reason.getErrorMessage())
self._finish_callback = None
def notifyFinish(self):
if self._finish_callback is None:
self._finish_callback = defer.Deferred()
return self._finish_callback
def lineReceived(self, line):
if line:
self._headersbuffer.append(line + self.delimiter)
else:
buff = b"".join(self._headersbuffer)
self._headersbuffer = []
self._on_headers(buff)
def rawDataReceived(self, data):
if self.content_length is not None:
data, rest = data[:self.content_length], data[self.content_length:]
self.content_length -= len(data)
else:
rest = b''
self._contentbuffer.write(data)
if self.content_length == 0:
self._contentbuffer.seek(0, 0)
self._on_request_body(self._contentbuffer.read())
self.content_length = self._contentbuffer = None
self.setLineMode(rest)
def write(self, chunk):
assert self._request, "Request closed"
self.transport.write(chunk)
def finish(self):
assert self._request, "Request closed"
self._request_finished = True
self._finish_request()
def _on_write_complete(self):
if self._request_finished:
self._finish_request()
def _finish_request(self):
if self.no_keep_alive:
disconnect = True
else:
connection_header = self._request.headers.get("Connection")
if self._request.supports_http_1_1():
disconnect = connection_header == "close"
elif ("Content-Length" in self._request.headers
or self._request.method in ("HEAD", "GET")):
disconnect = connection_header != "Keep-Alive"
else:
disconnect = True
if self._finish_callback:
self._finish_callback.callback(None)
self._finish_callback = None
self._request = None
self._request_finished = False
if disconnect is True:
self.transport.loseConnection()
def _on_headers(self, data):
try:
eol = data.find(b"\r\n")
start_line = data[:eol]
try:
method, uri, version = start_line.split(b" ")
except ValueError:
raise _BadRequestException("Malformed HTTP request line")
if not version.startswith(b"HTTP/"):
raise _BadRequestException("Malformed HTTP version in HTTP Request-Line")
try:
headers = httputil.HTTPHeaders.parse(to_unicode(data[eol:]))
content_length = int(headers.get("Content-Length", 0))
except ValueError:
raise _BadRequestException("Malformed HTTP headers")
self._request = HTTPRequest(
connection=self, method=to_unicode(method), uri=to_unicode(uri),
version=to_unicode(version),
headers=headers, remote_ip=to_unicode(self._remote_ip))
if content_length:
if headers.get("Expect") == "100-continue":
self.transport.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
if content_length < 100000:
self._contentbuffer = StringIO()
else:
self._contentbuffer = TemporaryFile()
self.content_length = content_length
self.setRawMode()
return
self.request_callback(self._request)
except _BadRequestException as e:
log.msg("Malformed HTTP request from %s: %s", self._remote_ip, e)
self.transport.loseConnection()
def _on_request_body(self, data):
self._request.body = data
content_type = self._request.headers.get("Content-Type", "")
if self._request.method in ("POST", "PATCH", "PUT"):
if content_type.startswith("application/x-www-form-urlencoded"):
arguments = parse_qs_bytes(native_str(self._request.body))
for name, values in arguments.items():
values = [v for v in values if v]
if values:
self._request.arguments.setdefault(name,
[]).extend(values)
elif content_type.startswith("multipart/form-data"):
fields = content_type.split(";")
for field in fields:
                    k, sep, v = field.strip().partition("=")
if k == "boundary" and v:
httputil.parse_multipart_form_data(
utf8(v), data,
self._request.arguments,
self._request.files)
break
else:
log.msg("Invalid multipart/form-data")
self.request_callback(self._request)
@property
def _remote_ip(self):
peer = self.transport.getPeer()
if isinstance(peer, address.UNIXAddress):
remote_ip = "unix:%s" % self.transport.getHost().name
else:
remote_ip = self.transport.getPeer().host
return remote_ip
class HTTPRequest(object):
"""A single HTTP request.
All attributes are type `str` unless otherwise noted.
.. attribute:: method
HTTP request method, e.g. "GET" or "POST"
.. attribute:: uri
The requested uri.
.. attribute:: path
The path portion of `uri`
.. attribute:: query
The query portion of `uri`
.. attribute:: version
HTTP version specified in request, e.g. "HTTP/1.1"
.. attribute:: headers
`HTTPHeader` dictionary-like object for request headers. Acts like
a case-insensitive dictionary with additional methods for repeated
headers.
.. attribute:: body
Request body, if present, as a byte string.
.. attribute:: remote_ip
Client's IP address as a string. If `HTTPConnection.xheaders` is set,
will pass along the real IP address provided by a load balancer
in the ``X-Real-Ip`` header
.. attribute:: protocol
The protocol used, either "http" or "https".
If `HTTPConnection.xheaders` is set, will pass along the protocol used
by a load balancer if
reported via an ``X-Scheme`` header.
.. attribute:: host
The requested hostname, usually taken from the ``Host`` header.
.. attribute:: arguments
GET/POST arguments are available in the arguments property, which
maps arguments names to lists of values (to support multiple values
for individual names). Names are of type `str`, while arguments
are byte strings. Note that this is different from
`RequestHandler.get_argument`, which returns argument values as
unicode strings.
.. attribute:: files
File uploads are available in the files property, which maps file
names to lists of :class:`HTTPFile`.
.. attribute:: connection
An HTTP request is attached to a single HTTP connection, which can
be accessed through the "connection" attribute. Since connections
are typically kept open in HTTP/1.1, multiple requests can be handled
sequentially on a single connection.
"""
def __init__(self, method, uri, version="HTTP/1.0", headers=None,
body=None, remote_ip=None, protocol=None, host=None,
files=None, connection=None):
self.method = method
self.uri = uri
self.version = version
self.headers = headers or httputil.HTTPHeaders()
self.body = body or b""
if connection and connection.xheaders:
# Squid uses X-Forwarded-For, others use X-Real-Ip
self.remote_ip = self.headers.get(
"X-Real-Ip", self.headers.get("X-Forwarded-For", remote_ip))
if not self._valid_ip(self.remote_ip):
self.remote_ip = remote_ip
# AWS uses X-Forwarded-Proto
self.protocol = self.headers.get(
"X-Scheme",
self.headers.get("X-Forwarded-Proto", protocol))
if self.protocol not in ("http", "https"):
self.protocol = "http"
else:
self.remote_ip = remote_ip
if connection and interfaces.ISSLTransport.providedBy(
connection.transport):
self.protocol = "https"
else:
self.protocol = "http"
self.host = host or self.headers.get("Host") or "127.0.0.1"
self.files = files or {}
self.connection = connection
self._start_time = time.time()
self._finish_time = None
self.path, sep, self.query = uri.partition("?")
self.arguments = parse_qs_bytes(self.query, keep_blank_values=True)
def supports_http_1_1(self):
"""Returns True if this request supports HTTP/1.1 semantics"""
return self.version == "HTTP/1.1"
@property
def cookies(self):
"""A dictionary of Cookie.Morsel objects."""
if not hasattr(self, "_cookies"):
self._cookies = http_cookies.SimpleCookie()
if "Cookie" in self.headers:
try:
self._cookies.load(native_str(self.headers["Cookie"]))
except Exception:
self._cookies = {}
return self._cookies
def write(self, chunk):
"""Writes the given chunk to the response stream."""
assert isinstance(chunk, bytes_type)
self.connection.write(chunk)
def finish(self):
"""Finishes this HTTP request on the open connection."""
self.connection.finish()
self._finish_time = time.time()
def full_url(self):
"""Reconstructs the full URL for this request."""
return self.protocol + "://" + self.host + self.uri
def request_time(self):
"""Returns the amount of time it took for this request to execute."""
if self._finish_time is None:
return time.time() - self._start_time
else:
return self._finish_time - self._start_time
def notifyFinish(self):
"""Returns a Deferred object, which is fired when the request is
finished and the connection is closed.
"""
return self.connection.notifyFinish()
def __repr__(self):
attrs = ("protocol", "host", "method", "uri", "version", "remote_ip",
"body")
args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs])
return "%s(%s, headers=%s)" % (
self.__class__.__name__, args, dict(self.headers))
def _valid_ip(self, ip):
try:
res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC,
socket.SOCK_STREAM,
0, socket.AI_NUMERICHOST)
return bool(res)
except socket.gaierror as e:
if e.args[0] == socket.EAI_NONAME:
return False
raise
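# A small stand-alone illustration of HTTPRequest (a sketch, not part of the
# module's API surface):
#
#     req = HTTPRequest("GET", "/search?q=x", version="HTTP/1.1",
#                       remote_ip="127.0.0.1", host="example.com")
#     req.path                  # "/search"
#     req.query                 # "q=x"
#     req.full_url()            # "http://example.com/search?q=x"
#     req.supports_http_1_1()   # True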
|
{
"content_hash": "405111bdef5783e3fd5df3140e82b198",
"timestamp": "",
"source": "github",
"line_count": 368,
"max_line_length": 89,
"avg_line_length": 36.35597826086956,
"alnum_prop": 0.5906271021750504,
"repo_name": "fiorix/cyclone",
"id": "23a3482a84b775bfe5eceec5679df9f9ffc19bda",
"size": "14021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cyclone/httpserver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2386"
},
{
"name": "HTML",
"bytes": "32384"
},
{
"name": "Makefile",
"bytes": "642"
},
{
"name": "Python",
"bytes": "518718"
},
{
"name": "Shell",
"bytes": "9517"
}
],
"symlink_target": ""
}
|
"""
Copyright (C) 2013-2014 TopCoder Inc., All Rights Reserved.
This module defines the web controller classes by using cherrypy.
This module resides in Python source file httpservices.py
Thread Safety:
The implementation should be thread safe.
v1.1 - Healthcare Fraud Prevention Release Assembly v1.0
- updated for added StudyID in data request handler
- updated for analysis result handler
@author: TCSASSEMBLER
@version: 1.1
"""
import os, json, base64, zlib, urllib.request, urllib.error, tempfile, mmap, logging
import cherrypy
import isodate
from threading import Thread
from datetime import datetime
from datetime import timezone
import xml.etree.ElementTree as ET
from errors import PartnerClientError
from settings import HFPP_NODE_HTTP_SERVICE_BASE_URL
from settings import HFPP_PARTNER_USERNAME
from settings import HFPP_PARTNER_PASSWORD
from settings import CA_CERTIFICATE_FILE
from settings import PARTNER_IMMEDIATE_FULLFIL
from settings import STUDY_REPORT_DIRECTORY
from datafulfillment import can_fulfill_data_request
from dataappliance import query_data
from dataconversion import convert_data
from logginghelper import method_enter
from logginghelper import method_exit
from logginghelper import method_error
from validationhelper import check_string
from validationhelper import check_datetime
from validationhelper import check_bool
class DataRequestHandler:
"""
DataRequestHandler class defines the contract to handle a received data request.
This class resides in Python source file httpservices.py
Thread Safety:
This class is thread safe because it is immutable.
@author: TCSASSEMBLER
@version: 1.0
"""
def handle_data_request(self,request_id, study_id, query,expiration_time,
cache_available=False,cache_timestamp=None,
force_fullfil=False):
"""
This method is used to handle data request.
This method will not throw exceptions. Any error should be caught and logged.
@param self the DataRequestHandler itself, it should be DataRequestHandler
@param request_id the request ID,it is supposed to be a non-None/empty str. Required.
@param study_id the study ID,it is supposed to be a non-None/empty str. Required.
@param query the query string,it is supposed to be a non-None/empty str. Required.
@param expiration_time the request expiration time,it is supposed to be a non-None datetime. Required.
@param cache_available whether cache is available,it is supposed to be a bool. Optional, default to False.
@param cache_timestamp the cache timestamp,it is supposed to be a datetime. Optional, default to None.
@param force_fullfil this parameter is set to True when this method is called by decision module.
"""
signature='hfppnetwork.partner.httpservices.DataRequestHandler.handle_data_request'
method_enter(signature,{
"self":self,
"request_id":request_id,
"study_id":study_id,
"query":query,
"expiration_time":expiration_time,
"cache_available":cache_available,
"cache_timestamp":cache_timestamp
})
        # Dictionaries and paths cleaned up in the finally clause; initialized
        # before the try block so cleanup is safe even if validation fails early
        query_result_file_names = {}
        conversion_result_file_names = {}
        query_dict = response_xml_file_name = compressed_file_name = None
try:
#check input arguments
check_string("request_id",request_id)
check_string("study_id",study_id)
check_string("query",query)
check_datetime("expiration_time",expiration_time)
check_bool("cache_available",cache_available)
if cache_timestamp is not None:
check_datetime("cache_timestamp",cache_timestamp)
# Parse the query string
try:
query_dict = json.loads(query)
except ValueError as e:
query_dict = None
method_error(signature, e)
# Check if we can fulfill the data request
can_fulfill_request = can_fulfill_data_request(request_id, study_id, query,
expiration_time, cache_available,
cache_timestamp, force_fullfil)
# Dictionary to hold data conversion result file names
conversion_result_file_names = {}
# Data Response XML file name
response_xml_file_name = None
# Compressed file name
compressed_file_name = None
logging.debug('%s:%s', 'can_fulfill_request', can_fulfill_request)
#can_fulfill_request
if query_dict is not None and 'file_types' in query_dict \
and 'logical_expressions' in query_dict and can_fulfill_request:
# Can fulfill the request, create temporary files
for file_type in query_dict['file_types']:
query_result_file_names[file_type] = tempfile.NamedTemporaryFile(delete=False).name
conversion_result_file_names[file_type] = tempfile.NamedTemporaryFile(delete=False).name
response_xml_file_name = tempfile.NamedTemporaryFile(delete=False).name
compressed_file_name = tempfile.NamedTemporaryFile(delete=False).name
# Query data
use_cache = query_data(query_dict['file_types'], query_dict['logical_expressions'],
query_result_file_names,
cache_timestamp if cache_available else None)
with open(response_xml_file_name, 'ab') as response_xml_file:
# Write XML
xml = '<?xml version="1.0" encoding="utf-8"?>' \
'<DataResponse>' \
'<RequestID>{request_id}</RequestID>' \
'<RequestDenied>false</RequestDenied>' \
'<ErrorMessage></ErrorMessage>' \
'<Data useCache="{use_cache}"><![CDATA['.\
format(request_id=request_id, use_cache='true' if use_cache else 'false')
response_xml_file.write(xml.encode('utf-8'))
if not use_cache:
logging.debug('not use cache will use result from converted data')
# Convert data
for file_type in query_dict['file_types']:
convert_data(file_type, query_result_file_names[file_type],
conversion_result_file_names[file_type])
# Aggregate and compress data
compressor = zlib.compressobj(level=9)
with open(compressed_file_name, 'wb') as out_file:
for file_type in query_dict['file_types']:
with open(conversion_result_file_names[file_type], 'rb') as in_file:
out_file.write(compressor.compress(in_file.read()))
out_file.write(compressor.flush())
# Encode in Base64
with open(compressed_file_name, 'rb') as in_file:
base64.encode(in_file, response_xml_file)
# Write XML
response_xml_file.write(']]></Data></DataResponse>'.encode('utf-8'))
# POST XML to Network Node /data_response service
if datetime.now(timezone.utc) < expiration_time:
logging.debug('post to data response url %s%s',
HFPP_NODE_HTTP_SERVICE_BASE_URL ,'/data_response')
# Only POST the XML if the request has not been expired
request = urllib.request.Request(HFPP_NODE_HTTP_SERVICE_BASE_URL + '/data_response')
request.add_header('Content-Type','application/xml;charset=utf-8')
request.add_header('x-hfpp-username', HFPP_PARTNER_USERNAME)
request.add_header('x-hfpp-password', HFPP_PARTNER_PASSWORD)
if response_xml_file_name is not None and can_fulfill_request:
with open(response_xml_file_name, 'rb') as in_file,\
mmap.mmap(in_file.fileno(), 0, access=mmap.ACCESS_READ) as data_response_xml:
try:
resp = urllib.request.urlopen(request, data_response_xml,
cafile=CA_CERTIFICATE_FILE, cadefault=True)
# Parse response XML
resp_content = resp.read().decode('utf-8')
logging.debug('response code:%s',resp.getcode())
logging.debug('response:%s',resp_content)
except urllib.error.HTTPError as e:
method_error(signature, e)
self._handle_error_response(e)
else:
data_response_xml = '<?xml version="1.0" encoding="utf-8"?>' \
'<DataResponse>' \
'<RequestID>{request_id}</RequestID>' \
'<RequestDenied>true</RequestDenied>' \
'<ErrorMessage>{waitApproval}</ErrorMessage>' \
'<Data></Data>' \
'</DataResponse>'.format(request_id=request_id,
waitApproval=('' if PARTNER_IMMEDIATE_FULLFIL else 'Waiting Approval'))
logging.debug('post data response xml %s', data_response_xml)
try:
resp = urllib.request.urlopen(request, data_response_xml.encode('utf-8'),
cafile=CA_CERTIFICATE_FILE, cadefault=True)
# Parse response XML
resp_content = resp.read().decode('utf-8')
logging.debug('response code:%s',resp.getcode())
logging.debug('response:%s',resp_content)
except urllib.error.HTTPError as e:
method_error(signature, e)
self._handle_error_response(e)
else:
# Request expired, log error
logging.error('Request expired')
method_exit(signature)
except Exception as e:
# log error
method_error(signature, e)
finally:
if query_dict is not None and 'file_types' in query_dict:
# Remove temporary files
for file_type in query_dict['file_types']:
if file_type in query_result_file_names:
self._remove_file(query_result_file_names[file_type])
if file_type in conversion_result_file_names:
self._remove_file(conversion_result_file_names[file_type])
self._remove_file(compressed_file_name)
self._remove_file(response_xml_file_name)
def _remove_file(self,file_name):
"""
This method is used to remove file.
@param self the DataRequestHandler itself, it should be DataRequestHandler
@param file_name the file name,it is supposed to be a str, can be None/empty.
@throw Exception Any error should be raised to caller.
"""
signature='hfppnetwork.partner.httpservices.DataRequestHandler._remove_file'
method_enter(signature,{
"self":self,
"file_name":file_name
})
try:
if file_name is not None:
check_string("file_name",file_name)
if file_name and os.path.exists(file_name):
os.remove(file_name)
method_exit(signature)
except Exception as e:
method_error(signature, e)
def _handle_error_response(self,e):
"""
This method is used to handle http error.
@param self the DataRequestHandler itself, it should be DataRequestHandler
@param e the http error.
@throw Exception Any error should be raised to caller.
"""
logging.error('http error code:%s',e.code)
# Not succeeded
# 400, 401, 403 or 500
resp_content = e.read().decode('utf-8')
logging.error('error response:%s',resp_content)
root = ET.fromstring(resp_content)
error_code = root.findtext('./ErrorCode')
error_message = root.findtext('./ErrorMessage')
# Log error code and error message
logging.error('error code:%s',error_code)
logging.error('error message:%s',error_message)
def handle_deny_operation(request_id):
'''
Handle the manually deny operation.
@param request_id: The request_id to response to.
'''
logging.debug('post to data response url %s%s',
HFPP_NODE_HTTP_SERVICE_BASE_URL ,'/data_response')
request = urllib.request.Request(HFPP_NODE_HTTP_SERVICE_BASE_URL + '/data_response')
request.add_header('Content-Type','application/xml;charset=utf-8')
request.add_header('x-hfpp-username', HFPP_PARTNER_USERNAME)
request.add_header('x-hfpp-password', HFPP_PARTNER_PASSWORD)
data_response_xml = '<?xml version="1.0" encoding="utf-8"?>' \
'<DataResponse>' \
'<RequestID>{request_id}</RequestID>' \
'<RequestDenied>true</RequestDenied>' \
'<ErrorMessage>This request was denied manually.</ErrorMessage>' \
'<Data></Data>' \
'</DataResponse>'.format(request_id=request_id[0])
logging.debug('post data response xml %s', data_response_xml)
try:
resp = urllib.request.urlopen(request, data_response_xml.encode('utf-8'),
cafile=CA_CERTIFICATE_FILE, cadefault=True)
# Parse response XML
resp_content = resp.read().decode('utf-8')
logging.debug('response code:%s',resp.getcode())
logging.debug('response:%s',resp_content)
except urllib.error.HTTPError as e:
        logging.exception(e)
class AnalysisResultHandler():
"""
AnalysisResultHandler class defines the contract to handle a received analysis result request.
This class resides in Python source file httpservices.py
Thread Safety:
This class is thread safe because it is immutable.
@author: TCSASSEMBLER
@version: 1.0
@since Healthcare Fraud Prevention Release Assembly v1.0
"""
def handle_analysis_result(self, request_id, study_id, result):
"""
This method is used to handle analysis result.
This method will not throw exceptions. Any error should be caught and logged.
@param self the AnalysisResultHandler itself, it should be AnalysisResultHandler
@param request_id the request ID,it is supposed to be a non-None/empty str. Required.
@param study_id the study ID,it is supposed to be a non-None/empty str. Required.
@param result the analysis result. Required
"""
signature='hfppnetwork.partner.httpservices.AnalysisResultHandler.handle_analysis_result'
method_enter(signature,{
"self":self,
"request_id":request_id,
"result":result
})
uncompressed_file = tempfile.NamedTemporaryFile(delete=False).name
try:
#check input arguments
check_string("request_id",request_id)
check_string("study_id", study_id)
check_string("result",result)
decoded_data = zlib.decompress(base64.b64decode(result))
file_name = STUDY_REPORT_DIRECTORY + "/" + study_id + ".xlsx"
with open(file_name, "wb") as out_file:
out_file.write(decoded_data)
method_exit(signature)
except Exception as e:
# log error
method_error(signature, e)
finally:
os.remove(uncompressed_file)
class PartnerHTTPServices:
"""
PartnerHTTPServices class defines the CherryPy handler to serve Partner Client HTTP services.
This class resides in Python source file httpservices.py
Thread Safety:
This class is thread safe because it is immutable.
    CherryPy makes use of thread-local data for HTTP request/response data, hence the use of the CherryPy module is safe.
v1.1 - Healthcare Fraud Prevention Release Assembly v1.0
- added method analysis_result
@author: TCSASSEMBLER
@version: 1.1
"""
@cherrypy.expose()
@cherrypy.tools.allow(methods=['POST'])
@cherrypy.tools.accept(media=['application/xml'])
def data_request(self):
"""
This method is used to serve data request partner client http service.
@param self the PartnerHTTPServices itself, it should be PartnerHTTPServices
@throws PartnerClientError throws if request body is empty
@throws Exception any error should be raised to caller.
CherryPy will handle the error and translate to HTTP code 500 (refer to partnercli#handle_error)
"""
signature='hfppnetwork.partner.httpservices.PartnerHTTPServices.data_request'
method_enter(signature,{"self":self})
# Read the data request XML
request_body = cherrypy.request.body.read().decode("utf-8")
logging.debug('%s:%s', 'request_body', request_body)
if len(request_body)==0:
raise PartnerClientError("request body can not be empty")
# Parse data request XML
root = ET.fromstring(request_body)
request_id = root.findtext('./RequestID')
study_id = root.findtext('./StudyID')
query = root.findtext('./Query')
expiration_time = isodate.parse_datetime(root.findtext('./ExpirationTime'))
#CacheAvailable and CacheTimestamp could not exist
cache_available = 'true' == root.findtext('./CacheAvailable')
cache_timestamp = None
if root.findtext('./CacheTimestamp'):
cache_timestamp =isodate.parse_datetime(root.findtext('./CacheTimestamp'))
# Kick off a new thread to handle the request
handler = DataRequestHandler()
t = Thread(target=handler.handle_data_request, args=(request_id, study_id, query,
expiration_time, cache_available, cache_timestamp,))
t.daemon = False
t.start()
method_exit(signature)
@cherrypy.expose()
@cherrypy.tools.allow(methods=['POST'])
@cherrypy.tools.accept(media=['application/xml'])
def analysis_result(self):
"""
This method is used to serve analysis result request partner client http service.
@param self the PartnerHTTPServices itself, it should be PartnerHTTPServices
@throws PartnerClientError throws if request body is empty
@throws Exception any error should be raised to caller.
CherryPy will handle the error and translate to HTTP code 500 (refer to partnercli#handle_error)
"""
signature='hfppnetwork.partner.httpservices.PartnerHTTPServices.analysis_result'
method_enter(signature,{"self":self})
# Read the data request XML
request_body = cherrypy.request.body.read().decode("utf-8")
logging.debug('%s:%s', 'request_body', request_body)
if len(request_body)==0:
raise PartnerClientError("request body can not be empty")
# Parse data request XML
root = ET.fromstring(request_body)
request_id = root.findtext('./RequestID')
study_id = root.findtext('./StudyID')
result = root.findtext('./Result')
# Kick off a new thread to handle the request
handler = AnalysisResultHandler()
t = Thread(target=handler.handle_analysis_result, args=(request_id, study_id, result))
t.daemon = False
t.start()
method_exit(signature)
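# A minimal hosting sketch (the port and mount point are illustrative; the
# real entry point is configured elsewhere in the partner client):
#
#     cherrypy.quickstart(PartnerHTTPServices(), '/',
#                         {'global': {'server.socket_port': 8080}})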
|
{
"content_hash": "e0c2d2a137f7d28a1a4ec617404a45d1",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 117,
"avg_line_length": 50.67171717171717,
"alnum_prop": 0.6001196053025017,
"repo_name": "NASA-Tournament-Lab/CoECI-CMS-Healthcare-Fraud-Prevention",
"id": "71cf23d8f4d188e9079fd1129a2f39e450479119",
"size": "20090",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "partnerclient/hfppnetwork/partner/httpservices.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "166273"
},
{
"name": "Java",
"bytes": "1039497"
},
{
"name": "JavaScript",
"bytes": "2043931"
},
{
"name": "Python",
"bytes": "589646"
},
{
"name": "Shell",
"bytes": "26869"
}
],
"symlink_target": ""
}
|
import fnmatch
import logging
import os
import shutil
import urllib
import urlparse
import jinja2
import misaka
import pygments
import pygments.formatters
import pygments.lexers
import requests
import yaml
from .document import Document, iter_all
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
logger = logging.getLogger(__name__)
class HtmlRenderer(misaka.HtmlRenderer, misaka.SmartyPants):
pass
class MarkdownRenderer(object):
def __init__(self, extensions=None, flags=None):
if extensions is None:
extensions = (misaka.EXT_AUTOLINK |
misaka.EXT_FENCED_CODE |
misaka.EXT_NO_INTRA_EMPHASIS |
misaka.EXT_SPACE_HEADERS |
misaka.EXT_STRIKETHROUGH |
misaka.EXT_SUPERSCRIPT |
misaka.EXT_TABLES)
if flags is None:
flags = (misaka.HTML_ESCAPE |
misaka.HTML_TOC)
self._renderer = HtmlRenderer(flags)
self._markdown = misaka.Markdown(self._renderer, extensions)
def render(self, text):
return self._markdown.render(text)
def highlight(text, lang='text'):
result = ''
try:
lexer = pygments.lexers.get_lexer_by_name(lang, stripall=True)
except Exception:
result += ('<div class="highlight"><span class="err">'
'Error: language "{lang}" is not supported'
'</span></div>').format(lang=lang)
lexer = pygments.lexers.get_lexer_by_name('text', stripall=True)
result += pygments.highlight(text, lexer, pygments.formatters.HtmlFormatter())
return result
def render_all(documents, output_dir, template_filename=None,
template_filters=None, markdown_renderer=None):
if template_filename is None:
template_filename = os.path.join(DATA_DIR, 'template.html')
if markdown_renderer is None:
markdown_renderer = MarkdownRenderer()
template_env = jinja2.Environment(trim_blocks=True)
template_env.filters['hl'] = highlight
template_env.filters['md'] = (
lambda text: markdown_renderer.render(text) if text else '')
if template_filters:
template_env.filters.update(template_filters)
with open(template_filename, 'r') as f:
template = template_env.from_string(f.read())
if not os.path.exists(output_dir):
logger.debug('Creating output directory %r', output_dir)
os.mkdir(output_dir)
try:
shutil.copytree(os.path.join(DATA_DIR, 'css'),
os.path.join(output_dir, 'css'))
except OSError as e:
if e.errno == 17:
logger.debug('CSS directory already exists, not creating')
else:
raise
for doc in documents:
html = template.render(doc=doc)
output_filename = os.path.join(
output_dir, '%s.html' % (doc.slug,))
logger.debug('Writing %r', output_filename)
with open(output_filename, 'w') as output_file:
output_file.write(html)
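# A minimal usage sketch (paths are illustrative; assumes iter_all yields
# Document objects with a .slug attribute, as used above):
#
#     docs = list(iter_all('docs'))
#     render_all(docs, 'build')
#
# Each document is rendered through the jinja2 template to build/<slug>.html,
# with the bundled CSS copied alongside.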
|
{
"content_hash": "51dabceedbd436aa0399abc237c345aa",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 82,
"avg_line_length": 32.4375,
"alnum_prop": 0.615606936416185,
"repo_name": "noonat/yup",
"id": "5bb421e973579e822a6b32e3b99e98425f77cf5d",
"size": "3114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yup/render.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11441"
},
{
"name": "Python",
"bytes": "14488"
}
],
"symlink_target": ""
}
|
import cgi
import json
import urllib
import logging
import string
import random
from httplib2 import Http
from django.conf import settings
from django.core.urlresolvers import reverse
from keystoneclient.exceptions import Conflict
from keystoneclient.v2_0 import client as keystone_client
from django.contrib import messages
from openstack_auth.backend import KeystoneBackend
from openstack_auth.user import create_user_from_token
from openstack_auth.user import Token
from django.utils.translation import ugettext_lazy as _
logger = logging.getLogger(__name__)
class FacebookBackend:
admin_client = None
def _admin_client(self):
if not self.admin_client:
self.admin_client = keystone_client.Client(
username=settings.ADMIN_USER,
password=settings.ADMIN_PASSWORD,
tenant_name=settings.ADMIN_TENANT,
auth_url=settings.OPENSTACK_KEYSTONE_URL)
return self.admin_client
def keystone_get_endpoint(self, service, type='publicurl'):
service = self._admin_client().services.find(name=service)
endpoint = self._admin_client().endpoints.find(service_id=service.id)
        return getattr(endpoint, type)
def keystone_user_exists(self, username):
keystone_admin = self._admin_client()
users = keystone_admin.users.list()
for user in users:
if user.name == username:
return True
return False
def get_keystone_tenant(self, tenant_name):
keystone_admin = self._admin_client()
tenants = keystone_admin.tenants.list()
for tenant in tenants:
if tenant.name == tenant_name:
return tenant
return None
def add_keystone_user(self, settings, tenant_name, password, fb_profile):
keystone_admin = self._admin_client()
tenant = keystone_admin.tenants.create(tenant_name,
"Auto created account",
True)
user = keystone_admin.users.create(tenant_name,
password,
fb_profile['email'],
tenant.id,
True)
member_user_role = settings.MEMBER_USER_ROLE
try:
keystone_admin.roles.add_user_role(user.id,
member_user_role,
tenant.id)
except Conflict:
pass
return tenant
def facebook_get_token_profile(self, token, request):
args = {
'client_id': settings.FACEBOOK_APP_ID,
'client_secret': settings.FACEBOOK_APP_SECRET,
'redirect_uri': request.build_absolute_uri(
reverse('horizon.facebook.views.authentication_callback')),
'code': token,
}
target = urllib.urlopen(
'https://graph.facebook.com/oauth/access_token?'
+ urllib.urlencode(args)).read()
response = cgi.parse_qs(target)
if 'access_token' not in response:
return None, None
access_token = response['access_token'][-1]
# Read the user's profile information
fb_profile = urllib.urlopen(
'https://graph.facebook.com/me?access_token=%s' % access_token)
fb_profile = json.load(fb_profile)
return access_token, fb_profile
def facebook_trystack_group_member(self, access_token):
try:
graph_data = None
group_url = (
"https://graph.facebook.com/"
"269238013145112/members?limit=1&access_token=%s"
% access_token)
f = urllib.urlopen(group_url)
graph_data_json = f.read()
f.close()
graph_data = json.loads(graph_data_json)
return (len(graph_data['data']) > 0)
        except Exception:
return False
def authenticate(self, token=None, request=None):
""" Reads in a Facebook code and asks Facebook
if it's valid and what user it points to. """
        logger.debug('authenticating via Facebook')
keystone = KeystoneBackend()
self.keystone = keystone
# Get a legit access token from Facebook
access_token, fb_profile = self.facebook_get_token_profile(token, request)
if not access_token:
# No access token means failed auth to FB
msg = _("Facebook login invalid or Token Expired")
messages.error(request, msg)
logger.error(msg)
return None
facebook_id = fb_profile['id']
# verify TryStack group membership
if not self.facebook_trystack_group_member(access_token):
msg = _("Facebook id %s is not a member of the TryStack Facebook group" % facebook_id)
messages.error(request, msg)
logger.error(msg)
return None
#### If we got here then auth and group membership are valid
#### time to ensure the user exists and get a token
# create user and tenant if they don't exist
username = tenant_name = "facebook%s" % facebook_id
password = "".join([random.choice(
string.ascii_lowercase + string.digits)
for i in range(8)])
if not self.keystone_user_exists(username):
tenant = self.add_keystone_user(settings, tenant_name, password, fb_profile)
else:
tenant = self.get_keystone_tenant(tenant_name)
# get a keystone token for the user (requires the custom auth filter)
d = '{"auth":{"%s": "customer-x", "passwordCredentials": {"username": "%s", "password": "fake_password"}}}' % (username, username)
url = "%s/tokens/" % self.keystone_get_endpoint('keystone', 'adminurl')
headers = {'Content-Type': 'application/json; charset=UTF-8', 'X-Auth-Token': settings.ADMIN_TOKEN}
resp, content = Http().request(uri=url, method="POST", headers=headers, body=d)
# load the token, create a new client based on the token
auth_json = json.loads(content)['access']
client = keystone_client.Client(username=username,
tenant_name=username,
token=auth_json['token']['id'],
auth_url=settings.OPENSTACK_KEYSTONE_URL)
# use the client and token to create a Token object
token = Token(auth_ref=client.auth_ref)
# generate a django user (this is how openstack_auth.backend does it too)
user = create_user_from_token(request, token, settings.OPENSTACK_KEYSTONE_URL)
#client.service_catalog.url_for(endpoint_type='publicURL'))
return user
def get_user(self, user_id):
""" Just returns the user of a given ID. """
try:
keystone = KeystoneBackend()
keystone.request = self.request
except Exception:
return None
return keystone.get_user(user_id)
supports_object_permissions = False
supports_anonymous_user = True
supports_inactive_user = False
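# Usage sketch (assumption: the module path follows this repo's layout,
# horizon/facebook/backend.py), enabling the backend in Django settings:
#   AUTHENTICATION_BACKENDS = (
#       'horizon.facebook.backend.FacebookBackend',
#       'openstack_auth.backend.KeystoneBackend',
#   )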
|
{
"content_hash": "1ee7b3daec24174620dcdc21454356e8",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 138,
"avg_line_length": 40.26086956521739,
"alnum_prop": 0.5718142548596112,
"repo_name": "trystack/python-django-horizon-facebook",
"id": "be24c94ef72c65f2519096eb6bd46c96f6bd15be",
"size": "7408",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "horizon/facebook/backend.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3011"
},
{
"name": "Python",
"bytes": "19739"
},
{
"name": "Shell",
"bytes": "499"
}
],
"symlink_target": ""
}
|
import sys
class Trace(object):
"A tracing class"
debugmode = False
quietmode = False
showlinesmode = False
prefix = None
def debug(cls, message):
"Show a debug message"
if not Trace.debugmode or Trace.quietmode:
return
Trace.show(message, sys.stdout)
def message(cls, message):
"Show a trace message"
if Trace.quietmode:
return
if Trace.prefix and Trace.showlinesmode:
message = Trace.prefix + message
Trace.show(message, sys.stdout)
def error(cls, message):
"Show an error message"
message = '* ' + message
if Trace.prefix and Trace.showlinesmode:
message = Trace.prefix + message
Trace.show(message, sys.stderr)
def fatal(cls, message):
"Show an error message and terminate"
Trace.error('FATAL: ' + message)
exit(-1)
def show(cls, message, channel):
"Show a message out of a channel"
if sys.version_info < (3,0):
message = message.encode('utf-8')
channel.write(message + '\n')
debug = classmethod(debug)
message = classmethod(message)
error = classmethod(error)
fatal = classmethod(fatal)
show = classmethod(show)
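# Minimal usage sketch: Trace is driven through class attributes and the
# classmethods above (the trailing classmethod() calls keep compatibility
# with pre-decorator Python):
#   Trace.debugmode = True
#   Trace.debug('starting conversion')  # stdout, only when debugmode is set
#   Trace.error('bad input')            # stderr, prefixed with '* '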
import os.path
import sys
class BibStylesConfig(object):
"Configuration class from elyxer.config file"
abbrvnat = {
'@article':'$authors. $title. <i>$journal</i>,{ {$volume:}$pages,} $month $year.{ doi: $doi.}{ URL <a href="$url">$url</a>.}{ $note.}',
'cite':'$surname($year)',
'default':'$authors. <i>$title</i>. $publisher, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
}
alpha = {
'@article':'$authors. $title.{ <i>$journal</i>{, {$volume}{($number)}}{: $pages}{, $year}.}{ <a href="$url">$url</a>.}{ <a href="$filename">$filename</a>.}{ $note.}',
'cite':'$Sur$YY',
'default':'$authors. $title.{ <i>$journal</i>,} $year.{ <a href="$url">$url</a>.}{ <a href="$filename">$filename</a>.}{ $note.}',
}
authordate2 = {
'@article':'$authors. $year. $title. <i>$journal</i>, <b>$volume</b>($number), $pages.{ URL <a href="$url">$url</a>.}{ $note.}',
'@book':'$authors. $year. <i>$title</i>. $publisher.{ URL <a href="$url">$url</a>.}{ $note.}',
'cite':'$surname, $year',
'default':'$authors. $year. <i>$title</i>. $publisher.{ URL <a href="$url">$url</a>.}{ $note.}',
}
default = {
'@article':'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@book':'{$authors: }<i>$title</i>{ ($editor, ed.)}.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@booklet':'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@conference':'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@inbook':'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@incollection':'$authors: <i>$title</i>{ in <i>$booktitle</i>{ ($editor, ed.)}}.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@inproceedings':'$authors: “$title”, <i>$booktitle</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@manual':'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@mastersthesis':'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@misc':'$authors: <i>$title</i>.{{ $publisher,}{ $howpublished,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@phdthesis':'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@proceedings':'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@techreport':'$authors: <i>$title</i>, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@unpublished':'$authors: “$title”, <i>$journal</i>, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'cite':'$index',
'default':'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
}
defaulttags = {
'YY':'??', 'authors':'', 'surname':'',
}
ieeetr = {
'@article':'$authors, “$title”, <i>$journal</i>, vol. $volume, no. $number, pp. $pages, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@book':'$authors, <i>$title</i>. $publisher, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'cite':'$index',
'default':'$authors, “$title”. $year.{ URL <a href="$url">$url</a>.}{ $note.}',
}
plain = {
'@article':'$authors. $title.{ <i>$journal</i>{, {$volume}{($number)}}{:$pages}{, $year}.}{ URL <a href="$url">$url</a>.}{ $note.}',
'@book':'$authors. <i>$title</i>. $publisher,{ $month} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@incollection':'$authors. $title.{ In <i>$booktitle</i> {($editor, ed.)}.} $publisher,{ $month} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
'@inproceedings':'$authors. $title. { <i>$booktitle</i>{, {$volume}{($number)}}{:$pages}{, $year}.}{ URL <a href="$url">$url</a>.}{ $note.}',
'cite':'$index',
'default':'{$authors. }$title.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
}
vancouver = {
'@article':'$authors. $title. <i>$journal</i>, $year{;{<b>$volume</b>}{($number)}{:$pages}}.{ URL: <a href="$url">$url</a>.}{ $note.}',
'@book':'$authors. $title. {$publisher, }$year.{ URL: <a href="$url">$url</a>.}{ $note.}',
'cite':'$index',
'default':'$authors. $title; {$publisher, }$year.{ $howpublished.}{ URL: <a href="$url">$url</a>.}{ $note.}',
}
class BibTeXConfig(object):
"Configuration class from elyxer.config file"
replaced = {
'--':'—', '..':'.',
}
class ContainerConfig(object):
"Configuration class from elyxer.config file"
endings = {
'Align':'\\end_layout', 'BarredText':'\\bar',
'BoldText':'\\series', 'Cell':'</cell',
'ChangeDeleted':'\\change_unchanged',
'ChangeInserted':'\\change_unchanged', 'ColorText':'\\color',
'EmphaticText':'\\emph', 'Hfill':'\\hfill', 'Inset':'\\end_inset',
'Layout':'\\end_layout', 'LyXFooter':'\\end_document',
'LyXHeader':'\\end_header', 'Row':'</row', 'ShapedText':'\\shape',
'SizeText':'\\size', 'StrikeOut':'\\strikeout',
'TextFamily':'\\family', 'VersalitasText':'\\noun',
}
extracttext = {
'allowed':['StringContainer','Constant','FormulaConstant',],
'cloned':['',],
'extracted':['PlainLayout','TaggedText','Align','Caption','TextFamily','EmphaticText','VersalitasText','BarredText','SizeText','ColorText','LangLine','Formula','Bracket','RawText','BibTag','FormulaNumber','AlphaCommand','EmptyCommand','OneParamFunction','SymbolFunction','TextFunction','FontFunction','CombiningFunction','DecoratingFunction','FormulaSymbol','BracketCommand','TeXCode',],
}
startendings = {
'\\begin_deeper':'\\end_deeper', '\\begin_inset':'\\end_inset',
'\\begin_layout':'\\end_layout',
}
starts = {
'':'StringContainer', '#LyX':'BlackBox', '</lyxtabular':'BlackBox',
'<cell':'Cell', '<column':'Column', '<row':'Row',
'\\align':'Align', '\\bar':'BarredText',
'\\bar default':'BlackBox', '\\bar no':'BlackBox',
'\\begin_body':'BlackBox', '\\begin_deeper':'DeeperList',
'\\begin_document':'BlackBox', '\\begin_header':'LyXHeader',
'\\begin_inset Argument':'ShortTitle',
'\\begin_inset Box':'BoxInset', '\\begin_inset Branch':'Branch',
'\\begin_inset Caption':'Caption',
'\\begin_inset CommandInset bibitem':'BiblioEntry',
'\\begin_inset CommandInset bibtex':'BibTeX',
'\\begin_inset CommandInset citation':'BiblioCitation',
'\\begin_inset CommandInset href':'URL',
'\\begin_inset CommandInset include':'IncludeInset',
'\\begin_inset CommandInset index_print':'PrintIndex',
'\\begin_inset CommandInset label':'Label',
'\\begin_inset CommandInset line':'LineInset',
'\\begin_inset CommandInset nomencl_print':'PrintNomenclature',
'\\begin_inset CommandInset nomenclature':'NomenclatureEntry',
'\\begin_inset CommandInset ref':'Reference',
'\\begin_inset CommandInset toc':'TableOfContents',
'\\begin_inset ERT':'ERT', '\\begin_inset Flex':'FlexInset',
'\\begin_inset Flex Chunkref':'NewfangledChunkRef',
'\\begin_inset Flex Marginnote':'SideNote',
'\\begin_inset Flex Sidenote':'SideNote',
'\\begin_inset Flex URL':'FlexURL', '\\begin_inset Float':'Float',
'\\begin_inset FloatList':'ListOf', '\\begin_inset Foot':'Footnote',
'\\begin_inset Formula':'Formula',
'\\begin_inset FormulaMacro':'FormulaMacro',
'\\begin_inset Graphics':'Image',
'\\begin_inset Index':'IndexReference',
'\\begin_inset Info':'InfoInset',
'\\begin_inset LatexCommand bibitem':'BiblioEntry',
'\\begin_inset LatexCommand bibtex':'BibTeX',
'\\begin_inset LatexCommand cite':'BiblioCitation',
'\\begin_inset LatexCommand citealt':'BiblioCitation',
'\\begin_inset LatexCommand citep':'BiblioCitation',
'\\begin_inset LatexCommand citet':'BiblioCitation',
'\\begin_inset LatexCommand htmlurl':'URL',
'\\begin_inset LatexCommand index':'IndexReference',
'\\begin_inset LatexCommand label':'Label',
'\\begin_inset LatexCommand nomenclature':'NomenclatureEntry',
'\\begin_inset LatexCommand prettyref':'Reference',
'\\begin_inset LatexCommand printindex':'PrintIndex',
'\\begin_inset LatexCommand printnomenclature':'PrintNomenclature',
'\\begin_inset LatexCommand ref':'Reference',
'\\begin_inset LatexCommand tableofcontents':'TableOfContents',
'\\begin_inset LatexCommand url':'URL',
'\\begin_inset LatexCommand vref':'Reference',
'\\begin_inset Marginal':'SideNote',
'\\begin_inset Newline':'NewlineInset',
'\\begin_inset Newpage':'NewPageInset', '\\begin_inset Note':'Note',
'\\begin_inset OptArg':'ShortTitle',
'\\begin_inset Phantom':'PhantomText',
'\\begin_inset Quotes':'QuoteContainer',
'\\begin_inset Tabular':'Table', '\\begin_inset Text':'InsetText',
'\\begin_inset VSpace':'VerticalSpace', '\\begin_inset Wrap':'Wrap',
'\\begin_inset listings':'Listing',
'\\begin_inset script':'ScriptInset', '\\begin_inset space':'Space',
'\\begin_layout':'Layout', '\\begin_layout Abstract':'Abstract',
'\\begin_layout Author':'Author',
'\\begin_layout Bibliography':'Bibliography',
'\\begin_layout Chunk':'NewfangledChunk',
'\\begin_layout Description':'Description',
'\\begin_layout Enumerate':'ListItem',
'\\begin_layout Itemize':'ListItem', '\\begin_layout List':'List',
'\\begin_layout LyX-Code':'LyXCode',
'\\begin_layout Plain':'PlainLayout',
'\\begin_layout Standard':'StandardLayout',
'\\begin_layout Title':'Title', '\\begin_preamble':'LyXPreamble',
'\\change_deleted':'ChangeDeleted',
'\\change_inserted':'ChangeInserted',
'\\change_unchanged':'BlackBox', '\\color':'ColorText',
'\\color inherit':'BlackBox', '\\color none':'BlackBox',
'\\emph default':'BlackBox', '\\emph off':'BlackBox',
'\\emph on':'EmphaticText', '\\emph toggle':'EmphaticText',
'\\end_body':'LyXFooter', '\\family':'TextFamily',
'\\family default':'BlackBox', '\\family roman':'BlackBox',
'\\hfill':'Hfill', '\\labelwidthstring':'BlackBox',
'\\lang':'LangLine', '\\length':'InsetLength',
'\\lyxformat':'LyXFormat', '\\lyxline':'LyXLine',
'\\newline':'Newline', '\\newpage':'NewPage',
'\\noindent':'BlackBox', '\\noun default':'BlackBox',
'\\noun off':'BlackBox', '\\noun on':'VersalitasText',
'\\paragraph_spacing':'BlackBox', '\\series bold':'BoldText',
'\\series default':'BlackBox', '\\series medium':'BlackBox',
'\\shape':'ShapedText', '\\shape default':'BlackBox',
'\\shape up':'BlackBox', '\\size':'SizeText',
'\\size normal':'BlackBox', '\\start_of_appendix':'StartAppendix',
'\\strikeout default':'BlackBox', '\\strikeout on':'StrikeOut',
}
string = {
'startcommand':'\\',
}
table = {
'headers':['<lyxtabular','<features',],
}
class EscapeConfig(object):
"Configuration class from elyxer.config file"
chars = {
'\n':'', ' -- ':' — ', ' --- ':' — ', '\'':'’', '`':'‘',
}
commands = {
'\\InsetSpace \\space{}':' ', '\\InsetSpace \\thinspace{}':' ',
'\\InsetSpace ~':' ', '\\SpecialChar \\-':'',
'\\SpecialChar \\@.':'.', '\\SpecialChar \\ldots{}':'…',
'\\SpecialChar \\menuseparator':' ▷ ',
'\\SpecialChar \\nobreakdash-':'-', '\\SpecialChar \\slash{}':'/',
'\\SpecialChar \\textcompwordmark{}':'', '\\backslash':'\\',
}
entities = {
'&':'&', '<':'<', '>':'>',
}
html = {
'/>':'>',
}
iso885915 = {
' ':' ', ' ':' ', ' ':' ',
}
nonunicode = {
' ':' ',
}
class FormulaConfig(object):
"Configuration class from elyxer.config file"
alphacommands = {
'\\AA':'Å', '\\AE':'Æ',
'\\AmS':'<span class="versalitas">AmS</span>', '\\Angstroem':'Å',
'\\DH':'Ð', '\\Koppa':'Ϟ', '\\L':'Ł', '\\Micro':'µ', '\\O':'Ø',
'\\OE':'Œ', '\\Sampi':'Ϡ', '\\Stigma':'Ϛ', '\\TH':'Þ',
'\\aa':'å', '\\ae':'æ', '\\alpha':'α', '\\beta':'β',
'\\delta':'δ', '\\dh':'ð', '\\digamma':'ϝ', '\\epsilon':'ϵ',
'\\eta':'η', '\\eth':'ð', '\\gamma':'γ', '\\i':'ı',
'\\imath':'ı', '\\iota':'ι', '\\j':'ȷ', '\\jmath':'ȷ',
'\\kappa':'κ', '\\koppa':'ϟ', '\\l':'ł', '\\lambda':'λ',
'\\mu':'μ', '\\nu':'ν', '\\o':'ø', '\\oe':'œ', '\\omega':'ω',
'\\phi':'φ', '\\pi':'π', '\\psi':'ψ', '\\rho':'ρ',
'\\sampi':'ϡ', '\\sigma':'σ', '\\ss':'ß', '\\stigma':'ϛ',
'\\tau':'τ', '\\tcohm':'Ω', '\\textcrh':'ħ', '\\th':'þ',
'\\theta':'θ', '\\upsilon':'υ', '\\varDelta':'∆',
'\\varGamma':'Γ', '\\varLambda':'Λ', '\\varOmega':'Ω',
'\\varPhi':'Φ', '\\varPi':'Π', '\\varPsi':'Ψ', '\\varSigma':'Σ',
'\\varTheta':'Θ', '\\varUpsilon':'Υ', '\\varXi':'Ξ',
'\\varbeta':'ϐ', '\\varepsilon':'ε', '\\varkappa':'ϰ',
'\\varphi':'φ', '\\varpi':'ϖ', '\\varrho':'ϱ', '\\varsigma':'ς',
'\\vartheta':'ϑ', '\\xi':'ξ', '\\zeta':'ζ',
}
array = {
'begin':'\\begin', 'cellseparator':'&', 'end':'\\end',
'rowseparator':'\\\\',
}
bigbrackets = {
'(':['⎛','⎜','⎝',], ')':['⎞','⎟','⎠',], '[':['⎡','⎢','⎣',],
']':['⎤','⎥','⎦',], '{':['⎧','⎪','⎨','⎩',], '|':['|',],
'}':['⎫','⎪','⎬','⎭',], '∥':['∥',],
}
bigsymbols = {
'∑':['⎲','⎳',], '∫':['⌠','⌡',],
}
bracketcommands = {
'\\left':'span class="symbol"',
'\\left.':'<span class="leftdot"></span>',
'\\middle':'span class="symbol"', '\\right':'span class="symbol"',
'\\right.':'<span class="rightdot"></span>',
}
combiningfunctions = {
'\\"':'̈', '\\\'':'́', '\\^':'̂', '\\`':'̀', '\\acute':'́',
'\\bar':'̄', '\\breve':'̆', '\\c':'̧', '\\check':'̌',
'\\dddot':'⃛', '\\ddot':'̈', '\\dot':'̇', '\\grave':'̀',
'\\hat':'̂', '\\mathring':'̊', '\\overleftarrow':'⃖',
'\\overrightarrow':'⃗', '\\r':'̊', '\\s':'̩',
'\\textcircled':'⃝', '\\textsubring':'̥', '\\tilde':'̃',
'\\v':'̌', '\\vec':'⃗', '\\~':'̃',
}
commands = {
'\\ ':' ', '\\!':'', '\\#':'#', '\\$':'$', '\\%':'%',
'\\&':'&', '\\,':' ', '\\:':' ', '\\;':' ', '\\AC':'∿',
'\\APLcomment':'⍝', '\\APLdownarrowbox':'⍗', '\\APLinput':'⍞',
'\\APLinv':'⌹', '\\APLleftarrowbox':'⍇', '\\APLlog':'⍟',
'\\APLrightarrowbox':'⍈', '\\APLuparrowbox':'⍐', '\\Box':'□',
'\\Bumpeq':'≎', '\\CIRCLE':'●', '\\Cap':'⋒',
'\\CapitalDifferentialD':'ⅅ', '\\CheckedBox':'☑', '\\Circle':'○',
'\\Coloneqq':'⩴', '\\ComplexI':'ⅈ', '\\ComplexJ':'ⅉ',
'\\Corresponds':'≙', '\\Cup':'⋓', '\\Delta':'Δ', '\\Diamond':'◇',
'\\Diamondblack':'◆', '\\Diamonddot':'⟐', '\\DifferentialD':'ⅆ',
'\\Downarrow':'⇓', '\\EUR':'€', '\\Euler':'ℇ',
'\\ExponetialE':'ⅇ', '\\Finv':'Ⅎ', '\\Game':'⅁', '\\Gamma':'Γ',
'\\Im':'ℑ', '\\Join':'⨝', '\\LEFTCIRCLE':'◖', '\\LEFTcircle':'◐',
'\\LHD':'◀', '\\Lambda':'Λ', '\\Lbag':'⟅', '\\Leftarrow':'⇐',
'\\Lleftarrow':'⇚', '\\Longleftarrow':'⟸',
'\\Longleftrightarrow':'⟺', '\\Longrightarrow':'⟹', '\\Lparen':'⦅',
'\\Lsh':'↰', '\\Mapsfrom':'⇐|', '\\Mapsto':'|⇒', '\\Omega':'Ω',
'\\P':'¶', '\\Phi':'Φ', '\\Pi':'Π', '\\Pr':'Pr', '\\Psi':'Ψ',
'\\Qoppa':'Ϙ', '\\RHD':'▶', '\\RIGHTCIRCLE':'◗',
'\\RIGHTcircle':'◑', '\\Rbag':'⟆', '\\Re':'ℜ', '\\Rparen':'⦆',
'\\Rrightarrow':'⇛', '\\Rsh':'↱', '\\S':'§', '\\Sigma':'Σ',
'\\Square':'☐', '\\Subset':'⋐', '\\Sun':'☉', '\\Supset':'⋑',
'\\Theta':'Θ', '\\Uparrow':'⇑', '\\Updownarrow':'⇕',
'\\Upsilon':'Υ', '\\Vdash':'⊩', '\\Vert':'∥', '\\Vvdash':'⊪',
'\\XBox':'☒', '\\Xi':'Ξ', '\\Yup':'⅄', '\\\\':'<br/>',
'\\_':'_', '\\aleph':'ℵ', '\\amalg':'∐', '\\anchor':'⚓',
'\\angle':'∠', '\\aquarius':'♒', '\\arccos':'arccos',
'\\arcsin':'arcsin', '\\arctan':'arctan', '\\arg':'arg',
'\\aries':'♈', '\\arrowbullet':'➢', '\\ast':'∗', '\\asymp':'≍',
'\\backepsilon':'∍', '\\backprime':'‵', '\\backsimeq':'⋍',
'\\backslash':'\\', '\\ballotx':'✗', '\\barwedge':'⊼',
'\\because':'∵', '\\beth':'ℶ', '\\between':'≬', '\\bigcap':'∩',
'\\bigcirc':'○', '\\bigcup':'∪', '\\bigodot':'⊙',
'\\bigoplus':'⊕', '\\bigotimes':'⊗', '\\bigsqcup':'⊔',
'\\bigstar':'★', '\\bigtriangledown':'▽', '\\bigtriangleup':'△',
'\\biguplus':'⊎', '\\bigvee':'∨', '\\bigwedge':'∧',
'\\biohazard':'☣', '\\blacklozenge':'⧫', '\\blacksmiley':'☻',
'\\blacksquare':'■', '\\blacktriangle':'▲',
'\\blacktriangledown':'▼', '\\blacktriangleleft':'◂',
'\\blacktriangleright':'▶', '\\blacktriangleup':'▴', '\\bot':'⊥',
'\\bowtie':'⋈', '\\box':'▫', '\\boxast':'⧆', '\\boxbar':'◫',
'\\boxbox':'⧈', '\\boxbslash':'⧅', '\\boxcircle':'⧇',
'\\boxdot':'⊡', '\\boxminus':'⊟', '\\boxplus':'⊞',
'\\boxslash':'⧄', '\\boxtimes':'⊠', '\\bullet':'•',
'\\bumpeq':'≏', '\\cancer':'♋', '\\cap':'∩', '\\capricornus':'♑',
'\\cat':'⁀', '\\cdot':'⋅', '\\cdots':'⋯', '\\cent':'¢',
'\\centerdot':'∙', '\\checkmark':'✓', '\\chi':'χ', '\\circ':'∘',
'\\circeq':'≗', '\\circlearrowleft':'↺', '\\circlearrowright':'↻',
'\\circledR':'®', '\\circledast':'⊛', '\\circledbslash':'⦸',
'\\circledcirc':'⊚', '\\circleddash':'⊝', '\\circledgtr':'⧁',
'\\circledless':'⧀', '\\clubsuit':'♣', '\\colon':': ', '\\coloneqq':'≔',
'\\complement':'∁', '\\cong':'≅', '\\coprod':'∐',
'\\copyright':'©', '\\cos':'cos', '\\cosh':'cosh', '\\cot':'cot',
'\\coth':'coth', '\\csc':'csc', '\\cup':'∪', '\\curlyvee':'⋎',
'\\curlywedge':'⋏', '\\curvearrowleft':'↶',
'\\curvearrowright':'↷', '\\dag':'†', '\\dagger':'†',
'\\daleth':'ℸ', '\\dashleftarrow':'⇠', '\\dashv':'⊣',
'\\ddag':'‡', '\\ddagger':'‡', '\\ddots':'⋱', '\\deg':'deg',
'\\det':'det', '\\diagdown':'╲', '\\diagup':'╱',
'\\diameter':'⌀', '\\diamond':'◇', '\\diamondsuit':'♦',
'\\dim':'dim', '\\div':'÷', '\\divideontimes':'⋇',
'\\dotdiv':'∸', '\\doteq':'≐', '\\doteqdot':'≑', '\\dotplus':'∔',
'\\dots':'…', '\\doublebarwedge':'⌆', '\\downarrow':'↓',
'\\downdownarrows':'⇊', '\\downharpoonleft':'⇃',
'\\downharpoonright':'⇂', '\\dsub':'⩤', '\\earth':'♁',
'\\eighthnote':'♪', '\\ell':'ℓ', '\\emptyset':'∅',
'\\eqcirc':'≖', '\\eqcolon':'≕', '\\eqsim':'≂', '\\euro':'€',
'\\exists':'∃', '\\exp':'exp', '\\fallingdotseq':'≒',
'\\fcmp':'⨾', '\\female':'♀', '\\flat':'♭', '\\forall':'∀',
'\\fourth':'⁗', '\\frown':'⌢', '\\frownie':'☹', '\\gcd':'gcd',
'\\gemini':'♊', '\\geq)':'≥', '\\geqq':'≧', '\\geqslant':'≥',
'\\gets':'←', '\\gg':'≫', '\\ggg':'⋙', '\\gimel':'ℷ',
'\\gneqq':'≩', '\\gnsim':'⋧', '\\gtrdot':'⋗', '\\gtreqless':'⋚',
'\\gtreqqless':'⪌', '\\gtrless':'≷', '\\gtrsim':'≳',
'\\guillemotleft':'«', '\\guillemotright':'»', '\\hbar':'ℏ',
'\\heartsuit':'♥', '\\hfill':'<span class="hfill"> </span>',
'\\hom':'hom', '\\hookleftarrow':'↩', '\\hookrightarrow':'↪',
'\\hslash':'ℏ', '\\idotsint':'<span class="bigsymbol">∫⋯∫</span>',
'\\iiint':'<span class="bigsymbol">∭</span>',
'\\iint':'<span class="bigsymbol">∬</span>', '\\imath':'ı',
'\\inf':'inf', '\\infty':'∞', '\\intercal':'⊺',
'\\interleave':'⫴', '\\invamp':'⅋', '\\invneg':'⌐',
'\\jmath':'ȷ', '\\jupiter':'♃', '\\ker':'ker', '\\land':'∧',
'\\landupint':'<span class="bigsymbol">∱</span>', '\\lang':'⟪',
'\\langle':'⟨', '\\lblot':'⦉', '\\lbrace':'{', '\\lbrace)':'{',
'\\lbrack':'[', '\\lceil':'⌈', '\\ldots':'…', '\\leadsto':'⇝',
'\\leftarrow)':'←', '\\leftarrowtail':'↢', '\\leftarrowtobar':'⇤',
'\\leftharpoondown':'↽', '\\leftharpoonup':'↼',
'\\leftleftarrows':'⇇', '\\leftleftharpoons':'⥢', '\\leftmoon':'☾',
'\\leftrightarrow':'↔', '\\leftrightarrows':'⇆',
'\\leftrightharpoons':'⇋', '\\leftthreetimes':'⋋', '\\leo':'♌',
'\\leq)':'≤', '\\leqq':'≦', '\\leqslant':'≤', '\\lessdot':'⋖',
'\\lesseqgtr':'⋛', '\\lesseqqgtr':'⪋', '\\lessgtr':'≶',
'\\lesssim':'≲', '\\lfloor':'⌊', '\\lg':'lg', '\\lgroup':'⟮',
'\\lhd':'⊲', '\\libra':'♎', '\\lightning':'↯', '\\limg':'⦇',
'\\liminf':'liminf', '\\limsup':'limsup', '\\ll':'≪',
'\\llbracket':'⟦', '\\llcorner':'⌞', '\\lll':'⋘', '\\ln':'ln',
'\\lneqq':'≨', '\\lnot':'¬', '\\lnsim':'⋦', '\\log':'log',
'\\longleftarrow':'⟵', '\\longleftrightarrow':'⟷',
'\\longmapsto':'⟼', '\\longrightarrow':'⟶', '\\looparrowleft':'↫',
'\\looparrowright':'↬', '\\lor':'∨', '\\lozenge':'◊',
'\\lrcorner':'⌟', '\\ltimes':'⋉', '\\lyxlock':'', '\\male':'♂',
'\\maltese':'✠', '\\mapsfrom':'↤', '\\mapsto':'↦',
'\\mathcircumflex':'^', '\\max':'max', '\\measuredangle':'∡',
'\\medbullet':'⚫', '\\medcirc':'⚪', '\\mercury':'☿', '\\mho':'℧',
'\\mid':'∣', '\\min':'min', '\\models':'⊨', '\\mp':'∓',
'\\multimap':'⊸', '\\nLeftarrow':'⇍', '\\nLeftrightarrow':'⇎',
'\\nRightarrow':'⇏', '\\nVDash':'⊯', '\\nabla':'∇',
'\\napprox':'≉', '\\natural':'♮', '\\ncong':'≇', '\\nearrow':'↗',
'\\neg':'¬', '\\neg)':'¬', '\\neptune':'♆', '\\nequiv':'≢',
'\\newline':'<br/>', '\\nexists':'∄', '\\ngeqslant':'≱',
'\\ngtr':'≯', '\\ngtrless':'≹', '\\ni':'∋', '\\ni)':'∋',
'\\nleftarrow':'↚', '\\nleftrightarrow':'↮', '\\nleqslant':'≰',
'\\nless':'≮', '\\nlessgtr':'≸', '\\nmid':'∤', '\\nolimits':'',
'\\nonumber':'', '\\not':'¬', '\\not<':'≮', '\\not=':'≠',
'\\not>':'≯', '\\notbackslash':'⍀', '\\notin':'∉', '\\notni':'∌',
'\\notslash':'⌿', '\\nparallel':'∦', '\\nprec':'⊀',
'\\nrightarrow':'↛', '\\nsim':'≁', '\\nsimeq':'≄',
'\\nsqsubset':'⊏̸', '\\nsubseteq':'⊈', '\\nsucc':'⊁',
'\\nsucccurlyeq':'⋡', '\\nsupset':'⊅', '\\nsupseteq':'⊉',
'\\ntriangleleft':'⋪', '\\ntrianglelefteq':'⋬',
'\\ntriangleright':'⋫', '\\ntrianglerighteq':'⋭', '\\nvDash':'⊭',
'\\nvdash':'⊬', '\\nwarrow':'↖', '\\odot':'⊙',
'\\officialeuro':'€', '\\oiiint':'<span class="bigsymbol">∰</span>',
'\\oiint':'<span class="bigsymbol">∯</span>',
'\\oint':'<span class="bigsymbol">∮</span>',
'\\ointclockwise':'<span class="bigsymbol">∲</span>',
'\\ointctrclockwise':'<span class="bigsymbol">∳</span>',
'\\ominus':'⊖', '\\oplus':'⊕', '\\oslash':'⊘', '\\otimes':'⊗',
'\\owns':'∋', '\\parallel':'∥', '\\partial':'∂', '\\pencil':'✎',
'\\perp':'⊥', '\\pisces':'♓', '\\pitchfork':'⋔', '\\pluto':'♇',
'\\pm':'±', '\\pointer':'➪', '\\pointright':'☞', '\\pounds':'£',
'\\prec':'≺', '\\preccurlyeq':'≼', '\\preceq':'≼',
'\\precsim':'≾', '\\prime':'′', '\\prompto':'∝', '\\qoppa':'ϙ',
'\\qquad':' ', '\\quad':' ', '\\quarternote':'♩',
'\\radiation':'☢', '\\rang':'⟫', '\\rangle':'⟩', '\\rblot':'⦊',
'\\rbrace':'}', '\\rbrace)':'}', '\\rbrack':']', '\\rceil':'⌉',
'\\recycle':'♻', '\\rfloor':'⌋', '\\rgroup':'⟯', '\\rhd':'⊳',
'\\rightangle':'∟', '\\rightarrow)':'→', '\\rightarrowtail':'↣',
'\\rightarrowtobar':'⇥', '\\rightharpoondown':'⇁',
'\\rightharpoonup':'⇀', '\\rightharpooondown':'⇁',
'\\rightharpooonup':'⇀', '\\rightleftarrows':'⇄',
'\\rightleftharpoons':'⇌', '\\rightmoon':'☽',
'\\rightrightarrows':'⇉', '\\rightrightharpoons':'⥤',
'\\rightthreetimes':'⋌', '\\rimg':'⦈', '\\risingdotseq':'≓',
'\\rrbracket':'⟧', '\\rsub':'⩥', '\\rtimes':'⋊',
'\\sagittarius':'♐', '\\saturn':'♄', '\\scorpio':'♏',
'\\searrow':'↘', '\\sec':'sec', '\\second':'″', '\\setminus':'∖',
'\\sharp':'♯', '\\simeq':'≃', '\\sin':'sin', '\\sinh':'sinh',
'\\sixteenthnote':'♬', '\\skull':'☠', '\\slash':'∕',
'\\smallsetminus':'∖', '\\smalltriangledown':'▿',
'\\smalltriangleleft':'◃', '\\smalltriangleright':'▹',
'\\smalltriangleup':'▵', '\\smile':'⌣', '\\smiley':'☺',
'\\spadesuit':'♠', '\\spddot':'¨', '\\sphat':'',
'\\sphericalangle':'∢', '\\spot':'⦁', '\\sptilde':'~',
'\\sqcap':'⊓', '\\sqcup':'⊔', '\\sqsubset':'⊏',
'\\sqsubseteq':'⊑', '\\sqsupset':'⊐', '\\sqsupseteq':'⊒',
'\\square':'□', '\\sslash':'⫽', '\\star':'⋆', '\\steaming':'☕',
'\\subseteqq':'⫅', '\\subsetneqq':'⫋', '\\succ':'≻',
'\\succcurlyeq':'≽', '\\succeq':'≽', '\\succnsim':'⋩',
'\\succsim':'≿', '\\sun':'☼', '\\sup':'sup', '\\supseteqq':'⫆',
'\\supsetneqq':'⫌', '\\surd':'√', '\\swarrow':'↙',
'\\swords':'⚔', '\\talloblong':'⫾', '\\tan':'tan',
'\\tanh':'tanh', '\\taurus':'♉', '\\textasciicircum':'^',
'\\textasciitilde':'~', '\\textbackslash':'\\',
'\\textcopyright':'©', '\\textdegree':'°', '\\textellipsis':'…',
'\\textemdash':'—', '\\textendash':'—', '\\texteuro':'€',
'\\textgreater':'>', '\\textless':'<', '\\textordfeminine':'ª',
'\\textordmasculine':'º', '\\textquotedblleft':'“',
'\\textquotedblright':'”', '\\textquoteright':'’',
'\\textregistered':'®', '\\textrightarrow':'→',
'\\textsection':'§', '\\texttrademark':'™',
'\\texttwosuperior':'²', '\\textvisiblespace':' ',
'\\therefore':'∴', '\\third':'‴', '\\top':'⊤', '\\triangle':'△',
'\\triangleleft':'⊲', '\\trianglelefteq':'⊴', '\\triangleq':'≜',
'\\triangleright':'▷', '\\trianglerighteq':'⊵',
'\\twoheadleftarrow':'↞', '\\twoheadrightarrow':'↠',
'\\twonotes':'♫', '\\udot':'⊍', '\\ulcorner':'⌜', '\\unlhd':'⊴',
'\\unrhd':'⊵', '\\unrhl':'⊵', '\\uparrow':'↑',
'\\updownarrow':'↕', '\\upharpoonleft':'↿', '\\upharpoonright':'↾',
'\\uplus':'⊎', '\\upuparrows':'⇈', '\\uranus':'♅',
'\\urcorner':'⌝', '\\vDash':'⊨', '\\varclubsuit':'♧',
'\\vardiamondsuit':'♦', '\\varheartsuit':'♥', '\\varnothing':'∅',
'\\varspadesuit':'♤', '\\vdash':'⊢', '\\vdots':'⋮', '\\vee':'∨',
'\\vee)':'∨', '\\veebar':'⊻', '\\vert':'∣', '\\virgo':'♍',
'\\warning':'⚠', '\\wasylozenge':'⌑', '\\wedge':'∧',
'\\wedge)':'∧', '\\wp':'℘', '\\wr':'≀', '\\yen':'¥',
'\\yinyang':'☯', '\\{':'{', '\\|':'∥', '\\}':'}',
}
decoratedcommand = {
}
decoratingfunctions = {
'\\overleftarrow':'⟵', '\\overrightarrow':'⟶', '\\widehat':'^',
}
endings = {
'bracket':'}', 'complex':'\\]', 'endafter':'}',
'endbefore':'\\end{', 'squarebracket':']',
}
environments = {
'align':['r','l',], 'eqnarray':['r','c','l',],
'gathered':['l','l',],
}
fontfunctions = {
'\\boldsymbol':'b', '\\mathbb':'span class="blackboard"',
'\\mathbb{A}':'𝔸', '\\mathbb{B}':'𝔹', '\\mathbb{C}':'ℂ',
'\\mathbb{D}':'𝔻', '\\mathbb{E}':'𝔼', '\\mathbb{F}':'𝔽',
'\\mathbb{G}':'𝔾', '\\mathbb{H}':'ℍ', '\\mathbb{J}':'𝕁',
'\\mathbb{K}':'𝕂', '\\mathbb{L}':'𝕃', '\\mathbb{N}':'ℕ',
'\\mathbb{O}':'𝕆', '\\mathbb{P}':'ℙ', '\\mathbb{Q}':'ℚ',
'\\mathbb{R}':'ℝ', '\\mathbb{S}':'𝕊', '\\mathbb{T}':'𝕋',
'\\mathbb{W}':'𝕎', '\\mathbb{Z}':'ℤ', '\\mathbf':'b',
'\\mathcal':'span class="scriptfont"', '\\mathcal{B}':'ℬ',
'\\mathcal{E}':'ℰ', '\\mathcal{F}':'ℱ', '\\mathcal{H}':'ℋ',
'\\mathcal{I}':'ℐ', '\\mathcal{L}':'ℒ', '\\mathcal{M}':'ℳ',
'\\mathcal{R}':'ℛ', '\\mathfrak':'span class="fraktur"',
'\\mathfrak{C}':'ℭ', '\\mathfrak{F}':'𝔉', '\\mathfrak{H}':'ℌ',
'\\mathfrak{I}':'ℑ', '\\mathfrak{R}':'ℜ', '\\mathfrak{Z}':'ℨ',
'\\mathit':'i', '\\mathring{A}':'Å', '\\mathring{U}':'Ů',
'\\mathring{a}':'å', '\\mathring{u}':'ů', '\\mathring{w}':'ẘ',
'\\mathring{y}':'ẙ', '\\mathrm':'span class="mathrm"',
'\\mathscr':'span class="scriptfont"', '\\mathscr{B}':'ℬ',
'\\mathscr{E}':'ℰ', '\\mathscr{F}':'ℱ', '\\mathscr{H}':'ℋ',
'\\mathscr{I}':'ℐ', '\\mathscr{L}':'ℒ', '\\mathscr{M}':'ℳ',
'\\mathscr{R}':'ℛ', '\\mathsf':'span class="mathsf"',
'\\mathtt':'tt',
}
hybridfunctions = {
'\\addcontentsline':['{$p!}{$q!}{$r!}','f0{}','ignored',],
'\\addtocontents':['{$p!}{$q!}','f0{}','ignored',],
'\\backmatter':['','f0{}','ignored',],
'\\binom':['{$1}{$2}','f2{(}f0{f1{$1}f1{$2}}f2{)}','span class="binom"','span class="binomstack"','span class="bigsymbol"',],
'\\boxed':['{$1}','f0{$1}','span class="boxed"',],
'\\cfrac':['[$p!]{$1}{$2}','f0{f3{(}f1{$1}f3{)/(}f2{$2}f3{)}}','span class="fullfraction"','span class="numerator align-$p"','span class="denominator"','span class="ignored"',],
'\\color':['{$p!}{$1}','f0{$1}','span style="color: $p;"',],
'\\colorbox':['{$p!}{$1}','f0{$1}','span class="colorbox" style="background: $p;"',],
'\\dbinom':['{$1}{$2}','(f0{f1{f2{$1}}f1{f2{ }}f1{f2{$2}}})','span class="binomial"','span class="binomrow"','span class="binomcell"',],
'\\dfrac':['{$1}{$2}','f0{f3{(}f1{$1}f3{)/(}f2{$2}f3{)}}','span class="fullfraction"','span class="numerator"','span class="denominator"','span class="ignored"',],
'\\displaystyle':['{$1}','f0{$1}','span class="displaystyle"',],
'\\fancyfoot':['[$p!]{$q!}','f0{}','ignored',],
'\\fancyhead':['[$p!]{$q!}','f0{}','ignored',],
'\\fbox':['{$1}','f0{$1}','span class="fbox"',],
'\\fboxrule':['{$p!}','f0{}','ignored',],
'\\fboxsep':['{$p!}','f0{}','ignored',],
'\\fcolorbox':['{$p!}{$q!}{$1}','f0{$1}','span class="boxed" style="border-color: $p; background: $q;"',],
'\\frac':['{$1}{$2}','f0{f3{(}f1{$1}f3{)/(}f2{$2}f3{)}}','span class="fraction"','span class="numerator"','span class="denominator"','span class="ignored"',],
'\\framebox':['[$p!][$q!]{$1}','f0{$1}','span class="framebox align-$q" style="width: $p;"',],
'\\frontmatter':['','f0{}','ignored',],
'\\href':['[$o]{$u!}{$t!}','f0{$t}','a href="$u"',],
'\\hspace':['{$p!}','f0{ }','span class="hspace" style="width: $p;"',],
'\\leftroot':['{$p!}','f0{ }','span class="leftroot" style="width: $p;px"',],
'\\mainmatter':['','f0{}','ignored',],
'\\markboth':['{$p!}{$q!}','f0{}','ignored',],
'\\markright':['{$p!}','f0{}','ignored',],
'\\nicefrac':['{$1}{$2}','f0{f1{$1}⁄f2{$2}}','span class="fraction"','sup class="numerator"','sub class="denominator"','span class="ignored"',],
'\\parbox':['[$p!]{$w!}{$1}','f0{1}','div class="Boxed" style="width: $w;"',],
'\\raisebox':['{$p!}{$1}','f0{$1.font}','span class="raisebox" style="vertical-align: $p;"',],
'\\renewenvironment':['{$1!}{$2!}{$3!}','',],
'\\rule':['[$v!]{$w!}{$h!}','f0/','hr class="line" style="width: $w; height: $h;"',],
'\\scriptscriptstyle':['{$1}','f0{$1}','span class="scriptscriptstyle"',],
'\\scriptstyle':['{$1}','f0{$1}','span class="scriptstyle"',],
'\\sqrt':['[$0]{$1}','f0{f1{$0}f2{√}f4{(}f3{$1}f4{)}}','span class="sqrt"','sup class="root"','span class="radical"','span class="root"','span class="ignored"',],
'\\stackrel':['{$1}{$2}','f0{f1{$1}f2{$2}}','span class="stackrel"','span class="upstackrel"','span class="downstackrel"',],
'\\tbinom':['{$1}{$2}','(f0{f1{f2{$1}}f1{f2{ }}f1{f2{$2}}})','span class="binomial"','span class="binomrow"','span class="binomcell"',],
'\\textcolor':['{$p!}{$1}','f0{$1}','span style="color: $p;"',],
'\\textstyle':['{$1}','f0{$1}','span class="textstyle"',],
'\\thispagestyle':['{$p!}','f0{}','ignored',],
'\\unit':['[$0]{$1}','$0f0{$1.font}','span class="unit"',],
'\\unitfrac':['[$0]{$1}{$2}','$0f0{f1{$1.font}⁄f2{$2.font}}','span class="fraction"','sup class="unit"','sub class="unit"',],
'\\uproot':['{$p!}','f0{ }','span class="uproot" style="width: $p;px"',],
'\\url':['{$u!}','f0{$u}','a href="$u"',],
'\\vspace':['{$p!}','f0{ }','span class="vspace" style="height: $p;"',],
}
hybridsizes = {
'\\binom':'$1+$2', '\\cfrac':'$1+$2', '\\dbinom':'$1+$2+1',
'\\dfrac':'$1+$2', '\\frac':'$1+$2', '\\tbinom':'$1+$2+1',
}
labelfunctions = {
'\\label':'a name="#"',
}
limitcommands = {
'\\biginterleave':'⫼', '\\bigsqcap':'⨅', '\\fint':'⨏',
'\\iiiint':'⨌', '\\int':'∫', '\\intop':'∫', '\\lim':'lim',
'\\prod':'∏', '\\smallint':'∫', '\\sqint':'⨖', '\\sum':'∑',
'\\varointclockwise':'∲', '\\varprod':'⨉', '\\zcmp':'⨟',
'\\zhide':'⧹', '\\zpipe':'⨠', '\\zproject':'⨡',
}
misccommands = {
'\\limits':'LimitPreviousCommand', '\\newcommand':'MacroDefinition',
'\\renewcommand':'MacroDefinition',
'\\setcounter':'SetCounterFunction', '\\tag':'FormulaTag',
'\\tag*':'FormulaTag', '\\today':'TodayCommand',
}
modified = {
'\n':'', ' ':'', '$':'', '&':' ', '\'':'’', '+':' + ',
',':', ', '-':' − ', '/':' ⁄ ', ':':' : ', '<':' < ',
'=':' = ', '>':' > ', '@':'', '~':'',
}
onefunctions = {
'\\Big':'span class="bigsymbol"', '\\Bigg':'span class="hugesymbol"',
'\\bar':'span class="bar"', '\\begin{array}':'span class="arraydef"',
'\\big':'span class="symbol"', '\\bigg':'span class="largesymbol"',
'\\bigl':'span class="bigsymbol"', '\\bigr':'span class="bigsymbol"',
'\\centering':'span class="align-center"',
'\\ensuremath':'span class="ensuremath"',
'\\hphantom':'span class="phantom"',
'\\noindent':'span class="noindent"',
'\\overbrace':'span class="overbrace"',
'\\overline':'span class="overline"',
'\\phantom':'span class="phantom"',
'\\underbrace':'span class="underbrace"', '\\underline':'u',
'\\vphantom':'span class="phantom"',
}
spacedcommands = {
'\\Bot':'⫫', '\\Doteq':'≑', '\\DownArrowBar':'⤓',
'\\DownLeftTeeVector':'⥞', '\\DownLeftVectorBar':'⥖',
'\\DownRightTeeVector':'⥟', '\\DownRightVectorBar':'⥗',
'\\Equal':'⩵', '\\LeftArrowBar':'⇤', '\\LeftDownTeeVector':'⥡',
'\\LeftDownVectorBar':'⥙', '\\LeftTeeVector':'⥚',
'\\LeftTriangleBar':'⧏', '\\LeftUpTeeVector':'⥠',
'\\LeftUpVectorBar':'⥘', '\\LeftVectorBar':'⥒',
'\\Leftrightarrow':'⇔', '\\Longmapsfrom':'⟽', '\\Longmapsto':'⟾',
'\\MapsDown':'↧', '\\MapsUp':'↥', '\\Nearrow':'⇗',
'\\NestedGreaterGreater':'⪢', '\\NestedLessLess':'⪡',
'\\NotGreaterLess':'≹', '\\NotGreaterTilde':'≵',
'\\NotLessTilde':'≴', '\\Nwarrow':'⇖', '\\Proportion':'∷',
'\\RightArrowBar':'⇥', '\\RightDownTeeVector':'⥝',
'\\RightDownVectorBar':'⥕', '\\RightTeeVector':'⥛',
'\\RightTriangleBar':'⧐', '\\RightUpTeeVector':'⥜',
'\\RightUpVectorBar':'⥔', '\\RightVectorBar':'⥓',
'\\Rightarrow':'⇒', '\\Same':'⩶', '\\Searrow':'⇘',
'\\Swarrow':'⇙', '\\Top':'⫪', '\\UpArrowBar':'⤒', '\\VDash':'⊫',
'\\approx':'≈', '\\approxeq':'≊', '\\backsim':'∽', '\\barin':'⋶',
'\\barleftharpoon':'⥫', '\\barrightharpoon':'⥭', '\\bij':'⤖',
'\\coloneq':'≔', '\\corresponds':'≙', '\\curlyeqprec':'⋞',
'\\curlyeqsucc':'⋟', '\\dashrightarrow':'⇢', '\\dlsh':'↲',
'\\downdownharpoons':'⥥', '\\downuparrows':'⇵',
'\\downupharpoons':'⥯', '\\drsh':'↳', '\\eqslantgtr':'⪖',
'\\eqslantless':'⪕', '\\equiv':'≡', '\\ffun':'⇻', '\\finj':'⤕',
'\\ge':'≥', '\\geq':'≥', '\\ggcurly':'⪼', '\\gnapprox':'⪊',
'\\gneq':'⪈', '\\gtrapprox':'⪆', '\\hash':'⋕', '\\iddots':'⋰',
'\\implies':' ⇒ ', '\\in':'∈', '\\le':'≤', '\\leftarrow':'←',
'\\leftarrowtriangle':'⇽', '\\leftbarharpoon':'⥪',
'\\leftrightarrowtriangle':'⇿', '\\leftrightharpoon':'⥊',
'\\leftrightharpoondown':'⥐', '\\leftrightharpoonup':'⥎',
'\\leftrightsquigarrow':'↭', '\\leftslice':'⪦',
'\\leftsquigarrow':'⇜', '\\leftupdownharpoon':'⥑', '\\leq':'≤',
'\\lessapprox':'⪅', '\\llcurly':'⪻', '\\lnapprox':'⪉',
'\\lneq':'⪇', '\\longmapsfrom':'⟻', '\\multimapboth':'⧟',
'\\multimapdotbothA':'⊶', '\\multimapdotbothB':'⊷',
'\\multimapinv':'⟜', '\\nVdash':'⊮', '\\ne':'≠', '\\neq':'≠',
'\\ngeq':'≱', '\\nleq':'≰', '\\nni':'∌', '\\not\\in':'∉',
'\\notasymp':'≭', '\\npreceq':'⋠', '\\nsqsubseteq':'⋢',
'\\nsqsupseteq':'⋣', '\\nsubset':'⊄', '\\nsucceq':'⋡',
'\\pfun':'⇸', '\\pinj':'⤔', '\\precapprox':'⪷', '\\preceqq':'⪳',
'\\precnapprox':'⪹', '\\precnsim':'⋨', '\\propto':'∝',
'\\psur':'⤀', '\\rightarrow':'→', '\\rightarrowtriangle':'⇾',
'\\rightbarharpoon':'⥬', '\\rightleftharpoon':'⥋',
'\\rightslice':'⪧', '\\rightsquigarrow':'⇝',
'\\rightupdownharpoon':'⥏', '\\sim':'~', '\\strictfi':'⥼',
'\\strictif':'⥽', '\\subset':'⊂', '\\subseteq':'⊆',
'\\subsetneq':'⊊', '\\succapprox':'⪸', '\\succeqq':'⪴',
'\\succnapprox':'⪺', '\\supset':'⊃', '\\supseteq':'⊇',
'\\supsetneq':'⊋', '\\times':'×', '\\to':'→',
'\\updownarrows':'⇅', '\\updownharpoons':'⥮', '\\upupharpoons':'⥣',
'\\vartriangleleft':'⊲', '\\vartriangleright':'⊳',
}
starts = {
'beginafter':'}', 'beginbefore':'\\begin{', 'bracket':'{',
'command':'\\', 'comment':'%', 'complex':'\\[', 'simple':'$',
'squarebracket':'[', 'unnumbered':'*',
}
symbolfunctions = {
'^':'sup', '_':'sub',
}
textfunctions = {
'\\mbox':'span class="mbox"', '\\text':'span class="text"',
'\\textbf':'b', '\\textipa':'span class="textipa"', '\\textit':'i',
'\\textnormal':'span class="textnormal"',
'\\textrm':'span class="textrm"',
'\\textsc':'span class="versalitas"',
'\\textsf':'span class="textsf"', '\\textsl':'i', '\\texttt':'tt',
'\\textup':'span class="normal"',
}
unmodified = {
'characters':['.','*','€','(',')','[',']','·','!',';','|','§','"',],
}
urls = {
'googlecharts':'http://chart.googleapis.com/chart?cht=tx&chl=',
}
class GeneralConfig(object):
"Configuration class from elyxer.config file"
version = {
'date':'2015-02-26', 'lyxformat':'413', 'number':'1.2.5',
}
class HeaderConfig(object):
"Configuration class from elyxer.config file"
parameters = {
'beginpreamble':'\\begin_preamble', 'branch':'\\branch',
'documentclass':'\\textclass', 'endbranch':'\\end_branch',
'endpreamble':'\\end_preamble', 'language':'\\language',
'lstset':'\\lstset', 'outputchanges':'\\output_changes',
'paragraphseparation':'\\paragraph_separation',
'pdftitle':'\\pdf_title', 'secnumdepth':'\\secnumdepth',
'tocdepth':'\\tocdepth',
}
styles = {
'article':['article','aastex','aapaper','acmsiggraph','sigplanconf','achemso','amsart','apa','arab-article','armenian-article','article-beamer','chess','dtk','elsarticle','heb-article','IEEEtran','iopart','kluwer','scrarticle-beamer','scrartcl','extarticle','paper','mwart','revtex4','spie','svglobal3','ltugboat','agu-dtd','jgrga','agums','entcs','egs','ijmpc','ijmpd','singlecol-new','doublecol-new','isprs','tarticle','jsarticle','jarticle','jss','literate-article','siamltex','cl2emult','llncs','svglobal','svjog','svprobth',],
'book':['book','amsbook','scrbook','extbook','tufte-book','report','extreport','scrreprt','memoir','tbook','jsbook','jbook','mwbk','svmono','svmult','treport','jreport','mwrep',],
}
class ImageConfig(object):
"Configuration class from elyxer.config file"
converters = {
'imagemagick':'convert[ -density $scale][ -define $format:use-cropbox=true] "$input" "$output"',
'inkscape':'inkscape "$input" --export-png="$output"',
'lyx':'lyx -C "$input" "$output"',
}
cropboxformats = {
'.eps':'ps', '.pdf':'pdf', '.ps':'ps',
}
formats = {
'default':'.png', 'vector':['.svg','.eps',],
}
class LayoutConfig(object):
"Configuration class from elyxer.config file"
groupable = {
'allowed':['StringContainer','Constant','TaggedText','Align','TextFamily','EmphaticText','VersalitasText','BarredText','SizeText','ColorText','LangLine','Formula',],
}
class NewfangleConfig(object):
"Configuration class from elyxer.config file"
constants = {
'chunkref':'chunkref{', 'endcommand':'}', 'endmark':'>',
'startcommand':'\\', 'startmark':'=<',
}
class NumberingConfig(object):
"Configuration class from elyxer.config file"
layouts = {
'ordered':['Chapter','Section','Subsection','Subsubsection','Paragraph',],
'roman':['Part','Book',],
}
sequence = {
'symbols':['*','**','†','‡','§','§§','¶','¶¶','#','##',],
}
class StyleConfig(object):
"Configuration class from elyxer.config file"
hspaces = {
'\\enskip{}':' ', '\\hfill{}':'<span class="hfill"> </span>',
'\\hspace*{\\fill}':' ', '\\hspace*{}':'', '\\hspace{}':' ',
'\\negthinspace{}':'', '\\qquad{}':' ', '\\quad{}':' ',
'\\space{}':' ', '\\thinspace{}':' ', '~':' ',
}
quotes = {
'ald':'»', 'als':'›', 'ard':'«', 'ars':'‹', 'eld':'“',
'els':'‘', 'erd':'”', 'ers':'’', 'fld':'«',
'fls':'‹', 'frd':'»', 'frs':'›', 'gld':'„', 'gls':'‚',
'grd':'“', 'grs':'‘', 'pld':'„', 'pls':'‚', 'prd':'”',
'prs':'’', 'sld':'”', 'srd':'”',
}
referenceformats = {
'eqref':'(@↕)', 'formatted':'¶↕', 'nameref':'$↕', 'pageref':'#↕',
'ref':'@↕', 'vpageref':'on-page#↕', 'vref':'@on-page#↕',
}
size = {
'ignoredtexts':['col','text','line','page','theight','pheight',],
}
vspaces = {
'bigskip':'<div class="bigskip"> </div>',
'defskip':'<div class="defskip"> </div>',
'medskip':'<div class="medskip"> </div>',
'smallskip':'<div class="smallskip"> </div>',
'vfill':'<div class="vfill"> </div>',
}
class TOCConfig(object):
"Configuration class from elyxer.config file"
extractplain = {
'allowed':['StringContainer','Constant','TaggedText','Align','TextFamily','EmphaticText','VersalitasText','BarredText','SizeText','ColorText','LangLine','Formula',],
'cloned':['',], 'extracted':['',],
}
extracttitle = {
'allowed':['StringContainer','Constant','Space',],
'cloned':['TextFamily','EmphaticText','VersalitasText','BarredText','SizeText','ColorText','LangLine','Formula',],
'extracted':['PlainLayout','TaggedText','Align','Caption','StandardLayout','FlexInset',],
}
class TagConfig(object):
"Configuration class from elyxer.config file"
barred = {
'under':'u',
}
family = {
'sans':'span class="sans"', 'typewriter':'tt',
}
flex = {
'CharStyle:Code':'span class="code"',
'CharStyle:MenuItem':'span class="menuitem"',
'Code':'span class="code"', 'MenuItem':'span class="menuitem"',
'Noun':'span class="noun"', 'Strong':'span class="strong"',
}
group = {
'layouts':['Quotation','Quote',],
}
layouts = {
'Center':'div', 'Chapter':'h?', 'Date':'h2', 'Paragraph':'div',
'Part':'h1', 'Quotation':'blockquote', 'Quote':'blockquote',
'Section':'h?', 'Subsection':'h?', 'Subsubsection':'h?',
}
listitems = {
'Enumerate':'ol', 'Itemize':'ul',
}
notes = {
'Comment':'', 'Greyedout':'span class="greyedout"', 'Note':'',
}
script = {
'subscript':'sub', 'superscript':'sup',
}
shaped = {
'italic':'i', 'slanted':'i', 'smallcaps':'span class="versalitas"',
}
class TranslationConfig(object):
"Configuration class from elyxer.config file"
constants = {
'Appendix':'Appendix', 'Book':'Book', 'Chapter':'Chapter',
'Paragraph':'Paragraph', 'Part':'Part', 'Section':'Section',
'Subsection':'Subsection', 'Subsubsection':'Subsubsection',
'abstract':'Abstract', 'bibliography':'Bibliography',
'figure':'figure', 'float-algorithm':'Algorithm ',
'float-figure':'Figure ', 'float-listing':'Listing ',
'float-table':'Table ', 'float-tableau':'Tableau ',
'footnotes':'Footnotes', 'generated-by':'Document generated by ',
'generated-on':' on ', 'index':'Index',
'jsmath-enable':'Please enable JavaScript on your browser.',
'jsmath-requires':' requires JavaScript to correctly process the mathematics on this page. ',
'jsmath-warning':'Warning: ', 'list-algorithm':'List of Algorithms',
'list-figure':'List of Figures', 'list-table':'List of Tables',
'list-tableau':'List of Tableaux', 'main-page':'Main page',
'next':'Next', 'nomenclature':'Nomenclature',
'on-page':' on page ', 'prev':'Prev', 'references':'References',
'toc':'Table of Contents', 'toc-for':'Contents for ', 'up':'Up',
}
languages = {
'american':'en', 'british':'en', 'deutsch':'de', 'dutch':'nl',
'english':'en', 'french':'fr', 'ngerman':'de', 'russian':'ru',
'spanish':'es',
}
class CommandLineParser(object):
"A parser for runtime options"
def __init__(self, options):
self.options = options
def parseoptions(self, args):
"Parse command line options"
if len(args) == 0:
return None
while len(args) > 0 and args[0].startswith('--'):
key, value = self.readoption(args)
if not key:
return 'Option ' + value + ' not recognized'
if not value:
return 'Option ' + key + ' needs a value'
setattr(self.options, key, value)
return None
def readoption(self, args):
"Read the key and value for an option"
arg = args[0][2:]
del args[0]
if '=' in arg:
key = self.readequalskey(arg, args)
else:
key = arg.replace('-', '')
if not hasattr(self.options, key):
return None, key
current = getattr(self.options, key)
if isinstance(current, bool):
return key, True
# read value
if len(args) == 0:
return key, None
if args[0].startswith('"'):
initial = args[0]
del args[0]
return key, self.readquoted(args, initial)
value = args[0].decode('utf-8')
del args[0]
if isinstance(current, list):
current.append(value)
return key, current
return key, value
def readquoted(self, args, initial):
"Read a value between quotes"
value = initial[1:]
while len(args) > 0 and not args[0].endswith('"') and not args[0].startswith('--'):
value += ' ' + args[0]
del args[0]
if len(args) == 0 or args[0].startswith('--'):
return None
# take the last fragment without its closing quote
value += ' ' + args[0][:-1]
return value
def readequalskey(self, arg, args):
"Read a key using equals"
split = arg.split('=', 1)
key = split[0]
value = split[1]
args.insert(0, value)
return key
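# Usage sketch (illustrative; Python 2, since readoption() calls
# args[0].decode above):
#   class Opts(object):
#       debug = False
#       css = []
#   err = CommandLineParser(Opts).parseoptions(['--debug', '--css', 'lyx.css'])
#   # err is None on success; Opts.debug is True and Opts.css == ['lyx.css']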
class Options(object):
"A set of runtime options"
instance = None
location = None
nocopy = False
copyright = False
debug = False
quiet = False
version = False
hardversion = False
versiondate = False
html = False
help = False
showlines = True
str = False
iso885915 = False
css = []
favicon = ''
title = None
directory = None
destdirectory = None
toc = False
toctarget = ''
tocfor = None
forceformat = None
lyxformat = False
target = None
splitpart = None
memory = True
lowmem = False
nobib = False
converter = 'imagemagick'
raw = False
jsmath = None
mathjax = None
nofooter = False
simplemath = False
template = None
noconvert = False
notoclabels = False
letterfoot = True
numberfoot = False
symbolfoot = False
hoverfoot = True
marginfoot = False
endfoot = False
supfoot = True
alignfoot = False
footnotes = None
imageformat = None
copyimages = False
googlecharts = False
embedcss = []
branches = dict()
def parseoptions(self, args):
"Parse command line options"
Options.location = args[0]
del args[0]
parser = CommandLineParser(Options)
result = parser.parseoptions(args)
if result:
Trace.error(result)
self.usage()
self.processoptions()
def processoptions(self):
"Process all options parsed."
if Options.help:
self.usage()
if Options.version:
self.showversion()
if Options.hardversion:
self.showhardversion()
if Options.versiondate:
self.showversiondate()
if Options.lyxformat:
self.showlyxformat()
if Options.splitpart:
try:
Options.splitpart = int(Options.splitpart)
if Options.splitpart <= 0:
Trace.error('--splitpart requires a number bigger than zero')
self.usage()
except ValueError:
Trace.error('--splitpart needs a numeric argument, not ' + Options.splitpart)
self.usage()
if Options.lowmem or Options.toc or Options.tocfor:
Options.memory = False
self.parsefootnotes()
if Options.forceformat and not Options.imageformat:
Options.imageformat = Options.forceformat
if Options.imageformat == 'copy':
Options.copyimages = True
if Options.css == []:
Options.css = ['http://elyxer.nongnu.org/lyx.css']
# no default favicon: an empty Options.favicon is simply left unset
if Options.html:
Options.simplemath = True
if Options.toc and not Options.tocfor:
Trace.error('Option --toc is deprecated; use --tocfor "page" instead')
Options.tocfor = Options.toctarget
if Options.nocopy:
Trace.error('Option --nocopy is deprecated; it is no longer needed')
if Options.jsmath:
Trace.error('Option --jsmath is deprecated; use --mathjax instead')
# set in Trace if necessary
for param in dir(Trace):
if param.endswith('mode'):
setattr(Trace, param, getattr(self, param[:-4]))
def usage(self):
"Show correct usage"
Trace.error('Usage: ' + os.path.basename(Options.location) + ' [options] [filein] [fileout]')
Trace.error('Convert LyX input file "filein" to HTML file "fileout".')
Trace.error('If filein (or fileout) is not given use standard input (or output).')
Trace.error('Main program of the eLyXer package (http://elyxer.nongnu.org/).')
self.showoptions()
def parsefootnotes(self):
"Parse footnotes options."
if not Options.footnotes:
return
Options.marginfoot = False
Options.letterfoot = False
Options.hoverfoot = False
options = Options.footnotes.split(',')
for option in options:
footoption = option + 'foot'
if hasattr(Options, footoption):
setattr(Options, footoption, True)
else:
Trace.error('Unknown footnotes option: ' + option)
if not Options.endfoot and not Options.marginfoot and not Options.hoverfoot:
Options.hoverfoot = True
if not Options.numberfoot and not Options.symbolfoot:
Options.letterfoot = True
def showoptions(self):
"Show all possible options"
Trace.error(' Common options:')
Trace.error(' --help: show this online help')
Trace.error(' --quiet: disables all runtime messages')
Trace.error('')
Trace.error(' Advanced options:')
Trace.error(' --debug: enable debugging messages (for developers)')
Trace.error(' --version: show version number and release date')
Trace.error(' --lyxformat: return the highest LyX version supported')
Trace.error(' Options for HTML output:')
Trace.error(' --title "title": set the generated page title')
Trace.error(' --css "file.css": use a custom CSS file')
Trace.error(' --embedcss "file.css": embed styles from a CSS file into the output')
Trace.error(' --favicon "icon.ico": insert the specified favicon in the header.')
Trace.error(' --html: output HTML 4.0 instead of the default XHTML')
Trace.error(' --unicode: full Unicode output')
Trace.error(' --iso885915: output a document with ISO-8859-15 encoding')
Trace.error(' --nofooter: remove the footer "generated by eLyXer"')
Trace.error(' --simplemath: do not generate fancy math constructions')
Trace.error(' Options for image output:')
Trace.error(' --directory "img_dir": look for images in the specified directory')
Trace.error(' --destdirectory "dest": put converted images into this directory')
Trace.error(' --imageformat ".ext": image output format, or "copy" to copy images')
Trace.error(' --noconvert: do not convert images, use in original locations')
Trace.error(' --converter "inkscape": use an alternative program to convert images')
Trace.error(' Options for footnote display:')
Trace.error(' --numberfoot: mark footnotes with numbers instead of letters')
Trace.error(' --symbolfoot: mark footnotes with symbols (*, **...)')
Trace.error(' --hoverfoot: show footnotes as hovering text (default)')
Trace.error(' --marginfoot: show footnotes on the page margin')
Trace.error(' --endfoot: show footnotes at the end of the page')
Trace.error(' --supfoot: use superscript for footnote markers (default)')
Trace.error(' --alignfoot: use aligned text for footnote markers')
Trace.error(' --footnotes "options": specify several comma-separated footnotes options')
Trace.error(' Available options are: "number", "symbol", "hover", "margin", "end",')
Trace.error(' "sup", "align"')
Trace.error(' Advanced output options:')
Trace.error(' --splitpart "depth": split the resulting webpage at the given depth')
Trace.error(' --tocfor "page": generate a TOC that points to the given page')
Trace.error(' --target "frame": make all links point to the given frame')
Trace.error(' --notoclabels: omit the part labels in the TOC, such as Chapter')
Trace.error(' --lowmem: do the conversion on the fly (conserve memory)')
Trace.error(' --raw: generate HTML without header or footer.')
Trace.error(' --mathjax remote: use MathJax remotely to display equations')
Trace.error(' --mathjax "URL": use MathJax from the given URL to display equations')
Trace.error(' --googlecharts: use Google Charts to generate formula images')
Trace.error(' --template "file": use a template, put everything in <!--$content-->')
Trace.error(' --copyright: add a copyright notice at the bottom')
Trace.error(' Deprecated options:')
Trace.error(' --toc: (deprecated) create a table of contents')
Trace.error(' --toctarget "page": (deprecated) generate a TOC for the given page')
Trace.error(' --nocopy: (deprecated) maintained for backwards compatibility')
Trace.error(' --jsmath "URL": use jsMath from the given URL to display equations')
sys.exit()
def showversion(self):
"Return the current eLyXer version string"
string = 'eLyXer version ' + GeneralConfig.version['number']
string += ' (' + GeneralConfig.version['date'] + ')'
Trace.error(string)
sys.exit()
def showhardversion(self):
"Return just the version string"
Trace.message(GeneralConfig.version['number'])
sys.exit()
def showversiondate(self):
"Return just the version dte"
Trace.message(GeneralConfig.version['date'])
sys.exit()
def showlyxformat(self):
"Return just the lyxformat parameter"
Trace.message(GeneralConfig.version['lyxformat'])
sys.exit()
class BranchOptions(object):
"A set of options for a branch"
def __init__(self, name):
self.name = name
self.options = {'color':'#ffffff'}
def set(self, key, value):
"Set a branch option"
if not key.startswith(ContainerConfig.string['startcommand']):
Trace.error('Invalid branch option ' + key)
return
key = key.replace(ContainerConfig.string['startcommand'], '')
self.options[key] = value
def isselected(self):
"Return if the branch is selected"
if not 'selected' in self.options:
return False
return self.options['selected'] == '1'
def __unicode__(self):
"String representation"
return 'options for ' + self.name + ': ' + str(self.options)
import urllib.request, urllib.parse, urllib.error
class Cloner(object):
"An object used to clone other objects."
def clone(cls, original):
"Return an exact copy of an object."
"The original object must have an empty constructor."
return cls.create(original.__class__)
def create(cls, type):
"Create an object of a given class."
clone = type.__new__(type)
clone.__init__()
return clone
clone = classmethod(clone)
create = classmethod(create)
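# Note: Cloner.clone() builds a fresh instance through the empty constructor
# and copies no attributes; callers such as ContainerExtractor.safeclone()
# below re-populate the fields they need.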
class ContainerExtractor(object):
"A class to extract certain containers."
def __init__(self, config):
"The config parameter is a map containing three lists: allowed, copied and extracted."
"Each of the three is a list of class names for containers."
"Allowed containers are included as is into the result."
"Cloned containers are cloned and placed into the result."
"Extracted containers are looked into."
"All other containers are silently ignored."
self.allowed = config['allowed']
self.cloned = config['cloned']
self.extracted = config['extracted']
def extract(self, container):
"Extract a group of selected containers from elyxer.a container."
list = []
locate = lambda c: c.__class__.__name__ in self.allowed + self.cloned
recursive = lambda c: c.__class__.__name__ in self.extracted
process = lambda c: self.process(c, list)
container.recursivesearch(locate, recursive, process)
return list
def process(self, container, list):
"Add allowed containers, clone cloned containers and add the clone."
name = container.__class__.__name__
if name in self.allowed:
list.append(container)
elif name in self.cloned:
list.append(self.safeclone(container))
else:
Trace.error('Unknown container class ' + name)
def safeclone(self, container):
"Return a new container with contents only in a safe list, recursively."
clone = Cloner.clone(container)
clone.output = container.output
clone.contents = self.extract(container)
return clone
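# Sketch of typical use with one of the config maps defined earlier:
#   extractor = ContainerExtractor(TOCConfig.extractplain)
#   plain = extractor.extract(container)  # flat list of allowed/cloned items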
class Parser(object):
"A generic parser"
def __init__(self):
self.begin = 0
self.parameters = dict()
def parseheader(self, reader):
"Parse the header"
header = reader.currentline().split()
reader.nextline()
self.begin = reader.linenumber
return header
def parseparameter(self, reader):
"Parse a parameter"
if reader.currentline().strip().startswith('<'):
key, value = self.parsexml(reader)
self.parameters[key] = value
return
split = reader.currentline().strip().split(' ', 1)
reader.nextline()
if len(split) == 0:
return
key = split[0]
if len(split) == 1:
self.parameters[key] = True
return
if not '"' in split[1]:
self.parameters[key] = split[1].strip()
return
doublesplit = split[1].split('"')
self.parameters[key] = doublesplit[1]
def parsexml(self, reader):
"Parse a parameter in xml form: <param attr1=value...>"
strip = reader.currentline().strip()
reader.nextline()
if not strip.endswith('>'):
Trace.error('XML parameter ' + strip + ' should be <...>')
split = strip[1:-1].split()
if len(split) == 0:
Trace.error('Empty XML parameter <>')
return None, None
key = split[0]
del split[0]
if len(split) == 0:
return key, dict()
attrs = dict()
for attr in split:
if not '=' in attr:
Trace.error('Erroneous attribute for ' + key + ': ' + attr)
attr += '="0"'
parts = attr.split('=')
attrkey = parts[0]
value = parts[1].split('"')[1]
attrs[attrkey] = value
return key, attrs
def parseending(self, reader, process):
"Parse until the current ending is found"
if not self.ending:
Trace.error('No ending for ' + str(self))
return
while not reader.currentline().startswith(self.ending):
process()
def parsecontainer(self, reader, contents):
container = self.factory.createcontainer(reader)
if container:
container.parent = self.parent
contents.append(container)
def __unicode__(self):
"Return a description"
return self.__class__.__name__ + ' (' + str(self.begin) + ')'
class LoneCommand(Parser):
"A parser for just one command line"
def parse(self,reader):
"Read nothing"
return []
class TextParser(Parser):
"A parser for a command and a bit of text"
stack = []
def __init__(self, container):
Parser.__init__(self)
self.ending = None
if container.__class__.__name__ in ContainerConfig.endings:
self.ending = ContainerConfig.endings[container.__class__.__name__]
self.endings = []
def parse(self, reader):
"Parse lines as long as they are text"
TextParser.stack.append(self.ending)
self.endings = TextParser.stack + [ContainerConfig.endings['Layout'],
ContainerConfig.endings['Inset'], self.ending]
contents = []
while not self.isending(reader):
self.parsecontainer(reader, contents)
return contents
def isending(self, reader):
"Check if text is ending"
current = reader.currentline().split()
if len(current) == 0:
return False
if current[0] in self.endings:
if current[0] in TextParser.stack:
TextParser.stack.remove(current[0])
else:
TextParser.stack = []
return True
return False
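# Note: TextParser.stack is a class attribute shared by all instances, so a
# nested text container may consume an ending pushed by an outer one; when an
# ending arrives that is not on the stack, isending() discards the whole
# stack (see above).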
class ExcludingParser(Parser):
"A parser that excludes the final line"
def parse(self, reader):
"Parse everything up to (and excluding) the final line"
contents = []
self.parseending(reader, lambda: self.parsecontainer(reader, contents))
return contents
class BoundedParser(ExcludingParser):
"A parser bound by a final line"
def parse(self, reader):
"Parse everything, including the final line"
contents = ExcludingParser.parse(self, reader)
# skip last line
reader.nextline()
return contents
class BoundedDummy(Parser):
"A bound parser that ignores everything"
def parse(self, reader):
"Parse the contents of the container"
self.parseending(reader, lambda: reader.nextline())
# skip last line
reader.nextline()
return []
class StringParser(Parser):
"Parses just a string"
def parseheader(self, reader):
"Do nothing, just take note"
self.begin = reader.linenumber + 1
return []
def parse(self, reader):
"Parse a single line"
contents = reader.currentline()
reader.nextline()
return contents
class InsetParser(BoundedParser):
"Parses a LyX inset"
def parse(self, reader):
"Parse inset parameters into a dictionary"
startcommand = ContainerConfig.string['startcommand']
while reader.currentline() != '' and not reader.currentline().startswith(startcommand):
self.parseparameter(reader)
return BoundedParser.parse(self, reader)
class ContainerOutput(object):
"The generic HTML output for a container."
def gethtml(self, container):
"Show an error."
Trace.error('gethtml() not implemented for ' + str(self))
def isempty(self):
"Decide if the output is empty: by default, not empty."
return False
class EmptyOutput(ContainerOutput):
def gethtml(self, container):
"Return empty HTML code."
return []
def isempty(self):
"This output is particularly empty."
return True
class FixedOutput(ContainerOutput):
"Fixed output"
def gethtml(self, container):
"Return constant HTML code"
return container.html
class ContentsOutput(ContainerOutput):
"Outputs the contents converted to HTML"
def gethtml(self, container):
"Return the HTML code"
html = []
    if container.contents is None:
return html
for element in container.contents:
if not hasattr(element, 'gethtml'):
Trace.error('No html in ' + element.__class__.__name__ + ': ' + str(element))
return html
html += element.gethtml()
return html
class TaggedOutput(ContentsOutput):
"Outputs an HTML tag surrounding the contents."
tag = None
breaklines = False
empty = False
def settag(self, tag, breaklines=False, empty=False):
"Set the value for the tag and other attributes."
self.tag = tag
if breaklines:
self.breaklines = breaklines
if empty:
self.empty = empty
return self
def setbreaklines(self, breaklines):
"Set the value for breaklines."
self.breaklines = breaklines
return self
def gethtml(self, container):
"Return the HTML code."
if self.empty:
return [self.selfclosing(container)]
html = [self.open(container)]
html += ContentsOutput.gethtml(self, container)
html.append(self.close(container))
return html
def open(self, container):
"Get opening line."
if not self.checktag():
return ''
open = '<' + self.tag + '>'
if self.breaklines:
return open + '\n'
return open
def close(self, container):
"Get closing line."
if not self.checktag():
return ''
close = '</' + self.tag.split()[0] + '>'
if self.breaklines:
return '\n' + close + '\n'
return close
def selfclosing(self, container):
"Get self-closing line."
if not self.checktag():
return ''
selfclosing = '<' + self.tag + '/>'
if self.breaklines:
return selfclosing + '\n'
return selfclosing
def checktag(self):
"Check that the tag is valid."
if not self.tag:
      Trace.error('No tag in ' + str(self))
return False
if self.tag == '':
return False
return True
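# Example (illustrative): TaggedOutput().settag('div class="x"', True) wraps
# the contents as '<div class="x">\n' ... '\n</div>\n'; note that close() only
# keeps the first word of the tag, so attributes never leak into the closer.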
class FilteredOutput(ContentsOutput):
"Returns the output in the contents, but filtered:"
"some strings are replaced by others."
def __init__(self):
"Initialize the filters."
self.filters = []
def addfilter(self, original, replacement):
"Add a new filter: replace the original by the replacement."
self.filters.append((original, replacement))
def gethtml(self, container):
"Return the HTML code"
result = []
html = ContentsOutput.gethtml(self, container)
for line in html:
result.append(self.filter(line))
return result
def filter(self, line):
"Filter a single line with all available filters."
for original, replacement in self.filters:
if original in line:
line = line.replace(original, replacement)
return line
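# Minimal usage sketch (hypothetical filter values):
#   output = FilteredOutput()
#   output.addfilter('--', '&mdash;')
# gethtml() then returns the contents with every '--' replaced by '&mdash;'.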
class StringOutput(ContainerOutput):
"Returns a bare string as output"
def gethtml(self, container):
"Return a bare string"
return [container.string]
import sys
import codecs
class LineReader(object):
"Reads a file line by line"
def __init__(self, filename):
    # accept either an open file-like object or a file name
    if hasattr(filename, 'read'):
      self.file = filename
    else:
      self.file = codecs.open(filename, 'r', 'utf-8')
self.linenumber = 1
self.lastline = None
self.current = None
self.mustread = True
self.depleted = False
try:
self.readline()
except UnicodeDecodeError:
# try compressed file
import gzip
self.file = gzip.open(filename, 'rb')
self.readline()
def setstart(self, firstline):
"Set the first line to read."
for i in range(firstline):
self.file.readline()
self.linenumber = firstline
def setend(self, lastline):
"Set the last line to read."
self.lastline = lastline
def currentline(self):
"Get the current line"
if self.mustread:
self.readline()
return self.current
def nextline(self):
"Go to next line"
if self.depleted:
Trace.fatal('Read beyond file end')
self.mustread = True
def readline(self):
"Read a line from elyxer.file"
self.current = self.file.readline()
    if isinstance(self.current, bytes):
      # lines read through the gzip fallback arrive as bytes and must be decoded
      self.current = self.current.decode('utf-8')
if len(self.current) == 0:
self.depleted = True
self.current = self.current.rstrip('\n\r')
self.linenumber += 1
self.mustread = False
Trace.prefix = 'Line ' + str(self.linenumber) + ': '
if self.linenumber % 1000 == 0:
Trace.message('Parsing')
def finished(self):
"Find out if the file is finished"
if self.lastline and self.linenumber == self.lastline:
return True
if self.mustread:
self.readline()
return self.depleted
def close(self):
self.file.close()
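# Reading protocol: currentline() is idempotent until nextline() is called;
# the actual I/O happens lazily in readline() via the mustread flag, so
# callers can peek at the current line any number of times.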
class LineWriter(object):
"Writes a file as a series of lists"
file = False
def __init__(self, filename):
    if hasattr(filename, 'write'):
self.file = filename
self.filename = None
else:
self.filename = filename
def write(self, strings):
"Write a list of strings"
for string in strings:
if not isinstance(string, str):
Trace.error('Not a string: ' + str(string) + ' in ' + str(strings))
return
self.writestring(string)
def writestring(self, string):
"Write a string"
if not self.file:
self.file = codecs.open(self.filename, 'w', "utf-8")
if self.file == sys.stdout and sys.version_info < (3,0):
string = string.encode('utf-8')
self.file.write(string)
def writeline(self, line):
"Write a line to file"
self.writestring(line + '\n')
def close(self):
self.file.close()
class Globable(object):
"""A bit of text which can be globbed (lumped together in bits).
Methods current(), skipcurrent(), checkfor() and isout() have to be
implemented by subclasses."""
leavepending = False
def __init__(self):
self.endinglist = EndingList()
def checkbytemark(self):
"Check for a Unicode byte mark and skip it."
if self.finished():
return
if ord(self.current()) == 0xfeff:
self.skipcurrent()
def isout(self):
"Find out if we are out of the position yet."
Trace.error('Unimplemented isout()')
return True
def current(self):
"Return the current character."
Trace.error('Unimplemented current()')
return ''
def checkfor(self, string):
"Check for the given string in the current position."
Trace.error('Unimplemented checkfor()')
return False
def finished(self):
"Find out if the current text has finished."
if self.isout():
if not self.leavepending:
self.endinglist.checkpending()
return True
return self.endinglist.checkin(self)
def skipcurrent(self):
"Return the current character and skip it."
Trace.error('Unimplemented skipcurrent()')
return ''
def glob(self, currentcheck):
"Glob a bit of text that satisfies a check on the current char."
glob = ''
while not self.finished() and currentcheck():
glob += self.skipcurrent()
return glob
def globalpha(self):
"Glob a bit of alpha text"
return self.glob(lambda: self.current().isalpha())
def globnumber(self):
"Glob a row of digits."
return self.glob(lambda: self.current().isdigit())
def isidentifier(self):
"Return if the current character is alphanumeric or _."
if self.current().isalnum() or self.current() == '_':
return True
return False
def globidentifier(self):
"Glob alphanumeric and _ symbols."
return self.glob(self.isidentifier)
def isvalue(self):
"Return if the current character is a value character:"
"not a bracket or a space."
if self.current().isspace():
return False
if self.current() in '{}()':
return False
return True
def globvalue(self):
"Glob a value: any symbols but brackets."
return self.glob(self.isvalue)
def skipspace(self):
"Skip all whitespace at current position."
return self.glob(lambda: self.current().isspace())
def globincluding(self, magicchar):
"Glob a bit of text up to (including) the magic char."
glob = self.glob(lambda: self.current() != magicchar) + magicchar
self.skip(magicchar)
return glob
def globexcluding(self, excluded):
"Glob a bit of text up until (excluding) any excluded character."
return self.glob(lambda: self.current() not in excluded)
def pushending(self, ending, optional = False):
"Push a new ending to the bottom"
self.endinglist.add(ending, optional)
def popending(self, expected = None):
"Pop the ending found at the current position"
if self.isout() and self.leavepending:
return expected
ending = self.endinglist.pop(self)
if expected and expected != ending:
Trace.error('Expected ending ' + expected + ', got ' + ending)
self.skip(ending)
return ending
def nextending(self):
"Return the next ending in the queue."
nextending = self.endinglist.findending(self)
if not nextending:
return None
return nextending.ending
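# Globbing example (illustrative): over the text 'abc123 x', globalpha()
# returns 'abc', globnumber() then returns '123', and skipspace() consumes
# the blank, leaving the position on 'x'.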
class EndingList(object):
"A list of position endings"
def __init__(self):
self.endings = []
def add(self, ending, optional = False):
"Add a new ending to the list"
self.endings.append(PositionEnding(ending, optional))
def pickpending(self, pos):
"Pick any pending endings from a parse position."
self.endings += pos.endinglist.endings
def checkin(self, pos):
"Search for an ending"
if self.findending(pos):
return True
return False
def pop(self, pos):
"Remove the ending at the current position"
if pos.isout():
Trace.error('No ending out of bounds')
return ''
ending = self.findending(pos)
if not ending:
Trace.error('No ending at ' + pos.current())
return ''
for each in reversed(self.endings):
self.endings.remove(each)
if each == ending:
return each.ending
elif not each.optional:
        Trace.error('Removed non-optional ending ' + str(each))
Trace.error('No endings left')
return ''
def findending(self, pos):
"Find the ending at the current position"
if len(self.endings) == 0:
return None
for index, ending in enumerate(reversed(self.endings)):
if ending.checkin(pos):
return ending
if not ending.optional:
return None
return None
def checkpending(self):
"Check if there are any pending endings"
if len(self.endings) != 0:
Trace.error('Pending ' + str(self) + ' left open')
def __unicode__(self):
"Printable representation"
string = 'endings ['
for ending in self.endings:
string += str(ending) + ','
if len(self.endings) > 0:
string = string[:-1]
return string + ']'
class PositionEnding(object):
"An ending for a parsing position"
def __init__(self, ending, optional):
self.ending = ending
self.optional = optional
def checkin(self, pos):
"Check for the ending"
return pos.checkfor(self.ending)
def __unicode__(self):
"Printable representation"
string = 'Ending ' + self.ending
if self.optional:
string += ' (optional)'
return string
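# Endings form a stack: pushending('}') registers a terminator, finished()
# becomes true when the position reaches it, and popending('}') consumes it.
# Optional endings are silently dropped when an outer ending is found first
# (see EndingList.pop above).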
class Position(Globable):
"""A position in a text to parse.
Including those in Globable, functions to implement by subclasses are:
skip(), identifier(), extract(), isout() and current()."""
def __init__(self):
Globable.__init__(self)
def skip(self, string):
"Skip a string"
Trace.error('Unimplemented skip()')
def identifier(self):
"Return an identifier for the current position."
Trace.error('Unimplemented identifier()')
return 'Error'
def extract(self, length):
"Extract the next string of the given length, or None if not enough text,"
"without advancing the parse position."
Trace.error('Unimplemented extract()')
return None
def checkfor(self, string):
"Check for a string at the given position."
return string == self.extract(len(string))
def checkforlower(self, string):
"Check for a string in lower case."
extracted = self.extract(len(string))
if not extracted:
return False
    return string.lower() == extracted.lower()
def skipcurrent(self):
"Return the current character and skip it."
current = self.current()
self.skip(current)
return current
def __next__(self):
"Advance the position and return the next character."
self.skipcurrent()
return self.current()
def checkskip(self, string):
"Check for a string at the given position; if there, skip it"
if not self.checkfor(string):
return False
self.skip(string)
return True
def error(self, message):
"Show an error message and the position identifier."
Trace.error(message + ': ' + self.identifier())
class TextPosition(Position):
"A parse position based on a raw text."
def __init__(self, text):
"Create the position from elyxer.some text."
Position.__init__(self)
self.pos = 0
self.text = text
self.checkbytemark()
def skip(self, string):
"Skip a string of characters."
self.pos += len(string)
def identifier(self):
"Return a sample of the remaining text."
length = 30
if self.pos + length > len(self.text):
length = len(self.text) - self.pos
return '*' + self.text[self.pos:self.pos + length] + '*'
def isout(self):
"Find out if we are out of the text yet."
return self.pos >= len(self.text)
def current(self):
"Return the current character, assuming we are not out."
return self.text[self.pos]
def extract(self, length):
"Extract the next string of the given length, or None if not enough text."
if self.pos + length > len(self.text):
return None
return self.text[self.pos : self.pos + length]
class FilePosition(Position):
"A parse position based on an underlying file."
def __init__(self, filename):
"Create the position from a file."
Position.__init__(self)
self.reader = LineReader(filename)
self.pos = 0
self.checkbytemark()
def skip(self, string):
"Skip a string of characters."
length = len(string)
while self.pos + length > len(self.reader.currentline()):
length -= len(self.reader.currentline()) - self.pos + 1
self.nextline()
self.pos += length
def currentline(self):
"Get the current line of the underlying file."
return self.reader.currentline()
def nextline(self):
"Go to the next line."
self.reader.nextline()
self.pos = 0
def linenumber(self):
"Return the line number of the file."
return self.reader.linenumber + 1
def identifier(self):
"Return the current line and line number in the file."
    before = self.reader.currentline()[:self.pos]
after = self.reader.currentline()[self.pos:]
    return 'line ' + str(self.linenumber()) + ': ' + before + '*' + after
def isout(self):
"Find out if we are out of the text yet."
if self.pos > len(self.reader.currentline()):
if self.pos > len(self.reader.currentline()) + 1:
Trace.error('Out of the line ' + self.reader.currentline() + ': ' + str(self.pos))
self.nextline()
return self.reader.finished()
def current(self):
"Return the current character, assuming we are not out."
if self.pos == len(self.reader.currentline()):
return '\n'
if self.pos > len(self.reader.currentline()):
Trace.error('Out of the line ' + self.reader.currentline() + ': ' + str(self.pos))
return '*'
return self.reader.currentline()[self.pos]
def extract(self, length):
"Extract the next string of the given length, or None if not enough text."
if self.pos + length > len(self.reader.currentline()):
return None
return self.reader.currentline()[self.pos : self.pos + length]
class Container(object):
"A container for text and objects in a lyx file"
partkey = None
parent = None
begin = None
def __init__(self):
self.contents = list()
def process(self):
"Process contents"
pass
def gethtml(self):
"Get the resulting HTML"
html = self.output.gethtml(self)
if isinstance(html, str):
Trace.error('Raw string ' + html)
html = [html]
return self.escapeall(html)
def escapeall(self, lines):
"Escape all lines in an array according to the output options."
result = []
for line in lines:
if Options.html:
line = self.escape(line, EscapeConfig.html)
if Options.iso885915:
line = self.escape(line, EscapeConfig.iso885915)
line = self.escapeentities(line)
elif not Options.str:
line = self.escape(line, EscapeConfig.nonunicode)
result.append(line)
return result
def escape(self, line, replacements = EscapeConfig.entities):
"Escape a line with replacements from elyxer.a map"
pieces = list(replacements.keys())
# do them in order
pieces.sort()
for piece in pieces:
if piece in line:
line = line.replace(piece, replacements[piece])
return line
def escapeentities(self, line):
"Escape all Unicode characters to HTML entities."
result = ''
pos = TextPosition(line)
while not pos.finished():
if ord(pos.current()) > 128:
codepoint = hex(ord(pos.current()))
if codepoint == '0xd835':
codepoint = hex(ord(next(pos)) + 0xf800)
result += '&#' + codepoint[1:] + ';'
else:
result += pos.current()
pos.skipcurrent()
return result
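  # Worked example: for 'é' (U+00E9), hex(ord(...)) is '0xe9', so the entity
  # emitted is '&#xe9;'. The 0xd835 branch recombines UTF-16 surrogate pairs
  # for mathematical alphanumeric symbols: 0xdc00 + 0xf800 = 0x1d400.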
def searchall(self, type):
"Search for all embedded containers of a given type"
list = []
self.searchprocess(type, lambda container: list.append(container))
return list
def searchremove(self, type):
"Search for all containers of a type and remove them"
list = self.searchall(type)
for container in list:
container.parent.contents.remove(container)
return list
def searchprocess(self, type, process):
"Search for elements of a given type and process them"
self.locateprocess(lambda container: isinstance(container, type), process)
def locateprocess(self, locate, process):
"Search for all embedded containers and process them"
for container in self.contents:
container.locateprocess(locate, process)
if locate(container):
process(container)
def recursivesearch(self, locate, recursive, process):
"Perform a recursive search in the container."
for container in self.contents:
if recursive(container):
container.recursivesearch(locate, recursive, process)
if locate(container):
process(container)
def extracttext(self):
"Extract all text from elyxer.allowed containers."
result = ''
constants = ContainerExtractor(ContainerConfig.extracttext).extract(self)
for constant in constants:
result += constant.string
return result
def group(self, index, group, isingroup):
"Group some adjoining elements into a group"
if index >= len(self.contents):
return
if hasattr(self.contents[index], 'grouped'):
return
while index < len(self.contents) and isingroup(self.contents[index]):
self.contents[index].grouped = True
group.contents.append(self.contents[index])
self.contents.pop(index)
self.contents.insert(index, group)
def remove(self, index):
"Remove a container but leave its contents"
container = self.contents[index]
self.contents.pop(index)
while len(container.contents) > 0:
self.contents.insert(index, container.contents.pop())
def tree(self, level = 0):
"Show in a tree"
Trace.debug(" " * level + str(self))
for container in self.contents:
container.tree(level + 1)
def getparameter(self, name):
"Get the value of a parameter, if present."
if not name in self.parameters:
return None
return self.parameters[name]
def getparameterlist(self, name):
"Get the value of a comma-separated parameter as a list."
paramtext = self.getparameter(name)
if not paramtext:
return []
return paramtext.split(',')
def hasemptyoutput(self):
"Check if the parent's output is empty."
current = self.parent
while current:
if current.output.isempty():
return True
current = current.parent
return False
def __unicode__(self):
"Get a description"
if not self.begin:
return self.__class__.__name__
return self.__class__.__name__ + '@' + str(self.begin)
class BlackBox(Container):
"A container that does not output anything"
def __init__(self):
self.parser = LoneCommand()
self.output = EmptyOutput()
self.contents = []
class LyXFormat(BlackBox):
"Read the lyxformat command"
def process(self):
"Show warning if version < 276"
version = int(self.header[1])
if version < 276:
Trace.error('Warning: unsupported old format version ' + str(version))
if version > int(GeneralConfig.version['lyxformat']):
Trace.error('Warning: unsupported new format version ' + str(version))
class StringContainer(Container):
"A container for a single string"
parsed = None
def __init__(self):
self.parser = StringParser()
self.output = StringOutput()
self.string = ''
def process(self):
"Replace special chars from elyxer.the contents."
if self.parsed:
self.string = self.replacespecial(self.parsed)
self.parsed = None
def replacespecial(self, line):
"Replace all special chars from elyxer.a line"
replaced = self.escape(line, EscapeConfig.entities)
replaced = self.changeline(replaced)
if ContainerConfig.string['startcommand'] in replaced and len(replaced) > 1:
# unprocessed commands
if self.begin:
message = 'Unknown command at ' + str(self.begin) + ': '
else:
message = 'Unknown command: '
Trace.error(message + replaced.strip())
return replaced
def changeline(self, line):
line = self.escape(line, EscapeConfig.chars)
if not ContainerConfig.string['startcommand'] in line:
return line
line = self.escape(line, EscapeConfig.commands)
return line
def extracttext(self):
"Return all text."
return self.string
def __unicode__(self):
"Return a printable representation."
result = 'StringContainer'
if self.begin:
result += '@' + str(self.begin)
ellipsis = '...'
if len(self.string.strip()) <= 15:
ellipsis = ''
return result + ' (' + self.string.strip()[:15] + ellipsis + ')'
class Constant(StringContainer):
"A constant string"
def __init__(self, text):
self.contents = []
self.string = text
self.output = StringOutput()
def __unicode__(self):
return 'Constant: ' + self.string
class TaggedText(Container):
"Text inside a tag"
output = None
def __init__(self):
self.parser = TextParser(self)
self.output = TaggedOutput()
def complete(self, contents, tag, breaklines=False):
"Complete the tagged text and return it"
self.contents = contents
self.output.tag = tag
self.output.breaklines = breaklines
return self
def constant(self, text, tag, breaklines=False):
"Complete the tagged text with a constant"
constant = Constant(text)
return self.complete([constant], tag, breaklines)
def __unicode__(self):
"Return a printable representation."
if not hasattr(self.output, 'tag'):
      return 'Empty tagged text'
if not self.output.tag:
return 'Tagged <unknown tag>'
return 'Tagged <' + self.output.tag + '>'
class DocumentParameters(object):
"Global parameters for the document."
pdftitle = None
indentstandard = False
tocdepth = 10
startinglevel = 0
maxdepth = 10
language = None
bibliography = None
outputchanges = False
displaymode = False
class FormulaParser(Parser):
"Parses a formula"
def parseheader(self, reader):
"See if the formula is inlined"
self.begin = reader.linenumber + 1
type = self.parsetype(reader)
if not type:
reader.nextline()
type = self.parsetype(reader)
if not type:
Trace.error('Unknown formula type in ' + reader.currentline().strip())
return ['unknown']
return [type]
def parsetype(self, reader):
"Get the formula type from the first line."
if reader.currentline().find(FormulaConfig.starts['simple']) >= 0:
return 'inline'
if reader.currentline().find(FormulaConfig.starts['complex']) >= 0:
return 'block'
if reader.currentline().find(FormulaConfig.starts['unnumbered']) >= 0:
return 'block'
if reader.currentline().find(FormulaConfig.starts['beginbefore']) >= 0:
return 'numbered'
return None
def parse(self, reader):
"Parse the formula until the end"
formula = self.parseformula(reader)
while not reader.currentline().startswith(self.ending):
stripped = reader.currentline().strip()
if len(stripped) > 0:
Trace.error('Unparsed formula line ' + stripped)
reader.nextline()
reader.nextline()
return formula
def parseformula(self, reader):
"Parse the formula contents"
simple = FormulaConfig.starts['simple']
if simple in reader.currentline():
rest = reader.currentline().split(simple, 1)[1]
if simple in rest:
# formula is $...$
return self.parsesingleliner(reader, simple, simple)
# formula is multiline $...$
return self.parsemultiliner(reader, simple, simple)
if FormulaConfig.starts['complex'] in reader.currentline():
# formula of the form \[...\]
return self.parsemultiliner(reader, FormulaConfig.starts['complex'],
FormulaConfig.endings['complex'])
beginbefore = FormulaConfig.starts['beginbefore']
beginafter = FormulaConfig.starts['beginafter']
if beginbefore in reader.currentline():
if reader.currentline().strip().endswith(beginafter):
current = reader.currentline().strip()
endsplit = current.split(beginbefore)[1].split(beginafter)
startpiece = beginbefore + endsplit[0] + beginafter
endbefore = FormulaConfig.endings['endbefore']
endafter = FormulaConfig.endings['endafter']
endpiece = endbefore + endsplit[0] + endafter
return startpiece + self.parsemultiliner(reader, startpiece, endpiece) + endpiece
Trace.error('Missing ' + beginafter + ' in ' + reader.currentline())
return ''
begincommand = FormulaConfig.starts['command']
beginbracket = FormulaConfig.starts['bracket']
if begincommand in reader.currentline() and beginbracket in reader.currentline():
endbracket = FormulaConfig.endings['bracket']
return self.parsemultiliner(reader, beginbracket, endbracket)
Trace.error('Formula beginning ' + reader.currentline() + ' is unknown')
return ''
def parsesingleliner(self, reader, start, ending):
"Parse a formula in one line"
line = reader.currentline().strip()
if not start in line:
Trace.error('Line ' + line + ' does not contain formula start ' + start)
return ''
if not line.endswith(ending):
Trace.error('Formula ' + line + ' does not end with ' + ending)
return ''
index = line.index(start)
rest = line[index + len(start):-len(ending)]
reader.nextline()
return rest
def parsemultiliner(self, reader, start, ending):
"Parse a formula in multiple lines"
formula = ''
line = reader.currentline()
if not start in line:
Trace.error('Line ' + line.strip() + ' does not contain formula start ' + start)
return ''
index = line.index(start)
line = line[index + len(start):].strip()
while not line.endswith(ending):
formula += line + '\n'
reader.nextline()
line = reader.currentline()
formula += line[:-len(ending)]
reader.nextline()
return formula
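# Example (illustrative): a display formula written across several lines,
#   \[ x
#   + y \]
# is handled by parsemultiliner(), which accumulates lines until one ends
# with the closing mark and returns just the formula text.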
class MacroParser(FormulaParser):
"A parser for a formula macro."
def parseheader(self, reader):
"See if the formula is inlined"
self.begin = reader.linenumber + 1
return ['inline']
def parse(self, reader):
"Parse the formula until the end"
formula = self.parsemultiliner(reader, self.parent.start, self.ending)
reader.nextline()
return formula
class FormulaBit(Container):
"A bit of a formula"
type = None
size = 1
original = ''
def __init__(self):
"The formula bit type can be 'alpha', 'number', 'font'."
self.contents = []
self.output = ContentsOutput()
def setfactory(self, factory):
"Set the internal formula factory."
self.factory = factory
return self
def add(self, bit):
"Add any kind of formula bit already processed"
self.contents.append(bit)
self.original += bit.original
bit.parent = self
def skiporiginal(self, string, pos):
"Skip a string and add it to the original formula"
self.original += string
if not pos.checkskip(string):
Trace.error('String ' + string + ' not at ' + pos.identifier())
def computesize(self):
"Compute the size of the bit as the max of the sizes of all contents."
if len(self.contents) == 0:
return 1
self.size = max([element.size for element in self.contents])
return self.size
def clone(self):
"Return a copy of itself."
return self.factory.parseformula(self.original)
def __unicode__(self):
"Get a string representation"
return self.__class__.__name__ + ' read in ' + self.original
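# FormulaBit subclasses implement detect(pos) and parsebit(pos); the
# FormulaFactory below tries each registered type in order and keeps the raw
# LaTeX in bit.original, so clone() can rebuild any bit by reparsing it.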
class TaggedBit(FormulaBit):
"A tagged string in a formula"
def constant(self, constant, tag):
"Set the constant and the tag"
self.output = TaggedOutput().settag(tag)
self.add(FormulaConstant(constant))
return self
def complete(self, contents, tag, breaklines = False):
"Set the constant and the tag"
self.contents = contents
self.output = TaggedOutput().settag(tag, breaklines)
return self
def selfcomplete(self, tag):
"Set the self-closing tag, no contents (as in <hr/>)."
self.output = TaggedOutput().settag(tag, empty = True)
return self
class FormulaConstant(Constant):
"A constant string in a formula"
def __init__(self, string):
"Set the constant string"
Constant.__init__(self, string)
self.original = string
self.size = 1
self.type = None
def computesize(self):
"Compute the size of the constant: always 1."
return self.size
def clone(self):
"Return a copy of itself."
return FormulaConstant(self.original)
def __unicode__(self):
"Return a printable representation."
return 'Formula constant: ' + self.string
class RawText(FormulaBit):
"A bit of text inside a formula"
def detect(self, pos):
"Detect a bit of raw text"
return pos.current().isalpha()
def parsebit(self, pos):
"Parse alphabetic text"
alpha = pos.globalpha()
self.add(FormulaConstant(alpha))
self.type = 'alpha'
class FormulaSymbol(FormulaBit):
"A symbol inside a formula"
modified = FormulaConfig.modified
unmodified = FormulaConfig.unmodified['characters']
def detect(self, pos):
"Detect a symbol"
if pos.current() in FormulaSymbol.unmodified:
return True
if pos.current() in FormulaSymbol.modified:
return True
return False
def parsebit(self, pos):
"Parse the symbol"
if pos.current() in FormulaSymbol.unmodified:
self.addsymbol(pos.current(), pos)
return
if pos.current() in FormulaSymbol.modified:
self.addsymbol(FormulaSymbol.modified[pos.current()], pos)
return
Trace.error('Symbol ' + pos.current() + ' not found')
def addsymbol(self, symbol, pos):
"Add a symbol"
self.skiporiginal(pos.current(), pos)
self.contents.append(FormulaConstant(symbol))
class FormulaNumber(FormulaBit):
"A string of digits in a formula"
def detect(self, pos):
"Detect a digit"
return pos.current().isdigit()
def parsebit(self, pos):
"Parse a bunch of digits"
digits = pos.glob(lambda: pos.current().isdigit())
self.add(FormulaConstant(digits))
self.type = 'number'
class Comment(FormulaBit):
"A LaTeX comment: % to the end of the line."
start = FormulaConfig.starts['comment']
def detect(self, pos):
"Detect the %."
return pos.current() == self.start
def parsebit(self, pos):
"Parse to the end of the line."
self.original += pos.globincluding('\n')
class WhiteSpace(FormulaBit):
"Some white space inside a formula."
def detect(self, pos):
"Detect the white space."
return pos.current().isspace()
def parsebit(self, pos):
"Parse all whitespace."
self.original += pos.skipspace()
def __unicode__(self):
"Return a printable representation."
return 'Whitespace: *' + self.original + '*'
class Bracket(FormulaBit):
"A {} bracket inside a formula"
start = FormulaConfig.starts['bracket']
ending = FormulaConfig.endings['bracket']
def __init__(self):
"Create a (possibly literal) new bracket"
FormulaBit.__init__(self)
self.inner = None
def detect(self, pos):
"Detect the start of a bracket"
return pos.checkfor(self.start)
def parsebit(self, pos):
"Parse the bracket"
self.parsecomplete(pos, self.innerformula)
return self
def parsetext(self, pos):
"Parse a text bracket"
self.parsecomplete(pos, self.innertext)
return self
def parseliteral(self, pos):
"Parse a literal bracket"
self.parsecomplete(pos, self.innerliteral)
return self
def parsecomplete(self, pos, innerparser):
"Parse the start and end marks"
if not pos.checkfor(self.start):
Trace.error('Bracket should start with ' + self.start + ' at ' + pos.identifier())
return None
self.skiporiginal(self.start, pos)
pos.pushending(self.ending)
innerparser(pos)
self.original += pos.popending(self.ending)
self.computesize()
def innerformula(self, pos):
"Parse a whole formula inside the bracket"
while not pos.finished():
self.add(self.factory.parseany(pos))
def innertext(self, pos):
"Parse some text inside the bracket, following textual rules."
specialchars = list(FormulaConfig.symbolfunctions.keys())
specialchars.append(FormulaConfig.starts['command'])
specialchars.append(FormulaConfig.starts['bracket'])
specialchars.append(Comment.start)
while not pos.finished():
if pos.current() in specialchars:
self.add(self.factory.parseany(pos))
if pos.checkskip(' '):
self.original += ' '
else:
self.add(FormulaConstant(pos.skipcurrent()))
def innerliteral(self, pos):
"Parse a literal inside the bracket, which does not generate HTML."
self.literal = ''
while not pos.finished() and not pos.current() == self.ending:
if pos.current() == self.start:
self.parseliteral(pos)
else:
self.literal += pos.skipcurrent()
self.original += self.literal
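# Example (illustrative): parsing '{ab{c}}' yields a Bracket whose contents
# hold the inner bits (including a nested Bracket for '{c}'), while
# bit.original keeps the complete '{ab{c}}' text.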
class SquareBracket(Bracket):
"A [] bracket inside a formula"
start = FormulaConfig.starts['squarebracket']
ending = FormulaConfig.endings['squarebracket']
def clone(self):
"Return a new square bracket with the same contents."
bracket = SquareBracket()
bracket.contents = self.contents
return bracket
class MathsProcessor(object):
"A processor for a maths construction inside the FormulaProcessor."
def process(self, contents, index):
"Process an element inside a formula."
Trace.error('Unimplemented process() in ' + str(self))
def __unicode__(self):
"Return a printable description."
return 'Maths processor ' + self.__class__.__name__
class FormulaProcessor(object):
"A processor specifically for formulas."
processors = []
def process(self, bit):
"Process the contents of every formula bit, recursively."
self.processcontents(bit)
self.processinsides(bit)
self.traversewhole(bit)
def processcontents(self, bit):
"Process the contents of a formula bit."
if not isinstance(bit, FormulaBit):
return
bit.process()
for element in bit.contents:
self.processcontents(element)
def processinsides(self, bit):
"Process the insides (limits, brackets) in a formula bit."
if not isinstance(bit, FormulaBit):
return
for index, element in enumerate(bit.contents):
for processor in self.processors:
processor.process(bit.contents, index)
# continue with recursive processing
self.processinsides(element)
def traversewhole(self, formula):
"Traverse over the contents to alter variables and space units."
last = None
for bit, contents in self.traverse(formula):
if bit.type == 'alpha':
self.italicize(bit, contents)
elif bit.type == 'font' and last and last.type == 'number':
bit.contents.insert(0, FormulaConstant(' '))
last = bit
def traverse(self, bit):
"Traverse a formula and yield a flattened structure of (bit, list) pairs."
for element in bit.contents:
if hasattr(element, 'type') and element.type:
yield (element, bit.contents)
elif isinstance(element, FormulaBit):
for pair in self.traverse(element):
yield pair
def italicize(self, bit, contents):
"Italicize the given bit of text."
index = contents.index(bit)
contents[index] = TaggedBit().complete([bit], 'i')
class Formula(Container):
"A LaTeX formula"
def __init__(self):
self.parser = FormulaParser()
self.output = TaggedOutput().settag('span class="formula"')
def process(self):
"Convert the formula to tags"
if self.header[0] == 'inline':
DocumentParameters.displaymode = False
else:
DocumentParameters.displaymode = True
self.output.settag('div class="formula"', True)
if Options.jsmath:
self.jsmath()
elif Options.mathjax:
self.mathjax()
elif Options.googlecharts:
self.googlecharts()
else:
self.classic()
def jsmath(self):
"Make the contents for jsMath."
if self.header[0] != 'inline':
self.output = TaggedOutput().settag('div class="math"')
else:
self.output = TaggedOutput().settag('span class="math"')
self.contents = [Constant(self.parsed)]
def mathjax(self):
"Make the contents for MathJax."
self.output.tag = 'span class="MathJax_Preview"'
tag = 'script type="math/tex'
if self.header[0] != 'inline':
tag += ';mode=display'
self.contents = [TaggedText().constant(self.parsed, tag + '"', True)]
def googlecharts(self):
"Make the contents using Google Charts http://code.google.com/apis/chart/."
url = FormulaConfig.urls['googlecharts'] + urllib.parse.quote_plus(self.parsed)
img = '<img class="chart" src="' + url + '" alt="' + self.parsed + '"/>'
self.contents = [Constant(img)]
def classic(self):
"Make the contents using classic output generation with XHTML and CSS."
whole = FormulaFactory().parseformula(self.parsed)
FormulaProcessor().process(whole)
whole.parent = self
self.contents = [whole]
def parse(self, pos):
"Parse using a parse position instead of self.parser."
if pos.checkskip('$$'):
self.parsedollarblock(pos)
elif pos.checkskip('$'):
self.parsedollarinline(pos)
elif pos.checkskip('\\('):
self.parseinlineto(pos, '\\)')
elif pos.checkskip('\\['):
self.parseblockto(pos, '\\]')
else:
pos.error('Unparseable formula')
self.process()
return self
def parsedollarinline(self, pos):
"Parse a $...$ formula."
self.header = ['inline']
self.parsedollar(pos)
def parsedollarblock(self, pos):
"Parse a $$...$$ formula."
self.header = ['block']
self.parsedollar(pos)
if not pos.checkskip('$'):
pos.error('Formula should be $$...$$, but last $ is missing.')
def parsedollar(self, pos):
"Parse to the next $."
pos.pushending('$')
self.parsed = pos.globexcluding('$')
pos.popending('$')
def parseinlineto(self, pos, limit):
"Parse a \\(...\\) formula."
self.header = ['inline']
self.parseupto(pos, limit)
def parseblockto(self, pos, limit):
"Parse a \\[...\\] formula."
self.header = ['block']
self.parseupto(pos, limit)
def parseupto(self, pos, limit):
"Parse a formula that ends with the given command."
pos.pushending(limit)
self.parsed = pos.glob(lambda: True)
pos.popending(limit)
def __unicode__(self):
"Return a printable representation."
if self.partkey and self.partkey.number:
return 'Formula (' + self.partkey.number + ')'
return 'Unnumbered formula'
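# Example (illustrative): Formula().parse(TextPosition('$x$')) sets the
# header to ['inline'], stores 'x' in self.parsed and then processes it into
# the configured output (MathJax, jsMath, Google Charts or classic XHTML).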
class WholeFormula(FormulaBit):
"Parse a whole formula"
def detect(self, pos):
"Not outside the formula is enough."
return not pos.finished()
def parsebit(self, pos):
"Parse with any formula bit"
while not pos.finished():
self.add(self.factory.parseany(pos))
class FormulaFactory(object):
"Construct bits of formula"
# bit types will be appended later
types = [FormulaSymbol, RawText, FormulaNumber, Bracket, Comment, WhiteSpace]
skippedtypes = [Comment, WhiteSpace]
defining = False
def __init__(self):
"Initialize the map of instances."
self.instances = dict()
def detecttype(self, type, pos):
"Detect a bit of a given type."
if pos.finished():
return False
return self.instance(type).detect(pos)
def instance(self, type):
"Get an instance of the given type."
if not type in self.instances or not self.instances[type]:
self.instances[type] = self.create(type)
return self.instances[type]
def create(self, type):
"Create a new formula bit of the given type."
return Cloner.create(type).setfactory(self)
def clearskipped(self, pos):
"Clear any skipped types."
while not pos.finished():
if not self.skipany(pos):
return
return
def skipany(self, pos):
"Skip any skipped types."
for type in self.skippedtypes:
if self.instance(type).detect(pos):
return self.parsetype(type, pos)
return None
def parseany(self, pos):
"Parse any formula bit at the current location."
for type in self.types + self.skippedtypes:
if self.detecttype(type, pos):
return self.parsetype(type, pos)
Trace.error('Unrecognized formula at ' + pos.identifier())
return FormulaConstant(pos.skipcurrent())
def parsetype(self, type, pos):
"Parse the given type and return it."
bit = self.instance(type)
self.instances[type] = None
returnedbit = bit.parsebit(pos)
if returnedbit:
return returnedbit.setfactory(self)
return bit
def parseformula(self, formula):
"Parse a string of text that contains a whole formula."
pos = TextPosition(formula)
whole = self.create(WholeFormula)
if whole.detect(pos):
whole.parsebit(pos)
return whole
# no formula found
if not pos.finished():
Trace.error('Unknown formula at: ' + pos.identifier())
whole.add(TaggedBit().constant(formula, 'span class="unknown"'))
return whole
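# Example (illustrative): FormulaFactory().parseformula('2x') returns a
# WholeFormula containing a FormulaNumber ('2') followed by a RawText ('x');
# characters that no bit type recognizes become plain FormulaConstants.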
import unicodedata
import gettext
class Translator(object):
"Reads the configuration file and tries to find a translation."
"Otherwise falls back to the messages in the config file."
instance = None
def translate(cls, key):
"Get the translated message for a key."
return cls.instance.getmessage(key)
translate = classmethod(translate)
def __init__(self):
self.translation = None
self.first = True
def findtranslation(self):
"Find the translation for the document language."
self.langcodes = None
if not DocumentParameters.language:
Trace.error('No language in document')
return
if not DocumentParameters.language in TranslationConfig.languages:
Trace.error('Unknown language ' + DocumentParameters.language)
return
if TranslationConfig.languages[DocumentParameters.language] == 'en':
return
langcodes = [TranslationConfig.languages[DocumentParameters.language]]
try:
self.translation = gettext.translation('elyxer', None, langcodes)
except IOError:
Trace.error('No translation for ' + str(langcodes))
def getmessage(self, key):
"Get the translated message for the given key."
if self.first:
self.findtranslation()
self.first = False
message = self.getuntranslated(key)
if not self.translation:
return message
try:
      message = self.translation.gettext(message)
except IOError:
pass
return message
def getuntranslated(self, key):
"Get the untranslated message."
if not key in TranslationConfig.constants:
Trace.error('Cannot translate ' + key)
return key
return TranslationConfig.constants[key]
Translator.instance = Translator()
class NumberCounter(object):
"A counter for numbers (by default)."
"The type can be changed to return letters, roman numbers..."
name = None
value = None
mode = None
master = None
letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
symbols = NumberingConfig.sequence['symbols']
romannumerals = [
('M', 1000), ('CM', 900), ('D', 500), ('CD', 400), ('C', 100),
('XC', 90), ('L', 50), ('XL', 40), ('X', 10), ('IX', 9), ('V', 5),
('IV', 4), ('I', 1)
]
def __init__(self, name):
"Give a name to the counter."
self.name = name
def setmode(self, mode):
"Set the counter mode. Can be changed at runtime."
self.mode = mode
return self
def init(self, value):
"Set an initial value."
self.value = value
def gettext(self):
"Get the next value as a text string."
return str(self.value)
def getletter(self):
"Get the next value as a letter."
return self.getsequence(self.letters)
def getsymbol(self):
"Get the next value as a symbol."
return self.getsequence(self.symbols)
def getsequence(self, sequence):
"Get the next value from elyxer.a sequence."
return sequence[(self.value - 1) % len(sequence)]
def getroman(self):
"Get the next value as a roman number."
result = ''
number = self.value
for numeral, value in self.romannumerals:
if number >= value:
        result += numeral * (number // value)
number = number % value
return result
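  # Worked example: a value of 1998 yields 'MCMXCVIII'
  # (M + CM + XC + V + III, consuming the numeral table greedily).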
def getvalue(self):
"Get the current value as configured in the current mode."
if not self.mode or self.mode in ['text', '1']:
return self.gettext()
if self.mode == 'A':
return self.getletter()
if self.mode == 'a':
return self.getletter().lower()
if self.mode == 'I':
return self.getroman()
if self.mode == '*':
return self.getsymbol()
Trace.error('Unknown counter mode ' + self.mode)
return self.gettext()
def getnext(self):
"Increase the current value and get the next value as configured."
if not self.value:
self.value = 0
self.value += 1
return self.getvalue()
def reset(self):
"Reset the counter."
self.value = 0
def __unicode__(self):
"Return a printable representation."
result = 'Counter ' + self.name
if self.mode:
result += ' in mode ' + self.mode
return result
class DependentCounter(NumberCounter):
"A counter which depends on another one (the master)."
def setmaster(self, master):
"Set the master counter."
self.master = master
self.last = self.master.getvalue()
return self
def getnext(self):
"Increase or, if the master counter has changed, restart."
if self.last != self.master.getvalue():
self.reset()
value = NumberCounter.getnext(self)
self.last = self.master.getvalue()
return value
def getvalue(self):
"Get the value of the combined counter: master.dependent."
return self.master.getvalue() + '.' + NumberCounter.getvalue(self)
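# Example (illustrative): with a master 'chapter' counter at 2, a dependent
# 'section' counter yields '2.1', '2.2', ... and restarts at '3.1' as soon
# as the chapter counter advances.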
class NumberGenerator(object):
"A number generator for unique sequences and hierarchical structures. Used in:"
" * ordered part numbers: Chapter 3, Section 5.3."
" * unique part numbers: Footnote 15, Bibliography cite [15]."
" * chaptered part numbers: Figure 3.15, Equation (8.3)."
" * unique roman part numbers: Part I, Book IV."
chaptered = None
generator = None
romanlayouts = [x.lower() for x in NumberingConfig.layouts['roman']]
orderedlayouts = [x.lower() for x in NumberingConfig.layouts['ordered']]
counters = dict()
appendix = None
def deasterisk(self, type):
"Remove the possible asterisk in a layout type."
return type.replace('*', '')
def isunique(self, type):
"Find out if the layout type corresponds to a unique part."
return self.isroman(type)
def isroman(self, type):
"Find out if the layout type should have roman numeration."
return self.deasterisk(type).lower() in self.romanlayouts
def isinordered(self, type):
"Find out if the layout type corresponds to an (un)ordered part."
return self.deasterisk(type).lower() in self.orderedlayouts
def isnumbered(self, type):
"Find out if the type for a layout corresponds to a numbered layout."
if '*' in type:
return False
if self.isroman(type):
return True
if not self.isinordered(type):
return False
if self.getlevel(type) > DocumentParameters.maxdepth:
return False
return True
def isunordered(self, type):
"Find out if the type contains an asterisk, basically."
return '*' in type
def getlevel(self, type):
"Get the level that corresponds to a layout type."
if self.isunique(type):
return 0
if not self.isinordered(type):
Trace.error('Unknown layout type ' + type)
return 0
type = self.deasterisk(type).lower()
level = self.orderedlayouts.index(type) + 1
return level - DocumentParameters.startinglevel
def getparttype(self, type):
"Obtain the type for the part: without the asterisk, "
"and switched to Appendix if necessary."
if NumberGenerator.appendix and self.getlevel(type) == 1:
return 'Appendix'
return self.deasterisk(type)
def generate(self, type):
"Generate a number for a layout type."
"Unique part types such as Part or Book generate roman numbers: Part I."
"Ordered part types return dot-separated tuples: Chapter 5, Subsection 2.3.5."
"Everything else generates unique numbers: Bibliography [1]."
"Each invocation results in a new number."
return self.getcounter(type).getnext()
def getcounter(self, type):
"Get the counter for the given type."
type = type.lower()
if not type in self.counters:
self.counters[type] = self.create(type)
return self.counters[type]
def create(self, type):
"Create a counter for the given type."
if self.isnumbered(type) and self.getlevel(type) > 1:
index = self.orderedlayouts.index(type)
above = self.orderedlayouts[index - 1]
master = self.getcounter(above)
return self.createdependent(type, master)
counter = NumberCounter(type)
if self.isroman(type):
counter.setmode('I')
return counter
def getdependentcounter(self, type, master):
"Get (or create) a counter of the given type that depends on another."
if not type in self.counters or not self.counters[type].master:
self.counters[type] = self.createdependent(type, master)
return self.counters[type]
def createdependent(self, type, master):
"Create a dependent counter given the master."
return DependentCounter(type).setmaster(master)
def startappendix(self):
"Start appendices here."
firsttype = self.orderedlayouts[DocumentParameters.startinglevel]
counter = self.getcounter(firsttype)
counter.setmode('A').reset()
NumberGenerator.appendix = True
class ChapteredGenerator(NumberGenerator):
"Generate chaptered numbers, as in Chapter.Number."
"Used in equations, figures: Equation (5.3), figure 8.15."
def generate(self, type):
"Generate a number which goes with first-level numbers (chapters). "
"For the article classes a unique number is generated."
if DocumentParameters.startinglevel > 0:
return NumberGenerator.generator.generate(type)
chapter = self.getcounter('Chapter')
return self.getdependentcounter(type, chapter).getnext()
NumberGenerator.chaptered = ChapteredGenerator()
NumberGenerator.generator = NumberGenerator()
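# Example (illustrative): NumberGenerator.generator.generate('Chapter')
# yields '1', '2', ...; NumberGenerator.chaptered.generate('figure') yields
# chapter-bound numbers such as '1.1', '1.2', restarting on a new chapter.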
class ContainerSize(object):
"The size of a container."
width = None
height = None
maxwidth = None
maxheight = None
scale = None
def set(self, width = None, height = None):
"Set the proper size with width and height."
self.setvalue('width', width)
self.setvalue('height', height)
return self
def setmax(self, maxwidth = None, maxheight = None):
"Set max width and/or height."
self.setvalue('maxwidth', maxwidth)
self.setvalue('maxheight', maxheight)
return self
def readparameters(self, container):
"Read some size parameters off a container."
self.setparameter(container, 'width')
self.setparameter(container, 'height')
self.setparameter(container, 'scale')
self.checkvalidheight(container)
return self
def setparameter(self, container, name):
"Read a size parameter off a container, and set it if present."
value = container.getparameter(name)
self.setvalue(name, value)
def setvalue(self, name, value):
"Set the value of a parameter name, only if it's valid."
value = self.processparameter(value)
if value:
setattr(self, name, value)
def checkvalidheight(self, container):
"Check if the height parameter is valid; otherwise erase it."
heightspecial = container.getparameter('height_special')
if self.height and self.extractnumber(self.height) == '1' and heightspecial == 'totalheight':
self.height = None
def processparameter(self, value):
"Do the full processing on a parameter."
if not value:
return None
if self.extractnumber(value) == '0':
return None
for ignored in StyleConfig.size['ignoredtexts']:
if ignored in value:
value = value.replace(ignored, '')
return value
def extractnumber(self, text):
"Extract the first number in the given text."
result = ''
decimal = False
for char in text:
if char.isdigit():
result += char
elif char == '.' and not decimal:
result += char
decimal = True
else:
return result
return result
def checkimage(self, width, height):
"Check image dimensions, set them if possible."
if width:
self.maxwidth = str(width) + 'px'
if self.scale and not self.width:
self.width = self.scalevalue(width)
if height:
self.maxheight = str(height) + 'px'
if self.scale and not self.height:
self.height = self.scalevalue(height)
if self.width and not self.height:
self.height = 'auto'
if self.height and not self.width:
self.width = 'auto'
def scalevalue(self, value):
"Scale the value according to the image scale and return it as unicode."
scaled = value * int(self.scale) / 100
return str(int(scaled)) + 'px'
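  # Worked example: an image 600 pixels wide with scale '50' gets
  # width = scalevalue(600) = '300px'.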
def removepercentwidth(self):
"Remove percent width if present, to set it at the figure level."
if not self.width:
return None
if not '%' in self.width:
return None
width = self.width
self.width = None
if self.height == 'auto':
self.height = None
return width
def addstyle(self, container):
"Add the proper style attribute to the output tag."
if not isinstance(container.output, TaggedOutput):
Trace.error('No tag to add style, in ' + str(container))
if not self.width and not self.height and not self.maxwidth and not self.maxheight:
# nothing to see here; move along
return
tag = ' style="'
tag += self.styleparameter('width')
tag += self.styleparameter('maxwidth')
tag += self.styleparameter('height')
tag += self.styleparameter('maxheight')
if tag[-1] == ' ':
tag = tag[:-1]
tag += '"'
container.output.tag += tag
def styleparameter(self, name):
"Get the style for a single parameter."
value = getattr(self, name)
if value:
return name.replace('max', 'max-') + ': ' + value + '; '
return ''
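# Example (illustrative): ContainerSize().set('40%').addstyle(container)
# appends ' style="width: 40%;"' to the container's output tag, assuming
# '40%' survives processparameter() unchanged.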
class QuoteContainer(Container):
"A container for a pretty quote"
def __init__(self):
self.parser = BoundedParser()
self.output = FixedOutput()
def process(self):
"Process contents"
self.type = self.header[2]
if not self.type in StyleConfig.quotes:
Trace.error('Quote type ' + self.type + ' not found')
self.html = ['"']
return
self.html = [StyleConfig.quotes[self.type]]
class LyXLine(Container):
"A Lyx line"
def __init__(self):
self.parser = LoneCommand()
self.output = FixedOutput()
def process(self):
self.html = ['<hr class="line" />']
class EmphaticText(TaggedText):
"Text with emphatic mode"
def process(self):
self.output.tag = 'i'
class ShapedText(TaggedText):
"Text shaped (italic, slanted)"
def process(self):
self.type = self.header[1]
if not self.type in TagConfig.shaped:
Trace.error('Unrecognized shape ' + self.header[1])
self.output.tag = 'span'
return
self.output.tag = TagConfig.shaped[self.type]
class VersalitasText(TaggedText):
"Text in versalitas"
def process(self):
self.output.tag = 'span class="versalitas"'
class ColorText(TaggedText):
"Colored text"
def process(self):
self.color = self.header[1]
self.output.tag = 'span class="' + self.color + '"'
class SizeText(TaggedText):
"Sized text"
def process(self):
self.size = self.header[1]
self.output.tag = 'span class="' + self.size + '"'
class BoldText(TaggedText):
"Bold text"
def process(self):
self.output.tag = 'b'
class TextFamily(TaggedText):
"A bit of text from elyxer.a different family"
def process(self):
"Parse the type of family"
self.type = self.header[1]
if not self.type in TagConfig.family:
      Trace.error('Unrecognized family ' + self.type)
self.output.tag = 'span'
return
self.output.tag = TagConfig.family[self.type]
class Hfill(TaggedText):
"Horizontall fill"
def process(self):
self.output.tag = 'span class="hfill"'
class BarredText(TaggedText):
"Text with a bar somewhere"
def process(self):
"Parse the type of bar"
self.type = self.header[1]
if not self.type in TagConfig.barred:
Trace.error('Unknown bar type ' + self.type)
self.output.tag = 'span'
return
self.output.tag = TagConfig.barred[self.type]
class LangLine(TaggedText):
"A line with language information"
def process(self):
"Only generate a span with lang info when the language is recognized."
lang = self.header[1]
if not lang in TranslationConfig.languages:
self.output = ContentsOutput()
return
isolang = TranslationConfig.languages[lang]
self.output = TaggedOutput().settag('span lang="' + isolang + '"', False)
class InsetLength(BlackBox):
"A length measure inside an inset."
def process(self):
self.length = self.header[1]
class Space(Container):
"A space of several types"
def __init__(self):
self.parser = InsetParser()
self.output = FixedOutput()
def process(self):
self.type = self.header[2]
if self.type not in StyleConfig.hspaces:
Trace.error('Unknown space type ' + self.type)
self.html = [' ']
return
self.html = [StyleConfig.hspaces[self.type]]
length = self.getlength()
if not length:
return
self.output = TaggedOutput().settag('span class="hspace"', False)
ContainerSize().set(length).addstyle(self)
def getlength(self):
"Get the space length from elyxer.the contents or parameters."
if len(self.contents) == 0 or not isinstance(self.contents[0], InsetLength):
return None
return self.contents[0].length
class VerticalSpace(Container):
"An inset that contains a vertical space."
def __init__(self):
self.parser = InsetParser()
self.output = FixedOutput()
def process(self):
"Set the correct tag"
self.type = self.header[2]
if self.type not in StyleConfig.vspaces:
self.output = TaggedOutput().settag('div class="vspace" style="height: ' + self.type + ';"', True)
return
self.html = [StyleConfig.vspaces[self.type]]
class Align(Container):
"Bit of aligned text"
def __init__(self):
self.parser = ExcludingParser()
self.output = TaggedOutput().setbreaklines(True)
def process(self):
self.output.tag = 'div class="' + self.header[1] + '"'
class Newline(Container):
"A newline"
def __init__(self):
self.parser = LoneCommand()
self.output = FixedOutput()
def process(self):
"Process contents"
self.html = ['<br/>\n']
class NewPage(Newline):
"A new page"
def process(self):
"Process contents"
self.html = ['<p><br/>\n</p>\n']
class Separator(Container):
"A separator string which is not extracted by extracttext()."
def __init__(self, constant):
self.output = FixedOutput()
self.contents = []
self.html = [constant]
class StrikeOut(TaggedText):
"Striken out text."
def process(self):
"Set the output tag to strike."
self.output.tag = 'strike'
class StartAppendix(BlackBox):
"Mark to start an appendix here."
"From this point on, all chapters become appendices."
def process(self):
"Activate the special numbering scheme for appendices, using letters."
NumberGenerator.generator.startappendix()
class Link(Container):
"A link to another part of the document"
anchor = None
url = None
type = None
page = None
target = None
destination = None
title = None
def __init__(self):
"Initialize the link, add target if configured."
self.contents = []
self.parser = InsetParser()
self.output = LinkOutput()
if Options.target:
self.target = Options.target
def complete(self, text, anchor = None, url = None, type = None, title = None):
"Complete the link."
self.contents = [Constant(text)]
if anchor:
self.anchor = anchor
if url:
self.url = url
if type:
self.type = type
if title:
self.title = title
return self
def computedestination(self):
"Use the destination link to fill in the destination URL."
if not self.destination:
return
self.url = ''
if self.destination.anchor:
self.url = '#' + self.destination.anchor
if self.destination.page:
self.url = self.destination.page + self.url
def setmutualdestination(self, destination):
"Set another link as destination, and set its destination to this one."
self.destination = destination
destination.destination = self
def __unicode__(self):
"Return a printable representation."
result = 'Link'
if self.anchor:
result += ' #' + self.anchor
if self.url:
result += ' to ' + self.url
return result
class URL(Link):
"A clickable URL"
def process(self):
"Read URL from elyxer.parameters"
target = self.escape(self.getparameter('target'))
self.url = target
type = self.getparameter('type')
if type:
self.url = self.escape(type) + target
name = self.getparameter('name')
if not name:
name = target
self.contents = [Constant(name)]
class FlexURL(URL):
"A flexible URL"
def process(self):
"Read URL from elyxer.contents"
self.url = self.extracttext()
class LinkOutput(ContainerOutput):
"A link pointing to some destination"
"Or an anchor (destination)"
def gethtml(self, link):
"Get the HTML code for the link"
type = link.__class__.__name__
if link.type:
type = link.type
tag = 'a class="' + type + '"'
if link.anchor:
tag += ' name="' + link.anchor + '"'
if link.destination:
link.computedestination()
if link.url:
tag += ' href="' + link.url + '"'
if link.target:
tag += ' target="' + link.target + '"'
if link.title:
tag += ' title="' + link.title + '"'
return TaggedOutput().settag(tag).gethtml(link)
class Postprocessor(object):
"Postprocess a container keeping some context"
stages = []
def __init__(self):
self.stages = StageDict(Postprocessor.stages, self)
self.current = None
self.last = None
def postprocess(self, next):
"Postprocess a container and its contents."
self.postrecursive(self.current)
result = self.postcurrent(next)
self.last = self.current
self.current = next
return result
def postrecursive(self, container):
"Postprocess the container contents recursively"
if not hasattr(container, 'contents'):
return
if len(container.contents) == 0:
return
if hasattr(container, 'postprocess'):
if not container.postprocess:
return
postprocessor = Postprocessor()
contents = []
for element in container.contents:
post = postprocessor.postprocess(element)
if post:
contents.append(post)
# two rounds to empty the pipeline
for i in range(2):
post = postprocessor.postprocess(None)
if post:
contents.append(post)
container.contents = contents
def postcurrent(self, next):
"Postprocess the current element taking into account next and last."
stage = self.stages.getstage(self.current)
if not stage:
return self.current
return stage.postprocess(self.last, self.current, next)
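# Hedged sketch, not part of the original module: the postprocessor acts as a
# one-element pipeline, so each call to postprocess() returns the *previous*
# container. That is why postrecursive() above pushes None twice at the end:
# the extra rounds flush whatever is still held in self.current.
def _example_postprocess_pipeline(containers):
  "Run containers through a fresh Postprocessor and collect the results."
  postprocessor = Postprocessor()
  processed = [postprocessor.postprocess(element) for element in containers]
  processed += [postprocessor.postprocess(None) for _ in range(2)]
  return [element for element in processed if element]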
class StageDict(object):
"A dictionary of stages corresponding to classes"
def __init__(self, classes, postprocessor):
"Instantiate an element from elyxer.each class and store as a dictionary"
instances = self.instantiate(classes, postprocessor)
self.stagedict = dict([(x.processedclass, x) for x in instances])
def instantiate(self, classes, postprocessor):
"Instantiate an element from elyxer.each class"
stages = [x.__new__(x) for x in classes]
for element in stages:
element.__init__()
element.postprocessor = postprocessor
return stages
def getstage(self, element):
"Get the stage for a given element, if the type is in the dict"
if not element.__class__ in self.stagedict:
return None
return self.stagedict[element.__class__]
class Label(Link):
"A label to be referenced"
names = dict()
lastlayout = None
def __init__(self):
Link.__init__(self)
self.lastnumbered = None
def process(self):
"Process a label container."
key = self.getparameter('name')
self.create(' ', key)
self.lastnumbered = Label.lastlayout
def create(self, text, key, type = 'Label'):
"Create the label for a given key."
self.key = key
self.complete(text, anchor = key, type = type)
Label.names[key] = self
if key in Reference.references:
for reference in Reference.references[key]:
reference.destination = self
return self
def findpartkey(self):
"Get the part key for the latest numbered container seen."
numbered = self.numbered(self)
if numbered and numbered.partkey:
return numbered.partkey
return ''
def numbered(self, container):
"Get the numbered container for the label."
if container.partkey:
return container
if not container.parent:
if self.lastnumbered:
return self.lastnumbered
return None
return self.numbered(container.parent)
def __unicode__(self):
"Return a printable representation."
if not hasattr(self, 'key'):
return 'Unnamed label'
return 'Label ' + self.key
class Reference(Link):
"A reference to a label."
references = dict()
key = 'none'
def process(self):
"Read the reference and set the arrow."
self.key = self.getparameter('reference')
if self.key in Label.names:
self.direction = '↑'
label = Label.names[self.key]
else:
self.direction = '↓'
label = Label().complete(' ', self.key, 'preref')
self.destination = label
self.formatcontents()
if not self.key in Reference.references:
Reference.references[self.key] = []
Reference.references[self.key].append(self)
def formatcontents(self):
"Format the reference contents."
formatkey = self.getparameter('LatexCommand')
if not formatkey:
formatkey = 'ref'
self.formatted = '↕'
if formatkey in StyleConfig.referenceformats:
self.formatted = StyleConfig.referenceformats[formatkey]
else:
Trace.error('Unknown reference format ' + formatkey)
self.replace('↕', self.direction)
self.replace('#', '1')
self.replace('on-page', Translator.translate('on-page'))
partkey = self.destination.findpartkey()
# only if partkey and partkey.number are not null, send partkey.number
self.replace('@', partkey and partkey.number)
self.replace('¶', partkey and partkey.tocentry)
if not '$' in self.formatted or not partkey or not partkey.titlecontents:
# there is a $ left, but it should go away on preprocessing
self.contents = [Constant(self.formatted)]
return
pieces = self.formatted.split('$')
self.contents = [Constant(pieces[0])]
for piece in pieces[1:]:
self.contents += partkey.titlecontents
self.contents.append(Constant(piece))
def replace(self, key, value):
"Replace a key in the format template with a value."
if not key in self.formatted:
return
if not value:
value = ''
self.formatted = self.formatted.replace(key, value)
def __unicode__(self):
"Return a printable representation."
return 'Reference ' + self.key
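# Hedged sketch, not part of the original module: Label and Reference resolve
# in either order. A Reference parsed before its Label points at a provisional
# 'preref' Label and registers itself in Reference.references; when the real
# Label is created, Label.create() repoints every pending reference to it.
def _example_mutual_links():
  "Create two links that point at each other, as setmutualdestination() does."
  entry = Link().complete('entry')
  header = Link().complete('header')
  entry.setmutualdestination(header)
  return entry.destination is header and header.destination is entry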
class FormulaCommand(FormulaBit):
"A LaTeX command inside a formula"
types = []
start = FormulaConfig.starts['command']
commandmap = None
def detect(self, pos):
"Find the current command."
return pos.checkfor(FormulaCommand.start)
def parsebit(self, pos):
"Parse the command."
command = self.extractcommand(pos)
bit = self.parsewithcommand(command, pos)
if bit:
return bit
if command.startswith('\\up') or command.startswith('\\Up'):
upgreek = self.parseupgreek(command, pos)
if upgreek:
return upgreek
if not self.factory.defining:
Trace.error('Unknown command ' + command)
self.output = TaggedOutput().settag('span class="unknown"')
self.add(FormulaConstant(command))
return None
def parsewithcommand(self, command, pos):
"Parse the command type once we have the command."
for type in FormulaCommand.types:
if command in type.commandmap:
return self.parsecommandtype(command, type, pos)
return None
def parsecommandtype(self, command, type, pos):
"Parse a given command type."
bit = self.factory.create(type)
bit.setcommand(command)
returned = bit.parsebit(pos)
if returned:
return returned
return bit
def extractcommand(self, pos):
"Extract the command from elyxer.the current position."
if not pos.checkskip(FormulaCommand.start):
pos.error('Missing command start ' + FormulaCommand.start)
return
if pos.finished():
return self.emptycommand(pos)
if pos.current().isalpha():
# alpha command
command = FormulaCommand.start + pos.globalpha()
# skip mark of short command
pos.checkskip('*')
return command
# symbol command
return FormulaCommand.start + pos.skipcurrent()
def emptycommand(self, pos):
"""Check for an empty command: look for command disguised as ending.
Special case against '{ \\{ \\} }' situation."""
command = ''
if not pos.isout():
ending = pos.nextending()
if ending and pos.checkskip(ending):
command = ending
return FormulaCommand.start + command
def parseupgreek(self, command, pos):
"Parse the Greek \\up command.."
if len(command) < 4:
return None
if command.startswith('\\up'):
upcommand = '\\' + command[3:]
    elif command.startswith('\\Up'):
upcommand = '\\' + command[3:4].upper() + command[4:]
else:
Trace.error('Impossible upgreek command: ' + command)
return
upgreek = self.parsewithcommand(upcommand, pos)
if upgreek:
upgreek.type = 'font'
return upgreek
class CommandBit(FormulaCommand):
"A formula bit that includes a command"
def setcommand(self, command):
"Set the command in the bit"
self.command = command
if self.commandmap:
self.original += command
self.translated = self.commandmap[self.command]
def parseparameter(self, pos):
"Parse a parameter at the current position"
self.factory.clearskipped(pos)
if pos.finished():
return None
parameter = self.factory.parseany(pos)
self.add(parameter)
return parameter
def parsesquare(self, pos):
"Parse a square bracket"
self.factory.clearskipped(pos)
if not self.factory.detecttype(SquareBracket, pos):
return None
bracket = self.factory.parsetype(SquareBracket, pos)
self.add(bracket)
return bracket
def parseliteral(self, pos):
"Parse a literal bracket."
self.factory.clearskipped(pos)
if not self.factory.detecttype(Bracket, pos):
if not pos.isvalue():
Trace.error('No literal parameter found at: ' + pos.identifier())
return None
return pos.globvalue()
bracket = Bracket().setfactory(self.factory)
self.add(bracket.parseliteral(pos))
return bracket.literal
def parsesquareliteral(self, pos):
"Parse a square bracket literally."
self.factory.clearskipped(pos)
if not self.factory.detecttype(SquareBracket, pos):
return None
bracket = SquareBracket().setfactory(self.factory)
self.add(bracket.parseliteral(pos))
return bracket.literal
def parsetext(self, pos):
"Parse a text parameter."
self.factory.clearskipped(pos)
if not self.factory.detecttype(Bracket, pos):
Trace.error('No text parameter for ' + self.command)
return None
bracket = Bracket().setfactory(self.factory).parsetext(pos)
self.add(bracket)
return bracket
class EmptyCommand(CommandBit):
"An empty command (without parameters)"
commandmap = FormulaConfig.commands
def parsebit(self, pos):
"Parse a command without parameters"
self.contents = [FormulaConstant(self.translated)]
class SpacedCommand(CommandBit):
"An empty command which should have math spacing in formulas."
commandmap = FormulaConfig.spacedcommands
def parsebit(self, pos):
"Place as contents the command translated and spaced."
self.contents = [FormulaConstant(' ' + self.translated + ' ')]
class AlphaCommand(EmptyCommand):
"A command without paramters whose result is alphabetical"
commandmap = FormulaConfig.alphacommands
def parsebit(self, pos):
"Parse the command and set type to alpha"
EmptyCommand.parsebit(self, pos)
self.type = 'alpha'
class OneParamFunction(CommandBit):
"A function of one parameter"
commandmap = FormulaConfig.onefunctions
simplified = False
def parsebit(self, pos):
"Parse a function with one parameter"
self.output = TaggedOutput().settag(self.translated)
self.parseparameter(pos)
self.simplifyifpossible()
def simplifyifpossible(self):
"Try to simplify to a single character."
if self.original in self.commandmap:
self.output = FixedOutput()
self.html = [self.commandmap[self.original]]
self.simplified = True
class SymbolFunction(CommandBit):
"Find a function which is represented by a symbol (like _ or ^)"
commandmap = FormulaConfig.symbolfunctions
def detect(self, pos):
"Find the symbol"
return pos.current() in SymbolFunction.commandmap
def parsebit(self, pos):
"Parse the symbol"
self.setcommand(pos.current())
pos.skip(self.command)
self.output = TaggedOutput().settag(self.translated)
self.parseparameter(pos)
class TextFunction(CommandBit):
"A function where parameters are read as text."
commandmap = FormulaConfig.textfunctions
def parsebit(self, pos):
"Parse a text parameter"
self.output = TaggedOutput().settag(self.translated)
self.parsetext(pos)
def process(self):
"Set the type to font"
self.type = 'font'
class LabelFunction(CommandBit):
"A function that acts as a label"
commandmap = FormulaConfig.labelfunctions
def parsebit(self, pos):
"Parse a literal parameter"
self.key = self.parseliteral(pos)
def process(self):
"Add an anchor with the label contents."
self.type = 'font'
self.label = Label().create(' ', self.key, type = 'eqnumber')
self.contents = [self.label]
# store as a Label so we know it's been seen
Label.names[self.key] = self.label
class FontFunction(OneParamFunction):
"A function of one parameter that changes the font"
commandmap = FormulaConfig.fontfunctions
def process(self):
"Simplify if possible using a single character."
self.type = 'font'
self.simplifyifpossible()
FormulaFactory.types += [FormulaCommand, SymbolFunction]
FormulaCommand.types = [
AlphaCommand, EmptyCommand, OneParamFunction, FontFunction, LabelFunction,
TextFunction, SpacedCommand,
]
class BigSymbol(object):
"A big symbol generator."
symbols = FormulaConfig.bigsymbols
def __init__(self, symbol):
"Create the big symbol."
self.symbol = symbol
def getpieces(self):
"Get an array with all pieces."
if not self.symbol in self.symbols:
return [self.symbol]
if self.smalllimit():
return [self.symbol]
return self.symbols[self.symbol]
def smalllimit(self):
"Decide if the limit should be a small, one-line symbol."
if not DocumentParameters.displaymode:
return True
if len(self.symbols[self.symbol]) == 1:
return True
return Options.simplemath
class BigBracket(BigSymbol):
"A big bracket generator."
def __init__(self, size, bracket, alignment='l'):
"Set the size and symbol for the bracket."
self.size = size
self.original = bracket
self.alignment = alignment
self.pieces = None
if bracket in FormulaConfig.bigbrackets:
self.pieces = FormulaConfig.bigbrackets[bracket]
def getpiece(self, index):
"Return the nth piece for the bracket."
function = getattr(self, 'getpiece' + str(len(self.pieces)))
return function(index)
def getpiece1(self, index):
"Return the only piece for a single-piece bracket."
return self.pieces[0]
def getpiece3(self, index):
"Get the nth piece for a 3-piece bracket: parenthesis or square bracket."
if index == 0:
return self.pieces[0]
if index == self.size - 1:
return self.pieces[-1]
return self.pieces[1]
def getpiece4(self, index):
"Get the nth piece for a 4-piece bracket: curly bracket."
if index == 0:
return self.pieces[0]
if index == self.size - 1:
return self.pieces[3]
    if index == (self.size - 1) // 2:
return self.pieces[2]
return self.pieces[1]
def getcell(self, index):
"Get the bracket piece as an array cell."
piece = self.getpiece(index)
span = 'span class="bracket align-' + self.alignment + '"'
return TaggedBit().constant(piece, span)
def getcontents(self):
"Get the bracket as an array or as a single bracket."
if self.size == 1 or not self.pieces:
return self.getsinglebracket()
rows = []
for index in range(self.size):
cell = self.getcell(index)
rows.append(TaggedBit().complete([cell], 'span class="arrayrow"'))
return [TaggedBit().complete(rows, 'span class="array"')]
def getsinglebracket(self):
"Return the bracket as a single sign."
if self.original == '.':
return [TaggedBit().constant('', 'span class="emptydot"')]
return [TaggedBit().constant(self.original, 'span class="symbol"')]
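# Hedged sketch, not part of the original module: assuming '(' has a
# three-piece entry in FormulaConfig.bigbrackets, a size-3 bracket renders as
# an array of stacked cells (top, extender, bottom), while size 1 falls back
# to a single 'span class="symbol"'.
def _example_bigbracket(size=3):
  "Build the HTML contents for a left parenthesis of the given size."
  return BigBracket(size, '(', 'l').getcontents()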
class FormulaEquation(CommandBit):
"A simple numbered equation."
piece = 'equation'
def parsebit(self, pos):
"Parse the array"
self.output = ContentsOutput()
self.add(self.factory.parsetype(WholeFormula, pos))
class FormulaCell(FormulaCommand):
"An array cell inside a row"
def setalignment(self, alignment):
self.alignment = alignment
self.output = TaggedOutput().settag('span class="arraycell align-' + alignment +'"', True)
return self
def parsebit(self, pos):
self.factory.clearskipped(pos)
if pos.finished():
return
self.add(self.factory.parsetype(WholeFormula, pos))
class FormulaRow(FormulaCommand):
"An array row inside an array"
cellseparator = FormulaConfig.array['cellseparator']
def setalignments(self, alignments):
self.alignments = alignments
self.output = TaggedOutput().settag('span class="arrayrow"', True)
return self
def parsebit(self, pos):
"Parse a whole row"
index = 0
pos.pushending(self.cellseparator, optional=True)
while not pos.finished():
cell = self.createcell(index)
cell.parsebit(pos)
self.add(cell)
index += 1
pos.checkskip(self.cellseparator)
if len(self.contents) == 0:
self.output = EmptyOutput()
def createcell(self, index):
"Create the cell that corresponds to the given index."
alignment = self.alignments[index % len(self.alignments)]
return self.factory.create(FormulaCell).setalignment(alignment)
class MultiRowFormula(CommandBit):
"A formula with multiple rows."
def parserows(self, pos):
"Parse all rows, finish when no more row ends"
self.rows = []
first = True
for row in self.iteraterows(pos):
if first:
first = False
else:
        # intersperse empty rows
self.addempty()
row.parsebit(pos)
self.addrow(row)
self.size = len(self.rows)
def iteraterows(self, pos):
"Iterate over all rows, end when no more row ends"
rowseparator = FormulaConfig.array['rowseparator']
while True:
pos.pushending(rowseparator, True)
row = self.factory.create(FormulaRow)
yield row.setalignments(self.alignments)
if pos.checkfor(rowseparator):
self.original += pos.popending(rowseparator)
else:
return
def addempty(self):
"Add an empty row."
row = self.factory.create(FormulaRow).setalignments(self.alignments)
for index, originalcell in enumerate(self.rows[-1].contents):
cell = row.createcell(index)
cell.add(FormulaConstant(' '))
row.add(cell)
self.addrow(row)
def addrow(self, row):
"Add a row to the contents and to the list of rows."
self.rows.append(row)
self.add(row)
class FormulaArray(MultiRowFormula):
"An array within a formula"
piece = 'array'
def parsebit(self, pos):
"Parse the array"
self.output = TaggedOutput().settag('span class="array"', False)
self.parsealignments(pos)
self.parserows(pos)
def parsealignments(self, pos):
"Parse the different alignments"
# vertical
self.valign = 'c'
literal = self.parsesquareliteral(pos)
if literal:
self.valign = literal
# horizontal
literal = self.parseliteral(pos)
self.alignments = []
for l in literal:
self.alignments.append(l)
class FormulaMatrix(MultiRowFormula):
"A matrix (array with center alignment)."
piece = 'matrix'
def parsebit(self, pos):
"Parse the matrix, set alignments to 'c'."
self.output = TaggedOutput().settag('span class="array"', False)
self.valign = 'c'
self.alignments = ['c']
self.parserows(pos)
class FormulaCases(MultiRowFormula):
"A cases statement"
piece = 'cases'
def parsebit(self, pos):
"Parse the cases"
self.output = ContentsOutput()
self.alignments = ['l', 'l']
self.parserows(pos)
for row in self.contents:
for cell in row.contents:
cell.output.settag('span class="case align-l"', True)
cell.contents.append(FormulaConstant(' '))
array = TaggedBit().complete(self.contents, 'span class="bracketcases"', True)
brace = BigBracket(len(self.contents), '{', 'l')
self.contents = brace.getcontents() + [array]
class EquationEnvironment(MultiRowFormula):
"A \\begin{}...\\end equation environment with rows and cells."
def parsebit(self, pos):
"Parse the whole environment."
self.output = TaggedOutput().settag('span class="environment"', False)
environment = self.piece.replace('*', '')
if environment in FormulaConfig.environments:
self.alignments = FormulaConfig.environments[environment]
else:
Trace.error('Unknown equation environment ' + self.piece)
self.alignments = ['l']
self.parserows(pos)
class BeginCommand(CommandBit):
"A \\begin{}...\\end command and what it entails (array, cases, aligned)"
commandmap = {FormulaConfig.array['begin']:''}
types = [FormulaEquation, FormulaArray, FormulaCases, FormulaMatrix]
def parsebit(self, pos):
"Parse the begin command"
command = self.parseliteral(pos)
bit = self.findbit(command)
ending = FormulaConfig.array['end'] + '{' + command + '}'
pos.pushending(ending)
bit.parsebit(pos)
self.add(bit)
self.original += pos.popending(ending)
self.size = bit.size
def findbit(self, piece):
"Find the command bit corresponding to the \\begin{piece}"
for type in BeginCommand.types:
if piece.replace('*', '') == type.piece:
return self.factory.create(type)
bit = self.factory.create(EquationEnvironment)
bit.piece = piece
return bit
FormulaCommand.types += [BeginCommand]
import datetime
class CombiningFunction(OneParamFunction):
commandmap = FormulaConfig.combiningfunctions
def parsebit(self, pos):
"Parse a combining function."
self.type = 'alpha'
combining = self.translated
parameter = self.parsesingleparameter(pos)
if not parameter:
Trace.error('Empty parameter for combining function ' + self.command)
elif len(parameter.extracttext()) != 1:
Trace.error('Applying combining function ' + self.command + ' to invalid string "' + parameter.extracttext() + '"')
self.contents.append(Constant(combining))
def parsesingleparameter(self, pos):
"Parse a parameter, or a single letter."
self.factory.clearskipped(pos)
if pos.finished():
Trace.error('Error while parsing single parameter at ' + pos.identifier())
return None
if self.factory.detecttype(Bracket, pos) \
or self.factory.detecttype(FormulaCommand, pos):
return self.parseparameter(pos)
letter = FormulaConstant(pos.skipcurrent())
self.add(letter)
return letter
class DecoratingFunction(OneParamFunction):
"A function that decorates some bit of text"
commandmap = FormulaConfig.decoratingfunctions
def parsebit(self, pos):
"Parse a decorating function"
self.type = 'alpha'
symbol = self.translated
self.symbol = TaggedBit().constant(symbol, 'span class="symbolover"')
self.parameter = self.parseparameter(pos)
self.output = TaggedOutput().settag('span class="withsymbol"')
self.contents.insert(0, self.symbol)
self.parameter.output = TaggedOutput().settag('span class="undersymbol"')
self.simplifyifpossible()
class LimitCommand(EmptyCommand):
"A command which accepts limits above and below, in display mode."
commandmap = FormulaConfig.limitcommands
def parsebit(self, pos):
"Parse a limit command."
pieces = BigSymbol(self.translated).getpieces()
self.output = TaggedOutput().settag('span class="limits"')
for piece in pieces:
self.contents.append(TaggedBit().constant(piece, 'span class="limit"'))
class LimitPreviousCommand(LimitCommand):
"A command to limit the previous command."
commandmap = None
def parsebit(self, pos):
"Do nothing."
self.output = TaggedOutput().settag('span class="limits"')
self.factory.clearskipped(pos)
def __unicode__(self):
"Return a printable representation."
return 'Limit previous command'
class LimitsProcessor(MathsProcessor):
"A processor for limits inside an element."
def process(self, contents, index):
"Process the limits for an element."
if Options.simplemath:
return
if self.checklimits(contents, index):
self.modifylimits(contents, index)
if self.checkscript(contents, index) and self.checkscript(contents, index + 1):
self.modifyscripts(contents, index)
def checklimits(self, contents, index):
"Check if the current position has a limits command."
if not DocumentParameters.displaymode:
return False
if self.checkcommand(contents, index + 1, LimitPreviousCommand):
self.limitsahead(contents, index)
return False
if not isinstance(contents[index], LimitCommand):
return False
return self.checkscript(contents, index + 1)
def limitsahead(self, contents, index):
"Limit the current element based on the next."
contents[index + 1].add(contents[index].clone())
contents[index].output = EmptyOutput()
def modifylimits(self, contents, index):
"Modify a limits commands so that the limits appear above and below."
limited = contents[index]
subscript = self.getlimit(contents, index + 1)
limited.contents.append(subscript)
if self.checkscript(contents, index + 1):
superscript = self.getlimit(contents, index + 1)
else:
superscript = TaggedBit().constant(' ', 'sup class="limit"')
limited.contents.insert(0, superscript)
def getlimit(self, contents, index):
"Get the limit for a limits command."
limit = self.getscript(contents, index)
limit.output.tag = limit.output.tag.replace('script', 'limit')
return limit
def modifyscripts(self, contents, index):
"Modify the super- and subscript to appear vertically aligned."
subscript = self.getscript(contents, index)
# subscript removed so instead of index + 1 we get index again
superscript = self.getscript(contents, index)
scripts = TaggedBit().complete([superscript, subscript], 'span class="scripts"')
contents.insert(index, scripts)
def checkscript(self, contents, index):
"Check if the current element is a sub- or superscript."
return self.checkcommand(contents, index, SymbolFunction)
def checkcommand(self, contents, index, type):
"Check for the given type as the current element."
if len(contents) <= index:
return False
return isinstance(contents[index], type)
def getscript(self, contents, index):
"Get the sub- or superscript."
bit = contents[index]
bit.output.tag += ' class="script"'
del contents[index]
return bit
class BracketCommand(OneParamFunction):
"A command which defines a bracket."
commandmap = FormulaConfig.bracketcommands
def parsebit(self, pos):
"Parse the bracket."
OneParamFunction.parsebit(self, pos)
def create(self, direction, character):
"Create the bracket for the given character."
self.original = character
self.command = '\\' + direction
self.contents = [FormulaConstant(character)]
return self
class BracketProcessor(MathsProcessor):
"A processor for bracket commands."
def process(self, contents, index):
"Convert the bracket using Unicode pieces, if possible."
if Options.simplemath:
return
if self.checkleft(contents, index):
return self.processleft(contents, index)
def processleft(self, contents, index):
"Process a left bracket."
rightindex = self.findright(contents, index + 1)
if not rightindex:
return
size = self.findmax(contents, index, rightindex)
self.resize(contents[index], size)
self.resize(contents[rightindex], size)
def checkleft(self, contents, index):
"Check if the command at the given index is left."
return self.checkdirection(contents[index], '\\left')
def checkright(self, contents, index):
"Check if the command at the given index is right."
return self.checkdirection(contents[index], '\\right')
def checkdirection(self, bit, command):
"Check if the given bit is the desired bracket command."
if not isinstance(bit, BracketCommand):
return False
return bit.command == command
def findright(self, contents, index):
"Find the right bracket starting at the given index, or 0."
depth = 1
while index < len(contents):
if self.checkleft(contents, index):
depth += 1
if self.checkright(contents, index):
depth -= 1
if depth == 0:
return index
index += 1
return None
def findmax(self, contents, leftindex, rightindex):
"Find the max size of the contents between the two given indices."
sliced = contents[leftindex:rightindex]
return max([element.size for element in sliced])
def resize(self, command, size):
"Resize a bracket command to the given size."
character = command.extracttext()
alignment = command.command.replace('\\', '')
bracket = BigBracket(size, character, alignment)
command.output = ContentsOutput()
command.contents = bracket.getcontents()
class TodayCommand(EmptyCommand):
"Shows today's date."
commandmap = None
def parsebit(self, pos):
"Parse a command without parameters"
self.output = FixedOutput()
self.html = [datetime.date.today().strftime('%b %d, %Y')]
FormulaCommand.types += [
DecoratingFunction, CombiningFunction, LimitCommand, BracketCommand,
]
FormulaProcessor.processors += [
LimitsProcessor(), BracketProcessor(),
]
class ParameterDefinition(object):
"The definition of a parameter in a hybrid function."
"[] parameters are optional, {} parameters are mandatory."
"Each parameter has a one-character name, like {$1} or {$p}."
"A parameter that ends in ! like {$p!} is a literal."
"Example: [$1]{$p!} reads an optional parameter $1 and a literal mandatory parameter p."
parambrackets = [('[', ']'), ('{', '}')]
def __init__(self):
self.name = None
self.literal = False
self.optional = False
self.value = None
self.literalvalue = None
def parse(self, pos):
"Parse a parameter definition: [$0], {$x}, {$1!}..."
for (opening, closing) in ParameterDefinition.parambrackets:
if pos.checkskip(opening):
if opening == '[':
self.optional = True
if not pos.checkskip('$'):
Trace.error('Wrong parameter name, did you mean $' + pos.current() + '?')
return None
self.name = pos.skipcurrent()
if pos.checkskip('!'):
self.literal = True
if not pos.checkskip(closing):
Trace.error('Wrong parameter closing ' + pos.skipcurrent())
return None
return self
Trace.error('Wrong character in parameter template: ' + pos.skipcurrent())
return None
def read(self, pos, function):
"Read the parameter itself using the definition."
if self.literal:
if self.optional:
self.literalvalue = function.parsesquareliteral(pos)
else:
self.literalvalue = function.parseliteral(pos)
if self.literalvalue:
self.value = FormulaConstant(self.literalvalue)
elif self.optional:
self.value = function.parsesquare(pos)
else:
self.value = function.parseparameter(pos)
def __unicode__(self):
"Return a printable representation."
result = 'param ' + self.name
if self.value:
result += ': ' + str(self.value)
else:
result += ' (empty)'
return result
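# Hedged sketch, not part of the original module: parse the read template
# '[$1]{$p!}' into its two definitions -- an optional parameter $1 and a
# mandatory literal parameter $p -- the same way paramdefs() below does.
def _example_parse_template(template='[$1]{$p!}'):
  "Parse a read template into a list of ParameterDefinition objects."
  pos = TextPosition(template)
  definitions = []
  while not pos.finished():
    paramdef = ParameterDefinition().parse(pos)
    if paramdef:
      definitions.append(paramdef)
  return definitions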
class ParameterFunction(CommandBit):
"A function with a variable number of parameters defined in a template."
"The parameters are defined as a parameter definition."
def readparams(self, readtemplate, pos):
"Read the params according to the template."
self.params = dict()
for paramdef in self.paramdefs(readtemplate):
paramdef.read(pos, self)
self.params['$' + paramdef.name] = paramdef
def paramdefs(self, readtemplate):
"Read each param definition in the template"
pos = TextPosition(readtemplate)
while not pos.finished():
paramdef = ParameterDefinition().parse(pos)
if paramdef:
yield paramdef
def getparam(self, name):
"Get a parameter as parsed."
if not name in self.params:
return None
return self.params[name]
def getvalue(self, name):
"Get the value of a parameter."
return self.getparam(name).value
def getliteralvalue(self, name):
"Get the literal value of a parameter."
param = self.getparam(name)
if not param or not param.literalvalue:
return None
return param.literalvalue
class HybridFunction(ParameterFunction):
"""
A parameter function where the output is also defined using a template.
The template can use a number of functions; each function has an associated
tag.
Example: [f0{$1},span class="fbox"] defines a function f0 which corresponds
to a span of class fbox, yielding <span class="fbox">$1</span>.
Literal parameters can be used in tags definitions:
[f0{$1},span style="color: $p;"]
yields <span style="color: $p;">$1</span>, where $p is a literal parameter.
Sizes can be specified in hybridsizes, e.g. adding parameter sizes. By
default the resulting size is the max of all arguments. Sizes are used
to generate the right parameters.
A function followed by a single / is output as a self-closing XHTML tag:
[f0/,hr]
will generate <hr/>.
"""
commandmap = FormulaConfig.hybridfunctions
def parsebit(self, pos):
"Parse a function with [] and {} parameters"
readtemplate = self.translated[0]
writetemplate = self.translated[1]
self.readparams(readtemplate, pos)
self.contents = self.writeparams(writetemplate)
self.computehybridsize()
def writeparams(self, writetemplate):
"Write all params according to the template"
return self.writepos(TextPosition(writetemplate))
def writepos(self, pos):
"Write all params as read in the parse position."
result = []
while not pos.finished():
if pos.checkskip('$'):
param = self.writeparam(pos)
if param:
result.append(param)
elif pos.checkskip('f'):
function = self.writefunction(pos)
if function:
function.type = None
result.append(function)
elif pos.checkskip('('):
result.append(self.writebracket('left', '('))
elif pos.checkskip(')'):
result.append(self.writebracket('right', ')'))
else:
result.append(FormulaConstant(pos.skipcurrent()))
return result
def writeparam(self, pos):
"Write a single param of the form $0, $x..."
name = '$' + pos.skipcurrent()
if not name in self.params:
Trace.error('Unknown parameter ' + name)
return None
if not self.params[name]:
return None
if pos.checkskip('.'):
self.params[name].value.type = pos.globalpha()
return self.params[name].value
def writefunction(self, pos):
"Write a single function f0,...,fn."
tag = self.readtag(pos)
if not tag:
return None
if pos.checkskip('/'):
# self-closing XHTML tag, such as <hr/>
return TaggedBit().selfcomplete(tag)
if not pos.checkskip('{'):
Trace.error('Function should be defined in {}')
return None
pos.pushending('}')
contents = self.writepos(pos)
pos.popending()
if len(contents) == 0:
return None
return TaggedBit().complete(contents, tag)
def readtag(self, pos):
"Get the tag corresponding to the given index. Does parameter substitution."
if not pos.current().isdigit():
Trace.error('Function should be f0,...,f9: f' + pos.current())
return None
index = int(pos.skipcurrent())
if 2 + index > len(self.translated):
Trace.error('Function f' + str(index) + ' is not defined')
return None
tag = self.translated[2 + index]
if not '$' in tag:
return tag
for variable in self.params:
if variable in tag:
param = self.params[variable]
if not param.literal:
Trace.error('Parameters in tag ' + tag + ' should be literal: {' + variable + '!}')
continue
if param.literalvalue:
value = param.literalvalue
else:
value = ''
tag = tag.replace(variable, value)
return tag
def writebracket(self, direction, character):
"Return a new bracket looking at the given direction."
return self.factory.create(BracketCommand).create(direction, character)
def computehybridsize(self):
"Compute the size of the hybrid function."
if not self.command in HybridSize.configsizes:
self.computesize()
return
self.size = HybridSize().getsize(self)
# set the size in all elements at first level
for element in self.contents:
element.size = self.size
class HybridSize(object):
"The size associated with a hybrid function."
configsizes = FormulaConfig.hybridsizes
def getsize(self, function):
"Read the size for a function and parse it."
sizestring = self.configsizes[function.command]
for name in function.params:
if name in sizestring:
size = function.params[name].value.computesize()
sizestring = sizestring.replace(name, str(size))
if '$' in sizestring:
Trace.error('Unconverted variable in hybrid size: ' + sizestring)
return 1
return eval(sizestring)
FormulaCommand.types += [HybridFunction]
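# Hedged sketch, not part of the original module: a hybridfunctions entry is a
# list of [read template, write template, tag for f0, tag for f1, ...]. The
# command name and tag below are hypothetical, purely for illustration:
# \myfbox{x} would then render as <span class="fbox">x</span>.
def _example_register_hybrid():
  "Register a hypothetical \\myfbox hybrid function (illustrative only)."
  FormulaConfig.hybridfunctions['\\myfbox'] = [
    '{$1}', 'f0{$1}', 'span class="fbox"']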
class HeaderParser(Parser):
"Parses the LyX header"
def parse(self, reader):
"Parse header parameters into a dictionary, return the preamble."
contents = []
self.parseending(reader, lambda: self.parseline(reader, contents))
# skip last line
reader.nextline()
return contents
def parseline(self, reader, contents):
"Parse a single line as a parameter or as a start"
line = reader.currentline()
if line.startswith(HeaderConfig.parameters['branch']):
self.parsebranch(reader)
return
elif line.startswith(HeaderConfig.parameters['lstset']):
LstParser().parselstset(reader)
return
elif line.startswith(HeaderConfig.parameters['beginpreamble']):
contents.append(self.factory.createcontainer(reader))
return
# no match
self.parseparameter(reader)
def parsebranch(self, reader):
"Parse all branch definitions."
branch = reader.currentline().split()[1]
reader.nextline()
subparser = HeaderParser().complete(HeaderConfig.parameters['endbranch'])
subparser.parse(reader)
options = BranchOptions(branch)
for key in subparser.parameters:
options.set(key, subparser.parameters[key])
Options.branches[branch] = options
def complete(self, ending):
"Complete the parser with the given ending."
self.ending = ending
return self
class PreambleParser(Parser):
"A parser for the LyX preamble."
preamble = []
def parse(self, reader):
"Parse the full preamble with all statements."
self.ending = HeaderConfig.parameters['endpreamble']
self.parseending(reader, lambda: self.parsepreambleline(reader))
return []
def parsepreambleline(self, reader):
"Parse a single preamble line."
PreambleParser.preamble.append(reader.currentline())
reader.nextline()
class LstParser(object):
"Parse global and local lstparams."
globalparams = dict()
def parselstset(self, reader):
"Parse a declaration of lstparams in lstset."
paramtext = self.extractlstset(reader)
if not '{' in paramtext:
Trace.error('Missing opening bracket in lstset: ' + paramtext)
return
lefttext = paramtext.split('{')[1]
croppedtext = lefttext[:-1]
LstParser.globalparams = self.parselstparams(croppedtext)
def extractlstset(self, reader):
"Extract the global lstset parameters."
paramtext = ''
while not reader.finished():
paramtext += reader.currentline()
reader.nextline()
if paramtext.endswith('}'):
return paramtext
Trace.error('Could not find end of \\lstset settings; aborting')
def parsecontainer(self, container):
"Parse some lstparams from elyxer.a container."
container.lstparams = LstParser.globalparams.copy()
paramlist = container.getparameterlist('lstparams')
container.lstparams.update(self.parselstparams(paramlist))
def parselstparams(self, paramlist):
"Process a number of lstparams from elyxer.a list."
paramdict = dict()
for param in paramlist:
if not '=' in param:
if len(param.strip()) > 0:
Trace.error('Invalid listing parameter ' + param)
else:
key, value = param.split('=', 1)
paramdict[key] = value
return paramdict
class MacroDefinition(CommandBit):
"A function that defines a new command (a macro)."
macros = dict()
def parsebit(self, pos):
"Parse the function that defines the macro."
self.output = EmptyOutput()
self.parameternumber = 0
self.defaults = []
self.factory.defining = True
self.parseparameters(pos)
self.factory.defining = False
Trace.debug('New command ' + self.newcommand + ' (' + \
str(self.parameternumber) + ' parameters)')
self.macros[self.newcommand] = self
def parseparameters(self, pos):
"Parse all optional parameters (number of parameters, default values)"
"and the mandatory definition."
self.newcommand = self.parsenewcommand(pos)
# parse number of parameters
literal = self.parsesquareliteral(pos)
if literal:
self.parameternumber = int(literal)
# parse all default values
bracket = self.parsesquare(pos)
while bracket:
self.defaults.append(bracket)
bracket = self.parsesquare(pos)
# parse mandatory definition
self.definition = self.parseparameter(pos)
def parsenewcommand(self, pos):
"Parse the name of the new command."
self.factory.clearskipped(pos)
if self.factory.detecttype(Bracket, pos):
return self.parseliteral(pos)
if self.factory.detecttype(FormulaCommand, pos):
return self.factory.create(FormulaCommand).extractcommand(pos)
Trace.error('Unknown formula bit in defining function at ' + pos.identifier())
return 'unknown'
def instantiate(self):
"Return an instance of the macro."
return self.definition.clone()
class MacroParameter(FormulaBit):
"A parameter from elyxer.a macro."
def detect(self, pos):
"Find a macro parameter: #n."
return pos.checkfor('#')
def parsebit(self, pos):
"Parse the parameter: #n."
if not pos.checkskip('#'):
Trace.error('Missing parameter start #.')
return
self.number = int(pos.skipcurrent())
self.original = '#' + str(self.number)
self.contents = [TaggedBit().constant('#' + str(self.number), 'span class="unknown"')]
class MacroFunction(CommandBit):
"A function that was defined using a macro."
commandmap = MacroDefinition.macros
def parsebit(self, pos):
"Parse a number of input parameters."
self.output = FilteredOutput()
self.values = []
macro = self.translated
self.parseparameters(pos, macro)
self.completemacro(macro)
def parseparameters(self, pos, macro):
"Parse as many parameters as are needed."
self.parseoptional(pos, list(macro.defaults))
self.parsemandatory(pos, macro.parameternumber - len(macro.defaults))
if len(self.values) < macro.parameternumber:
Trace.error('Missing parameters in macro ' + str(self))
def parseoptional(self, pos, defaults):
"Parse optional parameters."
optional = []
while self.factory.detecttype(SquareBracket, pos):
optional.append(self.parsesquare(pos))
if len(optional) > len(defaults):
break
for value in optional:
default = defaults.pop()
if len(value.contents) > 0:
self.values.append(value)
else:
self.values.append(default)
self.values += defaults
def parsemandatory(self, pos, number):
"Parse a number of mandatory parameters."
for index in range(number):
parameter = self.parsemacroparameter(pos, number - index)
if not parameter:
return
self.values.append(parameter)
def parsemacroparameter(self, pos, remaining):
"Parse a macro parameter. Could be a bracket or a single letter."
"If there are just two values remaining and there is a running number,"
"parse as two separater numbers."
self.factory.clearskipped(pos)
if pos.finished():
return None
if self.factory.detecttype(FormulaNumber, pos):
return self.parsenumbers(pos, remaining)
return self.parseparameter(pos)
def parsenumbers(self, pos, remaining):
"Parse the remaining parameters as a running number."
"For example, 12 would be {1}{2}."
number = self.factory.parsetype(FormulaNumber, pos)
if not len(number.original) == remaining:
return number
for digit in number.original:
value = self.factory.create(FormulaNumber)
value.add(FormulaConstant(digit))
value.type = number
self.values.append(value)
return None
def completemacro(self, macro):
"Complete the macro with the parameters read."
self.contents = [macro.instantiate()]
replaced = [False] * len(self.values)
for parameter in self.searchall(MacroParameter):
index = parameter.number - 1
if index >= len(self.values):
Trace.error('Macro parameter index out of bounds: ' + str(index))
return
replaced[index] = True
parameter.contents = [self.values[index].clone()]
for index in range(len(self.values)):
if not replaced[index]:
self.addfilter(index, self.values[index])
def addfilter(self, index, value):
"Add a filter for the given parameter number and parameter value."
original = '#' + str(index + 1)
    value = ''.join(value.gethtml())
self.output.addfilter(original, value)
class FormulaMacro(Formula):
"A math macro defined in an inset."
def __init__(self):
self.parser = MacroParser()
self.output = EmptyOutput()
def __unicode__(self):
"Return a printable representation."
return 'Math macro'
FormulaFactory.types += [ MacroParameter ]
FormulaCommand.types += [
MacroFunction,
]
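# Hedged sketch, not part of the original module: after \newcommand is parsed,
# MacroDefinition.macros maps each new command name to its definition, and
# MacroFunction (whose commandmap is that same dict) expands later uses by
# cloning the definition and filling in every MacroParameter #n.
def _example_macro_names():
  "List the names of all macros defined so far."
  return sorted(MacroDefinition.macros.keys())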
def math2html(formula):
"Convert some TeX math to HTML."
factory = FormulaFactory()
whole = factory.parseformula(formula)
FormulaProcessor().process(whole)
whole.process()
return ''.join(whole.gethtml())
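# Hedged usage sketch: the public entry point converts one TeX formula to an
# HTML string; the exact markup depends on the FormulaConfig tables above.
def _example_math2html():
  "Convert a small TeX fraction to HTML (illustrative only)."
  return math2html('\\frac{1}{2}')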
def main():
"Main function, called if invoked from elyxer.the command line"
args = sys.argv
Options().parseoptions(args)
if len(args) != 1:
Trace.error('Usage: math2html.py escaped_string')
exit()
result = math2html(args[0])
Trace.message(result)
if __name__ == '__main__':
main()
|
{
"content_hash": "23a0fa709f2e2c16e821b989fd395d65",
"timestamp": "",
"source": "github",
"line_count": 5344,
"max_line_length": 538,
"avg_line_length": 33.01309880239521,
"alnum_prop": 0.6145945516999014,
"repo_name": "lmregus/Portfolio",
"id": "9872cbc6b25827afc81bc48bc5ccc8b461df2ee7",
"size": "178955",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "python/design_patterns/env/lib/python3.7/site-packages/docutils/utils/math/math2html.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "27682"
},
{
"name": "C++",
"bytes": "25458"
},
{
"name": "CSS",
"bytes": "12842"
},
{
"name": "HTML",
"bytes": "49171"
},
{
"name": "Java",
"bytes": "99711"
},
{
"name": "JavaScript",
"bytes": "827"
},
{
"name": "Python",
"bytes": "42857"
},
{
"name": "Shell",
"bytes": "5710"
}
],
"symlink_target": ""
}
|
from msrest.serialization import Model
class ApplicationUpgradeProgressInfo(Model):
"""Describes the parameters for an application upgrade.
:param name:
:type name: str
:param type_name:
:type type_name: str
:param target_application_type_version:
:type target_application_type_version: str
:param upgrade_domains:
:type upgrade_domains: list of :class:`UpgradeDomainInfo
<azure.servicefabric.models.UpgradeDomainInfo>`
:param upgrade_state: Possible values include: 'Invalid',
'RollingBackInProgress', 'RollingBackCompleted',
'RollingForwardPending', 'RollingForwardInProgress',
'RollingForwardCompleted', 'Failed'
:type upgrade_state: str
:param next_upgrade_domain:
:type next_upgrade_domain: str
:param rolling_upgrade_mode: Possible values include: 'Invalid',
'UnmonitoredAuto', 'UnmonitoredManual', 'Monitored'. Default value:
"UnmonitoredAuto" .
:type rolling_upgrade_mode: str
:param upgrade_description:
:type upgrade_description: :class:`ApplicationUpgradeDescription
<azure.servicefabric.models.ApplicationUpgradeDescription>`
:param upgrade_duration_in_milliseconds: The estimated total amount of
time spent processing the overall upgrade.
:type upgrade_duration_in_milliseconds: str
:param upgrade_domain_duration_in_milliseconds: The estimated total
amount of time spent processing the current upgrade domain.
:type upgrade_domain_duration_in_milliseconds: str
:param unhealthy_evaluations:
:type unhealthy_evaluations: list of :class:`HealthEvaluationWrapper
<azure.servicefabric.models.HealthEvaluationWrapper>`
:param current_upgrade_domain_progress:
:type current_upgrade_domain_progress:
:class:`CurrentUpgradeDomainProgressInfo
<azure.servicefabric.models.CurrentUpgradeDomainProgressInfo>`
:param start_timestamp_utc: The estimated UTC datetime when the upgrade
started.
:type start_timestamp_utc: str
:param failure_timestamp_utc: The estimated UTC datetime when the upgrade
failed and FailureAction was executed.
:type failure_timestamp_utc: str
:param failure_reason: Possible values include: 'None', 'Interrupted',
'HealthCheck', 'UpgradeDomainTimeout', 'UpgradeTimeout'
:type failure_reason: str
:param upgrade_domain_progress_at_failure:
:type upgrade_domain_progress_at_failure:
:class:`FailureUpgradeDomainProgressInfo
<azure.servicefabric.models.FailureUpgradeDomainProgressInfo>`
:param upgrade_status_details: Additional detailed information about the
status of the pending upgrade.
:type upgrade_status_details: str
"""
_attribute_map = {
'name': {'key': 'Name', 'type': 'str'},
'type_name': {'key': 'TypeName', 'type': 'str'},
'target_application_type_version': {'key': 'TargetApplicationTypeVersion', 'type': 'str'},
'upgrade_domains': {'key': 'UpgradeDomains', 'type': '[UpgradeDomainInfo]'},
'upgrade_state': {'key': 'UpgradeState', 'type': 'str'},
'next_upgrade_domain': {'key': 'NextUpgradeDomain', 'type': 'str'},
'rolling_upgrade_mode': {'key': 'RollingUpgradeMode', 'type': 'str'},
'upgrade_description': {'key': 'UpgradeDescription', 'type': 'ApplicationUpgradeDescription'},
'upgrade_duration_in_milliseconds': {'key': 'UpgradeDurationInMilliseconds', 'type': 'str'},
'upgrade_domain_duration_in_milliseconds': {'key': 'UpgradeDomainDurationInMilliseconds', 'type': 'str'},
'unhealthy_evaluations': {'key': 'UnhealthyEvaluations', 'type': '[HealthEvaluationWrapper]'},
'current_upgrade_domain_progress': {'key': 'CurrentUpgradeDomainProgress', 'type': 'CurrentUpgradeDomainProgressInfo'},
'start_timestamp_utc': {'key': 'StartTimestampUtc', 'type': 'str'},
'failure_timestamp_utc': {'key': 'FailureTimestampUtc', 'type': 'str'},
'failure_reason': {'key': 'FailureReason', 'type': 'str'},
'upgrade_domain_progress_at_failure': {'key': 'UpgradeDomainProgressAtFailure', 'type': 'FailureUpgradeDomainProgressInfo'},
'upgrade_status_details': {'key': 'UpgradeStatusDetails', 'type': 'str'},
}
def __init__(self, name=None, type_name=None, target_application_type_version=None, upgrade_domains=None, upgrade_state=None, next_upgrade_domain=None, rolling_upgrade_mode="UnmonitoredAuto", upgrade_description=None, upgrade_duration_in_milliseconds=None, upgrade_domain_duration_in_milliseconds=None, unhealthy_evaluations=None, current_upgrade_domain_progress=None, start_timestamp_utc=None, failure_timestamp_utc=None, failure_reason=None, upgrade_domain_progress_at_failure=None, upgrade_status_details=None):
self.name = name
self.type_name = type_name
self.target_application_type_version = target_application_type_version
self.upgrade_domains = upgrade_domains
self.upgrade_state = upgrade_state
self.next_upgrade_domain = next_upgrade_domain
self.rolling_upgrade_mode = rolling_upgrade_mode
self.upgrade_description = upgrade_description
self.upgrade_duration_in_milliseconds = upgrade_duration_in_milliseconds
self.upgrade_domain_duration_in_milliseconds = upgrade_domain_duration_in_milliseconds
self.unhealthy_evaluations = unhealthy_evaluations
self.current_upgrade_domain_progress = current_upgrade_domain_progress
self.start_timestamp_utc = start_timestamp_utc
self.failure_timestamp_utc = failure_timestamp_utc
self.failure_reason = failure_reason
self.upgrade_domain_progress_at_failure = upgrade_domain_progress_at_failure
self.upgrade_status_details = upgrade_status_details
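# Hedged usage sketch, not part of the generated client: this model is usually
# deserialized from a REST response, but it can also be built directly. All
# field values below are illustrative placeholders.
def _example_progress_info():
    """Build a minimal ApplicationUpgradeProgressInfo by hand."""
    return ApplicationUpgradeProgressInfo(
        name='fabric:/MyApp',
        type_name='MyAppType',
        target_application_type_version='2.0.0',
        upgrade_state='RollingForwardInProgress',
        rolling_upgrade_mode='Monitored',
    )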
|
{
"content_hash": "0cdb4c1ff259626a80e46bdc73a84651",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 518,
"avg_line_length": 58.683673469387756,
"alnum_prop": 0.7169187967310033,
"repo_name": "v-iam/azure-sdk-for-python",
"id": "2cc394942de50d4246e6dada4d7242a7fd0f9242",
"size": "6225",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "azure-servicefabric/azure/servicefabric/models/application_upgrade_progress_info.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19856874"
}
],
"symlink_target": ""
}
|
"""Functions and classes to make testing easier."""
import os
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import sample
from perfkitbenchmarker.configs import benchmark_config_spec
import six
from six.moves import range
_BENCHMARK_NAME = 'test_benchmark'
_BENCHMARK_UID = 'uid'
class SamplesTestMixin(object):
"""A mixin for unittest.TestCase that adds a type-specific equality
predicate for samples.
"""
def __init__(self, *args, **kwargs):
    super(SamplesTestMixin, self).__init__(*args, **kwargs)
    self.addTypeEqualityFunc(sample.Sample, self.assertSamplesEqualUpToTimestamp)
def assertSamplesEqualUpToTimestamp(self, a, b, msg=None):
"""Assert that two samples are equal, ignoring timestamp differences."""
self.assertEqual(a.metric, b.metric, msg or
'Samples %s and %s have different metrics' % (a, b))
if isinstance(a.value, float) and isinstance(b.value, float):
self.assertAlmostEqual(
a.value, b.value, msg=msg or
'Samples %s and %s have different values' % (a, b))
else:
self.assertEqual(
a.value, b.value, msg or
'Samples %s and %s have different values' % (a, b))
self.assertEqual(a.unit, b.unit, msg or
'Samples %s and %s have different units' % (a, b))
self.assertDictEqual(a.metadata, b.metadata, msg or
'Samples %s and %s have different metadata' % (a, b))
# Deliberately don't compare the timestamp fields of the samples.
def assertSampleListsEqualUpToTimestamp(self, a, b, msg=None):
"""Compare two lists of samples.
    Sadly, the builtin assertListEqual will only use Python's
built-in equality predicate for testing the equality of elements
in a list. Since we compare lists of samples a lot, we need a
custom test for that.
"""
self.assertEqual(len(a), len(b),
msg or 'Lists %s and %s are not the same length' % (a, b))
for i in range(len(a)):
self.assertIsInstance(a[i], sample.Sample,
msg or ('%s (item %s in list) is '
'not a sample.Sample object' %
(a[i], i)))
self.assertIsInstance(b[i], sample.Sample,
msg or ('%s (item %s in list) is '
'not a sample.Sample object' %
(b[i], i)))
try:
self.assertSamplesEqualUpToTimestamp(a[i], b[i], msg=msg)
except self.failureException as ex:
ex.message = str(ex) + (' (was item %s in list)' % i)
ex.args = (ex.message,)
raise ex
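# Hedged usage sketch (kept as a comment so test runners do not collect it):
# mix the helper into a unittest.TestCase to compare sample lists while
# ignoring timestamps. The metric values below are illustrative placeholders.
#
#   import unittest
#
#   class MySamplesTest(SamplesTestMixin, unittest.TestCase):
#
#     def testSamples(self):
#       a = [sample.Sample('latency', 1.0, 'ms', {})]
#       b = [sample.Sample('latency', 1.0, 'ms', {})]
#       self.assertSampleListsEqualUpToTimestamp(a, b)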
def assertDiskMounts(benchmark_config, mount_point):
"""Test whether a disk mounts in a given configuration.
Sets up a virtual machine following benchmark_config and then tests
whether the path mount_point contains a working disk by trying to
create a file there. Returns nothing if file creation works;
otherwise raises an exception.
Args:
benchmark_config: a dict in the format of
benchmark_spec.BenchmarkSpec. The config must specify exactly
one virtual machine.
mount_point: a path, represented as a string.
Raises:
RemoteCommandError if it cannot create a file at mount_point and
verify that the file exists.
AssertionError if benchmark_config does not specify exactly one
virtual machine.
"""
assert len(benchmark_config['vm_groups']) == 1
vm_group = next(six.itervalues(benchmark_config['vm_groups']))
assert vm_group.get('num_vms', 1) == 1
m = mock.MagicMock()
m.BENCHMARK_NAME = _BENCHMARK_NAME
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
_BENCHMARK_NAME, flag_values=flags.FLAGS, **benchmark_config)
spec = benchmark_spec.BenchmarkSpec(
m, config_spec, _BENCHMARK_UID)
with spec.RedirectGlobalFlags():
try:
spec.ConstructVirtualMachines()
spec.Provision()
vm = spec.vms[0]
test_file_path = os.path.join(mount_point, 'test_file')
vm.RemoteCommand('touch %s' % test_file_path)
# This will raise RemoteCommandError if the test file does not
# exist.
vm.RemoteCommand('test -e %s' % test_file_path)
finally:
spec.Delete()
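# Hedged usage sketch (kept as a comment because running it provisions a real
# VM): a minimal single-VM config in the shape assertDiskMounts expects. The
# cloud, machine type, and disk values are illustrative placeholders.
#
#   _CONFIG = {
#       'vm_groups': {
#           'default': {
#               'vm_spec': {'GCP': {'machine_type': 'n1-standard-2'}},
#               'disk_spec': {'GCP': {'disk_type': 'pd-standard',
#                                     'disk_size': 2,
#                                     'mount_point': '/scratch'}},
#           }
#       }
#   }
#   assertDiskMounts(_CONFIG, '/scratch')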
|
{
"content_hash": "c24cfbbf996fca28cab0866b5680d506",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 79,
"avg_line_length": 35.459016393442624,
"alnum_prop": 0.6410078594544614,
"repo_name": "GoogleCloudPlatform/PerfKitBenchmarker",
"id": "a98e9647e75a5558002a33e9f523ca52c44f0fe2",
"size": "4936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "perfkitbenchmarker/test_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3420"
},
{
"name": "HTML",
"bytes": "113073"
},
{
"name": "Jinja",
"bytes": "62005"
},
{
"name": "Lua",
"bytes": "1547"
},
{
"name": "Python",
"bytes": "6076512"
},
{
"name": "R",
"bytes": "1017"
},
{
"name": "Shell",
"bytes": "76164"
},
{
"name": "Tcl",
"bytes": "14601"
}
],
"symlink_target": ""
}
|
"""
Module that defines the interface between the `manager` (i.e. Django) and the `broker` (i.e. RabbitMQ).
Defines four functions involved in a job's lifecycle:
- `dispatch_job` - send a job to a queue
- `update_job` - update the status of a job by checking its (intermediate) result
- `check_job` - for a parent job, trigger any child jobs, and / or update its status
- `cancel_job` - remove a job from the queue, or terminate it if already started
"""
import datetime
import logging
import time
from celery import Celery, signature
from celery.result import AsyncResult
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.utils import timezone
from jobs.models import Job, JobMethod, JobStatus, Queue, Worker
logger = logging.getLogger(__name__)
# Setup the Celery app
app = Celery("manager", broker=settings.BROKER_URL, backend=settings.CACHE_URL)
app.conf.update(
# By default Celery will keep on trying to connect to the broker forever
# This overrides that. Initially try again immediately, then add 0.5 seconds for each
# subsequent try (with a maximum of 3 seconds).
# See https://github.com/celery/celery/issues/4296
broker_transport_options={
"max_retries": 10,
"interval_start": 0,
"interval_step": 0.5,
"interval_max": 3,
},
# Needed to ensure STARTED state is emitted
task_track_started=True,
)
def dispatch_job(job: Job) -> Job:
"""
Send a job to a queue.
Decides which queue a job should be sent to and sends it.
The queue can depend upon both the project and the account (either the
account that the project is linked to, or the default account of the job
creator).
"""
if not JobMethod.is_member(job.method):
raise ValueError("Unknown job method '{}'".format(job.method))
if job.method in settings.JOB_METHODS_STAFF_ONLY and (
not job.creator or not job.creator.is_staff
):
raise PermissionDenied
if JobMethod.is_compound(job.method):
children = job.children.all().order_by("id")
if len(children) == 0:
# If there are no children (e.g. a pull job for a project with no sources)
            # then the job is immediately finished
job.runtime = 0
job.is_active = False
job.status = JobStatus.SUCCESS.value
else:
if job.method == JobMethod.parallel.value:
# Dispatch all child jobs simultaneously
for child in children:
dispatch_job(child)
else:
# Dispatch the first child; subsequent children
# will be status WAITING and will get dispatched later
# on update of the parent.
for index, child in enumerate(children):
if index == 0:
dispatch_job(child)
else:
child.is_active = True
child.status = JobStatus.WAITING.value
child.save()
job.is_active = True
job.status = JobStatus.DISPATCHED.value
else:
        # Find queues that have active workers on them,
        # ordered by priority (ascending)
queues = list(
Queue.objects.filter(
workers__in=Worker.objects.filter(
# Has not finished
finished__isnull=True,
# Has been updated in the last x minutes
updated__gte=timezone.now() - datetime.timedelta(minutes=15),
),
).order_by("priority")
)
        # Fallback to the default Stencila queue.
        # Apart from anything else, having this fallback is useful in development
        # because it means that the `overseer` service does not need to be running
        # in order to keep track of the number of workers listening on each queue
        # (during development `worker`s listen to the default queue).
if len(queues) == 0:
logger.warning("No queues found with active workers")
queue, _ = Queue.get_or_create(
account_name="stencila", queue_name="default"
)
else:
if job.creator is None or job.project is None:
# Jobs created by anonymous users go on the lowest
# priority queue
priority = 1
else:
# The priority of other jobs is determined by the
# account tier of the project
priority = job.project.account.tier.id
queue = queues[min(len(queues), priority) - 1]
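            # Illustrative sketch (assumed values): with three queues ordered
            # by ascending priority, `queues[min(len(queues), priority) - 1]`
            # maps priority 1 -> queues[0] (lowest) and any priority >= 3
            # -> queues[2] (highest), so a high account tier can never index
            # past the end of the list.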
    # Add the job's project id, key and secrets to its kwargs.
# Doing this here ensures it is done for all jobs
# and avoids putting the secrets in the job's `params` field.
kwargs = dict(**job.params) if job.params else {}
kwargs["project"] = job.project.id if job.project else None
kwargs["key"] = job.key
kwargs["secrets"] = job.secrets
# Send the job to the queue
task = signature(
job.method, kwargs=kwargs, queue=queue.name, task_id=str(job.id), app=app,
)
task.apply_async()
job.queue = queue
job.is_active = True
job.status = JobStatus.DISPATCHED.value
job.save()
return job
def update_job(job: Job, data={}, force: bool = False) -> Job:
"""
Update a job.
This method is triggered by a PATCH request from the
    `overseer` service. It updates the status and other fields of
    the job and, if the job has a parent, updates its status too.
See https://stackoverflow.com/a/38267978 for important considerations
in using AsyncResult.
"""
# Avoid unnecessary update
if not job.is_active and not force:
return job
was_active = job.is_active
if JobMethod.is_compound(job.method):
# Update the status of compound jobs based on children
status = job.status
is_active = False
all_previous_succeeded = True
any_previous_failed = False
for child in job.get_children():
# If the child has a 'higher' status then update the
# status of the compound job
status = JobStatus.highest([status, child.status])
# If the child is still waiting then...
if child.status == JobStatus.WAITING.value:
# If all previous have succeeded, dispatch it
if all_previous_succeeded:
dispatch_job(child)
# If any previous have failed, cancel it
elif any_previous_failed:
cancel_job(child)
if child.status != JobStatus.SUCCESS.value:
all_previous_succeeded = False
if child.status == JobStatus.FAILURE.value:
any_previous_failed = True
# If the child is still active then the compound job is active
if child.is_active:
is_active = True
job.is_active = is_active
job.status = JobStatus.RUNNING.value if is_active else status
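        # Illustrative walk-through (assumed statuses): for children in order
        # [SUCCESS, RUNNING, WAITING], the WAITING child is not dispatched
        # (all_previous_succeeded becomes False once the RUNNING child is
        # seen), is_active stays True, and the parent is reported as RUNNING.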
else:
status = data.get("status")
assert status
        # Do not do anything if the new status is of lower rank than the
        # existing status. This can happen, for example, when a job is
        # terminated (the SUCCESS state is sent after TERMINATED).
if JobStatus.rank(status) < JobStatus.rank(job.status):
return job
# Update fields sent by `overseer` service, including `status`
for key, value in data.items():
setattr(job, key, value)
def async_result():
return AsyncResult(str(job.id), app=app)
# If job succeeded then get the result if we haven't already
if status == JobStatus.SUCCESS.value and job.result is None:
response = None
attempts = 0
while not response and attempts < 5:
try:
response = async_result().get(timeout=30)
except Exception:
# Catch all errors, but log them. Occasional
# errors encountered in prod include ResponseError and TimeoutError
logger.warning(
"Error getting async result",
exc_info=True,
extra=dict(id=job.id, method=job.method, attempts=attempts),
)
time.sleep(1)
attempts += 1
if response:
job.result = response.get("result")
job.log = response.get("log")
else:
logger.error(
"Unable to get async result",
extra=dict(id=job.id, method=job.method, attempts=attempts),
)
job.status = JobStatus.FAILURE.value
job.error = dict(
type="RuntimeError", message="Unable to get result of job"
)
# If job failed then get the error
# For FAILURE, `info` is the raised Exception
elif status == JobStatus.FAILURE.value:
info = async_result().info
if info:
job.error = dict(type=type(info).__name__, message=str(info))
# If the job has just ended then mark it as inactive
if JobStatus.has_ended(status):
job.is_active = False
# If the job is no longer active clear its secrets and run its callback
if was_active and not job.is_active:
job.secrets = None
job.run_callback()
# Save before updating parent (and then this again)
job.save()
# If the job has a parent then update it too
if job.parent:
update_job(job.parent)
return job
def cancel_job(job: Job) -> Job:
"""
Cancel a job.
This uses Celery's terminate options which will kill the worker child process.
This is not normally recommended but in this case is OK because there is only
one task per process.
See `worker/worker.py` for the reasoning for using `SIGUSR1`.
See https://docs.celeryproject.org/en/stable/userguide/workers.html#revoke-revoking-tasks
"""
if job.is_active:
if JobMethod.is_compound(job.method):
for child in job.children.all():
cancel_job(child)
else:
app.control.revoke(str(job.id), terminate=True, signal="SIGUSR1")
job.status = JobStatus.CANCELLED.value
job.is_active = False
job.secrets = None
job.save()
return job
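# Illustrative lifecycle sketch (not part of the original module; the calls
# and statuses shown are assumptions based on the functions above):
#
#   job = dispatch_job(job)        # -> status DISPATCHED, sent to a queue
#   job = update_job(job, {"status": JobStatus.RUNNING.value})
#   job = update_job(job, {"status": JobStatus.SUCCESS.value})
#   # ...or, while the job is still active:
#   job = cancel_job(job)          # -> status CANCELLED, secrets cleared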
|
{
"content_hash": "e288d7f4a07a3b681df4c693185678c1",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 102,
"avg_line_length": 37.142361111111114,
"alnum_prop": 0.5897915303356082,
"repo_name": "stencila/hub",
"id": "29a7fecfec58a37e5770387c0619949240d50800",
"size": "10697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manager/jobs/jobs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "5505"
},
{
"name": "HTML",
"bytes": "274142"
},
{
"name": "JavaScript",
"bytes": "18731"
},
{
"name": "Makefile",
"bytes": "14959"
},
{
"name": "Mustache",
"bytes": "1137"
},
{
"name": "Python",
"bytes": "1262375"
},
{
"name": "SCSS",
"bytes": "31993"
},
{
"name": "Shell",
"bytes": "8726"
},
{
"name": "TypeScript",
"bytes": "5270"
}
],
"symlink_target": ""
}
|
""" Principal Component Analysis.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis A. Engemann <denis-alexander.engemann@inria.fr>
# Michael Eickenberg <michael.eickenberg@inria.fr>
# Giorgio Patrini <giorgio.patrini@anu.edu.au>
#
# License: BSD 3 clause
from math import log, sqrt
import numbers
import numpy as np
from scipy import linalg
from scipy.special import gammaln
from scipy.sparse import issparse
from scipy.sparse.linalg import svds
from ._base import _BasePCA
from ..utils import check_random_state
from ..utils._arpack import _init_arpack_v0
from ..utils.extmath import fast_logdet, randomized_svd, svd_flip
from ..utils.extmath import stable_cumsum
from ..utils.validation import check_is_fitted
def _assess_dimension(spectrum, rank, n_samples):
"""Compute the log-likelihood of a rank ``rank`` dataset.
    The dataset is assumed to be embedded in gaussian noise of shape (n,
dimf) having spectrum ``spectrum``. This implements the method of
T. P. Minka.
Parameters
----------
spectrum : ndarray of shape (n_features,)
Data spectrum.
rank : int
Tested rank value. It should be strictly lower than n_features,
otherwise the method isn't specified (division by zero in equation
(31) from the paper).
n_samples : int
Number of samples.
Returns
-------
ll : float
The log-likelihood.
References
----------
This implements the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604
<https://proceedings.neurips.cc/paper/2000/file/7503cfacd12053d309b6bed5c89de212-Paper.pdf>`_
"""
n_features = spectrum.shape[0]
if not 1 <= rank < n_features:
raise ValueError("the tested rank should be in [1, n_features - 1]")
eps = 1e-15
if spectrum[rank - 1] < eps:
# When the tested rank is associated with a small eigenvalue, there's
# no point in computing the log-likelihood: it's going to be very
# small and won't be the max anyway. Also, it can lead to numerical
# issues below when computing pa, in particular in log((spectrum[i] -
# spectrum[j]) because this will take the log of something very small.
return -np.inf
pu = -rank * log(2.0)
for i in range(1, rank + 1):
pu += (
gammaln((n_features - i + 1) / 2.0)
- log(np.pi) * (n_features - i + 1) / 2.0
)
pl = np.sum(np.log(spectrum[:rank]))
pl = -pl * n_samples / 2.0
v = max(eps, np.sum(spectrum[rank:]) / (n_features - rank))
pv = -np.log(v) * n_samples * (n_features - rank) / 2.0
m = n_features * rank - rank * (rank + 1.0) / 2.0
pp = log(2.0 * np.pi) * (m + rank) / 2.0
pa = 0.0
spectrum_ = spectrum.copy()
spectrum_[rank:n_features] = v
for i in range(rank):
for j in range(i + 1, len(spectrum)):
pa += log(
(spectrum[i] - spectrum[j]) * (1.0 / spectrum_[j] - 1.0 / spectrum_[i])
) + log(n_samples)
ll = pu + pl + pv + pp - pa / 2.0 - rank * log(n_samples) / 2.0
return ll
def _infer_dimension(spectrum, n_samples):
"""Infers the dimension of a dataset with a given spectrum.
The returned value will be in [1, n_features - 1].
"""
ll = np.empty_like(spectrum)
ll[0] = -np.inf # we don't want to return n_components = 0
for rank in range(1, spectrum.shape[0]):
ll[rank] = _assess_dimension(spectrum, rank, n_samples)
return ll.argmax()
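# Illustrative sketch (not part of scikit-learn): for a spectrum with a sharp
# drop after the second eigenvalue, the Minka log-likelihood is expected to
# peak at rank 2, e.g.
#
#   spectrum = np.array([10.0, 5.0, 0.01, 0.009])
#   _infer_dimension(spectrum, n_samples=100)   # expected -> 2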
class PCA(_BasePCA):
"""Principal component analysis (PCA).
Linear dimensionality reduction using Singular Value Decomposition of the
data to project it to a lower dimensional space. The input data is centered
but not scaled for each feature before applying the SVD.
It uses the LAPACK implementation of the full SVD or a randomized truncated
SVD by the method of Halko et al. 2009, depending on the shape of the input
data and the number of components to extract.
It can also use the scipy.sparse.linalg ARPACK implementation of the
truncated SVD.
Notice that this class does not support sparse input. See
:class:`TruncatedSVD` for an alternative with sparse data.
Read more in the :ref:`User Guide <PCA>`.
Parameters
----------
n_components : int, float or 'mle', default=None
Number of components to keep.
if n_components is not set all components are kept::
n_components == min(n_samples, n_features)
If ``n_components == 'mle'`` and ``svd_solver == 'full'``, Minka's
MLE is used to guess the dimension. Use of ``n_components == 'mle'``
will interpret ``svd_solver == 'auto'`` as ``svd_solver == 'full'``.
If ``0 < n_components < 1`` and ``svd_solver == 'full'``, select the
number of components such that the amount of variance that needs to be
explained is greater than the percentage specified by n_components.
If ``svd_solver == 'arpack'``, the number of components must be
strictly less than the minimum of n_features and n_samples.
Hence, the None case results in::
n_components == min(n_samples, n_features) - 1
copy : bool, default=True
If False, data passed to fit are overwritten and running
        fit(X).transform(X) will not yield the expected results;
use fit_transform(X) instead.
whiten : bool, default=False
When True (False by default) the `components_` vectors are multiplied
by the square root of n_samples and then divided by the singular values
to ensure uncorrelated outputs with unit component-wise variances.
Whitening will remove some information from the transformed signal
    (the relative variance scales of the components) but can sometimes
improve the predictive accuracy of the downstream estimators by
making their data respect some hard-wired assumptions.
svd_solver : {'auto', 'full', 'arpack', 'randomized'}, default='auto'
If auto :
The solver is selected by a default policy based on `X.shape` and
`n_components`: if the input data is larger than 500x500 and the
number of components to extract is lower than 80% of the smallest
dimension of the data, then the more efficient 'randomized'
method is enabled. Otherwise the exact full SVD is computed and
optionally truncated afterwards.
If full :
run exact full SVD calling the standard LAPACK solver via
`scipy.linalg.svd` and select the components by postprocessing
If arpack :
run SVD truncated to n_components calling ARPACK solver via
`scipy.sparse.linalg.svds`. It requires strictly
0 < n_components < min(X.shape)
If randomized :
run randomized SVD by the method of Halko et al.
.. versionadded:: 0.18.0
tol : float, default=0.0
Tolerance for singular values computed by svd_solver == 'arpack'.
Must be of range [0.0, infinity).
.. versionadded:: 0.18.0
iterated_power : int or 'auto', default='auto'
Number of iterations for the power method computed by
svd_solver == 'randomized'.
Must be of range [0, infinity).
.. versionadded:: 0.18.0
random_state : int, RandomState instance or None, default=None
Used when the 'arpack' or 'randomized' solvers are used. Pass an int
for reproducible results across multiple function calls.
See :term:`Glossary <random_state>`.
.. versionadded:: 0.18.0
Attributes
----------
components_ : ndarray of shape (n_components, n_features)
Principal axes in feature space, representing the directions of
maximum variance in the data. The components are sorted by
``explained_variance_``.
explained_variance_ : ndarray of shape (n_components,)
The amount of variance explained by each of the selected components.
The variance estimation uses `n_samples - 1` degrees of freedom.
Equal to n_components largest eigenvalues
of the covariance matrix of X.
.. versionadded:: 0.18
explained_variance_ratio_ : ndarray of shape (n_components,)
Percentage of variance explained by each of the selected components.
If ``n_components`` is not set then all components are stored and the
sum of the ratios is equal to 1.0.
singular_values_ : ndarray of shape (n_components,)
The singular values corresponding to each of the selected components.
The singular values are equal to the 2-norms of the ``n_components``
variables in the lower-dimensional space.
.. versionadded:: 0.19
mean_ : ndarray of shape (n_features,)
Per-feature empirical mean, estimated from the training set.
Equal to `X.mean(axis=0)`.
n_components_ : int
The estimated number of components. When n_components is set
to 'mle' or a number between 0 and 1 (with svd_solver == 'full') this
number is estimated from input data. Otherwise it equals the parameter
n_components, or the lesser value of n_features and n_samples
if n_components is None.
n_features_ : int
Number of features in the training data.
n_samples_ : int
Number of samples in the training data.
noise_variance_ : float
The estimated noise covariance following the Probabilistic PCA model
from Tipping and Bishop 1999. See "Pattern Recognition and
Machine Learning" by C. Bishop, 12.2.1 p. 574 or
http://www.miketipping.com/papers/met-mppca.pdf. It is required to
compute the estimated data covariance and score samples.
Equal to the average of (min(n_features, n_samples) - n_components)
smallest eigenvalues of the covariance matrix of X.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
See Also
--------
KernelPCA : Kernel Principal Component Analysis.
SparsePCA : Sparse Principal Component Analysis.
TruncatedSVD : Dimensionality reduction using truncated SVD.
IncrementalPCA : Incremental Principal Component Analysis.
References
----------
For n_components == 'mle', this class uses the method from:
`Minka, T. P.. "Automatic choice of dimensionality for PCA".
In NIPS, pp. 598-604 <https://tminka.github.io/papers/pca/minka-pca.pdf>`_
Implements the probabilistic PCA model from:
`Tipping, M. E., and Bishop, C. M. (1999). "Probabilistic principal
component analysis". Journal of the Royal Statistical Society:
Series B (Statistical Methodology), 61(3), 611-622.
<http://www.miketipping.com/papers/met-mppca.pdf>`_
via the score and score_samples methods.
For svd_solver == 'arpack', refer to `scipy.sparse.linalg.svds`.
For svd_solver == 'randomized', see:
`Halko, N., Martinsson, P. G., and Tropp, J. A. (2011).
"Finding structure with randomness: Probabilistic algorithms for
constructing approximate matrix decompositions".
SIAM review, 53(2), 217-288.
<https://doi.org/10.1137/090771806>`_
and also
`Martinsson, P. G., Rokhlin, V., and Tygert, M. (2011).
"A randomized algorithm for the decomposition of matrices".
Applied and Computational Harmonic Analysis, 30(1), 47-68
<https://doi.org/10.1016/j.acha.2010.02.003>`_.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import PCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = PCA(n_components=2)
>>> pca.fit(X)
PCA(n_components=2)
>>> print(pca.explained_variance_ratio_)
[0.9924... 0.0075...]
>>> print(pca.singular_values_)
[6.30061... 0.54980...]
>>> pca = PCA(n_components=2, svd_solver='full')
>>> pca.fit(X)
PCA(n_components=2, svd_solver='full')
>>> print(pca.explained_variance_ratio_)
[0.9924... 0.00755...]
>>> print(pca.singular_values_)
[6.30061... 0.54980...]
>>> pca = PCA(n_components=1, svd_solver='arpack')
>>> pca.fit(X)
PCA(n_components=1, svd_solver='arpack')
>>> print(pca.explained_variance_ratio_)
[0.99244...]
>>> print(pca.singular_values_)
[6.30061...]
"""
def __init__(
self,
n_components=None,
*,
copy=True,
whiten=False,
svd_solver="auto",
tol=0.0,
iterated_power="auto",
random_state=None,
):
self.n_components = n_components
self.copy = copy
self.whiten = whiten
self.svd_solver = svd_solver
self.tol = tol
self.iterated_power = iterated_power
self.random_state = random_state
def fit(self, X, y=None):
"""Fit the model with X.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
y : Ignored
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(X)
return self
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
y : Ignored
Returns
-------
X_new : ndarray of shape (n_samples, n_components)
Transformed values.
Notes
-----
This method returns a Fortran-ordered array. To convert it to a
C-ordered array, use 'np.ascontiguousarray'.
"""
U, S, Vt = self._fit(X)
U = U[:, : self.n_components_]
if self.whiten:
            # X_new = X * V / S * sqrt(n_samples - 1) = U * sqrt(n_samples - 1)
U *= sqrt(X.shape[0] - 1)
else:
# X_new = X * V = U * S * Vt * V = U * S
U *= S[: self.n_components_]
return U
def _fit(self, X):
"""Dispatch to the right submethod depending on the chosen solver."""
# Raise an error for sparse input.
# This is more informative than the generic one raised by check_array.
if issparse(X):
raise TypeError(
"PCA does not support sparse input. See "
"TruncatedSVD for a possible alternative."
)
X = self._validate_data(
X, dtype=[np.float64, np.float32], ensure_2d=True, copy=self.copy
)
# Handle n_components==None
if self.n_components is None:
if self.svd_solver != "arpack":
n_components = min(X.shape)
else:
n_components = min(X.shape) - 1
else:
n_components = self.n_components
# Handle svd_solver
self._fit_svd_solver = self.svd_solver
if self._fit_svd_solver == "auto":
# Small problem or n_components == 'mle', just call full PCA
if max(X.shape) <= 500 or n_components == "mle":
self._fit_svd_solver = "full"
elif n_components >= 1 and n_components < 0.8 * min(X.shape):
self._fit_svd_solver = "randomized"
# This is also the case of n_components in (0,1)
else:
self._fit_svd_solver = "full"
# Call different fits for either full or truncated SVD
if self._fit_svd_solver == "full":
return self._fit_full(X, n_components)
elif self._fit_svd_solver in ["arpack", "randomized"]:
return self._fit_truncated(X, n_components, self._fit_svd_solver)
else:
raise ValueError(
"Unrecognized svd_solver='{0}'" "".format(self._fit_svd_solver)
)
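    # Illustrative sketch (not part of scikit-learn) of the 'auto' policy
    # above: for X of shape (1000, 50) and n_components=10, max(X.shape) > 500
    # and 10 < 0.8 * min(X.shape) == 40, so 'randomized' is chosen; for X of
    # shape (100, 100) the problem is small and 'full' is chosen.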
def _fit_full(self, X, n_components):
"""Fit the model by computing full SVD on X."""
n_samples, n_features = X.shape
if n_components == "mle":
if n_samples < n_features:
raise ValueError(
"n_components='mle' is only supported " "if n_samples >= n_features"
)
elif not 0 <= n_components <= min(n_samples, n_features):
raise ValueError(
"n_components=%r must be between 0 and "
"min(n_samples, n_features)=%r with "
"svd_solver='full'" % (n_components, min(n_samples, n_features))
)
elif n_components >= 1:
if not isinstance(n_components, numbers.Integral):
raise ValueError(
"n_components=%r must be of type int "
"when greater than or equal to 1, "
"was of type=%r" % (n_components, type(n_components))
)
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
U, S, Vt = linalg.svd(X, full_matrices=False)
# flip eigenvectors' sign to enforce deterministic output
U, Vt = svd_flip(U, Vt)
components_ = Vt
# Get variance explained by singular values
explained_variance_ = (S ** 2) / (n_samples - 1)
total_var = explained_variance_.sum()
explained_variance_ratio_ = explained_variance_ / total_var
singular_values_ = S.copy() # Store the singular values.
# Postprocess the number of components required
if n_components == "mle":
n_components = _infer_dimension(explained_variance_, n_samples)
elif 0 < n_components < 1.0:
            # number of components for which the cumulative explained
            # variance percentage exceeds the desired threshold.
            # side='right' ensures that the cumulative explained variance of
            # the selected components is strictly greater than the
            # n_components float passed. More discussion in issue: #15669
ratio_cumsum = stable_cumsum(explained_variance_ratio_)
n_components = np.searchsorted(ratio_cumsum, n_components, side="right") + 1
# Compute noise covariance using Probabilistic PCA model
# The sigma2 maximum likelihood (cf. eq. 12.46)
if n_components < min(n_features, n_samples):
self.noise_variance_ = explained_variance_[n_components:].mean()
else:
self.noise_variance_ = 0.0
self.n_samples_, self.n_features_ = n_samples, n_features
self.components_ = components_[:n_components]
self.n_components_ = n_components
self.explained_variance_ = explained_variance_[:n_components]
self.explained_variance_ratio_ = explained_variance_ratio_[:n_components]
self.singular_values_ = singular_values_[:n_components]
return U, S, Vt
def _fit_truncated(self, X, n_components, svd_solver):
"""Fit the model by computing truncated SVD (by ARPACK or randomized)
on X.
"""
n_samples, n_features = X.shape
if isinstance(n_components, str):
raise ValueError(
"n_components=%r cannot be a string "
"with svd_solver='%s'" % (n_components, svd_solver)
)
elif not 1 <= n_components <= min(n_samples, n_features):
raise ValueError(
"n_components=%r must be between 1 and "
"min(n_samples, n_features)=%r with "
"svd_solver='%s'"
% (n_components, min(n_samples, n_features), svd_solver)
)
elif not isinstance(n_components, numbers.Integral):
raise ValueError(
"n_components=%r must be of type int "
"when greater than or equal to 1, was of type=%r"
% (n_components, type(n_components))
)
elif svd_solver == "arpack" and n_components == min(n_samples, n_features):
raise ValueError(
"n_components=%r must be strictly less than "
"min(n_samples, n_features)=%r with "
"svd_solver='%s'"
% (n_components, min(n_samples, n_features), svd_solver)
)
random_state = check_random_state(self.random_state)
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
if svd_solver == "arpack":
v0 = _init_arpack_v0(min(X.shape), random_state)
U, S, Vt = svds(X, k=n_components, tol=self.tol, v0=v0)
# svds doesn't abide by scipy.linalg.svd/randomized_svd
# conventions, so reverse its outputs.
S = S[::-1]
# flip eigenvectors' sign to enforce deterministic output
U, Vt = svd_flip(U[:, ::-1], Vt[::-1])
elif svd_solver == "randomized":
# sign flipping is done inside
U, S, Vt = randomized_svd(
X,
n_components=n_components,
n_iter=self.iterated_power,
flip_sign=True,
random_state=random_state,
)
self.n_samples_, self.n_features_ = n_samples, n_features
self.components_ = Vt
self.n_components_ = n_components
# Get variance explained by singular values
self.explained_variance_ = (S ** 2) / (n_samples - 1)
total_var = np.var(X, ddof=1, axis=0)
self.explained_variance_ratio_ = self.explained_variance_ / total_var.sum()
self.singular_values_ = S.copy() # Store the singular values.
if self.n_components_ < min(n_features, n_samples):
self.noise_variance_ = total_var.sum() - self.explained_variance_.sum()
self.noise_variance_ /= min(n_features, n_samples) - n_components
else:
self.noise_variance_ = 0.0
return U, S, Vt
def score_samples(self, X):
"""Return the log-likelihood of each sample.
See. "Pattern Recognition and Machine Learning"
by C. Bishop, 12.2.1 p. 574
or http://www.miketipping.com/papers/met-mppca.pdf
Parameters
----------
X : array-like of shape (n_samples, n_features)
The data.
Returns
-------
ll : ndarray of shape (n_samples,)
Log-likelihood of each sample under the current model.
"""
check_is_fitted(self)
X = self._validate_data(X, dtype=[np.float64, np.float32], reset=False)
Xr = X - self.mean_
n_features = X.shape[1]
precision = self.get_precision()
log_like = -0.5 * (Xr * (np.dot(Xr, precision))).sum(axis=1)
log_like -= 0.5 * (n_features * log(2.0 * np.pi) - fast_logdet(precision))
return log_like
def score(self, X, y=None):
"""Return the average log-likelihood of all samples.
See. "Pattern Recognition and Machine Learning"
by C. Bishop, 12.2.1 p. 574
or http://www.miketipping.com/papers/met-mppca.pdf
Parameters
----------
X : array-like of shape (n_samples, n_features)
The data.
y : Ignored
Returns
-------
ll : float
Average log-likelihood of the samples under the current model.
"""
return np.mean(self.score_samples(X))
def _more_tags(self):
return {"preserves_dtype": [np.float64, np.float32]}
|
{
"content_hash": "976484b53a6260d1a975bd6373d2b167",
"timestamp": "",
"source": "github",
"line_count": 646,
"max_line_length": 97,
"avg_line_length": 36.87461300309597,
"alnum_prop": 0.595566936736493,
"repo_name": "amueller/scikit-learn",
"id": "01a2d7ac461dccb2c5e624c07375635a60abf4a6",
"size": "23821",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sklearn/decomposition/_pca.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2232"
},
{
"name": "C",
"bytes": "41206"
},
{
"name": "C++",
"bytes": "146835"
},
{
"name": "Makefile",
"bytes": "1711"
},
{
"name": "Python",
"bytes": "9958394"
},
{
"name": "Shell",
"bytes": "44588"
}
],
"symlink_target": ""
}
|
import datetime
from django.conf import settings
from jira.client import JIRA
def get_jira():
jira_kwargs = {
'options': {'server': settings.JIRA_SERVER},
'basic_auth': (settings.JIRA_USER, settings.JIRA_PASSWORD),
}
return JIRA(**jira_kwargs)
def default_newissue_kwargs():
duedate = datetime.date.today() + datetime.timedelta(days=settings.JIRA_DUEIN_DAYS)
return {
'project': {'key': settings.JIRA_SERVICES_PROJECT_KEY},
'issuetype': {'name': 'Task'},
'duedate': str(duedate),
}
def default_feedback_kwargs():
duedate = datetime.date.today() + datetime.timedelta(days=settings.JIRA_DUEIN_DAYS)
return {
'project': {'key': settings.JIRA_FEEDBACK_PROJECT_KEY},
'issuetype': {'name': 'Task'},
'duedate': str(duedate),
}
def default_request_for_service_kwargs():
duedate = datetime.date.today() + datetime.timedelta(days=settings.JIRA_DUEIN_DAYS)
return {
'project': {'key': settings.JIRA_REQUEST_SERVICE_PROJECT_KEY},
'issuetype': {'name': 'Task'},
'duedate': str(duedate),
}
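# Illustrative usage (a sketch, not part of the original module; the summary
# and description values are hypothetical):
#
#   jira = get_jira()
#   fields = default_feedback_kwargs()
#   fields['summary'] = 'Feedback from service page'
#   fields['description'] = 'Details of the feedback...'
#   issue = jira.create_issue(fields=fields)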
|
{
"content_hash": "e61ba4c168fb739004effcea70e197e0",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 87,
"avg_line_length": 28.05,
"alnum_prop": 0.6301247771836007,
"repo_name": "theirc/ServiceInfo",
"id": "e20fcdc3d78a531504ebd25839e808904ec819d8",
"size": "1122",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "services/jira_support.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "91208"
},
{
"name": "HTML",
"bytes": "169211"
},
{
"name": "JavaScript",
"bytes": "126261"
},
{
"name": "Python",
"bytes": "486647"
},
{
"name": "Shell",
"bytes": "141"
}
],
"symlink_target": ""
}
|
import os, sys, py_compile
source_dir = sys.argv[ 1 ]
binary_dir = sys.argv[ 2 ]
if not source_dir.endswith('/'): source_dir += '/'
if not binary_dir.endswith('/'): binary_dir += '/'
tree = list( os.walk( source_dir ) )
if source_dir != binary_dir:
replace = True
else:
replace = False
for idir, dirs, files in tree:
ndir = idir.replace( source_dir , '' )
wdir = binary_dir
if not idir.endswith('/'):
idir += '/'
if ndir:
if not ndir.endswith('/'):
ndir += '/'
wdir += ndir
for d in dirs:
if not os.path.exists( wdir + d ):
os.system( 'mkdir ' + wdir + d )
print 'Created new folder in:', wdir + d
for f in files:
if f.endswith('.py'):
ifile = idir + f
ofile = wdir + f + 'c'
if not os.path.exists( ofile ) or os.stat( ofile ).st_mtime < os.stat( ifile ).st_mtime:
print 'Building file', ofile
py_compile.compile( ifile )
if replace:
os.system( 'mv ' + ifile + 'c ' + ofile )
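# Illustrative invocation (assumed, based on the sys.argv handling above):
#
#   python2 compile.py /path/to/source /path/to/binary
#
# Note that this is Python 2 code: under Python 3, py_compile.compile()
# writes the .pyc into __pycache__ rather than next to the source file,
# so the 'mv' above would need its source path adjusted.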
|
{
"content_hash": "85d09f8e5c37948040379db4a8fcc77a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 100,
"avg_line_length": 29.54054054054054,
"alnum_prop": 0.5096065873741995,
"repo_name": "mrpPhys/Isis",
"id": "1306dba96a10141bb275c094fa288bd280529953",
"size": "1438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/compile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "333499"
},
{
"name": "CMake",
"bytes": "19658"
},
{
"name": "Python",
"bytes": "96556"
},
{
"name": "Shell",
"bytes": "6628"
}
],
"symlink_target": ""
}
|
"""
minimap.py
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import namedtuple
from OpenGL import GL
from PySide import QtCore, QtGui
import logging
import numpy
from mcedit2.rendering import compass, scenegraph, rendergraph
from mcedit2.rendering.layers import Layer
from mcedit2.util.glutils import gl
from mcedit2.util.raycast import rayCastInBounds, MaxDistanceError
from mcedit2.worldview.worldview import WorldView
from mceditlib.geometry import Vector, Ray
log = logging.getLogger(__name__)
class LineSegment(namedtuple("LineSegment", "p1 p2")):
def atHeight(self, y):
p1 = self.p1
p2 = self.p2
if not (p1.y < y < p2.y or p1.y > y > p2.y):
return None
r = Ray.fromPoints(p1, p2)
return r.atHeight(y)
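# Illustrative sketch (not part of the original module; coordinates are
# assumed): a vertical segment from (0, 0, 0) to (0, 10, 0) intersects the
# plane y=4 at (0, 4, 0), while atHeight(12) returns None because 12 lies
# outside the segment's y range.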
class ViewCornersRenderNode(rendergraph.RenderNode):
#
# Renders the intersection of a horizontal plane with the view frustum
# We only check the four vertical segments (with respect to the view angle)
# and the four segments pointing outward from the viewer.
    # The four outward segments are checked first; if all four intersect, they are used
    # as the corners. If only two outward segments intersect, the two furthest
# verticals are intersected and used as the last two corners.
# If no outward segments intersect (rare) then all four verticals are used.
# 0: bottom left, near
# 1: bottom left, far
# 2: top left, near
# 3: top left, far
# 4: bottom right, near
# 5: bottom right, far
# 6: top right, near
# 7: top right, far
verticalIndices = [
(1, 3),
(5, 7),
(0, 2),
(4, 6),
]
outwardIndices = [
(0, 1),
(2, 3),
(4, 5),
(6, 7),
]
def drawSelf(self):
if self.sceneNode.corners is None or self.sceneNode.dimension is None:
return
corners = self.sceneNode.corners
outwardSegments = [LineSegment(corners[i], corners[j]) for i, j in self.outwardIndices]
verticalSegments = [LineSegment(corners[i], corners[j]) for i, j in self.verticalIndices]
points = []
for segment in outwardSegments:
p = segment.atHeight(self.sceneNode.planeHeight)
if p is not None:
points.append(p)
if len(points) < 4:
# only intersected two outward segments. check the far verticals.
for segment in verticalSegments[:2]:
r = Ray.fromPoints(*segment)
points.append(r.atHeight(self.sceneNode.planeHeight))
if len(points) < 4:
# intersected zero outward segments!
# rarely occurs, the near verticals are 1/10 of a block tall
for segment in verticalSegments[2:]:
r = Ray.fromPoints(*segment)
points.append(r.atHeight(self.sceneNode.planeHeight))
if len(points) < 4:
return
p1, p2, p3, p4 = points[:4]
points = [p1, p2, p4, p3, p1]
with gl.glPushAttrib(GL.GL_DEPTH_BUFFER_BIT, GL.GL_COLOR_BUFFER_BIT):
GL.glDepthMask(False)
GL.glEnable(GL.GL_BLEND)
GL.glVertexPointer(3, GL.GL_FLOAT, 0, numpy.array(points).ravel())
GL.glLineWidth(3.0)
GL.glColor(1, 1, .1, 0.5)
GL.glDisable(GL.GL_DEPTH_TEST)
GL.glDrawArrays(GL.GL_LINE_STRIP, 0, len(points))
class ViewCornersNode(scenegraph.Node):
RenderNodeClass = ViewCornersRenderNode
_corners = None
@property
def corners(self):
return self._corners
@corners.setter
def corners(self, value):
self._corners = value
self.dirty = True
_dimension = None
@property
def dimension(self):
return self._dimension
@dimension.setter
def dimension(self, value):
self._dimension = value
self.dirty = True
_planeHeight = None
@property
def planeHeight(self):
return self._planeHeight
@planeHeight.setter
def planeHeight(self, value):
self._planeHeight = value
self.dirty = True
class MinimapWorldView(WorldView):
minScale = 1.
def __init__(self, *a, **kw):
WorldView.__init__(self, *a, **kw)
self.setSizePolicy(QtGui.QSizePolicy.Policy.Minimum, QtGui.QSizePolicy.Policy.Minimum)
self.scale = 1.0
self.worldScene.minlod = 2
self.viewCornersNode = ViewCornersNode()
self.viewCornersNode.dimension = self.dimension
self.matrixNode.addChild(self.viewCornersNode)
def createWorldScene(self):
scene = super(MinimapWorldView, self).createWorldScene()
self.layerToggleGroup.setVisibleLayers([Layer.Blocks])
return scene
def createCompass(self):
compassNode = compass.CompassNode(small=True)
compassNode.yawPitch = 180, 0
return compassNode
def updateMatrices(self):
w, h = self.width(), self.height()
w *= self.scale
h *= self.scale
projection = QtGui.QMatrix4x4()
projection.ortho(-w/2, w/2, -h/2, h/2, -1000, 2000)
self.matrixNode.projection = projection
modelview = QtGui.QMatrix4x4()
modelview.rotate(90., 1., 0., 0.)
modelview.translate(-self.centerPoint[0], 0, -self.centerPoint[2])
self.matrixNode.modelview = modelview
def currentViewMatrixChanged(self, currentView):
self.viewCornersNode.corners = currentView.getViewCorners()
try:
targetPoint, face = rayCastInBounds(Ray(currentView.centerPoint, currentView.cameraVector), self.dimension, 100)
if targetPoint is None:
raise MaxDistanceError
planeHeight = targetPoint.y
except MaxDistanceError:
planeDistance = 20
planeHeight = (currentView.centerPoint + currentView.cameraVector * planeDistance).y
self.viewCornersNode.planeHeight = planeHeight
def zoom(self, scale, (mx, my)):
        # Get the position at the center of the view in world coordinates
worldPos = self.unprojectAtHeight(self.width()/2, self.height()/2, 0)
if scale != self.scale:
self.scale = scale
            # Get the new position at the view center, find its distance from
            # the old position, and shift the centerPoint by that amount.
newWorldPos = self.unprojectAtHeight(self.width()/2, self.height()/2, 0)
delta = newWorldPos - worldPos
self.centerPoint = self.centerPoint - delta
def sizeHint(self):
return QtCore.QSize(192, 192)
def mousePressEvent(self, event):
event.ignore()
def mouseMoveEvent(self, event):
event.ignore()
def mouseReleaseEvent(self, event):
event.ignore()
|
{
"content_hash": "31d0910072a7c25aee3e1d8a071e1b4d",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 124,
"avg_line_length": 31.186363636363637,
"alnum_prop": 0.6257105378224749,
"repo_name": "Rubisk/mcedit2",
"id": "f33416c4dd42ecdf80e6f79757c391c29e774511",
"size": "6861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mcedit2/worldview/minimap.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "8578"
},
{
"name": "Makefile",
"bytes": "156"
},
{
"name": "Python",
"bytes": "1198213"
}
],
"symlink_target": ""
}
|
'''
The MIT License (MIT)
Copyright (c) 2015 Thami Rusdi Agus - https://github.com/janglapuk/SPB-OpenCV-Recognizer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import gui, rebuild
if __name__ == '__main__':
rebuild.RebuildCsv()
app = gui.GUI()
app.show()
|
{
"content_hash": "248387c267123f645320e068acc728e4",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 88,
"avg_line_length": 41.733333333333334,
"alnum_prop": 0.7755591054313099,
"repo_name": "janglapuk/SPB-OpenCV-Recognizer",
"id": "5d16c7dab8dee30cb91efee202b07f872d957ff5",
"size": "1252",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33114"
}
],
"symlink_target": ""
}
|
"""A platform which allows you to get information from Tautulli."""
from datetime import timedelta
from pytautulli import Tautulli
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_PATH,
CONF_PORT,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
CONF_MONITORED_USERS = "monitored_users"
DEFAULT_NAME = "Tautulli"
DEFAULT_PORT = "8181"
DEFAULT_PATH = ""
DEFAULT_SSL = False
DEFAULT_VERIFY_SSL = True
TIME_BETWEEN_UPDATES = timedelta(seconds=10)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_MONITORED_USERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.string,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
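# Illustrative configuration sketch (assumed values, not part of the original
# module), as it would appear in configuration.yaml:
#
#   sensor:
#     - platform: tautulli
#       api_key: YOUR_API_KEY
#       host: 192.168.1.10
#       monitored_users:
#         - some_user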
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Create the Tautulli sensor."""
name = config.get(CONF_NAME)
host = config[CONF_HOST]
port = config.get(CONF_PORT)
path = config.get(CONF_PATH)
api_key = config[CONF_API_KEY]
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
user = config.get(CONF_MONITORED_USERS)
use_ssl = config[CONF_SSL]
verify_ssl = config.get(CONF_VERIFY_SSL)
session = async_get_clientsession(hass, verify_ssl)
tautulli = TautulliData(
Tautulli(host, port, api_key, hass.loop, session, use_ssl, path)
)
if not await tautulli.test_connection():
raise PlatformNotReady
sensor = [TautulliSensor(tautulli, name, monitored_conditions, user)]
async_add_entities(sensor, True)
class TautulliSensor(SensorEntity):
"""Representation of a Tautulli sensor."""
def __init__(self, tautulli, name, monitored_conditions, users):
"""Initialize the Tautulli sensor."""
self.tautulli = tautulli
self.monitored_conditions = monitored_conditions
self.usernames = users
self.sessions = {}
self.home = {}
self._attributes = {}
self._name = name
self._state = None
async def async_update(self):
"""Get the latest data from the Tautulli API."""
await self.tautulli.async_update()
self.home = self.tautulli.api.home_data
self.sessions = self.tautulli.api.session_data
self._attributes["Top Movie"] = self.home.get("movie")
self._attributes["Top TV Show"] = self.home.get("tv")
self._attributes["Top User"] = self.home.get("user")
for key in self.sessions:
if "sessions" not in key:
self._attributes[key] = self.sessions[key]
for user in self.tautulli.api.users:
if self.usernames is None or user in self.usernames:
userdata = self.tautulli.api.user_data
self._attributes[user] = {}
self._attributes[user]["Activity"] = userdata[user]["Activity"]
if self.monitored_conditions:
for key in self.monitored_conditions:
try:
self._attributes[user][key] = userdata[user][key]
except (KeyError, TypeError):
self._attributes[user][key] = ""
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self.sessions.get("stream_count")
@property
def icon(self):
"""Return the icon of the sensor."""
return "mdi:plex"
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return "Watching"
@property
def extra_state_attributes(self):
"""Return attributes for the sensor."""
return self._attributes
class TautulliData:
"""Get the latest data and update the states."""
def __init__(self, api):
"""Initialize the data object."""
self.api = api
@Throttle(TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the latest data from Tautulli."""
await self.api.get_data()
async def test_connection(self):
"""Test connection to Tautulli."""
await self.api.test_connection()
connection_status = self.api.connection
return connection_status
|
{
"content_hash": "b1084da520729928e7aade969d1fd6f0",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 86,
"avg_line_length": 33.23529411764706,
"alnum_prop": 0.6389380530973451,
"repo_name": "kennedyshead/home-assistant",
"id": "c50efb00ed79602fcf2bf40bb5806dbafbaf3b60",
"size": "5085",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/tautulli/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "33970989"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
'''
URL class and transformations for the cocrawler.
We apply "safe" transformations early and often.
We apply "unsafe" transformations right after parsing an url out of
a webpage. (These represent things that browsers do but aren't
in the RFC, like discarding \r\n in the middle of hostnames.)
See cocrawler/data/html-parsing-test.html for an analysis of browser
transformations.
'''
from collections import namedtuple
import urllib.parse
import logging
import re
import html
import tldextract
from . import surt
LOGGER = logging.getLogger(__name__)
'''
Notes from reading RFC 3986:
General rule: always unquote A-Za-z0-9-._~ # these are never delims
called 'unreserved' in the rfc ... x41-x5a x61-x7a x30-x39 x2d x2e x5f x7e
reserved:
general delims :/?#[]@
sub delims !$&'()*+,;=
scheme blah blah
netloc starts with //, ends with /?# and has internal delims of :@
hostname can be ip4 literal or [ip4 or ip6 literal] so also dots (ipv4) and : (ipv6)
(this is the only place where [] are allowed unquoted)
path
a character in a path is unreserved %enc sub-delims :@ and / is the actual delimiter
so, general-delims other than :/@ must be quoted & kept that way
that means ?#[] need quoting
. and .. are special (see section 5.2)
sub-delims can be present and don't have to be quoted
query
same as path chars but adds /? to chars allowed
so #[] still need quoting
we are going to split query up using &= which are allowed characters
fragment
same chars as query
due to quoting, % must be quoted
'''
def is_absolute_url(url):
if url[0:2] == '//':
return True
# TODO: allow more schemes
if url[0:7].lower() == 'http://' or url[0:8].lower() == 'https://':
return True
return False
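# Illustrative examples (a sketch, not part of the original module):
#
#   is_absolute_url('//cdn.example.com/x.js')   # -> True
#   is_absolute_url('HTTPS://example.com/')     # -> True
#   is_absolute_url('/relative/path')           # -> False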
def clean_webpage_links(link, urljoin=None):
'''
Webpage links have lots of random crap in them, which browsers tolerate,
that we'd like to clean up before calling urljoin() on them.
Also, since cocrawler allows a variety of html parsers, it's likely that
we will get improperly-terminated urls that result in the parser returning
the rest of the webpage as an url, etc etc.
Some of these come from
https://github.com/django/django/blob/master/django/utils/http.py#L287
and https://bugs.chromium.org/p/chromium/issues/detail?id=476478
See manual tests in cocrawler/data/html-parsing-test.html
TODO: headless browser testing to automate this
'''
# remove leading and trailing white space and unescaped control chars.
link = link.strip('\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f ')
# FF and Chrome interpret both ///example.com and http:///example.com as a hostname
m = re.match(r'(?:https?:)?/{3,}', link, re.I)
if m:
start = m.group(0)
link = start.rstrip('/') + '//' + link.replace(start, '', 1)
# ditto for \\\ -- and go ahead and fix up http:\\ while we're here
m = re.match(r'(?:https?:)?\\{2,}', link, re.I)
if m:
start = m.group(0)
link = start.rstrip('\\') + '//' + link.replace(start, '', 1)
# and the \ that might be after the hostname?
if is_absolute_url(link):
start = link.find('://') + 3 # works whether we have a scheme or not
m = re.search(r'[\\/?#]', link[start:])
if m:
if m.group(0) == '\\':
link = link[0:start] + link[start:].replace('\\', '/', 1)
# the current standard requires one round of &ent; unescaping, with tolerance for naked &
if '&' in link:
link = html.unescape(link)
'''
Runaway urls
    We allow pluggable parsers, and some of them might be non-clever and send us the entire
rest of the document as an url... or it could be that the webpage lacks a closing
quote for one of its urls, which can confuse diligent parsers.
There are formal rules for this in html5, by testing I see that FF and Chrome both
truncate *undelimited* urls at the first >\r\n
We have no idea which urls were delimited or not at this point. So, only molest
ones which seem awfully long.
'''
if len(link) > 300: # arbitrary choice
m = re.match(r'(.*?)[<>\"\'\r\n ]', link) # rare in urls and common in html markup
if m:
link = m.group(1)
if len(link) > 2000:
if link.startswith('javascript:') or link.startswith('data:'):
return ''
logstr = link[:50] + '...'
LOGGER.info('webpage urljoin=%s has an invalid-looking link %s of length %d',
str(urljoin), logstr, len(link))
return '' # will urljoin to the urljoin
# FF and Chrome eat ^I^J^M in the middle of quoted urls
link = link.replace('\t', '')
link = link.replace('\r', '')
link = link.replace('\n', '')
return link
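# Illustrative examples (a sketch, not part of the original module):
#
#   clean_webpage_links('http:///example.com/a')   # extra slash collapsed
#   # -> 'http://example.com/a'
#   clean_webpage_links('/page?a=1&amp;b=2')       # one round of &ent; unescaping
#   # -> '/page?a=1&b=2'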
def remove_dot_segments(path):
'''
Algorithm from RFC 3986. urllib.parse has this algorithm, but it's hidden in urljoin()
This is a stand-alone version. Since this is working on a non-relative url, path MUST begin with '/'
'''
if path[0] != '/':
# raise ValueError('Invalid path, must start with /: '+path)
        # lots of invalid webpages! examples: '&x39;/', '%20/'
return path
segments = path.split('/')
# drop empty segment pieces to avoid // in output... but not the first segment
segments[1:-1] = filter(None, segments[1:-1])
resolved_segments = []
for s in segments[1:]:
if s == '..':
try:
resolved_segments.pop()
except IndexError:
# discard the .. if it's at the beginning
pass
elif s == '.':
continue
else:
resolved_segments.append(s)
return '/' + '/'.join(resolved_segments)
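# Illustrative examples (a sketch, not part of the original module):
#
#   remove_dot_segments('/a/b/../c')   # -> '/a/c'
#   remove_dot_segments('/../x')       # leading '..' is discarded -> '/x'
#   remove_dot_segments('/a/./b/')     # trailing slash preserved -> '/a/b/'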
valid_hex = set('%02x' % i for i in range(256))
valid_hex.update(set('%02X' % i for i in range(256)))
unreserved = set('%02X' % i for i in range(0x41, 0x5b)) # A-Z
unreserved.update(set('%02X' % i for i in range(0x61, 0x7b))) # a-z
unreserved.update(set('%02X' % i for i in range(0x30, 0x3a))) # 0-9
unreserved.update(set(('2D', '2E', '5F', '7E'))) # -._~
subdelims = set(('21', '24', '3B', '3D')) # !$;=
subdelims.update(set('%02X' % i for i in range(0x26, 0x2d))) # &'()*+,
unquote_in_path = subdelims.copy()
unquote_in_path.update(set(('3A', '40'))) # ok: :@
quote_in_path = {' ': '%20'}
unquote_in_query = subdelims.copy()
unquote_in_query.update(set(('3A', '2F', '3F', '40'))) # ok: :/?@
unquote_in_query.remove('26') # not ok: &=
unquote_in_query.remove('3D')
quote_in_query = {' ': '+'}
unquote_in_frag = unquote_in_query.copy()
def unquote(text, safe):
pieces = text.split('%')
text = pieces.pop(0)
for p in pieces:
if text.endswith('%'): # deal with %%
text += '%' + p
continue
quote = p[:2]
rest = p[2:]
if quote in valid_hex:
quote = quote.upper()
if quote in safe:
text += chr(int(quote, base=16)) + rest
else:
text += '%' + quote + rest
return text
def quote(text, quoteme):
ret = ''
for c in text:
if c in quoteme:
c = quoteme[c]
ret += c
return ret
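# Illustrative examples (a sketch, not part of the original module):
#
#   unquote('%41%3f', unreserved)   # %41 ('A') is unreserved, %3F ('?') is not
#   # -> 'A%3F'                     # note the hex escape is uppercased
#   quote('a path/with spaces', quote_in_path)
#   # -> 'a%20path/with%20spaces'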
def safe_url_canonicalization(url):
'''
Do everything to the url which should not change it
Good discussion: https://en.wikipedia.org/wiki/URL_normalization
'''
original_url = url
url = unquote(url, unreserved)
try:
(scheme, netloc, path, query, fragment) = urllib.parse.urlsplit(url)
except ValueError:
LOGGER.info('invalid url %s', url)
return original_url, ''
scheme = scheme.lower()
if scheme not in ('http', 'https', 'ftp'):
return original_url, ''
netloc = surt.netloc_to_punycanon(scheme, netloc)
if path == '':
path = '/'
try:
path = remove_dot_segments(path)
except ValueError:
LOGGER.info('remove_dot_segments puking on url %s', url)
raise
path = path.replace('\\', '/') # might not be 100% safe but is needed for Windows buffoons
path = unquote(path, unquote_in_path)
path = quote(path, quote_in_path)
query = unquote(query, unquote_in_query)
query = quote(query, quote_in_query)
if fragment != '':
fragment = '#' + unquote(fragment, unquote_in_frag)
return urllib.parse.urlunsplit((scheme, netloc, path, query, None)), fragment
def upgrade_url_to_https(url):
# TODO
# use browser HSTS list to upgrade to https:
# https://chromium.googlesource.com/chromium/src/net/+/master/http/transport_security_state_static.json
# use HTTPSEverwhere? would have to have a fallback if https failed / redir to http
# do not use "mixed" rules from this dataset
# .app tld is 100% HTTPS
return
def special_redirect(url, next_url):
'''
Classifies some redirects that we wish to do special processing for
# XXX note that we are not normalizing unicode other than the surt hostname
'''
if not isinstance(url, str):
urlsplit = url.urlsplit
url = url.url
else:
urlsplit = urllib.parse.urlsplit(url)
if not isinstance(next_url, str):
next_urlsplit = next_url.urlsplit
next_url = next_url.url
else:
next_urlsplit = urllib.parse.urlsplit(next_url)
if abs(len(url) - len(next_url)) > 5: # 5 = 'www.' + 's'
return None
if url == next_url:
return 'same'
if url.casefold() == next_url.casefold():
return 'case-change'
if not url.endswith('/') and url + '/' == next_url:
return 'addslash'
if url.endswith('/') and url == next_url + '/':
return 'removeslash'
if url.replace('http', 'https', 1) == next_url:
return 'tohttps'
if url.startswith('https') and url.replace('https', 'http', 1) == next_url:
return 'tohttp'
if urlsplit.netloc.startswith('www.'):
if url.replace('www.', '', 1) == next_url:
return 'tononwww'
else:
if url.replace('www.', '', 1).replace('http', 'https', 1) == next_url:
return 'tononwww+tohttps'
elif (url.startswith('https') and
url.replace('www.', '', 1).replace('https', 'http', 1) == next_url):
return 'tononwww+tohttp'
elif next_urlsplit.netloc.startswith('www.'):
if url == next_url.replace('www.', '', 1):
return 'towww'
else:
if next_url.replace('www.', '', 1) == url.replace('http', 'https', 1):
return 'towww+tohttps'
elif (url.startswith('https') and
next_url.replace('www.', '', 1) == url.replace('https', 'http', 1)):
return 'towww+tohttp'
return None
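# Illustrative examples (a sketch, not part of the original module):
#
#   special_redirect('http://example.com/', 'https://example.com/')
#   # -> 'tohttps'
#   special_redirect('http://example.com/a', 'http://example.com/a/')
#   # -> 'addslash'
#   special_redirect('http://www.example.com/', 'http://example.com/')
#   # -> 'tononwww'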
def get_domain(hostname):
# TODO config option to set include_psl_private_domains=False
# currently we force *.blogspot.com to all be different domains
# another call below in URL __init__
try:
tlde = tldextract.extract(hostname, include_psl_private_domains=True)
except IndexError:
# can be raised for punycoded hostnames
raise
rd = tlde.registered_domain
if rd:
return rd
else:
return tlde.suffix # example used to be: s3.amazonaws.com, but no longer
def get_hostname(url, parts=None, remove_www=False):
# TODO: also duplicated in url_allowed.py
# XXX audit code for other places www is explicitly mentioned
if not parts:
parts = urllib.parse.urlsplit(url)
_, _, hostname, _ = surt.parse_netloc(parts.netloc)
if remove_www and hostname.startswith('www.'):
domain = get_domain(hostname)
if not domain.startswith('www.'):
hostname = hostname[4:]
return hostname
# stolen from urllib/parse.py
SplitResult = namedtuple('SplitResult', 'scheme netloc path query fragment')
class URL(object):
'''
Container for urls and url processing.
Precomputes a lot of stuff upon creation, which is usually done in a burner thread.
Currently idempotent.
'''
def __init__(self, url, urljoin=None, surt_strip_trailing_slash=False):
url = clean_webpage_links(url, urljoin=urljoin)
if urljoin:
if isinstance(urljoin, str):
urljoin = URL(urljoin)
# optimize a few common cases to dodge full urljoin cost
if url.startswith('http://') or url.startswith('https://'):
pass
elif url.startswith('/') and not url.startswith('//'):
url = urljoin.urlsplit.scheme + '://' + urljoin.hostname + url
else:
url = urllib.parse.urljoin(urljoin.url, url) # expensive
# TODO safe_url_canon has the parsed url, have it pass back the parts
url, frag = safe_url_canonicalization(url)
if len(frag) > 0:
self._original_frag = frag
else:
self._original_frag = None
try:
self._urlsplit = urllib.parse.urlsplit(url) # expensive
except ValueError:
LOGGER.info('invalid url %s sent into URL constructor', url)
# TODO: my code assumes URL() returns something valid, so...
# attempt to get rid of anything that would cause an invalid ipv6 ValueError
# XXX this isn't the only place that needs tweaking
url = url.replace('[', '').replace(']', '')
self._urlsplit = urllib.parse.urlsplit(url)
(scheme, netloc, path, query, _) = self._urlsplit
if path == '':
path = '/'
# TODO: there's a fair bit of duplicate computing in here
netloc = surt.netloc_to_punycanon(scheme, netloc)
self._netloc = netloc
self._hostname = surt.hostname_to_punycanon(netloc)
self._hostname_without_www = surt.discard_www_from_hostname(self._hostname)
self._surt = surt.surt(url, surt_strip_trailing_slash=surt_strip_trailing_slash)
self._urlsplit = SplitResult(scheme, netloc, path, query, '')
self._url = urllib.parse.urlunsplit(self._urlsplit) # final canonicalization
try:
# see note above about private domains
self._tldextract = tldextract.extract(self._url, include_psl_private_domains=True)
except IndexError:
# can be raised for punycoded hostnames
raise
self._registered_domain = self._tldextract.registered_domain
if not self._registered_domain:
self._registered_domain = self._tldextract.suffix # example used to be: s3.amazonaws.com, but no longer
@property
def url(self):
return self._url
def __str__(self):
return self._url
@property
def urlsplit(self):
return self._urlsplit
@property
def netloc(self):
return self._netloc
@property
def hostname(self):
return self._hostname
@property
def hostname_without_www(self):
return self._hostname_without_www
@property
def surt(self):
return self._surt
@property
def registered_domain(self):
return self._registered_domain
@property
def original_frag(self):
return self._original_frag
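# A minimal usage sketch, assuming the cocrawler package and its
# dependencies (surt, tldextract) are importable; the values in the inline
# comments are what the code above is expected to compute, not verified output.
if __name__ == '__main__':
    u = URL('/about', urljoin='http://www.Example.com/index.html')
    print(u.url)                   # canonicalized absolute url
    print(u.hostname)              # punycode-canonicalized, e.g. www.example.com
    print(u.hostname_without_www)  # e.g. example.com
    print(u.registered_domain)     # via tldextract, e.g. example.com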
|
{
"content_hash": "911b4086341cae07f8f362e63735504c",
"timestamp": "",
"source": "github",
"line_count": 471,
"max_line_length": 116,
"avg_line_length": 32.740976645435246,
"alnum_prop": 0.6031385772647688,
"repo_name": "cocrawler/cocrawler",
"id": "75504b957b196b5d241e510b65682554a923fe59",
"size": "15421",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "cocrawler/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7966"
},
{
"name": "Makefile",
"bytes": "2223"
},
{
"name": "Python",
"bytes": "298604"
},
{
"name": "Shell",
"bytes": "5645"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
setup(
name='storm-indicator',
version='0.2',
packages=['storm_indicator'],
url='https://github.com/emre/storm-indicator',
license='MIT',
author='Emre Yilmaz',
author_email='mail@emreyilmaz.me',
description='A unity indicator for connecting to your SSH connections easily.',
scripts=[
'storm_indicator/bin/ssh_list_indicator'
],
install_requires=["stormssh", ],
)
|
{
"content_hash": "51fb29d7bb6cda50cb1dfdde3e659ae6",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 83,
"avg_line_length": 28.125,
"alnum_prop": 0.6622222222222223,
"repo_name": "emre/storm-indicator",
"id": "ce948f6aa2410983a64266f6bf6e7b7212624c73",
"size": "450",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3614"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
from typing import TYPE_CHECKING
import pandas as pd
import pandas.testing as tm
import pytest
import ibis
import ibis.expr.operations as ops
from ibis.expr.scope import Scope
from ibis.expr.timecontext import adjust_context
if TYPE_CHECKING:
from ibis.expr.typing import TimeContext
pytest.importorskip("pyspark")
from ibis.backends.pyspark.compiler import compile_window_op, compiles # noqa: E402
from ibis.backends.pyspark.timecontext import combine_time_context # noqa: E402
def test_table_with_timecontext(client):
table = client.table('time_indexed_table')
context = (pd.Timestamp('20170102'), pd.Timestamp('20170103'))
result = table.execute(timecontext=context)
expected = table.execute()
expected = expected[expected.time.between(*context)]
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
('contexts', 'expected'),
[
(
[
(pd.Timestamp('20200102'), pd.Timestamp('20200103')),
(pd.Timestamp('20200101'), pd.Timestamp('20200106')),
],
(pd.Timestamp('20200101'), pd.Timestamp('20200106')),
), # superset
(
[
(pd.Timestamp('20200101'), pd.Timestamp('20200103')),
(pd.Timestamp('20200102'), pd.Timestamp('20200106')),
],
(pd.Timestamp('20200101'), pd.Timestamp('20200106')),
), # overlap
(
[
(pd.Timestamp('20200101'), pd.Timestamp('20200103')),
(pd.Timestamp('20200202'), pd.Timestamp('20200206')),
],
(pd.Timestamp('20200101'), pd.Timestamp('20200206')),
), # non-overlap
(
[(pd.Timestamp('20200101'), pd.Timestamp('20200103')), None],
(pd.Timestamp('20200101'), pd.Timestamp('20200103')),
), # None in input
([None], None), # None for all
(
[
(pd.Timestamp('20200102'), pd.Timestamp('20200103')),
(pd.Timestamp('20200101'), pd.Timestamp('20200106')),
(pd.Timestamp('20200109'), pd.Timestamp('20200110')),
],
(pd.Timestamp('20200101'), pd.Timestamp('20200110')),
), # complex
],
)
def test_combine_time_context(contexts, expected):
assert combine_time_context(contexts) == expected
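# In terms of the cases above: combine_time_context returns the smallest
# begin and the largest end over all non-None contexts, or None when every
# context is None.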
def test_adjust_context_scope(client):
"""Test that `adjust_context` has access to `scope` by default."""
table = client.table('time_indexed_table')
# Window is the only context-adjusted node that the PySpark backend
# can compile. Ideally we would test the context adjustment logic for
# Window itself, but building this test like that would unfortunately
# affect other tests that involve Window.
# To avoid that, we'll create a dummy subclass of Window and build the
# test around that.
class CustomWindow(ops.Window):
pass
# Tell the Spark backend compiler it should compile CustomWindow just
# like Window
compiles(CustomWindow)(compile_window_op)
# Create an `adjust_context` function for this subclass that simply checks
# that `scope` is passed in.
@adjust_context.register(CustomWindow)
def adjust_context_window_check_scope(
op: CustomWindow,
scope: Scope,
timecontext: TimeContext,
) -> TimeContext:
"""Confirms that `scope` is passed in."""
assert scope is not None
return timecontext
# Do an operation that will trigger context adjustment
# on a CustomWindow
value_count = table['value'].count()
win = ibis.window(
ibis.interval(hours=1),
0,
order_by='time',
group_by='key',
)
    # the argument needs to be pulled out of the alias
# any extensions must do the same
value_count_over_win = CustomWindow(value_count.op().arg, win).to_expr()
expr = table.mutate(value_count_over_win.name('value_count_over_win'))
context = (pd.Timestamp('20170105'), pd.Timestamp('20170111'))
expr.execute(timecontext=context)
|
{
"content_hash": "091d01763413f3b53e5ae2d1d2b8a9fe",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 84,
"avg_line_length": 34.107438016528924,
"alnum_prop": 0.623939907923431,
"repo_name": "ibis-project/ibis",
"id": "4a30f719c177a4a6921a9f52fb7e0c453c84fb85",
"size": "4127",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ibis/backends/pyspark/tests/test_timecontext.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "44931"
},
{
"name": "CMake",
"bytes": "1862"
},
{
"name": "Dockerfile",
"bytes": "70"
},
{
"name": "JavaScript",
"bytes": "2713"
},
{
"name": "Nix",
"bytes": "11917"
},
{
"name": "Python",
"bytes": "2958224"
},
{
"name": "Shell",
"bytes": "3167"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/particle/shared_particle_test_44.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "2ab20ce1bd821967bf8b327c119b65c5",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 71,
"avg_line_length": 23.076923076923077,
"alnum_prop": 0.69,
"repo_name": "obi-two/Rebelion",
"id": "c24da76824e83c721407647da71c42db2292bf7d",
"size": "445",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/static/particle/shared_particle_test_44.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
import webtest
import main
def test_get():
app = webtest.TestApp(main.app)
response = app.get('/')
assert response.status_int == 200
assert response.body == "Hello, World! It's Saymore here"
|
{
"content_hash": "11879da5d127e01b35f2a5e5cd635b1a",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 61,
"avg_line_length": 17.666666666666668,
"alnum_prop": 0.6556603773584906,
"repo_name": "saymorec/mzansi_yarona",
"id": "5178acc7bf64f1ea68eae2388ce5451b7ea73e6d",
"size": "809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10500"
},
{
"name": "HTML",
"bytes": "17405"
},
{
"name": "Python",
"bytes": "16579"
}
],
"symlink_target": ""
}
|
from utils import connect
class Query_Seq_Idx_Cache(object):
def __init__(self):
pass
class Query_Seq_Idx(object):
def __init__(self, unique_id=0):
self.query="""
SELECT
relid,
relname,
seq_scan,
seq_tup_read,
idx_scan,
idx_tup_fetch,
n_tup_ins,
n_tup_upd,
n_tup_del
FROM
pg_stat_all_tables
"""
# We need the cursor
self.db_connection_cursor = self.create_db_connection()
# Retrieve initial data
self.columns, self.rows = self.get_query_data(self.db_connection_cursor, self.query)
print self.columns
mygenerator = self.get_row()
for i in mygenerator:
print i
def get_columns(self):
return self.columns
    def get_row(self):
        # Yield rows one at a time; the original `return` handed back only
        # the first row even though callers iterate over this as a generator.
        for row in self.rows:
            yield row
def show_query(self):
return self.query
def create_db_connection(self):
return connect.pg_connect("localhost", "5432", "postgres", None, "postgres")
def get_query_data(self, db_connection_cursor, query):
return connect.pg_get_data(db_connection_cursor, query)
class Query_Table_Idx(object):
def __init__(self):
pass
"""
SELECT
indexrelid,
idx_scan,
idx_tup_read,
idx_tup_fetch,
relname,
indexrelname
FROM
pg_stat_all_indexes
"""
|
{
"content_hash": "91efcd89a656d2ec71b7df5bcac8a967",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 92,
"avg_line_length": 24.661290322580644,
"alnum_prop": 0.525833878351864,
"repo_name": "ragged/yapgt",
"id": "2e955deddd61369c19fdfd8d6a8e165ac43c7ea7",
"size": "1554",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/queries.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25613"
}
],
"symlink_target": ""
}
|
import os, sys, time
# add current script path libs
#pathname = os.path.dirname(sys.argv[0])
#sys.path.insert(0, os.path.join(pathname, 'lib', 'debug'))
pathname = os.path.dirname(os.path.realpath(__file__))
#sys.path.append(os.path.join(pathname, 'lib', 'debug'))
sys.path.append(pathname)
# import functools, used to preserve the correct func.__name__
import functools
# import some function profiler helpers and the GUI
import functionprofiler
# Note: as an alternative, you can also use pyprof2calltree and kcachegrind to get a lot more informations and interactive call graph
# import profilehooks lib
from profilehooks import profile
# import memory profiler line by line
from memory_profiler import profile as memoryprofile_linebyline
#### NON DECORATOR FUNCTIONS ####
#################################
def startmemorytracker():
from pympler import tracker
tr = tracker.SummaryTracker()
return tr
def runprofilerandshow(funcname, profilepath, argv='', *args, **kwargs):
'''
    Run a function profiler and show the result in a GUI visualisation using RunSnakeRun.
    Note: calibration can also be used for more exact results.
'''
functionprofiler.runprofile(funcname+'(\''+argv+'\')', profilepath, *args, **kwargs)
print 'Showing profile (windows should open in the background)'; sys.stdout.flush();
functionprofiler.browseprofilegui(profilepath)
#### DECORATOR FUNCTIONS ####
#############################
# @profile: use profilehooks to profile functions
# @profileit: profile using python's profile (works with threads)
# @showprofile: show the functions profile in a nice GUI using RunSnakeRun (alternative: using the generated profile log files you can use pyprof2calltree and kcachegrind to get a lot more informations and interactive call graph)
# @memorytrack: use Pympler to track and show memory usage (only console, no GUI)
# @callgraph: save the call graph in text format and as an image (if GraphViz, specifically its dot program, is available)
# @profile_linebyline: profile a function with line-by-line CPU consumption (using line_profiler, which must be installed separately because it is compiled in C)
# @memoryprofile_linebyline: profile a function with line-by-line memory consumption (using memory_profiler, which needs psutil on Windows)
# eg:
# @showprofile
# @profileit
# def func(): ...
def memorytrack(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
from pympler import tracker
tr = tracker.SummaryTracker()
func(*args, **kwargs)
tr.print_diff()
return wrapper
def profileit(func):
import profile
@functools.wraps(func)
def wrapper(*args, **kwargs):
#datafn = func.__name__ + ".profile" # Name the data file sensibly
datafn = 'profile.log'
prof = profile.Profile()
retval = prof.runcall(func, *args, **kwargs)
prof.dump_stats(datafn)
return retval
return wrapper
def profileit_log(log):
import profile
def inner(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
prof = profile.Profile()
retval = prof.runcall(func, *args, **kwargs)
# Note use of name from outer scope
prof.dump_stats(log)
return retval
return wrapper
return inner
def showprofile(func):
profilepath = 'profile.log'
@functools.wraps(func)
def wrapper(*args, **kwargs):
func(*args, **kwargs)
functionprofiler.browseprofilegui(profilepath)
return wrapper
def callgraph(func):
''' Makes a call graph
Note: be sure to install GraphViz prior to printing the dot graph!
'''
import pycallgraph
@functools.wraps(func)
def wrapper(*args, **kwargs):
pycallgraph.start_trace()
func(*args, **kwargs)
pycallgraph.save_dot('callgraph.log')
pycallgraph.make_dot_graph('callgraph.png')
#pycallgraph.make_dot_graph('callgraph.jpg', format='jpg', tool='neato')
return wrapper
def profile_linebyline(func):
import line_profiler
@functools.wraps(func)
def wrapper(*args, **kwargs):
prof = line_profiler.LineProfiler()
val = prof(func)(*args, **kwargs)
prof.print_stats()
return val
return wrapper
# Some debug testing here
if __name__ == '__main__':
@showprofile
@profileit
#@memorytrack
#@callgraph
#@profile
#@memoryprofile_linebyline
#@profile_linebyline
def testcaptcha():
import captchagenerator
captcha = captchagenerator.CaptchaGenerator(True, True, debugPng=True, debug=False, nbElem=10, modelsPath='bammodels', windowWidth='320', windowHeight='240')
#captcha.renderCaptcha('solmasks', 'solmasks')
captcha.renderCaptchaMulti(4, 'solmasks', 'solmasks')
#time.sleep(20)
#@memoryprofile_linebyline
#@profile_linebyline
def test_1():
a = [1] * (10 ** 6)
b = [2] * (2 * 10 ** 7)
del b
for i in range(2):
a = [1] * (10 ** 6)
b = [2] * (2 * 10 ** 7)
del b
return a
# Test 1
#runprofilerandshow('testcaptcha', 'profile.log')
# Test 2
testcaptcha()
# Test 3
#test_1()
|
{
"content_hash": "542c294d604323b6c5eb56974b956990",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 229,
"avg_line_length": 30.994082840236686,
"alnum_prop": 0.656930126002291,
"repo_name": "lrq3000/pyFileFixity",
"id": "7a4fa7b36f5105e0d3a82a420fa755e1ad10fb1c",
"size": "5261",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pyFileFixity/lib/profilers/visual/debug.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "48259"
},
{
"name": "CSS",
"bytes": "1629"
},
{
"name": "JavaScript",
"bytes": "339"
},
{
"name": "Makefile",
"bytes": "1301"
},
{
"name": "Python",
"bytes": "2235740"
},
{
"name": "Shell",
"bytes": "964"
},
{
"name": "TeX",
"bytes": "19102"
}
],
"symlink_target": ""
}
|
def wait_until_existing(self, timeout=None):
"""
Wait until page object is existing in the DOM.
:param int timeout: number of seconds to wait, if not provided
PageObject.DEFAULT_WAIT_TIMEOUT is used
"""
if timeout is None:
timeout = self.DEFAULT_WAIT_TIMEOUT
self.logger.info(('waiting until page contains page object {}'
).format(self._log_id_short))
self.logger.debug(('waiting until page contains page object; {}'
).format(self._log_id_long))
error_msg = ('page object still not existing after {} seconds; {}'
).format(timeout, self._log_id_long)
self.wait_until(self.is_existing, func_kwargs=dict(log=False),
timeout=timeout, error_msg=error_msg)
self.logger.info(('finished waiting until page contains page object {}'
).format(self._log_id_short))
self.logger.debug(('finished waiting until page contains page object; {}'
).format(self._log_id_long))
return self
|
{
"content_hash": "61093f5655afcfd442882337e0b9951e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 77,
"avg_line_length": 35.25,
"alnum_prop": 0.6565349544072948,
"repo_name": "lukas-linhart/pageobject",
"id": "e56a0be496c5f372b3a99f179e8547bcf68f31d9",
"size": "987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pageobject/commands/wait_until_existing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103810"
}
],
"symlink_target": ""
}
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from resource_management import *
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
from hbase import hbase
from hbase_service import hbase_service
from hbase_decommission import hbase_decommission
import upgrade
from setup_ranger_hbase import setup_ranger_hbase
from ambari_commons import OSCheck, OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
class HbaseMaster(Script):
def configure(self, env):
import params
env.set_params(params)
hbase(name='master')
def install(self, env):
import params
self.install_packages(env)
def decommission(self, env):
import params
env.set_params(params)
hbase_decommission(env)
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class HbaseMasterWindows(HbaseMaster):
def start(self, env):
import status_params
self.configure(env)
Service(status_params.hbase_master_win_service_name, action="start")
def stop(self, env):
import status_params
env.set_params(status_params)
Service(status_params.hbase_master_win_service_name, action="stop")
def status(self, env):
import status_params
env.set_params(status_params)
check_windows_service_status(status_params.hbase_master_win_service_name)
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HbaseMasterDefault(HbaseMaster):
def get_stack_to_component(self):
return {"HDP": "hbase-master"}
def pre_upgrade_restart(self, env, upgrade_type=None):
import params
env.set_params(params)
upgrade.prestart(env, "hbase-master")
def start(self, env, upgrade_type=None):
import params
env.set_params(params)
self.configure(env) # for security
setup_ranger_hbase(upgrade_type=upgrade_type)
hbase_service('master', action = 'start')
def stop(self, env, upgrade_type=None):
import params
env.set_params(params)
hbase_service('master', action = 'stop')
def status(self, env):
import status_params
env.set_params(status_params)
pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
check_process_status(pid_file)
def security_status(self, env):
import status_params
env.set_params(status_params)
if status_params.security_enabled:
props_value_check = {"hbase.security.authentication" : "kerberos",
"hbase.security.authorization": "true"}
props_empty_check = ['hbase.master.keytab.file',
'hbase.master.kerberos.principal']
props_read_check = ['hbase.master.keytab.file']
hbase_site_expectations = build_expectations('hbase-site', props_value_check, props_empty_check,
props_read_check)
hbase_expectations = {}
hbase_expectations.update(hbase_site_expectations)
security_params = get_params_from_filesystem(status_params.hbase_conf_dir,
{'hbase-site.xml': FILE_TYPE_XML})
result_issues = validate_security_config_properties(security_params, hbase_expectations)
if not result_issues: # If all validations passed successfully
try:
# Double check the dict before calling execute
if ( 'hbase-site' not in security_params
or 'hbase.master.keytab.file' not in security_params['hbase-site']
or 'hbase.master.kerberos.principal' not in security_params['hbase-site']):
self.put_structured_out({"securityState": "UNSECURED"})
self.put_structured_out(
{"securityIssuesFound": "Keytab file or principal are not set property."})
return
cached_kinit_executor(status_params.kinit_path_local,
status_params.hbase_user,
security_params['hbase-site']['hbase.master.keytab.file'],
security_params['hbase-site']['hbase.master.kerberos.principal'],
status_params.hostname,
status_params.tmp_dir)
self.put_structured_out({"securityState": "SECURED_KERBEROS"})
except Exception as e:
self.put_structured_out({"securityState": "ERROR"})
self.put_structured_out({"securityStateErrorInfo": str(e)})
else:
issues = []
for cf in result_issues:
issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
self.put_structured_out({"securityState": "UNSECURED"})
else:
self.put_structured_out({"securityState": "UNSECURED"})
if __name__ == "__main__":
HbaseMaster().execute()
|
{
"content_hash": "30d0db7f03adc11deae760ec471355cb",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 114,
"avg_line_length": 38.16216216216216,
"alnum_prop": 0.6738668555240793,
"repo_name": "arenadata/ambari",
"id": "6a869c0a87278e7f1e065f00352b9e45e53b5006",
"size": "5670",
"binary": false,
"copies": "2",
"ref": "refs/heads/branch-adh-1.6",
"path": "ambari-server/src/main/resources/stacks/ADH/1.0/services/HBASE/package/scripts/hbase_master.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "46700"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "215907"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "343739"
},
{
"name": "CoffeeScript",
"bytes": "8465"
},
{
"name": "Dockerfile",
"bytes": "6387"
},
{
"name": "EJS",
"bytes": "777"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Gherkin",
"bytes": "990"
},
{
"name": "Groovy",
"bytes": "15882"
},
{
"name": "HTML",
"bytes": "717983"
},
{
"name": "Handlebars",
"bytes": "1819641"
},
{
"name": "Java",
"bytes": "29172298"
},
{
"name": "JavaScript",
"bytes": "18571926"
},
{
"name": "Jinja",
"bytes": "1490416"
},
{
"name": "Less",
"bytes": "412933"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "287501"
},
{
"name": "PowerShell",
"bytes": "2090340"
},
{
"name": "Python",
"bytes": "18507704"
},
{
"name": "R",
"bytes": "3943"
},
{
"name": "Ruby",
"bytes": "38590"
},
{
"name": "SCSS",
"bytes": "40072"
},
{
"name": "Shell",
"bytes": "924115"
},
{
"name": "Stylus",
"bytes": "820"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "2303"
}
],
"symlink_target": ""
}
|
from typing import List
from flask import Flask
from flask_migrate import Migrate
from flask_security import RoleMixin
from flask_security import SQLAlchemyUserDatastore
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import and_
from sqlalchemy.exc import OperationalError
from opwen_email_client.domain.email.user_store import User
from opwen_email_client.domain.email.user_store import UserStore
from opwen_email_client.webapp.config import AppConfig
_db = SQLAlchemy()
# noinspection PyUnresolvedReferences
_roles_users = _db.Table(
'roles_users',
_db.Column('user_id', _db.Integer(), _db.ForeignKey('user.id')),
_db.Column('role_id', _db.Integer(), _db.ForeignKey('role.id')),
)
# noinspection PyUnresolvedReferences
class _User(_db.Model, User):
__tablename__ = 'user'
id = _db.Column(_db.Integer(), primary_key=True)
email = _db.Column(_db.String(255), unique=True, index=True)
password = _db.Column(_db.String(255), nullable=False)
active = _db.Column(_db.Boolean(), default=True)
last_login_at = _db.Column(_db.DateTime())
current_login_at = _db.Column(_db.DateTime())
last_login_ip = _db.Column(_db.String(128))
current_login_ip = _db.Column(_db.String(128))
login_count = _db.Column(_db.Integer())
timezone_offset_minutes = _db.Column(_db.Integer(), nullable=False, default=0)
language = _db.Column(_db.String(8))
roles = _db.relationship('_Role', secondary=_roles_users, backref=_db.backref('users', lazy='dynamic'))
synced = _db.Column(_db.Boolean(), default=False)
is_admin = _db.Column(_db.Boolean(), default=False)
# noinspection PyUnresolvedReferences
class _Role(_db.Model, RoleMixin):
__tablename__ = 'role'
id = _db.Column(_db.Integer(), primary_key=True)
name = _db.Column(_db.String(32), unique=True)
description = _db.Column(_db.String(255))
_migrate = Migrate()
class FlaskLoginUserStore(UserStore):
def __init__(self):
store = SQLAlchemyUserDatastore(_db, _User, _Role)
super().__init__(read=store, write=store)
self._app = None
def init_app(self, app: Flask):
with app.app_context():
_db.init_app(app)
_migrate.init_app(app, _db)
try:
_db.create_all()
except OperationalError:
pass
self._app = app
def fetch_all(self, user: User) -> List[User]:
with self._app.app_context():
return _User.query.all()
def fetch_pending(self) -> List[User]:
is_not_synced = _User.synced == False # noqa: E712
is_non_admin = _User.email != AppConfig.ADMIN_INBOX
with self._app.app_context():
return _User.query\
.filter(and_(is_not_synced, is_non_admin))\
.all()
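# A minimal wiring sketch, assuming a Flask app configured with a SQLAlchemy
# database URI (the sqlite path here is hypothetical).
if __name__ == '__main__':
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///users.db'
    user_store = FlaskLoginUserStore()
    user_store.init_app(app)  # binds the db, runs migrations setup, creates tables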
|
{
"content_hash": "24aa63fd3994e64df1cc53e2633483d9",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 107,
"avg_line_length": 32.310344827586206,
"alnum_prop": 0.6431874777659196,
"repo_name": "ascoderu/opwen-cloudserver",
"id": "c76462afb356ca18ae59bb97f97ac22652bbaeae",
"size": "2811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opwen_email_client/webapp/login.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "5098"
},
{
"name": "Makefile",
"bytes": "5771"
},
{
"name": "Python",
"bytes": "167331"
},
{
"name": "Shell",
"bytes": "25506"
},
{
"name": "Smarty",
"bytes": "2467"
}
],
"symlink_target": ""
}
|
"""Utility function to get information from graph."""
from __future__ import absolute_import as _abs
import tvm
from . import graph_attr
def infer_shape(graph, **shape):
"""Infer the shape given the shape of inputs.
Parameters
----------
graph : Graph
The graph to perform shape inference from
shape : dict of str to tuple
The specific input shape.
Returns
-------
in_shape : list of tuple
Shape of inputs
out_shape: list of tuple
Shape of outputs
"""
graph = graph_attr.set_shape_inputs(graph, shape)
graph = graph.apply("InferShape")
shape = graph.json_attr("shape")
index = graph.index
input_shape = [shape[index.entry_id(x)] for x in index.input_names]
output_shape = [shape[index.entry_id(x)] for x in index.output_entries]
return input_shape, output_shape
def infer_dtype(graph, **dtype):
"""Infer the type given the typeS of inputs.
Parameters
----------
graph : Graph
The graph to perform type inference from
dtype : dict of str to dtype
The specific input data type.
Returns
-------
in_dtype : list of tuple
Dtype of inputs
out_dtype: list of tuple
Dtype of outputs
"""
graph = graph_attr.set_dtype_inputs(graph, dtype)
graph = graph.apply("InferType")
dtype = graph.json_attr("dtype")
index = graph.index
input_dtype = [graph_attr.TCODE_TO_DTYPE[dtype[index.entry_id(x)]]
for x in index.input_names]
output_dtype = [graph_attr.TCODE_TO_DTYPE[dtype[index.entry_id(x)]]
for x in index.output_entries]
return input_dtype, output_dtype
_deep_compare = tvm.get_global_func("nnvm.graph.DeepCompare")
def check_graph_equal(grapha, graphb, compare_variable_attrs=False):
"""Check if two graphs have equal structure.
Parameters
----------
grapha : Graph
The first graph
graphb : Graph
The second graph
compare_variable_attrs : bool, optional
        Whether we want to compare attributes (names) on variables.
        Usually it is safe to skip this unless we want input names
        to match exactly.
Raises
------
ValueError
ValueError is raised with error message when graph not equal
"""
err = _deep_compare(grapha, graphb, compare_variable_attrs)
if err:
raise ValueError("Graph compare error: " + err)
|
{
"content_hash": "6709e5332c7cd6a5f84167bce8053077",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 75,
"avg_line_length": 27.18888888888889,
"alnum_prop": 0.6297507151614221,
"repo_name": "imai-lm/nnvm",
"id": "dcad54a8b778f0da83b656a742e13aba47468548",
"size": "2478",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/nnvm/compiler/graph_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "368176"
},
{
"name": "CMake",
"bytes": "18500"
},
{
"name": "Makefile",
"bytes": "4556"
},
{
"name": "Python",
"bytes": "272397"
},
{
"name": "Shell",
"bytes": "11786"
}
],
"symlink_target": ""
}
|
"""Class implementation of Python Bernoulli Bandit environment."""
from typing import Callable, Optional
import gin
import numpy as np
from tf_agents.bandits.environments import bandit_py_environment
from tf_agents.specs import array_spec
from tf_agents.typing import types
@gin.configurable
class PiecewiseBernoulliPyEnvironment(
bandit_py_environment.BanditPyEnvironment):
"""Implements piecewise stationary finite-armed Bernoulli Bandits.
This environment implements piecewise stationary finite-armed non-contextual
Bernoulli Bandit environment as a subclass of BanditPyEnvironment.
  Compared to the stationary Bernoulli environment, the reward distribution
  parameters undergo abrupt changes at given time steps. The current time is
  kept by the environment and incremented by one at each call of _apply_action.
For each stationary piece, the reward distribution is 0/1 (Bernoulli) with
the parameter p valid for the current piece.
Examples:
means = [[0.1, 0.5], [0.5, 0.1], [0.5, 0.5]] # 3 pieces, 2 arms.
def constant_duration_gen(delta):
while True:
yield delta
env_piecewise_10_steps = PiecewiseBernoulliPyEnvironment(
means, constant_duration_gen(10))
def random_duration_gen(a, b):
while True:
yield random.randint(a, b)
env_rnd_piecewise_10_to_20_steps = PiecewiseBernoulliPyEnvironment(
means, random_duration_gen(10, 20))
For a reference on bandits see e.g., Example 1.1 in "A Tutorial on Thompson
Sampling" by Russo et al. (https://web.stanford.edu/~bvr/pubs/TS_Tutorial.pdf)
A paper using piecewise stationary environments is Qingyun Wu, Naveen Iyer,
Hongning Wang, ``Learning Contextual Bandits in a Non-stationary
Environment,'' Proceedings of the 2017 ACM on Conference on Information and
Knowledge Management (https://arxiv.org/pdf/1805.09365.pdf.)
"""
def __init__(self,
piece_means: np.ndarray,
change_duration_generator: Callable[[], int],
batch_size: Optional[int] = 1):
"""Initializes a piecewise stationary Bernoulli Bandit environment.
Args:
piece_means: a matrix (list of lists) with shape (num_pieces, num_arms)
containing floats in [0, 1]. Each list contains the mean rewards for
the num_arms actions of the num_pieces pieces. The list is wrapped
around after the last piece.
change_duration_generator: a generator of the time durations. If this
yields the values d0, d1, d2, ..., then the reward parameters change at
        steps d0, d0 + d1, d0 + d1 + d2, ..., as follows:
piece_means[0] for 0 <= t < d0
piece_means[1] for d0 <= t < d0 + d1
piece_means[2] for d0 + d1 <= t < d0 + d1 + d2
...
Note that the values generated have to be non-negative. The value zero
means that the corresponding parameters in the piece_means list are
skipped, i.e. the duration of the piece is zero steps.
If the generator ends (e.g. if it is obtained with iter(<list>)) and the
step goes beyond the last piece, a StopIteration exception is raised.
batch_size: If specified, this is the batch size for observation and
actions.
"""
self._batch_size = batch_size
self._piece_means = np.asarray(piece_means, dtype=np.float32)
if np.any(self._piece_means > 1.0) or np.any(self._piece_means < 0):
raise ValueError('All parameters should be floats in [0, 1].')
self._num_pieces, self._num_actions = self._piece_means.shape
self._change_duration_generator = change_duration_generator
self._current_time = -1
self._current_piece = -1
self._next_change = 0
self._increment_time()
action_spec = array_spec.BoundedArraySpec(
shape=(),
dtype=np.int32,
minimum=0,
maximum=self._num_actions - 1,
name='action')
observation_spec = array_spec.ArraySpec(
shape=(1,), dtype=np.int32, name='observation')
super(PiecewiseBernoulliPyEnvironment, self).__init__(
observation_spec, action_spec)
@property
def batch_size(self) -> int:
return self._batch_size
@property
def batched(self) -> bool:
return True
def _increment_time(self):
self._current_time += 1
while self._current_time >= self._next_change:
duration = int(next(self._change_duration_generator)) # pytype: disable=wrong-arg-types # trace-all-classes
if duration < 0:
raise ValueError(
'Generated duration must be non-negative. Got {}.'.format(duration))
self._next_change += duration
self._current_piece = (self._current_piece + 1) % self._num_pieces
def _observe(self) -> types.NestedArray:
return np.zeros(
shape=[self._batch_size, 1],
dtype=self.observation_spec().dtype)
def _apply_action(self, action: types.NestedArray) -> types.NestedArray:
reward = np.floor(self._piece_means[self._current_piece, action] +
np.random.random((self._batch_size,)))
self._increment_time()
return reward
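# A minimal interaction sketch, assuming the tf_agents dependencies above are
# installed; the means and durations here are illustrative.
if __name__ == '__main__':
  def constant_duration_gen(delta):
    while True:
      yield delta

  means = [[0.1, 0.9], [0.9, 0.1]]  # 2 pieces, 2 arms
  env = PiecewiseBernoulliPyEnvironment(means, constant_duration_gen(5))
  env.reset()
  for _ in range(3):
    time_step = env.step(np.array([1], dtype=np.int32))  # always pull arm 1
    print(time_step.reward)  # batch of Bernoulli rewards, shape (1,)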
|
{
"content_hash": "16044d40804d08cafb736e585560cae9",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 115,
"avg_line_length": 39.325581395348834,
"alnum_prop": 0.6798738419081412,
"repo_name": "tensorflow/agents",
"id": "9dbcc2a82040b501183c4254c538caec3b3fe6c7",
"size": "5676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tf_agents/bandits/environments/piecewise_bernoulli_py_environment.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4930266"
},
{
"name": "Shell",
"bytes": "10950"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
from pants.backend.jvm.tasks.nailgun_task import NailgunTask
from pants.util.dirutil import safe_mkdir
class BundleEntries(NailgunTask):
@classmethod
def prepare(cls, options, round_manager):
super(BundleEntries, cls).prepare(options, round_manager)
round_manager.require_data('kythe_entries_files')
@classmethod
def register_options(cls, register):
super(BundleEntries, cls).register_options(register)
register('--archive', type=str,
choices=['none', 'uncompressed', 'tar', 'zip', 'gztar', 'bztar'],
default='none', fingerprint=True,
help='Create an archive of this type.')
def execute(self):
archive = self.get_options().archive
if archive == 'none':
return
for tgt, entries in self.context.products.get_data('kythe_entries_files', dict).items():
kythe_distdir = os.path.join(self.get_options().pants_distdir, 'kythe')
safe_mkdir(kythe_distdir)
uncompressed_kythe_distpath = os.path.join(
kythe_distdir, '{}.entries'.format(tgt.address.path_safe_spec))
if archive == 'uncompressed':
kythe_distpath = uncompressed_kythe_distpath
shutil.copy(entries, kythe_distpath)
else:
kythe_distpath = shutil.make_archive(base_name=uncompressed_kythe_distpath,
format=archive,
root_dir=os.path.dirname(entries),
base_dir=os.path.basename(entries))
self.context.log.info('Copied entries to {}'.format(kythe_distpath))
|
{
"content_hash": "5f72b853daf0b58b83ed739e8e20852c",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 92,
"avg_line_length": 40.595238095238095,
"alnum_prop": 0.6404692082111437,
"repo_name": "twitter/pants",
"id": "0652884c97453952b774dcd5063a92fd021452ab",
"size": "1852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/codeanalysis/src/python/pants/contrib/codeanalysis/tasks/bundle_entries.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5639"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "85294"
},
{
"name": "Java",
"bytes": "498956"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "6700799"
},
{
"name": "Rust",
"bytes": "765598"
},
{
"name": "Scala",
"bytes": "89346"
},
{
"name": "Shell",
"bytes": "94395"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
import collections
import copy
import os
import sys
import traceback
from oslo_utils import encodeutils
from oslo_utils import reflection
import six
from taskflow import exceptions as exc
from taskflow.utils import iter_utils
from taskflow.utils import mixins
from taskflow.utils import schema_utils as su
_exception_message = encodeutils.exception_to_unicode
def _copy_exc_info(exc_info):
if exc_info is None:
return None
exc_type, exc_value, tb = exc_info
    # NOTE(imelnikov): there is no need to copy the exception type; a
    # shallow copy of the value is fine, and we can't copy the traceback
    # since it contains references to the internal stack frames...
return (exc_type, copy.copy(exc_value), tb)
def _are_equal_exc_info_tuples(ei1, ei2):
if ei1 == ei2:
return True
if ei1 is None or ei2 is None:
return False # if both are None, we returned True above
    # NOTE(imelnikov): we can't compare exceptions with '=='
    # because we want exc_info to be equal to its copy made with
    # _copy_exc_info above.
if ei1[0] is not ei2[0]:
return False
# NOTE(dhellmann): The flake8/pep8 error E721 does not apply here
# because we want the types to be exactly the same, not just have
# one be inherited from the other.
if not all((type(ei1[1]) == type(ei2[1]), # noqa: E721
_exception_message(ei1[1]) == _exception_message(ei2[1]),
repr(ei1[1]) == repr(ei2[1]))):
return False
if ei1[2] == ei2[2]:
return True
tb1 = traceback.format_tb(ei1[2])
tb2 = traceback.format_tb(ei2[2])
return tb1 == tb2
class Failure(mixins.StrMixin):
"""An immutable object that represents failure.
Failure objects encapsulate exception information so that they can be
re-used later to re-raise, inspect, examine, log, print, serialize,
deserialize...
One example where they are depended upon is in the WBE engine. When a
remote worker throws an exception, the WBE based engine will receive that
exception and desire to reraise it to the user/caller of the WBE based
engine for appropriate handling (this matches the behavior of non-remote
engines). To accomplish this a failure object (or a
:py:meth:`~.Failure.to_dict` form) would be sent over the WBE channel
and the WBE based engine would deserialize it and use this objects
:meth:`.reraise` method to cause an exception that contains
similar/equivalent information as the original exception to be reraised,
allowing the user (or the WBE engine itself) to then handle the worker
failure/exception as they desire.
    For those who are curious, here are a few reasons why the original
    exception itself *may* not be reraised and why a wrapped failure
    exception object will be reraised instead. These explanations are *only*
    applicable when a failure object is serialized and deserialized (when it
    is retained inside the python process that the exception was created in,
    the original exception can be reraised correctly without issue).
* Traceback objects are not serializable/recreatable, since they contain
references to stack frames at the location where the exception was
raised. When a failure object is serialized and sent across a channel
and recreated it is *not* possible to restore the original traceback and
originating stack frames.
* The original exception *type* can not be guaranteed to be found, workers
can run code that is not accessible/available when the failure is being
deserialized. Even if it was possible to use pickle safely it would not
be possible to find the originating exception or associated code in this
situation.
* The original exception *type* can not be guaranteed to be constructed in
a *correct* manner. At the time of failure object creation the exception
has already been created and the failure object can not assume it has
knowledge (or the ability) to recreate the original type of the captured
exception (this is especially hard if the original exception was created
via a complex process via some custom exception constructor).
* The original exception *type* can not be guaranteed to be constructed in
a *safe* manner. Importing *foreign* exception types dynamically can be
problematic when not done correctly and in a safe manner; since failure
objects can capture any exception it would be *unsafe* to try to import
those exception types namespaces and modules on the receiver side
    dynamically (this would create issues similar to those the ``pickle``
    module in python has, where foreign modules can be imported and thereby
    have code run on import, causing issues and side-effects that the
    receiver would not have intended to cause).
TODO(harlowja): use parts of http://bugs.python.org/issue17911 and the
backport at https://pypi.org/project/traceback2/ to (hopefully)
simplify the methods and contents of this object...
"""
DICT_VERSION = 1
BASE_EXCEPTIONS = ('BaseException', 'Exception')
"""
Root exceptions of all other python exceptions.
See: https://docs.python.org/2/library/exceptions.html
"""
#: Expected failure schema (in json schema format).
SCHEMA = {
"$ref": "#/definitions/cause",
"definitions": {
"cause": {
"type": "object",
'properties': {
'version': {
"type": "integer",
"minimum": 0,
},
'exc_args': {
"type": "array",
"minItems": 0,
},
'exception_str': {
"type": "string",
},
'traceback_str': {
"type": "string",
},
'exc_type_names': {
"type": "array",
"items": {
"type": "string",
},
"minItems": 1,
},
'causes': {
"type": "array",
"items": {
"$ref": "#/definitions/cause",
},
}
},
"required": [
"exception_str",
'traceback_str',
'exc_type_names',
],
"additionalProperties": True,
},
},
}
def __init__(self, exc_info=None, **kwargs):
if not kwargs:
if exc_info is None:
exc_info = sys.exc_info()
else:
# This should always be the (type, value, traceback) tuple,
# either from a prior sys.exc_info() call or from some other
# creation...
if len(exc_info) != 3:
raise ValueError("Provided 'exc_info' must contain three"
" elements")
self._exc_info = exc_info
self._exc_args = tuple(getattr(exc_info[1], 'args', []))
self._exc_type_names = tuple(
reflection.get_all_class_names(exc_info[0], up_to=Exception))
if not self._exc_type_names:
raise TypeError("Invalid exception type '%s' (%s)"
% (exc_info[0], type(exc_info[0])))
self._exception_str = _exception_message(self._exc_info[1])
self._traceback_str = ''.join(
traceback.format_tb(self._exc_info[2]))
self._causes = kwargs.pop('causes', None)
else:
self._causes = kwargs.pop('causes', None)
self._exc_info = exc_info
self._exc_args = tuple(kwargs.pop('exc_args', []))
self._exception_str = kwargs.pop('exception_str')
self._exc_type_names = tuple(kwargs.pop('exc_type_names', []))
self._traceback_str = kwargs.pop('traceback_str', None)
if kwargs:
raise TypeError(
'Failure.__init__ got unexpected keyword argument(s): %s'
% ', '.join(six.iterkeys(kwargs)))
@classmethod
def from_exception(cls, exception):
"""Creates a failure object from a exception instance."""
exc_info = (
type(exception),
exception,
getattr(exception, '__traceback__', None)
)
return cls(exc_info=exc_info)
@classmethod
def validate(cls, data):
"""Validate input data matches expected failure ``dict`` format."""
try:
su.schema_validate(data, cls.SCHEMA)
except su.ValidationError as e:
raise exc.InvalidFormat("Failure data not of the"
" expected format: %s" % (e.message), e)
else:
# Ensure that all 'exc_type_names' originate from one of
# BASE_EXCEPTIONS, because those are the root exceptions that
# python mandates/provides and anything else is invalid...
causes = collections.deque([data])
while causes:
cause = causes.popleft()
root_exc_type = cause['exc_type_names'][-1]
if root_exc_type not in cls.BASE_EXCEPTIONS:
raise exc.InvalidFormat(
"Failure data 'exc_type_names' must"
" have an initial exception type that is one"
" of %s types: '%s' is not one of those"
" types" % (cls.BASE_EXCEPTIONS, root_exc_type))
sub_causes = cause.get('causes')
if sub_causes:
causes.extend(sub_causes)
def _matches(self, other):
if self is other:
return True
return (self._exc_type_names == other._exc_type_names
and self.exception_args == other.exception_args
and self.exception_str == other.exception_str
and self.traceback_str == other.traceback_str
and self.causes == other.causes)
def matches(self, other):
"""Checks if another object is equivalent to this object.
:returns: checks if another object is equivalent to this object
:rtype: boolean
"""
if not isinstance(other, Failure):
return False
if self.exc_info is None or other.exc_info is None:
return self._matches(other)
else:
return self == other
def __eq__(self, other):
if not isinstance(other, Failure):
return NotImplemented
return (self._matches(other) and
_are_equal_exc_info_tuples(self.exc_info, other.exc_info))
def __ne__(self, other):
return not (self == other)
# NOTE(imelnikov): obj.__hash__() should return same values for equal
# objects, so we should redefine __hash__. Failure equality semantics
# is a bit complicated, so for now we just mark Failure objects as
# unhashable. See python docs on object.__hash__ for more info:
# http://docs.python.org/2/reference/datamodel.html#object.__hash__
__hash__ = None
@property
def exception(self):
"""Exception value, or none if exception value is not present.
Exception value may be lost during serialization.
"""
if self._exc_info:
return self._exc_info[1]
else:
return None
@property
def exception_str(self):
"""String representation of exception."""
return self._exception_str
@property
def exception_args(self):
"""Tuple of arguments given to the exception constructor."""
return self._exc_args
@property
def exc_info(self):
"""Exception info tuple or none.
See: https://docs.python.org/2/library/sys.html#sys.exc_info for what
the contents of this tuple are (if none, then no contents can
be examined).
"""
return self._exc_info
@property
def traceback_str(self):
"""Exception traceback as string."""
return self._traceback_str
@staticmethod
def reraise_if_any(failures):
"""Re-raise exceptions if argument is not empty.
        If the argument is an empty list/tuple/iterator, this method returns
        None. If the argument converts into a list with a
        single ``Failure`` object in it, that failure is reraised. Else, a
        :class:`~taskflow.exceptions.WrappedFailure` exception
        is raised with the failure list as causes.
"""
if not isinstance(failures, (list, tuple)):
# Convert generators/other into a list...
failures = list(failures)
if len(failures) == 1:
failures[0].reraise()
elif len(failures) > 1:
raise exc.WrappedFailure(failures)
def reraise(self):
"""Re-raise captured exception."""
if self._exc_info:
six.reraise(*self._exc_info)
else:
raise exc.WrappedFailure([self])
def check(self, *exc_classes):
"""Check if any of ``exc_classes`` caused the failure.
        Arguments of this method can be exception types or type
        names (strings). If the captured exception is an instance of
        an exception of a given type, the corresponding argument is
        returned. Else, None is returned.
"""
for cls in exc_classes:
if isinstance(cls, type):
err = reflection.get_class_name(cls)
else:
err = cls
if err in self._exc_type_names:
return cls
return None
@classmethod
def _extract_causes_iter(cls, exc_val):
seen = [exc_val]
causes = [exc_val]
while causes:
exc_val = causes.pop()
if exc_val is None:
continue
# See: https://www.python.org/dev/peps/pep-3134/ for why/what
# these are...
#
# '__cause__' attribute for explicitly chained exceptions
# '__context__' attribute for implicitly chained exceptions
# '__traceback__' attribute for the traceback
#
# See: https://www.python.org/dev/peps/pep-0415/ for why/what
# the '__suppress_context__' is/means/implies...
suppress_context = getattr(exc_val,
'__suppress_context__', False)
if suppress_context:
attr_lookups = ['__cause__']
else:
attr_lookups = ['__cause__', '__context__']
nested_exc_val = None
for attr_name in attr_lookups:
attr_val = getattr(exc_val, attr_name, None)
if attr_val is None:
continue
if attr_val not in seen:
nested_exc_val = attr_val
break
if nested_exc_val is not None:
exc_info = (
type(nested_exc_val),
nested_exc_val,
getattr(nested_exc_val, '__traceback__', None),
)
seen.append(nested_exc_val)
causes.append(nested_exc_val)
yield cls(exc_info=exc_info)
@property
def causes(self):
"""Tuple of all *inner* failure *causes* of this failure.
NOTE(harlowja): Does **not** include the current failure (only
returns connected causes of this failure, if any). This property
is really only useful on 3.x or newer versions of python as older
versions do **not** have associated causes (the tuple will **always**
be empty on 2.x versions of python).
Refer to :pep:`3134` and :pep:`409` and :pep:`415` for what
this is examining to find failure causes.
"""
if self._causes is not None:
return self._causes
else:
self._causes = tuple(self._extract_causes_iter(self.exception))
return self._causes
def __unicode__(self):
return self.pformat()
def pformat(self, traceback=False):
"""Pretty formats the failure object into a string."""
buf = six.StringIO()
if not self._exc_type_names:
buf.write('Failure: %s' % (self._exception_str))
else:
buf.write('Failure: %s: %s' % (self._exc_type_names[0],
self._exception_str))
if traceback:
if self._traceback_str is not None:
traceback_str = self._traceback_str.rstrip()
else:
traceback_str = None
if traceback_str:
buf.write(os.linesep)
buf.write('Traceback (most recent call last):')
buf.write(os.linesep)
buf.write(traceback_str)
else:
buf.write(os.linesep)
buf.write('Traceback not available.')
return buf.getvalue()
def __iter__(self):
"""Iterate over exception type names."""
for et in self._exc_type_names:
yield et
def __getstate__(self):
dct = self.to_dict()
if self._exc_info:
# Avoids 'TypeError: can't pickle traceback objects'
dct['exc_info'] = self._exc_info[0:2]
return dct
def __setstate__(self, dct):
self._exception_str = dct['exception_str']
if 'exc_args' in dct:
self._exc_args = tuple(dct['exc_args'])
else:
# Guess we got an older version somehow, before this
# was added, so at that point just set to an empty tuple...
self._exc_args = ()
self._traceback_str = dct['traceback_str']
self._exc_type_names = dct['exc_type_names']
if 'exc_info' in dct:
# Tracebacks can't be serialized/deserialized, but since we
# provide a traceback string (and more) this should be
# acceptable...
#
# TODO(harlowja): in the future we could do something like
# what the twisted people have done, see for example
# twisted-13.0.0/twisted/python/failure.py#L89 for how they
# created a fake traceback object...
self._exc_info = tuple(iter_utils.fill(dct['exc_info'], 3))
else:
self._exc_info = None
causes = dct.get('causes')
if causes is not None:
causes = tuple(self.from_dict(d) for d in causes)
self._causes = causes
@classmethod
def from_dict(cls, data):
"""Converts this from a dictionary to a object."""
data = dict(data)
version = data.pop('version', None)
if version != cls.DICT_VERSION:
raise ValueError('Invalid dict version of failure object: %r'
% version)
causes = data.get('causes')
if causes is not None:
data['causes'] = tuple(cls.from_dict(d) for d in causes)
return cls(**data)
def to_dict(self, include_args=True):
"""Converts this object to a dictionary.
:param include_args: boolean indicating whether to include the
exception args in the output.
"""
return {
'exception_str': self.exception_str,
'traceback_str': self.traceback_str,
'exc_type_names': list(self),
'version': self.DICT_VERSION,
'exc_args': self.exception_args if include_args else tuple(),
'causes': [f.to_dict() for f in self.causes],
}
def copy(self):
"""Copies this object."""
return Failure(exc_info=_copy_exc_info(self.exc_info),
exception_str=self.exception_str,
traceback_str=self.traceback_str,
exc_args=self.exception_args,
exc_type_names=self._exc_type_names[:],
causes=self._causes)
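# A minimal round-trip sketch, assuming the taskflow imports above are
# available: capture an exception into a Failure, serialize it to a dict,
# validate/rebuild it, and re-raise it as a WrappedFailure.
if __name__ == '__main__':
    try:
        raise ValueError("boom")
    except ValueError:
        captured = Failure()  # grabs sys.exc_info() of the active exception
    data = captured.to_dict()
    Failure.validate(data)  # schema check plus root-exception-type check
    rebuilt = Failure.from_dict(data)
    print(rebuilt.pformat(traceback=True))
    rebuilt.reraise()  # no exc_info after deserialization -> WrappedFailure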
|
{
"content_hash": "ae1b61ce73e0e3704b137e62fa6ca36d",
"timestamp": "",
"source": "github",
"line_count": 511,
"max_line_length": 79,
"avg_line_length": 39.99021526418787,
"alnum_prop": 0.5653046244188892,
"repo_name": "jimbobhickville/taskflow",
"id": "31c30ce8f676a0944528c914a1a16e4955d5c920",
"size": "21092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taskflow/types/failure.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "1650287"
},
{
"name": "Shell",
"bytes": "11250"
}
],
"symlink_target": ""
}
|
""" Test functions for the sparse.linalg.isolve module
"""
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from numpy.testing import TestCase, assert_equal, assert_array_equal, \
assert_, assert_allclose, assert_raises
from numpy import zeros, ones, arange, array, abs, max, eye, iscomplexobj
from numpy.linalg import cond
from scipy.linalg import norm
from scipy.sparse import spdiags, csr_matrix, SparseEfficiencyWarning
from scipy.sparse.linalg import LinearOperator, aslinearoperator
from scipy.sparse.linalg.isolve import cg, cgs, bicg, bicgstab, gmres, qmr, minres, lgmres
# TODO check that method preserve shape and type
# TODO test both preconditioner methods
class Case(object):
def __init__(self, name, A, skip=None):
self.name = name
self.A = A
if skip is None:
self.skip = []
else:
self.skip = skip
def __repr__(self):
return "<%s>" % self.name
class IterativeParams(object):
def __init__(self):
# list of tuples (solver, symmetric, positive_definite )
solvers = [cg, cgs, bicg, bicgstab, gmres, qmr, minres, lgmres]
sym_solvers = [minres, cg]
posdef_solvers = [cg]
real_solvers = [minres]
self.solvers = solvers
# list of tuples (A, symmetric, positive_definite )
self.cases = []
# Symmetric and Positive Definite
N = 40
data = ones((3,N))
data[0,:] = 2
data[1,:] = -1
data[2,:] = -1
Poisson1D = spdiags(data, [0,-1,1], N, N, format='csr')
self.Poisson1D = Case("poisson1d", Poisson1D)
self.cases.append(Case("poisson1d", Poisson1D))
# note: minres fails for single precision
self.cases.append(Case("poisson1d", Poisson1D.astype('f'),
skip=[minres]))
# Symmetric and Negative Definite
self.cases.append(Case("neg-poisson1d", -Poisson1D,
skip=posdef_solvers))
# note: minres fails for single precision
self.cases.append(Case("neg-poisson1d", (-Poisson1D).astype('f'),
skip=posdef_solvers + [minres]))
# Symmetric and Indefinite
data = array([[6, -5, 2, 7, -1, 10, 4, -3, -8, 9]],dtype='d')
RandDiag = spdiags(data, [0], 10, 10, format='csr')
self.cases.append(Case("rand-diag", RandDiag, skip=posdef_solvers))
self.cases.append(Case("rand-diag", RandDiag.astype('f'),
skip=posdef_solvers))
# Random real-valued
np.random.seed(1234)
data = np.random.rand(4, 4)
self.cases.append(Case("rand", data, skip=posdef_solvers+sym_solvers))
self.cases.append(Case("rand", data.astype('f'),
skip=posdef_solvers+sym_solvers))
# Random symmetric real-valued
np.random.seed(1234)
data = np.random.rand(4, 4)
data = data + data.T
self.cases.append(Case("rand-sym", data, skip=posdef_solvers))
self.cases.append(Case("rand-sym", data.astype('f'),
skip=posdef_solvers))
# Random pos-def symmetric real
np.random.seed(1234)
data = np.random.rand(9, 9)
data = np.dot(data.conj(), data.T)
self.cases.append(Case("rand-sym-pd", data))
# note: minres fails for single precision
self.cases.append(Case("rand-sym-pd", data.astype('f'),
skip=[minres]))
# Random complex-valued
np.random.seed(1234)
data = np.random.rand(4, 4) + 1j*np.random.rand(4, 4)
self.cases.append(Case("rand-cmplx", data,
skip=posdef_solvers+sym_solvers+real_solvers))
self.cases.append(Case("rand-cmplx", data.astype('F'),
skip=posdef_solvers+sym_solvers+real_solvers))
# Random hermitian complex-valued
np.random.seed(1234)
data = np.random.rand(4, 4) + 1j*np.random.rand(4, 4)
data = data + data.T.conj()
self.cases.append(Case("rand-cmplx-herm", data,
skip=posdef_solvers+real_solvers))
self.cases.append(Case("rand-cmplx-herm", data.astype('F'),
skip=posdef_solvers+real_solvers))
# Random pos-def hermitian complex-valued
np.random.seed(1234)
data = np.random.rand(9, 9) + 1j*np.random.rand(9, 9)
data = np.dot(data.conj(), data.T)
self.cases.append(Case("rand-cmplx-sym-pd", data, skip=real_solvers))
self.cases.append(Case("rand-cmplx-sym-pd", data.astype('F'),
skip=real_solvers))
# Non-symmetric and Positive Definite
#
# cgs, qmr, and bicg fail to converge on this one
# -- algorithmic limitation apparently
data = ones((2,10))
data[0,:] = 2
data[1,:] = -1
A = spdiags(data, [0,-1], 10, 10, format='csr')
self.cases.append(Case("nonsymposdef", A,
skip=sym_solvers+[cgs, qmr, bicg]))
self.cases.append(Case("nonsymposdef", A.astype('F'),
skip=sym_solvers+[cgs, qmr, bicg]))
def setup_module():
global params
params = IterativeParams()
def check_maxiter(solver, case):
A = case.A
tol = 1e-12
b = arange(A.shape[0], dtype=float)
x0 = 0*b
residuals = []
def callback(x):
residuals.append(norm(b - case.A*x))
x, info = solver(A, b, x0=x0, tol=tol, maxiter=3, callback=callback)
assert_equal(len(residuals), 3)
assert_equal(info, 3)
def test_maxiter():
case = params.Poisson1D
for solver in params.solvers:
if solver in case.skip:
continue
yield check_maxiter, solver, case
def assert_normclose(a, b, tol=1e-8):
residual = norm(a - b)
tolerance = tol*norm(b)
msg = "residual (%g) not smaller than tolerance %g" % (residual, tolerance)
assert_(residual < tolerance, msg=msg)
def check_convergence(solver, case):
A = case.A
if A.dtype.char in "dD":
tol = 1e-8
else:
tol = 1e-2
b = arange(A.shape[0], dtype=A.dtype)
x0 = 0*b
x, info = solver(A, b, x0=x0, tol=tol)
assert_array_equal(x0, 0*b) # ensure that x0 is not overwritten
assert_equal(info,0)
assert_normclose(A.dot(x), b, tol=tol)
def test_convergence():
for solver in params.solvers:
for case in params.cases:
if solver in case.skip:
continue
yield check_convergence, solver, case
def check_precond_dummy(solver, case):
tol = 1e-8
def identity(b,which=None):
"""trivial preconditioner"""
return b
A = case.A
M,N = A.shape
D = spdiags([1.0/A.diagonal()], [0], M, N)
b = arange(A.shape[0], dtype=float)
x0 = 0*b
precond = LinearOperator(A.shape, identity, rmatvec=identity)
if solver is qmr:
x, info = solver(A, b, M1=precond, M2=precond, x0=x0, tol=tol)
else:
x, info = solver(A, b, M=precond, x0=x0, tol=tol)
assert_equal(info,0)
assert_normclose(A.dot(x), b, tol)
A = aslinearoperator(A)
A.psolve = identity
A.rpsolve = identity
x, info = solver(A, b, x0=x0, tol=tol)
assert_equal(info,0)
assert_normclose(A*x, b, tol=tol)
def test_precond_dummy():
case = params.Poisson1D
for solver in params.solvers:
if solver in case.skip:
continue
yield check_precond_dummy, solver, case
def test_gmres_basic():
A = np.vander(np.arange(10) + 1)[:, ::-1]
b = np.zeros(10)
b[0] = 1
x = np.linalg.solve(A, b)
x_gm, err = gmres(A, b, restart=5, maxiter=1)
assert_allclose(x_gm[0], 0.359, rtol=1e-2)
def test_reentrancy():
non_reentrant = [cg, cgs, bicg, bicgstab, gmres, qmr]
reentrant = [lgmres, minres]
for solver in reentrant + non_reentrant:
yield _check_reentrancy, solver, solver in reentrant
def _check_reentrancy(solver, is_reentrant):
def matvec(x):
A = np.array([[1.0, 0, 0], [0, 2.0, 0], [0, 0, 3.0]])
y, info = solver(A, x)
assert_equal(info, 0)
return y
b = np.array([1, 1./2, 1./3])
op = LinearOperator((3, 3), matvec=matvec, rmatvec=matvec,
dtype=b.dtype)
if not is_reentrant:
assert_raises(RuntimeError, solver, op, b)
else:
y, info = solver(op, b)
assert_equal(info, 0)
assert_allclose(y, [1, 1, 1])
#------------------------------------------------------------------------------
class TestQMR(TestCase):
def test_leftright_precond(self):
"""Check that QMR works with left and right preconditioners"""
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=SparseEfficiencyWarning)
from scipy.sparse.linalg.dsolve import splu
from scipy.sparse.linalg.interface import LinearOperator
n = 100
dat = ones(n)
A = spdiags([-2*dat, 4*dat, -dat], [-1,0,1],n,n)
b = arange(n,dtype='d')
L = spdiags([-dat/2, dat], [-1,0], n, n)
U = spdiags([4*dat, -dat], [0,1], n, n)
L_solver = splu(L)
U_solver = splu(U)
def L_solve(b):
return L_solver.solve(b)
def U_solve(b):
return U_solver.solve(b)
def LT_solve(b):
return L_solver.solve(b,'T')
def UT_solve(b):
return U_solver.solve(b,'T')
M1 = LinearOperator((n,n), matvec=L_solve, rmatvec=LT_solve)
M2 = LinearOperator((n,n), matvec=U_solve, rmatvec=UT_solve)
x,info = qmr(A, b, tol=1e-8, maxiter=15, M1=M1, M2=M2)
assert_equal(info,0)
assert_normclose(A*x, b, tol=1e-8)
class TestGMRES(TestCase):
def test_callback(self):
def store_residual(r, rvec):
rvec[rvec.nonzero()[0].max()+1] = r
# Define, A,b
A = csr_matrix(array([[-2,1,0,0,0,0],[1,-2,1,0,0,0],[0,1,-2,1,0,0],[0,0,1,-2,1,0],[0,0,0,1,-2,1],[0,0,0,0,1,-2]]))
b = ones((A.shape[0],))
maxiter = 1
rvec = zeros(maxiter+1)
rvec[0] = 1.0
callback = lambda r:store_residual(r, rvec)
x,flag = gmres(A, b, x0=zeros(A.shape[0]), tol=1e-16, maxiter=maxiter, callback=callback)
diff = max(abs((rvec - array([1.0, 0.81649658092772603]))))
assert_(diff < 1e-5)
def test_abi(self):
# Check we don't segfault on gmres with complex argument
A = eye(2)
b = ones(2)
r_x, r_info = gmres(A, b)
r_x = r_x.astype(complex)
x, info = gmres(A.astype(complex), b.astype(complex))
assert_(iscomplexobj(x))
assert_allclose(r_x, x)
assert_(r_info == info)
if __name__ == "__main__":
import nose
nose.run(argv=['', __file__])
|
{
"content_hash": "643546f165b087315ee4642d2725de0f",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 122,
"avg_line_length": 30.972067039106147,
"alnum_prop": 0.5572691197691197,
"repo_name": "kmspriyatham/symath",
"id": "4b524dac1374baad99c0850759ed06fbc61ec9f9",
"size": "11110",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scipy/scipy/sparse/linalg/isolve/tests/test_iterative.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "17042868"
},
{
"name": "C++",
"bytes": "10078577"
},
{
"name": "CSS",
"bytes": "14254"
},
{
"name": "FORTRAN",
"bytes": "6345626"
},
{
"name": "JavaScript",
"bytes": "3133"
},
{
"name": "M",
"bytes": "66"
},
{
"name": "Matlab",
"bytes": "4280"
},
{
"name": "Objective-C",
"bytes": "15478"
},
{
"name": "Python",
"bytes": "7388118"
},
{
"name": "Shell",
"bytes": "3288"
},
{
"name": "TeX",
"bytes": "37261"
},
{
"name": "nesC",
"bytes": "1736"
}
],
"symlink_target": ""
}
|
import argparse
import json
import logging
import sys
from validator import constants
from validator.validate import validate
def main():
'Main function. Handles delegation to other functions.'
logging.basicConfig()
type_choices = {'any': constants.PACKAGE_ANY,
'extension': constants.PACKAGE_EXTENSION,
'theme': constants.PACKAGE_THEME,
'dictionary': constants.PACKAGE_DICTIONARY,
'languagepack': constants.PACKAGE_LANGPACK,
'search': constants.PACKAGE_SEARCHPROV,
'multi': constants.PACKAGE_MULTI}
    # Parse the command-line arguments.
parser = argparse.ArgumentParser(
description='Run tests on a Mozilla-type addon.')
parser.add_argument('package',
help="The path of the package you're testing")
parser.add_argument('-t',
'--type',
default='any',
choices=type_choices.keys(),
help="Type of addon you assume you're testing",
required=False)
parser.add_argument('-o',
'--output',
default='text',
choices=('text', 'json'),
help='The output format that you expect',
required=False)
parser.add_argument('-D',
'--debug',
action='store_true',
help="""Runs extra consistency checks, and in certain
circumstance prints more verbose debugging output.""")
parser.add_argument('-v',
'--verbose',
action='store_true',
help="""If the output format supports it, makes
the analysis summary include extra info.""")
parser.add_argument('--boring',
action='store_true',
help="""Activating this flag will remove color
support from the terminal.""")
parser.add_argument('--determined',
action='store_true',
help="""This flag will continue running tests in
successive tests even if a lower tier fails.""")
parser.add_argument('--selfhosted',
action='store_true',
help="""Indicates that the addon will not be
hosted on addons.mozilla.org. This allows the
<em:updateURL> element to be set.""")
parser.add_argument('--approved_applications',
default='validator/app_versions.json',
help="""A JSON file containing acceptable applications
and their versions""")
parser.add_argument('--target-maxversion',
help="""JSON string to override the package's
targetapp_maxVersion for validation. The JSON object
should be a dict of versions keyed by application
GUID. For example, setting a package's max Firefox
version to 5.*:
{"{ec8030f7-c20a-464f-9b0e-13a3a9e97384}": "5.*"}
""")
parser.add_argument('--target-minversion',
help="""JSON string to override the package's
targetapp_minVersion for validation. The JSON object
should be a dict of versions keyed by application
GUID. For example, setting a package's min Firefox
version to 5.*:
{"{ec8030f7-c20a-464f-9b0e-13a3a9e97384}": "5.*"}
""")
parser.add_argument('--for-appversions',
help="""JSON string to run validation tests for
compatibility with a specific app/version. The JSON
object should be a dict of version lists keyed by
application GUID. For example, running Firefox 6.*
compatibility tests:
{"{ec8030f7-c20a-464f-9b0e-13a3a9e97384}": ["6.*"]}
""")
parser.add_argument('--timeout',
help='The amount of time before validation is '
'terminated with a timeout exception.',
default='60')
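    # Example invocation (hypothetical add-on path):
    #   python main.py my_addon.xpi --type extension --output json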
args = parser.parse_args()
# We want to make sure that the output is expected. Parse out the expected
# type for the add-on and pass it in for validation.
if args.type not in type_choices:
# Fail if the user provided invalid input.
print 'Given expectation (%s) not valid. See --help for details' % \
args.type
sys.exit(1)
overrides = {}
if args.target_minversion:
overrides['targetapp_minVersion'] = json.loads(args.target_minversion)
if args.target_maxversion:
overrides['targetapp_maxVersion'] = json.loads(args.target_maxversion)
for_appversions = None
if args.for_appversions:
for_appversions = json.loads(args.for_appversions)
try:
timeout = int(args.timeout)
except ValueError:
print 'Invalid timeout. Integer expected.'
sys.exit(1)
expectation = type_choices[args.type]
error_bundle = validate(args.package,
format=None,
approved_applications=args.approved_applications,
determined=args.determined,
listed=not args.selfhosted,
debug=args.debug,
overrides=overrides,
for_appversions=for_appversions,
expectation=expectation,
timeout=timeout)
# Print the output of the tests based on the requested format.
if args.output == 'text':
print error_bundle.print_summary(verbose=args.verbose,
no_color=args.boring).encode('utf-8')
elif args.output == 'json':
sys.stdout.write(error_bundle.render_json())
if error_bundle.failed():
sys.exit(1)
else:
sys.exit(0)
# Start up the testing and return the output.
if __name__ == '__main__':
main()
|
{
"content_hash": "cc6d38baa4b90f992d7d632f34959153",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 78,
"avg_line_length": 43.651006711409394,
"alnum_prop": 0.5195264452644527,
"repo_name": "kmaglione/amo-validator",
"id": "e0f904aef0fc47753914550b1a70d6003b6a329d",
"size": "6504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "validator/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "466"
},
{
"name": "HTML",
"bytes": "2802"
},
{
"name": "JavaScript",
"bytes": "602"
},
{
"name": "Python",
"bytes": "853297"
},
{
"name": "Shell",
"bytes": "1837"
}
],
"symlink_target": ""
}
|
import collections
import sys
from typing import List


class Solution:
def findMaxValueOfEquation(self, points: List[List[int]], k: int) -> int:
        # Monotonic deque of [y - x, x]: y - x is kept decreasing, and x is
        # increasing because points arrive sorted by x.
q=collections.deque()
answer=-sys.maxsize
for point in points:
while q and point[0]-q[0][1]>k:
q.popleft()
if q:
answer=max(answer, q[0][0]+point[1]+point[0])
while q and q[-1][0]<point[1]-point[0]:
q.pop()
q.append([point[1]-point[0], point[0]])
return answer
# Ref: https://leetcode.com/problems/max-value-of-equation/discuss/709231/Python-Stack-O(N)
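# A minimal sanity check (samples from the problem statement), runnable with
# the imports above:
if __name__ == "__main__":
    # best pair for k=1 is (1,3) and (2,0): 3 + 0 + |1 - 2| = 4
    assert Solution().findMaxValueOfEquation([[1, 3], [2, 0], [5, 10], [6, -10]], 1) == 4
    assert Solution().findMaxValueOfEquation([[0, 0], [3, 0], [9, 2]], 3) == 3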
|
{
"content_hash": "b234f211ba7b1c1eeb0a71eeb11d2e04",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 91,
"avg_line_length": 40.375,
"alnum_prop": 0.5309597523219814,
"repo_name": "Magic07/online-judge-solutions",
"id": "d4c59ba5fd67ab6971a8a45fa56bccfa2db1719b",
"size": "646",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "leetcode/1622-max-value-of-equation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "34617"
}
],
"symlink_target": ""
}
|
"""
test_zbsensor.py
By James Saunders, 2017
Tests PyAlertMe Module.
"""
import sys
sys.path.insert(0, '../')
from pyalertme import *
import unittest
from mock_serial import Serial
class TestZBSensor(unittest.TestCase):
"""
Test PyAlertMe ZBSensor Class.
"""
def setUp(self):
"""
Create a node object for each test.
"""
self.ser = Serial()
self.device_obj = ZBSensor(self.ser)
def tearDown(self):
"""
Teardown node object.
"""
self.device_obj.halt()
def test_generate_type_update(self):
"""
Test Generate Type Update.
"""
result = self.device_obj.message_version_info_update()
expected = {
'src_endpoint': b'\x02',
'dest_endpoint': b'\x02',
'cluster': b'\x00\xf6',
'profile': b'\xc2\x16',
'data': b'\tq\xfeHA\xd2\x1b\x19\x00\x00o\r\x009\x10\x07\x00\x01\x1c\x2d\x7b\x09PyAlertMe\x08ZBSensor\n2017-01-01'
}
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
{
"content_hash": "92ff2f5462c62cb9951e00f92be7ea45",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 125,
"avg_line_length": 23.76595744680851,
"alnum_prop": 0.5666965085049239,
"repo_name": "jamesleesaunders/pi-hive",
"id": "bf12c77527dc97c625f196f23933c2cf91056803",
"size": "1136",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_zbsensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54843"
}
],
"symlink_target": ""
}
|
description = """
Kurisu, the bot for the 3DS Hacking Discord!
"""
# import dependencies
import os
from discord.ext import commands
import discord
import datetime
import json, asyncio
import copy
import configparser
import traceback
import sys
import re
# sets working directory to bot's folder
dir_path = os.path.dirname(os.path.realpath(__file__))
os.chdir(dir_path)
# read config for token
config = configparser.ConfigParser()
config.read("config.ini")
os.makedirs("data", exist_ok=True)
# create the JSON data stores if they don't exist
for filename in ("warns", "restrictions", "staff", "helpers",
                 "timebans", "softbans", "watch"):
    if not os.path.isfile("data/{}.json".format(filename)):
        with open("data/{}.json".format(filename), "w") as f:
            f.write("{}")
prefix = ['!', '.']
bot = commands.Bot(command_prefix=prefix, description=description, pm_help=None)
bot.actions = [] # changes messages in mod-/server-logs
with open("data/watch.json", "r") as f:
    bot.watching = json.load(f)  # post user messages to message-logs
# http://stackoverflow.com/questions/3411771/multiple-character-replace-with-python
chars = "\\`*_<>#@:~"
def escape_name(name):
name = str(name)
for c in chars:
if c in name:
name = name.replace(c, "\\" + c)
return name.replace("@", "@\u200b") # prevent mentions
bot.escape_name = escape_name
bot.pruning = False # used to disable leave logs if pruning, maybe.
# mostly taken from https://github.com/Rapptz/discord.py/blob/async/discord/ext/commands/bot.py
@bot.event
async def on_command_error(error, ctx):
if isinstance(error, discord.ext.commands.errors.CommandNotFound):
pass # ...don't need to know if commands don't exist
if isinstance(error, discord.ext.commands.errors.CheckFailure):
await bot.send_message(ctx.message.channel, "{} You don't have permission to use this command.".format(ctx.message.author.mention))
elif isinstance(error, discord.ext.commands.errors.MissingRequiredArgument):
formatter = commands.formatter.HelpFormatter()
await bot.send_message(ctx.message.channel, "{} You are missing required arguments.\n{}".format(ctx.message.author.mention, formatter.format_help_for(ctx, ctx.command)[0]))
else:
if ctx.command:
await bot.send_message(ctx.message.channel, "An error occured while processing the `{}` command.".format(ctx.command.name))
print('Ignoring exception in command {}'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
bot.all_ready = False
bot._is_all_ready = asyncio.Event(loop=bot.loop)
async def wait_until_all_ready():
"""Wait until the entire bot is ready."""
await bot._is_all_ready.wait()
bot.wait_until_all_ready = wait_until_all_ready
@bot.event
async def on_ready():
# this bot should only ever be in one server anyway
for server in bot.servers:
bot.server = server
if bot.all_ready:
break
print("{} has started! {} has {:,} members!".format(bot.user.name, server.name, server.member_count))
# channels
bot.welcome_channel = discord.utils.get(server.channels, name="welcome-and-rules")
bot.announcements_channel = discord.utils.get(server.channels, name="announcements")
bot.helpers_channel = discord.utils.get(server.channels, name="logs")
bot.mods_channel = discord.utils.get(server.channels, name="staff-general")
bot.modlogs_channel = discord.utils.get(server.channels, name="logs")
bot.serverlogs_channel = discord.utils.get(server.channels, name="logs")
bot.messagelogs_channel = discord.utils.get(server.channels, name="logs")
# roles
bot.staff_role = discord.utils.get(server.roles, name="Staff")
bot.halfop_role = discord.utils.get(server.roles, name="Moderator")
bot.op_role = discord.utils.get(server.roles, name="Administrator")
bot.superop_role = discord.utils.get(server.roles, name="Administrator")
bot.owner_role = discord.utils.get(server.roles, name="Owner")
bot.helpers_role = discord.utils.get(server.roles, name="Helpers")
bot.onduty3ds_role = discord.utils.get(server.roles, name="On-Duty 3DS")
bot.ondutywiiu_role = discord.utils.get(server.roles, name="On-Duty Wii U")
bot.verified_role = discord.utils.get(server.roles, name="Verified")
bot.trusted_role = discord.utils.get(server.roles, name="Trusted")
bot.probation_role = discord.utils.get(server.roles, name="Probation")
bot.muted_role = discord.utils.get(server.roles, name="Muted")
bot.nomemes_role = discord.utils.get(server.roles, name="No-Memes")
bot.nohelp_role = discord.utils.get(server.roles, name="No-Help")
bot.noembed_role = discord.utils.get(server.roles, name="No-Embed")
bot.elsewhere_role = discord.utils.get(server.roles, name="#elsewhere")
bot.everyone_role = server.default_role
bot.staff_ranks = {
"Moderator": bot.halfop_role,
"Administrator": bot.op_role,
"Administrator": bot.superop_role,
"Owner": bot.owner_role,
}
bot.helper_roles = {
"3DS": bot.onduty3ds_role,
"WiiU": bot.ondutywiiu_role,
}
# load timebans
with open("data/timebans.json", "r") as f:
timebans = json.load(f)
bot.timebans = {}
timebans_i = copy.copy(timebans)
for user_id, timestamp in timebans_i.items():
found = False
for user in await bot.get_bans(server):
if user.id == user_id:
bot.timebans[user_id] = [user, datetime.datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S"), False] # last variable is "notified", for <=30 minute notifications
found = True
break
if not found:
timebans.pop(user_id) # somehow not in the banned list anymore so let's just remove it
with open("data/timebans.json", "w") as f:
json.dump(timebans, f)
bot.all_ready = True
bot._is_all_ready.set()
msg = "{} has started! {} has {:,} members!".format(bot.user.name, server.name, server.member_count)
if len(failed_addons) != 0:
msg += "\n\nSome addons failed to load:\n"
for f in failed_addons:
msg += "\n{}: `{}: {}`".format(*f)
await bot.send_message(bot.helpers_channel, msg)
# softban check
with open("data/softbans.json", "r") as f:
softbans = json.load(f)
for member in server.members:
if member.id in softbans:
await bot.send_message(member, "This account has not been permitted to participate in {}. The reason is: {}".format(bot.server.name, softbans[member.id]["reason"]))
bot.actions.append("sbk:"+member.id)
await bot.kick(member)
msg = "🚨 **Attempted join**: {} is soft-banned by <@{}> | {}#{}".format(member.mention, softbans[member.id]["issuer_id"], bot.escape_name(member.name), member.discriminator)
embed = discord.Embed(color=discord.Color.red())
embed.description = softbans[member.id]["reason"]
await bot.send_message(bot.serverlogs_channel, msg, embed=embed)
return
# loads extensions
addons = [
'addons.assistance',
'addons.blah',
#'addons.bf',
'addons.err',
# 'addons.events',
'addons.extras',
'addons.kickban',
'addons.load',
'addons.lockdown',
'addons.logs',
'addons.loop',
'addons.memes',
'addons.helper_list',
'addons.mod_staff',
'addons.mod_warn',
'addons.mod_watch',
'addons.mod',
# 'addons.rules',
]
failed_addons = []
for extension in addons:
try:
bot.load_extension(extension)
except Exception as e:
print('{} failed to load.\n{}: {}'.format(extension, type(e).__name__, e))
failed_addons.append([extension, type(e).__name__, e])
# Execute
print('Bot directory: ', dir_path)
bot.run(config['Main']['token'])
|
{
"content_hash": "b9c935e4a640c0dc14713f17d65cac46",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 189,
"avg_line_length": 39.33913043478261,
"alnum_prop": 0.6345048629531388,
"repo_name": "T3CHNOLOG1C/Kurisu",
"id": "40c94563c1604c504a27e3ad1ff563f0c8282555",
"size": "9172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "162194"
}
],
"symlink_target": ""
}
|
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_ethereum_events', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='MonitoredEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=256)),
('contract_address', models.CharField(max_length=42, validators=[django.core.validators.MinLengthValidator(42)])),
('topic', models.CharField(max_length=64, validators=[django.core.validators.MinLengthValidator(64)])),
('event_receiver', models.CharField(max_length=256)),
('monitored_from', models.IntegerField(blank=True, help_text='Block number in which monitoring for this event started', null=True)),
],
options={
'verbose_name_plural': 'Monitored Events',
'verbose_name': 'Monitored Event',
},
),
migrations.AlterUniqueTogether(
name='monitoredevent',
unique_together={('topic', 'contract_address')},
),
]
|
{
"content_hash": "5e5441c89263c7815ef498cb616e9a6d",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 148,
"avg_line_length": 41,
"alnum_prop": 0.5940204563335956,
"repo_name": "artemistomaras/django-ethereum-events",
"id": "3c3cc32ecc8b67e921898ee610bfc9f35c1016e0",
"size": "1320",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_ethereum_events/migrations/0002_auto_20180531_0806.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4086"
},
{
"name": "Python",
"bytes": "66636"
},
{
"name": "Solidity",
"bytes": "724"
}
],
"symlink_target": ""
}
|
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'huxley.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
{
"content_hash": "3d8d9bd158cc901ae3d3968a0b8f6998",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 73,
"avg_line_length": 30.2,
"alnum_prop": 0.6556291390728477,
"repo_name": "bmun/huxley",
"id": "3269a55b62a2e3566bba1865d8baa4d3906e2c55",
"size": "774",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "13301"
},
{
"name": "JavaScript",
"bytes": "400597"
},
{
"name": "Less",
"bytes": "19215"
},
{
"name": "Python",
"bytes": "635783"
},
{
"name": "Shell",
"bytes": "2475"
}
],
"symlink_target": ""
}
|
import distutils.version as dist_version
import os
import migrate
from migrate.versioning import util as migrate_util
import sqlalchemy
from tuskar.common import exception
from tuskar.db import migration
from tuskar.openstack.common.db.sqlalchemy import session as db_session
@migrate_util.decorator
def patched_with_engine(f, *a, **kw):
url = a[0]
engine = migrate_util.construct_engine(url, **kw)
try:
kw['engine'] = engine
return f(*a, **kw)
finally:
if isinstance(engine, migrate_util.Engine) and engine is not url:
migrate_util.log.debug('Disposing SQLAlchemy engine %s', engine)
engine.dispose()
# TODO(jkoelker) When migrate 0.7.3 is released and nova depends
# on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
migrate_util.with_engine = patched_with_engine
# NOTE(jkoelker) Delay importing migrate until we are patched
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
_REPOSITORY = None
get_engine = db_session.get_engine
def db_sync(version=None):
if version is not None:
try:
version = int(version)
except ValueError:
raise exception.TuskarException(_("version should be an integer"))
current_version = db_version()
repository = _find_migrate_repo()
if version is None or version > current_version:
return versioning_api.upgrade(get_engine(), repository, version)
else:
return versioning_api.downgrade(get_engine(), repository,
version)
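# e.g. db_sync() upgrades to the latest revision, while db_sync(5) upgrades
# or downgrades as needed to land on revision 5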
def db_version():
repository = _find_migrate_repo()
try:
return versioning_api.db_version(get_engine(), repository)
except versioning_exceptions.DatabaseNotControlledError:
meta = sqlalchemy.MetaData()
engine = get_engine()
meta.reflect(bind=engine)
tables = meta.tables
if len(tables) == 0:
db_version_control(migration.INIT_VERSION)
return versioning_api.db_version(get_engine(), repository)
else:
# Some pre-Essex DB's may not be version controlled.
# Require them to upgrade using Essex first.
raise exception.TuskarException(
_("Upgrade DB using Essex release first."))
def db_version_control(version=None):
repository = _find_migrate_repo()
versioning_api.version_control(get_engine(), repository, version)
return version
def _find_migrate_repo():
"""Get the path for the migrate repository."""
global _REPOSITORY
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
assert os.path.exists(path)
if _REPOSITORY is None:
_REPOSITORY = Repository(path)
return _REPOSITORY
|
{
"content_hash": "b325f73651665099c2ae8474517a99a6",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 78,
"avg_line_length": 32.5531914893617,
"alnum_prop": 0.6666666666666666,
"repo_name": "tuskar/tuskar",
"id": "125a6ff533e65325f33bb19a8752f43d4e2f7f5c",
"size": "3837",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tuskar/db/sqlalchemy/migration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "578219"
}
],
"symlink_target": ""
}
|
from django.core.urlresolvers import reverse
from django.template.defaultfilters import slugify
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.http import Http404, HttpResponseForbidden, HttpResponseServerError, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from contacts.models import Company
from contacts.forms import CompanyCreateForm, CompanyUpdateForm, PhoneNumberFormSet, EmailAddressFormSet, InstantMessengerFormSet, WebSiteFormSet, StreetAddressFormSet, SpecialDateFormSet
def list(request, page=1, template='contacts/company/list.html'):
"""List of all the comapnies.
:param template: Add a custom template.
"""
company_list = Company.objects.all()
paginator = Paginator(company_list, 20)
try:
companies = paginator.page(page)
except (EmptyPage, InvalidPage):
companies = paginator.page(paginator.num_pages)
kwvars = {
'object_list': companies.object_list,
'has_next': companies.has_next(),
'has_previous': companies.has_previous(),
'has_other_pages': companies.has_other_pages(),
'start_index': companies.start_index(),
'end_index': companies.end_index(),
}
try:
kwvars['previous_page_number'] = companies.previous_page_number()
except (EmptyPage, InvalidPage):
kwvars['previous_page_number'] = None
try:
kwvars['next_page_number'] = companies.next_page_number()
except (EmptyPage, InvalidPage):
kwvars['next_page_number'] = None
return render_to_response(template, kwvars, RequestContext(request))
def detail(request, pk, slug=None, template='contacts/company/detail.html'):
"""Detail of a company.
    :param template: A custom template.
"""
try:
company = Company.objects.get(pk__iexact=pk)
except Company.DoesNotExist:
raise Http404
kwvars = {
'object': company,
}
return render_to_response(template, kwvars, RequestContext(request))
def create(request, template='contacts/company/create.html'):
"""Create a company.
:param template: A custom template.
"""
user = request.user
if not user.has_perm('add_company'):
return HttpResponseForbidden()
if request.method == 'POST':
company_form = CompanyCreateForm(request.POST)
if company_form.is_valid():
c = company_form.save(commit=False)
# TODO Make sure that the slug isn't already in the database
if c.nickname:
c.slug = slugify(c.nickname)
else:
c.slug = slugify(c.name)
c.save()
return HttpResponseRedirect(c.get_absolute_url())
        # an invalid form falls through and is re-rendered with its errors
    else:
        company_form = CompanyCreateForm()
    kwvars = {
        'form': company_form
    }
return render_to_response(template, kwvars, RequestContext(request))
def update(request, pk, slug=None, template='contacts/company/update.html'):
"""Update a company.
:param template: A custom template.
"""
user = request.user
if not user.has_perm('change_company'):
return HttpResponseForbidden()
try:
company = Company.objects.get(pk__iexact=pk)
except Company.DoesNotExist:
raise Http404
form = CompanyUpdateForm(instance=company)
phone_formset = PhoneNumberFormSet(instance=company)
email_formset = EmailAddressFormSet(instance=company)
im_formset = InstantMessengerFormSet(instance=company)
website_formset = WebSiteFormSet(instance=company)
address_formset = StreetAddressFormSet(instance=company)
special_date_formset = SpecialDateFormSet(instance=company)
if request.method == 'POST':
form = CompanyUpdateForm(request.POST, instance=company)
phone_formset = PhoneNumberFormSet(request.POST, instance=company)
email_formset = EmailAddressFormSet(request.POST, instance=company)
im_formset = InstantMessengerFormSet(request.POST, instance=company)
website_formset = WebSiteFormSet(request.POST, instance=company)
address_formset = StreetAddressFormSet(request.POST, instance=company)
special_date_formset = SpecialDateFormSet(request.POST, instance=company)
if form.is_valid() and phone_formset.is_valid() and \
email_formset.is_valid() and im_formset.is_valid() and \
website_formset.is_valid() and address_formset.is_valid():
form.save()
phone_formset.save()
email_formset.save()
im_formset.save()
website_formset.save()
address_formset.save()
special_date_formset.save()
return HttpResponseRedirect(company.get_absolute_url())
kwvars = {
'form': form,
'phone_formset': phone_formset,
'email_formset': email_formset,
'im_formset': im_formset,
'website_formset': website_formset,
'address_formset': address_formset,
'special_date_formset': special_date_formset,
'object': company,
}
return render_to_response(template, kwvars, RequestContext(request))
def delete(request, pk, slug=None, template='contacts/company/delete.html'):
"""Update a company.
:param template: A custom template.
"""
user = request.user
if not user.has_perm('delete_company'):
return HttpResponseForbidden()
try:
company = Company.objects.get(pk__iexact=pk)
except Company.DoesNotExist:
raise Http404
if request.method == 'POST':
new_data = request.POST.copy()
if new_data['delete_company'] == 'Yes':
company.delete()
return HttpResponseRedirect(reverse('contacts_company_list'))
else:
return HttpResponseRedirect(company.get_absolute_url())
kwvars = {
'object': company,
}
return render_to_response(template, kwvars, RequestContext(request))
|
{
"content_hash": "5e4b9421ef6295b6c360421d374d85f6",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 187,
"avg_line_length": 33.94444444444444,
"alnum_prop": 0.6641571194762684,
"repo_name": "weijia/django-contacts",
"id": "b86e0572f8c1b370cadfbfc5f10e7b08c0c2a4e5",
"size": "6110",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/contacts/views/company.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "186151"
},
{
"name": "Shell",
"bytes": "2977"
}
],
"symlink_target": ""
}
|
#----------------------------------------------------------------------
# Copyright (c) 2016 Inria/iMinds by Arthur Garnier
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
import subprocess
import re
import os
import uuid
import threading
import sys
import json
import random
import tempfile
import hashlib
import zipfile
import shutil
import time
import logging
import Pyro4
from urllib2 import urlopen, URLError, HTTPError
locked_port = list()
lock = threading.Lock()
building = dict()
@Pyro4.expose
class DockerManager(object):
def __init__(self,
default_image="jessie_gcf_ssh",
default_image_dockerfile_dir=os.path.dirname(os.path.realpath(__file__))):
self.default_image = default_image
self.default_image_dockerfile_dir = default_image_dockerfile_dir
#Return the number of running containers
def getRunningContainerCount(self):
cmd = "docker ps | grep -v '^CONTAINER' | wc -l"
output = subprocess.check_output(['bash', '-c', cmd])
output=output.strip().decode('utf-8')
return int(output)
#Return the next port available on the host using netstat
#starting_port : From which port start to check
def getNextPort(self, starting_port):
_locked_port = list(locked_port)
cmd = "netstat -ant 2>/dev/null | awk '{print $4}' | grep -o \":[0-9]\\+$\" | grep -o [0-9]* | sort -n | uniq"
output = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
expected = starting_port
busy = list()
for line in output.split('\n'):
if int(line) >= starting_port:
busy.append(int(line))
while expected in busy or expected in _locked_port:
expected+=1
return expected
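    # e.g. with 8000 and 8001 already listening, getNextPort(8000) returns 8002
    # (or higher if 8002 is also in locked_port)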
#Get the next port with a Lock (avoid concurrency issue) and add it to the locked_port array
def reserveNextPort(self, starting_port):
lock.acquire()
port = self.getNextPort(starting_port)
locked_port.append(port)
lock.release()
return port
#Start a new container
#container_id : Specific name to give to the container
#sliver_type : Kind of container (limited to 100M memory for example)
#ssh_port : Port to bind to port 22
#mac_address : Defined mac address of the container
#image : Specific image to install (See processImage() documentation)
def startNew(self, container_id=None, sliver_type=None, ssh_port=None, mac_address=None, image=None):
if ssh_port is None:
ssh_port = self.reserveNextPort()
        uid = str(uuid.uuid4()) if container_id is None else container_id
imageName = self.default_image
if image is not None:
imageName=self.processImage(image)
if not re.match(r'[a-fA-F0-9]{40}', imageName) and image.split("::")[1]!=imageName: #An error occured during processImage
raise Exception("Invalid image name: %s" % imageName)
if sliver_type=="docker-container":
cmd = "docker run -d --mac-address "+mac_address+" --name "+uid+" -p " + str(ssh_port) + ":22 -P -t "+imageName+" 2>&1"
elif sliver_type == "docker-container_100M":
cmd = "docker run -d --mac-address "+mac_address+" --name "+uid+" -p " + str(ssh_port) + ":22 -m 100M -P -t "+imageName+" 2>&1"
elif sliver_type == "docker-container-with-tunnel":
cmd = "docker run -d --mac-address "+mac_address+" --name "+uid+" -p " + str(ssh_port) + ":22 --cap-add=NET_ADMIN --device=/dev/net/tun -P -t "+imageName+" 2>&1"
else:
raise Exception("Internal error: no known sliver_type chosen: %s" % sliver_type)
try:
subprocess.check_output(['bash', '-c', cmd]).decode('utf-8').strip()
except Exception as e:
if "Unable to find image" not in e.output:
return e.output
#This should only be reached if the default_image itself is not yet built.
# So we try building it, then retry the command, and fail if that still fails
build = "docker build -t "+self.default_image+" " + self.default_image_dockerfile_dir
try:
if building.get(imageName, None) is None:
building[imageName] = threading.Lock()
building[imageName].acquire() #Don't run multiple build at the same time
subprocess.check_output(['bash', '-c', build]).decode('utf-8').strip()
subprocess.check_output(['bash', '-c', cmd]).decode('utf-8').strip()
except subprocess.CalledProcessError, e:
return e.output
finally:
building[imageName].release()
if ssh_port in locked_port:
i=0
            while not self.isContainerUp(ssh_port): #Wait for the container to listen before releasing the port
i+=1
time.sleep(1)
if i==45:
return "Container not up after 45 seconds. Something went wrong"
locked_port.remove(ssh_port)
return True
def restartContainer(self, container_id):
cmd = "docker restart " + str(container_id) + " 2>&1"
try:
subprocess.check_output(['bash', '-c', cmd]).decode('utf-8').strip()
return True
except Exception as e:
return False
    #Remove a port from the locked ports list
    #This has to be done if the container failed to start
def releasePort(self, port):
if port in locked_port:
locked_port.remove(port)
def stopContainer(self, container_id):
cmd = "docker stop " + str(container_id)
try:
subprocess.check_output(['bash', '-c', cmd]).decode('utf-8').strip()
return True
except Exception as e:
return False
def removeContainer(self, container_id):
cmd = "docker rm -f " + str(container_id)
try:
subprocess.check_output(['bash', '-c', cmd]).decode('utf-8').strip()
return True
except Exception as e:
return e.output
#Check if a container is up using netstat
    #In fact, check if the port is listening
def isContainerUp(self, port):
cmd = "netstat -ant 2>/dev/null | awk '{print $4}' | grep '.*:"+str(port)+"$'"
try:
out=subprocess.check_output(['bash', '-c', cmd]).decode('utf-8').strip()
return True
except subprocess.CalledProcessError:
return False
def resetContainer(self, container_id):
self.stopContainer(container_id)
self.removeContainer(container_id)
self.startNew(container_id)
#Setup a user in the container
#ssh_keys : Array of public ssh keys to allow (authorized_keys file)
def setupUser(self, container_id, username, ssh_keys):
try:
cmd_create_user = "docker exec " + container_id + " sh -c 'grep \'^" + username + ":\' /etc/passwd ; if [ $? -ne 0 ] ; then useradd -m -d /home/" + username + " " + username + " && mkdir -p /home/" + username + "/.ssh ; fi' 2>&1"
out = subprocess.check_output(['bash', '-c', cmd_create_user])
cmd_add_key = "docker exec " + container_id + " sh -c \"echo '' > /home/" + username + "/.ssh/authorized_keys\" 2>&1"
out = subprocess.check_output(['bash', '-c', cmd_add_key])
for key in ssh_keys:
cmd_add_key = "docker exec " + container_id + " sh -c \"echo '" + key + "' >> /home/" + username + "/.ssh/authorized_keys\" 2>&1"
out = subprocess.check_output(['bash', '-c', cmd_add_key])
cmd_set_rights = "docker exec " + container_id + " sh -c 'chown -R " + username + ": /home/" + username + " && chmod 700 /home/" + username + "/.ssh && chmod 644 /home/" + username + "/.ssh/authorized_keys' 2>&1"
out = subprocess.check_output(['bash', '-c', cmd_set_rights])
return True
except subprocess.CalledProcessError, e:
return e.output
def setupContainer(self, container_id, user_keys_dict):
for username, ssh_keys in user_keys_dict.items():
res = self.setupUser(container_id, username, ssh_keys)
if res is not True:
return res
return True
#Get the ssh_port used by a specific container
def getPort(self, container_id):
cmd = "docker ps --format {{.Names}}//{{.Ports}} --no-trunc | grep " + container_id
output = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
m = re.search(':([0-9]*)->', output)
if m!=None:
return int(m.group(1))
else:
return None
    #Get the list of users with an account in the container (a home dir and an authorized_keys file)
def getUsers(self, container_id):
cmd = "docker exec " + container_id + " find /home -name \"authorized_keys\" | grep \"/home/.*/.ssh/authorized_keys\" | cut -d'/' -f 3"
out = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
return filter(None, out.split('\n')) #Remove empty elements
#Check if docker is installed and accessible by the AM
def checkDocker(self):
cmd = "docker ps"
try:
subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
except Exception, e:
sys.stderr.write('Docker is not installed OR this user is not in the docker group OR the docker daemon is not started\n')
exit(1)
#Get IPv6 of a container
def getIpV6(self, container_id):
cmd = "docker inspect " + container_id
output = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
output = json.loads(output)
return output[0]['NetworkSettings']['GlobalIPv6Address']
    #Predict the IPv6 address from the IPv6 prefix and the MAC address
def computeIpV6(self, prefix, mac):
ipv6 = prefix
parts=mac.split(':')
for i in range(0, len(parts), 2):
ipv6 += str(parts[i])+str(parts[i+1])+":"
ipv6 = ipv6[:-1]
return ipv6
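    # e.g. computeIpV6("fd00::", "02:42:ac:11:00:01") -> "fd00::0242:ac11:0001"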
#Returns a random Mac Address with the same prefix as Docker (02:42:ac:11)
def randomMacAddress(self):
mac = [0x02, 0x42, 0xac, 0x11, random.randint(0x00, 0xff), random.randint(0x00, 0xff)]
return ':'.join(map(lambda x: "%02x" % x, mac))
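    # e.g. "02:42:ac:11:5e:9a" -- only the last two octets are randomized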
#Delete a docker built image
def deleteImage(self, name):
if name.startswith("urn") and len(name.split("::"))==2:
name = hashlib.sha1(name).hexdigest()
cmd = "docker rmi -f "+name
subprocess.check_output(['bash', '-c', cmd])
#Build a docker hub image with an OpenSSH server
def buildSshImage(self, name):
try:
cmd = "mktemp"
tmpfile = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
cmd = "echo 'FROM " +name+"' > "+tmpfile
out = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
cmd = "cat "+os.path.dirname(os.path.abspath(__file__))+"/Dockerfile_template >> "+tmpfile
out = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
cmd = "docker build -t "+name+" --force-rm -f "+tmpfile+" /tmp 2>&1"
out = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
cmd = "rm -f "+tmpfile
out = subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
return True
except subprocess.CalledProcessError, e:
return e.output
#Build the image given in parameter
#image : could be URL to a DockerFile or a zip or just the name from Docker Hub (eg debian:jessie). Always starts with "foo::" (foo is usually the slice urn) to make the name "private"
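    # e.g. (hypothetical) "urn:publicid:IDN+ex+slice+s1::debian:jessie" rebuilds the
    # Docker Hub image "debian:jessie" with sshd, while a URL after the "::" is
    # downloaded and built under a sha1-hashed private name instead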
def processImage(self, image):
fullName = image.split("::")
user = fullName[0]
imageName = fullName[1]
if imageName.startswith("http://") or imageName.startswith("https://"):
image = hashlib.sha1(image).hexdigest() #Hash the "URN::imagename" to avoid issue with docker
cmd = "docker images --no-trunc --format {{.Repository}} | grep -x "+image
try:
subprocess.check_output(['bash', '-c', cmd]).strip().decode('utf-8')
except subprocess.CalledProcessError: #Image doesn't exists
out = self.buildExternalImage(imageName, image)
if out is not True:
return out
return image
else: #Docker hub image
#Check if image exists
cmd = "docker images --no-trunc --format {{.Repository}} | grep -x "+imageName
try:
subprocess.check_output(['bash', '-c', cmd])
except subprocess.CalledProcessError:
if building.get(image, None) is None:
building[image] = threading.Lock()
building[image].acquire()
out = self.buildSshImage(imageName)
if out is not True:
return out
building[image].release()
return imageName
#Build image from a URL and set the name "fullname" in docker
def buildExternalImage(self, url, fullName):
tmpdir = tempfile.mkdtemp()
self.dlfile(url, tmpdir)
if os.path.basename(url) == "Dockerfile": #If the target URL is a simple DockerFile
pass
elif os.path.basename(url).split(".")[-1] == "zip": #A zip containing /Dockerfile or /folder/Dockerfile (and other things)
zipfile.ZipFile(tmpdir+"/"+os.path.basename(url)).extractall(tmpdir)
if len(os.listdir(tmpdir))==2 and "Dockerfile" not in os.listdir(tmpdir): #If the zip contains a subfolder
cmd = "mv "+tmpdir+"/*/* "+tmpdir
subprocess.check_output(['bash', '-c', cmd])
else:
shutil.rmtree(tmpdir)
return "Error : Unsupported URL"
#Fix CMD to start SSH daemon and the original command
cmd = ""
for line in open(tmpdir+"/Dockerfile"):
if line.startswith("CMD "):
cmd = line.strip()[4:]
if len(cmd) > 0:
if cmd.startswith("[") and cmd.endswith("]"): #if CMD looks like "CMD ["nginx", "-g"]"
cmd = cmd[1:-1]
index = 0
shell_cmd=""
while index != -1:
next_index = cmd.find("\"", index+1)
shell_cmd +=" "+cmd[index:next_index+1]
index=cmd.find("\"", next_index+1)
new_cmd = "CMD sh -c '"+shell_cmd+" & /usr/sbin/sshd -D'"
else: #if CMD looks like "CMD nginx -g"
new_cmd = "CMD sh -c '"+cmd+" & /usr/sbin/sshd -D'"
else: #If no CMD in the Dockerfile
new_cmd = "CMD [\"/usr/sbin/sshd\", \"-D\"]"
with open(tmpdir+"/Dockerfile", 'a') as fo:
with open(os.path.dirname(os.path.abspath(__file__))+"/Dockerfile_template", 'r') as fi:
fo.write(fi.read())
cmd = "sed -i 's/CMD.*//g' "+tmpdir+"/Dockerfile"
subprocess.check_output(['bash', '-c', cmd])
with open(tmpdir+"/Dockerfile", 'a') as fo:
fo.write(new_cmd)
cmd = "docker build -t "+fullName+" --force-rm -f "+tmpdir+"/Dockerfile "+tmpdir+" 2>&1"
try:
subprocess.check_output(['bash', '-c', cmd])
except subprocess.CalledProcessError, e:
shutil.rmtree(tmpdir)
return e.output
shutil.rmtree(tmpdir)
return True
#Download a file to the given path
def dlfile(self, url, dest):
try:
f = urlopen(url)
# Open local file for writing
with open(dest+"/"+os.path.basename(url), "wb") as local_file:
local_file.write(f.read())
#handle errors
        except HTTPError, e:
            logging.getLogger('gcf.am3').error("HTTP Error: %s %s", e.code, url)
        except URLError, e:
            logging.getLogger('gcf.am3').error("URL Error: %s %s", e.reason, url)
    #Download the file at url into install_path inside the container, extracting it if it is a tar.gz archive
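    # e.g. installCommand("c1", "http://example.org/pkg.tar.gz", "/opt/pkg")
    # fetches the archive into /opt/pkg inside container c1 and untars it there
    # (example.org URL is hypothetical)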
def installCommand(self, container_id, url, install_path):
cmd_docker = "docker exec " + container_id + " "
filename = os.path.basename(url)
ext = os.path.basename(url).split(".")[-1]
cmd = cmd_docker+"mkdir -p "+install_path+" 2>&1"
try:
subprocess.check_output(['bash', '-c', cmd])
cmd = cmd_docker+"curl -fsS -o "+install_path+"/"+filename+" "+url+" 2>&1"
subprocess.check_output(['bash', '-c', cmd])
if filename.split(".")[-1] == "gz" and filename.split(".")[-2] == "tar": # tar.gz file
cmd = cmd_docker+"tar xzf "+install_path+"/"+filename+" -C "+install_path+" 2>&1"
subprocess.check_output(['bash', '-c', cmd])
except subprocess.CalledProcessError as e:
return e.output.strip()
return True
#Executes the command cmd with the shell 'shell' in the container id
#Creates 3 files in /tmp of the container : startup-[0-9].(status|txt|sh)
#.sh contains the command executed
#.status contains the return status of the command
    #.txt contains the output
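    # e.g. executeCommand("c1", "sh", "echo hi") leaves /tmp/startup-0.sh,
    # /tmp/startup-0.txt (containing "hi") and /tmp/startup-0.status ("0") in c1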
def executeCommand(self, container_id, shell, cmd):
cmd_docker = "docker exec " + container_id + " "
log_dir = "/tmp/"
if shell not in ['sh', 'bash']:
cmd = cmd_docker+"sh -c 'echo \"Invalid shell\" >> /tmp/execute.log '"
subprocess.check_output(['bash', '-c', cmd])
return
try:
list_startup = cmd_docker+"sh -c 'ls "+log_dir+" | grep startup-.*.sh | grep -o [0-9]*'"
            out = subprocess.check_output(['bash', '-c', list_startup]).strip().split('\n')
            next_nb = max(int(n) for n in out) + 1  # numeric max; max() on the strings would compare lexicographically
except subprocess.CalledProcessError as e:
next_nb = 0
tmp = tempfile.mkstemp()[1]
with open(tmp, 'w') as local:
local.write(cmd)
cmd = "docker cp " + tmp +" " + container_id + ":/" + log_dir + "startup-" + str(next_nb) + ".sh"
try:
subprocess.check_output(['bash', '-c', cmd])
os.remove(tmp)
cmd = cmd_docker+"sudo sh -c '"+shell+" "+log_dir+"startup-"+str(next_nb)+".sh 2>&1 > "+log_dir+"startup-"+str(next_nb)+".txt'"
subprocess.check_output(['bash', '-c', cmd])
cmd = cmd_docker+"sh -c 'echo \"0\" > "+log_dir+"startup-"+str(next_nb)+".status'"
subprocess.check_output(['bash', '-c', cmd])
except subprocess.CalledProcessError as e:
cmd = cmd_docker+"sh -c 'echo \""+str(e.returncode)+"\" > "+log_dir+"startup-"+str(next_nb)+".status'"
subprocess.check_output(['bash', '-c', cmd])
|
{
"content_hash": "1ef292b14bea932e30652cc33767f27e",
"timestamp": "",
"source": "github",
"line_count": 414,
"max_line_length": 241,
"avg_line_length": 48.15458937198068,
"alnum_prop": 0.5755417335473515,
"repo_name": "wvdemeer/docker-am",
"id": "bd5714e5cbbd691f5c2662c6ed3c32db9304fc49",
"size": "19936",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gcf_docker_plugin/gcf_to_docker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "538"
},
{
"name": "Dockerfile",
"bytes": "614"
},
{
"name": "HTML",
"bytes": "6612"
},
{
"name": "JavaScript",
"bytes": "5267"
},
{
"name": "Puppet",
"bytes": "8219"
},
{
"name": "Python",
"bytes": "123805"
},
{
"name": "Shell",
"bytes": "2478"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup, find_packages
VERSION = __import__("nashvegas").__version__
def read(*path):
return open(os.path.join(os.path.abspath(os.path.dirname(__file__)), *path)).read()
tests_require = [
'nose>=1.1.2',
'django-nose>=0.1.3',
]
setup(
name="nashvegas",
version=VERSION,
description="nashvegas is a management command for managing Django database migrations",
long_description=read("README.rst"),
author="Patrick Altman",
author_email="paltman@gmail.com",
maintainer="Patrick Altman",
maintainer_email="paltman@gmail.com",
url="http://github.com/paltman/nashvegas/",
packages=find_packages(),
tests_require=tests_require,
test_suite='runtests.runtests',
zip_safe=False,
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Framework :: Django",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
],
)
|
{
"content_hash": "e0bb15159ea0802ad808d6dd83be9893",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 92,
"avg_line_length": 27.853658536585368,
"alnum_prop": 0.637478108581436,
"repo_name": "iivvoo/nashvegas",
"id": "b4947cd8f61d45cf65ad47b55410b862553af2a5",
"size": "1142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41563"
}
],
"symlink_target": ""
}
|
""" Example 8: Interfacing with Verilog.
While there is much more about PyRTL design to discuss, at some point somebody
might ask you to do something with your code other than have it print
pretty things out to the terminal. We provide import from and export to
Verilog of designs, export of waveforms to VCD, and a set of transforms
that make doing netlist-level transforms and analysis directly in pyrtl easy.
"""
import random
import io
import pyrtl
# ---- Importing From Verilog ----
# Sometimes it is useful to pull in components written in Verilog to be used
# as subcomponents of PyRTL designs or to be subject to analysis written over
# the PyRTL core. One standard format supported by PyRTL is "blif" format:
# https://www.ece.cmu.edu/~ee760/760docs/blif.pdf
# Many tools support outputting hardware designs to this format, including the
# free open source project "Yosys". Blif files can then be imported either
# as a string or directly from a file name by the function input_from_blif.
# Here is a simple example of a 1 bit full adder imported and then simulated
# from this blif format.
full_adder_blif = """
# Generated by Yosys 0.3.0+ (git sha1 7e758d5, clang 3.4-1ubuntu3 -fPIC -Os)
.model full_adder
.inputs x y cin
.outputs sum cout
.names $false
.names $true
1
.names y $not$FA.v:12$3_Y
0 1
.names x $not$FA.v:11$1_Y
0 1
.names cin $not$FA.v:15$6_Y
0 1
.names ind3 ind4 sum
1- 1
-1 1
.names $not$FA.v:15$6_Y ind2 ind3
11 1
.names x $not$FA.v:12$3_Y ind1
11 1
.names ind2 $not$FA.v:16$8_Y
0 1
.names cin $not$FA.v:16$8_Y ind4
11 1
.names x y $and$FA.v:19$11_Y
11 1
.names ind0 ind1 ind2
1- 1
-1 1
.names cin ind2 $and$FA.v:19$12_Y
11 1
.names $and$FA.v:19$11_Y $and$FA.v:19$12_Y cout
1- 1
-1 1
.names $not$FA.v:11$1_Y y ind0
11 1
.end
"""
pyrtl.input_from_blif(full_adder_blif)
# have to find the actual wire vectors generated from the names in the blif file
x, y, cin = [pyrtl.working_block().get_wirevector_by_name(s) for s in ['x', 'y', 'cin']]
io_vectors = pyrtl.working_block().wirevector_subset((pyrtl.Input, pyrtl.Output))
# we are only going to trace the input and output vectors for clarity
sim_trace = pyrtl.SimulationTrace(wirevector_subset=io_vectors)
# now simulate the logic with some random inputs
sim = pyrtl.Simulation(tracer=sim_trace)
for i in range(15):
# here we actually generate random booleans for the inputs
sim.step({
x: random.choice([0, 1]),
y: random.choice([0, 1]),
cin: random.choice([0, 1])
})
sim_trace.render_trace(symbol_len=5, segment_size=5)
# ---- Exporting to Verilog ----
# However, not only do we want to have a method to import from Verilog, we also
# want a way to export it back out to Verilog as well. To demonstrate PyRTL's
# ability to export in Verilog, we will create a sample 3-bit counter. However
# unlike the example in example2, we extend it to be synchronously resetting.
pyrtl.reset_working_block()
zero = pyrtl.Input(1, 'zero')
counter_output = pyrtl.Output(3, 'counter_output')
counter = pyrtl.Register(3, 'counter')
counter.next <<= pyrtl.mux(zero, counter + 1, 0)
counter_output <<= counter
# The counter gets 0 in the next cycle if the "zero" signal goes high, otherwise just
# counter + 1. Note that both "0" and "1" are bit extended to the proper length and
# here we are making use of that native add operation. Let's dump this bad boy out
# to a verilog file and see what is looks like (here we are using StringIO just to
# print it to a string for demo purposes, most likely you will want to pass a normal
# open file).
print("--- PyRTL Representation ---")
print(pyrtl.working_block())
print()
print("--- Verilog for the Counter ---")
with io.StringIO() as vfile:
pyrtl.output_to_verilog(vfile)
print(vfile.getvalue())
print("--- Simulation Results ---")
sim_trace = pyrtl.SimulationTrace([counter_output, zero])
sim = pyrtl.Simulation(tracer=sim_trace)
for cycle in range(15):
sim.step({zero: random.choice([0, 0, 0, 1])})
sim_trace.render_trace()
# We already did the "hard" work of generating a test input for this simulation so
# we might want to reuse that work when we take this design through a verilog toolchain.
# The function output_verilog_testbench grabs the inputs used in the simulation trace
# and sets them up in a standard Verilog testbench.
print("--- Verilog for the TestBench ---")
with io.StringIO() as tbfile:
pyrtl.output_verilog_testbench(tbfile, sim_trace)
print(tbfile.getvalue())
# Now let's talk about transformations of the hardware block. Many times when you are
# doing some hardware-level analysis you might wish to ignore higher level things like
# multi-bit wirevectors, adds, concatenation, etc. and just think about wires and basic
# gates. PyRTL supports "lowering" of designs into this more restricted set of functionality
# through the function "synthesize". Once we lower a design to this form we can then apply
# basic optimizations like constant propagation and dead wire elimination as well. By
# printing it out to verilog we can see exactly how the design changed.
print("--- Optimized Single-bit Verilog for the Counter ---")
pyrtl.synthesize()
pyrtl.optimize()
with io.StringIO() as vfile:
pyrtl.output_to_verilog(vfile)
print(vfile.getvalue())
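# Side note: to write to a real file instead of StringIO, the equivalent
# sketch is simply
#   with open('counter.v', 'w') as vfile:
#       pyrtl.output_to_verilog(vfile)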
|
{
"content_hash": "eb5ef6c358179d473e0e27c420a16af0",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 93,
"avg_line_length": 35.53691275167785,
"alnum_prop": 0.7246458923512747,
"repo_name": "deekshadangwal/PyRTL",
"id": "e9ec2a491f90f38e781cb577faa5061ba59deb04",
"size": "5295",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "examples/example8-verilog.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "347120"
},
{
"name": "Shell",
"bytes": "1554"
}
],
"symlink_target": ""
}
|
import sys
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.codec.der import encoder as der_encoder
from pyasn1_modules import pem
from pyasn1_modules import rfc5280
from pyasn1_modules import rfc8103
try:
import unittest2 as unittest
except ImportError:
import unittest
class CAEADChaCha20Poly1305TestCase(unittest.TestCase):
alg_id_pem_text = "MBsGCyqGSIb3DQEJEAMSBAzK/rq++s7brd7K+Ig="
def setUp(self):
self.asn1Spec = rfc5280.AlgorithmIdentifier()
def testDerCodec(self):
substrate = pem.readBase64fromText(self.alg_id_pem_text)
asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
assert not rest
assert asn1Object.prettyPrint()
assert asn1Object[0] == rfc8103.id_alg_AEADChaCha20Poly1305
param, rest = der_decoder.decode(asn1Object[1], rfc8103.AEADChaCha20Poly1305Nonce())
assert not rest
assert param.prettyPrint()
assert param == rfc8103.AEADChaCha20Poly1305Nonce(value='\xca\xfe\xba\xbe\xfa\xce\xdb\xad\xde\xca\xf8\x88')
assert der_encoder.encode(asn1Object) == substrate
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
import sys
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(not result.wasSuccessful())
|
{
"content_hash": "d73def9f03870f46c0406b2bb86f56c7",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 115,
"avg_line_length": 31.46511627906977,
"alnum_prop": 0.7228381374722838,
"repo_name": "kawamon/hue",
"id": "921b9db6eb5a5a51e235100c1526b36ab5ed3f1c",
"size": "1527",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/pyasn1-modules-0.2.6/tests/test_rfc8103.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
}
|
import itertools
import functools
import operator
import warnings
from distutils.version import LooseVersion
import numpy as np
from pandas import compat
from pandas._libs import tslib, algos, lib
from pandas.core.dtypes.common import (
_get_dtype,
is_float, is_scalar,
is_integer, is_complex, is_float_dtype,
is_complex_dtype, is_integer_dtype,
is_bool_dtype, is_object_dtype,
is_numeric_dtype,
is_datetime64_dtype, is_timedelta64_dtype,
is_datetime_or_timedelta_dtype,
is_int_or_datetime_dtype, is_any_int_dtype)
from pandas.core.dtypes.cast import _int64_max, maybe_upcast_putmask
from pandas.core.dtypes.missing import isna, notna
from pandas.core.config import get_option
from pandas.core.common import _values_from_object
_BOTTLENECK_INSTALLED = False
_MIN_BOTTLENECK_VERSION = '1.0.0'
try:
import bottleneck as bn
ver = bn.__version__
_BOTTLENECK_INSTALLED = (LooseVersion(ver) >=
LooseVersion(_MIN_BOTTLENECK_VERSION))
if not _BOTTLENECK_INSTALLED:
warnings.warn(
"The installed version of bottleneck {ver} is not supported "
"in pandas and will be not be used\nThe minimum supported "
"version is {min_ver}\n".format(
ver=ver, min_ver=_MIN_BOTTLENECK_VERSION), UserWarning)
except ImportError: # pragma: no cover
pass
_USE_BOTTLENECK = False
def set_use_bottleneck(v=True):
# set/unset to use bottleneck
global _USE_BOTTLENECK
if _BOTTLENECK_INSTALLED:
_USE_BOTTLENECK = v
set_use_bottleneck(get_option('compute.use_bottleneck'))
class disallow(object):
def __init__(self, *dtypes):
super(disallow, self).__init__()
self.dtypes = tuple(np.dtype(dtype).type for dtype in dtypes)
def check(self, obj):
return hasattr(obj, 'dtype') and issubclass(obj.dtype.type,
self.dtypes)
def __call__(self, f):
@functools.wraps(f)
def _f(*args, **kwargs):
obj_iter = itertools.chain(args, compat.itervalues(kwargs))
if any(self.check(obj) for obj in obj_iter):
raise TypeError('reduction operation {0!r} not allowed for '
'this dtype'.format(
f.__name__.replace('nan', '')))
try:
with np.errstate(invalid='ignore'):
return f(*args, **kwargs)
except ValueError as e:
# we want to transform an object array
# ValueError message to the more typical TypeError
# e.g. this is normally a disallowed function on
# object arrays that contain strings
if is_object_dtype(args[0]):
raise TypeError(e)
raise
return _f
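# Usage sketch: the decorators applied below, e.g. @disallow('M8'), make a
# reduction raise TypeError when handed datetime64 data instead of silently
# computing on the underlying integers.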
class bottleneck_switch(object):
def __init__(self, zero_value=None, **kwargs):
self.zero_value = zero_value
self.kwargs = kwargs
def __call__(self, alt):
bn_name = alt.__name__
try:
bn_func = getattr(bn, bn_name)
except (AttributeError, NameError): # pragma: no cover
bn_func = None
@functools.wraps(alt)
def f(values, axis=None, skipna=True, **kwds):
if len(self.kwargs) > 0:
for k, v in compat.iteritems(self.kwargs):
if k not in kwds:
kwds[k] = v
try:
if self.zero_value is not None and values.size == 0:
if values.ndim == 1:
# wrap the 0's if needed
if is_timedelta64_dtype(values):
return lib.Timedelta(0)
return 0
else:
result_shape = (values.shape[:axis] +
values.shape[axis + 1:])
result = np.empty(result_shape)
result.fill(0)
return result
if (_USE_BOTTLENECK and skipna and
_bn_ok_dtype(values.dtype, bn_name)):
result = bn_func(values, axis=axis, **kwds)
# prefer to treat inf/-inf as NA, but must compute the func
# twice :(
if _has_infs(result):
result = alt(values, axis=axis, skipna=skipna, **kwds)
else:
result = alt(values, axis=axis, skipna=skipna, **kwds)
except Exception:
try:
result = alt(values, axis=axis, skipna=skipna, **kwds)
except ValueError as e:
# we want to transform an object array
# ValueError message to the more typical TypeError
# e.g. this is normally a disallowed function on
# object arrays that contain strings
if is_object_dtype(values):
raise TypeError(e)
raise
return result
return f
def _bn_ok_dtype(dt, name):
# Bottleneck chokes on datetime64
if (not is_object_dtype(dt) and not is_datetime_or_timedelta_dtype(dt)):
# bottleneck does not properly upcast during the sum
# so can overflow
if name == 'nansum':
if dt.itemsize < 8:
return False
return True
return False
def _has_infs(result):
if isinstance(result, np.ndarray):
if result.dtype == 'f8':
return lib.has_infs_f8(result.ravel())
elif result.dtype == 'f4':
return lib.has_infs_f4(result.ravel())
try:
return np.isinf(result).any()
except (TypeError, NotImplementedError):
# if it doesn't support infs, then it can't have infs
return False
def _get_fill_value(dtype, fill_value=None, fill_value_typ=None):
""" return the correct fill value for the dtype of the values """
if fill_value is not None:
return fill_value
if _na_ok_dtype(dtype):
if fill_value_typ is None:
return np.nan
else:
if fill_value_typ == '+inf':
return np.inf
else:
return -np.inf
else:
if fill_value_typ is None:
return tslib.iNaT
else:
if fill_value_typ == '+inf':
# need the max int here
return _int64_max
else:
return tslib.iNaT
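# Illustrative behavior: for a float dtype, fill_value_typ='+inf' yields np.inf,
# while for an int64/datetime-like dtype the same request yields _int64_max,
# since those dtypes cannot represent inf.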
def _get_values(values, skipna, fill_value=None, fill_value_typ=None,
isfinite=False, copy=True):
""" utility to get the values view, mask, dtype
if necessary copy and mask using the specified fill_value
copy = True will force the copy
"""
values = _values_from_object(values)
if isfinite:
mask = _isfinite(values)
else:
mask = isna(values)
dtype = values.dtype
dtype_ok = _na_ok_dtype(dtype)
# get our fill value (in case we need to provide an alternative
# dtype for it)
fill_value = _get_fill_value(dtype, fill_value=fill_value,
fill_value_typ=fill_value_typ)
if skipna:
if copy:
values = values.copy()
if dtype_ok:
np.putmask(values, mask, fill_value)
# promote if needed
else:
values, changed = maybe_upcast_putmask(values, mask, fill_value)
elif copy:
values = values.copy()
values = _view_if_needed(values)
# return a platform independent precision dtype
dtype_max = dtype
if is_integer_dtype(dtype) or is_bool_dtype(dtype):
dtype_max = np.int64
elif is_float_dtype(dtype):
dtype_max = np.float64
return values, mask, dtype, dtype_max
def _isfinite(values):
if is_datetime_or_timedelta_dtype(values):
return isna(values)
if (is_complex_dtype(values) or is_float_dtype(values) or
is_integer_dtype(values) or is_bool_dtype(values)):
return ~np.isfinite(values)
return ~np.isfinite(values.astype('float64'))
def _na_ok_dtype(dtype):
return not is_int_or_datetime_dtype(dtype)
def _view_if_needed(values):
if is_datetime_or_timedelta_dtype(values):
return values.view(np.int64)
return values
def _wrap_results(result, dtype):
""" wrap our results if needed """
if is_datetime64_dtype(dtype):
if not isinstance(result, np.ndarray):
result = lib.Timestamp(result)
else:
result = result.view(dtype)
elif is_timedelta64_dtype(dtype):
if not isinstance(result, np.ndarray):
# raise if we have a timedelta64[ns] which is too large
if np.fabs(result) > _int64_max:
raise ValueError("overflow in timedelta operation")
result = lib.Timedelta(result, unit='ns')
else:
result = result.astype('i8').view(dtype)
return result
def nanany(values, axis=None, skipna=True):
values, mask, dtype, _ = _get_values(values, skipna, False, copy=skipna)
return values.any(axis)
def nanall(values, axis=None, skipna=True):
values, mask, dtype, _ = _get_values(values, skipna, True, copy=skipna)
return values.all(axis)
@disallow('M8')
@bottleneck_switch(zero_value=0)
def nansum(values, axis=None, skipna=True):
values, mask, dtype, dtype_max = _get_values(values, skipna, 0)
dtype_sum = dtype_max
if is_float_dtype(dtype):
dtype_sum = dtype
elif is_timedelta64_dtype(dtype):
dtype_sum = np.float64
the_sum = values.sum(axis, dtype=dtype_sum)
the_sum = _maybe_null_out(the_sum, axis, mask)
return _wrap_results(the_sum, dtype)
@disallow('M8')
@bottleneck_switch()
def nanmean(values, axis=None, skipna=True):
values, mask, dtype, dtype_max = _get_values(values, skipna, 0)
dtype_sum = dtype_max
dtype_count = np.float64
if is_integer_dtype(dtype) or is_timedelta64_dtype(dtype):
dtype_sum = np.float64
elif is_float_dtype(dtype):
dtype_sum = dtype
dtype_count = dtype
count = _get_counts(mask, axis, dtype=dtype_count)
the_sum = _ensure_numeric(values.sum(axis, dtype=dtype_sum))
if axis is not None and getattr(the_sum, 'ndim', False):
the_mean = the_sum / count
ct_mask = count == 0
if ct_mask.any():
the_mean[ct_mask] = np.nan
else:
the_mean = the_sum / count if count > 0 else np.nan
return _wrap_results(the_mean, dtype)
@disallow('M8')
@bottleneck_switch()
def nanmedian(values, axis=None, skipna=True):
values, mask, dtype, dtype_max = _get_values(values, skipna)
def get_median(x):
mask = notna(x)
if not skipna and not mask.all():
return np.nan
return algos.median(_values_from_object(x[mask]))
if not is_float_dtype(values):
values = values.astype('f8')
values[mask] = np.nan
if axis is None:
values = values.ravel()
notempty = values.size
# an array from a frame
if values.ndim > 1:
# there's a non-empty array to apply over otherwise numpy raises
if notempty:
return _wrap_results(
np.apply_along_axis(get_median, axis, values), dtype)
# must return the correct shape, but median is not defined for the
# empty set so return nans of shape "everything but the passed axis"
# since "axis" is where the reduction would occur if we had a nonempty
# array
shp = np.array(values.shape)
dims = np.arange(values.ndim)
ret = np.empty(shp[dims != axis])
ret.fill(np.nan)
return _wrap_results(ret, dtype)
# otherwise return a scalar value
return _wrap_results(get_median(values) if notempty else np.nan, dtype)
def _get_counts_nanvar(mask, axis, ddof, dtype=float):
dtype = _get_dtype(dtype)
count = _get_counts(mask, axis, dtype=dtype)
d = count - dtype.type(ddof)
# always return NaN, never inf
if is_scalar(count):
if count <= ddof:
count = np.nan
d = np.nan
else:
mask2 = count <= ddof
if mask2.any():
np.putmask(d, mask2, np.nan)
np.putmask(count, mask2, np.nan)
return count, d
@disallow('M8')
@bottleneck_switch(ddof=1)
def nanstd(values, axis=None, skipna=True, ddof=1):
result = np.sqrt(nanvar(values, axis=axis, skipna=skipna, ddof=ddof))
return _wrap_results(result, values.dtype)
@disallow('M8')
@bottleneck_switch(ddof=1)
def nanvar(values, axis=None, skipna=True, ddof=1):
values = _values_from_object(values)
dtype = values.dtype
mask = isna(values)
if is_any_int_dtype(values):
values = values.astype('f8')
values[mask] = np.nan
if is_float_dtype(values):
count, d = _get_counts_nanvar(mask, axis, ddof, values.dtype)
else:
count, d = _get_counts_nanvar(mask, axis, ddof)
if skipna:
values = values.copy()
np.putmask(values, mask, 0)
# xref GH10242
# Compute variance via two-pass algorithm, which is stable against
# cancellation errors and relatively accurate for small numbers of
# observations.
#
# See https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
avg = _ensure_numeric(values.sum(axis=axis, dtype=np.float64)) / count
if axis is not None:
avg = np.expand_dims(avg, axis)
sqr = _ensure_numeric((avg - values)**2)
np.putmask(sqr, mask, 0)
result = sqr.sum(axis=axis, dtype=np.float64) / d
# Return variance as np.float64 (the datatype used in the accumulator),
# unless we were dealing with a float array, in which case use the same
# precision as the original values array.
if is_float_dtype(dtype):
result = result.astype(dtype)
return _wrap_results(result, values.dtype)
@disallow('M8', 'm8')
def nansem(values, axis=None, skipna=True, ddof=1):
var = nanvar(values, axis, skipna, ddof=ddof)
mask = isna(values)
if not is_float_dtype(values.dtype):
values = values.astype('f8')
count, _ = _get_counts_nanvar(mask, axis, ddof, values.dtype)
var = nanvar(values, axis, skipna, ddof=ddof)
return np.sqrt(var) / np.sqrt(count)
def _nanminmax(meth, fill_value_typ):
@bottleneck_switch()
def reduction(values, axis=None, skipna=True):
values, mask, dtype, dtype_max = _get_values(
values, skipna, fill_value_typ=fill_value_typ, )
if ((axis is not None and values.shape[axis] == 0) or
values.size == 0):
try:
result = getattr(values, meth)(axis, dtype=dtype_max)
result.fill(np.nan)
            except Exception:
result = np.nan
else:
result = getattr(values, meth)(axis)
result = _wrap_results(result, dtype)
return _maybe_null_out(result, axis, mask)
reduction.__name__ = 'nan' + meth
return reduction
nanmin = _nanminmax('min', fill_value_typ='+inf')
nanmax = _nanminmax('max', fill_value_typ='-inf')
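# Note the deliberate inversion: nanmin fills NaNs with +inf so they can never
# win the minimum, and nanmax fills them with -inf so they can never win the
# maximum.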
@disallow('O')
def nanargmax(values, axis=None, skipna=True):
"""
Returns -1 in the NA case
"""
values, mask, dtype, _ = _get_values(values, skipna, fill_value_typ='-inf')
result = values.argmax(axis)
result = _maybe_arg_null_out(result, axis, mask, skipna)
return result
@disallow('O')
def nanargmin(values, axis=None, skipna=True):
"""
Returns -1 in the NA case
"""
values, mask, dtype, _ = _get_values(values, skipna, fill_value_typ='+inf')
result = values.argmin(axis)
result = _maybe_arg_null_out(result, axis, mask, skipna)
return result
@disallow('M8', 'm8')
def nanskew(values, axis=None, skipna=True):
""" Compute the sample skewness.
The statistic computed here is the adjusted Fisher-Pearson standardized
moment coefficient G1. The algorithm computes this coefficient directly
from the second and third central moment.
"""
values = _values_from_object(values)
mask = isna(values)
if not is_float_dtype(values.dtype):
values = values.astype('f8')
count = _get_counts(mask, axis)
else:
count = _get_counts(mask, axis, dtype=values.dtype)
if skipna:
values = values.copy()
np.putmask(values, mask, 0)
mean = values.sum(axis, dtype=np.float64) / count
if axis is not None:
mean = np.expand_dims(mean, axis)
adjusted = values - mean
if skipna:
np.putmask(adjusted, mask, 0)
adjusted2 = adjusted ** 2
adjusted3 = adjusted2 * adjusted
m2 = adjusted2.sum(axis, dtype=np.float64)
m3 = adjusted3.sum(axis, dtype=np.float64)
# floating point error
m2 = _zero_out_fperr(m2)
m3 = _zero_out_fperr(m3)
with np.errstate(invalid='ignore', divide='ignore'):
result = (count * (count - 1) ** 0.5 / (count - 2)) * (m3 / m2 ** 1.5)
dtype = values.dtype
if is_float_dtype(dtype):
result = result.astype(dtype)
if isinstance(result, np.ndarray):
result = np.where(m2 == 0, 0, result)
result[count < 3] = np.nan
return result
else:
result = 0 if m2 == 0 else result
if count < 3:
return np.nan
return result
@disallow('M8', 'm8')
def nankurt(values, axis=None, skipna=True):
""" Compute the sample skewness.
The statistic computed here is the adjusted Fisher-Pearson standardized
moment coefficient G2, computed directly from the second and fourth
central moment.
"""
values = _values_from_object(values)
mask = isna(values)
if not is_float_dtype(values.dtype):
values = values.astype('f8')
count = _get_counts(mask, axis)
else:
count = _get_counts(mask, axis, dtype=values.dtype)
if skipna:
values = values.copy()
np.putmask(values, mask, 0)
mean = values.sum(axis, dtype=np.float64) / count
if axis is not None:
mean = np.expand_dims(mean, axis)
adjusted = values - mean
if skipna:
np.putmask(adjusted, mask, 0)
adjusted2 = adjusted ** 2
adjusted4 = adjusted2 ** 2
m2 = adjusted2.sum(axis, dtype=np.float64)
m4 = adjusted4.sum(axis, dtype=np.float64)
with np.errstate(invalid='ignore', divide='ignore'):
adj = 3 * (count - 1) ** 2 / ((count - 2) * (count - 3))
numer = count * (count + 1) * (count - 1) * m4
denom = (count - 2) * (count - 3) * m2**2
result = numer / denom - adj
# floating point error
numer = _zero_out_fperr(numer)
denom = _zero_out_fperr(denom)
if not isinstance(denom, np.ndarray):
# if ``denom`` is a scalar, check these corner cases first before
# doing division
if count < 4:
return np.nan
if denom == 0:
return 0
with np.errstate(invalid='ignore', divide='ignore'):
result = numer / denom - adj
dtype = values.dtype
if is_float_dtype(dtype):
result = result.astype(dtype)
if isinstance(result, np.ndarray):
result = np.where(denom == 0, 0, result)
result[count < 4] = np.nan
return result
@disallow('M8', 'm8')
def nanprod(values, axis=None, skipna=True):
mask = isna(values)
if skipna and not is_any_int_dtype(values):
values = values.copy()
values[mask] = 1
result = values.prod(axis)
return _maybe_null_out(result, axis, mask)
def _maybe_arg_null_out(result, axis, mask, skipna):
# helper function for nanargmin/nanargmax
if axis is None or not getattr(result, 'ndim', False):
if skipna:
if mask.all():
result = -1
else:
if mask.any():
result = -1
else:
if skipna:
na_mask = mask.all(axis)
else:
na_mask = mask.any(axis)
if na_mask.any():
result[na_mask] = -1
return result
def _get_counts(mask, axis, dtype=float):
dtype = _get_dtype(dtype)
if axis is None:
return dtype.type(mask.size - mask.sum())
count = mask.shape[axis] - mask.sum(axis)
if is_scalar(count):
return dtype.type(count)
try:
return count.astype(dtype)
except AttributeError:
return np.array(count, dtype=dtype)
def _maybe_null_out(result, axis, mask):
if axis is not None and getattr(result, 'ndim', False):
null_mask = (mask.shape[axis] - mask.sum(axis)) == 0
if np.any(null_mask):
if is_numeric_dtype(result):
if np.iscomplexobj(result):
result = result.astype('c16')
else:
result = result.astype('f8')
result[null_mask] = np.nan
else:
# GH12941, use None to auto cast null
result[null_mask] = None
elif result is not tslib.NaT:
null_mask = mask.size - mask.sum()
if null_mask == 0:
result = np.nan
return result
def _zero_out_fperr(arg):
if isinstance(arg, np.ndarray):
with np.errstate(invalid='ignore'):
return np.where(np.abs(arg) < 1e-14, 0, arg)
else:
return arg.dtype.type(0) if np.abs(arg) < 1e-14 else arg
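# Illustrative: _zero_out_fperr(np.array([1e-20, 1.0])) -> array([0., 1.]),
# snapping tiny accumulated rounding error in the central moments back to zero.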
@disallow('M8', 'm8')
def nancorr(a, b, method='pearson', min_periods=None):
"""
a, b: ndarrays
"""
if len(a) != len(b):
raise AssertionError('Operands to nancorr must have same size')
if min_periods is None:
min_periods = 1
valid = notna(a) & notna(b)
if not valid.all():
a = a[valid]
b = b[valid]
if len(a) < min_periods:
return np.nan
f = get_corr_func(method)
return f(a, b)
def get_corr_func(method):
if method in ['kendall', 'spearman']:
from scipy.stats import kendalltau, spearmanr
def _pearson(a, b):
return np.corrcoef(a, b)[0, 1]
def _kendall(a, b):
rs = kendalltau(a, b)
if isinstance(rs, tuple):
return rs[0]
return rs
def _spearman(a, b):
return spearmanr(a, b)[0]
_cor_methods = {
'pearson': _pearson,
'kendall': _kendall,
'spearman': _spearman
}
return _cor_methods[method]
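# e.g. get_corr_func('spearman')(a, b) returns the Spearman rank correlation;
# scipy is only imported when a rank-based method is actually requested.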
@disallow('M8', 'm8')
def nancov(a, b, min_periods=None):
if len(a) != len(b):
raise AssertionError('Operands to nancov must have same size')
if min_periods is None:
min_periods = 1
valid = notna(a) & notna(b)
if not valid.all():
a = a[valid]
b = b[valid]
if len(a) < min_periods:
return np.nan
return np.cov(a, b)[0, 1]
def _ensure_numeric(x):
if isinstance(x, np.ndarray):
if is_integer_dtype(x) or is_bool_dtype(x):
x = x.astype(np.float64)
elif is_object_dtype(x):
try:
x = x.astype(np.complex128)
            except Exception:
x = x.astype(np.float64)
else:
if not np.any(x.imag):
x = x.real
elif not (is_float(x) or is_integer(x) or is_complex(x)):
try:
x = float(x)
except Exception:
try:
x = complex(x)
except Exception:
raise TypeError('Could not convert %s to numeric' % str(x))
return x
# NA-friendly array comparisons
def make_nancomp(op):
def f(x, y):
xmask = isna(x)
ymask = isna(y)
mask = xmask | ymask
with np.errstate(all='ignore'):
result = op(x, y)
if mask.any():
if is_bool_dtype(result):
result = result.astype('O')
np.putmask(result, mask, np.nan)
return result
return f
nangt = make_nancomp(operator.gt)
nange = make_nancomp(operator.ge)
nanlt = make_nancomp(operator.lt)
nanle = make_nancomp(operator.le)
naneq = make_nancomp(operator.eq)
nanne = make_nancomp(operator.ne)
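# Illustrative: naneq(np.array([1.0, np.nan]), np.array([1.0, 2.0])) returns
# array([True, nan], dtype=object) -- the boolean result is upcast to object so
# that NA positions can carry np.nan instead of a misleading False.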
|
{
"content_hash": "7a49322efe5e10fdce6ea2886e9021de",
"timestamp": "",
"source": "github",
"line_count": 819,
"max_line_length": 79,
"avg_line_length": 29.500610500610502,
"alnum_prop": 0.578204544513886,
"repo_name": "DGrady/pandas",
"id": "b2bbf1c75b7ea06d69e18307e7ba3877f509d95f",
"size": "24161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/core/nanops.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4071"
},
{
"name": "C",
"bytes": "493476"
},
{
"name": "C++",
"bytes": "17353"
},
{
"name": "HTML",
"bytes": "551706"
},
{
"name": "Makefile",
"bytes": "907"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "12054479"
},
{
"name": "R",
"bytes": "1177"
},
{
"name": "Shell",
"bytes": "22265"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
}
|
class RandomListNode(object):
def __init__(self, x):
self.label = x
self.next = None
self.random = None
def dump(self):
s = self.label
if self.random is not None:
s += ' rand={}'.format(self.random.label)
print(s)
if self.next is not None:
self.next.dump()
class Solution(object):
def copyRandomList(self, head):
if head is None:
return None
# label->node map
label_to_node = {}
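        # NOTE: copies are keyed by label, so this assumes labels are unique;
        # duplicate labels in the input list would collapse into a single copy.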
node = head
pre = None
while node is not None:
node_copy = label_to_node.setdefault(
node.label,
RandomListNode(node.label),
)
if pre is not None:
pre.next = node_copy
if node.random is not None:
node_copy.random = label_to_node.setdefault(
node.random.label,
RandomListNode(node.random.label),
)
node = node.next
pre = node_copy
return label_to_node[head.label]
def test():
a = RandomListNode('a')
b = RandomListNode('b')
c = RandomListNode('c')
d = RandomListNode('d')
a.next = b
b.next = c
c.next = d
a.random = d
b.random = c
d.random = a
a.dump()
cpy = Solution().copyRandomList(a)
cpy.dump()
if __name__ == '__main__':
test()
|
{
"content_hash": "8b88c15eda89dc919916d1f6ec6bf195",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 60,
"avg_line_length": 20.714285714285715,
"alnum_prop": 0.48551724137931035,
"repo_name": "mmcloughlin/interviews",
"id": "35287cbb67e5fea3367e0e5732216d006937ab40",
"size": "1509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode/copy-list-with-random-pointer/copyrandptr.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5013"
},
{
"name": "Go",
"bytes": "1517"
},
{
"name": "Makefile",
"bytes": "184"
},
{
"name": "Python",
"bytes": "116579"
},
{
"name": "Shell",
"bytes": "459"
}
],
"symlink_target": ""
}
|
"""The vroom test runner."""
import os
import signal
import subprocess
import sys
import vroom.args
import vroom.color
import vroom.output
import vroom.runner
import vroom.vim
def main(argv=None):
if argv is None:
argv = sys.argv
try:
args = vroom.args.Parse(argv[1:])
except ValueError as e:
sys.stderr.write('%s\n' % ', '.join(e.args))
return 1
if args.murder:
try:
output = subprocess.check_output(['ps', '-A']).decode('utf-8')
except subprocess.CalledProcessError:
sys.stdout.write("Can't find running processes.\n")
return 1
for line in output.splitlines():
if line.endswith('vroom'):
pid = int(line.split(None, 1)[0])
# ARE YOU SUICIDAL?!
if pid != os.getpid():
sys.stdout.write('Killing a vroom: %s\n' % line)
os.kill(pid, signal.SIGKILL)
break
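    # for/else: the else branch runs only if the loop finished without hitting
    # break, i.e. no other running vroom process was found.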
else:
sys.stdout.write('No running vrooms found.\n')
return 0
  if args.kill:  # guard assumed from context: without it the rest of main() is unreachable
    end = 'VroomEnd()'
    kill = ['vim', '--servername', args.servername, '--remote-expr', end]
    sys.stdout.write("I hope you're happy.\n")
    return subprocess.call(kill)
dirty = False
writers = []
try:
for filename in args.filenames:
with open(filename) as f:
runner = vroom.runner.Vroom(filename, args)
writers.append(runner(f))
if runner.dirty:
dirty = True
except vroom.vim.ServerQuit as e:
# If the vim server process fails, the details are probably on stderr, so hope
# for the best and exit without shell reset.
sys.stderr.write('Exception: {}\n'.format(e))
return 2
if dirty:
# Running vim in a process can screw with shell line endings. Reset terminal.
subprocess.call(['reset'])
for writer in writers:
writer.Write()
vroom.output.WriteBackmatter(writers, args)
failed_tests = [w for w in writers if w.Status() != vroom.output.STATUS.PASS]
if failed_tests:
return 3
if __name__ == '__main__':
sys.exit(main())
|
{
"content_hash": "01e6828daf19a0d4fba33c30af5c27f8",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 82,
"avg_line_length": 26.013157894736842,
"alnum_prop": 0.6317653009610521,
"repo_name": "martindemello/vroom",
"id": "45f6bd821dab7a1857fe9c152643ee88f1a0937a",
"size": "1977",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vroom/__main__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "115683"
}
],
"symlink_target": ""
}
|
"""
Parser for the pofile translation format.
"""
from datetime import datetime
import polib
from pontoon.base.formats.base import ParsedResource
from pontoon.base.vcs_models import VCSTranslation
class POEntity(VCSTranslation):
def __init__(self, po_entry, order):
self.po_entry = po_entry
if po_entry.msgstr_plural:
strings = po_entry.msgstr_plural
else:
strings = {None: po_entry.msgstr}
# Remove empty strings from the string dict.
strings = {key: value for key, value in strings.items() if value}
super(POEntity, self).__init__(
key=po_entry.msgid, # Pofiles use the source as the key.
source_string=po_entry.msgid,
source_string_plural=po_entry.msgid_plural,
strings=strings,
comments=po_entry.comment.split('\n') if po_entry.comment else [],
fuzzy='fuzzy' in po_entry.flags,
order=order,
source=po_entry.occurrences
)
def update_entry(self, locale):
"""Update the POEntry associated with this translation."""
if self.po_entry.msgstr_plural:
self.po_entry.msgstr_plural = {
plural_form: self.strings.get(plural_form, '')
for plural_form in range(locale.nplurals or 1)
}
else:
self.po_entry.msgstr = self.strings.get(None, '')
if self.fuzzy and 'fuzzy' not in self.po_entry.flags:
self.po_entry.flags.append('fuzzy')
elif 'fuzzy' in self.po_entry.flags:
self.po_entry.flags.remove('fuzzy')
def __repr__(self):
return '<POEntity {key}>'.format(key=self.key.encode('utf-8'))
class POResource(ParsedResource):
def __init__(self, pofile):
self.pofile = pofile
self.entities = [
POEntity(entry, k) for k, entry in enumerate(self.pofile)
if not entry.obsolete
]
@property
def translations(self):
return self.entities
def save(self, locale):
for entity in self.translations:
entity.update_entry(locale)
metadata = self.pofile.metadata
if len(self.translations) > 0:
latest_translation = max(
self.translations,
key=lambda t: t.last_updated or datetime.min
)
if latest_translation.last_updated:
metadata['PO-Revision-Date'] = latest_translation.last_updated.strftime(
'%Y-%m-%d %H:%M%z'
)
if latest_translation.last_translator:
metadata['Last-Translator'] = latest_translation.last_translator.display_name
metadata.update({
'Language': locale.code.replace('-', '_'),
'X-Generator': 'Pontoon',
'Plural-Forms': ('nplurals={locale.nplurals}; plural={locale.plural_rule};'
.format(locale=locale))
})
self.pofile.save()
def __repr__(self):
return '<POResource {self.pofile.fpath}>'.format(self=self)
def parse(path):
pofile = polib.pofile(path)
return POResource(pofile)
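# Usage sketch (illustrative path): parse('locale/de/django.po') yields a
# POResource whose .translations are POEntity objects keyed by msgid;
# calling .save(locale) writes translations and updated metadata back to disk.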
|
{
"content_hash": "871efb8d179e6e7f5e6c7cd2efed8e5e",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 93,
"avg_line_length": 32.40816326530612,
"alnum_prop": 0.5802896725440806,
"repo_name": "Osmose/pontoon",
"id": "f821beffc2b390810466899085df0fe53b7e4155",
"size": "3176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pontoon/base/formats/po.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "72275"
},
{
"name": "HTML",
"bytes": "50378"
},
{
"name": "JavaScript",
"bytes": "799755"
},
{
"name": "Python",
"bytes": "353686"
},
{
"name": "Shell",
"bytes": "199"
}
],
"symlink_target": ""
}
|
"""Support for Fronius devices."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Final
from homeassistant.components.sensor import (
DOMAIN as SENSOR_DOMAIN,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ELECTRIC_CURRENT_AMPERE,
ELECTRIC_POTENTIAL_VOLT,
ENERGY_WATT_HOUR,
FREQUENCY_HERTZ,
PERCENTAGE,
POWER_VOLT_AMPERE,
POWER_VOLT_AMPERE_REACTIVE,
POWER_WATT,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import DeviceInfo, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
if TYPE_CHECKING:
from . import FroniusSolarNet
from .coordinator import (
FroniusCoordinatorBase,
FroniusInverterUpdateCoordinator,
FroniusLoggerUpdateCoordinator,
FroniusMeterUpdateCoordinator,
FroniusOhmpilotUpdateCoordinator,
FroniusPowerFlowUpdateCoordinator,
FroniusStorageUpdateCoordinator,
)
ENERGY_VOLT_AMPERE_REACTIVE_HOUR: Final = "varh"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Fronius sensor entities based on a config entry."""
solar_net: FroniusSolarNet = hass.data[DOMAIN][config_entry.entry_id]
for inverter_coordinator in solar_net.inverter_coordinators:
inverter_coordinator.add_entities_for_seen_keys(
async_add_entities, InverterSensor
)
if solar_net.logger_coordinator is not None:
solar_net.logger_coordinator.add_entities_for_seen_keys(
async_add_entities, LoggerSensor
)
if solar_net.meter_coordinator is not None:
solar_net.meter_coordinator.add_entities_for_seen_keys(
async_add_entities, MeterSensor
)
if solar_net.ohmpilot_coordinator is not None:
solar_net.ohmpilot_coordinator.add_entities_for_seen_keys(
async_add_entities, OhmpilotSensor
)
if solar_net.power_flow_coordinator is not None:
solar_net.power_flow_coordinator.add_entities_for_seen_keys(
async_add_entities, PowerFlowSensor
)
if solar_net.storage_coordinator is not None:
solar_net.storage_coordinator.add_entities_for_seen_keys(
async_add_entities, StorageSensor
)
INVERTER_ENTITY_DESCRIPTIONS: list[SensorEntityDescription] = [
SensorEntityDescription(
key="energy_day",
name="Energy day",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="energy_year",
name="Energy year",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="energy_total",
name="Energy total",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="frequency_ac",
name="Frequency AC",
native_unit_of_measurement=FREQUENCY_HERTZ,
device_class=SensorDeviceClass.FREQUENCY,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="current_ac",
name="AC current",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="current_dc",
name="DC current",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
),
SensorEntityDescription(
key="current_dc_2",
name="DC current 2",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
),
SensorEntityDescription(
key="power_ac",
name="AC power",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="voltage_ac",
name="AC voltage",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_dc",
name="DC voltage",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
),
SensorEntityDescription(
key="voltage_dc_2",
name="DC voltage 2",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
),
# device status entities
SensorEntityDescription(
key="inverter_state",
name="Inverter state",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="error_code",
name="Error code",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="status_code",
name="Status code",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="led_state",
name="LED state",
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="led_color",
name="LED color",
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
]
LOGGER_ENTITY_DESCRIPTIONS: list[SensorEntityDescription] = [
SensorEntityDescription(
key="co2_factor",
name="CO₂ factor",
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:molecule-co2",
),
SensorEntityDescription(
key="cash_factor",
name="Grid export tariff",
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:cash-plus",
),
SensorEntityDescription(
key="delivery_factor",
name="Grid import tariff",
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:cash-minus",
),
]
METER_ENTITY_DESCRIPTIONS: list[SensorEntityDescription] = [
SensorEntityDescription(
key="current_ac_phase_1",
name="Current AC phase 1",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="current_ac_phase_2",
name="Current AC phase 2",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="current_ac_phase_3",
name="Current AC phase 3",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_reactive_ac_consumed",
name="Energy reactive AC consumed",
native_unit_of_measurement=ENERGY_VOLT_AMPERE_REACTIVE_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
icon="mdi:lightning-bolt-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_reactive_ac_produced",
name="Energy reactive AC produced",
native_unit_of_measurement=ENERGY_VOLT_AMPERE_REACTIVE_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
icon="mdi:lightning-bolt-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_real_ac_minus",
name="Energy real AC minus",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_real_ac_plus",
name="Energy real AC plus",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_real_consumed",
name="Energy real consumed",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="energy_real_produced",
name="Energy real produced",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="frequency_phase_average",
name="Frequency phase average",
native_unit_of_measurement=FREQUENCY_HERTZ,
device_class=SensorDeviceClass.FREQUENCY,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="meter_location",
name="Meter location",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="power_apparent_phase_1",
name="Power apparent phase 1",
native_unit_of_measurement=POWER_VOLT_AMPERE,
device_class=SensorDeviceClass.APPARENT_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_apparent_phase_2",
name="Power apparent phase 2",
native_unit_of_measurement=POWER_VOLT_AMPERE,
device_class=SensorDeviceClass.APPARENT_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_apparent_phase_3",
name="Power apparent phase 3",
native_unit_of_measurement=POWER_VOLT_AMPERE,
device_class=SensorDeviceClass.APPARENT_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_apparent",
name="Power apparent",
native_unit_of_measurement=POWER_VOLT_AMPERE,
device_class=SensorDeviceClass.APPARENT_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_factor_phase_1",
name="Power factor phase 1",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_factor_phase_2",
name="Power factor phase 2",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_factor_phase_3",
name="Power factor phase 3",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_factor",
name="Power factor",
device_class=SensorDeviceClass.POWER_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="power_reactive_phase_1",
name="Power reactive phase 1",
native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE,
device_class=SensorDeviceClass.REACTIVE_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_reactive_phase_2",
name="Power reactive phase 2",
native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE,
device_class=SensorDeviceClass.REACTIVE_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_reactive_phase_3",
name="Power reactive phase 3",
native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE,
device_class=SensorDeviceClass.REACTIVE_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_reactive",
name="Power reactive",
native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE,
device_class=SensorDeviceClass.REACTIVE_POWER,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:flash-outline",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_real_phase_1",
name="Power real phase 1",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_real_phase_2",
name="Power real phase 2",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_real_phase_3",
name="Power real phase 3",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_real",
name="Power real",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="voltage_ac_phase_1",
name="Voltage AC phase 1",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_ac_phase_2",
name="Voltage AC phase 2",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_ac_phase_3",
name="Voltage AC phase 3",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_ac_phase_to_phase_12",
name="Voltage AC phase 1-2",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_ac_phase_to_phase_23",
name="Voltage AC phase 2-3",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_ac_phase_to_phase_31",
name="Voltage AC phase 3-1",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
]
OHMPILOT_ENTITY_DESCRIPTIONS: list[SensorEntityDescription] = [
SensorEntityDescription(
key="energy_real_ac_consumed",
name="Energy consumed",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="power_real_ac",
name="Power",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="temperature_channel_1",
name="Temperature Channel 1",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="error_code",
name="Error code",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="state_code",
name="State code",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="state_message",
name="State message",
entity_category=EntityCategory.DIAGNOSTIC,
),
]
POWER_FLOW_ENTITY_DESCRIPTIONS: list[SensorEntityDescription] = [
SensorEntityDescription(
key="energy_day",
name="Energy day",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_year",
name="Energy year",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="energy_total",
name="Energy total",
native_unit_of_measurement=ENERGY_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="meter_mode",
name="Mode",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="power_battery",
name="Power battery",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="power_grid",
name="Power grid",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="power_load",
name="Power load",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="power_photovoltaics",
name="Power photovoltaics",
native_unit_of_measurement=POWER_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="relative_autonomy",
name="Relative autonomy",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:home-circle-outline",
),
SensorEntityDescription(
key="relative_self_consumption",
name="Relative self consumption",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:solar-power",
),
]
STORAGE_ENTITY_DESCRIPTIONS: list[SensorEntityDescription] = [
SensorEntityDescription(
key="capacity_maximum",
name="Capacity maximum",
native_unit_of_measurement=ENERGY_WATT_HOUR,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="capacity_designed",
name="Capacity designed",
native_unit_of_measurement=ENERGY_WATT_HOUR,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="current_dc",
name="Current DC",
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
),
SensorEntityDescription(
key="voltage_dc",
name="Voltage DC",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
),
SensorEntityDescription(
key="voltage_dc_maximum_cell",
name="Voltage DC maximum cell",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_dc_minimum_cell",
name="Voltage DC minimum cell",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:current-dc",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="state_of_charge",
name="State of charge",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="temperature_cell",
name="Temperature cell",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
]
class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEntity):
"""Defines a Fronius coordinator entity."""
entity_descriptions: list[SensorEntityDescription]
_entity_id_prefix: str
def __init__(
self,
coordinator: FroniusCoordinatorBase,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius meter sensor."""
super().__init__(coordinator)
self.entity_description = next(
desc for desc in self.entity_descriptions if desc.key == key
)
# default entity_id added 2021.12
# used for migration from non-unique_id entities of previous integration implementation
# when removed after migration period `_entity_id_prefix` will also no longer be needed
self.entity_id = f"{SENSOR_DOMAIN}.{key}_{DOMAIN}_{self._entity_id_prefix}_{coordinator.solar_net.host}"
self.solar_net_id = solar_net_id
self._attr_native_value = self._get_entity_value()
def _device_data(self) -> dict[str, Any]:
"""Extract information for SolarNet device from coordinator data."""
return self.coordinator.data[self.solar_net_id]
def _get_entity_value(self) -> Any:
"""Extract entity value from coordinator. Raises KeyError if not included in latest update."""
new_value = self.coordinator.data[self.solar_net_id][
self.entity_description.key
]["value"]
return round(new_value, 4) if isinstance(new_value, float) else new_value
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
try:
self._attr_native_value = self._get_entity_value()
except KeyError:
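            # the latest payload omitted this key; keep the previous value
            # rather than flapping the entity to an unknown state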
return
self.async_write_ha_state()
class InverterSensor(_FroniusSensorEntity):
"""Defines a Fronius inverter device sensor entity."""
entity_descriptions = INVERTER_ENTITY_DESCRIPTIONS
def __init__(
self,
coordinator: FroniusInverterUpdateCoordinator,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius inverter sensor."""
self._entity_id_prefix = f"inverter_{solar_net_id}"
super().__init__(coordinator, key, solar_net_id)
# device_info created in __init__ from a `GetInverterInfo` request
self._attr_device_info = coordinator.inverter_info.device_info
self._attr_unique_id = f"{coordinator.inverter_info.unique_id}-{key}"
class LoggerSensor(_FroniusSensorEntity):
"""Defines a Fronius logger device sensor entity."""
entity_descriptions = LOGGER_ENTITY_DESCRIPTIONS
_entity_id_prefix = "logger_info_0"
def __init__(
self,
coordinator: FroniusLoggerUpdateCoordinator,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius meter sensor."""
super().__init__(coordinator, key, solar_net_id)
logger_data = self._device_data()
# Logger device is already created in FroniusSolarNet._create_solar_net_device
self._attr_device_info = coordinator.solar_net.system_device_info
self._attr_native_unit_of_measurement = logger_data[key].get("unit")
self._attr_unique_id = f'{logger_data["unique_identifier"]["value"]}-{key}'
class MeterSensor(_FroniusSensorEntity):
"""Defines a Fronius meter device sensor entity."""
entity_descriptions = METER_ENTITY_DESCRIPTIONS
def __init__(
self,
coordinator: FroniusMeterUpdateCoordinator,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius meter sensor."""
self._entity_id_prefix = f"meter_{solar_net_id}"
super().__init__(coordinator, key, solar_net_id)
meter_data = self._device_data()
# S0 meters connected directly to inverters respond "n.a." as serial number
# `model` contains the inverter id: "S0 Meter at inverter 1"
if (meter_uid := meter_data["serial"]["value"]) == "n.a.":
meter_uid = (
f"{coordinator.solar_net.solar_net_device_id}:"
f'{meter_data["model"]["value"]}'
)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, meter_uid)},
manufacturer=meter_data["manufacturer"]["value"],
model=meter_data["model"]["value"],
name=meter_data["model"]["value"],
via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id),
)
self._attr_unique_id = f"{meter_uid}-{key}"
class OhmpilotSensor(_FroniusSensorEntity):
"""Defines a Fronius Ohmpilot sensor entity."""
entity_descriptions = OHMPILOT_ENTITY_DESCRIPTIONS
def __init__(
self,
coordinator: FroniusOhmpilotUpdateCoordinator,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius meter sensor."""
self._entity_id_prefix = f"ohmpilot_{solar_net_id}"
super().__init__(coordinator, key, solar_net_id)
device_data = self._device_data()
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device_data["serial"]["value"])},
manufacturer=device_data["manufacturer"]["value"],
model=f"{device_data['model']['value']} {device_data['hardware']['value']}",
name=device_data["model"]["value"],
sw_version=device_data["software"]["value"],
via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id),
)
self._attr_unique_id = f'{device_data["serial"]["value"]}-{key}'
class PowerFlowSensor(_FroniusSensorEntity):
"""Defines a Fronius power flow sensor entity."""
entity_descriptions = POWER_FLOW_ENTITY_DESCRIPTIONS
_entity_id_prefix = "power_flow_0"
def __init__(
self,
coordinator: FroniusPowerFlowUpdateCoordinator,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius power flow sensor."""
super().__init__(coordinator, key, solar_net_id)
# SolarNet device is already created in FroniusSolarNet._create_solar_net_device
self._attr_device_info = coordinator.solar_net.system_device_info
self._attr_unique_id = (
f"{coordinator.solar_net.solar_net_device_id}-power_flow-{key}"
)
class StorageSensor(_FroniusSensorEntity):
"""Defines a Fronius storage device sensor entity."""
entity_descriptions = STORAGE_ENTITY_DESCRIPTIONS
def __init__(
self,
coordinator: FroniusStorageUpdateCoordinator,
key: str,
solar_net_id: str,
) -> None:
"""Set up an individual Fronius storage sensor."""
self._entity_id_prefix = f"storage_{solar_net_id}"
super().__init__(coordinator, key, solar_net_id)
storage_data = self._device_data()
self._attr_unique_id = f'{storage_data["serial"]["value"]}-{key}'
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, storage_data["serial"]["value"])},
manufacturer=storage_data["manufacturer"]["value"],
model=storage_data["model"]["value"],
name=storage_data["model"]["value"],
via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id),
)
|
{
"content_hash": "2ea6e0c45b6b825cd1ee811ed4c647dc",
"timestamp": "",
"source": "github",
"line_count": 844,
"max_line_length": 112,
"avg_line_length": 36.088862559241704,
"alnum_prop": 0.6574411503988968,
"repo_name": "toddeye/home-assistant",
"id": "c3b219c4b22d29ba0ab1bc2f0e2e4cf777a35756",
"size": "30461",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/fronius/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3005"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "47414832"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
import pygame
from pygame.locals import QUIT
pygame.init()
display_width = 800
display_height = 600
game_display = pygame.display.set_mode((display_width, display_height))
pygame.display.update()
# Minimal event loop: keep the window open until the user closes it.
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            quit()
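# Sketch of a frame-rate-capped variant of the loop above, using the
# standard pygame.time.Clock API so the loop doesn't spin at full CPU:
# clock = pygame.time.Clock()
# while True:
#     for event in pygame.event.get():
#         if event.type == QUIT:
#             pygame.quit()
#             quit()
#     pygame.display.update()
#     clock.tick(60)  # cap at ~60 iterations per second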
|
{
"content_hash": "c547fba52ac43ea3341306d8b2b05768",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 18.125,
"alnum_prop": 0.6781609195402298,
"repo_name": "mykespb/pythoner",
"id": "37255e9045a872c34201caa7583bc771c8326b23",
"size": "472",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pygamer/pyg-t1.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "11136650"
},
{
"name": "Jupyter Notebook",
"bytes": "3213"
},
{
"name": "Python",
"bytes": "198584"
},
{
"name": "Shell",
"bytes": "408"
}
],
"symlink_target": ""
}
|
from qqweibo.auth import OAuthHandler
from qqweibo.api import API
from qqweibo.parsers import (ModelParser, JSONParser, XMLRawParser,
XMLDomParser, XMLETreeParser)
from qqweibo.error import QWeiboError
from qqweibo.cache import MemoryCache, FileCache
__all__ = ['OAuthHandler', 'API', 'QWeiboError', 'version',
'XMLRawParser', 'XMLDomParser', 'XMLETreeParser',
'ModelParser', 'JSONParser',
'MemoryCache', 'FileCache']
version = '0.3.9'
|
{
"content_hash": "62cf7b1a18de171f59d3f51b7d105b61",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 67,
"avg_line_length": 35.785714285714285,
"alnum_prop": 0.6766467065868264,
"repo_name": "de1o/moedj",
"id": "4368a59aa0f84edcc152e74707c2a8e40634ebb4",
"size": "664",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "moedjpack/qqweibo/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25964"
},
{
"name": "JavaScript",
"bytes": "4036"
},
{
"name": "Python",
"bytes": "325868"
}
],
"symlink_target": ""
}
|
"""Get useful information from live Python objects.
This module encapsulates the interface provided by the internal special
attributes (co_*, im_*, tb_*, etc.) in a friendlier fashion.
It also provides some help for examining source code and class layout.
Here are some of the useful functions provided by this module:
ismodule(), isclass(), ismethod(), isfunction(), isgeneratorfunction(),
isgenerator(), istraceback(), isframe(), iscode(), isbuiltin(),
isroutine() - check object types
getmembers() - get members of an object that satisfy a given condition
getfile(), getsourcefile(), getsource() - find an object's source code
getdoc(), getcomments() - get documentation on an object
getmodule() - determine the module that an object came from
getclasstree() - arrange classes so as to represent their hierarchy
getargspec(), getargvalues(), getcallargs() - get info about function arguments
getfullargspec() - same, with support for Python-3000 features
formatargspec(), formatargvalues() - format an argument spec
getouterframes(), getinnerframes() - get info about frames
currentframe() - get the current stack frame
stack(), trace() - get info about frames on the stack or in a traceback
signature() - get a Signature object for the callable
"""
# This module is in the public domain. No warranties.
__author__ = ('Ka-Ping Yee <ping@lfw.org>',
'Yury Selivanov <yselivanov@sprymix.com>')
import ast
import importlib.machinery
import itertools
import linecache
import os
import re
import sys
import tokenize
import token
import types
import warnings
import functools
import builtins
from operator import attrgetter
from collections import namedtuple, OrderedDict
# Create constants for the compiler flags in Include/code.h
# We try to get them from dis to avoid duplication, but fall
# back to hardcoding so the dependency is optional
try:
from dis import COMPILER_FLAG_NAMES as _flag_names
except ImportError:
CO_OPTIMIZED, CO_NEWLOCALS = 0x1, 0x2
CO_VARARGS, CO_VARKEYWORDS = 0x4, 0x8
CO_NESTED, CO_GENERATOR, CO_NOFREE = 0x10, 0x20, 0x40
else:
mod_dict = globals()
for k, v in _flag_names.items():
mod_dict["CO_" + v] = k
# See Include/object.h
TPFLAGS_IS_ABSTRACT = 1 << 20
# ----------------------------------------------------------- type-checking
def ismodule(object):
"""Return true if the object is a module.
Module objects provide these attributes:
__cached__ pathname to byte compiled file
__doc__ documentation string
__file__ filename (missing for built-in modules)"""
return isinstance(object, types.ModuleType)
def isclass(object):
"""Return true if the object is a class.
Class objects provide these attributes:
__doc__ documentation string
__module__ name of module in which this class was defined"""
return isinstance(object, type)
def ismethod(object):
"""Return true if the object is an instance method.
Instance method objects provide these attributes:
__doc__ documentation string
__name__ name with which this method was defined
__func__ function object containing implementation of method
__self__ instance to which this method is bound"""
return isinstance(object, types.MethodType)
def ismethoddescriptor(object):
"""Return true if the object is a method descriptor.
But not if ismethod() or isclass() or isfunction() are true.
This is new in Python 2.2, and, for example, is true of int.__add__.
An object passing this test has a __get__ attribute but not a __set__
attribute, but beyond that the set of attributes varies. __name__ is
usually sensible, and __doc__ often is.
Methods implemented via descriptors that also pass one of the other
tests return false from the ismethoddescriptor() test, simply because
the other tests promise more -- you can, e.g., count on having the
__func__ attribute (etc) when an object passes ismethod()."""
if isclass(object) or ismethod(object) or isfunction(object):
# mutual exclusion
return False
tp = type(object)
return hasattr(tp, "__get__") and not hasattr(tp, "__set__")
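# Example (doctest-style sketch): int.__add__, a slot wrapper, passes this
# test but none of the stricter ones, matching the docstring above.
# >>> ismethoddescriptor(int.__add__)
# True
# >>> ismethod(int.__add__) or isfunction(int.__add__)
# False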
def isdatadescriptor(object):
"""Return true if the object is a data descriptor.
Data descriptors have both a __get__ and a __set__ attribute. Examples are
properties (defined in Python) and getsets and members (defined in C).
Typically, data descriptors will also have __name__ and __doc__ attributes
(properties, getsets, and members have both of these attributes), but this
is not guaranteed."""
if isclass(object) or ismethod(object) or isfunction(object):
# mutual exclusion
return False
tp = type(object)
return hasattr(tp, "__set__") and hasattr(tp, "__get__")
if hasattr(types, 'MemberDescriptorType'):
# CPython and equivalent
def ismemberdescriptor(object):
"""Return true if the object is a member descriptor.
Member descriptors are specialized descriptors defined in extension
modules."""
return isinstance(object, types.MemberDescriptorType)
else:
# Other implementations
def ismemberdescriptor(object):
"""Return true if the object is a member descriptor.
Member descriptors are specialized descriptors defined in extension
modules."""
return False
if hasattr(types, 'GetSetDescriptorType'):
# CPython and equivalent
def isgetsetdescriptor(object):
"""Return true if the object is a getset descriptor.
getset descriptors are specialized descriptors defined in extension
modules."""
return isinstance(object, types.GetSetDescriptorType)
else:
# Other implementations
def isgetsetdescriptor(object):
"""Return true if the object is a getset descriptor.
getset descriptors are specialized descriptors defined in extension
modules."""
return False
def isfunction(object):
"""Return true if the object is a user-defined function.
Function objects provide these attributes:
__doc__ documentation string
__name__ name with which this function was defined
__code__ code object containing compiled function bytecode
__defaults__ tuple of any default values for arguments
__globals__ global namespace in which this function was defined
__annotations__ dict of parameter annotations
__kwdefaults__ dict of keyword only parameters with defaults"""
return isinstance(object, types.FunctionType)
def isgeneratorfunction(object):
"""Return true if the object is a user-defined generator function.
    Generator function objects provide the same attributes as functions.
See help(isfunction) for attributes listing."""
return bool((isfunction(object) or ismethod(object)) and
object.__code__.co_flags & CO_GENERATOR)
def isgenerator(object):
"""Return true if the object is a generator.
Generator objects provide these attributes:
__iter__ defined to support iteration over container
close raises a new GeneratorExit exception inside the
generator to terminate the iteration
gi_code code object
gi_frame frame object or possibly None once the generator has
been exhausted
gi_running set to 1 when generator is executing, 0 otherwise
next return the next item from the container
send resumes the generator and "sends" a value that becomes
the result of the current yield-expression
throw used to raise an exception inside the generator"""
return isinstance(object, types.GeneratorType)
def istraceback(object):
"""Return true if the object is a traceback.
Traceback objects provide these attributes:
tb_frame frame object at this level
tb_lasti index of last attempted instruction in bytecode
tb_lineno current line number in Python source code
tb_next next inner traceback object (called by this level)"""
return isinstance(object, types.TracebackType)
def isframe(object):
"""Return true if the object is a frame object.
Frame objects provide these attributes:
f_back next outer frame object (this frame's caller)
f_builtins built-in namespace seen by this frame
f_code code object being executed in this frame
f_globals global namespace seen by this frame
f_lasti index of last attempted instruction in bytecode
f_lineno current line number in Python source code
f_locals local namespace seen by this frame
f_trace tracing function for this frame, or None"""
return isinstance(object, types.FrameType)
def iscode(object):
"""Return true if the object is a code object.
Code objects provide these attributes:
co_argcount number of arguments (not including * or ** args)
co_code string of raw compiled bytecode
co_consts tuple of constants used in the bytecode
co_filename name of file in which this code object was created
co_firstlineno number of first line in Python source code
co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg
co_lnotab encoded mapping of line numbers to bytecode indices
co_name name with which this code object was defined
co_names tuple of names of local variables
co_nlocals number of local variables
co_stacksize virtual machine stack space required
co_varnames tuple of names of arguments and local variables"""
return isinstance(object, types.CodeType)
def isbuiltin(object):
"""Return true if the object is a built-in function or method.
Built-in functions and methods provide these attributes:
__doc__ documentation string
__name__ original name of this function or method
__self__ instance to which a method is bound, or None"""
return isinstance(object, types.BuiltinFunctionType)
def isroutine(object):
"""Return true if the object is any kind of function or method."""
return (isbuiltin(object)
or isfunction(object)
or ismethod(object)
or ismethoddescriptor(object))
def isabstract(object):
"""Return true if the object is an abstract base class (ABC)."""
return bool(isinstance(object, type) and object.__flags__ & TPFLAGS_IS_ABSTRACT)
def getmembers(object, predicate=None):
"""Return all members of an object as (name, value) pairs sorted by name.
Optionally, only return members that satisfy a given predicate."""
if isclass(object):
mro = (object,) + getmro(object)
else:
mro = ()
results = []
processed = set()
names = dir(object)
    # add any DynamicClassAttributes to the list of names if object is a class;
# this may result in duplicate entries if, for example, a virtual
# attribute with the same name as a DynamicClassAttribute exists
try:
for base in object.__bases__:
for k, v in base.__dict__.items():
if isinstance(v, types.DynamicClassAttribute):
names.append(k)
except AttributeError:
pass
for key in names:
# First try to get the value via getattr. Some descriptors don't
# like calling their __get__ (see bug #1785), so fall back to
# looking in the __dict__.
try:
value = getattr(object, key)
# handle the duplicate key
if key in processed:
raise AttributeError
except AttributeError:
for base in mro:
if key in base.__dict__:
value = base.__dict__[key]
break
else:
# could be a (currently) missing slot member, or a buggy
# __dir__; discard and move on
continue
if not predicate or predicate(value):
results.append((key, value))
processed.add(key)
results.sort(key=lambda pair: pair[0])
return results
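# Example (doctest-style sketch): members come back sorted by name and can be
# filtered by a predicate -- here, data descriptors on the complex type.
# >>> names = [n for n, v in getmembers(complex, isdatadescriptor)]
# >>> 'imag' in names and 'real' in names
# True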
Attribute = namedtuple('Attribute', 'name kind defining_class object')
def classify_class_attrs(cls):
"""Return list of attribute-descriptor tuples.
For each name in dir(cls), the return list contains a 4-tuple
with these elements:
0. The name (a string).
1. The kind of attribute this is, one of these strings:
'class method' created via classmethod()
'static method' created via staticmethod()
'property' created via property()
'method' any other flavor of method or descriptor
'data' not a method
2. The class which defined this attribute (a class).
3. The object as obtained by calling getattr; if this fails, or if the
resulting object does not live anywhere in the class' mro (including
metaclasses) then the object is looked up in the defining class's
dict (found by walking the mro).
If one of the items in dir(cls) is stored in the metaclass it will now
be discovered and not have None be listed as the class in which it was
defined. Any items whose home class cannot be discovered are skipped.
"""
mro = getmro(cls)
metamro = getmro(type(cls)) # for attributes stored in the metaclass
metamro = tuple([cls for cls in metamro if cls not in (type, object)])
class_bases = (cls,) + mro
all_bases = class_bases + metamro
names = dir(cls)
    # add any DynamicClassAttributes to the list of names;
# this may result in duplicate entries if, for example, a virtual
# attribute with the same name as a DynamicClassAttribute exists.
for base in mro:
for k, v in base.__dict__.items():
if isinstance(v, types.DynamicClassAttribute):
names.append(k)
result = []
processed = set()
for name in names:
# Get the object associated with the name, and where it was defined.
# Normal objects will be looked up with both getattr and directly in
# its class' dict (in case getattr fails [bug #1785], and also to look
# for a docstring).
# For DynamicClassAttributes on the second pass we only look in the
# class's dict.
#
# Getting an obj from the __dict__ sometimes reveals more than
# using getattr. Static and class methods are dramatic examples.
homecls = None
get_obj = None
dict_obj = None
if name not in processed:
try:
if name == '__dict__':
raise Exception("__dict__ is special, don't want the proxy")
get_obj = getattr(cls, name)
except Exception as exc:
pass
else:
homecls = getattr(get_obj, "__objclass__", homecls)
if homecls not in class_bases:
# if the resulting object does not live somewhere in the
# mro, drop it and search the mro manually
homecls = None
last_cls = None
# first look in the classes
for srch_cls in class_bases:
srch_obj = getattr(srch_cls, name, None)
if srch_obj == get_obj:
last_cls = srch_cls
# then check the metaclasses
for srch_cls in metamro:
try:
srch_obj = srch_cls.__getattr__(cls, name)
except AttributeError:
continue
if srch_obj == get_obj:
last_cls = srch_cls
if last_cls is not None:
homecls = last_cls
for base in all_bases:
if name in base.__dict__:
dict_obj = base.__dict__[name]
if homecls not in metamro:
homecls = base
break
if homecls is None:
# unable to locate the attribute anywhere, most likely due to
# buggy custom __dir__; discard and move on
continue
        obj = get_obj if get_obj is not None else dict_obj
# Classify the object or its descriptor.
if isinstance(dict_obj, staticmethod):
kind = "static method"
obj = dict_obj
elif isinstance(dict_obj, classmethod):
kind = "class method"
obj = dict_obj
elif isinstance(dict_obj, property):
kind = "property"
obj = dict_obj
elif isroutine(obj):
kind = "method"
else:
kind = "data"
result.append(Attribute(name, kind, homecls, obj))
processed.add(name)
return result
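# Example (doctest-style sketch): a staticmethod defined in pure Python is
# classified via the staticmethod object found in the class __dict__.
# >>> class Demo:
# ...     @staticmethod
# ...     def helper(): pass
# >>> {a.name: a.kind for a in classify_class_attrs(Demo)}['helper']
# 'static method'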
# ----------------------------------------------------------- class helpers
def getmro(cls):
"Return tuple of base classes (including cls) in method resolution order."
return cls.__mro__
# -------------------------------------------------------- function helpers
def unwrap(func, *, stop=None):
"""Get the object wrapped by *func*.
Follows the chain of :attr:`__wrapped__` attributes returning the last
object in the chain.
*stop* is an optional callback accepting an object in the wrapper chain
as its sole argument that allows the unwrapping to be terminated early if
the callback returns a true value. If the callback never returns a true
value, the last object in the chain is returned as usual. For example,
:func:`signature` uses this to stop unwrapping if any object in the
chain has a ``__signature__`` attribute defined.
:exc:`ValueError` is raised if a cycle is encountered.
"""
if stop is None:
def _is_wrapper(f):
return hasattr(f, '__wrapped__')
else:
def _is_wrapper(f):
return hasattr(f, '__wrapped__') and not stop(f)
f = func # remember the original func for error reporting
memo = {id(f)} # Memoise by id to tolerate non-hashable objects
while _is_wrapper(func):
func = func.__wrapped__
id_func = id(func)
if id_func in memo:
raise ValueError('wrapper loop when unwrapping {!r}'.format(f))
memo.add(id_func)
return func
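# Example (doctest-style sketch): functools.wraps sets __wrapped__, so
# unwrap() recovers the original function from a decorated one.
# >>> import functools
# >>> def base(): pass
# >>> @functools.wraps(base)
# ... def wrapper(): pass
# >>> unwrap(wrapper) is base
# True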
# -------------------------------------------------- source code extraction
def indentsize(line):
"""Return the indent size, in spaces, at the start of a line of text."""
expline = line.expandtabs()
return len(expline) - len(expline.lstrip())
def getdoc(object):
"""Get the documentation string for an object.
All tabs are expanded to spaces. To clean up docstrings that are
    indented to line up with blocks of code, any whitespace that can be
uniformly removed from the second line onwards is removed."""
try:
doc = object.__doc__
except AttributeError:
return None
if not isinstance(doc, str):
return None
return cleandoc(doc)
def cleandoc(doc):
"""Clean up indentation from docstrings.
Any whitespace that can be uniformly removed from the second line
onwards is removed."""
try:
lines = doc.expandtabs().split('\n')
except UnicodeError:
return None
else:
# Find minimum indentation of any non-blank lines after first line.
margin = sys.maxsize
for line in lines[1:]:
content = len(line.lstrip())
if content:
indent = len(line) - content
margin = min(margin, indent)
# Remove indentation.
if lines:
lines[0] = lines[0].lstrip()
if margin < sys.maxsize:
for i in range(1, len(lines)): lines[i] = lines[i][margin:]
# Remove any trailing or leading blank lines.
while lines and not lines[-1]:
lines.pop()
while lines and not lines[0]:
lines.pop(0)
return '\n'.join(lines)
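# Example (doctest-style sketch): the uniform indent of the second and
# following lines is removed; the first line is simply stripped.
# >>> cleandoc("Title\n        indented body")
# 'Title\nindented body'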
def getfile(object):
"""Work out which source or compiled file an object was defined in."""
if ismodule(object):
if hasattr(object, '__file__'):
return object.__file__
raise TypeError('{!r} is a built-in module'.format(object))
if isclass(object):
if hasattr(object, '__module__'):
object = sys.modules.get(object.__module__)
if hasattr(object, '__file__'):
return object.__file__
raise TypeError('{!r} is a built-in class'.format(object))
if ismethod(object):
object = object.__func__
if isfunction(object):
object = object.__code__
if istraceback(object):
object = object.tb_frame
if isframe(object):
object = object.f_code
if iscode(object):
return object.co_filename
raise TypeError('{!r} is not a module, class, method, '
'function, traceback, frame, or code object'.format(object))
ModuleInfo = namedtuple('ModuleInfo', 'name suffix mode module_type')
def getmoduleinfo(path):
"""Get the module name, suffix, mode, and module type for a given file."""
warnings.warn('inspect.getmoduleinfo() is deprecated', DeprecationWarning,
2)
with warnings.catch_warnings():
warnings.simplefilter('ignore', PendingDeprecationWarning)
import imp
filename = os.path.basename(path)
suffixes = [(-len(suffix), suffix, mode, mtype)
for suffix, mode, mtype in imp.get_suffixes()]
suffixes.sort() # try longest suffixes first, in case they overlap
for neglen, suffix, mode, mtype in suffixes:
if filename[neglen:] == suffix:
return ModuleInfo(filename[:neglen], suffix, mode, mtype)
def getmodulename(path):
"""Return the module name for a given file, or None."""
fname = os.path.basename(path)
# Check for paths that look like an actual module file
suffixes = [(-len(suffix), suffix)
for suffix in importlib.machinery.all_suffixes()]
suffixes.sort() # try longest suffixes first, in case they overlap
for neglen, suffix in suffixes:
if fname.endswith(suffix):
return fname[:neglen]
return None
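# Example (doctest-style sketch):
# >>> getmodulename('/usr/lib/python3/inspect.py')
# 'inspect'
# >>> getmodulename('README') is None
# True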
def getsourcefile(object):
"""Return the filename that can be used to locate an object's source.
Return None if no way can be identified to get the source.
"""
filename = getfile(object)
all_bytecode_suffixes = importlib.machinery.DEBUG_BYTECODE_SUFFIXES[:]
all_bytecode_suffixes += importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES[:]
if any(filename.endswith(s) for s in all_bytecode_suffixes):
filename = (os.path.splitext(filename)[0] +
importlib.machinery.SOURCE_SUFFIXES[0])
elif any(filename.endswith(s) for s in
importlib.machinery.EXTENSION_SUFFIXES):
return None
if os.path.exists(filename):
return filename
# only return a non-existent filename if the module has a PEP 302 loader
if getattr(getmodule(object, filename), '__loader__', None) is not None:
return filename
# or it is in the linecache
if filename in linecache.cache:
return filename
def getabsfile(object, _filename=None):
"""Return an absolute path to the source or compiled file for an object.
The idea is for each object to have a unique origin, so this routine
normalizes the result as much as possible."""
if _filename is None:
_filename = getsourcefile(object) or getfile(object)
return os.path.normcase(os.path.abspath(_filename))
modulesbyfile = {}
_filesbymodname = {}
def getmodule(object, _filename=None):
"""Return the module an object was defined in, or None if not found."""
if ismodule(object):
return object
if hasattr(object, '__module__'):
return sys.modules.get(object.__module__)
# Try the filename to modulename cache
if _filename is not None and _filename in modulesbyfile:
return sys.modules.get(modulesbyfile[_filename])
# Try the cache again with the absolute file name
try:
file = getabsfile(object, _filename)
except TypeError:
return None
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
# Update the filename to module name cache and check yet again
# Copy sys.modules in order to cope with changes while iterating
for modname, module in list(sys.modules.items()):
if ismodule(module) and hasattr(module, '__file__'):
f = module.__file__
if f == _filesbymodname.get(modname, None):
# Have already mapped this module, so skip it
continue
_filesbymodname[modname] = f
f = getabsfile(module)
# Always map to the name the module knows itself by
modulesbyfile[f] = modulesbyfile[
os.path.realpath(f)] = module.__name__
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
# Check the main module
main = sys.modules['__main__']
if not hasattr(object, '__name__'):
return None
if hasattr(main, object.__name__):
mainobject = getattr(main, object.__name__)
if mainobject is object:
return main
# Check builtins
builtin = sys.modules['builtins']
if hasattr(builtin, object.__name__):
builtinobject = getattr(builtin, object.__name__)
if builtinobject is object:
return builtin
def findsource(object):
"""Return the entire source file and starting line number for an object.
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a list of all the lines
in the file and the line number indexes a line in that list. An OSError
is raised if the source code cannot be retrieved."""
file = getfile(object)
sourcefile = getsourcefile(object)
if not sourcefile and file[:1] + file[-1:] != '<>':
raise OSError('source code not available')
file = sourcefile if sourcefile else file
module = getmodule(object, file)
if module:
lines = linecache.getlines(file, module.__dict__)
else:
lines = linecache.getlines(file)
if not lines:
raise OSError('could not get source code')
if ismodule(object):
return lines, 0
if isclass(object):
name = object.__name__
pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
# make some effort to find the best matching class definition:
# use the one with the least indentation, which is the one
# that's most probably not inside a function definition.
candidates = []
for i in range(len(lines)):
match = pat.match(lines[i])
if match:
# if it's at toplevel, it's already the best one
if lines[i][0] == 'c':
return lines, i
# else add whitespace to candidate list
candidates.append((match.group(1), i))
if candidates:
# this will sort by whitespace, and by line number,
# less whitespace first
candidates.sort()
return lines, candidates[0][1]
else:
raise OSError('could not find class definition')
if ismethod(object):
object = object.__func__
if isfunction(object):
object = object.__code__
if istraceback(object):
object = object.tb_frame
if isframe(object):
object = object.f_code
if iscode(object):
if not hasattr(object, 'co_firstlineno'):
raise OSError('could not find function definition')
lnum = object.co_firstlineno - 1
pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
while lnum > 0:
if pat.match(lines[lnum]): break
lnum = lnum - 1
return lines, lnum
raise OSError('could not find code object')
def getcomments(object):
"""Get lines of comments immediately preceding an object's source code.
Returns None when source can't be found.
"""
try:
lines, lnum = findsource(object)
except (OSError, TypeError):
return None
if ismodule(object):
# Look for a comment block at the top of the file.
start = 0
if lines and lines[0][:2] == '#!': start = 1
while start < len(lines) and lines[start].strip() in ('', '#'):
start = start + 1
if start < len(lines) and lines[start][:1] == '#':
comments = []
end = start
while end < len(lines) and lines[end][:1] == '#':
comments.append(lines[end].expandtabs())
end = end + 1
return ''.join(comments)
# Look for a preceding block of comments at the same indentation.
elif lnum > 0:
indent = indentsize(lines[lnum])
end = lnum - 1
if end >= 0 and lines[end].lstrip()[:1] == '#' and \
indentsize(lines[end]) == indent:
comments = [lines[end].expandtabs().lstrip()]
if end > 0:
end = end - 1
comment = lines[end].expandtabs().lstrip()
while comment[:1] == '#' and indentsize(lines[end]) == indent:
comments[:0] = [comment]
end = end - 1
if end < 0: break
comment = lines[end].expandtabs().lstrip()
while comments and comments[0].strip() == '#':
comments[:1] = []
while comments and comments[-1].strip() == '#':
comments[-1:] = []
return ''.join(comments)
class EndOfBlock(Exception): pass
class BlockFinder:
"""Provide a tokeneater() method to detect the end of a code block."""
def __init__(self):
self.indent = 0
self.islambda = False
self.started = False
self.passline = False
self.last = 1
def tokeneater(self, type, token, srowcol, erowcol, line):
if not self.started:
# look for the first "def", "class" or "lambda"
if token in ("def", "class", "lambda"):
if token == "lambda":
self.islambda = True
self.started = True
self.passline = True # skip to the end of the line
elif type == tokenize.NEWLINE:
self.passline = False # stop skipping when a NEWLINE is seen
self.last = srowcol[0]
if self.islambda: # lambdas always end at the first NEWLINE
raise EndOfBlock
elif self.passline:
pass
elif type == tokenize.INDENT:
self.indent = self.indent + 1
self.passline = True
elif type == tokenize.DEDENT:
self.indent = self.indent - 1
# the end of matching indent/dedent pairs end a block
# (note that this only works for "def"/"class" blocks,
# not e.g. for "if: else:" or "try: finally:" blocks)
if self.indent <= 0:
raise EndOfBlock
elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
            # any other token on the same indentation level ends the previous
# block as well, except the pseudo-tokens COMMENT and NL.
raise EndOfBlock
def getblock(lines):
"""Extract the block of code at the top of the given list of lines."""
blockfinder = BlockFinder()
try:
tokens = tokenize.generate_tokens(iter(lines).__next__)
for _token in tokens:
blockfinder.tokeneater(*_token)
except (EndOfBlock, IndentationError):
pass
return lines[:blockfinder.last]
def getsourcelines(object):
"""Return a list of source lines and starting line number for an object.
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a list of the lines
corresponding to the object and the line number indicates where in the
original source file the first line of code was found. An OSError is
raised if the source code cannot be retrieved."""
lines, lnum = findsource(object)
if ismodule(object): return lines, 0
else: return getblock(lines[lnum:]), lnum + 1
def getsource(object):
"""Return the text of the source code for an object.
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a single string. An
OSError is raised if the source code cannot be retrieved."""
lines, lnum = getsourcelines(object)
return ''.join(lines)
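# Example (doctest-style sketch; assumes this module was loaded from its .py
# source, so linecache can find the file):
# >>> getsource(indentsize).startswith('def indentsize')
# True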
# --------------------------------------------------- class tree extraction
def walktree(classes, children, parent):
"""Recursive helper function for getclasstree()."""
results = []
classes.sort(key=attrgetter('__module__', '__name__'))
for c in classes:
results.append((c, c.__bases__))
if c in children:
results.append(walktree(children[c], children, c))
return results
def getclasstree(classes, unique=False):
"""Arrange the given list of classes into a hierarchy of nested lists.
Where a nested list appears, it contains classes derived from the class
whose entry immediately precedes the list. Each entry is a 2-tuple
containing a class and a tuple of its base classes. If the 'unique'
argument is true, exactly one entry appears in the returned structure
for each class in the given list. Otherwise, classes using multiple
inheritance and their descendants will appear multiple times."""
children = {}
roots = []
for c in classes:
if c.__bases__:
for parent in c.__bases__:
if not parent in children:
children[parent] = []
if c not in children[parent]:
children[parent].append(c)
if unique and parent in classes: break
elif c not in roots:
roots.append(c)
for parent in children:
if parent not in classes:
roots.append(parent)
return walktree(roots, children, None)
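# Example (doctest-style sketch): derived classes appear in a nested list
# immediately after their base class's entry.
# >>> class A: pass
# >>> class B(A): pass
# >>> tree = getclasstree([A, B])
# >>> tree[0] == (object, ()) and tree[1][0] == (A, (object,))
# True
# >>> tree[1][1][0] == (B, (A,))
# True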
# ------------------------------------------------ argument list extraction
Arguments = namedtuple('Arguments', 'args, varargs, varkw')
def getargs(co):
"""Get information about the arguments accepted by a code object.
Three things are returned: (args, varargs, varkw), where
'args' is the list of argument names. Keyword-only arguments are
appended. 'varargs' and 'varkw' are the names of the * and **
arguments or None."""
args, varargs, kwonlyargs, varkw = _getfullargs(co)
return Arguments(args + kwonlyargs, varargs, varkw)
def _getfullargs(co):
"""Get information about the arguments accepted by a code object.
Four things are returned: (args, varargs, kwonlyargs, varkw), where
'args' and 'kwonlyargs' are lists of argument names, and 'varargs'
and 'varkw' are the names of the * and ** arguments or None."""
if not iscode(co):
raise TypeError('{!r} is not a code object'.format(co))
nargs = co.co_argcount
names = co.co_varnames
nkwargs = co.co_kwonlyargcount
args = list(names[:nargs])
kwonlyargs = list(names[nargs:nargs+nkwargs])
nargs += nkwargs
varargs = None
if co.co_flags & CO_VARARGS:
varargs = co.co_varnames[nargs]
nargs = nargs + 1
varkw = None
if co.co_flags & CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
return args, varargs, kwonlyargs, varkw
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
def getargspec(func):
"""Get the names and default values of a function's arguments.
A tuple of four things is returned: (args, varargs, varkw, defaults).
'args' is a list of the argument names.
'args' will include keyword-only argument names.
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments.
    Use the getfullargspec() API for Python-3000 code, as annotations
    and keyword-only arguments are supported. getargspec() will raise ValueError
    if the func has either annotations or keyword-only arguments.
"""
args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = \
getfullargspec(func)
if kwonlyargs or ann:
raise ValueError("Function has keyword-only arguments or annotations"
", use getfullargspec() API which can support them")
return ArgSpec(args, varargs, varkw, defaults)
FullArgSpec = namedtuple('FullArgSpec',
'args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations')
def getfullargspec(func):
"""Get the names and default values of a callable object's arguments.
A tuple of seven things is returned:
    (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations).
'args' is a list of the argument names.
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments.
'kwonlyargs' is a list of keyword-only argument names.
'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults.
'annotations' is a dictionary mapping argument names to annotations.
The first four items in the tuple correspond to getargspec().
"""
try:
# Re: `skip_bound_arg=False`
#
# There is a notable difference in behaviour between getfullargspec
# and Signature: the former always returns 'self' parameter for bound
# methods, whereas the Signature always shows the actual calling
# signature of the passed object.
#
# To simulate this behaviour, we "unbind" bound methods, to trick
# inspect.signature to always return their first parameter ("self",
# usually)
# Re: `follow_wrapper_chains=False`
#
# getfullargspec() historically ignored __wrapped__ attributes,
# so we ensure that remains the case in 3.3+
sig = _signature_internal(func,
follow_wrapper_chains=False,
skip_bound_arg=False)
except Exception as ex:
# Most of the times 'signature' will raise ValueError.
# But, it can also raise AttributeError, and, maybe something
# else. So to be fully backwards compatible, we catch all
# possible exceptions here, and reraise a TypeError.
raise TypeError('unsupported callable') from ex
args = []
varargs = None
varkw = None
kwonlyargs = []
    defaults = ()
    annotations = {}
kwdefaults = {}
if sig.return_annotation is not sig.empty:
annotations['return'] = sig.return_annotation
for param in sig.parameters.values():
kind = param.kind
name = param.name
if kind is _POSITIONAL_ONLY:
args.append(name)
elif kind is _POSITIONAL_OR_KEYWORD:
args.append(name)
if param.default is not param.empty:
defaults += (param.default,)
elif kind is _VAR_POSITIONAL:
varargs = name
elif kind is _KEYWORD_ONLY:
kwonlyargs.append(name)
if param.default is not param.empty:
kwdefaults[name] = param.default
elif kind is _VAR_KEYWORD:
varkw = name
if param.annotation is not param.empty:
annotations[name] = param.annotation
if not kwdefaults:
# compatibility with 'func.__kwdefaults__'
kwdefaults = None
if not defaults:
# compatibility with 'func.__defaults__'
defaults = None
return FullArgSpec(args, varargs, varkw, defaults,
kwonlyargs, kwdefaults, annotations)
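# Example (doctest-style sketch):
# >>> def f(a, b=1, *args, c, **kw): pass
# >>> spec = getfullargspec(f)
# >>> (spec.args, spec.varargs, spec.defaults, spec.kwonlyargs, spec.varkw)
# (['a', 'b'], 'args', (1,), ['c'], 'kw')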
ArgInfo = namedtuple('ArgInfo', 'args varargs keywords locals')
def getargvalues(frame):
"""Get information about arguments passed into a particular frame.
A tuple of four things is returned: (args, varargs, varkw, locals).
'args' is a list of the argument names.
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'locals' is the locals dictionary of the given frame."""
args, varargs, varkw = getargs(frame.f_code)
return ArgInfo(args, varargs, varkw, frame.f_locals)
def formatannotation(annotation, base_module=None):
if isinstance(annotation, type):
if annotation.__module__ in ('builtins', base_module):
return annotation.__name__
return annotation.__module__+'.'+annotation.__name__
return repr(annotation)
def formatannotationrelativeto(object):
module = getattr(object, '__module__', None)
def _formatannotation(annotation):
return formatannotation(annotation, module)
return _formatannotation
def formatargspec(args, varargs=None, varkw=None, defaults=None,
kwonlyargs=(), kwonlydefaults={}, annotations={},
formatarg=str,
formatvarargs=lambda name: '*' + name,
formatvarkw=lambda name: '**' + name,
formatvalue=lambda value: '=' + repr(value),
formatreturns=lambda text: ' -> ' + text,
formatannotation=formatannotation):
"""Format an argument spec from the values returned by getargspec
or getfullargspec.
    The first seven arguments are (args, varargs, varkw, defaults,
    kwonlyargs, kwonlydefaults, annotations). The remaining arguments
    are the corresponding optional formatting functions that are called
    to turn names, values and annotations into strings."""
def formatargandannotation(arg):
result = formatarg(arg)
if arg in annotations:
result += ': ' + formatannotation(annotations[arg])
return result
specs = []
if defaults:
firstdefault = len(args) - len(defaults)
for i, arg in enumerate(args):
spec = formatargandannotation(arg)
if defaults and i >= firstdefault:
spec = spec + formatvalue(defaults[i - firstdefault])
specs.append(spec)
if varargs is not None:
specs.append(formatvarargs(formatargandannotation(varargs)))
else:
if kwonlyargs:
specs.append('*')
if kwonlyargs:
for kwonlyarg in kwonlyargs:
spec = formatargandannotation(kwonlyarg)
if kwonlydefaults and kwonlyarg in kwonlydefaults:
spec += formatvalue(kwonlydefaults[kwonlyarg])
specs.append(spec)
if varkw is not None:
specs.append(formatvarkw(formatargandannotation(varkw)))
result = '(' + ', '.join(specs) + ')'
if 'return' in annotations:
result += formatreturns(formatannotation(annotations['return']))
return result
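# Example (doctest-style sketch): round-trips a spec from getfullargspec
# back into source-like text.
# >>> def f(a, b=1, *args, c, **kw): pass
# >>> formatargspec(*getfullargspec(f))
# '(a, b=1, *args, c, **kw)'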
def formatargvalues(args, varargs, varkw, locals,
formatarg=str,
formatvarargs=lambda name: '*' + name,
formatvarkw=lambda name: '**' + name,
formatvalue=lambda value: '=' + repr(value)):
"""Format an argument spec from the 4 values returned by getargvalues.
The first four arguments are (args, varargs, varkw, locals). The
next four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings."""
def convert(name, locals=locals,
formatarg=formatarg, formatvalue=formatvalue):
return formatarg(name) + formatvalue(locals[name])
specs = []
for i in range(len(args)):
specs.append(convert(args[i]))
if varargs:
specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
if varkw:
specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
return '(' + ', '.join(specs) + ')'
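# Example (doctest-style sketch): a function can format its own call
# arguments from its current frame.
# >>> def probe(a, b=2):
# ...     return formatargvalues(*getargvalues(currentframe()))
# >>> probe(1)
# '(a=1, b=2)'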
def _missing_arguments(f_name, argnames, pos, values):
names = [repr(name) for name in argnames if name not in values]
missing = len(names)
if missing == 1:
s = names[0]
elif missing == 2:
s = "{} and {}".format(*names)
else:
tail = ", {} and {}".format(names[-2:])
del names[-2:]
s = ", ".join(names) + tail
raise TypeError("%s() missing %i required %s argument%s: %s" %
(f_name, missing,
"positional" if pos else "keyword-only",
"" if missing == 1 else "s", s))
def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
atleast = len(args) - defcount
kwonly_given = len([arg for arg in kwonly if arg in values])
if varargs:
plural = atleast != 1
sig = "at least %d" % (atleast,)
elif defcount:
plural = True
sig = "from %d to %d" % (atleast, len(args))
else:
plural = len(args) != 1
sig = str(len(args))
kwonly_sig = ""
if kwonly_given:
msg = " positional argument%s (and %d keyword-only argument%s)"
kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
"s" if kwonly_given != 1 else ""))
raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
(f_name, sig, "s" if plural else "", given, kwonly_sig,
"was" if given == 1 and not kwonly_given else "were"))
def getcallargs(*func_and_positional, **named):
"""Get the mapping of arguments to values.
A dict is returned, with keys the function argument names (including the
names of the * and ** arguments, if any), and values the respective bound
values from 'positional' and 'named'."""
func = func_and_positional[0]
positional = func_and_positional[1:]
spec = getfullargspec(func)
args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = spec
f_name = func.__name__
arg2value = {}
if ismethod(func) and func.__self__ is not None:
# implicit 'self' (or 'cls' for classmethods) argument
positional = (func.__self__,) + positional
num_pos = len(positional)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
n = min(num_pos, num_args)
for i in range(n):
arg2value[args[i]] = positional[i]
if varargs:
arg2value[varargs] = tuple(positional[n:])
possible_kwargs = set(args + kwonlyargs)
if varkw:
arg2value[varkw] = {}
for kw, value in named.items():
if kw not in possible_kwargs:
if not varkw:
raise TypeError("%s() got an unexpected keyword argument %r" %
(f_name, kw))
arg2value[varkw][kw] = value
continue
if kw in arg2value:
raise TypeError("%s() got multiple values for argument %r" %
(f_name, kw))
arg2value[kw] = value
if num_pos > num_args and not varargs:
_too_many(f_name, args, kwonlyargs, varargs, num_defaults,
num_pos, arg2value)
if num_pos < num_args:
req = args[:num_args - num_defaults]
for arg in req:
if arg not in arg2value:
_missing_arguments(f_name, req, True, arg2value)
for i, arg in enumerate(args[num_args - num_defaults:]):
if arg not in arg2value:
arg2value[arg] = defaults[i]
missing = 0
for kwarg in kwonlyargs:
if kwarg not in arg2value:
if kwarg in kwonlydefaults:
arg2value[kwarg] = kwonlydefaults[kwarg]
else:
missing += 1
if missing:
_missing_arguments(f_name, kwonlyargs, False, arg2value)
return arg2value
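# Example (doctest-style sketch): defaults are filled in and extra keywords
# land in the ** mapping.
# >>> def g(a, b=2, *rest, **kw): pass
# >>> getcallargs(g, 1, x=9) == {'a': 1, 'b': 2, 'rest': (), 'kw': {'x': 9}}
# True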
ClosureVars = namedtuple('ClosureVars', 'nonlocals globals builtins unbound')
def getclosurevars(func):
"""
Get the mapping of free variables to their current values.
Returns a named tuple of dicts mapping the current nonlocal, global
and builtin references as seen by the body of the function. A final
set of unbound names that could not be resolved is also provided.
"""
if ismethod(func):
func = func.__func__
if not isfunction(func):
raise TypeError("'{!r}' is not a Python function".format(func))
code = func.__code__
# Nonlocal references are named in co_freevars and resolved
# by looking them up in __closure__ by positional index
if func.__closure__ is None:
nonlocal_vars = {}
else:
nonlocal_vars = {
var : cell.cell_contents
for var, cell in zip(code.co_freevars, func.__closure__)
}
# Global and builtin references are named in co_names and resolved
# by looking them up in __globals__ or __builtins__
global_ns = func.__globals__
builtin_ns = global_ns.get("__builtins__", builtins.__dict__)
if ismodule(builtin_ns):
builtin_ns = builtin_ns.__dict__
global_vars = {}
builtin_vars = {}
unbound_names = set()
for name in code.co_names:
if name in ("None", "True", "False"):
# Because these used to be builtins instead of keywords, they
# may still show up as name references. We ignore them.
continue
try:
global_vars[name] = global_ns[name]
except KeyError:
try:
builtin_vars[name] = builtin_ns[name]
except KeyError:
unbound_names.add(name)
return ClosureVars(nonlocal_vars, global_vars,
builtin_vars, unbound_names)
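# Example (doctest-style sketch):
# >>> def outer():
# ...     x = 1
# ...     def inner():
# ...         return x + len([])
# ...     return inner
# >>> cv = getclosurevars(outer())
# >>> cv.nonlocals == {'x': 1} and 'len' in cv.builtins
# True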
# -------------------------------------------------- stack frame extraction
Traceback = namedtuple('Traceback', 'filename lineno function code_context index')
def getframeinfo(frame, context=1):
"""Get information about a frame or traceback object.
A tuple of five things is returned: the filename, the line number of
the current line, the function name, a list of lines of context from
the source code, and the index of the current line within that list.
The optional second argument specifies the number of lines of context
to return, which are centered around the current line."""
if istraceback(frame):
lineno = frame.tb_lineno
frame = frame.tb_frame
else:
lineno = frame.f_lineno
if not isframe(frame):
raise TypeError('{!r} is not a frame or traceback object'.format(frame))
filename = getsourcefile(frame) or getfile(frame)
if context > 0:
start = lineno - 1 - context//2
try:
lines, lnum = findsource(frame)
except OSError:
lines = index = None
else:
start = max(start, 1)
start = max(0, min(start, len(lines) - context))
lines = lines[start:start+context]
index = lineno - 1 - start
else:
lines = index = None
return Traceback(filename, lineno, frame.f_code.co_name, lines, index)
def getlineno(frame):
"""Get the line number from a frame object, allowing for optimization."""
# FrameType.f_lineno is now a descriptor that grovels co_lnotab
return frame.f_lineno
def getouterframes(frame, context=1):
"""Get a list of records for a frame and all higher (calling) frames.
Each record contains a frame object, filename, line number, function
name, a list of lines of context, and index within the context."""
framelist = []
while frame:
framelist.append((frame,) + getframeinfo(frame, context))
frame = frame.f_back
return framelist
def getinnerframes(tb, context=1):
"""Get a list of records for a traceback's frame and all lower frames.
Each record contains a frame object, filename, line number, function
name, a list of lines of context, and index within the context."""
framelist = []
while tb:
framelist.append((tb.tb_frame,) + getframeinfo(tb, context))
tb = tb.tb_next
return framelist
def currentframe():
"""Return the frame of the caller or None if this is not possible."""
return sys._getframe(1) if hasattr(sys, "_getframe") else None
def stack(context=1):
"""Return a list of records for the stack above the caller's frame."""
return getouterframes(sys._getframe(1), context)
def trace(context=1):
"""Return a list of records for the stack below the current exception."""
return getinnerframes(sys.exc_info()[2], context)
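# Example (doctest-style sketch): a function can report its own name through
# its frame object.
# >>> def where():
# ...     return getframeinfo(currentframe()).function
# >>> where()
# 'where'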
# ------------------------------------------------ static version of getattr
_sentinel = object()
def _static_getmro(klass):
return type.__dict__['__mro__'].__get__(klass)
def _check_instance(obj, attr):
instance_dict = {}
try:
instance_dict = object.__getattribute__(obj, "__dict__")
except AttributeError:
pass
return dict.get(instance_dict, attr, _sentinel)
def _check_class(klass, attr):
for entry in _static_getmro(klass):
if _shadowed_dict(type(entry)) is _sentinel:
try:
return entry.__dict__[attr]
except KeyError:
pass
return _sentinel
def _is_type(obj):
try:
_static_getmro(obj)
except TypeError:
return False
return True
def _shadowed_dict(klass):
dict_attr = type.__dict__["__dict__"]
for entry in _static_getmro(klass):
try:
class_dict = dict_attr.__get__(entry)["__dict__"]
except KeyError:
pass
else:
if not (type(class_dict) is types.GetSetDescriptorType and
class_dict.__name__ == "__dict__" and
class_dict.__objclass__ is entry):
return class_dict
return _sentinel
def getattr_static(obj, attr, default=_sentinel):
"""Retrieve attributes without triggering dynamic lookup via the
descriptor protocol, __getattr__ or __getattribute__.
Note: this function may not be able to retrieve all attributes
that getattr can fetch (like dynamically created attributes)
and may find attributes that getattr can't (like descriptors
that raise AttributeError). It can also return descriptor objects
instead of instance members in some cases. See the
documentation for details.
"""
instance_result = _sentinel
if not _is_type(obj):
klass = type(obj)
dict_attr = _shadowed_dict(klass)
if (dict_attr is _sentinel or
type(dict_attr) is types.MemberDescriptorType):
instance_result = _check_instance(obj, attr)
else:
klass = obj
klass_result = _check_class(klass, attr)
if instance_result is not _sentinel and klass_result is not _sentinel:
if (_check_class(type(klass_result), '__get__') is not _sentinel and
_check_class(type(klass_result), '__set__') is not _sentinel):
return klass_result
if instance_result is not _sentinel:
return instance_result
if klass_result is not _sentinel:
return klass_result
if obj is klass:
# for types we check the metaclass too
for entry in _static_getmro(type(klass)):
if _shadowed_dict(type(entry)) is _sentinel:
try:
return entry.__dict__[attr]
except KeyError:
pass
if default is not _sentinel:
return default
raise AttributeError(attr)
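# Example (doctest-style sketch): the property object itself is returned and
# its getter is never invoked.
# >>> class P:
# ...     @property
# ...     def x(self):
# ...         raise RuntimeError('never called')
# >>> getattr_static(P(), 'x') is P.__dict__['x']
# True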
# ------------------------------------------------ generator introspection
GEN_CREATED = 'GEN_CREATED'
GEN_RUNNING = 'GEN_RUNNING'
GEN_SUSPENDED = 'GEN_SUSPENDED'
GEN_CLOSED = 'GEN_CLOSED'
def getgeneratorstate(generator):
"""Get current state of a generator-iterator.
Possible states are:
GEN_CREATED: Waiting to start execution.
GEN_RUNNING: Currently being executed by the interpreter.
GEN_SUSPENDED: Currently suspended at a yield expression.
GEN_CLOSED: Execution has completed.
"""
if generator.gi_running:
return GEN_RUNNING
if generator.gi_frame is None:
return GEN_CLOSED
if generator.gi_frame.f_lasti == -1:
return GEN_CREATED
return GEN_SUSPENDED
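# Example (doctest-style sketch):
# >>> def gen(): yield 1
# >>> g = gen()
# >>> getgeneratorstate(g)
# 'GEN_CREATED'
# >>> next(g)
# 1
# >>> getgeneratorstate(g)
# 'GEN_SUSPENDED'
# >>> g.close()
# >>> getgeneratorstate(g)
# 'GEN_CLOSED'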
def getgeneratorlocals(generator):
"""
Get the mapping of generator local variables to their current values.
A dict is returned, with the keys the local variable names and values the
bound values."""
if not isgenerator(generator):
raise TypeError("'{!r}' is not a Python generator".format(generator))
frame = getattr(generator, "gi_frame", None)
if frame is not None:
return generator.gi_frame.f_locals
else:
return {}
###############################################################################
### Function Signature Object (PEP 362)
###############################################################################
_WrapperDescriptor = type(type.__call__)
_MethodWrapper = type(all.__call__)
_ClassMethodWrapper = type(int.__dict__['from_bytes'])
_NonUserDefinedCallables = (_WrapperDescriptor,
_MethodWrapper,
_ClassMethodWrapper,
types.BuiltinFunctionType)
def _signature_get_user_defined_method(cls, method_name):
try:
meth = getattr(cls, method_name)
except AttributeError:
return
else:
if not isinstance(meth, _NonUserDefinedCallables):
# Once '__signature__' will be added to 'C'-level
# callables, this check won't be necessary
return meth
def _signature_get_partial(wrapped_sig, partial, extra_args=()):
# Internal helper to calculate how 'wrapped_sig' signature will
# look like after applying a 'functools.partial' object (or alike)
# on it.
new_params = OrderedDict(wrapped_sig.parameters.items())
partial_args = partial.args or ()
partial_keywords = partial.keywords or {}
if extra_args:
partial_args = extra_args + partial_args
try:
ba = wrapped_sig.bind_partial(*partial_args, **partial_keywords)
except TypeError as ex:
msg = 'partial object {!r} has incorrect arguments'.format(partial)
raise ValueError(msg) from ex
for arg_name, arg_value in ba.arguments.items():
param = new_params[arg_name]
if arg_name in partial_keywords:
# We set a new default value, because the following code
# is correct:
#
# >>> def foo(a): print(a)
# >>> print(partial(partial(foo, a=10), a=20)())
# 20
# >>> print(partial(partial(foo, a=10), a=20)(a=30))
# 30
#
# So, with 'partial' objects, passing a keyword argument is
# like setting a new default value for the corresponding
# parameter
#
# We also mark this parameter with '_partial_kwarg'
# flag. Later, in '_bind', the 'default' value of this
# parameter will be added to 'kwargs', to simulate
# the 'functools.partial' real call.
new_params[arg_name] = param.replace(default=arg_value,
_partial_kwarg=True)
elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
not param._partial_kwarg):
new_params.pop(arg_name)
return wrapped_sig.replace(parameters=new_params.values())
def _signature_bound_method(sig):
# Internal helper to transform signatures for unbound
# functions to bound methods
params = tuple(sig.parameters.values())
if not params or params[0].kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
raise ValueError('invalid method signature')
kind = params[0].kind
if kind in (_POSITIONAL_OR_KEYWORD, _POSITIONAL_ONLY):
# Drop first parameter:
# '(p1, p2[, ...])' -> '(p2[, ...])'
params = params[1:]
else:
if kind is not _VAR_POSITIONAL:
# Unless we add a new parameter type we never
# get here
raise ValueError('invalid argument type')
# It's a var-positional parameter.
# Do nothing. '(*args[, ...])' -> '(*args[, ...])'
return sig.replace(parameters=params)
def _signature_is_builtin(obj):
# Internal helper to test if `obj` is a callable that might
# support Argument Clinic's __text_signature__ protocol.
return (isbuiltin(obj) or
ismethoddescriptor(obj) or
isinstance(obj, _NonUserDefinedCallables) or
# Can't test 'isinstance(type)' here, as it would
# also be True for regular python classes
obj in (type, object))
def _signature_is_functionlike(obj):
# Internal helper to test if `obj` is a duck type of FunctionType.
# A good example of such objects are functions compiled with
# Cython, which have all attributes that a pure Python function
# would have, but have their code statically compiled.
if not callable(obj) or isclass(obj):
# All function-like objects are obviously callables,
# and not classes.
return False
name = getattr(obj, '__name__', None)
code = getattr(obj, '__code__', None)
defaults = getattr(obj, '__defaults__', _void) # Important to use _void ...
kwdefaults = getattr(obj, '__kwdefaults__', _void) # ... and not None here
annotations = getattr(obj, '__annotations__', None)
return (isinstance(code, types.CodeType) and
isinstance(name, str) and
(defaults is None or isinstance(defaults, tuple)) and
(kwdefaults is None or isinstance(kwdefaults, dict)) and
isinstance(annotations, dict))
def _signature_get_bound_param(spec):
# Internal helper to get first parameter name from a
# __text_signature__ of a builtin method, which should
# be in the following format: '($param1, ...)'.
# Assumptions are that the first argument won't have
# a default value or an annotation.
assert spec.startswith('($')
pos = spec.find(',')
if pos == -1:
pos = spec.find(')')
cpos = spec.find(':')
assert cpos == -1 or cpos > pos
cpos = spec.find('=')
assert cpos == -1 or cpos > pos
return spec[2:pos]
def _signature_strip_non_python_syntax(signature):
"""
Takes a signature in Argument Clinic's extended signature format.
Returns a tuple of three things:
* that signature re-rendered in standard Python syntax,
* the index of the "self" parameter (generally 0), or None if
the function does not have a "self" parameter, and
* the index of the last "positional only" parameter,
or None if the signature has no positional-only parameters.
"""
if not signature:
return signature, None, None
self_parameter = None
last_positional_only = None
lines = [l.encode('ascii') for l in signature.split('\n')]
generator = iter(lines).__next__
token_stream = tokenize.tokenize(generator)
delayed_comma = False
skip_next_comma = False
text = []
add = text.append
current_parameter = 0
OP = token.OP
ERRORTOKEN = token.ERRORTOKEN
# token stream always starts with ENCODING token, skip it
t = next(token_stream)
assert t.type == tokenize.ENCODING
for t in token_stream:
type, string = t.type, t.string
if type == OP:
if string == ',':
if skip_next_comma:
skip_next_comma = False
else:
assert not delayed_comma
delayed_comma = True
current_parameter += 1
continue
if string == '/':
assert not skip_next_comma
assert last_positional_only is None
skip_next_comma = True
last_positional_only = current_parameter - 1
continue
if (type == ERRORTOKEN) and (string == '$'):
assert self_parameter is None
self_parameter = current_parameter
continue
if delayed_comma:
delayed_comma = False
if not ((type == OP) and (string == ')')):
add(', ')
add(string)
if (string == ','):
add(' ')
clean_signature = ''.join(text)
return clean_signature, self_parameter, last_positional_only
def _signature_fromstr(cls, obj, s, skip_bound_arg=True):
# Internal helper to parse content of '__text_signature__'
# and return a Signature based on it
Parameter = cls._parameter_cls
clean_signature, self_parameter, last_positional_only = \
_signature_strip_non_python_syntax(s)
program = "def foo" + clean_signature + ": pass"
try:
module = ast.parse(program)
except SyntaxError:
module = None
if not isinstance(module, ast.Module):
raise ValueError("{!r} builtin has invalid signature".format(obj))
f = module.body[0]
parameters = []
empty = Parameter.empty
invalid = object()
module = None
module_dict = {}
module_name = getattr(obj, '__module__', None)
if module_name:
module = sys.modules.get(module_name, None)
if module:
module_dict = module.__dict__
sys_module_dict = sys.modules
def parse_name(node):
assert isinstance(node, ast.arg)
        if node.annotation is not None:
raise ValueError("Annotations are not currently supported")
return node.arg
def wrap_value(s):
try:
value = eval(s, module_dict)
except NameError:
try:
value = eval(s, sys_module_dict)
except NameError:
raise RuntimeError()
if isinstance(value, str):
return ast.Str(value)
if isinstance(value, (int, float)):
return ast.Num(value)
if isinstance(value, bytes):
return ast.Bytes(value)
if value in (True, False, None):
return ast.NameConstant(value)
raise RuntimeError()
class RewriteSymbolics(ast.NodeTransformer):
def visit_Attribute(self, node):
a = []
n = node
while isinstance(n, ast.Attribute):
a.append(n.attr)
n = n.value
if not isinstance(n, ast.Name):
raise RuntimeError()
a.append(n.id)
value = ".".join(reversed(a))
return wrap_value(value)
def visit_Name(self, node):
if not isinstance(node.ctx, ast.Load):
raise ValueError()
return wrap_value(node.id)
def p(name_node, default_node, default=empty):
name = parse_name(name_node)
if name is invalid:
return None
if default_node and default_node is not _empty:
try:
default_node = RewriteSymbolics().visit(default_node)
o = ast.literal_eval(default_node)
except ValueError:
o = invalid
if o is invalid:
return None
default = o if o is not invalid else default
parameters.append(Parameter(name, kind, default=default, annotation=empty))
# non-keyword-only parameters
args = reversed(f.args.args)
defaults = reversed(f.args.defaults)
iter = itertools.zip_longest(args, defaults, fillvalue=None)
if last_positional_only is not None:
kind = Parameter.POSITIONAL_ONLY
else:
kind = Parameter.POSITIONAL_OR_KEYWORD
for i, (name, default) in enumerate(reversed(list(iter))):
p(name, default)
if i == last_positional_only:
kind = Parameter.POSITIONAL_OR_KEYWORD
# *args
if f.args.vararg:
kind = Parameter.VAR_POSITIONAL
p(f.args.vararg, empty)
# keyword-only arguments
kind = Parameter.KEYWORD_ONLY
for name, default in zip(f.args.kwonlyargs, f.args.kw_defaults):
p(name, default)
# **kwargs
if f.args.kwarg:
kind = Parameter.VAR_KEYWORD
p(f.args.kwarg, empty)
if self_parameter is not None:
# Possibly strip the bound argument:
# - We *always* strip first bound argument if
# it is a module.
# - We don't strip first bound argument if
# skip_bound_arg is False.
assert parameters
_self = getattr(obj, '__self__', None)
self_isbound = _self is not None
self_ismodule = ismodule(_self)
if self_isbound and (self_ismodule or skip_bound_arg):
parameters.pop(0)
else:
# for builtins, self parameter is always positional-only!
p = parameters[0].replace(kind=Parameter.POSITIONAL_ONLY)
parameters[0] = p
return cls(parameters, return_annotation=cls.empty)
def _signature_from_builtin(cls, func, skip_bound_arg=True):
# Internal helper function to get signature for
# builtin callables
if not _signature_is_builtin(func):
raise TypeError("{!r} is not a Python builtin "
"function".format(func))
s = getattr(func, "__text_signature__", None)
if not s:
raise ValueError("no signature found for builtin {!r}".format(func))
return _signature_fromstr(cls, func, s, skip_bound_arg)
def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True):
if not callable(obj):
raise TypeError('{!r} is not a callable object'.format(obj))
if isinstance(obj, types.MethodType):
# In this case we skip the first parameter of the underlying
# function (usually `self` or `cls`).
sig = _signature_internal(obj.__func__,
follow_wrapper_chains,
skip_bound_arg)
if skip_bound_arg:
return _signature_bound_method(sig)
else:
return sig
# Was this function wrapped by a decorator?
if follow_wrapper_chains:
obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__")))
try:
sig = obj.__signature__
except AttributeError:
pass
else:
if sig is not None:
return sig
try:
partialmethod = obj._partialmethod
except AttributeError:
pass
else:
if isinstance(partialmethod, functools.partialmethod):
# Unbound partialmethod (see functools.partialmethod)
# This means, that we need to calculate the signature
# as if it's a regular partial object, but taking into
# account that the first positional argument
# (usually `self`, or `cls`) will not be passed
# automatically (as for boundmethods)
wrapped_sig = _signature_internal(partialmethod.func,
follow_wrapper_chains,
skip_bound_arg)
sig = _signature_get_partial(wrapped_sig, partialmethod, (None,))
first_wrapped_param = tuple(wrapped_sig.parameters.values())[0]
new_params = (first_wrapped_param,) + tuple(sig.parameters.values())
return sig.replace(parameters=new_params)
if isfunction(obj) or _signature_is_functionlike(obj):
# If it's a pure Python function, or an object that is duck type
# of a Python function (Cython functions, for instance), then:
return Signature.from_function(obj)
if _signature_is_builtin(obj):
return _signature_from_builtin(Signature, obj,
skip_bound_arg=skip_bound_arg)
if isinstance(obj, functools.partial):
wrapped_sig = _signature_internal(obj.func,
follow_wrapper_chains,
skip_bound_arg)
return _signature_get_partial(wrapped_sig, obj)
sig = None
if isinstance(obj, type):
# obj is a class or a metaclass
# First, let's see if it has an overloaded __call__ defined
# in its metaclass
call = _signature_get_user_defined_method(type(obj), '__call__')
if call is not None:
sig = _signature_internal(call,
follow_wrapper_chains,
skip_bound_arg)
else:
# Now we check if the 'obj' class has a '__new__' method
new = _signature_get_user_defined_method(obj, '__new__')
if new is not None:
sig = _signature_internal(new,
follow_wrapper_chains,
skip_bound_arg)
else:
# Finally, we should have at least __init__ implemented
init = _signature_get_user_defined_method(obj, '__init__')
if init is not None:
sig = _signature_internal(init,
follow_wrapper_chains,
skip_bound_arg)
if sig is None:
# At this point we know, that `obj` is a class, with no user-
# defined '__init__', '__new__', or class-level '__call__'
for base in obj.__mro__[:-1]:
# Since '__text_signature__' is implemented as a
# descriptor that extracts text signature from the
# class docstring, if 'obj' is derived from a builtin
# class, its own '__text_signature__' may be 'None'.
# Therefore, we go through the MRO (except the last
# class in there, which is 'object') to find the first
# class with non-empty text signature.
try:
text_sig = base.__text_signature__
except AttributeError:
pass
else:
if text_sig:
# If 'obj' class has a __text_signature__ attribute:
# return a signature based on it
return _signature_fromstr(Signature, obj, text_sig)
# No '__text_signature__' was found for the 'obj' class.
# Last option is to check if its '__init__' is
# object.__init__ or type.__init__.
if type not in obj.__mro__:
# We have a class (not metaclass), but no user-defined
# __init__ or __new__ for it
if obj.__init__ is object.__init__:
# Return a signature of 'object' builtin.
return signature(object)
elif not isinstance(obj, _NonUserDefinedCallables):
# An object with __call__
# We also check that the 'obj' is not an instance of
# _WrapperDescriptor or _MethodWrapper to avoid
# infinite recursion (and even potential segfault)
call = _signature_get_user_defined_method(type(obj), '__call__')
if call is not None:
try:
sig = _signature_internal(call,
follow_wrapper_chains,
skip_bound_arg)
except ValueError as ex:
msg = 'no signature found for {!r}'.format(obj)
raise ValueError(msg) from ex
if sig is not None:
# For classes and objects we skip the first parameter of their
# __call__, __new__, or __init__ methods
if skip_bound_arg:
return _signature_bound_method(sig)
else:
return sig
if isinstance(obj, types.BuiltinFunctionType):
# Raise a nicer error message for builtins
msg = 'no signature found for builtin function {!r}'.format(obj)
raise ValueError(msg)
raise ValueError('callable {!r} is not supported by signature'.format(obj))
def signature(obj):
'''Get a signature object for the passed callable.'''
return _signature_internal(obj)
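# Illustrative usage of signature() (a doctest-style sketch):
# >>> def greet(name, punct='!'): pass
# >>> str(signature(greet))
# "(name, punct='!')"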
class _void:
'''A private marker - used in Parameter & Signature'''
class _empty:
pass
class _ParameterKind(int):
def __new__(self, *args, name):
obj = int.__new__(self, *args)
obj._name = name
return obj
def __str__(self):
return self._name
def __repr__(self):
return '<_ParameterKind: {!r}>'.format(self._name)
_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
class Parameter:
'''Represents a parameter in a function signature.
Has the following public attributes:
* name : str
The name of the parameter as a string.
* default : object
The default value for the parameter if specified. If the
parameter has no default value, this attribute is set to
`Parameter.empty`.
* annotation
The annotation for the parameter if specified. If the
parameter has no annotation, this attribute is set to
`Parameter.empty`.
* kind : str
Describes how argument values are bound to the parameter.
Possible values: `Parameter.POSITIONAL_ONLY`,
`Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
`Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
'''
__slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
POSITIONAL_ONLY = _POSITIONAL_ONLY
POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
VAR_POSITIONAL = _VAR_POSITIONAL
KEYWORD_ONLY = _KEYWORD_ONLY
VAR_KEYWORD = _VAR_KEYWORD
empty = _empty
def __init__(self, name, kind, *, default=_empty, annotation=_empty,
_partial_kwarg=False):
if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
_VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
raise ValueError("invalid value for 'Parameter.kind' attribute")
self._kind = kind
if default is not _empty:
if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
msg = '{} parameters cannot have default values'.format(kind)
raise ValueError(msg)
self._default = default
self._annotation = annotation
if name is _empty:
raise ValueError('name is a required attribute for Parameter')
if not isinstance(name, str):
raise TypeError("name must be a str, not a {!r}".format(name))
if not name.isidentifier():
raise ValueError('{!r} is not a valid parameter name'.format(name))
self._name = name
self._partial_kwarg = _partial_kwarg
@property
def name(self):
return self._name
@property
def default(self):
return self._default
@property
def annotation(self):
return self._annotation
@property
def kind(self):
return self._kind
def replace(self, *, name=_void, kind=_void, annotation=_void,
default=_void, _partial_kwarg=_void):
'''Creates a customized copy of the Parameter.'''
if name is _void:
name = self._name
if kind is _void:
kind = self._kind
if annotation is _void:
annotation = self._annotation
if default is _void:
default = self._default
if _partial_kwarg is _void:
_partial_kwarg = self._partial_kwarg
return type(self)(name, kind, default=default, annotation=annotation,
_partial_kwarg=_partial_kwarg)
def __str__(self):
kind = self.kind
formatted = self._name
# Add annotation and default value
if self._annotation is not _empty:
formatted = '{}:{}'.format(formatted,
formatannotation(self._annotation))
if self._default is not _empty:
formatted = '{}={}'.format(formatted, repr(self._default))
if kind == _VAR_POSITIONAL:
formatted = '*' + formatted
elif kind == _VAR_KEYWORD:
formatted = '**' + formatted
return formatted
def __repr__(self):
return '<{} at {:#x} {!r}>'.format(self.__class__.__name__,
id(self), self.name)
def __eq__(self, other):
# NB: We deliberately do not compare '_partial_kwarg' attributes
# here. Imagine we have a following situation:
#
# def foo(a, b=1): pass
# def bar(a, b): pass
# bar2 = functools.partial(bar, b=1)
#
# For the above scenario, signatures for `foo` and `bar2` should
# be equal. '_partial_kwarg' attribute is an internal flag, to
# distinguish between keyword parameters with defaults and
# keyword parameters which got their defaults from functools.partial
return (issubclass(other.__class__, Parameter) and
self._name == other._name and
self._kind == other._kind and
self._default == other._default and
self._annotation == other._annotation)
def __ne__(self, other):
return not self.__eq__(other)
class BoundArguments:
'''Result of `Signature.bind` call. Holds the mapping of arguments
to the function's parameters.
Has the following public attributes:
* arguments : OrderedDict
An ordered mutable mapping of parameters' names to arguments' values.
Does not contain arguments' default values.
* signature : Signature
The Signature object that created this instance.
* args : tuple
Tuple of positional arguments values.
* kwargs : dict
Dict of keyword arguments values.
'''
def __init__(self, signature, arguments):
self.arguments = arguments
self._signature = signature
@property
def signature(self):
return self._signature
@property
def args(self):
args = []
for param_name, param in self._signature.parameters.items():
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
# Keyword arguments mapped by 'functools.partial'
# (Parameter._partial_kwarg is True) are mapped
# in 'BoundArguments.kwargs', along with VAR_KEYWORD &
# KEYWORD_ONLY
break
try:
arg = self.arguments[param_name]
except KeyError:
# We're done here. Other arguments
# will be mapped in 'BoundArguments.kwargs'
break
else:
if param.kind == _VAR_POSITIONAL:
# *args
args.extend(arg)
else:
# plain argument
args.append(arg)
return tuple(args)
@property
def kwargs(self):
kwargs = {}
kwargs_started = False
for param_name, param in self._signature.parameters.items():
if not kwargs_started:
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
kwargs_started = True
else:
if param_name not in self.arguments:
kwargs_started = True
continue
if not kwargs_started:
continue
try:
arg = self.arguments[param_name]
except KeyError:
pass
else:
if param.kind == _VAR_KEYWORD:
# **kwargs
kwargs.update(arg)
else:
# plain keyword argument
kwargs[param_name] = arg
return kwargs
def __eq__(self, other):
return (issubclass(other.__class__, BoundArguments) and
self.signature == other.signature and
self.arguments == other.arguments)
def __ne__(self, other):
return not self.__eq__(other)
class Signature:
'''A Signature object represents the overall signature of a function.
It stores a Parameter object for each parameter accepted by the
function, as well as information specific to the function itself.
A Signature object has the following public attributes and methods:
* parameters : OrderedDict
An ordered mapping of parameters' names to the corresponding
Parameter objects (keyword-only arguments are in the same order
as listed in `code.co_varnames`).
* return_annotation : object
The annotation for the return type of the function if specified.
If the function has no annotation for its return type, this
attribute is set to `Signature.empty`.
* bind(*args, **kwargs) -> BoundArguments
Creates a mapping from positional and keyword arguments to
parameters.
* bind_partial(*args, **kwargs) -> BoundArguments
Creates a partial mapping from positional and keyword arguments
to parameters (simulating 'functools.partial' behavior.)
'''
__slots__ = ('_return_annotation', '_parameters')
_parameter_cls = Parameter
_bound_arguments_cls = BoundArguments
empty = _empty
def __init__(self, parameters=None, *, return_annotation=_empty,
__validate_parameters__=True):
'''Constructs Signature from the given list of Parameter
objects and 'return_annotation'. All arguments are optional.
'''
if parameters is None:
params = OrderedDict()
else:
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
kind_defaults = False
for idx, param in enumerate(parameters):
kind = param.kind
name = param.name
if kind < top_kind:
msg = 'wrong parameter order: {} before {}'
msg = msg.format(top_kind, kind)
raise ValueError(msg)
elif kind > top_kind:
kind_defaults = False
top_kind = kind
if (kind in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD) and
not param._partial_kwarg):
# If we have a positional-only or positional-or-keyword
# parameter, that does not have its default value set
# by 'functools.partial' or other "partial" signature:
if param.default is _empty:
if kind_defaults:
# No default for this parameter, but the
# previous parameter of the same kind had
# a default
msg = 'non-default argument follows default ' \
'argument'
raise ValueError(msg)
else:
# There is a default for this parameter.
kind_defaults = True
if name in params:
msg = 'duplicate parameter name: {!r}'.format(name)
raise ValueError(msg)
params[name] = param
else:
params = OrderedDict(((param.name, param)
for param in parameters))
self._parameters = types.MappingProxyType(params)
self._return_annotation = return_annotation
@classmethod
def from_function(cls, func):
'''Constructs Signature for the given python function'''
is_duck_function = False
if not isfunction(func):
if _signature_is_functionlike(func):
is_duck_function = True
else:
# If it's not a pure Python function, and not a duck type
# of pure function:
raise TypeError('{!r} is not a Python function'.format(func))
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
positional = tuple(arg_names[:pos_count])
keyword_only_count = func_code.co_kwonlyargcount
keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
annotations = func.__annotations__
defaults = func.__defaults__
kwdefaults = func.__kwdefaults__
if defaults:
pos_default_count = len(defaults)
else:
pos_default_count = 0
parameters = []
# Non-keyword-only parameters w/o defaults.
non_default_count = pos_count - pos_default_count
for name in positional[:non_default_count]:
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD))
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD,
default=defaults[offset]))
# *args
if func_code.co_flags & CO_VARARGS:
name = arg_names[pos_count + keyword_only_count]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_POSITIONAL))
# Keyword-only parameters.
for name in keyword_only:
default = _empty
if kwdefaults is not None:
default = kwdefaults.get(name, _empty)
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_KEYWORD_ONLY,
default=default))
# **kwargs
if func_code.co_flags & CO_VARKEYWORDS:
index = pos_count + keyword_only_count
if func_code.co_flags & CO_VARARGS:
index += 1
name = arg_names[index]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_KEYWORD))
# Is 'func' is a pure Python function - don't validate the
# parameters list (for correct order and defaults), it should be OK.
return cls(parameters,
return_annotation=annotations.get('return', _empty),
__validate_parameters__=is_duck_function)
@classmethod
def from_builtin(cls, func):
return _signature_from_builtin(cls, func)
@property
def parameters(self):
return self._parameters
@property
def return_annotation(self):
return self._return_annotation
def replace(self, *, parameters=_void, return_annotation=_void):
'''Creates a customized copy of the Signature.
Pass 'parameters' and/or 'return_annotation' arguments
to override them in the new copy.
'''
if parameters is _void:
parameters = self.parameters.values()
if return_annotation is _void:
return_annotation = self._return_annotation
return type(self)(parameters,
return_annotation=return_annotation)
def __eq__(self, other):
if (not issubclass(type(other), Signature) or
self.return_annotation != other.return_annotation or
len(self.parameters) != len(other.parameters)):
return False
other_positions = {param: idx
for idx, param in enumerate(other.parameters.keys())}
for idx, (param_name, param) in enumerate(self.parameters.items()):
if param.kind == _KEYWORD_ONLY:
try:
other_param = other.parameters[param_name]
except KeyError:
return False
else:
if param != other_param:
return False
else:
try:
other_idx = other_positions[param_name]
except KeyError:
return False
else:
if (idx != other_idx or
param != other.parameters[param_name]):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def _bind(self, args, kwargs, *, partial=False):
'''Private method. Don't use directly.'''
arguments = OrderedDict()
parameters = iter(self.parameters.values())
parameters_ex = ()
arg_vals = iter(args)
if partial:
# Support for binding arguments to 'functools.partial' objects.
# See 'functools.partial' case in 'signature()' implementation
# for details.
for param_name, param in self.parameters.items():
if (param._partial_kwarg and param_name not in kwargs):
# Simulating 'functools.partial' behavior
kwargs[param_name] = param.default
while True:
# Let's iterate through the positional arguments and corresponding
# parameters
try:
arg_val = next(arg_vals)
except StopIteration:
# No more positional arguments
try:
param = next(parameters)
except StopIteration:
# No more parameters. That's it. Just need to check that
# we have no `kwargs` after this while loop
break
else:
if param.kind == _VAR_POSITIONAL:
# That's OK, just empty *args. Let's start parsing
# kwargs
break
elif param.name in kwargs:
if param.kind == _POSITIONAL_ONLY:
msg = '{arg!r} parameter is positional only, ' \
'but was passed as a keyword'
msg = msg.format(arg=param.name)
raise TypeError(msg) from None
parameters_ex = (param,)
break
elif (param.kind == _VAR_KEYWORD or
param.default is not _empty):
# That's fine too - we have a default value for this
# parameter. So, lets start parsing `kwargs`, starting
# with the current parameter
parameters_ex = (param,)
break
else:
# No default, not VAR_KEYWORD, not VAR_POSITIONAL,
# not in `kwargs`
if partial:
parameters_ex = (param,)
break
else:
msg = '{arg!r} parameter lacking default value'
msg = msg.format(arg=param.name)
raise TypeError(msg) from None
else:
# We have a positional argument to process
try:
param = next(parameters)
except StopIteration:
raise TypeError('too many positional arguments') from None
else:
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
# Looks like we have no parameter for this positional
# argument
raise TypeError('too many positional arguments')
if param.kind == _VAR_POSITIONAL:
# We have an '*args'-like argument, let's fill it with
# all positional arguments we have left and move on to
# the next phase
values = [arg_val]
values.extend(arg_vals)
arguments[param.name] = tuple(values)
break
if param.name in kwargs:
raise TypeError('multiple values for argument '
'{arg!r}'.format(arg=param.name))
arguments[param.name] = arg_val
# Now, we iterate through the remaining parameters to process
# keyword arguments
kwargs_param = None
for param in itertools.chain(parameters_ex, parameters):
if param.kind == _VAR_KEYWORD:
# Memorize that we have a '**kwargs'-like parameter
kwargs_param = param
continue
if param.kind == _VAR_POSITIONAL:
# Named arguments don't refer to '*args'-like parameters.
# We only arrive here if the positional arguments ended
# before reaching the last parameter before *args.
continue
param_name = param.name
try:
arg_val = kwargs.pop(param_name)
except KeyError:
# We have no value for this parameter. It's fine though,
# if it has a default value, or it is an '*args'-like
# parameter, left alone by the processing of positional
# arguments.
if (not partial and param.kind != _VAR_POSITIONAL and
param.default is _empty):
raise TypeError('{arg!r} parameter lacking default value'. \
format(arg=param_name)) from None
else:
if param.kind == _POSITIONAL_ONLY:
# This should never happen in case of a properly built
# Signature object (but let's have this check here
# to ensure correct behaviour just in case)
raise TypeError('{arg!r} parameter is positional only, '
'but was passed as a keyword'. \
format(arg=param.name))
arguments[param_name] = arg_val
if kwargs:
if kwargs_param is not None:
# Process our '**kwargs'-like parameter
arguments[kwargs_param.name] = kwargs
else:
raise TypeError('too many keyword arguments')
return self._bound_arguments_cls(self, arguments)
def bind(*args, **kwargs):
'''Get a BoundArguments object, that maps the passed `args`
and `kwargs` to the function's signature. Raises `TypeError`
if the passed arguments can not be bound.
'''
return args[0]._bind(args[1:], kwargs)
def bind_partial(*args, **kwargs):
'''Get a BoundArguments object, that partially maps the
passed `args` and `kwargs` to the function's signature.
Raises `TypeError` if the passed arguments can not be bound.
'''
return args[0]._bind(args[1:], kwargs, partial=True)
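    # Illustrative bind() behaviour (a doctest-style sketch):
    # >>> def f(a, b=10, *args, c, **kw): pass
    # >>> ba = signature(f).bind(1, 2, 3, c=4, d=5)
    # >>> ba.args, ba.kwargs
    # ((1, 2, 3), {'c': 4, 'd': 5})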
def __str__(self):
result = []
render_pos_only_separator = False
render_kw_only_separator = True
for param in self.parameters.values():
formatted = str(param)
kind = param.kind
if kind == _POSITIONAL_ONLY:
render_pos_only_separator = True
elif render_pos_only_separator:
# It's not a positional-only parameter, and the flag
# is set to 'True' (there were pos-only params before.)
result.append('/')
render_pos_only_separator = False
if kind == _VAR_POSITIONAL:
# OK, we have an '*args'-like parameter, so we won't need
# a '*' to separate keyword-only arguments
render_kw_only_separator = False
elif kind == _KEYWORD_ONLY and render_kw_only_separator:
# We have a keyword-only parameter to render and we haven't
# rendered an '*args'-like parameter before, so add a '*'
# separator to the parameters list ("foo(arg1, *, arg2)" case)
result.append('*')
# This condition should be only triggered once, so
# reset the flag
render_kw_only_separator = False
result.append(formatted)
if render_pos_only_separator:
# There were only positional-only parameters, hence the
# flag was not reset to 'False'
result.append('/')
rendered = '({})'.format(', '.join(result))
if self.return_annotation is not _empty:
anno = formatannotation(self.return_annotation)
rendered += ' -> {}'.format(anno)
return rendered
def _main():
""" Logic for inspecting an object given at command line """
import argparse
import importlib
parser = argparse.ArgumentParser()
parser.add_argument(
'object',
help="The object to be analysed. "
"It supports the 'module:qualname' syntax")
parser.add_argument(
'-d', '--details', action='store_true',
help='Display info about the module rather than its source code')
args = parser.parse_args()
target = args.object
mod_name, has_attrs, attrs = target.partition(":")
try:
obj = module = importlib.import_module(mod_name)
except Exception as exc:
msg = "Failed to import {} ({}: {})".format(mod_name,
type(exc).__name__,
exc)
print(msg, file=sys.stderr)
exit(2)
if has_attrs:
parts = attrs.split(".")
obj = module
for part in parts:
obj = getattr(obj, part)
if module.__name__ in sys.builtin_module_names:
print("Can't get info for builtin modules.", file=sys.stderr)
exit(1)
if args.details:
print('Target: {}'.format(target))
print('Origin: {}'.format(getsourcefile(module)))
print('Cached: {}'.format(module.__cached__))
if obj is module:
print('Loader: {}'.format(repr(module.__loader__)))
if hasattr(module, '__path__'):
print('Submodule search path: {}'.format(module.__path__))
else:
try:
__, lineno = findsource(obj)
except Exception:
pass
else:
print('Line: {}'.format(lineno))
print('\n')
else:
print(getsource(obj))
if __name__ == "__main__":
_main()
|
{
"content_hash": "838e97d5ca75e5b2946edcb6a3549cdd",
"timestamp": "",
"source": "github",
"line_count": 2766,
"max_line_length": 84,
"avg_line_length": 37.87816341287057,
"alnum_prop": 0.5816208683700642,
"repo_name": "paweljasinski/ironpython3",
"id": "c7a2cf8833f00bf4f5642d6e6224aa71502333e5",
"size": "104771",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Src/StdLib/Lib/inspect.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "11099"
},
{
"name": "C#",
"bytes": "12284108"
},
{
"name": "CSS",
"bytes": "96"
},
{
"name": "Groff",
"bytes": "21080"
},
{
"name": "HTML",
"bytes": "13117230"
},
{
"name": "Makefile",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PowerShell",
"bytes": "62360"
},
{
"name": "Python",
"bytes": "27267678"
},
{
"name": "R",
"bytes": "4949"
},
{
"name": "Ruby",
"bytes": "19"
},
{
"name": "Shell",
"bytes": "5147"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
import argparse
import os.path
import queue
import subprocess
import threading
import time
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--video_dir', '-d', dest='video_dir',
required=True, type=str)
parser.add_argument('--video_ext', '-e', dest='video_ext',
default='.mp4', type=str)
return vars(parser.parse_args())
def get_time(msec):
return time.strftime("%Y%m%d_%H%M%S", time.localtime(msec))
def print_file_info(path):
print('atime: %s' % get_time(os.path.getatime(path)))
print('mtime: %s' % get_time(os.path.getmtime(path)))
print('ctime: %s' % get_time(os.path.getctime(path)))
print('size: %s' % '{:,d}'.format(os.path.getsize(path)))
print('')
def ikalog_with_queue(video_queue):
ika_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
video_path = ''
while True:
# Wait for video_path.
        while video_path == '':
video_path = video_queue.get()
# Termination check.
if video_path is None:
return
# Wait for the next file until a timeout.
try:
queued_data = video_queue.get(timeout=180)
except queue.Empty:
queued_data = ''
# If not a new file, keep listening.
# Note: video_path is not '' or None here.
if queued_data == video_path:
continue
command = [os.path.join(ika_path, 'IkaLog.py'), '-f', video_path]
subprocess.call(command)
video_path = queued_data
class WatchdogHandler(FileSystemEventHandler):
def __init__(self, video_queue, video_ext):
super(WatchdogHandler, self).__init__()
self._video_queue = video_queue
self._video_ext = video_ext
def on_created(self, event):
path = event.src_path
if not path.endswith(self._video_ext):
return
print('%s: on_created(%s)' % (get_time(time.time()), path))
print_file_info(path)
self._video_queue.put('')
def on_modified(self, event):
path = event.src_path
if not path.endswith(self._video_ext):
return
print('%s: on_modified(%s)' % (get_time(time.time()), path))
print_file_info(path)
self._video_queue.put(path)
def on_deleted(self, event):
path = event.src_path
if not path.endswith(self._video_ext):
return
print('%s: on_deleted(%s)' % (get_time(time.time()), path))
def main():
video_queue = queue.Queue()
args = get_args()
video_dir = args['video_dir']
video_ext = args['video_ext']
watchdog_dir = os.path.expanduser(args['video_dir'])
watchdog_handler = WatchdogHandler(video_queue, args['video_ext'])
observer = Observer()
observer.schedule(watchdog_handler, watchdog_dir, recursive=False)
observer.start()
ika_thread = threading.Thread(
target=ikalog_with_queue, name='ikalog', args=(video_queue,))
ika_thread.start()
print('==== Started IkaWatcher ====')
print('Automatically run IkaLog when the following files are created.')
print('Target video files: %s' % os.path.join(video_dir, '*%s' % video_ext))
try:
while True:
            input('')  # Wait for a key input.
key = input('Start processing? ([Y] or N): ')
if key in ['', 'Y']:
video_queue.put('') # '' triggers processing a queued value.
except KeyboardInterrupt:
print('==== KeyboardInterrupt ====')
observer.stop()
video_queue.put(None) # None in the queue stops ika_thread.
ika_thread.join()
if __name__ == '__main__':
    main()
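# Example invocation (paths are hypothetical):
#   python IkaWatcher.py --video_dir ~/Movies/splatoon --video_ext .mp4
# Queue protocol recap: a queued path is handed to IkaLog.py once a ''
# sentinel or a different path arrives (or the 180s get() times out);
# None terminates the worker thread.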
|
{
"content_hash": "f6233d203fbf0bebfb9f238f769ed50f",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 80,
"avg_line_length": 29.16793893129771,
"alnum_prop": 0.5870191049463491,
"repo_name": "hasegaw/IkaLog",
"id": "4b88795be4a1021ff4261771c5b28d5c5c9bd15d",
"size": "4943",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tools/IkaWatcher.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "37511"
},
{
"name": "Makefile",
"bytes": "1057"
},
{
"name": "Python",
"bytes": "749095"
},
{
"name": "Shell",
"bytes": "3312"
}
],
"symlink_target": ""
}
|
import doctest
import unittest
import typeschema
import typeschema.properties as ty
class TestCase(unittest.TestCase):
def test_properties_doc(self):
fails, tested = doctest.testmod(ty)
if fails > 0:
self.fail('Doctest failed!')
def test_mutable_default(self):
l = [1, 2, 3]
class MyClass(object):
my_attr = ty.property('my_attr', {'type': 'array'}, default=l)
a = MyClass()
b = MyClass()
a.my_attr.append(4)
self.assertEqual(a.my_attr, [1, 2, 3, 4])
self.assertEqual(b.my_attr, [1, 2, 3])
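    # The assertions above document that ty.property does not share one
    # mutable default across instances: appending on `a` leaves `b` at the
    # original [1, 2, 3].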
|
{
"content_hash": "298775344c1c11733e98a89cd14fb80d",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 74,
"avg_line_length": 24.04,
"alnum_prop": 0.5806988352745425,
"repo_name": "tcard/typeschema",
"id": "6f3abc521f81c7df3946d9b75981ae80b7b6d5d5",
"size": "601",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/properties/properties_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32900"
}
],
"symlink_target": ""
}
|
from typing import TYPE_CHECKING, Optional, Sequence
from airflow.models import BaseOperator
from airflow.providers.mysql.hooks.mysql import MySqlHook
from airflow.providers.trino.hooks.trino import TrinoHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class TrinoToMySqlOperator(BaseOperator):
"""
    Moves data from Trino to MySQL. Note that for now the data is loaded
    into memory before being pushed to MySQL, so this operator should
    be used for small amounts of data.
:param sql: SQL query to execute against Trino. (templated)
:type sql: str
:param mysql_table: target MySQL table, use dot notation to target a
specific database. (templated)
:type mysql_table: str
:param mysql_conn_id: Reference to :ref:`mysql connection id <howto/connection:mysql>`.
:type mysql_conn_id: str
:param trino_conn_id: source trino connection
:type trino_conn_id: str
    :param mysql_preoperator: SQL statement to run against MySQL prior to
        import, typically used to truncate or delete previously loaded
        data, allowing the task to be idempotent (running
        the task twice won't double-load data). (templated)
:type mysql_preoperator: str
"""
template_fields: Sequence[str] = ('sql', 'mysql_table', 'mysql_preoperator')
template_ext: Sequence[str] = ('.sql',)
template_fields_renderers = {"sql": "sql", "mysql_preoperator": "sql"}
ui_color = '#a0e08c'
def __init__(
self,
*,
sql: str,
mysql_table: str,
trino_conn_id: str = 'trino_default',
mysql_conn_id: str = 'mysql_default',
mysql_preoperator: Optional[str] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.sql = sql
self.mysql_table = mysql_table
self.mysql_conn_id = mysql_conn_id
self.mysql_preoperator = mysql_preoperator
self.trino_conn_id = trino_conn_id
def execute(self, context: 'Context') -> None:
trino = TrinoHook(trino_conn_id=self.trino_conn_id)
self.log.info("Extracting data from Trino: %s", self.sql)
results = trino.get_records(self.sql)
mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)
if self.mysql_preoperator:
self.log.info("Running MySQL preoperator")
self.log.info(self.mysql_preoperator)
mysql.run(self.mysql_preoperator)
self.log.info("Inserting rows into MySQL")
mysql.insert_rows(table=self.mysql_table, rows=results)
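# Illustrative DAG usage (a sketch; the connection ids, query and table
# names below are hypothetical):
#
#   load_stats = TrinoToMySqlOperator(
#       task_id='trino_to_mysql',
#       sql='SELECT ds, count(*) AS n FROM hive.web.events GROUP BY ds',
#       mysql_table='analytics.daily_stats',
#       mysql_preoperator='DELETE FROM analytics.daily_stats',
#   )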
|
{
"content_hash": "f41691bbb958764b2d17e10eb695c8bf",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 91,
"avg_line_length": 38.11940298507463,
"alnum_prop": 0.6601409553641346,
"repo_name": "mistercrunch/airflow",
"id": "081a3163a15184282de2871a70b5e0a103485c8e",
"size": "3341",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "airflow/providers/mysql/transfers/trino_to_mysql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36341"
},
{
"name": "HTML",
"bytes": "99243"
},
{
"name": "JavaScript",
"bytes": "891460"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "773270"
},
{
"name": "Shell",
"bytes": "5659"
}
],
"symlink_target": ""
}
|
"""
WSGI config for rest_svc_template project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rest_svc_template.settings")
application = get_wsgi_application()
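# A WSGI server imports the callable above, e.g. (command is illustrative):
#   gunicorn rest_svc_template.wsgi:application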
|
{
"content_hash": "855242dfdf4110594ebbbb187bc34d5d",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 25.6875,
"alnum_prop": 0.7712895377128953,
"repo_name": "DePaul-Medix/SmartCAD",
"id": "4f0451002163d6d623715d8a61de58f427bda4bd",
"size": "411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/REST_web_services-DjangoRestFramework/rest_svc_template/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "14089"
},
{
"name": "HTML",
"bytes": "50044"
},
{
"name": "JavaScript",
"bytes": "756454"
},
{
"name": "Python",
"bytes": "7093"
},
{
"name": "Shell",
"bytes": "604"
}
],
"symlink_target": ""
}
|
import xml.etree.ElementTree as ET
import os
import cPickle
import numpy as np
def parse_rec(filename):
""" Parse a PASCAL VOC xml file """
tree = ET.parse(filename)
objects = []
for obj in tree.findall('object'):
obj_struct = {}
obj_struct['name'] = obj.find('name').text
obj_struct['pose'] = obj.find('pose').text
obj_struct['truncated'] = int(obj.find('truncated').text)
obj_struct['difficult'] = int(obj.find('difficult').text)
bbox = obj.find('bndbox')
obj_struct['bbox'] = [int(bbox.find('xmin').text),
int(bbox.find('ymin').text),
int(bbox.find('xmax').text),
int(bbox.find('ymax').text)]
objects.append(obj_struct)
return objects
def voc_ap(rec, prec, use_07_metric=False):
""" ap = voc_ap(rec, prec, [use_07_metric])
Compute VOC AP given precision and recall.
If use_07_metric is true, uses the
VOC 07 11 point method (default:False).
"""
if use_07_metric:
# 11 point metric
ap = 0.
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec >= t) == 0:
p = 0
else:
p = np.max(prec[rec >= t])
ap = ap + p / 11.
else:
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
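# Worked example (illustrative): given rec=[0.5, 1.0], prec=[1.0, 0.5],
#   voc_ap(np.array([0.5, 1.0]), np.array([1.0, 0.5]))        -> 0.75
#     (area under the stepped precision envelope)
#   voc_ap(np.array([0.5, 1.0]), np.array([1.0, 0.5]), True)  -> 8.5/11 ~ 0.7727
#     (11-point metric: p=1.0 at t=0.0..0.5, p=0.5 at t=0.6..1.0)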
def voc_eval(detpath,
annopath,
imagesetfile,
classname,
cachedir,
ovthresh=0.5,
use_07_metric=False):
"""rec, prec, ap = voc_eval(detpath,
annopath,
imagesetfile,
classname,
[ovthresh],
[use_07_metric])
Top level function that does the PASCAL VOC evaluation.
detpath: Path to detections
detpath.format(classname) should produce the detection results file.
annopath: Path to annotations
annopath.format(imagename) should be the xml annotations file.
imagesetfile: Text file containing the list of images, one image per line.
classname: Category name (duh)
cachedir: Directory for caching the annotations
[ovthresh]: Overlap threshold (default = 0.5)
[use_07_metric]: Whether to use VOC07's 11 point AP computation
(default False)
"""
# assumes detections are in detpath.format(classname)
# assumes annotations are in annopath.format(imagename)
# assumes imagesetfile is a text file with each line an image name
# cachedir caches the annotations in a pickle file
# first load gt
if not os.path.isdir(cachedir):
os.mkdir(cachedir)
cachefile = os.path.join(cachedir, 'annots.pkl')
# read list of images
with open(imagesetfile, 'r') as f:
lines = f.readlines()
imagenames = [x.strip() for x in lines]
if not os.path.isfile(cachefile):
# load annots
recs = {}
for i, imagename in enumerate(imagenames):
recs[imagename] = parse_rec(annopath.format(imagename))
if i % 100 == 0:
print 'Reading annotation for {:d}/{:d}'.format(
i + 1, len(imagenames))
# save
print 'Saving cached annotations to {:s}'.format(cachefile)
with open(cachefile, 'w') as f:
cPickle.dump(recs, f)
else:
# load
with open(cachefile, 'r') as f:
recs = cPickle.load(f)
# extract gt objects for this class
class_recs = {}
npos = 0
for imagename in imagenames:
R = [obj for obj in recs[imagename] if obj['name'] == classname]
bbox = np.array([x['bbox'] for x in R])
difficult = np.array([x['difficult'] for x in R]).astype(np.bool)
det = [False] * len(R)
npos = npos + sum(~difficult)
class_recs[imagename] = {'bbox': bbox,
'difficult': difficult,
'det': det}
# read dets
detfile = detpath.format(classname)
with open(detfile, 'r') as f:
lines = f.readlines()
splitlines = [x.strip().split(' ') for x in lines]
image_ids = [x[0] for x in splitlines]
confidence = np.array([float(x[1]) for x in splitlines])
BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
# sort by confidence
sorted_ind = np.argsort(-confidence)
sorted_scores = np.sort(-confidence)
BB = BB[sorted_ind, :]
image_ids = [image_ids[x] for x in sorted_ind]
# go down dets and mark TPs and FPs
nd = len(image_ids)
tp = np.zeros(nd)
fp = np.zeros(nd)
for d in range(nd):
R = class_recs[image_ids[d]]
bb = BB[d, :].astype(float)
ovmax = -np.inf
BBGT = R['bbox'].astype(float)
if BBGT.size > 0:
# compute overlaps
# intersection
ixmin = np.maximum(BBGT[:, 0], bb[0])
iymin = np.maximum(BBGT[:, 1], bb[1])
ixmax = np.minimum(BBGT[:, 2], bb[2])
iymax = np.minimum(BBGT[:, 3], bb[3])
iw = np.maximum(ixmax - ixmin + 1., 0.)
ih = np.maximum(iymax - iymin + 1., 0.)
inters = iw * ih
# union
uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) +
(BBGT[:, 2] - BBGT[:, 0] + 1.) *
(BBGT[:, 3] - BBGT[:, 1] + 1.) - inters)
overlaps = inters / uni
ovmax = np.max(overlaps)
jmax = np.argmax(overlaps)
if ovmax > ovthresh:
if not R['difficult'][jmax]:
if not R['det'][jmax]:
tp[d] = 1.
R['det'][jmax] = 1
else:
fp[d] = 1.
else:
fp[d] = 1.
# compute precision recall
fp = np.cumsum(fp)
tp = np.cumsum(tp)
rec = tp / float(npos)
# avoid divide by zero in case the first detection matches a difficult
# ground truth
prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
ap = voc_ap(rec, prec, use_07_metric)
return rec, prec, ap
|
{
"content_hash": "39415e37f0293de85b5e9da203c69688",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 78,
"avg_line_length": 34.597938144329895,
"alnum_prop": 0.524284862932062,
"repo_name": "HaydenFaulkner/bottom-up-attention",
"id": "137930820c0664872dd5e041ddf3e60a4730b13a",
"size": "6937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/datasets/voc_eval.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9632"
},
{
"name": "C++",
"bytes": "2942366"
},
{
"name": "CMake",
"bytes": "106151"
},
{
"name": "CSS",
"bytes": "9155"
},
{
"name": "Cuda",
"bytes": "244481"
},
{
"name": "HTML",
"bytes": "7048"
},
{
"name": "Jupyter Notebook",
"bytes": "7429298"
},
{
"name": "Makefile",
"bytes": "25451"
},
{
"name": "Matlab",
"bytes": "33910"
},
{
"name": "Python",
"bytes": "767221"
},
{
"name": "Shell",
"bytes": "54311"
}
],
"symlink_target": ""
}
|
from django.views import generic
from gwells.models import Survey
class RegistryView(generic.TemplateView):
template_name = 'gwells/registry.html'
def get_context_data(self, **kwargs):
"""
Return the context for the page.
"""
context = super(RegistryView, self).get_context_data(**kwargs)
surveys = Survey.objects.order_by('create_date')
context['surveys'] = surveys
context['page'] = 'r'
return context
|
{
"content_hash": "d6fe1789d4eabd4cb675f76b2cd7da07",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 70,
"avg_line_length": 29.9375,
"alnum_prop": 0.6388308977035491,
"repo_name": "rstens/gwells",
"id": "36112268726e06b671112c197a8ded722655280c",
"size": "479",
"binary": false,
"copies": "1",
"ref": "refs/heads/developer",
"path": "gwells/views/RegistryView.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1707"
},
{
"name": "CSS",
"bytes": "71007"
},
{
"name": "Groovy",
"bytes": "91669"
},
{
"name": "HTML",
"bytes": "136711"
},
{
"name": "JavaScript",
"bytes": "193917"
},
{
"name": "PLpgSQL",
"bytes": "49465"
},
{
"name": "Python",
"bytes": "481602"
},
{
"name": "Shell",
"bytes": "52420"
},
{
"name": "TSQL",
"bytes": "3727"
},
{
"name": "Vue",
"bytes": "65417"
}
],
"symlink_target": ""
}
|
from .components import ComponentMeta
data_statistic_cpn_meta = ComponentMeta("DataStatistics")
@data_statistic_cpn_meta.bind_param
def data_statistics_param():
from federatedml.param.statistics_param import StatisticsParam
return StatisticsParam
@data_statistic_cpn_meta.bind_runner.on_guest.on_host
def data_statistics_runner():
from federatedml.statistic.data_statistics import DataStatistics
return DataStatistics
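# The bindings above lazily register the component: bind_param attaches the
# parameter class and bind_runner.on_guest.on_host attaches the runner for
# both guest and host roles; the imports are deferred into the factory
# functions so the component table can be built without importing the
# implementations eagerly.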
|
{
"content_hash": "346c71e4ae4551c57e44e77cbe2d1ffe",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 68,
"avg_line_length": 25.941176470588236,
"alnum_prop": 0.7981859410430839,
"repo_name": "FederatedAI/FATE",
"id": "1794486c1e4efb9dcfd867e99be78c28390e61e1",
"size": "1059",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/federatedml/components/data_statistics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "19716"
},
{
"name": "Python",
"bytes": "5121767"
},
{
"name": "Rust",
"bytes": "3971"
},
{
"name": "Shell",
"bytes": "19676"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/lair/base/shared_poi_all_lair_warren_small_fog_gray.iff"
result.attribute_template_id = -1
result.stfName("lair_n","warren")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "1f999d68b19cf3f604d595a1784b3c19",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 92,
"avg_line_length": 24.307692307692307,
"alnum_prop": 0.6930379746835443,
"repo_name": "anhstudios/swganh",
"id": "86c77a5d1309f7f7445a141ddbbc8a9bb2a5d2e6",
"size": "461",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_warren_small_fog_gray.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('academics', '0003_teacher_unique_name'),
]
operations = [
migrations.AlterField(
model_name='teacher',
name='teacher_id',
field=models.CharField(max_length=4, unique=True),
),
]
|
{
"content_hash": "71d404310e2afa4021997c23fbe12bd6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 62,
"avg_line_length": 22.11111111111111,
"alnum_prop": 0.6005025125628141,
"repo_name": "rectory-school/rectory-apps",
"id": "2d66513a7d0d8bbec01db786d0772f523e3f3d54",
"size": "422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "academics/migrations/0004_auto_20151203_0834.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1150635"
},
{
"name": "HTML",
"bytes": "2337278"
},
{
"name": "JavaScript",
"bytes": "30707"
},
{
"name": "PHP",
"bytes": "51712"
},
{
"name": "Python",
"bytes": "455392"
},
{
"name": "Ruby",
"bytes": "524"
}
],
"symlink_target": ""
}
|
import unittest
import os, sys, commands
import comm
class TestWebAppFunctions(unittest.TestCase):
def test_create(self):
comm.setUp()
app_name = "helloworld"
pkg_name = "com.example." + app_name.lower()
content = "<a href='http://www.intel.com'>Intel</a>\n</body>"
key = "</body>"
replace_index_list = [key, content]
comm.create(app_name, pkg_name, comm.MODE, None, replace_index_list, self)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "8772ddc16a7aa876b27641156b8f5df4",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 82,
"avg_line_length": 31.5,
"alnum_prop": 0.6031746031746031,
"repo_name": "pk-sam/crosswalk-test-suite",
"id": "c2d7b812a7bac35c6eeaa15c6afafc77e3106b99",
"size": "2059",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cordova/cordova-webapp-android-tests/webapp/webapp_create.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "28136"
},
{
"name": "CSS",
"bytes": "697706"
},
{
"name": "CoffeeScript",
"bytes": "18978"
},
{
"name": "Cucumber",
"bytes": "63597"
},
{
"name": "GLSL",
"bytes": "3495"
},
{
"name": "Groff",
"bytes": "12"
},
{
"name": "HTML",
"bytes": "39810614"
},
{
"name": "Java",
"bytes": "602994"
},
{
"name": "JavaScript",
"bytes": "17479410"
},
{
"name": "Makefile",
"bytes": "1044"
},
{
"name": "PHP",
"bytes": "44946"
},
{
"name": "Python",
"bytes": "4304927"
},
{
"name": "Shell",
"bytes": "1100341"
},
{
"name": "XSLT",
"bytes": "767778"
}
],
"symlink_target": ""
}
|
from model import GoogleNet
m = GoogleNet()
m.load("bvlc_googlenet.caffemodel")
m.load_label("labels.txt")
m.print_prediction("image.png")
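# Assumes bvlc_googlenet.caffemodel, labels.txt and image.png are present in
# the working directory, since the calls above load them by relative path.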
|
{
"content_hash": "5cfbca85a6506f39fce6c2ddea6535f0",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 35,
"avg_line_length": 23.333333333333332,
"alnum_prop": 0.75,
"repo_name": "dsanno/chainer-recognize",
"id": "6424ff267e715119f3f31acd1f21cef9df447698",
"size": "140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sample.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3465"
}
],
"symlink_target": ""
}
|
import unittest
from unittest import mock
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook
from airflow.providers.amazon.aws.sensors.sagemaker_tuning import SageMakerTuningSensor
DESCRIBE_TUNING_INPROGRESS_RESPONSE = {
'HyperParameterTuningJobStatus': 'InProgress',
'ResponseMetadata': {
'HTTPStatusCode': 200,
},
}
DESCRIBE_TUNING_COMPLETED_RESPONSE = {
'HyperParameterTuningJobStatus': 'Completed',
'ResponseMetadata': {
'HTTPStatusCode': 200,
},
}
DESCRIBE_TUNING_FAILED_RESPONSE = {
'HyperParameterTuningJobStatus': 'Failed',
'ResponseMetadata': {
'HTTPStatusCode': 200,
},
'FailureReason': 'Unknown',
}
DESCRIBE_TUNING_STOPPING_RESPONSE = {
'HyperParameterTuningJobStatus': 'Stopping',
'ResponseMetadata': {
'HTTPStatusCode': 200,
},
}
class TestSageMakerTuningSensor(unittest.TestCase):
@mock.patch.object(SageMakerHook, 'get_conn')
@mock.patch.object(SageMakerHook, 'describe_tuning_job')
def test_sensor_with_failure(self, mock_describe_job, mock_client):
mock_describe_job.side_effect = [DESCRIBE_TUNING_FAILED_RESPONSE]
sensor = SageMakerTuningSensor(
task_id='test_task', poke_interval=2, aws_conn_id='aws_test', job_name='test_job_name'
)
self.assertRaises(AirflowException, sensor.execute, None)
mock_describe_job.assert_called_once_with('test_job_name')
@mock.patch.object(SageMakerHook, 'get_conn')
@mock.patch.object(SageMakerHook, '__init__')
@mock.patch.object(SageMakerHook, 'describe_tuning_job')
def test_sensor(self, mock_describe_job, hook_init, mock_client):
hook_init.return_value = None
mock_describe_job.side_effect = [
DESCRIBE_TUNING_INPROGRESS_RESPONSE,
DESCRIBE_TUNING_STOPPING_RESPONSE,
DESCRIBE_TUNING_COMPLETED_RESPONSE,
]
sensor = SageMakerTuningSensor(
task_id='test_task', poke_interval=2, aws_conn_id='aws_test', job_name='test_job_name'
)
sensor.execute(None)
        # make sure we called it 3 times (the sensor terminates once the job is completed)
self.assertEqual(mock_describe_job.call_count, 3)
# make sure the hook was initialized with the specific params
calls = [mock.call(aws_conn_id='aws_test')]
hook_init.assert_has_calls(calls)
|
{
"content_hash": "6d9407b328da86ac07a3d8102626f875",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 98,
"avg_line_length": 34.25352112676056,
"alnum_prop": 0.6817434210526315,
"repo_name": "airbnb/airflow",
"id": "32b2553f1f5c63c9957cadda9e03dfbfe1627a90",
"size": "3220",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/providers/amazon/aws/sensors/test_sagemaker_tuning.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36374"
},
{
"name": "HTML",
"bytes": "99535"
},
{
"name": "JavaScript",
"bytes": "891618"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "796220"
},
{
"name": "Shell",
"bytes": "9040"
}
],
"symlink_target": ""
}
|
"""
profiling.remote.background
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Utilities to run a profiler in a background thread.
"""
from __future__ import absolute_import
import os
import signal
import threading
from ..profiler import ProfilerWrapper
__all__ = ['BackgroundProfiler']
class BackgroundProfiler(ProfilerWrapper):
signum = signal.SIGUSR2
def __init__(self, profiler, signum=None):
super(BackgroundProfiler, self).__init__(profiler)
if signum is not None:
self.signum = signum
self.event = threading.Event()
def prepare(self):
"""Registers :meth:`_signal_handler` as a signal handler to start
and/or stop the profiler from the background thread. So this function
must be called at the main thread.
"""
return signal.signal(self.signum, self._signal_handler)
def run(self):
self._send_signal()
yield
self._send_signal()
def _send_signal(self):
self.event.clear()
os.kill(os.getpid(), self.signum)
self.event.wait()
def _signal_handler(self, signum, frame):
if self.profiler.is_running():
self.profiler.stop()
else:
self.profiler.start()
self.event.set()
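# ---------------------------------------------------------------------------
# Minimal runnable sketch of the signal handshake implemented by
# _send_signal()/_signal_handler() above.  Assumptions: POSIX (SIGUSR2),
# Python 3, and that ProfilerWrapper exposes the wrapped profiler as
# ``self.profiler`` (as _signal_handler implies).  The demo profiler below is
# a hypothetical stand-in, not the real Profiler API.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import time
    class _DemoProfiler(object):
        # Only the three methods _signal_handler actually calls.
        def __init__(self):
            self._running = False
        def is_running(self):
            return self._running
        def start(self):
            self._running = True
            print('demo profiler started')
        def stop(self):
            self._running = False
            print('demo profiler stopped')
    bg = BackgroundProfiler(_DemoProfiler())
    bg.prepare()  # installs the SIGUSR2 handler; must run in the main thread
    def worker():
        gen = bg.run()
        next(gen)          # asks the main thread to start the profiler
        time.sleep(0.1)    # the code to be profiled would run here
        for _ in gen:      # exhausting the generator asks for a stop
            pass
    t = threading.Thread(target=worker)
    t.start()
    while t.is_alive():    # stay responsive so the signal handlers can run
        t.join(0.05)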
|
{
"content_hash": "524fc6a904e2c7ddbc34ed130da561f5",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 78,
"avg_line_length": 24.403846153846153,
"alnum_prop": 0.6091410559495666,
"repo_name": "JeanPaulShapo/profiling",
"id": "4aa735edd83e48c1d12e9d026657fc9c5df36dc7",
"size": "1293",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "profiling/remote/background.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1705"
},
{
"name": "Python",
"bytes": "126861"
}
],
"symlink_target": ""
}
|
from setuptools import setup
setup(
name='drone-tower',
version='0.2',
description="Ansible Tower plugin for Drone.",
url="https://github.com/msteinert/drone-tower",
maintainer="Drone Contributors",
maintainer_email="support@drone.io",
packages=["drone_tower"],
scripts=["bin/drone-tower"],
install_requires=["ansible-tower-cli", "drone>=0.2"],
)
|
{
"content_hash": "bf45ad8ee594d2322b938d8efc38e95d",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 57,
"avg_line_length": 27.5,
"alnum_prop": 0.6675324675324675,
"repo_name": "msteinert/drone-tower",
"id": "551cc9990cc24c067badfbdea3b4739cd208ff05",
"size": "385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4503"
}
],
"symlink_target": ""
}
|
from __future__ import division
import sys, glob, re, math, datetime, argparse
import imp
ntl = imp.load_source('ntl', 'steps/nnet3/nnet3_train_lib.py')
#exp/nnet3/lstm_self_repair_ld5_sp/log/progress.9.log:component name=Lstm3_i type=SigmoidComponent, dim=1280, self-repair-scale=1e-05, count=1.96e+05, value-avg=[percentiles(0,1,2,5 10,20,50,80,90 95,98,99,100)=(0.05,0.09,0.11,0.15 0.19,0.27,0.50,0.72,0.83 0.88,0.92,0.94,0.99), mean=0.502, stddev=0.23], deriv-avg=[percentiles(0,1,2,5 10,20,50,80,90 95,98,99,100)=(0.009,0.04,0.05,0.06 0.08,0.10,0.14,0.17,0.18 0.19,0.20,0.20,0.21), mean=0.134, stddev=0.0397]
def ParseProgressLogsForNonlinearityStats(exp_dir):
progress_log_files = "%s/log/progress.*.log" % (exp_dir)
stats_per_component_per_iter = {}
progress_log_lines = ntl.RunKaldiCommand('grep -e "value-avg.*deriv-avg" {0}'.format(progress_log_files))[0]
parse_regex = re.compile(".*progress.([0-9]+).log:component name=(.+) type=(.*)Component,.*value-avg=\[.*mean=([0-9\.\-e]+), stddev=([0-9\.e\-]+)\].*deriv-avg=\[.*mean=([0-9\.\-e]+), stddev=([0-9\.e\-]+)\]")
for line in progress_log_lines.split("\n") :
mat_obj = parse_regex.search(line)
if mat_obj is None:
continue
groups = mat_obj.groups()
# groups = ('9', 'Lstm3_i', 'Sigmoid', '0.502', '0.23', '0.134', '0.0397')
iteration = int(groups[0])
component_name = groups[1]
component_type = groups[2]
value_mean = float(groups[3])
value_stddev = float(groups[4])
deriv_mean = float(groups[5])
deriv_stddev = float(groups[6])
try:
stats_per_component_per_iter[component_name]['stats'][iteration] = [value_mean, value_stddev, deriv_mean, deriv_stddev]
except KeyError:
stats_per_component_per_iter[component_name] = {}
stats_per_component_per_iter[component_name]['type'] = component_type
stats_per_component_per_iter[component_name]['stats'] = {}
stats_per_component_per_iter[component_name]['stats'][iteration] = [value_mean, value_stddev, deriv_mean, deriv_stddev]
return stats_per_component_per_iter
def ParseDifferenceString(string):
diff_dict = {}
for parts in string.split():
sub_parts = parts.split(":")
diff_dict[sub_parts[0]] = float(sub_parts[1])
return diff_dict
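# Example: ParseDifferenceString("Final_affine:0.0321 Lstm1_W:0.017")
# returns {'Final_affine': 0.0321, 'Lstm1_W': 0.017}.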
#exp/chain/cwrnn_trial2_ld5_sp/log/progress.245.log:component name=BLstm1_forward_c type=ClipGradientComponent, dim=512, norm-based-clipping=true, clipping-threshold=30, clipped-proportion=0.000565527, self-repair-clipped-proportion-threshold=0.01, self-repair-target=0, self-repair-scale=1
class MalformedClippedProportionLineException(Exception):
def __init__(self, line):
Exception.__init__(self, "Malformed line encountered while trying to"
" extract clipped-proportions.\n"+line)
def ParseProgressLogsForClippedProportion(exp_dir):
progress_log_files = "%s/log/progress.*.log" % (exp_dir)
component_names = set([])
progress_log_lines = ntl.RunKaldiCommand('grep -e "{0}" {1}'.format("clipped-proportion", progress_log_files))[0]
parse_regex = re.compile(".*progress\.([0-9]+)\.log:component name=(.*) type=.* clipped-proportion=([0-9\.e\-]+)")
cp_per_component_per_iter = {}
max_iteration = 0
component_names = set([])
for line in progress_log_lines.split("\n") :
mat_obj = parse_regex.search(line)
if mat_obj is None:
if line.strip() == "":
continue
raise MalformedClippedProportionLineException(line)
groups = mat_obj.groups()
iteration = int(groups[0])
max_iteration = max(max_iteration, iteration)
name = groups[1]
clipped_proportion = float(groups[2])
if clipped_proportion > 1:
raise MalformedClippedProportionLineException(line)
if iteration not in cp_per_component_per_iter:
cp_per_component_per_iter[iteration] = {}
cp_per_component_per_iter[iteration][name] = clipped_proportion
component_names.add(name)
component_names = list(component_names)
component_names.sort()
# rearranging the data into an array
# and into a cp_per_iter_per_component
cp_per_iter_per_component = {}
for component_name in component_names:
cp_per_iter_per_component[component_name] = []
data = []
data.append(["iteration"]+component_names)
for iter in range(max_iteration+1):
if iter not in cp_per_component_per_iter:
continue
comp_dict = cp_per_component_per_iter[iter]
row = [iter]
for component in component_names:
try:
row.append(comp_dict[component])
cp_per_iter_per_component[component].append([iter, comp_dict[component]])
except KeyError:
# if clipped proportion is not available for a particular
# component it is set to None
# this usually happens during layer-wise discriminative training
row.append(None)
data.append(row)
return {'table' : data,
'cp_per_component_per_iter' : cp_per_component_per_iter,
'cp_per_iter_per_component' : cp_per_iter_per_component}
#exp/chain/cwrnn_trial2_ld5_sp/log/progress.245.log:LOG (nnet3-show-progress:main():nnet3-show-progress.cc:144) Relative parameter differences per layer are [ Cwrnn1_T3_W_r:0.0171537 Cwrnn1_T3_W_x:1.33338e-07 Cwrnn1_T2_W_r:0.048075 Cwrnn1_T2_W_x:1.34088e-07 Cwrnn1_T1_W_r:0.0157277 Cwrnn1_T1_W_x:0.0212704 Final_affine:0.0321521 Cwrnn2_T3_W_r:0.0212082 Cwrnn2_T3_W_x:1.33691e-07 Cwrnn2_T2_W_r:0.0212978 Cwrnn2_T2_W_x:1.33401e-07 Cwrnn2_T1_W_r:0.014976 Cwrnn2_T1_W_x:0.0233588 Cwrnn3_T3_W_r:0.0237165 Cwrnn3_T3_W_x:1.33184e-07 Cwrnn3_T2_W_r:0.0239754 Cwrnn3_T2_W_x:1.3296e-07 Cwrnn3_T1_W_r:0.0194809 Cwrnn3_T1_W_x:0.0271934 ]
def ParseProgressLogsForParamDiff(exp_dir, pattern, logger = None):
if pattern not in set(["Relative parameter differences", "Parameter differences"]):
raise Exception("Unknown value for pattern : {0}".format(pattern))
progress_log_files = "%s/log/progress.*.log" % (exp_dir)
progress_per_iter = {}
component_names = set([])
progress_log_lines = ntl.RunKaldiCommand('grep -e "{0}" {1}'.format(pattern, progress_log_files))[0]
parse_regex = re.compile(".*progress\.([0-9]+)\.log:LOG.*{0}.*\[(.*)\]".format(pattern))
for line in progress_log_lines.split("\n") :
mat_obj = parse_regex.search(line)
if mat_obj is None:
continue
groups = mat_obj.groups()
iteration = groups[0]
differences = ParseDifferenceString(groups[1])
component_names = component_names.union(differences.keys())
progress_per_iter[int(iteration)] = differences
component_names = list(component_names)
component_names.sort()
# rearranging the parameter differences available per iter
# into parameter differences per component
progress_per_component = {}
for cn in component_names:
progress_per_component[cn] = {}
max_iter = max(progress_per_iter.keys())
total_missing_iterations = 0
gave_user_warning = False
for iter in range(max_iter + 1):
try:
component_dict = progress_per_iter[iter]
except KeyError:
continue
for component_name in component_names:
try:
progress_per_component[component_name][iter] = component_dict[component_name]
except KeyError:
total_missing_iterations += 1
# the component was not found this iteration, maybe because of layer-wise discriminative training
pass
if (total_missing_iterations/len(component_names) > 20) and not gave_user_warning and logger is not None:
logger.warning("There are more than {0} missing iterations per component. Something might be wrong.".format(total_missing_iterations/len(component_names)))
gave_user_warning = True
return {'progress_per_component' : progress_per_component,
'component_names' : component_names,
'max_iter' : max_iter}
def ParseTrainLogs(exp_dir):
train_log_files = "%s/log/train.*.log" % (exp_dir)
train_log_lines = ntl.RunKaldiCommand('grep -e Accounting {0}'.format(train_log_files))[0]
parse_regex = re.compile(".*train\.([0-9]+)\.([0-9]+)\.log:# Accounting: time=([0-9]+) thread.*")
train_times = {}
for line in train_log_lines.split('\n'):
mat_obj = parse_regex.search(line)
if mat_obj is not None:
groups = mat_obj.groups()
try:
train_times[int(groups[0])][int(groups[1])] = float(groups[2])
except KeyError:
train_times[int(groups[0])] = {}
train_times[int(groups[0])][int(groups[1])] = float(groups[2])
iters = train_times.keys()
for iter in iters:
values = train_times[iter].values()
train_times[iter] = max(values)
return train_times
def ParseProbLogs(exp_dir, key = 'accuracy'):
train_prob_files = "%s/log/compute_prob_train.*.log" % (exp_dir)
valid_prob_files = "%s/log/compute_prob_valid.*.log" % (exp_dir)
train_prob_strings = ntl.RunKaldiCommand('grep -e {0} {1}'.format(key, train_prob_files), wait = True)[0]
valid_prob_strings = ntl.RunKaldiCommand('grep -e {0} {1}'.format(key, valid_prob_files))[0]
#LOG (nnet3-chain-compute-prob:PrintTotalStats():nnet-chain-diagnostics.cc:149) Overall log-probability for 'output' is -0.399395 + -0.013437 = -0.412832 per frame, over 20000 fra
#LOG (nnet3-chain-compute-prob:PrintTotalStats():nnet-chain-diagnostics.cc:144) Overall log-probability for 'output' is -0.307255 per frame, over 20000 frames.
parse_regex = re.compile(".*compute_prob_.*\.([0-9]+).log:LOG .nnet3.*compute-prob:PrintTotalStats..:nnet.*diagnostics.cc:[0-9]+. Overall ([a-zA-Z\-]+) for 'output'.*is ([0-9.\-e]+) .*per frame")
train_loss={}
valid_loss={}
for line in train_prob_strings.split('\n'):
mat_obj = parse_regex.search(line)
if mat_obj is not None:
groups = mat_obj.groups()
if groups[1] == key:
train_loss[int(groups[0])] = groups[2]
for line in valid_prob_strings.split('\n'):
mat_obj = parse_regex.search(line)
if mat_obj is not None:
groups = mat_obj.groups()
if groups[1] == key:
valid_loss[int(groups[0])] = groups[2]
iters = list(set(valid_loss.keys()).intersection(train_loss.keys()))
iters.sort()
return map(lambda x: (int(x), float(train_loss[x]), float(valid_loss[x])), iters)
def GenerateAccuracyReport(exp_dir, key = "accuracy"):
times = ParseTrainLogs(exp_dir)
data = ParseProbLogs(exp_dir, key)
report = []
report.append("%Iter\tduration\ttrain_loss\tvalid_loss\tdifference")
for x in data:
try:
report.append("%d\t%s\t%g\t%g\t%g" % (x[0], str(times[x[0]]), x[1], x[2], x[2]-x[1]))
except KeyError:
continue
total_time = 0
for iter in times.keys():
total_time += times[iter]
report.append("Total training time is {0}\n".format(str(datetime.timedelta(seconds = total_time))))
return ["\n".join(report), times, data]
|
{
"content_hash": "08b4664a7a032211d747631444e521e3",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 624,
"avg_line_length": 49.81578947368421,
"alnum_prop": 0.6341785525620708,
"repo_name": "keighrim/kaldi-yesno-tutorial",
"id": "7d01400310288d4165fbbfff13d7affc679cfa4c",
"size": "11412",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "steps/nnet3/report/nnet3_log_parse_lib.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "376036"
},
{
"name": "Python",
"bytes": "749628"
},
{
"name": "Shell",
"bytes": "2221970"
}
],
"symlink_target": ""
}
|
from urllib.parse import urlencode
try:
import json as simplejson
except ImportError:
try:
import simplejson
except ImportError:
from django.utils import simplejson
from social_auth.backends import BaseOAuth2, OAuthBackend
from social_auth.utils import dsa_urlopen
FOURSQUARE_SERVER = 'foursquare.com'
FOURSQUARE_AUTHORIZATION_URL = 'https://foursquare.com/oauth2/authenticate'
FOURSQUARE_ACCESS_TOKEN_URL = 'https://foursquare.com/oauth2/access_token'
FOURSQUARE_CHECK_AUTH = 'https://api.foursquare.com/v2/users/self'
class FoursquareBackend(OAuthBackend):
name = 'foursquare'
def get_user_id(self, details, response):
return response['response']['user']['id']
def get_user_details(self, response):
"""Return user details from Foursquare account"""
firstName = response['response']['user']['firstName']
lastName = response['response']['user'].get('lastName', '')
email = response['response']['user']['contact']['email']
return {'username': firstName + ' ' + lastName,
'first_name': firstName,
'last_name': lastName,
'email': email}
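# For example (illustrative values), a response whose user dict carries
# firstName 'Jane' and lastName 'Doe' yields:
# {'username': 'Jane Doe', 'first_name': 'Jane', 'last_name': 'Doe',
# 'email': '...'}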
class FoursquareAuth(BaseOAuth2):
"""Foursquare OAuth mechanism"""
AUTHORIZATION_URL = FOURSQUARE_AUTHORIZATION_URL
ACCESS_TOKEN_URL = FOURSQUARE_ACCESS_TOKEN_URL
AUTH_BACKEND = FoursquareBackend
SETTINGS_KEY_NAME = 'FOURSQUARE_CONSUMER_KEY'
SETTINGS_SECRET_NAME = 'FOURSQUARE_CONSUMER_SECRET'
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
params = {'oauth_token': access_token}
url = FOURSQUARE_CHECK_AUTH + '?' + urlencode(params)
try:
return simplejson.load(dsa_urlopen(url))
except ValueError:
return None
# Backend definition
BACKENDS = {
'foursquare': FoursquareAuth,
}
|
{
"content_hash": "ae49d9496bb4d8dfe084e5c9ca5f8010",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 75,
"avg_line_length": 32.13559322033898,
"alnum_prop": 0.6656118143459916,
"repo_name": "limdauto/django-social-auth",
"id": "f641c9681985c58fb6dc24ae12443f19567e232b",
"size": "1896",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "social_auth/backends/contrib/foursquare.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "20595"
},
{
"name": "Makefile",
"bytes": "983"
},
{
"name": "Python",
"bytes": "366255"
}
],
"symlink_target": ""
}
|
import os
# We'll render HTML templates and access data sent by POST
# using the request object from flask. Redirect and url_for
# will be used to redirect the user once the upload is done
# and send_from_directory will help us to send/show on the
# browser the file that the user just uploaded
from flask import Flask, render_template, request, redirect, url_for
from flask import send_from_directory
from werkzeug.utils import secure_filename
from flask import jsonify
# Initialize the Flask application
app = Flask(__name__)
# This is the path to the upload directory
app.config['UPLOAD_FOLDER'] = 'uploads/'
# These are the extensions that we accept for upload
app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg', 'gif'])
app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 # 2MB
# If the file you are trying to upload is too big, you'll get this message
@app.errorhandler(413)
def request_entity_too_large(error):
message = 'The file is too large, my friend.<br>'
maxFileSizeKB = app.config['MAX_CONTENT_LENGTH']/(1024)
message += "The biggest I can handle is " + str(maxFileSizeKB) + "KB"
message += "<a href='" + url_for("index") + "'>Try again</a>"
return message, 413
# For a given file, return whether it's an allowed type or not
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
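# Note that this check is case-sensitive: allowed_file('photo.jpg') returns
# True, but allowed_file('photo.JPG') returns False because 'JPG' is not in
# ALLOWED_EXTENSIONS; lowercasing the suffix first would accept both.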
# The root where we ask user to enter a file
@app.route('/')
def index():
return render_template('index.html')
# Route that will process the file upload
@app.route('/upload', methods=['POST'])
def upload():
# Get the name of the uploaded file
file = request.files['file']
# Check if the file is one of the allowed types/extensions
if not allowed_file(file.filename):
message = "Sorry. Only files that end with one of these "
message += "extensions is permitted: "
message += str(app.config['ALLOWED_EXTENSIONS'])
message += "<a href='" + url_for("index") + "'>Try again</a>"
return message
elif not file:
message = "Sorry. There was an error with that file.<br>"
message += "<a href='" + url_for("index") + "'>Try again</a>"
return message
else:
# Make the filename safe, remove unsupported chars
filename = secure_filename(file.filename)
# Move the file from the temporary folder to
# the upload folder we set up
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
# Redirect the user to the uploaded_file route, which
# will basically show the uploaded file in the browser
return redirect(url_for('uploaded_file',filename=filename))
# This route is expecting a parameter containing the name
# of a file. Then it will locate that file in the upload
# directory and show it in the browser, so if the user uploads
# an image, that image is going to be shown after the upload
@app.route('/uploads/<filename>')
def uploaded_file(filename):
return send_from_directory(app.config['UPLOAD_FOLDER'],
filename)
if __name__ == '__main__':
app.run(host="0.0.0.0",port=8888,debug=False)
|
{
"content_hash": "74a32c34e152511892d955c4558c97e6",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 74,
"avg_line_length": 40.0875,
"alnum_prop": 0.672903024633614,
"repo_name": "pconrad/heroku-try-file-upload",
"id": "c775bda6bd5beaacdb0cf5511fcbe1fb0613169d",
"size": "3230",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "554"
},
{
"name": "Python",
"bytes": "3230"
}
],
"symlink_target": ""
}
|
import logging
import threading
import warnings
from django.db import models
from django.http import HttpResponse
from django.contrib.auth import get_user_model
from django.conf import settings
from django.dispatch import Signal
from .json import jsondumps, JsonResponse
transaction_commit = Signal(providing_args=['changeset'])
class Changeset(models.Model):
source = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, blank=True, related_name='changesets')
date = models.DateTimeField(auto_now=True) # When this changeset is "final". Ideally equal to the moment the DB commits the transaction.
uuid = models.CharField(max_length=36, blank=True, null=True)
def __str__(self):
uuid = self.uuid[:8] if self.uuid else None
username = self.user.username if self.user else None
return '{}/{} by {} on {}'.format(self.id, uuid, username, self.date.strftime('%Y%m%d-%H%M%S'))
class Meta:
ordering = ['id']
class Change(models.Model):
changeset = models.ForeignKey(Changeset, on_delete=models.CASCADE, db_index=True, related_name='changes')
model = models.CharField(max_length=64, db_index=True)
oid = models.IntegerField(db_index=True)
field = models.CharField(max_length=64, db_index=True)
diff = models.BooleanField(default=False)
before = models.TextField(blank=True, null=True)
after = models.TextField(blank=True, null=True)
def __str__(self):
return '{}: {}({}).{} {} -> {}'.format(self.id, self.model, self.oid, self.field, self.before[:20], self.after[:20])
class Meta:
ordering = ['id']
logger = logging.getLogger(__name__)
class __Transaction(threading.local):
def __init__(self):
logger.info('Creating new _Transaction for thread {}'.format(threading.current_thread().name))
self.user = None
self.uuid = None
self.source = None
self.started = False
self.changes = {}
def start(self, *, user=None, uuid=None, source=None):
if self.started:
raise RuntimeError('Called _Transaction.start() while there is an open transaction')
self.started = True
self.changes.clear()
self.user = user
self.uuid = uuid
self.source = source
def stop(self):
if not self.started:
raise RuntimeError('Called _Transaction.stop() while there is no open transaction')
self.started = False
self.changes.clear()
_Transaction = __Transaction()
class NewInstanceField:
pass
class DeferredM2M:
pass
# History context manager. Use this.
class atomic:
def __init__(self, source=None, user=None, uuid=None):
self.source = source
self.user = user
self.uuid = uuid
def __enter__(self):
_start(self.source, self.user, self.uuid)
def __exit__(self, etype, value, traceback):
if etype is None:
_commit()
return True
else:
_abort()
return False # reraise
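# A minimal usage sketch (hypothetical names; assumes change() is called
# by your model-save hooks while the block is open):
#
# with atomic(source='api', user=request.user):
#     change(MyModel, obj.id, 'name', 'old value', 'new value')
# # on clean exit _commit() writes one Changeset holding all Changes;
# # on exception _abort() discards them and the exception is reraised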
def _start(source=None, user=None, uuid=None):
if source is None:
raise ValueError('source may not be None')
_Transaction.start(source=source, user=user, uuid=uuid)
# old can be NewInstanceField, which will translate to None on commit.
# old and new can be DeferredM2M. But only for actual m2m fields or SHIT WILL BREAK.
def change(model, oid, field, old, new):
# FK fields on newly created objects cause annoyances. Ignore them.
if oid is NewInstanceField:
return
hid = model, oid, field
# Re-use old old value (so we accumulate all changes in one)
if hid in _Transaction.changes:
old = _Transaction.changes[hid][0]
elif old is DeferredM2M:
# If we haven't seen this field before, and it's a m2m of
# unknown value, we need to get the value now.
#
# The target model may be a non-Binder model (e.g. User), so lbyl.
if hasattr(model, 'binder_serialize_m2m_field'):
old = model(id=oid).binder_serialize_m2m_field(field)
_Transaction.changes[hid] = old, new, False
def m2m_diff(old, new):
return sorted(old - new), sorted(new - old), True
# FIXME: use bulk inserts for efficiency.
def _commit():
# Fill in the deferred m2ms
for (model, oid, field), (old, new, diff) in _Transaction.changes.items():
if new is DeferredM2M:
# The target model may be a non-Binder model (e.g. User), so lbyl.
if hasattr(model, 'binder_serialize_m2m_field'):
new = model(id=oid).binder_serialize_m2m_field(field)
_Transaction.changes[model, oid, field] = m2m_diff(old, new)
# Filter non-changes
_Transaction.changes = {idx: (old, new, diff) for idx, (old, new, diff) in _Transaction.changes.items() if old != new}
if not _Transaction.changes:
_Transaction.stop()
return
user = _Transaction.user if _Transaction.user and not _Transaction.user.is_anonymous else None
changeset = Changeset(
source=_Transaction.source,
user=user,
uuid=_Transaction.uuid,
)
changeset.save()
for (model, oid, field), (old, new, diff) in _Transaction.changes.items():
# New instances get None for all the before values
if old is NewInstanceField:
old = None
# Actually record the change
change = Change(
changeset=changeset,
model=model.__name__,
oid=oid,
field=field,
diff=diff,
before=jsondumps(old),
after=jsondumps(new),
)
change.save()
transaction_commit.send(sender=None, changeset=changeset)
# Save the changeset again, to update the date to be as close to DB transaction commit start as possible.
changeset.save()
_Transaction.stop()
def _abort():
_Transaction.stop()
def view_changesets(request, changesets):
data = []
userids = set()
for cs in changesets:
changes = []
for c in cs.changes.order_by('model', 'oid', 'field'):
changes.append({'model': c.model, 'oid': c.oid, 'field': c.field, 'diff': c.diff, 'before': c.before, 'after': c.after})
data.append({'date': cs.date, 'uuid': cs.uuid, 'id': cs.id, 'source': cs.source, 'user': cs.user_id, 'changes': changes})
if cs.user_id:
userids.add(cs.user_id)
users = []
for u in get_user_model().objects.filter(id__in=userids):
users.append({'id': u.id, 'username': u.username, 'email': u.email, 'first_name': u.first_name, 'last_name': u.last_name})
return JsonResponse({'data': data, 'with': {'user': users}})
def view_changesets_debug(request, changesets):
body = ['<html>', '<head>', '<style type="text/css">td {padding: 0px 20px;} th {padding: 0px 20px;}</style>', '</head>', '<body>']
for cs in changesets:
username = cs.user.username if cs.user else None
body.append('<h3>Changeset {} by {}: {} on {} {{{}}}'.format(cs.id, cs.source, username, cs.date.strftime('%Y-%m-%d %H:%M:%S'), cs.uuid))
body.append('<br><br>')
body.append('<table>')
body.append('<tr><th>model</th><th>object id</th><th>field</th><th><diff</th><th>before</th><th>after</th></tr>')
for c in cs.changes.order_by('model', 'oid', 'field'):
body.append('<tr><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td><td>{}</td></tr>'.format(
c.model, c.oid, c.field, c.diff, c.before, c.after))
body.append('</table>')
body.append('<br><br>')
body.append('</body>')
body.append('</html>')
return HttpResponse('\n'.join(body))
# Deprecation wrappers, remove at some point
def start(*args, **kwargs):
warnings.warn("Don't call history.start() directly, use the history.atomic() context manager", DeprecationWarning)
_start(*args, **kwargs)
def abort(*args, **kwargs):
warnings.warn("Don't call history.abort() directly, use the history.atomic() context manager", DeprecationWarning)
_abort(*args, **kwargs)
def commit(*args, **kwargs):
warnings.warn("Don't call history.commit() directly, use the history.atomic() context manager", DeprecationWarning)
_commit(*args, **kwargs)
|
{
"content_hash": "4ab69dfb61d68e4af4fb7c6f94d6ecc4",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 139,
"avg_line_length": 29.846456692913385,
"alnum_prop": 0.687508244294948,
"repo_name": "CodeYellowBV/django-binder",
"id": "698f93770e67482b4a3eca988577e3c0d087b9dd",
"size": "7581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "binder/history.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "85"
},
{
"name": "Python",
"bytes": "578095"
}
],
"symlink_target": ""
}
|
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:49876")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:49876")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.getwork(data)
except:
print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
|
{
"content_hash": "485c97d3a032c9c31fc426f30c756479",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 79,
"avg_line_length": 24.188271604938272,
"alnum_prop": 0.661860405767513,
"repo_name": "goldbuck/goldbuck",
"id": "660a4d0b7ec56d904b99c102f3403a29d0553446",
"size": "7837",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/bitrpc/bitrpc.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "14758"
},
{
"name": "C++",
"bytes": "1442144"
},
{
"name": "Erlang",
"bytes": "6839"
},
{
"name": "JavaScript",
"bytes": "12"
},
{
"name": "Makefile",
"bytes": "79141"
},
{
"name": "Objective-C++",
"bytes": "2463"
},
{
"name": "PHP",
"bytes": "1948"
},
{
"name": "Perl",
"bytes": "16919"
},
{
"name": "Python",
"bytes": "47543"
},
{
"name": "Shell",
"bytes": "2615"
}
],
"symlink_target": ""
}
|
"""Defines the InstrumentationOptions named tuple."""
import collections
InstrumentationOptions = collections.namedtuple('InstrumentationOptions', [
'tool',
'annotations',
'exclude_annotations',
'test_filter',
'test_data',
'save_perf_json',
'screenshot_failures',
'wait_for_debugger',
'coverage_dir',
'test_apk',
'test_apk_path',
'test_apk_jar_path',
'test_runner',
'test_support_apk_path',
'device_flags',
'isolate_file_path',
'set_asserts',
'delete_stale_data',
'timeout_scale'])
|
{
"content_hash": "e702b553f167e656215ef777eb2d498c",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 75,
"avg_line_length": 23.333333333333332,
"alnum_prop": 0.6321428571428571,
"repo_name": "Workday/OpenFrame",
"id": "b5276b77134b8cd4293619e2768da216aba5f313",
"size": "723",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "build/android/pylib/instrumentation/test_options.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
version_info = (5, 6, 1)
pre_info = ''
dev_info = ''
def create_valid_version(release_info, epoch=None, pre_input='', dev_input=''):
'''
Creates a PEP 440-valid version number given a tuple of integers
and optional epoch, prerelease, and developmental info.
Parameters
----------
release_info : Tuple(Int)
epoch : Int, default None
pre_input : Str, default ''
dev_input : Str, default ''
'''
pep440_err = "The version number is not a pep 440 compliant version number"
if epoch is not None:
epoch_seg = str(epoch) + '!'
else:
epoch_seg = ''
release_seg = '.'.join(map(str, release_info))
_magic_pre = ['a','b','rc']
if pre_input!='' and not any([pre_input.startswith(prefix) for prefix in _magic_pre]):
raise ValueError(pep440_err + "\n please fix your prerelease segment.")
else:
pre_seg = pre_input
if dev_input=='':
dev_seg = dev_input
elif not dev_input.startswith('.') and dev_input.startswith('dev'):
dev_seg = ''.join(['.', dev_input])
elif dev_input.startswith('.dev'):
dev_seg = dev_input
elif dev_input!='':
raise ValueError(pep440_err + "\n please fix your development segment.")
if dev_input!='' and not any([dev_seg.endswith(str(n)) for n in range(10)]):
dev_seg = ''.join([dev_seg,'0'])
out_version = ''.join([epoch_seg, release_seg, pre_seg, dev_seg])
import re
def is_canonical(version):
return re.match(r'^([1-9]\d*!)?(0|[1-9]\d*)'
r'(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?'
r'(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*))?$',
version
) is not None
if is_canonical(out_version):
return out_version
else:
raise ValueError(pep440_err)
__version__ = create_valid_version(version_info, pre_input=pre_info, dev_input=dev_info)
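# Example (doctest-style):
# >>> create_valid_version((5, 6, 1), pre_input='rc1', dev_input='dev2')
# '5.6.1rc1.dev2'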
|
{
"content_hash": "8022e6571b290bf1f5ba8656ebe2df97",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 90,
"avg_line_length": 31.047619047619047,
"alnum_prop": 0.5587934560327198,
"repo_name": "sserrot/champion_relationships",
"id": "3598207218f67f87264df16fb9e39b62f524097f",
"size": "1956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "venv/Lib/site-packages/nbconvert/_version.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "128"
},
{
"name": "HTML",
"bytes": "18324224"
},
{
"name": "Jupyter Notebook",
"bytes": "9131072"
},
{
"name": "Python",
"bytes": "10702"
}
],
"symlink_target": ""
}
|
import sys
import math
def readline():
return sys.stdin.readline().strip()
def main():
while True:
record = readline()
if record == "#":
break
charging_step, phone_num, start_hour, start_minute, end_hour, end_minute = record.split()
breakdown = break_down_into_categories(int(start_hour), int(start_minute), int(end_hour), int(end_minute))
total_charge = (breakdown[day_rate] * schedule[charging_step][day_rate]
+ breakdown[evening_rate] * schedule[charging_step][evening_rate]
+ breakdown[night_rate] * schedule[charging_step][night_rate])
print("{0:>10}{1:>6}{2:>6}{3:>6}{4:>3}{5:>8.2f}".format(phone_num, breakdown[day_rate], breakdown[evening_rate], breakdown[night_rate], charging_step, total_charge))
schedule = {
'A':[0.1,0.06,0.02],
'B':[0.25,0.15,0.05],
'C':[0.53,0.33,0.13],
'D':[0.87,0.47,0.17],
'E':[1.44,0.8,0.3]
}
day_rate, evening_rate, night_rate = 0, 1, 2
def rate_type_for_minute_ending_at(hour, minute):
if (hour > 8 and hour < 18) or (hour == 8 and minute > 0) or (hour == 18 and minute == 0):
return day_rate
if (hour > 18 and hour < 22) or (hour == 18 and minute > 0) or (hour == 22 and minute == 0):
return evening_rate
return night_rate
def break_down_into_categories(start_hour, start_minute, end_hour, end_minute):
breakdown = {day_rate:0, evening_rate:0, night_rate:0}
hour, minute = increment_by_one_minute(start_hour, start_minute)
while True:
rate_type = rate_type_for_minute_ending_at(hour, minute)
breakdown[rate_type] += 1
if hour == end_hour and minute == end_minute:
break
hour, minute = increment_by_one_minute(hour, minute)
return breakdown
def increment_by_one_minute(hour, minute):
if minute + 1 == 60:
if hour + 1 == 24:
return (0, 0)
else:
return (hour + 1, 0)
return (hour, minute + 1)
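# Worked example: a call from 07:50 to 08:10 bills the minutes ending
# 07:51-08:00 at the night rate and 08:01-08:10 at the day rate, so for
# charging step 'A' the total is 10*0.1 + 10*0.02 = 1.20.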
if __name__=="__main__":
main()
|
{
"content_hash": "ccf1df87ab30ba7caf7e93e0e75b694c",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 167,
"avg_line_length": 33.107142857142854,
"alnum_prop": 0.6521035598705501,
"repo_name": "andrey-yemelyanov/competitive-programming",
"id": "1fbb879614f2e426d275423dc9f440edb39048d4",
"size": "1986",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cp-book/ch1/adhoc/reallifeharder/145_GongnawalandTelecom.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1554061"
},
{
"name": "Python",
"bytes": "73302"
}
],
"symlink_target": ""
}
|
from mock import patch, Mock
from nose.tools import raises
from twilio.rest.resources import Transcriptions
from tools import create_mock_json
BASE_URI = "https://api.twilio.com/2010-04-01/Accounts/AC123"
ACCOUNT_SID = "AC123"
AUTH = (ACCOUNT_SID, "token")
transcriptions = Transcriptions(BASE_URI, AUTH)
@patch("twilio.rest.resources.make_twilio_request")
def test_paging(mock):
resp = create_mock_json("tests/resources/transcriptions_list.json")
mock.return_value = resp
uri = "%s/Transcriptions" % (BASE_URI)
transcriptions.list(page=2)
mock.assert_called_with("GET", uri, params={"Page": 2}, auth=AUTH)
@patch("twilio.rest.resources.make_twilio_request")
def test_get(mock):
resp = create_mock_json("tests/resources/transcriptions_instance.json")
mock.return_value = resp
uri = "%s/Transcriptions/TR123" % (BASE_URI)
transcriptions.get("TR123")
mock.assert_called_with("GET", uri, auth=AUTH)
@raises(AttributeError)
def test_create():
transcriptions.create
@raises(AttributeError)
def test_update():
transcriptions.update
@raises(AttributeError)
def test_delete():
transcriptions.delete
|
{
"content_hash": "61314317ac3dabfac72fa80f03b295b2",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 75,
"avg_line_length": 26.272727272727273,
"alnum_prop": 0.7257785467128027,
"repo_name": "RobSpectre/twilio-python",
"id": "3d10a5f7e1b25c651aca0288b9b3aec169fbb663",
"size": "1156",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_transcriptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "231009"
}
],
"symlink_target": ""
}
|
"""Python binding of Joystick wrapper of LetMeCreate library."""
import ctypes
_LIB = ctypes.CDLL('libletmecreate_click.so')
def get_x():
"""Returns the X position of the joystick.
Note: An exception is thrown if it fails to read the X position from the
chip.
"""
pos_x = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_x(ctypes.byref(pos_x))
if ret < 0:
raise Exception("joystick click get x failed")
return pos_x.value
def get_y():
"""Returns the Y position of the joystick.
Note: An exception is thrown if it fails to read the Y position from the
chip.
"""
pos_y = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_y(ctypes.byref(pos_y))
if ret < 0:
raise Exception("joystick click get y failed")
return pos_y.value
def get_position():
"""Returns the X position of the joystick.
Note: An exception is thrown if it fails to read the position from the
chip.
"""
pos_x = ctypes.c_int8(0)
pos_y = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_position(ctypes.byref(pos_x),
ctypes.byref(pos_y))
if ret < 0:
raise Exception("joystick click get position failed")
return (pos_x.value, pos_y.value)
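# A minimal usage sketch (requires libletmecreate_click.so and a Joystick
# Click board to be attached):
#
# >>> x, y = get_position()   # each coordinate is a signed 8-bit value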
|
{
"content_hash": "88afe6d5507eeef2a1a56d32c6742221",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 76,
"avg_line_length": 27.565217391304348,
"alnum_prop": 0.6246056782334385,
"repo_name": "francois-berder/PyLetMeCreate",
"id": "a9391b96568c6bac2f8fe2e5ef17a43b46df3099",
"size": "1291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "letmecreate/click/joystick.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "85404"
}
],
"symlink_target": ""
}
|
"""Tests for IPython.utils.module_paths.py"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import with_statement
import os
import shutil
import sys
import tempfile
from os.path import join, abspath, split
from IPython.testing.tools import make_tempfile
import IPython.utils.module_paths as mp
env = os.environ
TEST_FILE_PATH = split(abspath(__file__))[0]
TMP_TEST_DIR = tempfile.mkdtemp()
#
# Setup/teardown functions/decorators
#
old_syspath = sys.path
def make_empty_file(fname):
f = open(fname, 'w')
f.close()
def setup():
"""Setup testenvironment for the module:
"""
# Do not mask exceptions here. In particular, catching WindowsError is a
# problem because that exception is only defined on Windows...
os.makedirs(join(TMP_TEST_DIR, "xmod"))
os.makedirs(join(TMP_TEST_DIR, "nomod"))
make_empty_file(join(TMP_TEST_DIR, "xmod/__init__.py"))
make_empty_file(join(TMP_TEST_DIR, "xmod/sub.py"))
make_empty_file(join(TMP_TEST_DIR, "pack.py"))
make_empty_file(join(TMP_TEST_DIR, "packpyc.pyc"))
sys.path = [TMP_TEST_DIR]
def teardown():
"""Teardown testenvironment for the module:
- Remove tempdir
- restore sys.path
"""
# Note: we remove the parent test dir, which is the root of all test
# subdirs we may have created. Use shutil instead of os.removedirs, so
# that non-empty directories are all recursively removed.
shutil.rmtree(TMP_TEST_DIR)
sys.path = old_syspath
def test_get_init_1():
"""See if get_init can find __init__.py in this testdir"""
with make_tempfile(join(TMP_TEST_DIR, "__init__.py")):
assert mp.get_init(TMP_TEST_DIR)
def test_get_init_2():
"""See if get_init can find __init__.pyw in this testdir"""
with make_tempfile(join(TMP_TEST_DIR, "__init__.pyw")):
assert mp.get_init(TMP_TEST_DIR)
def test_get_init_3():
"""get_init can't find __init__.pyc in this testdir"""
with make_tempfile(join(TMP_TEST_DIR, "__init__.pyc")):
assert mp.get_init(TMP_TEST_DIR) is None
def test_get_init_4():
"""get_init can't find __init__ in empty testdir"""
assert mp.get_init(TMP_TEST_DIR) is None
def test_find_mod_1():
modpath = join(TMP_TEST_DIR, "xmod", "__init__.py")
assert mp.find_mod("xmod") == modpath
def test_find_mod_2():
modpath = join(TMP_TEST_DIR, "xmod", "__init__.py")
assert mp.find_mod("xmod") == modpath
def test_find_mod_3():
modpath = join(TMP_TEST_DIR, "xmod", "sub.py")
assert mp.find_mod("xmod.sub") == modpath
def test_find_mod_4():
modpath = join(TMP_TEST_DIR, "pack.py")
assert mp.find_mod("pack") == modpath
def test_find_mod_5():
assert mp.find_mod("packpyc") is None
def test_find_module_1():
modpath = join(TMP_TEST_DIR, "xmod")
assert mp.find_module("xmod") == modpath
def test_find_module_2():
"""Testing sys.path that is empty"""
assert mp.find_module("xmod", []) is None
def test_find_module_3():
"""Testing sys.path that is empty"""
assert mp.find_module(None, None) is None
def test_find_module_4():
"""Testing sys.path that is empty"""
assert mp.find_module(None) is None
def test_find_module_5():
assert mp.find_module("xmod.nopack") is None
|
{
"content_hash": "80d4f47a90adaf9dbc9ff2dbdf1f921d",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 78,
"avg_line_length": 27.202898550724637,
"alnum_prop": 0.597229621736814,
"repo_name": "mattvonrocketstein/smash",
"id": "4e7c25b2855c5e2ee3f7aa39d3914c1f292ba91e",
"size": "3772",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smashlib/ipy3x/utils/tests/test_module_paths.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "162188"
},
{
"name": "HTML",
"bytes": "32106"
},
{
"name": "JavaScript",
"bytes": "1615935"
},
{
"name": "Makefile",
"bytes": "550"
},
{
"name": "Python",
"bytes": "4934398"
},
{
"name": "Shell",
"bytes": "2990"
}
],
"symlink_target": ""
}
|
import argparse
import multiprocessing
import os
import site
import sys
from configparser import ConfigParser
from configparser import ExtendedInterpolation
from pathlib import Path
from typing import Any
from typing import AnyStr
from maps_generator.utils.md5 import md5_ext
from maps_generator.utils.system import total_virtual_memory
ETC_DIR = os.path.join(os.path.dirname(__file__), "..", "var", "etc")
parser = argparse.ArgumentParser(add_help=False)
opt_config = "--config"
parser.add_argument(opt_config, type=str, default="", help="Path to config")
def get_config_path(config_path: AnyStr):
"""
Tries to get the --config value from the command line, then from the
MM_GEN__CONFIG environment variable; if neither is set, returns config_path.
"""
argv = sys.argv
indexes = (-1, -1)
for i, opt in enumerate(argv):
if opt.startswith(f"{opt_config}="):
indexes = (i, i + 1)
if opt == opt_config:
indexes = (i, i + 2)
config_args = argv[indexes[0] : indexes[1]]
if config_args:
return parser.parse_args(config_args).config
config_var = os.environ.get("MM_GEN__CONFIG")
return config_path if config_var is None else config_var
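# Example: with sys.argv == ['gen.py', '--config=/etc/mm.ini'] this returns
# '/etc/mm.ini'; with no --config argument and MM_GEN__CONFIG unset, it
# returns config_path unchanged.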
class CfgReader:
"""
Config reader.
There are 3 ways of getting an option. In priority order:
1. From system env.
2. From config.
3. From default values.
To set an option via the system env, build the variable name as
MM_GEN__ + [SECTION_NAME] + _ + [VALUE_NAME] (upper-cased).
"""
def __init__(self, default_settings_path: AnyStr):
self.config = ConfigParser(interpolation=ExtendedInterpolation())
self.config.read([get_config_path(default_settings_path)])
def get_opt(self, s: AnyStr, v: AnyStr, default: Any = None):
val = CfgReader._get_env_val(s, v)
if val is not None:
return val
return self.config.get(s, v) if self.config.has_option(s, v) else default
def get_opt_path(self, s: AnyStr, v: AnyStr, default: AnyStr = ""):
return os.path.expanduser(self.get_opt(s, v, default))
@staticmethod
def _get_env_val(s: AnyStr, v: AnyStr):
return os.environ.get(f"MM_GEN__{s.upper()}_{v.upper()}")
DEFAULT_PLANET_URL = "https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf"
DEFAULT_PLANET_MD5_URL = md5_ext(DEFAULT_PLANET_URL)
# Main section:
# If DEBUG is True, a little special planet is downloaded.
DEBUG = True
_HOME_PATH = str(Path.home())
_WORK_PATH = _HOME_PATH
TMPDIR = os.path.join(_HOME_PATH, "tmp")
MAIN_OUT_PATH = os.path.join(_WORK_PATH, "generation")
CACHE_PATH = ""
# Developer section:
BUILD_PATH = os.path.join(_WORK_PATH, "omim-build-release")
OMIM_PATH = os.path.join(_WORK_PATH, "omim")
# Osm tools section:
OSM_TOOLS_SRC_PATH = os.path.join(OMIM_PATH, "tools", "osmctools")
OSM_TOOLS_PATH = os.path.join(_WORK_PATH, "osmctools")
# Generator tool section:
USER_RESOURCE_PATH = os.path.join(OMIM_PATH, "data")
NODE_STORAGE = "mem" if total_virtual_memory() / 10 ** 9 >= 64 else "map"
# Stages section:
NEED_PLANET_UPDATE = False
THREADS_COUNT_FEATURES_STAGE = multiprocessing.cpu_count()
DATA_ARCHIVE_DIR = USER_RESOURCE_PATH
DIFF_VERSION_DEPTH = 2
# Logging section:
LOG_FILE_PATH = os.path.join(MAIN_OUT_PATH, "generation.log")
# External resources section:
PLANET_URL = DEFAULT_PLANET_URL
PLANET_MD5_URL = DEFAULT_PLANET_MD5_URL
PLANET_COASTS_URL = ""
UGC_URL = ""
HOTELS_URL = ""
PROMO_CATALOG_CITIES_URL = ""
PROMO_CATALOG_COUNTRIES_URL = ""
POPULARITY_URL = ""
SUBWAY_URL = ""
TRANSIT_URL = ""
NEED_BUILD_WORLD_ROADS = True
FOOD_URL = ""
FOOD_TRANSLATIONS_URL = ""
UK_POSTCODES_URL = ""
US_POSTCODES_URL = ""
SRTM_PATH = ""
ISOLINES_PATH = ""
# Stats section:
STATS_TYPES_CONFIG = os.path.join(ETC_DIR, "stats_types_config.txt")
# Other variables:
PLANET = "planet"
POSSIBLE_GEN_TOOL_NAMES = ("generator_tool", "omim-generator_tool")
VERSION_FILE_NAME = "version.txt"
# Osm tools:
OSM_TOOL_CONVERT = "osmconvert"
OSM_TOOL_FILTER = "osmfilter"
OSM_TOOL_UPDATE = "osmupdate"
OSM_TOOLS_CC = "cc"
OSM_TOOLS_CC_FLAGS = [
"-O3",
]
# Planet and coasts:
PLANET_COASTS_GEOM_URL = os.path.join(PLANET_COASTS_URL, "latest_coasts.geom")
PLANET_COASTS_RAWGEOM_URL = os.path.join(PLANET_COASTS_URL, "latest_coasts.rawgeom")
if DEBUG:
PLANET_URL = "https://www.dropbox.com/s/m3ru5tnj8g9u4cz/planet-latest.o5m?raw=1"
PLANET_MD5_URL = (
"https://www.dropbox.com/s/8wdl2hy22jgisk5/planet-latest.o5m.md5?raw=1"
)
NODE_STORAGE = "map"
NEED_PLANET_UPDATE = False
# Common:
THREADS_COUNT = multiprocessing.cpu_count()
# for lib logging
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {"format": "[%(asctime)s] %(levelname)s %(module)s %(message)s"},
},
"handlers": {
"stdout": {
"level": "INFO",
"class": "logging.StreamHandler",
"formatter": "standard",
},
"file": {
"level": "DEBUG",
"class": "logging.handlers.WatchedFileHandler",
"formatter": "standard",
"filename": LOG_FILE_PATH,
},
},
"loggers": {
"maps_generator": {
"handlers": ["stdout", "file"],
"level": "DEBUG",
"propagate": True,
}
},
}
def init(default_settings_path: AnyStr):
# Try to read a config and to overload default settings
cfg = CfgReader(default_settings_path)
# Main section:
global DEBUG
global TMPDIR
global MAIN_OUT_PATH
global CACHE_PATH
_DEBUG = cfg.get_opt("Main", "DEBUG")
DEBUG = DEBUG if _DEBUG is None else int(_DEBUG)
TMPDIR = cfg.get_opt_path("Main", "TMPDIR", TMPDIR)
MAIN_OUT_PATH = cfg.get_opt_path("Main", "MAIN_OUT_PATH", MAIN_OUT_PATH)
CACHE_PATH = cfg.get_opt_path("Main", "CACHE_PATH", CACHE_PATH)
# Developer section:
global BUILD_PATH
global OMIM_PATH
BUILD_PATH = cfg.get_opt_path("Developer", "BUILD_PATH", BUILD_PATH)
OMIM_PATH = cfg.get_opt_path("Developer", "OMIM_PATH", OMIM_PATH)
# Osm tools section:
global OSM_TOOLS_SRC_PATH
global OSM_TOOLS_PATH
OSM_TOOLS_SRC_PATH = cfg.get_opt_path(
"Osm tools", "OSM_TOOLS_SRC_PATH", OSM_TOOLS_SRC_PATH
)
OSM_TOOLS_PATH = cfg.get_opt_path("Osm tools", "OSM_TOOLS_PATH", OSM_TOOLS_PATH)
# Generator tool section:
global USER_RESOURCE_PATH
global NODE_STORAGE
USER_RESOURCE_PATH = cfg.get_opt_path(
"Generator tool", "USER_RESOURCE_PATH", USER_RESOURCE_PATH
)
NODE_STORAGE = cfg.get_opt("Generator tool", "NODE_STORAGE", NODE_STORAGE)
if not os.path.exists(USER_RESOURCE_PATH):
from data_files import find_data_files
USER_RESOURCE_PATH = find_data_files("omim-data")
assert USER_RESOURCE_PATH is not None
import borders
# Issue: if maps_generator is installed as a system package and
# borders.init() is called for the first time, borders.init() might
# return False because root permission is needed.
assert borders.init()
# Stages section:
global NEED_PLANET_UPDATE
global DATA_ARCHIVE_DIR
global DIFF_VERSION_DEPTH
global THREADS_COUNT_FEATURES_STAGE
NEED_PLANET_UPDATE = cfg.get_opt("Stages", "NEED_PLANET_UPDATE", NEED_PLANET_UPDATE)
DATA_ARCHIVE_DIR = cfg.get_opt_path(
"Generator tool", "DATA_ARCHIVE_DIR", DATA_ARCHIVE_DIR
)
DIFF_VERSION_DEPTH = cfg.get_opt(
"Generator tool", "DIFF_VERSION_DEPTH", DIFF_VERSION_DEPTH
)
threads_count = int(
cfg.get_opt(
"Generator tool",
"THREADS_COUNT_FEATURES_STAGE",
THREADS_COUNT_FEATURES_STAGE,
)
)
if threads_count > 0:
THREADS_COUNT_FEATURES_STAGE = threads_count
# Logging section:
global LOG_FILE_PATH
global LOGGING
LOG_FILE_PATH = os.path.join(MAIN_OUT_PATH, "generation.log")
LOG_FILE_PATH = cfg.get_opt_path("Logging", "MAIN_LOG", LOG_FILE_PATH)
os.makedirs(os.path.dirname(os.path.abspath(LOG_FILE_PATH)), exist_ok=True)
LOGGING["handlers"]["file"]["filename"] = LOG_FILE_PATH
# External section:
global PLANET_URL
global PLANET_MD5_URL
global PLANET_COASTS_URL
global UGC_URL
global HOTELS_URL
global PROMO_CATALOG_CITIES_URL
global PROMO_CATALOG_COUNTRIES_URL
global POPULARITY_URL
global SUBWAY_URL
global TRANSIT_URL
global NEED_BUILD_WORLD_ROADS
global FOOD_URL
global UK_POSTCODES_URL
global US_POSTCODES_URL
global FOOD_TRANSLATIONS_URL
global SRTM_PATH
global ISOLINES_PATH
PLANET_URL = cfg.get_opt_path("External", "PLANET_URL", PLANET_URL)
PLANET_MD5_URL = cfg.get_opt_path("External", "PLANET_MD5_URL", PLANET_MD5_URL)
PLANET_COASTS_URL = cfg.get_opt_path(
"External", "PLANET_COASTS_URL", PLANET_COASTS_URL
)
UGC_URL = cfg.get_opt_path("External", "UGC_URL", UGC_URL)
HOTELS_URL = cfg.get_opt_path("External", "HOTELS_URL", HOTELS_URL)
PROMO_CATALOG_CITIES_URL = cfg.get_opt_path(
"External", "PROMO_CATALOG_CITIES_URL", PROMO_CATALOG_CITIES_URL
)
PROMO_CATALOG_COUNTRIES_URL = cfg.get_opt_path(
"External", "PROMO_CATALOG_COUNTRIES_URL", PROMO_CATALOG_COUNTRIES_URL
)
POPULARITY_URL = cfg.get_opt_path("External", "POPULARITY_URL", POPULARITY_URL)
SUBWAY_URL = cfg.get_opt("External", "SUBWAY_URL", SUBWAY_URL)
TRANSIT_URL = cfg.get_opt("External", "TRANSIT_URL", TRANSIT_URL)
NEED_BUILD_WORLD_ROADS = cfg.get_opt("External", "NEED_BUILD_WORLD_ROADS", NEED_BUILD_WORLD_ROADS)
FOOD_URL = cfg.get_opt("External", "FOOD_URL", FOOD_URL)
UK_POSTCODES_URL = cfg.get_opt("External", "UK_POSTCODES_URL", UK_POSTCODES_URL)
US_POSTCODES_URL = cfg.get_opt("External", "US_POSTCODES_URL", US_POSTCODES_URL)
FOOD_TRANSLATIONS_URL = cfg.get_opt(
"External", "FOOD_TRANSLATIONS_URL", FOOD_TRANSLATIONS_URL
)
SRTM_PATH = cfg.get_opt_path("External", "SRTM_PATH", SRTM_PATH)
ISOLINES_PATH = cfg.get_opt_path("External", "ISOLINES_PATH", ISOLINES_PATH)
# Stats section:
global STATS_TYPES_CONFIG
STATS_TYPES_CONFIG = cfg.get_opt_path(
"Stats", "STATS_TYPES_CONFIG", STATS_TYPES_CONFIG
)
# Common:
global THREADS_COUNT
threads_count = int(cfg.get_opt("Common", "THREADS_COUNT", THREADS_COUNT))
if threads_count > 0:
THREADS_COUNT = threads_count
# Planet and coasts:
global PLANET_COASTS_GEOM_URL
global PLANET_COASTS_RAWGEOM_URL
PLANET_COASTS_GEOM_URL = os.path.join(PLANET_COASTS_URL, "latest_coasts.geom")
PLANET_COASTS_RAWGEOM_URL = os.path.join(PLANET_COASTS_URL, "latest_coasts.rawgeom")
if DEBUG:
PLANET_URL = "https://www.dropbox.com/s/m3ru5tnj8g9u4cz/planet-latest.o5m?raw=1"
PLANET_MD5_URL = (
"https://www.dropbox.com/s/8wdl2hy22jgisk5/planet-latest.o5m.md5?raw=1"
)
NODE_STORAGE = "map"
NEED_PLANET_UPDATE = False
|
{
"content_hash": "b9b743696f7d6c8523d0327f112a25a9",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 102,
"avg_line_length": 32.528023598820056,
"alnum_prop": 0.6528520903237508,
"repo_name": "mapsme/omim",
"id": "a16dd0a0fd8bbd56171056eb6848c5ff3e03b3f6",
"size": "11027",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/python/maps_generator/generator/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "3962"
},
{
"name": "Batchfile",
"bytes": "3328"
},
{
"name": "C",
"bytes": "13762107"
},
{
"name": "C++",
"bytes": "39847686"
},
{
"name": "CMake",
"bytes": "335312"
},
{
"name": "CSS",
"bytes": "26798"
},
{
"name": "Common Lisp",
"bytes": "17587"
},
{
"name": "DIGITAL Command Language",
"bytes": "36710"
},
{
"name": "GLSL",
"bytes": "60719"
},
{
"name": "Gherkin",
"bytes": "305230"
},
{
"name": "Go",
"bytes": "12771"
},
{
"name": "HTML",
"bytes": "1305869"
},
{
"name": "Inno Setup",
"bytes": "4337"
},
{
"name": "Java",
"bytes": "2758722"
},
{
"name": "JavaScript",
"bytes": "3265"
},
{
"name": "Lua",
"bytes": "57672"
},
{
"name": "M4",
"bytes": "48177"
},
{
"name": "Makefile",
"bytes": "303910"
},
{
"name": "Metal",
"bytes": "80149"
},
{
"name": "Module Management System",
"bytes": "2080"
},
{
"name": "Objective-C",
"bytes": "532953"
},
{
"name": "Objective-C++",
"bytes": "1052627"
},
{
"name": "PHP",
"bytes": "2841"
},
{
"name": "Perl",
"bytes": "47465"
},
{
"name": "PowerShell",
"bytes": "1885"
},
{
"name": "Python",
"bytes": "759461"
},
{
"name": "Roff",
"bytes": "13545"
},
{
"name": "Ruby",
"bytes": "64691"
},
{
"name": "Shell",
"bytes": "959349"
},
{
"name": "Starlark",
"bytes": "965"
},
{
"name": "Swift",
"bytes": "869832"
},
{
"name": "TSQL",
"bytes": "3430"
},
{
"name": "sed",
"bytes": "236"
}
],
"symlink_target": ""
}
|
import unittest
import warnings
import matplotlib.pyplot as plt
import numpy as np
from numpy.testing import assert_array_almost_equal
import control as ctrl
from control.statesp import StateSpace
from control.xferfcn import TransferFunction
from control.matlab import ss, tf, bode, rss
from control.exception import slycot_check
class TestFreqresp(unittest.TestCase):
def setUp(self):
self.A = np.matrix('1,1;0,1')
self.C = np.matrix('1,0')
self.omega = np.linspace(10e-2,10e2,1000)
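        # np.matrix is deprecated in modern NumPy; np.array([[1., 1.], [0., 1.]])
        # is the equivalent, preferred construction.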
def test_siso(self):
B = np.matrix('0;1')
D = 0
sys = StateSpace(self.A,B,self.C,D)
# test frequency response
frq=sys.freqresp(self.omega)
# test bode plot
bode(sys)
# Convert to transfer function and test bode
systf = tf(sys)
bode(systf)
def test_superimpose(self):
# Test to make sure that multiple calls to plots superimpose their
# data on the same axes unless told to do otherwise
# Generate two plots in a row; should be on the same axes
plt.figure(1); plt.clf()
ctrl.bode_plot(ctrl.tf([1], [1,2,1]))
ctrl.bode_plot(ctrl.tf([5], [1, 1]))
# Check to make sure there are two axes and that each axes has two lines
self.assertEqual(len(plt.gcf().axes), 2)
for ax in plt.gcf().axes:
# Make sure there are 2 lines in each subplot
assert len(ax.get_lines()) == 2
# Generate two plots as a list; should be on the same axes
        plt.figure(2); plt.clf()
ctrl.bode_plot([ctrl.tf([1], [1,2,1]), ctrl.tf([5], [1, 1])])
# Check to make sure there are two axes and that each axes has two lines
self.assertEqual(len(plt.gcf().axes), 2)
for ax in plt.gcf().axes:
# Make sure there are 2 lines in each subplot
assert len(ax.get_lines()) == 2
# Generate two separate plots; only the second should appear
        plt.figure(3); plt.clf()
ctrl.bode_plot(ctrl.tf([1], [1,2,1]))
plt.clf()
ctrl.bode_plot(ctrl.tf([5], [1, 1]))
# Check to make sure there are two axes and that each axes has one line
self.assertEqual(len(plt.gcf().axes), 2)
for ax in plt.gcf().axes:
# Make sure there is only 1 line in the subplot
assert len(ax.get_lines()) == 1
        # Now add a line to the magnitude plot and make sure it is there
for ax in plt.gcf().axes:
if ax.get_label() == 'control-bode-magnitude':
break
ax.semilogx([1e-2, 1e1], 20 * np.log10([1, 1]), 'k-')
self.assertEqual(len(ax.get_lines()), 2)
def test_doubleint(self):
# 30 May 2016, RMM: added to replicate typecast bug in freqresp.py
        A = np.matrix('0, 1; 0, 0')
        B = np.matrix('0; 1')
        C = np.matrix('1, 0')
        D = 0
        sys = ss(A, B, C, D)
        bode(sys)
@unittest.skipIf(not slycot_check(), "slycot not installed")
def test_mimo(self):
# MIMO
B = np.matrix('1,0;0,1')
D = np.matrix('0,0')
sysMIMO = ss(self.A,B,self.C,D)
frqMIMO = sysMIMO.freqresp(self.omega)
tfMIMO = tf(sysMIMO)
#bode(sysMIMO) # - should throw not implemented exception
#bode(tfMIMO) # - should throw not implemented exception
#plt.figure(3)
#plt.semilogx(self.omega,20*np.log10(np.squeeze(frq[0])))
#plt.figure(4)
#bode(sysMIMO,self.omega)
def test_bode_margin(self):
num = [1000]
den = [1, 25, 100, 0]
sys = ctrl.tf(num, den)
plt.figure()
        ctrl.bode_plot(sys, margins=True, dB=False, deg=True, Hz=False)
fig = plt.gcf()
allaxes = fig.get_axes()
mag_to_infinity = (np.array([6.07828691, 6.07828691]),
np.array([1.00000000e+00, 1.00000000e-08]))
assert_array_almost_equal(mag_to_infinity, allaxes[0].lines[2].get_data())
        gm_to_infinity = (np.array([10., 10.]), np.array([4.00000000e-01, 1.00000000e-08]))
        assert_array_almost_equal(gm_to_infinity, allaxes[0].lines[3].get_data())
one_to_gm = (np.array([10., 10.]), np.array([1., 0.4]))
assert_array_almost_equal(one_to_gm, allaxes[0].lines[4].get_data())
pm_to_infinity = (np.array([6.07828691, 6.07828691]),
np.array([100000., -157.46405841]))
assert_array_almost_equal(pm_to_infinity, allaxes[1].lines[2].get_data())
pm_to_phase = (np.array([6.07828691, 6.07828691]), np.array([-157.46405841, -180.]))
assert_array_almost_equal(pm_to_phase, allaxes[1].lines[3].get_data())
phase_to_infinity = (np.array([10., 10.]), np.array([1.00000000e-08, -1.80000000e+02]))
assert_array_almost_equal(phase_to_infinity, allaxes[1].lines[4].get_data())
def test_discrete(self):
# Test discrete time frequency response
# SISO state space systems with either fixed or unspecified sampling times
sys = rss(3, 1, 1)
siso_ss1d = StateSpace(sys.A, sys.B, sys.C, sys.D, 0.1)
siso_ss2d = StateSpace(sys.A, sys.B, sys.C, sys.D, True)
# MIMO state space systems with either fixed or unspecified sampling times
A = [[-3., 4., 2.], [-1., -3., 0.], [2., 5., 3.]]
B = [[1., 4.], [-3., -3.], [-2., 1.]]
C = [[4., 2., -3.], [1., 4., 3.]]
D = [[-2., 4.], [0., 1.]]
mimo_ss1d = StateSpace(A, B, C, D, 0.1)
mimo_ss2d = StateSpace(A, B, C, D, True)
# SISO transfer functions
siso_tf1d = TransferFunction([1, 1], [1, 2, 1], 0.1)
siso_tf2d = TransferFunction([1, 1], [1, 2, 1], True)
# Go through each system and call the code, checking return types
for sys in (siso_ss1d, siso_ss2d, mimo_ss1d, mimo_ss2d,
siso_tf1d, siso_tf2d):
# Set frequency range to just below Nyquist freq (for Bode)
omega_ok = np.linspace(10e-4,0.99,100) * np.pi/sys.dt
# Test frequency response
ret = sys.freqresp(omega_ok)
# Check for warning if frequency is out of range
            warnings.simplefilter('always', UserWarning)  # don't suppress
with warnings.catch_warnings(record=True) as w:
# Set up warnings filter to only show warnings in control module
warnings.filterwarnings("ignore")
warnings.filterwarnings("always", module="control")
# Look for a warning about sampling above Nyquist frequency
omega_bad = np.linspace(10e-4,1.1,10) * np.pi/sys.dt
ret = sys.freqresp(omega_bad)
print("len(w) =", len(w))
self.assertEqual(len(w), 1)
self.assertIn("above", str(w[-1].message))
self.assertIn("Nyquist", str(w[-1].message))
# Test bode plots (currently only implemented for SISO)
if (sys.inputs == 1 and sys.outputs == 1):
# Generic call (frequency range calculated automatically)
ret_ss = bode(sys)
# Convert to transfer function and test bode again
                systf = tf(sys)
ret_tf = bode(systf)
# Make sure we can pass a frequency range
bode(sys, omega_ok)
else:
# Calling bode should generate a not implemented error
                self.assertRaises(NotImplementedError, bode, sys)
def test_options(self):
"""Test ability to set parameter values"""
# Generate a Bode plot of a transfer function
sys = ctrl.tf([1000], [1, 25, 100, 0])
fig1 = plt.figure()
        ctrl.bode_plot(sys, dB=False, deg=True, Hz=False)
# Save the parameter values
left1, right1 = fig1.axes[0].xaxis.get_data_interval()
numpoints1 = len(fig1.axes[0].lines[0].get_data()[0])
# Same transfer function, but add a decade on each end
ctrl.config.set_defaults('freqplot', feature_periphery_decades=2)
fig2 = plt.figure()
        ctrl.bode_plot(sys, dB=False, deg=True, Hz=False)
left2, right2 = fig2.axes[0].xaxis.get_data_interval()
# Make sure we got an extra decade on each end
self.assertAlmostEqual(left2, 0.1 * left1)
self.assertAlmostEqual(right2, 10 * right1)
# Same transfer function, but add more points to the plot
ctrl.config.set_defaults(
'freqplot', feature_periphery_decades=2, number_of_samples=13)
fig3 = plt.figure()
        ctrl.bode_plot(sys, dB=False, deg=True, Hz=False)
numpoints3 = len(fig3.axes[0].lines[0].get_data()[0])
# Make sure we got the right number of points
self.assertNotEqual(numpoints1, numpoints3)
self.assertEqual(numpoints3, 13)
# Reset default parameters to avoid contamination
ctrl.config.reset_defaults()
if __name__ == '__main__':
unittest.main()
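# A minimal sketch of the two APIs these tests exercise (the system and
# frequency grid are illustrative):
#
#   sys = ctrl.tf([1], [1, 2, 1])
#   mag, phase, omega = sys.freqresp(np.logspace(-2, 2, 100))
#   ctrl.bode_plot(sys, dB=True)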
|
{
"content_hash": "564362c412f8d4cdd8c7aaf31ca1e6e3",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 93,
"avg_line_length": 37.1587982832618,
"alnum_prop": 0.6008316008316008,
"repo_name": "roryyorke/python-control",
"id": "9d59a1972e5af7a07cb9d6a50d4eed0aeec71f8f",
"size": "8882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "control/tests/freqresp_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "126"
},
{
"name": "Python",
"bytes": "1020841"
}
],
"symlink_target": ""
}
|
import os
import subprocess
import sys
import unittest
# Since we execute this script directly as part of the unit tests, we need to ensure
# that Tools/Scripts is in sys.path for the next imports to work correctly.
script_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
if script_dir not in sys.path:
sys.path.append(script_dir)
from webkitpy.common.system.executive import Executive, ScriptError
class ScriptErrorTest(unittest.TestCase):
def test_message_with_output(self):
error = ScriptError('My custom message!', '', -1)
self.assertEqual(error.message_with_output(), 'My custom message!')
error = ScriptError('My custom message!', '', -1, 'My output.')
self.assertEqual(error.message_with_output(), 'My custom message!\n\noutput: My output.')
error = ScriptError('', 'my_command!', -1, 'My output.', '/Users/username/blah')
self.assertEqual(error.message_with_output(),
'Failed to run "\'my_command!\'" exit_code: -1 cwd: /Users/username/blah\n\noutput: My output.')
error = ScriptError('', 'my_command!', -1, 'ab' + '1' * 499)
self.assertEqual(error.message_with_output(),
'Failed to run "\'my_command!\'" exit_code: -1\n\noutput: Last 500 characters of output:\nb' + '1' * 499)
def test_message_with_tuple(self):
error = ScriptError('', ('my', 'command'), -1, 'My output.', '/Users/username/blah')
self.assertEqual(error.message_with_output(),
'Failed to run "(\'my\', \'command\')" exit_code: -1 cwd: /Users/username/blah\n\noutput: My output.')
def never_ending_command():
"""Arguments for a command that will never end (useful for testing process
killing). It should be a process that is unlikely to already be running
because all instances will be killed.
"""
if sys.platform == 'win32':
return ['wmic']
return ['yes']
def command_line(cmd, *args):
return [sys.executable, __file__, '--' + cmd] + list(args)
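# e.g. command_line('echo', 'foo') -> [sys.executable, __file__, '--echo', 'foo'];
# the child process re-runs this file, which dispatches --echo/--cat to main() below.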
class ExecutiveTest(unittest.TestCase):
def test_run_command_with_bad_command(self):
def run_bad_command():
Executive().run_command(['foo_bar_command_blah'], error_handler=Executive.ignore_error, return_exit_code=True)
self.assertRaises(OSError, run_bad_command)
def test_run_command_args_type(self):
executive = Executive()
self.assertRaises(AssertionError, executive.run_command, 'echo')
self.assertRaises(AssertionError, executive.run_command, u"echo")
executive.run_command(command_line('echo', 'foo'))
executive.run_command(tuple(command_line('echo', 'foo')))
def test_auto_stringify_args(self):
executive = Executive()
executive.run_command(command_line('echo', 1))
executive.popen(command_line('echo', 1), stdout=executive.PIPE).wait()
self.assertEqual('echo 1', executive.command_for_printing(['echo', 1]))
def test_popen_args(self):
executive = Executive()
# Explicitly naming the 'args' argument should not throw an exception.
executive.popen(args=command_line('echo', 1), stdout=executive.PIPE).wait()
def test_run_command_with_unicode(self):
"""Validate that it is safe to pass unicode() objects
to Executive.run* methods, and they will return unicode()
objects by default unless decode_output=False
"""
unicode_tor_input = u"WebKit \u2661 Tor Arne Vestb\u00F8!"
if sys.platform == 'win32':
encoding = 'mbcs'
else:
encoding = 'utf-8'
encoded_tor = unicode_tor_input.encode(encoding)
# On Windows, we expect the unicode->mbcs->unicode roundtrip to be
# lossy. On other platforms, we expect a lossless roundtrip.
if sys.platform == 'win32':
unicode_tor_output = encoded_tor.decode(encoding)
else:
unicode_tor_output = unicode_tor_input
executive = Executive()
output = executive.run_command(command_line('cat'), input=unicode_tor_input)
self.assertEqual(output, unicode_tor_output)
output = executive.run_command(command_line('echo', unicode_tor_input))
self.assertEqual(output, unicode_tor_output)
output = executive.run_command(command_line('echo', unicode_tor_input), decode_output=False)
self.assertEqual(output, encoded_tor)
# Make sure that str() input also works.
output = executive.run_command(command_line('cat'), input=encoded_tor, decode_output=False)
self.assertEqual(output, encoded_tor)
def test_kill_process(self):
executive = Executive()
process = subprocess.Popen(never_ending_command(), stdout=subprocess.PIPE)
self.assertEqual(process.poll(), None) # Process is running
executive.kill_process(process.pid)
# Killing again should fail silently.
executive.kill_process(process.pid)
def test_timeout_exceeded(self):
executive = Executive()
def timeout():
executive.run_command(command_line('sleep', 'infinity'), timeout_seconds=0.01)
self.assertRaises(ScriptError, timeout)
def test_timeout_exceeded_exit_code(self):
executive = Executive()
exit_code = executive.run_command(command_line('sleep', 'infinity'), timeout_seconds=0.01, return_exit_code=True)
self.assertNotEqual(exit_code, 0)
def test_timeout_satisfied(self):
executive = Executive()
executive.run_command(command_line('sleep', '0'), timeout_seconds=1000)
def test_check_running_pid(self):
executive = Executive()
self.assertTrue(executive.check_running_pid(os.getpid()))
# According to the proc(5) man page, on 64-bit linux systems,
# pid_max can be set to any value up to 2^22 (approximately 4 million).
self.assertFalse(executive.check_running_pid(5000000))
def test_running_pids(self):
executive = Executive()
pids = executive.running_pids()
self.assertIn(os.getpid(), pids)
def test_run_in_parallel_assert_nonempty(self):
self.assertRaises(AssertionError, Executive().run_in_parallel, [])
def main(platform, stdin, stdout, cmd, args):
if platform == 'win32' and hasattr(stdout, 'fileno'):
import msvcrt
msvcrt.setmode(stdout.fileno(), os.O_BINARY)
if cmd == '--cat':
stdout.write(stdin.read())
elif cmd == '--echo':
stdout.write(' '.join(args))
return 0
if __name__ == '__main__' and len(sys.argv) > 1 and sys.argv[1] in ('--cat', '--echo'):
sys.exit(main(sys.platform, sys.stdin, sys.stdout, sys.argv[1], sys.argv[2:]))
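# A minimal usage sketch of the Executive API exercised above (the commands
# and pid are illustrative):
#
#   executive = Executive()
#   output = executive.run_command(['echo', 'hello'])   # decoded stdout by default
#   code = executive.run_command(['echo', 'hello'], return_exit_code=True)
#   executive.kill_process(pid)                         # killing twice fails silently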
|
{
"content_hash": "4da74cc479e3a48dd99171e8dbc6b4e1",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 130,
"avg_line_length": 42.56603773584906,
"alnum_prop": 0.6459810874704491,
"repo_name": "youtube/cobalt",
"id": "f457650e741e8c9a0bd3415a44776f1163649867",
"size": "8373",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/blink/Tools/Scripts/webkitpy/common/system/executive_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|