text
stringlengths 4
1.02M
| meta
dict |
|---|---|
import os
import subprocess
from logging import getLogger
local_logger = getLogger(__name__)
def doc_to_xml(filename, logger=None):
    """Converts an MS Word document to DocBook XML using ``antiword``.

    Runs ``antiword -x db`` on *filename*, capturing the generated XML via a
    temporary ``<filename>.awdb.xml`` file which is always removed afterwards.

    :param filename: path of the .doc file to convert.
    :param logger: optional logger; defaults to this module's logger.
    :return: the DocBook XML produced by antiword, as a string.
    :raises subprocess.CalledProcessError: if antiword exits with an error.
    """
    if logger is None:
        logger = local_logger
    xml_path = filename + '.awdb.xml'
    # Pass the command as an argument list with shell=False: the original
    # string concatenation + shell=True allowed shell injection through
    # crafted file names.
    cmd = ['antiword', '-x', 'db', filename]
    logger.debug('Generated antiword command %s', cmd)
    try:
        with open(xml_path, 'wb') as xml_file:
            # antiword writes the XML on stdout; send it to the temporary
            # file (as the previous shell redirection did) and capture
            # stderr for logging.
            proc = subprocess.Popen(cmd, stdout=xml_file,
                                    stderr=subprocess.PIPE)
            _, stderr_data = proc.communicate()
        logger.debug('Antiword output: %s', stderr_data)
        if proc.returncode != 0:
            # Preserve the old check_output contract of raising on failure.
            raise subprocess.CalledProcessError(proc.returncode, cmd,
                                                output=stderr_data)
        with open(xml_path, 'r') as f:
            xmldata = f.read()
        logger.debug('len(xmldata) = %d', len(xmldata))
    finally:
        # Always remove the temporary XML file, even if antiword or the
        # read above failed (the original leaked it on errors).
        if os.path.exists(xml_path):
            os.remove(xml_path)
    return xmldata
|
{
"content_hash": "cdeee241817d8792905d57e19503df77",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 79,
"avg_line_length": 31.55,
"alnum_prop": 0.6481774960380349,
"repo_name": "alonisser/Open-Knesset",
"id": "822ff78b3f19c6c08c8c6f91f8849f17254d313c",
"size": "631",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "simple/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "350330"
},
{
"name": "HTML",
"bytes": "763573"
},
{
"name": "JavaScript",
"bytes": "220620"
},
{
"name": "Python",
"bytes": "4504427"
},
{
"name": "Shell",
"bytes": "383"
}
],
"symlink_target": ""
}
|
"""Pylons extension to simplify using couchdbkit with pylons. This features the
following:
* Simple configuration
* Authentication
* View synchronization
* Testing
Configuration
-------------
Add this to your ini file:
couchdb.uri = http://localhost:5984
couchdb.dbname = mydbname
cookies.secret = randomuniquestringforauth
And this into environment.py:
from couchdbreq.ext.pylons import init_from_config
init_from_config(config)
Authentication
--------------
You first need to define a User model, add this into model/user.py:
from couchdbreq import StringProperty
from couchdbreq.ext.pylons.auth.model import User as UserBase
class User(UserBase):
first_name = StringProperty()
last_name = StringProperty()
email = StringProperty()
Then add this into middleware.py:
from yourapp.model.user import User
from couchdbreq.ext.pylons.auth.basic import AuthBasicMiddleware
app = AuthBasicMiddleware(app, config, User)
NOTE: This authentication by default uses sha-256 hashing with a salt; the
behaviour can be changed by overriding methods.
Now we need the views required for authentication:
Create yourapp/_design/user/views/by_login/map.js and make it look like this:
function(doc) {
if(doc.doc_type == "User") {
emit(doc.login, doc);
}
}
And yourapp/_design/group/views/by_name/map.js:
function(doc) {
if(doc.doc_type == "Group") {
emit(doc.name, doc);
}
}
And yourapp/_design/group/views/show_permissions/map.js:
function(doc) {
if (doc.doc_type == "Group") {
for (var i = 0; i < doc.permissions.length; i++) {
emit(doc.name, doc.permissions[i].name);
}
}
}
View synchronization
--------------------
This will sync yourapp/_design to the CouchDB database described in the config.
couchdbkit has a built-in syncdb command that will automatically sync it. We
need to open up setup.py and add the command there as an entry point:
[paste.paster_command]
syncdb = couchdbkit.ext.pylons.commands:SyncDbCommand
And then add 'couchdbkit' to paster_plugins in the same file.
Syncing the database is then as simple as: paster syncdb /path/to/config.ini
Testing
-------
This will make it easier to create unit and functional tests that use couchdb
and load fixtures, this is not done yet and is TBC.
"""
from .db import init_from_config
|
{
"content_hash": "d9f104a0f7151625ae75ced61063afb4",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 84,
"avg_line_length": 27.4,
"alnum_prop": 0.7252039501932159,
"repo_name": "adamlofts/couchdb-requests",
"id": "11e860ec7eb118f4dacd40bd0ebd5858b62cce85",
"size": "2466",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "couchdbreq/ext/pylons/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21"
},
{
"name": "JavaScript",
"bytes": "2755"
},
{
"name": "Makefile",
"bytes": "5604"
},
{
"name": "Python",
"bytes": "275833"
}
],
"symlink_target": ""
}
|
from west import log
log.load_config()
import logging
logger = logging.getLogger(__name__)
import requests
from requests.auth import HTTPBasicAuth
BEAKER_KEY = "beaker.session.id"
DOMAIN = "http://localhost:8080"

session_id = None

# basic use case: walk through ping, login/logout and info endpoints in
# order, carrying the beaker session cookie from one call to the next.
for path, query, basic_auth in [
    ("/ping", None, False),
    ("/pong", None, False),
    ("/auth/login", {"username": "admin", "password": "admin"}, False),
    ("/auth/info", None, False),
    ("/auth/info/user", None, False),
    ("/auth/info", {"username": "user"}, False),
    ("/auth/list", None, False),
    ("/auth/logout", None, False),
    ("/auth/info", None, False),
    ("/auth/login", {"username": "user", "password": "user"}, False),
    ("/auth/info", None, False),
    ("/ping", None, True),  # basic auth
    ("/auth/info", None, False),
    ("/auth/logout", None, False),
    ("/auth/info", None, False),
]:
    # call server, optionally with HTTP basic credentials
    credentials = HTTPBasicAuth("admin", "admin") if basic_auth else None
    response = requests.get(DOMAIN + path, params=query,
                            cookies={BEAKER_KEY: session_id},
                            auth=credentials)
    # keep only the session id the server just sent back, if any
    session_id = next(
        (value for key, value in response.cookies.items()
         if key == BEAKER_KEY),
        None)
    logger.info(path)
    logger.info(response.status_code)
    body = response.text if response.status_code == 401 \
        else response.json()
    logger.info(body)
|
{
"content_hash": "f82900a6da79de6432446ac73ce75e79",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 71,
"avg_line_length": 30.367346938775512,
"alnum_prop": 0.5759408602150538,
"repo_name": "toopy/west",
"id": "75bc0508c9683023cdb53f438cb2e6b6b8eb9e17",
"size": "1510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sample/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22735"
}
],
"symlink_target": ""
}
|
"""Imports a filtered subset of the scores and configurations computed
by apm_quality_assessment.py into a pandas data frame.
"""
import argparse
import glob
import logging
import os
import re
import sys
try:
import pandas as pd
except ImportError:
logging.critical('Cannot import the third-party Python package pandas')
sys.exit(1)
from . import data_access as data_access
from . import simulation as sim
# Compiled regular expressions used to extract score descriptors.
# Each path component produced by the simulator is "<prefix><name>";
# group 1 captures the bare <name>.
RE_CONFIG_NAME = re.compile(
    sim.ApmModuleSimulator.GetPrefixApmConfig() + r'(.+)')
RE_CAPTURE_NAME = re.compile(
    sim.ApmModuleSimulator.GetPrefixCapture() + r'(.+)')
RE_RENDER_NAME = re.compile(
    sim.ApmModuleSimulator.GetPrefixRender() + r'(.+)')
RE_ECHO_SIM_NAME = re.compile(
    sim.ApmModuleSimulator.GetPrefixEchoSimulator() + r'(.+)')
RE_TEST_DATA_GEN_NAME = re.compile(
    sim.ApmModuleSimulator.GetPrefixTestDataGenerator() + r'(.+)')
RE_TEST_DATA_GEN_PARAMS = re.compile(
    sim.ApmModuleSimulator.GetPrefixTestDataGeneratorParameters() + r'(.+)')
# Score file names additionally carry an extension (captured by group 2).
RE_SCORE_NAME = re.compile(
    sim.ApmModuleSimulator.GetPrefixScore() + r'(.+)(\..+)')
def InstanceArgumentsParser():
  """Arguments parser factory.

  Returns:
    An argparse.ArgumentParser pre-populated with the mandatory output
    directory option and one optional regex filter per score descriptor.
  """
  parser = argparse.ArgumentParser(description=(
      'Override this description in a user script by changing'
      ' `parser.description` of the returned parser.'))
  parser.add_argument('-o', '--output_dir', required=True,
                      help=('the same base path used with the '
                            'apm_quality_assessment tool'))
  # Optional filters: each value is compiled into a regular expression by
  # argparse itself (type=re.compile).
  for short_name, long_name, what in [
      ('-c', '--config_names', 'the APM configuration names'),
      ('-i', '--capture_names', 'the capture signal names'),
      ('-r', '--render_names', 'the render signal names'),
      ('-e', '--echo_simulator_names', 'the echo simulator names'),
      ('-t', '--test_data_generators', 'the test data generator names'),
      ('-s', '--eval_scores', 'the evaluation score names'),
  ]:
    parser.add_argument(short_name, long_name, type=re.compile,
                        help='regular expression to filter ' + what)
  return parser
def _GetScoreDescriptors(score_filepath):
  """Extracts a score descriptor from the given score file path.

  Args:
    score_filepath: path to the score file.

  Returns:
    A tuple of strings (APM configuration name, capture audio track name,
    render audio track name, echo simulator name, test data generator name,
    test data generator parameters as string, evaluation score name).
  """
  # The last 7 path components encode one descriptor each, in this order.
  fields = score_filepath.split(os.sep)[-7:]
  field_regexps = (RE_CONFIG_NAME, RE_CAPTURE_NAME, RE_RENDER_NAME,
                   RE_ECHO_SIM_NAME, RE_TEST_DATA_GEN_NAME,
                   RE_TEST_DATA_GEN_PARAMS, RE_SCORE_NAME)
  return tuple(regexp.match(field).groups(0)[0]
               for field, regexp in zip(fields, field_regexps))
def _ExcludeScore(config_name, capture_name, render_name, echo_simulator_name,
test_data_gen_name, score_name, args):
"""Decides whether excluding a score.
A set of optional regular expressions in args is used to determine if the
score should be excluded (depending on its |*_name| descriptors).
Args:
config_name: APM configuration name.
capture_name: capture audio track name.
render_name: render audio track name.
echo_simulator_name: echo simulator name.
test_data_gen_name: test data generator name.
score_name: evaluation score name.
args: parsed arguments.
Returns:
A boolean.
"""
value_regexpr_pairs = [
(config_name, args.config_names),
(capture_name, args.capture_names),
(render_name, args.render_names),
(echo_simulator_name, args.echo_simulator_names),
(test_data_gen_name, args.test_data_generators),
(score_name, args.eval_scores),
]
# Score accepted if each value matches the corresponding regular expression.
for value, regexpr in value_regexpr_pairs:
if regexpr is None:
continue
if not regexpr.match(value):
return True
return False
def FindScores(src_path, args):
  """Given a search path, find scores and return a DataFrame object.

  Args:
    src_path: Search path pattern.
    args: parsed arguments.

  Returns:
    A DataFrame object.
  """
  rows = []
  for score_filepath in glob.iglob(src_path):
    # Decode the descriptor fields encoded in the last path components.
    (config_name, capture_name, render_name, echo_simulator_name,
     test_data_gen_name, test_data_gen_params,
     score_name) = _GetScoreDescriptors(score_filepath)
    # Skip the score when any optional filter in |args| rejects it.
    if _ExcludeScore(config_name, capture_name, render_name,
                     echo_simulator_name, test_data_gen_name, score_name,
                     args):
      logging.info('ignored score: %s %s %s %s %s %s', config_name,
                   capture_name, render_name, echo_simulator_name,
                   test_data_gen_name, score_name)
      continue
    # Load the audio file paths metadata stored next to the score file,
    # plus the score value itself.
    metadata = data_access.Metadata.LoadAudioTestDataPaths(
        os.path.split(score_filepath)[0])
    score = data_access.ScoreFile.Load(score_filepath)
    # One row per score: file paths, descriptor fields, then the score.
    rows.append((
        metadata['clean_capture_input_filepath'],
        metadata['echo_free_capture_filepath'],
        metadata['echo_filepath'],
        metadata['render_filepath'],
        metadata['capture_filepath'],
        metadata['apm_output_filepath'],
        metadata['apm_reference_filepath'],
        config_name,
        capture_name,
        render_name,
        echo_simulator_name,
        test_data_gen_name,
        test_data_gen_params,
        score_name,
        score,
    ))
  return pd.DataFrame(
      data=rows,
      columns=(
          'clean_capture_input_filepath',
          'echo_free_capture_filepath',
          'echo_filepath',
          'render_filepath',
          'capture_filepath',
          'apm_output_filepath',
          'apm_reference_filepath',
          'apm_config',
          'capture',
          'render',
          'echo_simulator',
          'test_data_gen',
          'test_data_gen_params',
          'eval_score_name',
          'score',
      ))
def ConstructSrcPath(args):
  """Builds the glob pattern matching every score file below args.output_dir.

  Args:
    args: parsed arguments (only output_dir is used).

  Returns:
    A glob pattern string with one wildcard level per score descriptor.
  """
  prefix_getters = (
      sim.ApmModuleSimulator.GetPrefixApmConfig,
      sim.ApmModuleSimulator.GetPrefixCapture,
      sim.ApmModuleSimulator.GetPrefixRender,
      sim.ApmModuleSimulator.GetPrefixEchoSimulator,
      sim.ApmModuleSimulator.GetPrefixTestDataGenerator,
      sim.ApmModuleSimulator.GetPrefixTestDataGeneratorParameters,
      sim.ApmModuleSimulator.GetPrefixScore,
  )
  return os.path.join(args.output_dir,
                      *[getter() + '*' for getter in prefix_getters])
|
{
"content_hash": "9971a59fa9a2ede9fcbf8dea0618b654",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 80,
"avg_line_length": 31.85042735042735,
"alnum_prop": 0.6295451496041863,
"repo_name": "koobonil/Boss2D",
"id": "fc1f44bcf888808d4eec5ee5f70318cd72565ecc",
"size": "7847",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Boss2D/addon/webrtc-jumpingyang001_for_boss/modules/audio_processing/test/py_quality_assessment/quality_assessment/collect_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "4820445"
},
{
"name": "Awk",
"bytes": "4272"
},
{
"name": "Batchfile",
"bytes": "89930"
},
{
"name": "C",
"bytes": "119747922"
},
{
"name": "C#",
"bytes": "87505"
},
{
"name": "C++",
"bytes": "272329620"
},
{
"name": "CMake",
"bytes": "1199656"
},
{
"name": "CSS",
"bytes": "42679"
},
{
"name": "Clojure",
"bytes": "1487"
},
{
"name": "Cuda",
"bytes": "1651996"
},
{
"name": "DIGITAL Command Language",
"bytes": "239527"
},
{
"name": "Dockerfile",
"bytes": "9638"
},
{
"name": "Emacs Lisp",
"bytes": "15570"
},
{
"name": "Go",
"bytes": "858185"
},
{
"name": "HLSL",
"bytes": "3314"
},
{
"name": "HTML",
"bytes": "2958385"
},
{
"name": "Java",
"bytes": "2921052"
},
{
"name": "JavaScript",
"bytes": "178190"
},
{
"name": "Jupyter Notebook",
"bytes": "1833654"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "M4",
"bytes": "775724"
},
{
"name": "MATLAB",
"bytes": "74606"
},
{
"name": "Makefile",
"bytes": "3941551"
},
{
"name": "Meson",
"bytes": "2847"
},
{
"name": "Module Management System",
"bytes": "2626"
},
{
"name": "NSIS",
"bytes": "4505"
},
{
"name": "Objective-C",
"bytes": "4090702"
},
{
"name": "Objective-C++",
"bytes": "1702390"
},
{
"name": "PHP",
"bytes": "3530"
},
{
"name": "Perl",
"bytes": "11096338"
},
{
"name": "Perl 6",
"bytes": "11802"
},
{
"name": "PowerShell",
"bytes": "38571"
},
{
"name": "Python",
"bytes": "24123805"
},
{
"name": "QMake",
"bytes": "18188"
},
{
"name": "Roff",
"bytes": "1261269"
},
{
"name": "Ruby",
"bytes": "5890"
},
{
"name": "Scala",
"bytes": "5683"
},
{
"name": "Shell",
"bytes": "2879948"
},
{
"name": "TeX",
"bytes": "243507"
},
{
"name": "TypeScript",
"bytes": "1593696"
},
{
"name": "Verilog",
"bytes": "1215"
},
{
"name": "Vim Script",
"bytes": "3759"
},
{
"name": "Visual Basic",
"bytes": "16186"
},
{
"name": "eC",
"bytes": "9705"
}
],
"symlink_target": ""
}
|
'''
Tesselate Demonstration
=======================
This demonstrates the experimental library for tesselating polygons. You
should see a hollow square with some buttons below it. You can click and
drag to create additional shapes, watching the number of vertices and elements
at the top of the screen. The 'debug' button toggles showing the mesh in
different colors.
'''
from kivy.app import App
from kivy.graphics import Mesh, Color
from kivy.graphics.tesselator import Tesselator, WINDING_ODD, TYPE_POLYGONS
from kivy.uix.floatlayout import FloatLayout
from kivy.lang import Builder
from kivy.logger import Logger
Builder.load_string("""
<ShapeBuilder>:
BoxLayout:
size_hint_y: None
height: "48dp"
spacing: "2dp"
padding: "2dp"
ToggleButton:
text: "Debug"
id: debug
on_release: root.build()
Button:
text: "New shape"
on_release: root.push_shape()
Button:
text: "Build"
on_release: root.build()
Button:
text: "Reset"
on_release: root.reset()
BoxLayout:
size_hint_y: None
height: "48dp"
top: root.top
spacing: "2dp"
padding: "2dp"
Label:
id: status
text: "Status"
""")
class ShapeBuilder(FloatLayout):
    """Interactive demo widget: touches add points to the current contour;
    contours are tesselated and drawn on canvas.after, either as filled
    triangle fans or (with Debug down) as per-mesh colored wireframes."""

    def __init__(self, **kwargs):
        super(ShapeBuilder, self).__init__(**kwargs)
        # Finished contours; each one is a flat [x1, y1, x2, y2, ...] list.
        self.shapes = [
            [100, 100, 300, 100, 300, 300, 100, 300],
            [150, 150, 250, 150, 250, 250, 150, 250]
        ]  # the 'hollow square' shape
        # Contour currently being drawn by the user.
        self.shape = []
        self.build()

    def on_touch_down(self, touch):
        """Appends the touch position to the current contour and rebuilds."""
        if super(ShapeBuilder, self).on_touch_down(touch):
            return True
        Logger.info('tesselate: on_touch_down (%5.2f, %5.2f)' % touch.pos)
        self.shape.extend(touch.pos)
        self.build()
        return True

    def on_touch_move(self, touch):
        """Appends the dragged position to the current contour and rebuilds."""
        if super(ShapeBuilder, self).on_touch_move(touch):
            return True
        Logger.info('tesselate: on_touch_move (%5.2f, %5.2f)' % touch.pos)
        self.shape.extend(touch.pos)
        self.build()
        return True

    def on_touch_up(self, touch):
        """Finalizes the current contour and rebuilds.

        NOTE(review): unlike the other handlers this does not return True
        after handling the touch — confirm that is intended.
        """
        if super(ShapeBuilder, self).on_touch_up(touch):
            return True
        Logger.info('tesselate: on_touch_up (%5.2f, %5.2f)' % touch.pos)
        self.push_shape()
        self.build()

    def push_shape(self):
        """Moves the in-progress contour into the finished shapes list."""
        self.shapes.append(self.shape)
        self.shape = []

    def build(self):
        """Re-tesselates every contour and redraws the result."""
        tess = Tesselator()
        count = 0
        # Feed all finished contours that carry at least 3 coordinate values.
        for shape in self.shapes:
            if len(shape) >= 3:
                tess.add_contour(shape)
                count += 1
        # Include the in-progress contour as well.
        if self.shape and len(self.shape) >= 3:
            tess.add_contour(self.shape)
            count += 1
        if not count:
            return
        ret = tess.tesselate(WINDING_ODD, TYPE_POLYGONS)
        Logger.info('tesselate: build: tess.tesselate returns {}'.format(ret))
        self.canvas.after.clear()
        debug = self.ids.debug.state == "down"
        if debug:
            with self.canvas.after:
                c = 0
                for vertices, indices in tess.meshes:
                    # Give each mesh its own hue so they can be told apart.
                    Color(c, 1, 1, mode="hsv")
                    c += 0.3
                    # Rebuild the index list as line segments outlining the
                    # triangle fan: fan edges plus spokes back to vertex 0
                    # (each vertex presumably spans 4 floats — hence // 4;
                    # TODO confirm against the mesh vertex format).
                    indices = [0]
                    for i in range(1, len(vertices) // 4):
                        if i > 0:
                            indices.append(i)
                        indices.append(i)
                        indices.append(0)
                        indices.append(i)
                    # Drop the trailing index so the list pairs up evenly.
                    indices.pop(-1)
                    Mesh(vertices=vertices, indices=indices, mode="lines")
        else:
            with self.canvas.after:
                Color(1, 1, 1, 1)
                for vertices, indices in tess.meshes:
                    Mesh(vertices=vertices, indices=indices,
                         mode="triangle_fan")
        self.ids.status.text = "Shapes: {} - Vertex: {} - Elements: {}".format(
            count, tess.vertex_count, tess.element_count)

    def reset(self):
        """Clears all contours, resets the status label and the canvas."""
        self.shapes = []
        self.shape = []
        self.ids.status.text = "Shapes: {} - Vertex: {} - Elements: {}".format(
            0, 0, 0)
        self.canvas.after.clear()
class TessApp(App):
    """Application shell; the root widget is a single ShapeBuilder."""

    def build(self):
        """Returns the root widget."""
        return ShapeBuilder()
# Standard entry-point guard so the example can be imported without
# immediately starting the app.
if __name__ == '__main__':
    TessApp().run()
|
{
"content_hash": "a376437bc59d108e107f1f17a2e0c7e8",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 79,
"avg_line_length": 30.17808219178082,
"alnum_prop": 0.5297321833862915,
"repo_name": "akshayaurora/kivy",
"id": "3fb5a4c2c576115a12599175514d1e76ba29174b",
"size": "4406",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "examples/canvas/tesselate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "328705"
},
{
"name": "Cython",
"bytes": "1105798"
},
{
"name": "Emacs Lisp",
"bytes": "9839"
},
{
"name": "GLSL",
"bytes": "323"
},
{
"name": "Makefile",
"bytes": "5494"
},
{
"name": "Objective-C",
"bytes": "26870"
},
{
"name": "PowerShell",
"bytes": "4836"
},
{
"name": "Python",
"bytes": "3286901"
},
{
"name": "Shell",
"bytes": "21930"
},
{
"name": "Vim script",
"bytes": "2120"
},
{
"name": "kvlang",
"bytes": "43643"
}
],
"symlink_target": ""
}
|
"""
NURBS Python Package
Licensed under MIT License
Developed by Onur Rauf Bingol (c) 2016-2017
"""
import sys
import itertools
import nurbs.utilities as utils
class Surface(object):
    def __init__(self):
        """Initializes an empty surface with all data containers cleared."""
        self._mDegreeU = 0
        self._mDegreeV = 0
        self._mKnotVectorU = []
        self._mKnotVectorV = []
        self._mCtrlPts = []
        self._mCtrlPts2D = []  # in [u][v] format
        self._mCtrlPts_sizeU = 0  # columns
        self._mCtrlPts_sizeV = 0  # rows
        self._mWeights = []
        # Evaluation step used by evaluate() / evaluate_rational().
        self._mDelta = 0.01
        # Points produced by the last evaluate*() call.
        self._mSurfPts = []
@property
def degree_u(self):
""" Getter method for degree U.
:return: degree of U
"""
return self._mDegreeU
@degree_u.setter
def degree_u(self, value):
""" Setter method for degree U.
:param value: input degree
"""
if value < 0:
raise ValueError("Degree cannot be less than zero.")
# Clean up the surface points lists, if necessary
self._reset_surface()
# Set degree u
self._mDegreeU = value
@property
def degree_v(self):
""" Getter method for degree V.
:return: degree of V
"""
return self._mDegreeV
@degree_v.setter
def degree_v(self, value):
""" Setter method for degree V.
:param value: input degree
"""
if value < 0:
raise ValueError("Degree cannot be less than zero.")
# Clean up the surface points lists, if necessary
self._reset_surface()
# Set degree v
self._mDegreeV = value
@property
def ctrlpts(self):
""" Getter method for 1D array of control points.
:return: A tuple containing (x, y, z) values of the control points
"""
ret_list = []
for pt in self._mCtrlPts:
ret_list.append(tuple(pt))
return tuple(ret_list)
@ctrlpts.setter
def ctrlpts(self, value):
""" Setter method for 1D array of control points.
:param value: input control points
"""
# Clean up the surface and control points lists, if necessary
self._reset_surface()
self._reset_ctrlpts()
# First check v-direction
if len(value) < self._mDegreeV + 1:
raise ValueError("Number of control points in v-direction should be at least degree + 1.")
# Then, check u direction
u_cnt = 0
for u_coords in value:
if len(u_coords) < self._mDegreeU + 1:
raise ValueError("Number of control points in u-direction should be at least degree + 1.")
u_cnt += 1
for coord in u_coords:
# Save the control points as a list of 3D coordinates
if len(coord) < 0 or len(coord) > 3:
raise ValueError("Please input 3D coordinates")
# Convert to list of floats
coord_float = [float(c) for c in coord]
self._mCtrlPts.append(coord_float)
# Set u and v sizes
self._mCtrlPts_sizeU = u_cnt
self._mCtrlPts_sizeV = len(value)
# Generate a 2D list of control points
for i in range(0, self._mCtrlPts_sizeU):
ctrlpts_v = []
for j in range(0, self._mCtrlPts_sizeV):
ctrlpts_v.append(self._mCtrlPts[i + (j * self._mCtrlPts_sizeU)])
self._mCtrlPts2D.append(ctrlpts_v)
# Automatically generate a weights vector of 1.0s in the size of ctrlpts array
self._mWeights = [1.0] * self._mCtrlPts_sizeU * self._mCtrlPts_sizeV
    @property
    def ctrlpts2d(self):
        """ Getter method for 2D array of control points.

        This list is automatically generated by ctrlpts() or read_ctrlpts().
        The returned object is the internal list itself, not a copy.

        :return: A 2D list of control points in [u][v] format
        """
        return self._mCtrlPts2D
@property
def weights(self):
""" Getter method for 1D array of weights.
:return: A tuple containing the weights vector
"""
return tuple(self._mWeights)
@weights.setter
def weights(self, value):
""" Setter method for 1D array of weights.
ctrlpts() and read_ctrlpts() automatically generate a weights vector of 1.0s in the size of control points array
:param value: input weights vector
"""
if len(value) != self._mCtrlPts_sizeU * self._mCtrlPts_sizeV:
raise ValueError("Size of the weight vector should be equal to size of control points.")
# Clean up the surface points lists, if necessary
self._reset_surface()
# Set weights vector
value_float = [float(w) for w in value]
self._mWeights = value_float
@property
def knotvector_u(self):
""" Getter method for knot vector U.
:return: A tuple containing the knot vector U
"""
return tuple(self._mKnotVectorU)
@knotvector_u.setter
def knotvector_u(self, value):
""" Setter method for knot vector U.
:param value: input knot vector
"""
# Clean up the surface points lists, if necessary
self._reset_surface()
# Set knot vector u
value_float = [float(kv) for kv in value]
self._mKnotVectorU = utils.normalize_knotvector(value_float)
@property
def knotvector_v(self):
""" Getter method for knot vector V.
:return: A tuple containing the knot vector V
"""
return tuple(self._mKnotVectorV)
@knotvector_v.setter
def knotvector_v(self, value):
""" Setter method for knot vector V.
:param value: input knot vector
"""
# Clean up the surface points lists, if necessary
self._reset_surface()
# Set knot vector u
value_float = [float(kv) for kv in value]
self._mKnotVectorV = utils.normalize_knotvector(value_float)
@property
def delta(self):
""" Getter method for surface point calculation delta.
:return: the delta value used to generate surface points
"""
return self._mDelta
@delta.setter
def delta(self, value):
""" Setter method for surface point calculation delta.
:param value: input delta
"""
# Delta value for surface calculations should be between 0 and 1
if float(value) <= 0 or float(value) >= 1:
raise ValueError("Surface calculation delta should be between 0.0 and 1.0.")
# Clean up the surface points lists, if necessary
self._reset_surface()
# Set a new delta value
self._mDelta = float(value)
@property
def ctrlptsw(self):
""" Getter method for 1D array of weighted control points.
:return: A tuple containing (x*w, y*w, z*w, w) values of the control points
"""
ret_list = []
for c, w in itertools.product(self._mCtrlPts, self._mWeights):
temp = (float(c[0]) * float(w), float(c[1]) * float(w), float(c[2]) * float(w), float(w))
ret_list.append(temp)
return tuple(ret_list)
@ctrlptsw.setter
def ctrlptsw(self, value):
""" Setter method for 1D array of weighted control points.
:param value: input weighted control points
"""
# Start with clean lists
ctrlpts_uv = []
weights_uv = []
# Split the weights vector from the input list for v-direction
for udir in value:
ctrlpts_u = []
weights_u = []
for i, c in enumerate(udir):
temp_list = [float(c[0]) / float(c[3]), float(c[1]) / float(c[3]), float(c[2]) / float(c[3])]
ctrlpts_u.append(temp_list)
weights_u.append(float(c[3]))
ctrlpts_uv.append(ctrlpts_u)
weights_uv.append(weights_u)
# Assign unzipped values to the class fields
self._mCtrlPts = ctrlpts_uv
self._mWeights = weights_uv
    @property
    def surfpts(self):
        """ Getter method for calculated surface points.

        Filled by evaluate() / evaluate_rational(); empty until one of them
        has been called.

        :return: 1D array of calculated surface points
        """
        return self._mSurfPts
    # Cleans up the control points and the weights (private)
    def _reset_ctrlpts(self):
        """Clears control points, the 2D grid and the weights, in place."""
        if self._mCtrlPts:
            # Delete control points
            # (del lst[:] empties the lists in place, so references handed
            # out by the getters stay valid and become empty too)
            del self._mCtrlPts[:]
            del self._mCtrlPts2D[:]
            # Delete weight vector
            del self._mWeights[:]
            # Set the control point sizes to zero
            self._mCtrlPts_sizeU = 0
            self._mCtrlPts_sizeV = 0

    # Cleans the calculated surface points (private)
    def _reset_surface(self):
        """Discards previously evaluated surface points, if any."""
        if self._mSurfPts:
            # Delete the calculated surface points
            del self._mSurfPts[:]
# Checks if the calculation operations are possible (private)
def _check_variables(self):
works = True
# Check degree values
if self._mDegreeU == 0 or self._mDegreeV == 0:
works = False
if not self._mCtrlPts:
works = False
if not self._mKnotVectorU or not self._mKnotVectorV:
works = False
if not works:
raise ValueError("Some required parameters for calculations are not set.")
    # Reads control points from a text file
    def read_ctrlpts(self, filename=''):
        """Reads control points from a text file.

        Expected format: one row per line, points separated by ';' and the
        coordinates of a point separated by ','. Weights are reset to 1.0
        for every point.

        :param filename: path of the input file
        """
        # Clean up the surface and control points lists, if necessary
        self._reset_ctrlpts()
        self._reset_surface()
        # Try reading the file
        try:
            # Open the file
            with open(filename, 'r') as fp:
                for line in fp:
                    # Remove whitespace
                    line = line.strip()
                    # Convert the string containing the coordinates into a list
                    control_point_row = line.split(';')
                    # NOTE(review): sizeU is reset on every line, so it ends
                    # up as the point count of the LAST row read.
                    self._mCtrlPts_sizeU = 0
                    for cpr in control_point_row:
                        cpt = cpr.split(',')
                        # Parse the first three fields as x, y, z
                        pt = [float(cpt[0]), float(cpt[1]), float(cpt[2])]
                        # Add control points to the global control point list
                        self._mCtrlPts.append(pt)
                        self._mCtrlPts_sizeU += 1
                    self._mCtrlPts_sizeV += 1
            # Generate a 2D list of control points
            for i in range(0, self._mCtrlPts_sizeU):
                ctrlpts_v = []
                for j in range(0, self._mCtrlPts_sizeV):
                    ctrlpts_v.append(self._mCtrlPts[i + (j * self._mCtrlPts_sizeU)])
                self._mCtrlPts2D.append(ctrlpts_v)
            # Generate a 1D list of weights
            self._mWeights = [1.0] * self._mCtrlPts_sizeU * self._mCtrlPts_sizeV
        except IOError:
            # NOTE(review): on failure this terminates the whole process.
            print('ERROR: Cannot open file ' + filename)
            sys.exit(1)
    # Reads control points and weights from a text file
    def read_ctrlptsw(self, filename=''):
        """Reads weighted control points from a text file.

        Same layout as read_ctrlpts(), but each point carries four fields
        (x*w, y*w, z*w, w); coordinates are un-weighted on load and the
        weights collected separately.

        :param filename: path of the input file
        """
        # Clean up the surface and control points lists, if necessary
        self._reset_ctrlpts()
        self._reset_surface()
        # Try reading the file
        try:
            # Open the file
            with open(filename, 'r') as fp:
                for line in fp:
                    # Remove whitespace
                    line = line.strip()
                    # Convert the string containing the coordinates into a list
                    control_point_row = line.split(';')
                    # NOTE(review): sizeU is reset on every line, so it ends
                    # up as the point count of the LAST row read.
                    self._mCtrlPts_sizeU = 0
                    for cpr in control_point_row:
                        cpt = cpr.split(',')
                        # Divide by the weight (4th field) to recover x, y, z
                        pt = [float(cpt[0]) / float(cpt[3]), float(cpt[1]) / float(cpt[3]),
                              float(cpt[2]) / float(cpt[3])]
                        self._mWeights.append(float(cpt[3]))
                        # Add control points to the global control point list
                        self._mCtrlPts.append(pt)
                        self._mCtrlPts_sizeU += 1
                    self._mCtrlPts_sizeV += 1
            # Generate a 2D list of control points
            for i in range(0, self._mCtrlPts_sizeU):
                ctrlpts_v = []
                for j in range(0, self._mCtrlPts_sizeV):
                    ctrlpts_v.append(self._mCtrlPts[i + (j * self._mCtrlPts_sizeU)])
                self._mCtrlPts2D.append(ctrlpts_v)
        except IOError:
            # NOTE(review): on failure this terminates the whole process.
            print('ERROR: Cannot open file ' + filename)
            sys.exit(1)
    def transpose(self):
        """Transposes the surface by swapping the u and v directions.

        Degrees, knot vectors, control point grid and weights are all
        reordered accordingly; evaluated surface points are discarded.
        """
        # Transpose existing data
        degree_u_new = self._mDegreeV
        degree_v_new = self._mDegreeU
        kv_u_new = self._mKnotVectorV
        kv_v_new = self._mKnotVectorU
        # Build the transposed grid: new[v][u] = old[u][v]
        ctrlpts2D_new = []
        for v in range(0, self._mCtrlPts_sizeV):
            ctrlpts_u = []
            for u in range(0, self._mCtrlPts_sizeU):
                temp = self._mCtrlPts2D[u][v]
                ctrlpts_u.append(temp)
            ctrlpts2D_new.append(ctrlpts_u)
        ctrlpts_new_sizeU = self._mCtrlPts_sizeV
        ctrlpts_new_sizeV = self._mCtrlPts_sizeU
        # Flatten the transposed grid back into the 1D point/weight lists
        ctrlpts_new = []
        weights_new = []
        for v in range(0, ctrlpts_new_sizeV):
            for u in range(0, ctrlpts_new_sizeU):
                ctrlpts_new.append(ctrlpts2D_new[u][v])
                weights_new.append(self._mWeights[v + (u * ctrlpts_new_sizeV)])
        # Clean up the surface points lists, if necessary
        self._reset_surface()
        # Save transposed data
        self._mDegreeU = degree_u_new
        self._mDegreeV = degree_v_new
        self._mKnotVectorU = kv_u_new
        self._mKnotVectorV = kv_v_new
        self._mCtrlPts = ctrlpts_new
        self._mCtrlPts_sizeU = ctrlpts_new_sizeU
        self._mCtrlPts_sizeV = ctrlpts_new_sizeV
        self._mCtrlPts2D = ctrlpts2D_new
        self._mWeights = weights_new
    # Evaluates the B-Spline surface
    def evaluate(self):
        """Evaluates the (non-rational) B-Spline surface on a regular
        (u, v) grid with step self._mDelta; points go to self._mSurfPts.
        """
        # Check all parameters are set before calculations
        self._check_variables()
        # Clean up the surface points lists, if necessary
        self._reset_surface()
        # Algorithm A3.5 (surface point evaluation)
        for v in utils.frange(0, 1, self._mDelta):
            span_v = utils.find_span(self._mDegreeV, tuple(self._mKnotVectorV), self._mCtrlPts_sizeV, v)
            basis_v = utils.basis_functions(self._mDegreeV, tuple(self._mKnotVectorV), span_v, v)
            for u in utils.frange(0, 1, self._mDelta):
                span_u = utils.find_span(self._mDegreeU, tuple(self._mKnotVectorU), self._mCtrlPts_sizeU, u)
                basis_u = utils.basis_functions(self._mDegreeU, tuple(self._mKnotVectorU), span_u, u)
                idx_u = span_u - self._mDegreeU
                surfpt = [0.0, 0.0, 0.0]
                # Tensor-product sum over the (degree_u+1) x (degree_v+1)
                # control points that influence this (u, v).
                for l in range(0, self._mDegreeV + 1):
                    temp = [0.0, 0.0, 0.0]
                    idx_v = span_v - self._mDegreeV + l
                    for k in range(0, self._mDegreeU + 1):
                        temp[0] += (basis_u[k] * self._mCtrlPts2D[idx_u + k][idx_v][0])
                        temp[1] += (basis_u[k] * self._mCtrlPts2D[idx_u + k][idx_v][1])
                        temp[2] += (basis_u[k] * self._mCtrlPts2D[idx_u + k][idx_v][2])
                    surfpt[0] += (basis_v[l] * temp[0])
                    surfpt[1] += (basis_v[l] * temp[1])
                    surfpt[2] += (basis_v[l] * temp[2])
                self._mSurfPts.append(surfpt)
    # Evaluates the NURBS surface
    def evaluate_rational(self):
        """Evaluates the rational (NURBS) surface on a regular (u, v) grid.

        Control points are lifted to homogeneous coordinates using the
        weights, evaluated, and projected back; points are appended to
        self._mSurfPts with step self._mDelta in both directions.
        """
        # Check all parameters are set before calculations
        self._check_variables()
        # Clean up the surface points lists, if necessary
        self._reset_surface()
        # Prepare a 2D weighted control points array
        ctrlptsw = []
        c_u = 0
        while c_u < self._mCtrlPts_sizeU:
            ctrlptsw_v = []
            c_v = 0
            while c_v < self._mCtrlPts_sizeV:
                # Homogeneous coordinates: (x*w, y*w, z*w, w)
                temp = [self._mCtrlPts2D[c_u][c_v][0] * self._mWeights[c_u + (c_v * self._mCtrlPts_sizeU)],
                        self._mCtrlPts2D[c_u][c_v][1] * self._mWeights[c_u + (c_v * self._mCtrlPts_sizeU)],
                        self._mCtrlPts2D[c_u][c_v][2] * self._mWeights[c_u + (c_v * self._mCtrlPts_sizeU)],
                        self._mWeights[c_u + (c_v * self._mCtrlPts_sizeU)]]
                ctrlptsw_v.append(temp)
                c_v += 1
            ctrlptsw.append(ctrlptsw_v)
            c_u += 1
        # Algorithm A4.3 (rational surface point evaluation)
        for v in utils.frange(0, 1, self._mDelta):
            span_v = utils.find_span(self._mDegreeV, tuple(self._mKnotVectorV), self._mCtrlPts_sizeV, v)
            basis_v = utils.basis_functions(self._mDegreeV, tuple(self._mKnotVectorV), span_v, v)
            for u in utils.frange(0, 1, self._mDelta):
                span_u = utils.find_span(self._mDegreeU, tuple(self._mKnotVectorU), self._mCtrlPts_sizeU, u)
                basis_u = utils.basis_functions(self._mDegreeU, tuple(self._mKnotVectorU), span_u, u)
                idx_u = span_u - self._mDegreeU
                surfptw = [0.0, 0.0, 0.0, 0.0]
                for l in range(0, self._mDegreeV + 1):
                    temp = [0.0, 0.0, 0.0, 0.0]
                    idx_v = span_v - self._mDegreeV + l
                    for k in range(0, self._mDegreeU + 1):
                        temp[0] += (basis_u[k] * ctrlptsw[idx_u + k][idx_v][0])
                        temp[1] += (basis_u[k] * ctrlptsw[idx_u + k][idx_v][1])
                        temp[2] += (basis_u[k] * ctrlptsw[idx_u + k][idx_v][2])
                        temp[3] += (basis_u[k] * ctrlptsw[idx_u + k][idx_v][3])
                    surfptw[0] += (basis_v[l] * temp[0])
                    surfptw[1] += (basis_v[l] * temp[1])
                    surfptw[2] += (basis_v[l] * temp[2])
                    surfptw[3] += (basis_v[l] * temp[3])
                # Divide by weight to obtain 3D surface points
                surfpt = [surfptw[0] / surfptw[3], surfptw[1] / surfptw[3], surfptw[2] / surfptw[3]]
                self._mSurfPts.append(surfpt)
    # Calculates n-th order surface derivatives at the given (u,v) parameter
    def derivatives(self, u=-1, v=-1, order=0):
        """Computes surface derivatives up to the given order at (u, v).

        :param u: u parameter
        :param v: v parameter
        :param order: highest derivative order to compute
        :return: SKL, where SKL[k][l] is the 3D derivative of the surface
            differentiated k times in u and l times in v
        """
        # Check all parameters are set before calculations
        self._check_variables()
        # Check u and v parameters are correct
        utils.check_uv(u, v)
        # Algorithm A3.6 (surface derivatives)
        du = min(self._mDegreeU, order)
        dv = min(self._mDegreeV, order)
        SKL = [[[0.0 for x in range(3)] for y in range(dv + 1)] for z in range(du + 1)]
        span_u = utils.find_span(self._mDegreeU, tuple(self._mKnotVectorU), self._mCtrlPts_sizeU, u)
        bfunsders_u = utils.basis_functions_ders(self._mDegreeU, self._mKnotVectorU, span_u, u, du)
        span_v = utils.find_span(self._mDegreeV, tuple(self._mKnotVectorV), self._mCtrlPts_sizeV, v)
        bfunsders_v = utils.basis_functions_ders(self._mDegreeV, self._mKnotVectorV, span_v, v, dv)
        for k in range(0, du + 1):
            temp = [[] for y in range(self._mDegreeV + 1)]
            for s in range(0, self._mDegreeV + 1):
                temp[s] = [0.0 for x in range(3)]
                # Sum the u-direction derivative contributions for this row.
                for r in range(0, self._mDegreeU + 1):
                    cu = span_u - self._mDegreeU + r
                    cv = span_v - self._mDegreeV + s
                    temp[s][0] += (bfunsders_u[k][r] * self._mCtrlPts2D[cu][cv][0])
                    temp[s][1] += (bfunsders_u[k][r] * self._mCtrlPts2D[cu][cv][1])
                    temp[s][2] += (bfunsders_u[k][r] * self._mCtrlPts2D[cu][cv][2])
            # Only mixed orders with k + l <= order are required.
            dd = min(order - k, dv)
            for l in range(0, dd + 1):
                for s in range(0, self._mDegreeV + 1):
                    SKL[k][l][0] += (bfunsders_v[l][s] * temp[s][0])
                    SKL[k][l][1] += (bfunsders_v[l][s] * temp[s][1])
                    SKL[k][l][2] += (bfunsders_v[l][s] * temp[s][2])
        return SKL
# Calculates surface tangent at the given (u, v) parameter
def tangent(self, u=-1, v=-1):
# Tangent is the 1st derivative of the surface
skl = self.derivatives(u, v, 1)
# Doing this just for readability
point = skl[0][0]
der_u = skl[1][0]
der_v = skl[0][1]
# Return the list of tangents w.r.t. u and v
return tuple(point), tuple(der_u), tuple(der_v)
# Calculates surface normal at the given (u, v) parameter
def normal(self, u=-1, v=-1, normalized=True):
# Check u and v parameters are correct for normal calculations
utils.check_uv(u, v, test_normal=True, delta=self._mDelta)
# Take the 1st derivative of the surface
skl = self.derivatives(u, v, 1)
point = skl[0][0]
der_u = skl[1][0]
der_v = skl[0][1]
normal = utils.cross_vector(der_u, der_v)
if normalized:
normal = utils.normalize_vector(tuple(normal))
# Return the surface normal at the input u,v location
return tuple(normal)
|
{
"content_hash": "7e0bfc6d8178912ed7559cd2bdb0a371",
"timestamp": "",
"source": "github",
"line_count": 524,
"max_line_length": 120,
"avg_line_length": 40.353053435114504,
"alnum_prop": 0.5433435800425632,
"repo_name": "jedufour/NURBS-Python",
"id": "cba91269fa86a0e10916c242306940e812257e85",
"size": "21145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nurbs/Surface.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49013"
}
],
"symlink_target": ""
}
|
from django.shortcuts import render, redirect
from django.contrib import messages
from django.urls import reverse
from django.template import RequestContext
from django.apps import apps
import hmac, base64, hashlib, binascii, os
import shopify
def _new_session(shop_url):
    """Build a shopify.Session for *shop_url* using the configured API version."""
    app_config = apps.get_app_config('shopify_app')
    return shopify.Session(shop_url, app_config.SHOPIFY_API_VERSION)
# Ask user for their ${shop}.myshopify.com address
def login(request):
    """Show the shop-address form, or start OAuth directly if ?shop= is given."""
    shop = request.GET.get('shop')
    if shop:
        # Shop already known: skip the form and go straight to authentication.
        return authenticate(request)
    return render(request, 'shopify_app/login.html', {})
def authenticate(request):
    """Start the OAuth flow: redirect the user to Shopify's permission URL.

    Reads the shop domain from GET or POST; redirects back to the login
    page with an error message when it is missing.
    """
    # BUG FIX: the original `request.GET.get('shop', request.POST.get('shop')).strip()`
    # raised AttributeError (None.strip()) when 'shop' was absent from both
    # GET and POST, so the friendly "shop param is required" path never ran.
    shop_url = (request.GET.get('shop') or request.POST.get('shop') or '').strip()
    if not shop_url:
        messages.error(request, "A shop param is required")
        return redirect(reverse(login))
    scope = apps.get_app_config('shopify_app').SHOPIFY_API_SCOPE
    redirect_uri = request.build_absolute_uri(reverse(finalize))
    # Random state token guards against CSRF on the OAuth callback;
    # finalize() checks it against the session copy.
    state = binascii.b2a_hex(os.urandom(15)).decode("utf-8")
    request.session['shopify_oauth_state_param'] = state
    permission_url = _new_session(shop_url).create_permission_url(scope, redirect_uri, state)
    return redirect(permission_url)
def finalize(request):
    """OAuth callback: verify the state token and HMAC, then store the token.

    On success, saves the shop URL and access token in the session and
    redirects to the URL stashed in session['return_to'] (or the root path).
    On any verification failure, redirects back to the login page with an
    error message.
    """
    api_secret = apps.get_app_config('shopify_app').SHOPIFY_API_SECRET
    params = request.GET.dict()
    # Anti-forgery check. Using .get() instead of [] means a missing session
    # entry or a missing 'state' query param yields a friendly redirect
    # instead of an uncaught KeyError (HTTP 500). A missing stored state is
    # always treated as a mismatch.
    expected_state = request.session.get('shopify_oauth_state_param')
    if expected_state is None or expected_state != params.get('state'):
        messages.error(request, 'Anti-forgery state token does not match the initial request.')
        return redirect(reverse(login))
    request.session.pop('shopify_oauth_state_param', None)
    # Default '' keeps a malformed callback (no hmac param) from raising.
    myhmac = params.pop('hmac', '')
    line = '&'.join([
        '%s=%s' % (key, value)
        for key, value in sorted(params.items())
    ])
    h = hmac.new(api_secret.encode('utf-8'), line.encode('utf-8'), hashlib.sha256)
    # compare_digest defends against timing attacks on the HMAC comparison.
    if not hmac.compare_digest(h.hexdigest(), myhmac):
        messages.error(request, "Could not verify a secure login")
        return redirect(reverse(login))
    try:
        shop_url = params['shop']
        session = _new_session(shop_url)
        request.session['shopify'] = {
            "shop_url": shop_url,
            "access_token": session.request_token(request.GET)
        }
    except Exception:
        messages.error(request, "Could not log in to Shopify store.")
        return redirect(reverse(login))
    messages.info(request, "Logged in to shopify store.")
    # BUG FIX: the original popped 'return_to' *before* reading it with .get(),
    # so the stored redirect target was always discarded and the user always
    # landed on root_path. pop() returns the stored value (or None).
    return redirect(request.session.pop('return_to', None) or reverse('root_path'))
def logout(request):
    """Discard the stored Shopify credentials and return to the login page."""
    if 'shopify' in request.session:
        del request.session['shopify']
    messages.info(request, "Successfully logged out.")
    return redirect(reverse(login))
|
{
"content_hash": "286029ec6c9dc54d61c76a879b6f6047",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 95,
"avg_line_length": 39.94285714285714,
"alnum_prop": 0.6795422031473534,
"repo_name": "Shopify/shopify_django_app",
"id": "d2999c4313654ecc6aadac2cd2bf66681044e262",
"size": "2796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shopify_app/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5128"
},
{
"name": "HTML",
"bytes": "5524"
},
{
"name": "Python",
"bytes": "12669"
}
],
"symlink_target": ""
}
|
"""
Tests for the module that encompasses second species counterpoint.
"""
import unittest
from foox.species.second import (
Genome,
create_population,
is_parallel,
make_fitness_function,
make_generate_function,
make_halt_function,
MAX_REWARD,
REWARD_STEPWISE_MOTION,
)
# The cantus firmus to use in the test suite.
# NOTE(review): values look like diatonic pitch numbers — the tests below rely
# only on the list's length and on note-interval arithmetic against it.
CANTUS_FIRMUS = [5, 7, 6, 5, 8, 7, 9, 8, 7, 6, 5]
class TestCreatePopulation(unittest.TestCase):
"""
Ensures the create_population function works as expected.
"""
def test_returns_valid_genomes(self):
"""
Checks the genomes returned by the create_population function are
of the correct type.
"""
result = create_population(1, CANTUS_FIRMUS)
self.assertEqual(Genome, type(result[0]))
def test_returns_correct_number_of_genomes(self):
"""
Ensures the correct number of genomes are returned by the function.
"""
result = create_population(100, CANTUS_FIRMUS)
self.assertEqual(100, len(result))
def test_uses_only_valid_intervals(self):
"""
Tests that only valid consonant intervals are used.
"""
valid_first_beat_intervals = [2, 4, 5, 7, 9, 11]
valid_third_beat_intervals = valid_first_beat_intervals + [3, 6, 8, 10]
result = create_population(20, CANTUS_FIRMUS)
for genome in result:
for i in range(len(genome.chromosome)):
contrapunctus_note = genome.chromosome[i]
cantus_firmus_note = CANTUS_FIRMUS[i // 2]
interval = contrapunctus_note - cantus_firmus_note
if i % 2:
self.assertIn(interval, valid_third_beat_intervals)
else:
self.assertIn(interval, valid_first_beat_intervals)
def test_solutions_have_correct_number_of_notes(self):
"""
Ensures that all solutions have the expected number of notes.
"""
result = create_population(20, CANTUS_FIRMUS)
expected_length = (len(CANTUS_FIRMUS) * 2) - 1
for genome in result:
self.assertEqual(expected_length, len(genome.chromosome))
class TestFitnessFunction(unittest.TestCase):
"""
Ensures that the fitness function works as expected.
"""
def test_make_fitness_function_returns_callable(self):
"""
Ensures the make_fitness_function returns a callable.
"""
result = make_fitness_function(CANTUS_FIRMUS)
self.assertTrue(callable(result))
def test_fitness_function_returns_float(self):
"""
Makes sure the generated fitness function returns a fitness score as a
float.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
result = fitness_function(genome)
self.assertTrue(float, type(result))
def test_fitness_function_sets_fitness_on_genome(self):
"""
Ensures the fitness score is set in the genome's fitness attribute and
is the same as the returned fitness score.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
self.assertEqual(None, genome.fitness)
result = fitness_function(genome)
self.assertNotEqual(None, genome.fitness)
self.assertEqual(result, genome.fitness)
def test_fitness_function_uses_cached_genome_fitness(self):
"""
Ensures the fitness function bails if there is already a score set for
the genome.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
genome.fitness = 12345
result = fitness_function(genome)
self.assertEqual(12345, result)
class TestHalt(unittest.TestCase):
"""
Ensure the halting function works as expected.
"""
def test_halt_expected(self):
"""
Ensure the function returns true if we're in a halting state.
"""
halt = make_halt_function([6, 5])
g1 = Genome([6, 6, 5])
g1.fitness = MAX_REWARD
population = [
g1,
]
result = halt(population, 1)
self.assertTrue(result)
def test_halt_checks_dissonance_count(self):
"""
If the solution contains dissonances the halt function should ensure
that the MAX_REWARD is incremented by the number of dissonances
(rewarded because they're part of a valid step wise motion).
"""
halt = make_halt_function([6, 5])
g1 = Genome([9, 9, 12])
# only one our of two "correct" dissonances
g1.fitness = MAX_REWARD + REWARD_STEPWISE_MOTION
population = [
g1,
]
result = halt(population, 1)
self.assertFalse(result)
# Try again
# two out of two "correct" dissonances
g1.fitness = MAX_REWARD + (REWARD_STEPWISE_MOTION * 2)
population = [
g1,
]
result = halt(population, 1)
self.assertTrue(result)
def test_halt_not(self):
"""
Ensures if the fittest genome has fitness < MAX_REWARD then halt
doesn't succeed.
"""
halt = make_halt_function([3, 2, 1])
g1 = Genome([1, 2, 3])
g1.fitness = MAX_REWARD - 0.1
g2 = Genome([1, 2, 3])
g2.fitness = 3
g3 = Genome([1, 2, 3])
g3.fitness = 2
# Any fittest solution with fitness < MAX_REWARD means no halt.
population = [g1, g2, g3]
result = halt(population, 1)
self.assertFalse(result)
class TestGenome(unittest.TestCase):
"""
Ensures that the Genome class is overridden as expected.
"""
def test_mutate_is_implemented(self):
"""
Ensures that we have a mutate method implemented.
"""
genome = Genome([1, 2, 3])
self.assertNotEqual(NotImplemented, genome.mutate(2, 0.2, [1, 2, 3]))
def test_mutate_bounded_by_arg_values(self):
"""
A rather contrived test but it proves that both the mutation_range and
mutation_rate are used correctly given the context given by a cantus
firmus.
"""
cantus_firmus = [1, 1, 1, 1, 1]
# mutate every time.
mutation_rate = 1
# will always mutate to thirds above the cf note.
mutation_range = 2
genome = Genome([5, 6, 7, 8, 9])
genome.mutate(mutation_range, mutation_rate, cantus_firmus)
self.assertEqual([3, 3, 3, 3, 3], genome.chromosome)
|
{
"content_hash": "c8cbad43f24422ca4c7a3b3c0977ec0f",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 79,
"avg_line_length": 33.24120603015076,
"alnum_prop": 0.600151171579743,
"repo_name": "ntoll/foox",
"id": "ebe4b1ef3e50d8cfadc840a0f3f0dae221ee7783",
"size": "6615",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/species/test_second.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "27853"
},
{
"name": "Python",
"bytes": "117376"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from django.conf import settings
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from purefap.core.models import FTPClient, FTPUser, FTPStaff
from purefap.core.forms import *
class FTPUserAdmin(UserAdmin):
    """Admin for FTP user accounts (used when COMPLEX_MODE is off)."""
    form = FTPUserChangeForm
    add_form = FTPUserAddForm
    # Change-form layout: credentials, FTP limits/quota, then permissions.
    fieldsets = (
        ('User Info', {
            'fields' : ('username', 'password', 'company',)
            # 'first_name', 'last_name', 'email',)
        }),
        ('FTP Info', {
            'fields' : ('ftpuid', 'ftpgid', 'download_limit', 'upload_limit', 'quota', 'ip_address', 'expiry_date','homedir',)
        }),
        ('User permissions', {
            'fields' : ('is_staff', 'is_superuser', 'is_active', 'groups', 'user_permissions',)
        }),
    )
    def queryset(self, request):
        # Restrict the changelist to staff, non-superuser accounts.
        # NOTE(review): ModelAdmin.queryset was renamed to get_queryset in
        # Django 1.6 — confirm the Django version this project targets.
        return self.model.objects.filter(is_staff=True, is_superuser=False)
class FTPClientAdmin(UserAdmin):
    """Admin for client (non-staff) FTP accounts; registered in COMPLEX_MODE."""
    form = FTPClientChangeForm
    add_form = FTPClientAddForm
    # Same layout as the other FTP admins, but clients only expose is_active.
    fieldsets = (
        ('User Info', {
            'fields' : ('username', 'password', 'company',)
            # 'first_name', 'last_name', 'email',)
        }),
        ('FTP Info', {
            'fields' : ('ftpuid', 'ftpgid', 'download_limit', 'upload_limit', 'quota', 'ip_address', 'expiry_date','homedir',)
        }),
        ('User permissions', {
            'fields' : ('is_active',)
        }),
    )
    def queryset(self, request):
        # Changelist shows only non-staff accounts.
        # NOTE(review): ModelAdmin.queryset was renamed to get_queryset in
        # Django 1.6 — confirm the Django version this project targets.
        return self.model.objects.filter(is_staff=False)
class FTPStaffAdmin(UserAdmin):
    """Admin for staff FTP accounts; registered in COMPLEX_MODE."""
    form = FTPStaffChangeForm
    add_form = FTPStaffAddForm
    # NOTE(review): unlike FTPUserAdmin/FTPClientAdmin there is no queryset()
    # filter here, so this changelist shows every user — confirm intended.
    fieldsets = (
        ('User Info', {
            'fields' : ('username', 'password', 'company',)
            # 'first_name', 'last_name', 'email',)
        }),
        ('FTP Info', {
            'fields' : ('ftpuid', 'ftpgid', 'download_limit', 'upload_limit', 'quota', 'ip_address', 'expiry_date','homedir',)
        }),
        ('User permissions', {
            'fields' : ('is_superuser', 'is_active', 'groups', 'user_permissions',)
        }),
    )
# COMPLEX_MODE splits accounts into separate client/staff admins; otherwise a
# single combined FTP user admin is registered.
if settings.COMPLEX_MODE:
    admin.site.register(FTPClient, FTPClientAdmin)
    admin.site.register(FTPStaff, FTPStaffAdmin)
else:
    admin.site.register(FTPUser, FTPUserAdmin)
|
{
"content_hash": "2a21126e31d6492c398564b3da964bc3",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 131,
"avg_line_length": 36.298507462686565,
"alnum_prop": 0.5361842105263158,
"repo_name": "fim/purefap",
"id": "911ff80cda7e1ef4cad7591b1373b4199fd6ef57",
"size": "2432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "purefap/core/admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "28974"
}
],
"symlink_target": ""
}
|
""" losses for training neural networks """
from __future__ import absolute_import
__all__ = ['Loss', 'L2Loss', 'L1Loss',
'SigmoidBinaryCrossEntropyLoss', 'SigmoidBCELoss',
'SoftmaxCrossEntropyLoss', 'SoftmaxCELoss',
'KLDivLoss', 'CTCLoss', 'HuberLoss', 'HingeLoss',
'SquaredHingeLoss', 'LogisticLoss', 'TripletLoss']
from .. import ndarray
from ..base import numeric_types
from .block import HybridBlock
def _apply_weighting(F, loss, weight=None, sample_weight=None):
"""Apply weighting to loss.
Parameters
----------
loss : Symbol
The loss to be weighted.
weight : float or None
Global scalar weight for loss.
sample_weight : Symbol or None
Per sample weighting. Must be broadcastable to
the same shape as loss. For example, if loss has
shape (64, 10) and you want to weight each sample
in the batch separately, `sample_weight` should have
shape (64, 1).
Returns
-------
loss : Symbol
Weighted loss
"""
if sample_weight is not None:
loss = F.broadcast_mul(loss, sample_weight)
if weight is not None:
assert isinstance(weight, numeric_types), "weight must be a number"
loss = loss * weight
return loss
def _reshape_like(F, x, y):
    """Reshapes x to the same shape as y."""
    # Imperative (ndarray) mode can use the concrete shape; symbolic mode
    # defers to the reshape_like operator.
    if F is ndarray:
        return x.reshape(y.shape)
    return F.reshape_like(x, y)
class Loss(HybridBlock):
    """Base class for loss.

    Parameters
    ----------
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.
    """

    def __init__(self, weight, batch_axis, **kwargs):
        super(Loss, self).__init__(**kwargs)
        self._weight = weight
        self._batch_axis = batch_axis

    def __repr__(self):
        template = '{name}(batch_axis={_batch_axis}, w={_weight})'
        return template.format(name=self.__class__.__name__, **self.__dict__)

    def hybrid_forward(self, F, x, *args, **kwargs):
        """Overrides to construct symbolic graph for this `Block`.

        Parameters
        ----------
        x : Symbol or NDArray
            The first input tensor.
        *args : list of Symbol or list of NDArray
            Additional input tensors.
        """
        # Subclasses must implement the actual loss computation.
        raise NotImplementedError
class L2Loss(Loss):
    r"""Calculates the mean squared error between `pred` and `label`.

    .. math:: L = \frac{1}{2} \sum_i \vert {pred}_i - {label}_i \vert^2.

    `pred` and `label` can have arbitrary shape as long as they have the same
    number of elements.

    Parameters
    ----------
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape
        - **label**: target tensor with the same size as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of pred (e.g. (64, 1) for pred (64, 10)
          to weight each sample separately).

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.
    """

    def __init__(self, weight=1., batch_axis=0, **kwargs):
        super(L2Loss, self).__init__(weight, batch_axis, **kwargs)

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        # Bring label into pred's layout before computing the residual.
        label = _reshape_like(F, label, pred)
        squared_error = F.square(label - pred)
        # The 1/2 factor matches the docstring's definition.
        weighted = _apply_weighting(F, squared_error, self._weight / 2, sample_weight)
        return F.mean(weighted, axis=self._batch_axis, exclude=True)
class L1Loss(Loss):
    r"""Calculates the mean absolute error between `pred` and `label`.

    .. math:: L = \sum_i \vert {pred}_i - {label}_i \vert.

    `pred` and `label` can have arbitrary shape as long as they have the same
    number of elements.

    Parameters
    ----------
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape
        - **label**: target tensor with the same size as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of pred (e.g. (64, 1) for pred (64, 10)
          to weight each sample separately).

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.
    """

    def __init__(self, weight=None, batch_axis=0, **kwargs):
        super(L1Loss, self).__init__(weight, batch_axis, **kwargs)

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        label = _reshape_like(F, label, pred)
        abs_error = F.abs(label - pred)
        weighted = _apply_weighting(F, abs_error, self._weight, sample_weight)
        return F.mean(weighted, axis=self._batch_axis, exclude=True)
class SigmoidBinaryCrossEntropyLoss(Loss):
    r"""The cross-entropy loss for binary classification. (alias: SigmoidBCELoss)

    With `from_sigmoid` False (default), `pred` holds raw scores and the loss is

    .. math::
        prob = \frac{1}{1 + \exp(-{pred})}

        L = - \sum_i {label}_i * \log({prob}_i) +
            (1 - {label}_i) * \log(1 - {prob}_i)

    With `from_sigmoid` True, `pred` already holds probabilities and the loss is

    .. math::
        L = - \sum_i {label}_i * \log({pred}_i) +
            (1 - {label}_i) * \log(1 - {pred}_i)

    `pred` and `label` can have arbitrary shape as long as they have the same
    number of elements.

    Parameters
    ----------
    from_sigmoid : bool, default is `False`
        Whether the input is from the output of sigmoid. Leaving this False
        fuses sigmoid and BCE, which is numerically more stable through the
        log-sum-exp trick.
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape
        - **label**: target tensor with values in `[0, 1]`, same size as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of pred.

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.
    """

    def __init__(self, from_sigmoid=False, weight=None, batch_axis=0, **kwargs):
        super(SigmoidBinaryCrossEntropyLoss, self).__init__(weight, batch_axis, **kwargs)
        self._from_sigmoid = from_sigmoid

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        label = _reshape_like(F, label, pred)
        if self._from_sigmoid:
            # Inputs are probabilities; the 1e-12 epsilon guards against log(0).
            loss = -(F.log(pred+1e-12)*label + F.log(1.-pred+1e-12)*(1.-label))
        else:
            # Stable fused form: max(x, 0) - x * z + log(1 + exp(-abs(x)))
            loss = F.relu(pred) - pred * label + F.Activation(-F.abs(pred), act_type='softrelu')
        loss = _apply_weighting(F, loss, self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)


SigmoidBCELoss = SigmoidBinaryCrossEntropyLoss
class SoftmaxCrossEntropyLoss(Loss):
    r"""Computes the softmax cross entropy loss. (alias: SoftmaxCELoss)

    With `sparse_label` True (default), `label` holds integer class indices:

    .. math::
        \DeclareMathOperator{softmax}{softmax}

        p = \softmax({pred})

        L = -\sum_i \log p_{i,{label}_i}

    and `label`'s shape is `pred`'s shape with the `axis` dimension removed,
    i.e. for `pred` with shape (1,2,3,4) and `axis = 2`, `label`'s shape
    should be (1,2,4).

    With `sparse_label` False, `label` holds a probability distribution and
    has the same shape as `pred`:

    .. math::
        p = \softmax({pred})

        L = -\sum_i \sum_j {label}_j \log p_{ij}

    Parameters
    ----------
    axis : int, default -1
        The axis to sum over when computing softmax and entropy.
    sparse_label : bool, default True
        Whether label is an integer array instead of probability distribution.
    from_logits : bool, default False
        Whether input is a log probability (usually from log_softmax) instead
        of unnormalized numbers.
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: the prediction tensor; `axis` ranges over the classes and
          `batch_axis` over the batch.
        - **label**: the truth tensor — integer indices (values in
          [0, num_classes)) when `sparse_label`, else floats in `[0, 1]`
          with the same shape as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of label.

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.
    """

    def __init__(self, axis=-1, sparse_label=True, from_logits=False, weight=None,
                 batch_axis=0, **kwargs):
        super(SoftmaxCrossEntropyLoss, self).__init__(weight, batch_axis, **kwargs)
        self._axis = axis
        self._sparse_label = sparse_label
        self._from_logits = from_logits

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        if not self._from_logits:
            # Convert raw scores to log-probabilities first.
            pred = F.log_softmax(pred, self._axis)
        if self._sparse_label:
            # Directly pick the log-probability of each target class.
            loss = -F.pick(pred, label, axis=self._axis, keepdims=True)
        else:
            label = _reshape_like(F, label, pred)
            loss = -F.sum(pred*label, axis=self._axis, keepdims=True)
        loss = _apply_weighting(F, loss, self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)


SoftmaxCELoss = SoftmaxCrossEntropyLoss
class KLDivLoss(Loss):
    r"""The Kullback-Leibler divergence loss.

    KL divergence measures the distance between contiguous distributions. It
    can be used to minimize information loss when approximating a distribution.

    With `from_logits` True (default), `pred` holds log probabilities and

    .. math::
        L = \sum_i {label}_i * \big[\log({label}_i) - {pred}_i\big]

    With `from_logits` False, `pred` is first run through log_softmax along
    `axis`:

    .. math::
        \DeclareMathOperator{softmax}{softmax}

        prob = \softmax({pred})

        L = \sum_i {label}_i * \big[\log({label}_i) - \log({prob}_i)\big]

    `pred` and `label` can have arbitrary shape as long as they have the same
    number of elements.

    Parameters
    ----------
    from_logits : bool, default is `True`
        Whether the input is log probability (usually from log_softmax)
        instead of unnormalized numbers.
    axis : int, default -1
        The dimension along which to compute softmax; only used when
        `from_logits` is False.
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape (log probabilities
          when `from_logits`, raw scores otherwise).
        - **label**: truth tensor with values in `(0, 1)`, same size as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of pred.

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.

    References
    ----------
    `Kullback-Leibler divergence
    <https://en.wikipedia.org/wiki/Kullback-Leibler_divergence>`_
    """

    def __init__(self, from_logits=True, axis=-1, weight=None, batch_axis=0,
                 **kwargs):
        super(KLDivLoss, self).__init__(weight, batch_axis, **kwargs)
        self._from_logits = from_logits
        self._axis = axis

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        if not self._from_logits:
            pred = F.log_softmax(pred, self._axis)
        # The 1e-12 epsilon guards against log(0) for zero-valued labels.
        loss = label * (F.log(label+1e-12) - pred)
        loss = _apply_weighting(F, loss, self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)
class CTCLoss(Loss):
    r"""Connectionist Temporal Classification Loss.

    Parameters
    ----------
    layout : str, default 'NTC'
        Layout of prediction tensor: 'N', 'T', 'C' stand for batch size,
        sequence length, and alphabet_size respectively.
    label_layout : str, default 'NT'
        Layout of the labels: 'N', 'T' stand for batch size and sequence
        length respectively.
    weight : float or None
        Global scalar weight for loss.

    Inputs:
        - **pred**: unnormalized prediction tensor (before softmax), shaped
          per `layout`. In the last dimension, index `alphabet_size-1` is
          reserved for internal use as the blank label, so `alphabet_size`
          is one plus the actual alphabet size.
        - **label**: zero-based label tensor shaped per `label_layout`;
          shorter sequences in a batch are padded with -1. For example,
          sequences 'ba', 'cbb', 'abac' over alphabet `{'a': 0, 'b': 1,
          'c': 2, blank: 3}` become::

              [[1, 0, -1, -1],
               [2, 1, 1, -1],
               [0, 1, 0, 2]]

        - **pred_lengths**: optional `(batch_size,)` tensor of per-entry
          prediction lengths (default None).
        - **label_lengths**: optional `(batch_size,)` tensor of per-entry
          label lengths (default None).

    Outputs:
        - **loss**: output loss has shape `(batch_size,)`.

    References
    ----------
    `Connectionist Temporal Classification: Labelling Unsegmented
    Sequence Data with Recurrent Neural Networks
    <http://www.cs.toronto.edu/~graves/icml_2006.pdf>`_
    """

    def __init__(self, layout='NTC', label_layout='NT', weight=None, **kwargs):
        assert layout in ['NTC', 'TNC'],\
            "Only 'NTC' and 'TNC' layouts for pred are supported. Got: %s"%layout
        assert label_layout in ['NT', 'TN'],\
            "Only 'NT' and 'TN' layouts for label are supported. Got: %s"%label_layout
        self._layout = layout
        self._label_layout = label_layout
        # The batch axis is wherever 'N' sits in the label layout.
        super(CTCLoss, self).__init__(weight, label_layout.find('N'), **kwargs)

    def hybrid_forward(self, F, pred, label,
                       pred_lengths=None, label_lengths=None, sample_weight=None):
        # The backend op expects time-major inputs ('TNC' pred, 'TN' label).
        if self._layout == 'NTC':
            pred = F.swapaxes(pred, 0, 1)
        if self._batch_axis == 1:
            label = F.swapaxes(label, 0, 1)
        loss = F.contrib.CTCLoss(pred, label, pred_lengths, label_lengths,
                                 use_data_lengths=pred_lengths is not None,
                                 use_label_lengths=label_lengths is not None,
                                 blank_label='last')
        return _apply_weighting(F, loss, self._weight, sample_weight)
class HuberLoss(Loss):
    r"""Calculates smoothed L1 loss: L1 when the absolute error exceeds rho,
    L2 otherwise. Also called SmoothedL1 loss.

    .. math::
        L = \sum_i \begin{cases} \frac{1}{2 {rho}} ({pred}_i - {label}_i)^2 &
                           \text{ if } |{pred}_i - {label}_i| < {rho} \\
                           |{pred}_i - {label}_i| - \frac{{rho}}{2} &
                           \text{ otherwise }
            \end{cases}

    `pred` and `label` can have arbitrary shape as long as they have the same
    number of elements.

    Parameters
    ----------
    rho : float, default 1
        Threshold for trimmed mean estimator.
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape
        - **label**: target tensor with the same size as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of pred.

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.
    """

    def __init__(self, rho=1, weight=None, batch_axis=0, **kwargs):
        super(HuberLoss, self).__init__(weight, batch_axis, **kwargs)
        self._rho = rho

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        label = _reshape_like(F, label, pred)
        abs_error = F.abs(pred - label)
        # Quadratic inside the rho band, linear outside it.
        loss = F.where(abs_error > self._rho, abs_error - 0.5 * self._rho,
                       (0.5/self._rho) * F.square(abs_error))
        loss = _apply_weighting(F, loss, self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)
class HingeLoss(Loss):
    r"""Calculates the hinge loss function often used in SVMs:

    .. math::
        L = \sum_i max(0, {margin} - {pred}_i \cdot {label}_i)

    where `pred` is the classifier prediction and `label` is the target tensor
    containing values -1 or 1. `pred` and `label` must have the same number of
    elements.

    Parameters
    ----------
    margin : float
        The margin in hinge loss. Defaults to 1.0
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape.
        - **label**: truth tensor with values -1 or 1, same size as pred.
        - **sample_weight**: optional element-wise weighting tensor,
          broadcastable to the shape of pred.

    Outputs:
        - **loss**: loss tensor with shape (batch_size,); dimensions other
          than batch_axis are averaged out.
    """

    def __init__(self, margin=1, weight=None, batch_axis=0, **kwargs):
        super(HingeLoss, self).__init__(weight, batch_axis, **kwargs)
        self._margin = margin

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        label = _reshape_like(F, label, pred)
        # relu clamps the margin violation at zero.
        hinge = F.relu(self._margin - pred * label)
        hinge = _apply_weighting(F, hinge, self._weight, sample_weight)
        return F.mean(hinge, axis=self._batch_axis, exclude=True)
class SquaredHingeLoss(Loss):
    r"""Calculates the soft-margin (squared hinge) loss used in SVMs:

    .. math::
        L = \sum_i max(0, {margin} - {pred}_i \cdot {label}_i)^2

    `pred` is the classifier prediction and `label` is the target tensor
    containing values -1 or 1. Both may have arbitrary shape as long as they
    contain the same number of elements.

    Parameters
    ----------
    margin : float
        The margin in hinge loss. Defaults to 1.0
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape
        - **label**: truth tensor with values -1 or 1. Must have the same size
          as pred.
        - **sample_weight**: element-wise weighting tensor. Must be broadcastable
          to the same shape as pred. For example, if pred has shape (64, 10)
          and you want to weigh each sample in the batch separately,
          sample_weight should have shape (64, 1).

    Outputs:
        - **loss**: loss tensor with shape (batch_size,). Dimensions other than
          batch_axis are averaged out.
    """
    def __init__(self, margin=1, weight=None, batch_axis=0, **kwargs):
        super(SquaredHingeLoss, self).__init__(weight, batch_axis, **kwargs)
        self._margin = margin

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        label = _reshape_like(F, label, pred)
        # Squaring the margin violation yields a smooth, differentiable hinge.
        violation = F.relu(self._margin - pred * label)
        loss = _apply_weighting(F, F.square(violation), self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)
class LogisticLoss(Loss):
    r"""Calculates the logistic loss (for binary losses only):

    .. math::
        L = \sum_i \log(1 + \exp(- {pred}_i \cdot {label}_i))

    where `pred` is the classifier prediction and `label` is the target tensor
    containing values -1 or 1 (0 or 1 if `label_format` is binary).
    `pred` and `label` can have arbitrary shape as long as they have the same
    number of elements.

    Parameters
    ----------
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.
    label_format : str, default 'signed'
        Can be either 'signed' or 'binary'. If the label_format is 'signed', all
        label values should be either -1 or 1. If the label_format is 'binary',
        all label values should be either 0 or 1.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape.
        - **label**: truth tensor with values -1/1 (label_format is 'signed')
          or 0/1 (label_format is 'binary'). Must have the same size as pred.
        - **sample_weight**: element-wise weighting tensor. Must be broadcastable
          to the same shape as pred. For example, if pred has shape (64, 10)
          and you want to weigh each sample in the batch separately,
          sample_weight should have shape (64, 1).

    Outputs:
        - **loss**: loss tensor with shape (batch_size,). Dimensions other than
          batch_axis are averaged out.
    """
    def __init__(self, weight=None, batch_axis=0, label_format='signed', **kwargs):
        super(LogisticLoss, self).__init__(weight, batch_axis, **kwargs)
        self._label_format = label_format
        if self._label_format not in ["signed", "binary"]:
            # Fail fast on an unsupported label format.
            # Bug fix: corrected the typo "recieved" -> "received" in the message.
            raise ValueError("label_format can only be signed or binary, received %s."
                             % label_format)

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        label = _reshape_like(F, label, pred)
        if self._label_format == 'signed':
            label = (label + 1.0) / 2.0  # Transform label to be either 0 or 1
        # Numerically stable form of log(1 + exp(-pred * signed_label)):
        # relu(x) - x*z + log(1 + exp(-|x|)) (softrelu), the standard stable
        # binary-cross-entropy-with-logits formulation.
        loss = F.relu(pred) - pred * label + F.Activation(-F.abs(pred), act_type='softrelu')
        loss = _apply_weighting(F, loss, self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)
class TripletLoss(Loss):
    r"""Calculates triplet loss given three input tensors and a positive margin.

    Triplet loss measures the relative similarity between prediction, a positive
    example and a negative example:

    .. math::
        L = \sum_i \max(\Vert {pred}_i - {pos_i} \Vert_2^2 -
                        \Vert {pred}_i - {neg_i} \Vert_2^2 + {margin}, 0)

    `pred`, `positive` and `negative` can have arbitrary shape as long as they
    have the same number of elements.

    Parameters
    ----------
    margin : float
        Margin of separation between correct and incorrect pair.
    weight : float or None
        Global scalar weight for loss.
    batch_axis : int, default 0
        The axis that represents mini-batch.

    Inputs:
        - **pred**: prediction tensor with arbitrary shape
        - **positive**: positive example tensor with arbitrary shape. Must have
          the same size as pred.
        - **negative**: negative example tensor with arbitrary shape. Must have
          the same size as pred.

    Outputs:
        - **loss**: loss tensor with shape (batch_size,).
    """
    def __init__(self, margin=1, weight=None, batch_axis=0, **kwargs):
        super(TripletLoss, self).__init__(weight, batch_axis, **kwargs)
        self._margin = margin

    def hybrid_forward(self, F, pred, positive, negative):
        positive = _reshape_like(F, positive, pred)
        negative = _reshape_like(F, negative, pred)
        # Squared-distance gap between the positive and negative pairs,
        # reduced over every non-batch axis.
        dist_gap = F.sum(F.square(pred - positive) - F.square(pred - negative),
                         axis=self._batch_axis, exclude=True)
        loss = F.relu(dist_gap + self._margin)
        return _apply_weighting(F, loss, self._weight, None)
|
{
"content_hash": "e2440ec6d7305cae41d3b573534805ba",
"timestamp": "",
"source": "github",
"line_count": 689,
"max_line_length": 100,
"avg_line_length": 38.426705370101594,
"alnum_prop": 0.6105151835624717,
"repo_name": "jiajiechen/mxnet",
"id": "2be43981a64c4716b498bc32fb5475383233515b",
"size": "27313",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "python/mxnet/gluon/loss.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12255"
},
{
"name": "C",
"bytes": "109701"
},
{
"name": "C++",
"bytes": "4534403"
},
{
"name": "CMake",
"bytes": "73731"
},
{
"name": "Cuda",
"bytes": "779964"
},
{
"name": "Groovy",
"bytes": "217"
},
{
"name": "Java",
"bytes": "20406"
},
{
"name": "Jupyter Notebook",
"bytes": "1319611"
},
{
"name": "Makefile",
"bytes": "48611"
},
{
"name": "Matlab",
"bytes": "30187"
},
{
"name": "Perl",
"bytes": "1057595"
},
{
"name": "Perl 6",
"bytes": "4176"
},
{
"name": "Python",
"bytes": "4431956"
},
{
"name": "R",
"bytes": "287257"
},
{
"name": "Rebol",
"bytes": "353"
},
{
"name": "Scala",
"bytes": "910341"
},
{
"name": "Shell",
"bytes": "199166"
}
],
"symlink_target": ""
}
|
from djangox.test.depth import alltests
def suite():
    """Collect and return this package's full test suite via djangox's helper."""
    package_suite = alltests(__file__, __name__)
    return package_suite
|
{
"content_hash": "59aa73cab038432614a809adbf859eae",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 39,
"avg_line_length": 19,
"alnum_prop": 0.6736842105263158,
"repo_name": "ask/ghettoq",
"id": "97abbfe014f54d12a7ad47abd906628b1dabb21e",
"size": "95",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "ghettoq/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "50119"
}
],
"symlink_target": ""
}
|
from os_win import constants
from os_win import exceptions as os_win_exc
from os_win import utilsfactory
from oslo_log import log as logging
import nova.conf
from nova import utils
from nova.virt import event as virtevent
from nova.virt.hyperv import serialconsoleops
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
class InstanceEventHandler(object):
    """Listens for Hyper-V VM power state changes and emits Nova lifecycle events."""

    # A zero timeout makes polling the listener return immediately instead of
    # blocking the thread.
    _WAIT_TIMEOUT = 0

    # Maps Hyper-V power states to the Nova lifecycle events they trigger.
    _TRANSITION_MAP = {
        constants.HYPERV_VM_STATE_ENABLED: virtevent.EVENT_LIFECYCLE_STARTED,
        constants.HYPERV_VM_STATE_DISABLED: virtevent.EVENT_LIFECYCLE_STOPPED,
        constants.HYPERV_VM_STATE_PAUSED: virtevent.EVENT_LIFECYCLE_PAUSED,
        constants.HYPERV_VM_STATE_SUSPENDED:
            virtevent.EVENT_LIFECYCLE_SUSPENDED
    }

    def __init__(self, state_change_callback=None):
        self._vmutils = utilsfactory.get_vmutils()
        self._listener = self._vmutils.get_vm_power_state_change_listener(
            timeframe=CONF.hyperv.power_state_check_timeframe,
            event_timeout=CONF.hyperv.power_state_event_polling_interval,
            filtered_states=list(self._TRANSITION_MAP.keys()),
            get_handler=True)
        self._serial_console_ops = serialconsoleops.SerialConsoleOps()
        self._state_change_callback = state_change_callback

    def start_listener(self):
        """Spawn a green thread running the power-state-change listener."""
        utils.spawn_n(self._listener, self._event_callback)

    def _event_callback(self, instance_name, instance_power_state):
        # Instance uuid set by Nova. If this is missing, we assume that
        # the instance was not created by Nova and ignore the event.
        instance_uuid = self._get_instance_uuid(instance_name)
        if not instance_uuid:
            return
        self._emit_event(instance_name, instance_uuid, instance_power_state)

    def _emit_event(self, instance_name, instance_uuid, instance_state):
        lifecycle_event = self._get_virt_event(instance_uuid, instance_state)
        utils.spawn_n(self._state_change_callback, lifecycle_event)
        utils.spawn_n(self._handle_serial_console_workers,
                      instance_name, instance_state)

    def _handle_serial_console_workers(self, instance_name, instance_state):
        # Serial console workers only run while the instance is powered on.
        if instance_state == constants.HYPERV_VM_STATE_ENABLED:
            self._serial_console_ops.start_console_handler(instance_name)
        else:
            self._serial_console_ops.stop_console_handler(instance_name)

    def _get_instance_uuid(self, instance_name):
        """Return the instance uuid, or None if the VM is gone or has no uuid."""
        try:
            instance_uuid = self._vmutils.get_instance_uuid(instance_name)
        except os_win_exc.HyperVVMNotFoundException:
            # The instance has been deleted.
            return None
        if not instance_uuid:
            LOG.warning("Instance uuid could not be retrieved for "
                        "instance %s. Instance state change event "
                        "will be ignored.", instance_name)
        return instance_uuid

    def _get_virt_event(self, instance_uuid, instance_state):
        transition = self._TRANSITION_MAP[instance_state]
        return virtevent.LifecycleEvent(uuid=instance_uuid,
                                        transition=transition)
|
{
"content_hash": "b2a4a5f5c1fd280ba78252a04776d1a3",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 78,
"avg_line_length": 41.55555555555556,
"alnum_prop": 0.6464646464646465,
"repo_name": "gooddata/openstack-nova",
"id": "e958717bb80d6e585c477f66f9c4a90108a0c4e7",
"size": "4005",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "nova/virt/hyperv/eventhandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3858"
},
{
"name": "HTML",
"bytes": "1386"
},
{
"name": "PHP",
"bytes": "43584"
},
{
"name": "Python",
"bytes": "23012372"
},
{
"name": "Shell",
"bytes": "32567"
},
{
"name": "Smarty",
"bytes": "429290"
}
],
"symlink_target": ""
}
|
__author__ = 'jcastro'
class Player(object):
    """A player and the games they have taken part in."""

    def __init__(self):
        self.nick = None    # short handle, if any
        self.name = None    # full display name
        self.team = None
        self.number = None
        # Maps game id -> {'team', 'position', 'score'}.
        self.games = {}

    def add_game(self, game, split):
        """Record one game entry parsed from a tokenized input line.

        ``split`` is the tokenized line: index 3 is the team, index 4 the
        position, and everything from index 5 onward the score values.
        The first occurrence of a game id wins; duplicates are ignored.
        """
        if game not in self.games:
            # Idiom fix: slice instead of the original manual index loop
            # (for i in range(5, len(split)): score.append(split[i])).
            self.games[game] = {
                'team': split[3],
                'position': split[4],
                'score': list(split[5:]),
            }
|
{
"content_hash": "0365b902689df3ef166f2d98dfe94766",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 83,
"avg_line_length": 25.95,
"alnum_prop": 0.4932562620423892,
"repo_name": "jcastrojob/kata_tucan",
"id": "043dfe4c2bca735af7ad1fdeecfe2e5585fbf4d1",
"size": "519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "player.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5081"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the SiteSettings model for site-wide flags."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='SiteSettings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_rush_open', models.BooleanField(default=False, verbose_name='Is rush open')),
                ('voting_status', models.CharField(choices=[('C', 'Closed'), ('A', 'Applications'), ('X', 'Applications Closed'), ('V', 'Voting'), ('R', 'Results')], default='C', max_length=1, verbose_name='Voting Status')),
            ],
        ),
    ]
|
{
"content_hash": "2548a76588ce2aa66a0be45bf43c750e",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 224,
"avg_line_length": 34,
"alnum_prop": 0.5842245989304813,
"repo_name": "TexasLAN/texaslan.org",
"id": "45c660acc301b3e437cfaf851712a7032020be85",
"size": "819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "texaslan/site_settings/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "242691"
},
{
"name": "Dockerfile",
"bytes": "576"
},
{
"name": "HTML",
"bytes": "116042"
},
{
"name": "JavaScript",
"bytes": "50779"
},
{
"name": "Python",
"bytes": "165928"
},
{
"name": "Shell",
"bytes": "4612"
}
],
"symlink_target": ""
}
|
"""Utilities for the API to configure cross-project networking (XPN)."""
from googlecloudsdk.api_lib.compute import client_adapter
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.core import exceptions
# TODO(user): update it when a new XPN API version is released.
_DEFAULT_API_VERSION = 'alpha'
class XpnApiError(exceptions.Error):
  """Raised when an XPN request fails; collected errors are wrapped into it."""
  pass
class XpnClient(object):
  """A client for interacting with the cross-project networking (XPN) API.
  The XPN API is a subset of the Google Compute Engine API.
  """
  def __init__(self, compute_client):
    # Keep handles to the adapter's apitools client and message factory so
    # each method can build and issue requests.
    self.compute_client = compute_client
    self.client = compute_client.apitools_client
    self.messages = compute_client.messages
  # TODO(b/30465957): Refactor to use apitools clients directly and not the
  # compute utilities
  def _MakeRequest(self, request, errors):
    """Issue a single request tuple; any errors are appended to `errors`."""
    return self.compute_client.MakeRequests(
        requests=[request],
        errors_to_collect=errors)
  def _MakeRequestSync(self, request_tuple, operation_msg=None):
    """Issue `request_tuple` and raise XpnApiError if any error occurred."""
    errors = []
    results = list(self._MakeRequest(request_tuple, errors))
    if errors:
      operation_msg = operation_msg or 'complete all requests'
      msg = 'Could not {0}:'.format(operation_msg)
      utils.RaiseException(errors, XpnApiError, msg)
    return results[0]  # if there were no errors, this will exist
  def EnableHost(self, project):
    """Enable the project with the given ID as an XPN host."""
    request_tuple = (
        self.client.projects,
        'EnableXpnHost',
        self.messages.ComputeProjectsEnableXpnHostRequest(project=project))
    msg = 'enable [{project}] as XPN host'.format(project=project)
    self._MakeRequestSync(request_tuple, msg)
  def DisableHost(self, project):
    """Disable the project with the given ID as an XPN host."""
    request_tuple = (
        self.client.projects,
        'DisableXpnHost',
        self.messages.ComputeProjectsDisableXpnHostRequest(project=project))
    msg = 'disable [{project}] as XPN host'.format(project=project)
    self._MakeRequestSync(request_tuple, msg)
  def GetHostProject(self, project):
    """Get the XPN host for the given project."""
    request_tuple = (
        self.client.projects,
        'GetXpnHost',
        self.messages.ComputeProjectsGetXpnHostRequest(project=project))
    msg = 'get XPN host for project [{project}]'.format(project=project)
    return self._MakeRequestSync(request_tuple, msg)
  def ListEnabledResources(self, project):
    """List the resources enabled to use `project` as an XPN host."""
    request_tuple = (
        self.client.projects,
        'GetXpnResources',
        self.messages.ComputeProjectsGetXpnResourcesRequest(project=project))
    msg = ('list resources that are enabled to use project [{project}] as an '
           'XPN host').format(project=project)
    return self._MakeRequestSync(request_tuple, msg)
  def ListOrganizationHostProjects(self, project, organization_id):
    """List the projects in an organization that are enabled as XPN hosts.
    Args:
      project: str, project ID to make the request with.
      organization_id: str, the ID of the organization to list XPN hosts
        for. If None, the organization is inferred from the project.
    Returns:
      Generator for `Project`s corresponding to XPN hosts in the organization.
    """
    request = self.messages.ComputeProjectsListXpnHostsRequest(
        project=project,
        projectsListXpnHostsRequest=self.messages.ProjectsListXpnHostsRequest(
            organization=organization_id))
    if organization_id:
      msg = ('list XPN hosts for organization [{0}] '
             '(current project is [{1}])').format(organization_id, project)
    else:
      msg = ('list XPN hosts for organization inferred from project [{0}]'
            ).format(project)
    # TODO(b/29896285): Use apitools list_pager.YieldFromList when API fully
    # supports paging
    items = self._MakeRequestSync(
        (self.client.projects, 'ListXpnHosts', request), msg).items
    # Return a generator, since that's what will happend when we use
    # YieldFromList
    return iter(items)
  def _EnableXpnAssociatedResource(self, host_project, associated_resource,
                                   xpn_resource_type):
    """Associate the given resource with the given XPN host project.
    Args:
      host_project: str, ID of the XPN host project
      associated_resource: ID of the resource to associate with host_project
      xpn_resource_type: XpnResourceId.TypeValueValuesEnum, the type of the
        resource
    """
    projects_enable_request = self.messages.ProjectsEnableXpnResourceRequest(
        xpnResource=self.messages.XpnResourceId(
            id=associated_resource,
            type=xpn_resource_type)
    )
    request = self.messages.ComputeProjectsEnableXpnResourceRequest(
        project=host_project,
        projectsEnableXpnResourceRequest=projects_enable_request)
    request_tuple = (self.client.projects, 'EnableXpnResource', request)
    msg = ('enable resource [{0}] as an associated resource '
           'for project [{1}]').format(associated_resource, host_project)
    self._MakeRequestSync(request_tuple, msg)
  def EnableXpnAssociatedProject(self, host_project, associated_project):
    """Associate the given project with the given XPN host project.
    Args:
      host_project: str, ID of the XPN host project
      associated_project: ID of the project to associate
    """
    xpn_types = self.messages.XpnResourceId.TypeValueValuesEnum
    self._EnableXpnAssociatedResource(
        host_project, associated_project, xpn_resource_type=xpn_types.PROJECT)
  def _DisableXpnAssociatedResource(self, host_project, associated_resource,
                                    xpn_resource_type):
    """Disassociate the given resource from the given XPN host project.
    Args:
      host_project: str, ID of the XPN host project
      associated_resource: ID of the resource to disassociate from host_project
      xpn_resource_type: XpnResourceId.TypeValueValuesEnum, the type of the
        resource
    """
    projects_disable_request = self.messages.ProjectsDisableXpnResourceRequest(
        xpnResource=self.messages.XpnResourceId(
            id=associated_resource,
            type=xpn_resource_type)
    )
    request = self.messages.ComputeProjectsDisableXpnResourceRequest(
        project=host_project,
        projectsDisableXpnResourceRequest=projects_disable_request)
    request_tuple = (self.client.projects, 'DisableXpnResource', request)
    msg = ('disable resource [{0}] as an associated resource '
           'for project [{1}]').format(associated_resource, host_project)
    self._MakeRequestSync(request_tuple, msg)
  def DisableXpnAssociatedProject(self, host_project, associated_project):
    """Disassociate the given project from the given XPN host project.
    Args:
      host_project: str, ID of the XPN host project
      associated_project: ID of the project to disassociate from host_project
    """
    xpn_types = self.messages.XpnResourceId.TypeValueValuesEnum
    self._DisableXpnAssociatedResource(
        host_project, associated_project, xpn_resource_type=xpn_types.PROJECT)
def GetXpnClient(api_version=_DEFAULT_API_VERSION):
  """Build an XpnClient backed by a compute client for `api_version`."""
  adapter = client_adapter.ClientAdapter(api_version)
  return XpnClient(adapter)
|
{
"content_hash": "a57ee265238b5ab50e2834e7360e89b4",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 79,
"avg_line_length": 40.625698324022345,
"alnum_prop": 0.7033828382838284,
"repo_name": "Sorsly/subtle",
"id": "0bfc33ac03b21b2e93bd541c8715e7d9de530ad6",
"size": "7867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/xpn_api.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1581"
},
{
"name": "CSS",
"bytes": "226"
},
{
"name": "HTML",
"bytes": "4637"
},
{
"name": "JavaScript",
"bytes": "3037"
},
{
"name": "PHP",
"bytes": "4543"
},
{
"name": "Pascal",
"bytes": "31"
},
{
"name": "Python",
"bytes": "13243860"
},
{
"name": "Roff",
"bytes": "1050600"
},
{
"name": "Shell",
"bytes": "16136"
},
{
"name": "Smarty",
"bytes": "2484"
},
{
"name": "SourcePawn",
"bytes": "308"
}
],
"symlink_target": ""
}
|
import os
import time
import numpy as np
import tensorflow as tf
from eight_mile.progress import create_progress_bar
from eight_mile.bleu import bleu
from baseline.utils import (
convert_seq2seq_golds,
convert_seq2seq_preds,
)
from baseline.train import Trainer, register_trainer
from baseline.tf.tfy import TRAIN_FLAG, SET_TRAIN_FLAG
from baseline.model import create_model_for
# Number of batches to prefetch if using tf.datasets
NUM_PREFETCH = 2
# The shuffle buffer
SHUF_BUF_SZ = 5000
def to_tensors(ts, src_lengths_key, dst=False):
    """Convert a data feed into a tuple of `features` (`dict`) and `y` values.

    Required to produce `tf.dataset`s from the input data feed. Fields ending
    in `_lengths` are dropped, except the one named by `src_lengths_key` and
    `tgt_lengths`, which are renamed to `src_len` and `tgt_len` respectively.

    :param ts: The data feed to convert (a sequence of sample dicts)
    :param src_lengths_key: Field holding the source-of-truth temporal lengths
    :param dst: `bool`, prepare a `dst` tensor (needed in distributed mode)
    :return: A `tuple` of `features` and `y` (labels)
    """
    # This is kind of a hack: rebuild the key list from the first sample,
    # dropping every lengths field and 'ids', then re-adding the two length
    # keys we actually want to keep.
    feature_keys = [k for k in ts[0].keys()
                    if '_lengths' not in k and k != 'ids']
    feature_keys += [src_lengths_key, "tgt_lengths"]

    columns = {k: [] for k in feature_keys}
    for sample in ts:
        for k in feature_keys:
            columns[k].extend(sample[k])

    # Normalize the length-field names.
    columns['src_len'] = columns.pop(src_lengths_key)
    columns['tgt_len'] = columns.pop('tgt_lengths')

    features = {k: np.stack(v).astype(np.int32) for k, v in columns.items()}
    if dst:
        features['dst'] = features['tgt'][:, :-1]
    tgt = features.pop('tgt')
    return features, tgt
|
{
"content_hash": "f6013942378bdfd740071542b22cefa0",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 119,
"avg_line_length": 34.56363636363636,
"alnum_prop": 0.6754339821146765,
"repo_name": "dpressel/baseline",
"id": "eec40f5356b74be6352e7046fa8dbae9704ec70a",
"size": "1901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "baseline/tf/seq2seq/training/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "9649"
},
{
"name": "CMake",
"bytes": "430"
},
{
"name": "HCL",
"bytes": "923"
},
{
"name": "Perl",
"bytes": "17554"
},
{
"name": "Python",
"bytes": "1281602"
},
{
"name": "Roff",
"bytes": "24"
},
{
"name": "Shell",
"bytes": "10168"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from lampost.di.resource import Injected, module_inject
from lampost.db.dbo import DBOField, ParentDBO, ChildDBO, CoreDBO
log = Injected('log')
db = Injected('datastore')
module_inject(__name__)
def create(config_id, raw_configs, set_defaults=False):
    """Build and persist a Config plus its ConfigSection children.

    Merges every raw config dict into per-section Setting maps, warning when a
    setting is defined more than once or appears in multiple sections. When
    ``set_defaults`` is True, each setting's current value also becomes its
    default.
    """
    section_cache = {}
    # setting name -> set of section names it appears in (for conflict warning)
    value_locations = defaultdict(set)

    def get_section(section_name, section_dto=None):
        # Lazily create each ConfigSection exactly once.
        if section_name in section_cache:
            return section_cache[section_name]
        section_dto = section_dto or {}
        section_dto['dbo_id'] = '{}:{}'.format(config_id, section_name)
        new_section = db.create_object(ConfigSection, section_dto)
        section_cache[section_name] = new_section
        return new_section

    def merge_raw(raw_config):
        # Explicit section definitions come first, so their dtos are used.
        for section_name, section_dto in raw_config.pop('sections', {}).items():
            get_section(section_name, section_dto)
        for section_name, settings in raw_config.items():
            section = get_section(section_name)
            setting_map = {setting.name: setting for setting in section.settings}
            for raw_setting in settings:
                setting = Setting().hydrate(raw_setting)
                if set_defaults:
                    setting.default = setting.value
                if setting.name in setting_map:
                    existing = setting_map[setting.name]
                    log.warn("Setting {} with value {} overwritten by {}", setting.name, existing.value, setting.value)
                setting_map[setting.name] = setting
                value_locations[setting.name].add(section_name)
            section.settings = setting_map.values()
            db.save_object(section)

    for raw_config in raw_configs:
        merge_raw(raw_config)

    for setting_name, section_names in value_locations.items():
        if len(section_names) > 1:
            log.warn("Setting name {} found in multiple sections: {}", setting_name, ' '.join(section_names))
    return db.create_object(Config, {'dbo_id': config_id})
class Config(ParentDBO):
    """Parent DBO holding a flat cache of section settings as section:name keys."""
    dbo_key_type = 'config'
    dbo_set_key = 'configs'
    dbo_children_types = ['c_sect']

    def update_value(self, section, name, value):
        """Update setting `name` in the section named `section` and persist it.

        `section` is the section name (the child id), not a ConfigSection
        object.
        """
        # Bug fix: the loaded ConfigSection previously rebound the `section`
        # parameter, so both the section_values cache key and the error
        # message were formatted with the object (or None on a failed load)
        # instead of the section name that _on_loaded uses for its keys.
        section_obj = db.load_object('c_sect:{}:{}'.format(self.dbo_id, section))
        if section_obj:
            self.section_values['{}:{}'.format(section, name)] = value
            for setting in section_obj.settings:
                if setting.name == name:
                    setting.value = value
                    db.save_object(section_obj)
                    return
        log.error("No setting found for {}:{}".format(section, name))

    def _on_loaded(self):
        # Rebuild the flat "child_id:setting_name" -> value cache from every
        # child section.
        self.section_values = {}
        for child_key in self.dbo_child_keys('c_sect'):
            section = db.load_object(child_key, ConfigSection)
            if section:
                for setting in section.settings:
                    self.section_values['{}:{}'.format(section.child_id, setting.name)] = setting.value
class ConfigSection(ChildDBO):
    # Child DBO holding the list of settings for one named config section.
    dbo_key_type = 'c_sect'
    dbo_parent_type = 'config'
    desc = DBOField()  # human-readable description of the section
    editor_constants = DBOField(False)
    settings = DBOField([], 'setting')  # list of Setting child objects
class Setting(CoreDBO):
    # One configurable value stored inside a ConfigSection.
    class_id = 'setting'
    name = DBOField()  # key, unique within its section
    value = DBOField()  # current value
    desc = DBOField()  # human-readable description
    default = DBOField()  # default value (set from value when create(..., set_defaults=True))
    data_type = DBOField()
    min_value = DBOField()  # presumably editor constraints for numeric types -- not enforced here
    max_value = DBOField()
    step = DBOField(1)
|
{
"content_hash": "f116f9c0fd52dc59548332d8ab382a00",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 119,
"avg_line_length": 34.46,
"alnum_prop": 0.587347649448636,
"repo_name": "genzgd/lampost_lib",
"id": "a9ce7fc208c850f043c784fe79bb0e4048069c69",
"size": "3446",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lampost/db/dbconfig.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "166384"
}
],
"symlink_target": ""
}
|
from flask_sqlalchemy import SQLAlchemy

# Module-level SQLAlchemy instance, intended to be imported elsewhere so the
# whole application shares one `db` object.
db = SQLAlchemy()
|
{
"content_hash": "f791d277a48deba47893862a915879a4",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 39,
"avg_line_length": 29,
"alnum_prop": 0.8103448275862069,
"repo_name": "cdagli/flask-api-starter",
"id": "1dc6828f1e96a904d0ff318e3b6d84d033724248",
"size": "101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/api/utils/database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14472"
}
],
"symlink_target": ""
}
|
"""Tests of the Analyzer CLI Backend."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug import debug_data
from tensorflow.python.debug import debug_utils
from tensorflow.python.debug.cli import analyzer_cli
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.platform import test
def parse_op_and_node(line):
  """Parse a line containing an op node followed by a node name.

  For example, if the line is
    "  [Variable] hidden/weights",
  this function will return ("Variable", "hidden/weights")

  Args:
    line: The line to be parsed, as a str.

  Returns:
    Name of the parsed op type.
    Name of the parsed node.
  """
  tokens = line.strip().split(" ")
  op_type = tokens[0].replace("[", "").replace("]", "")
  # Index 1 rather than -1, to tolerate any other items that might be present
  # behind the node name.
  node_name = tokens[1]
  return op_type, node_name
def assert_listed_tensors(tst,
                          out,
                          expected_tensor_names,
                          node_name_regex=None,
                          op_type_regex=None,
                          tensor_filter_name=None):
  """Check RichTextLines output for list_tensors commands.

  Args:
    tst: A test_util.TensorFlowTestCase instance.
    out: The RichTextLines object to be checked.
    expected_tensor_names: Expected tensor names in the list.
    node_name_regex: Optional: node name regex filter.
    op_type_regex: Optional: op type regex filter.
    tensor_filter_name: Optional: name of the tensor filter.
  """
  lines = iter(out.lines)
  expected_count = len(expected_tensor_names)

  # Header line depends on whether a tensor filter was applied.
  if tensor_filter_name is None:
    tst.assertEqual("%d dumped tensor(s):" % expected_count, next(lines))
  else:
    tst.assertEqual("%d dumped tensor(s) passing filter \"%s\":" %
                    (expected_count, tensor_filter_name), next(lines))
  if op_type_regex is not None:
    tst.assertEqual("Op type regex filter: \"%s\"" % op_type_regex,
                    next(lines))
  if node_name_regex is not None:
    tst.assertEqual("Node name regex filter: \"%s\"" % node_name_regex,
                    next(lines))
  tst.assertEqual("", next(lines))

  # Collect the listed tensors and their relative timestamps.
  timestamps = []
  listed_names = []
  for line in lines:
    rel_time = float(line.split("ms] ")[0].replace("[", ""))
    tst.assertGreaterEqual(rel_time, 0.0)
    timestamps.append(rel_time)
    listed_names.append(line.split("ms] ")[1])

  # Tensors must be listed in ascending timestamp order.
  tst.assertEqual(sorted(timestamps), timestamps)

  # Every expected tensor must appear in the listing.
  for expected_name in expected_tensor_names:
    tst.assertIn(expected_name, listed_names)
def assert_node_attribute_lines(tst,
                                out,
                                node_name,
                                op_type,
                                device,
                                input_op_type_node_name_pairs,
                                ctrl_input_op_type_node_name_pairs,
                                recipient_op_type_node_name_pairs,
                                ctrl_recipient_op_type_node_name_pairs,
                                attr_key_val_pairs=None,
                                num_dumped_tensors=None):
  """Check RichTextLines output for node_info commands.
  Args:
    tst: A test_util.TensorFlowTestCase instance.
    out: The RichTextLines object to be checked.
    node_name: Name of the node.
    op_type: Op type of the node, as a str.
    device: Name of the device on which the node resides.
    input_op_type_node_name_pairs: A list of 2-tuples of op type and node name,
      for the (non-control) inputs to the node.
    ctrl_input_op_type_node_name_pairs: A list of 2-tuples of op type and node
      name, for the control inputs to the node.
    recipient_op_type_node_name_pairs: A list of 2-tuples of op type and node
      name, for the (non-control) output recipients to the node.
    ctrl_recipient_op_type_node_name_pairs: A list of 2-tuples of op type and
      node name, for the control output recipients to the node.
    attr_key_val_pairs: Optional: attribute key-value pairs of the node, as a
      list of 2-tuples.
    num_dumped_tensors: Optional: number of tensor dumps from the node.
  """
  # The output is consumed strictly line by line; every assertion below
  # advances the iterator, so the order of checks mirrors the output format.
  line_iter = iter(out.lines)
  tst.assertEqual("Node %s" % node_name, next(line_iter))
  tst.assertEqual("", next(line_iter))
  tst.assertEqual("  Op: %s" % op_type, next(line_iter))
  tst.assertEqual("  Device: %s" % device, next(line_iter))
  tst.assertEqual("", next(line_iter))
  tst.assertEqual("  %d input(s) + %d control input(s):" %
                  (len(input_op_type_node_name_pairs),
                   len(ctrl_input_op_type_node_name_pairs)), next(line_iter))
  # Check inputs.
  tst.assertEqual("    %d input(s):" % len(input_op_type_node_name_pairs),
                  next(line_iter))
  # NOTE: the loop below rebinds the op_type and node_name parameters; they
  # are not needed again after this point.
  for op_type, node_name in input_op_type_node_name_pairs:
    tst.assertEqual("      [%s] %s" % (op_type, node_name), next(line_iter))
  tst.assertEqual("", next(line_iter))
  # Check control inputs.
  if ctrl_input_op_type_node_name_pairs:
    tst.assertEqual("    %d control input(s):" %
                    len(ctrl_input_op_type_node_name_pairs), next(line_iter))
    for op_type, node_name in ctrl_input_op_type_node_name_pairs:
      tst.assertEqual("      [%s] %s" % (op_type, node_name), next(line_iter))
    tst.assertEqual("", next(line_iter))
  tst.assertEqual("  %d recipient(s) + %d control recipient(s):" %
                  (len(recipient_op_type_node_name_pairs),
                   len(ctrl_recipient_op_type_node_name_pairs)),
                  next(line_iter))
  # Check recipients, the order of which is not deterministic.
  tst.assertEqual("    %d recipient(s):" %
                  len(recipient_op_type_node_name_pairs), next(line_iter))
  t_recs = []
  for _ in recipient_op_type_node_name_pairs:
    line = next(line_iter)
    op_type, node_name = parse_op_and_node(line)
    t_recs.append((op_type, node_name))
  tst.assertItemsEqual(recipient_op_type_node_name_pairs, t_recs)
  # Check control recipients, the order of which is not deterministic.
  if ctrl_recipient_op_type_node_name_pairs:
    tst.assertEqual("", next(line_iter))
    tst.assertEqual("    %d control recipient(s):" %
                    len(ctrl_recipient_op_type_node_name_pairs),
                    next(line_iter))
    t_ctrl_recs = []
    for _ in ctrl_recipient_op_type_node_name_pairs:
      line = next(line_iter)
      op_type, node_name = parse_op_and_node(line)
      t_ctrl_recs.append((op_type, node_name))
    tst.assertItemsEqual(ctrl_recipient_op_type_node_name_pairs, t_ctrl_recs)
  # The order of multiple attributes can be non-deterministic.
  if attr_key_val_pairs:
    tst.assertEqual("", next(line_iter))
    tst.assertEqual("Node attributes:", next(line_iter))
    kv_pairs = []
    # NOTE: the key/val loop variables are immediately overwritten from
    # line_iter; the loop only fixes the number of iterations.
    for key, val in attr_key_val_pairs:
      key = next(line_iter).strip().replace(":", "")
      val = next(line_iter).strip()
      kv_pairs.append((key, val))
      tst.assertEqual("", next(line_iter))
    tst.assertItemsEqual(attr_key_val_pairs, kv_pairs)
  if num_dumped_tensors is not None:
    tst.assertEqual("", next(line_iter))
    tst.assertEqual("%d dumped tensor(s):" % num_dumped_tensors,
                    next(line_iter))
    dump_timestamps_ms = []
    for _ in xrange(num_dumped_tensors):
      line = next(line_iter)
      tst.assertStartsWith(line.strip(), "Slot 0 @ DebugIdentity @")
      tst.assertTrue(line.strip().endswith(" ms"))
      dump_timestamp_ms = float(line.strip().split(" @ ")[-1].replace("ms", ""))
      tst.assertGreaterEqual(dump_timestamp_ms, 0.0)
      dump_timestamps_ms.append(dump_timestamp_ms)
    # Dumps must be listed in ascending timestamp order.
    tst.assertEqual(sorted(dump_timestamps_ms), dump_timestamps_ms)
def check_syntax_error_output(tst, out, command_prefix):
  """Check RichTextLines output for valid command prefix but invalid syntax.

  Args:
    tst: A test_util.TensorFlowTestCase instance.
    out: The RichTextLines object to be checked.
    command_prefix: The command prefix of the command that caused the error.
  """
  expected_lines = [
      "Syntax error for command: %s" % command_prefix,
      "For help, do \"help %s\"" % command_prefix
  ]
  tst.assertEqual(expected_lines, out.lines)
def check_error_output(tst, out, command_prefix, args):
  """Check RichTextLines output from invalid/erroneous commands.

  Args:
    tst: A test_util.TensorFlowTestCase instance.
    out: The RichTextLines object to be checked.
    command_prefix: The command prefix of the command that caused the error.
    args: The arguments (excluding prefix) of the command that caused the error.
  """
  tst.assertGreater(len(out.lines), 2)
  expected_header = "Error occurred during handling of command: %s %s" % (
      command_prefix, " ".join(args))
  tst.assertStartsWith(out.lines[0], expected_header)
class AnalyzerCLISimpleMulAddTest(test_util.TensorFlowTestCase):
  """Tests of the analyzer CLI on a small matmul/add graph.

  setUpClass runs x = w + w with w = matmul(u, v) (name scope
  "simple_mul_add") once with DebugIdentity watches dumping to a temp
  directory, then builds a DebugAnalyzer plus a command registry shared by
  all test methods.
  """

  @classmethod
  def setUpClass(cls):
    """Generate the debug dump once and register the CLI command handlers."""
    cls._dump_root = tempfile.mkdtemp()

    cls._is_gpu_available = test.is_gpu_available()
    if cls._is_gpu_available:
      cls._main_device = "/job:localhost/replica:0/task:0/gpu:0"
    else:
      cls._main_device = "/job:localhost/replica:0/task:0/cpu:0"

    with session.Session() as sess:
      u_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]])
      v_init_val = np.array([[2.0], [-1.0]])

      u_name = "simple_mul_add/u"
      v_name = "simple_mul_add/v"

      u_init = constant_op.constant(u_init_val, shape=[2, 2])
      u = variables.Variable(u_init, name=u_name)
      v_init = constant_op.constant(v_init_val, shape=[2, 1])
      v = variables.Variable(v_init, name=v_name)

      w = math_ops.matmul(u, v, name="simple_mul_add/matmul")
      x = math_ops.add(w, w, name="simple_mul_add/add")

      u.initializer.run()
      v.initializer.run()

      # Attach DebugIdentity watches on the graph, dumping to the temp dir.
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options,
          sess.graph,
          debug_ops=["DebugIdentity"],
          debug_urls="file://%s" % cls._dump_root)

      # Invoke Session.run().
      run_metadata = config_pb2.RunMetadata()
      sess.run(x, options=run_options, run_metadata=run_metadata)

    cls._debug_dump = debug_data.DebugDumpDir(
        cls._dump_root, partition_graphs=run_metadata.partition_graphs)

    # Construct the analyzer.
    cls._analyzer = analyzer_cli.DebugAnalyzer(cls._debug_dump)

    # Construct the handler registry.
    cls._registry = debugger_cli_common.CommandHandlerRegistry()

    # Register command handlers.
    cls._registry.register_command_handler(
        "list_tensors",
        cls._analyzer.list_tensors,
        cls._analyzer.get_help("list_tensors"),
        prefix_aliases=["lt"])
    cls._registry.register_command_handler(
        "node_info",
        cls._analyzer.node_info,
        cls._analyzer.get_help("node_info"),
        prefix_aliases=["ni"])
    cls._registry.register_command_handler(
        "print_tensor",
        cls._analyzer.print_tensor,
        cls._analyzer.get_help("print_tensor"),
        prefix_aliases=["pt"])

  @classmethod
  def tearDownClass(cls):
    # Tear down temporary dump directory.
    shutil.rmtree(cls._dump_root)

  def testListTensors(self):
    """All six dumped tensors should appear in the listing."""
    # Use shorthand alias for the command prefix.
    out = self._registry.dispatch_command("lt", [])

    assert_listed_tensors(self, out, [
        "simple_mul_add/u:0", "simple_mul_add/v:0", "simple_mul_add/u/read:0",
        "simple_mul_add/v/read:0", "simple_mul_add/matmul:0",
        "simple_mul_add/add:0"
    ])

  def testListTensorsFilterByNodeNameRegex(self):
    """-n / --node_name_filter restricts the listing by node-name regex."""
    out = self._registry.dispatch_command("list_tensors",
                                          ["--node_name_filter", ".*read.*"])
    assert_listed_tensors(
        self,
        out, [
            "simple_mul_add/u/read:0", "simple_mul_add/v/read:0"
        ],
        node_name_regex=".*read.*")

    # A non-matching pattern yields an empty listing.
    out = self._registry.dispatch_command("list_tensors", ["-n", "^read"])
    assert_listed_tensors(self, out, [], node_name_regex="^read")

  def testListTensorFilterByOpTypeRegex(self):
    """-t / --op_type_filter restricts the listing by op-type regex."""
    out = self._registry.dispatch_command("list_tensors",
                                          ["--op_type_filter", "Identity"])
    assert_listed_tensors(
        self,
        out, [
            "simple_mul_add/u/read:0", "simple_mul_add/v/read:0"
        ],
        op_type_regex="Identity")

    out = self._registry.dispatch_command("list_tensors",
                                          ["-t", "(Add|MatMul)"])
    assert_listed_tensors(
        self,
        out, [
            "simple_mul_add/add:0", "simple_mul_add/matmul:0"
        ],
        op_type_regex="(Add|MatMul)")

  def testListTensorFilterByNodeNameRegexAndOpTypeRegex(self):
    """Node-name and op-type filters apply conjunctively."""
    out = self._registry.dispatch_command(
        "list_tensors", ["-t", "(Add|MatMul)", "-n", ".*add$"])
    assert_listed_tensors(
        self,
        out, [
            "simple_mul_add/add:0"
        ],
        node_name_regex=".*add$",
        op_type_regex="(Add|MatMul)")

  def testListTensorsFilterNanOrInf(self):
    """Test register and invoke a tensor filter."""
    # First, register the filter.
    self._analyzer.add_tensor_filter("has_inf_or_nan",
                                     debug_data.has_inf_or_nan)

    # Use shorthand alias for the command prefix.
    out = self._registry.dispatch_command("lt", ["-f", "has_inf_or_nan"])

    # This TF graph run did not generate any bad numerical values.
    assert_listed_tensors(self, out, [], tensor_filter_name="has_inf_or_nan")
    # TODO(cais): A test with some actual bad numerical values.

  def testListTensorNonexistentFilter(self):
    """Test attempt to use a nonexistent tensor filter."""
    out = self._registry.dispatch_command("lt", ["-f", "foo_filter"])

    self.assertEqual(["ERROR: There is no tensor filter named \"foo_filter\"."],
                     out.lines)

  def testListTensorsInvalidOptions(self):
    """Unknown flags produce a syntax-error message."""
    out = self._registry.dispatch_command("list_tensors", ["--foo"])
    check_syntax_error_output(self, out, "list_tensors")

  def testNodeInfoByNodeName(self):
    """node_info on a node name prints device, inputs and recipients."""
    out = self._registry.dispatch_command("node_info",
                                          ["simple_mul_add/matmul"])

    # matmul feeds both inputs of the add op, hence two identical recipients.
    recipients = [("Add", "simple_mul_add/add"), ("Add", "simple_mul_add/add")]

    assert_node_attribute_lines(self, out, "simple_mul_add/matmul", "MatMul",
                                self._main_device,
                                [("Identity", "simple_mul_add/u/read"),
                                 ("Identity", "simple_mul_add/v/read")], [],
                                recipients, [])

  def testNodeInfoShowAttributes(self):
    """-a also prints the node's attribute key/value pairs."""
    out = self._registry.dispatch_command("node_info",
                                          ["-a", "simple_mul_add/matmul"])

    assert_node_attribute_lines(
        self,
        out,
        "simple_mul_add/matmul",
        "MatMul",
        self._main_device, [("Identity", "simple_mul_add/u/read"),
                            ("Identity", "simple_mul_add/v/read")], [],
        [("Add", "simple_mul_add/add"), ("Add", "simple_mul_add/add")], [],
        attr_key_val_pairs=[("transpose_a", "b: false"),
                            ("transpose_b", "b: false"),
                            ("T", "type: DT_DOUBLE")])

  def testNodeInfoShowDumps(self):
    """-d also prints the dumped tensors of the node."""
    out = self._registry.dispatch_command("node_info",
                                          ["-d", "simple_mul_add/matmul"])

    assert_node_attribute_lines(
        self,
        out,
        "simple_mul_add/matmul",
        "MatMul",
        self._main_device, [("Identity", "simple_mul_add/u/read"),
                            ("Identity", "simple_mul_add/v/read")], [],
        [("Add", "simple_mul_add/add"), ("Add", "simple_mul_add/add")], [],
        num_dumped_tensors=1)

  def testNodeInfoByTensorName(self):
    """A tensor name (with :slot suffix) resolves to its producing node."""
    out = self._registry.dispatch_command("node_info",
                                          ["simple_mul_add/u/read:0"])

    assert_node_attribute_lines(self, out, "simple_mul_add/u/read", "Identity",
                                self._main_device,
                                [("Variable", "simple_mul_add/u")], [],
                                [("MatMul", "simple_mul_add/matmul")], [])

  def testNodeInfoNonexistentNodeName(self):
    """Unknown node names produce a red-colored error line."""
    out = self._registry.dispatch_command("node_info", ["bar"])
    self.assertEqual(
        ["ERROR: There is no node named \"bar\" in the partition graphs"],
        out.lines)
    # Check color indicating error.
    self.assertEqual({0: [(0, 59, "red")]}, out.font_attr_segs)

  def testPrintTensor(self):
    """print_tensor prints dtype, shape and the numpy repr of the value."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul:0"], screen_info={"cols": 80})

    self.assertEqual([
        "Tensor \"simple_mul_add/matmul:0:DebugIdentity\":",
        "  dtype: float64",
        "  shape: (2, 1)",
        "",
        "array([[ 7.],",
        "       [-2.]])",
    ], out.lines)

    self.assertIn("tensor_metadata", out.annotations)
    self.assertIn(4, out.annotations)
    self.assertIn(5, out.annotations)

  def testPrintTensorHighlightingRanges(self):
    """--ranges marks elements inside the given intervals with bold font."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul:0", "--ranges", "[-inf, 0.0]"],
        screen_info={"cols": 80})

    self.assertEqual([
        "Tensor \"simple_mul_add/matmul:0:DebugIdentity\": "
        "Highlighted([-inf, 0.0]): 1 of 2 element(s) (50.00%)",
        "  dtype: float64",
        "  shape: (2, 1)",
        "",
        "array([[ 7.],",
        "       [-2.]])",
    ], out.lines)

    self.assertIn("tensor_metadata", out.annotations)
    self.assertIn(4, out.annotations)
    self.assertIn(5, out.annotations)
    self.assertEqual([(8, 11, "bold")], out.font_attr_segs[5])

    out = self._registry.dispatch_command(
        "print_tensor",
        ["simple_mul_add/matmul:0", "--ranges", "[[-inf, -5.5], [5.5, inf]]"],
        screen_info={"cols": 80})

    self.assertEqual([
        "Tensor \"simple_mul_add/matmul:0:DebugIdentity\": "
        "Highlighted([[-inf, -5.5], [5.5, inf]]): "
        "1 of 2 element(s) (50.00%)",
        "  dtype: float64",
        "  shape: (2, 1)",
        "",
        "array([[ 7.],",
        "       [-2.]])",
    ], out.lines)

    self.assertIn("tensor_metadata", out.annotations)
    self.assertIn(4, out.annotations)
    self.assertIn(5, out.annotations)
    self.assertEqual([(9, 11, "bold")], out.font_attr_segs[4])
    self.assertNotIn(5, out.font_attr_segs)

  def testPrintTensorWithSlicing(self):
    """A numpy-style slicing suffix selects a sub-tensor to print."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul:0[1, :]"],
        screen_info={"cols": 80})

    self.assertEqual([
        "Tensor \"simple_mul_add/matmul:0:DebugIdentity[1, :]\":",
        "  dtype: float64", "  shape: (1,)", "", "array([-2.])"
    ], out.lines)

    self.assertIn("tensor_metadata", out.annotations)
    self.assertIn(4, out.annotations)

  def testPrintTensorInvalidSlicingString(self):
    """Malformed slicing strings are reported as a ValueError."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul:0[1, foo()]"],
        screen_info={"cols": 80})

    self.assertEqual("Error occurred during handling of command: print_tensor "
                     "simple_mul_add/matmul:0[1, foo()]:", out.lines[0])
    self.assertEqual("ValueError: Invalid tensor-slicing string.",
                     out.lines[-2])

  def testPrintTensorValidExplicitNumber(self):
    """-n 0 selects the (only) dump of a single-dump tensor."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul:0", "-n", "0"],
        screen_info={"cols": 80})

    self.assertEqual([
        "Tensor \"simple_mul_add/matmul:0:DebugIdentity\":",
        "  dtype: float64",
        "  shape: (2, 1)",
        "",
        "array([[ 7.],",
        "       [-2.]])",
    ], out.lines)

    self.assertIn("tensor_metadata", out.annotations)
    self.assertIn(4, out.annotations)
    self.assertIn(5, out.annotations)

  def testPrintTensorInvalidExplicitNumber(self):
    """An out-of-range -n value yields an error message."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul:0", "-n", "1"],
        screen_info={"cols": 80})

    self.assertEqual([
        "ERROR: Invalid number (1) for tensor simple_mul_add/matmul:0, "
        "which generated one dump."
    ], out.lines)

    self.assertNotIn("tensor_metadata", out.annotations)

  def testPrintTensorMissingOutputSlot(self):
    """A node name without an output slot is rejected."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul"])

    self.assertEqual([
        "ERROR: \"simple_mul_add/matmul\" is not a valid tensor name"
    ], out.lines)

  def testPrintTensorNonexistentNodeName(self):
    """A tensor of a nonexistent node is reported as an error."""
    out = self._registry.dispatch_command(
        "print_tensor", ["simple_mul_add/matmul/foo:0"])

    self.assertEqual([
        "ERROR: Node \"simple_mul_add/matmul/foo\" does not exist in partition "
        "graphs"
    ], out.lines)

  def testAddGetTensorFilterLambda(self):
    """A lambda can be registered and retrieved as a tensor filter."""
    analyzer = analyzer_cli.DebugAnalyzer(self._debug_dump)
    analyzer.add_tensor_filter("foo_filter", lambda x, y: True)
    self.assertTrue(analyzer.get_tensor_filter("foo_filter")(None, None))

  def testAddGetTensorFilterNestedFunction(self):
    """A nested function can be registered as a tensor filter."""
    analyzer = analyzer_cli.DebugAnalyzer(self._debug_dump)

    def foo_filter(unused_arg_0, unused_arg_1):
      return True

    analyzer.add_tensor_filter("foo_filter", foo_filter)
    self.assertTrue(analyzer.get_tensor_filter("foo_filter")(None, None))

  def testAddTensorFilterEmptyName(self):
    """Empty filter names are rejected with ValueError."""
    analyzer = analyzer_cli.DebugAnalyzer(self._debug_dump)

    with self.assertRaisesRegexp(ValueError,
                                 "Input argument filter_name cannot be empty."):
      analyzer.add_tensor_filter("", lambda datum, tensor: True)

  def testAddTensorFilterNonStrName(self):
    """Non-string filter names are rejected with TypeError."""
    analyzer = analyzer_cli.DebugAnalyzer(self._debug_dump)

    with self.assertRaisesRegexp(
        TypeError,
        "Input argument filter_name is expected to be str, ""but is not"):
      analyzer.add_tensor_filter(1, lambda datum, tensor: True)

  def testAddGetTensorFilterNonCallable(self):
    """Non-callable filter objects are rejected with TypeError."""
    analyzer = analyzer_cli.DebugAnalyzer(self._debug_dump)

    with self.assertRaisesRegexp(
        TypeError, "Input argument filter_callable is expected to be callable, "
        "but is not."):
      analyzer.add_tensor_filter("foo_filter", "bar")

  def testGetNonexistentTensorFilter(self):
    """Looking up an unregistered filter name raises ValueError."""
    analyzer = analyzer_cli.DebugAnalyzer(self._debug_dump)

    analyzer.add_tensor_filter("foo_filter", lambda datum, tensor: True)
    with self.assertRaisesRegexp(ValueError,
                                 "There is no tensor filter named \"bar\""):
      analyzer.get_tensor_filter("bar")
class AnalyzerCLIPrintLargeTensorTest(test_util.TensorFlowTestCase):
  """Tests of print_tensor on tensors above the element-count threshold.

  The dumped constant has 2400 elements; print_tensor abbreviates such
  values with ellipses unless -a / --all is given.
  """

  @classmethod
  def setUpClass(cls):
    """Dump a 300x8 zeros constant and register the print_tensor handler."""
    cls._dump_root = tempfile.mkdtemp()

    with session.Session() as sess:
      # 2400 elements should exceed the default threshold (2000).
      x = constant_op.constant(np.zeros([300, 8]), name="large_tensors/x")

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options,
          sess.graph,
          debug_ops=["DebugIdentity"],
          debug_urls="file://%s" % cls._dump_root)

      # Invoke Session.run().
      run_metadata = config_pb2.RunMetadata()
      sess.run(x, options=run_options, run_metadata=run_metadata)

    cls._debug_dump = debug_data.DebugDumpDir(
        cls._dump_root, partition_graphs=run_metadata.partition_graphs)

    # Construct the analyzer.
    cls._analyzer = analyzer_cli.DebugAnalyzer(cls._debug_dump)

    # Construct the handler registry.
    cls._registry = debugger_cli_common.CommandHandlerRegistry()

    # Register command handler.
    cls._registry.register_command_handler(
        "print_tensor",
        cls._analyzer.print_tensor,
        cls._analyzer.get_help("print_tensor"),
        prefix_aliases=["pt"])

  @classmethod
  def tearDownClass(cls):
    # Tear down temporary dump directory.
    shutil.rmtree(cls._dump_root)

  def testPrintLargeTensorWithoutAllOption(self):
    """Without -a, the large-tensor printout is abbreviated with ellipses."""
    out = self._registry.dispatch_command(
        "print_tensor", ["large_tensors/x:0"], screen_info={"cols": 80})

    # Assert that ellipses are present in the tensor value printout.
    self.assertIn("...,", out.lines[4])

    # 2100 still exceeds 2000.
    out = self._registry.dispatch_command(
        "print_tensor", ["large_tensors/x:0[:, 0:7]"],
        screen_info={"cols": 80})

    self.assertIn("...,", out.lines[4])

  def testPrintLargeTensorWithAllOption(self):
    """With -a / --all, the full tensor value is printed without ellipses."""
    out = self._registry.dispatch_command(
        "print_tensor", ["large_tensors/x:0", "-a"],
        screen_info={"cols": 80})

    # Assert that ellipses are not present in the tensor value printout.
    self.assertNotIn("...,", out.lines[4])

    out = self._registry.dispatch_command(
        "print_tensor", ["large_tensors/x:0[:, 0:7]", "--all"],
        screen_info={"cols": 80})

    self.assertNotIn("...,", out.lines[4])
class AnalyzerCLIControlDepTest(test_util.TensorFlowTestCase):
  """Tests of node_info / list_inputs / list_outputs with control deps.

  The graph computes z = x * y (y = x + x), wrapping y and z in
  with_dependencies so that ctrl_dep_y and ctrl_dep_z carry explicit
  control inputs on x (and y).
  """

  @classmethod
  def setUpClass(cls):
    """Run the control-dependency graph once and register the handlers."""
    cls._dump_root = tempfile.mkdtemp()

    cls._is_gpu_available = test.is_gpu_available()
    if cls._is_gpu_available:
      cls._main_device = "/job:localhost/replica:0/task:0/gpu:0"
    else:
      cls._main_device = "/job:localhost/replica:0/task:0/cpu:0"

    with session.Session() as sess:
      x_init_val = np.array([5.0, 3.0])
      x_init = constant_op.constant(x_init_val, shape=[2])
      x = variables.Variable(x_init, name="control_deps/x")

      y = math_ops.add(x, x, name="control_deps/y")
      y = control_flow_ops.with_dependencies(
          [x], y, name="control_deps/ctrl_dep_y")

      z = math_ops.mul(x, y, name="control_deps/z")

      z = control_flow_ops.with_dependencies(
          [x, y], z, name="control_deps/ctrl_dep_z")

      x.initializer.run()

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options,
          sess.graph,
          debug_ops=["DebugIdentity"],
          debug_urls="file://%s" % cls._dump_root)

      # Invoke Session.run().
      run_metadata = config_pb2.RunMetadata()
      sess.run(z, options=run_options, run_metadata=run_metadata)

    debug_dump = debug_data.DebugDumpDir(
        cls._dump_root, partition_graphs=run_metadata.partition_graphs)

    # Construct the analyzer.
    analyzer = analyzer_cli.DebugAnalyzer(debug_dump)

    # Construct the handler registry.
    cls._registry = debugger_cli_common.CommandHandlerRegistry()

    # Register command handlers.
    cls._registry.register_command_handler(
        "node_info",
        analyzer.node_info,
        analyzer.get_help("node_info"),
        prefix_aliases=["ni"])
    cls._registry.register_command_handler(
        "list_inputs",
        analyzer.list_inputs,
        analyzer.get_help("list_inputs"),
        prefix_aliases=["li"])
    cls._registry.register_command_handler(
        "list_outputs",
        analyzer.list_outputs,
        analyzer.get_help("list_outputs"),
        prefix_aliases=["lo"])

  @classmethod
  def tearDownClass(cls):
    # Tear down temporary dump directory.
    shutil.rmtree(cls._dump_root)

  def testNodeInfoWithControlDependencies(self):
    """node_info lists control inputs/recipients alongside data ones."""
    # Call node_info on a node with control inputs.
    out = self._registry.dispatch_command("node_info",
                                          ["control_deps/ctrl_dep_y"])

    assert_node_attribute_lines(
        self, out, "control_deps/ctrl_dep_y", "Identity",
        self._main_device, [("Add", "control_deps/y")],
        [("Variable", "control_deps/x")],
        [("Mul", "control_deps/z")],
        [("Identity", "control_deps/ctrl_dep_z")])

    # Call node info on a node with control recipients.
    out = self._registry.dispatch_command("ni", ["control_deps/x"])

    assert_node_attribute_lines(self, out, "control_deps/x", "Variable",
                                self._main_device, [], [],
                                [("Identity", "control_deps/x/read")],
                                [("Identity", "control_deps/ctrl_dep_y"),
                                 ("Identity", "control_deps/ctrl_dep_z")])

  def testListInputsNonRecursiveNoControl(self):
    """List inputs non-recursively, without any control inputs."""
    # Do not include node op types.
    out = self._registry.dispatch_command("list_inputs", ["control_deps/z"])

    self.assertEqual([
        "Inputs to node \"control_deps/z\" (Depth limit = 1):",
        "|- (1) control_deps/x/read",
        "|  |- ...",
        "|- (1) control_deps/ctrl_dep_y",
        "   |- ...",
        "", "Legend:", "  (d): recursion depth = d."], out.lines)

    # Include node op types.
    out = self._registry.dispatch_command("li", ["-t", "control_deps/z"])

    self.assertEqual([
        "Inputs to node \"control_deps/z\" (Depth limit = 1):",
        "|- (1) [Identity] control_deps/x/read",
        "|  |- ...",
        "|- (1) [Identity] control_deps/ctrl_dep_y",
        "   |- ...",
        "", "Legend:", "  (d): recursion depth = d.",
        "  [Op]: Input node has op type Op."], out.lines)

  def testListInputsNonRecursiveNoControlUsingTensorName(self):
    """List inputs using the name of an output tensor of the node."""
    # Do not include node op types.
    out = self._registry.dispatch_command("list_inputs", ["control_deps/z:0"])

    self.assertEqual([
        "Inputs to node \"control_deps/z\" (Depth limit = 1):",
        "|- (1) control_deps/x/read",
        "|  |- ...",
        "|- (1) control_deps/ctrl_dep_y",
        "   |- ...",
        "", "Legend:", "  (d): recursion depth = d."], out.lines)

  def testListInputsNonRecursiveWithControls(self):
    """List inputs non-recursively, with control inputs."""
    out = self._registry.dispatch_command(
        "li", ["-t", "control_deps/ctrl_dep_z", "-c"])

    self.assertEqual([
        "Inputs to node \"control_deps/ctrl_dep_z\" (Depth limit = 1, "
        "control inputs included):",
        "|- (1) [Mul] control_deps/z",
        "|  |- ...",
        "|- (1) (Ctrl) [Identity] control_deps/ctrl_dep_y",
        "|  |- ...",
        "|- (1) (Ctrl) [Variable] control_deps/x",
        "", "Legend:", "  (d): recursion depth = d.",
        "  (Ctrl): Control input.",
        "  [Op]: Input node has op type Op."], out.lines)

  def testListInputsRecursiveWithControls(self):
    """List inputs recursively, with control inputs."""
    out = self._registry.dispatch_command(
        "li", ["-c", "-r", "-t", "control_deps/ctrl_dep_z"])

    self.assertEqual([
        "Inputs to node \"control_deps/ctrl_dep_z\" (Depth limit = 20, "
        "control inputs included):",
        "|- (1) [Mul] control_deps/z",
        "|  |- (2) [Identity] control_deps/x/read",
        "|  |  |- (3) [Variable] control_deps/x",
        "|  |- (2) [Identity] control_deps/ctrl_dep_y",
        "|     |- (3) [Add] control_deps/y",
        "|     |  |- (4) [Identity] control_deps/x/read",
        "|     |  |  |- (5) [Variable] control_deps/x",
        "|     |  |- (4) [Identity] control_deps/x/read",
        "|     |     |- (5) [Variable] control_deps/x",
        "|     |- (3) (Ctrl) [Variable] control_deps/x",
        "|- (1) (Ctrl) [Identity] control_deps/ctrl_dep_y",
        "|  |- (2) [Add] control_deps/y",
        "|  |  |- (3) [Identity] control_deps/x/read",
        "|  |  |  |- (4) [Variable] control_deps/x",
        "|  |  |- (3) [Identity] control_deps/x/read",
        "|  |     |- (4) [Variable] control_deps/x",
        "|  |- (2) (Ctrl) [Variable] control_deps/x",
        "|- (1) (Ctrl) [Variable] control_deps/x",
        "", "Legend:", "  (d): recursion depth = d.",
        "  (Ctrl): Control input.",
        "  [Op]: Input node has op type Op."], out.lines)

  def testListInputsRecursiveWithControlsWithDepthLimit(self):
    """List inputs recursively, with control inputs and a depth limit."""
    out = self._registry.dispatch_command(
        "li", ["-c", "-r", "-t", "-d", "2", "control_deps/ctrl_dep_z"])

    self.assertEqual([
        "Inputs to node \"control_deps/ctrl_dep_z\" (Depth limit = 2, "
        "control inputs included):",
        "|- (1) [Mul] control_deps/z",
        "|  |- (2) [Identity] control_deps/x/read",
        "|  |  |- ...",
        "|  |- (2) [Identity] control_deps/ctrl_dep_y",
        "|     |- ...",
        "|- (1) (Ctrl) [Identity] control_deps/ctrl_dep_y",
        "|  |- (2) [Add] control_deps/y",
        "|  |  |- ...",
        "|  |- (2) (Ctrl) [Variable] control_deps/x",
        "|- (1) (Ctrl) [Variable] control_deps/x",
        "", "Legend:", "  (d): recursion depth = d.",
        "  (Ctrl): Control input.",
        "  [Op]: Input node has op type Op."], out.lines)

  def testListInputsNodeWithoutInputs(self):
    """List the inputs to a node without any input."""
    out = self._registry.dispatch_command(
        "li", ["-c", "-r", "-t", "control_deps/x"])

    self.assertEqual([
        "Inputs to node \"control_deps/x\" (Depth limit = 20, control inputs "
        "included):",
        "  [None]",
        "", "Legend:", "  (d): recursion depth = d.",
        "  (Ctrl): Control input.",
        "  [Op]: Input node has op type Op."], out.lines)

  def testListInputsNonexistentNode(self):
    """list_inputs on a nonexistent node yields an error message."""
    out = self._registry.dispatch_command(
        "list_inputs", ["control_deps/z/foo"])

    self.assertEqual([
        "ERROR: There is no node named \"control_deps/z/foo\" in the "
        "partition graphs"], out.lines)

  def testListRecipientsRecursiveWithControlsWithDepthLimit(self):
    """List recipients recursively, with control inputs and a depth limit."""
    out = self._registry.dispatch_command(
        "lo", ["-c", "-r", "-t", "-d", "1", "control_deps/x"])

    self.assertEqual([
        "Recipients of node \"control_deps/x\" (Depth limit = 1, control "
        "recipients included):",
        "|- (1) [Identity] control_deps/x/read",
        "|  |- ...",
        "|- (1) (Ctrl) [Identity] control_deps/ctrl_dep_y",
        "|  |- ...",
        "|- (1) (Ctrl) [Identity] control_deps/ctrl_dep_z",
        "", "Legend:", "  (d): recursion depth = d.",
        "  (Ctrl): Control input.",
        "  [Op]: Input node has op type Op."], out.lines)
class AnalyzerCLIWhileLoopTest(test_util.TensorFlowTestCase):
  """Tests of print_tensor on a tensor dumped multiple times.

  A tf.while_loop counting 0..9 dumps "while/Identity:0" once per
  iteration, producing 10 dumps of the same tensor.
  """

  @classmethod
  def setUpClass(cls):
    """Run the while loop with a single explicit tensor watch."""
    cls._dump_root = tempfile.mkdtemp()

    with session.Session() as sess:
      loop_var = constant_op.constant(0, name="while_loop_test/loop_var")
      cond = lambda loop_var: math_ops.less(loop_var, 10)
      body = lambda loop_var: math_ops.add(loop_var, 1)
      while_loop = control_flow_ops.while_loop(
          cond, body, [loop_var], parallel_iterations=1)

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_url = "file://%s" % cls._dump_root

      watch_opts = run_options.debug_options.debug_tensor_watch_opts

      # Add debug tensor watch for "while/Identity".
      watch = watch_opts.add()
      watch.node_name = "while/Identity"
      watch.output_slot = 0
      watch.debug_ops.append("DebugIdentity")
      watch.debug_urls.append(debug_url)

      # Invoke Session.run().
      run_metadata = config_pb2.RunMetadata()
      sess.run(while_loop, options=run_options, run_metadata=run_metadata)

    cls._debug_dump = debug_data.DebugDumpDir(
        cls._dump_root, partition_graphs=run_metadata.partition_graphs)

    cls._analyzer = analyzer_cli.DebugAnalyzer(cls._debug_dump)
    cls._registry = debugger_cli_common.CommandHandlerRegistry()
    cls._registry.register_command_handler(
        "list_tensors",
        cls._analyzer.list_tensors,
        cls._analyzer.get_help("list_tensors"),
        prefix_aliases=["lt"])
    cls._registry.register_command_handler(
        "print_tensor",
        cls._analyzer.print_tensor,
        cls._analyzer.get_help("print_tensor"),
        prefix_aliases=["pt"])

  @classmethod
  def tearDownClass(cls):
    # Tear down temporary dump directory.
    shutil.rmtree(cls._dump_root)

  def testMultipleDumpsPrintTensorNoNumber(self):
    """Without -n, a multi-dump tensor prints the list of available dumps."""
    output = self._registry.dispatch_command("pt", ["while/Identity:0"])

    self.assertEqual("Tensor \"while/Identity:0\" generated 10 dumps:",
                     output.lines[0])

    for i in xrange(10):
      self.assertTrue(output.lines[i + 1].startswith("#%d" % i))
      self.assertTrue(output.lines[i + 1].endswith(
          " ms] while/Identity:0:DebugIdentity"))

    self.assertEqual(
        "Use the -n (--number) flag to specify which dump to print.",
        output.lines[-3])
    self.assertEqual("For example:", output.lines[-2])
    self.assertEqual("  print_tensor while/Identity:0 -n 0", output.lines[-1])

  def testMultipleDumpsPrintTensorWithNumber(self):
    """With -n i, the i-th dump (loop value i) is printed."""
    for i in xrange(5):
      output = self._registry.dispatch_command(
          "pt", ["while/Identity:0", "-n", "%d" % i])

      self.assertEqual("Tensor \"while/Identity:0:DebugIdentity (dump #%d)\":" %
                       i, output.lines[0])
      self.assertEqual("  dtype: int32", output.lines[1])
      self.assertEqual("  shape: ()", output.lines[2])
      self.assertEqual("", output.lines[3])
      self.assertEqual("array(%d, dtype=int32)" % i, output.lines[4])

  def testMultipleDumpsPrintTensorInvalidNumber(self):
    """An out-of-range -n value yields an error message."""
    output = self._registry.dispatch_command("pt",
                                             ["while/Identity:0", "-n", "10"])

    self.assertEqual([
        "ERROR: Specified number (10) exceeds the number of available dumps "
        "(10) for tensor while/Identity:0"
    ], output.lines)
if __name__ == "__main__":
googletest.main()
|
{
"content_hash": "97d9752d08ab31d843457a09a60763f5",
"timestamp": "",
"source": "github",
"line_count": 1055,
"max_line_length": 80,
"avg_line_length": 36.32037914691943,
"alnum_prop": 0.598074012213581,
"repo_name": "AndreasMadsen/tensorflow",
"id": "7d409aeccad445c6e91fac1ca01c8ef5fe9c4915",
"size": "39007",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/debug/cli/analyzer_cli_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6963"
},
{
"name": "C",
"bytes": "118477"
},
{
"name": "C++",
"bytes": "14646706"
},
{
"name": "CMake",
"bytes": "111373"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "96398"
},
{
"name": "HTML",
"bytes": "534568"
},
{
"name": "Java",
"bytes": "179112"
},
{
"name": "JavaScript",
"bytes": "13406"
},
{
"name": "Jupyter Notebook",
"bytes": "1833491"
},
{
"name": "Makefile",
"bytes": "23553"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64592"
},
{
"name": "Protocol Buffer",
"bytes": "152280"
},
{
"name": "Python",
"bytes": "15069264"
},
{
"name": "Shell",
"bytes": "312259"
},
{
"name": "TypeScript",
"bytes": "761620"
}
],
"symlink_target": ""
}
|
from django.http import HttpRequest
from django.utils.datastructures import MultiValueDict
from django.http.request import QueryDict
from django.conf import settings
def encode_request(request):
    """
    Serialize a Django request into JSON-compatible datastructures.

    GET/POST are flattened via their ``lists()`` representation so that
    repeated keys survive the round-trip; wsgi-specific META entries are
    dropped because they are not serializable.
    """
    # TODO: More stuff
    return {
        "get": dict(request.GET.lists()),
        "post": dict(request.POST.lists()),
        "cookies": request.COOKIES,
        "meta": {key: val for key, val in request.META.items()
                 if not key.startswith("wsgi")},
        "path": request.path,
        "path_info": request.path_info,
        "method": request.method,
        "reply_channel": request.reply_channel,
    }
def decode_request(value):
    """
    Decodes a request JSONish value to a HttpRequest object.
    """
    request = HttpRequest()
    # Query-like dicts go through CustomQueryDict to restore multi-values.
    request.GET = CustomQueryDict(value['get'])
    request.POST = CustomQueryDict(value['post'])
    # The remaining fields are plain attribute copies.
    for attr, key in [
        ("COOKIES", "cookies"),
        ("META", "meta"),
        ("path", "path"),
        ("method", "method"),
        ("path_info", "path_info"),
        ("reply_channel", "reply_channel"),
    ]:
        setattr(request, attr, value[key])
    return request
class CustomQueryDict(QueryDict):
    """
    Custom override of QueryDict that sets things directly.

    Instead of going through QueryDict.__init__ (which parses a raw
    querystring), the instance is populated straight from an
    already-decoded mapping via MultiValueDict.__init__.
    """
    def __init__(self, values):
        # NOTE(review): QueryDict.__init__ is intentionally bypassed here;
        # any state it normally sets up (e.g. mutability/encoding) is
        # presumably left at class defaults -- confirm against the Django
        # version in use.
        MultiValueDict.__init__(self, values)
|
{
"content_hash": "a1658912699a3dd851d2dc40a6020996",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 84,
"avg_line_length": 29.06382978723404,
"alnum_prop": 0.6493411420204978,
"repo_name": "octaflop/channels",
"id": "1c975ef4f99a2b728e8a2e43d1cc1e2d8a716266",
"size": "1366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "channels/request.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "57791"
}
],
"symlink_target": ""
}
|
import tornado.web
from sandstone.lib.db import sandstone_db
class DBHandler(tornado.web.RequestHandler):
    """Request handler that exposes the shared sandstone DB as ``self.db``.

    The connection is opened in ``prepare`` (called by Tornado before the
    HTTP verb method) and closed in ``on_finish`` (called after the
    response has been sent).
    """

    def prepare(self):
        # Open the shared database handle for the duration of this request.
        self.db = sandstone_db
        self.db.connect()
        return super(DBHandler, self).prepare()

    def on_finish(self):
        # Close the connection unless something already closed it.
        if not self.db.is_closed():
            self.db.close()
        return super(DBHandler, self).on_finish()
|
{
"content_hash": "ceaae0193dc728c1e441aa0c68128725",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 49,
"avg_line_length": 25.133333333333333,
"alnum_prop": 0.6392572944297082,
"repo_name": "SandstoneHPC/OIDE",
"id": "26bd3360ebb77bb2233de62e163e0ae8dcfd78ae",
"size": "377",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sandstone/lib/db/mixins.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9804"
},
{
"name": "HTML",
"bytes": "37057"
},
{
"name": "JavaScript",
"bytes": "159011"
},
{
"name": "Python",
"bytes": "93507"
},
{
"name": "Smarty",
"bytes": "531"
}
],
"symlink_target": ""
}
|
from sqlalchemy import MetaData
# Tables whose storage engine this migration is allowed to convert.
tables = ['image_locations']


def upgrade(migrate_engine):
    """Convert the whitelisted tables to the InnoDB storage engine.

    Only acts on MySQL; other backends are a no-op. Table names coming back
    from ``SHOW TABLE STATUS`` are only used if they appear in the ``tables``
    whitelist above, so the interpolated ALTER statement cannot receive an
    arbitrary name.

    :param migrate_engine: SQLAlchemy engine bound to the target database.
    """
    # Guard clause: nothing to do for non-MySQL backends.
    if migrate_engine.name != "mysql":
        return

    result = migrate_engine.execute("SHOW TABLE STATUS WHERE Engine!='InnoDB';")
    for row in result.fetchall():
        table_name = row[0]
        if table_name in tables:
            migrate_engine.execute("ALTER TABLE %s Engine=InnoDB" %
                                   table_name)
def downgrade(migrate_engine):
    """Downgrade is a no-op: the storage-engine change is not reverted."""
    pass
|
{
"content_hash": "37c52f85543fac0388e8ada9071597b5",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 79,
"avg_line_length": 28,
"alnum_prop": 0.5845864661654135,
"repo_name": "rickerc/glance_audit",
"id": "7d341251ea3cc12f205598f6c3eb127afe2dd117",
"size": "1240",
"binary": false,
"copies": "5",
"ref": "refs/heads/cis-havana-staging",
"path": "glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2566593"
},
{
"name": "Shell",
"bytes": "3488"
}
],
"symlink_target": ""
}
|
import random
# Default magnitude bound for generated values. Kept as an int because
# random.randint/randrange reject non-integer arguments (TypeError on
# Python >= 3.10); the old value 1e6 was a float.
max_num = 10 ** 6


def gen_test(n=1000, max_num=max_num):
    """Generate a list of n random integers, each within +/- max_num.

    :param n: number of items to generate (coerced with int()).
    :param max_num: inclusive magnitude bound (coerced with int() so that
        float bounds passed by callers keep working).
    :return: list of n ints in [-max_num, max_num].
    """
    n = int(n)
    max_num = int(max_num)  # randint requires integer endpoints
    return [random.randint(-max_num, max_num) for _ in range(n)]
def verify_sort(data):
    """Return True iff data is sorted in non-decreasing (ascending) order."""
    # Compare each element with its successor; vacuously True for
    # empty and single-element inputs.
    return all(a <= b for a, b in zip(data, data[1:]))
|
{
"content_hash": "ff12b8c43124ce9257b2bfc2ac39e7d7",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 79,
"avg_line_length": 25.764705882352942,
"alnum_prop": 0.6095890410958904,
"repo_name": "JasonVann/CLRS",
"id": "f5b3c769383d90428d4e30bc9fb2bb18fff3fe11",
"size": "438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "S1_Foundation/C2_GettingStarted/test_sort.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60634"
}
],
"symlink_target": ""
}
|
"""
Code used for both Travis and local (deploy and configure)
"""
# Color guide
#
# - red: Error and warning messages
# - green: Welcome messages (use sparingly)
# - yellow: warning message (only use on Travis)
# - blue: Default values
# - bold_magenta: Action items
# - bold_black: Parts of code to be run or copied that should be modified
def red(text):
    """Color *text* red (used for error and warning messages)."""
    template = "\033[31m%s\033[0m"
    return template % text
def green(text):
    """Color *text* green (used sparingly, for welcome messages)."""
    template = "\033[32m%s\033[0m"
    return template % text
def yellow(text):
    """Color *text* yellow (warning messages, only used on Travis)."""
    template = "\033[33m%s\033[0m"
    return template % text
def blue(text):
    """Color *text* blue (used for default values)."""
    template = "\033[34m%s\033[0m"
    return template % text
def bold_black(text):
    """Render *text* bold black (code to be run/copied that needs editing)."""
    template = "\033[1;30m%s\033[0m"
    return template % text
def bold_magenta(text):
    """Render *text* bold magenta (used for action items)."""
    template = "\033[1;35m%s\033[0m"
    return template % text
def bold(text):
    """Render *text* in bold."""
    template = "\033[1m%s\033[0m"
    return template % text
# Use these when coloring individual parts of a larger string, e.g.,
# "{BOLD_MAGENTA}Bright text{RESET} normal text".format(BOLD_MAGENTA=BOLD_MAGENTA, RESET=RESET)
BOLD_BLACK = "\033[1;30m"
BOLD_MAGENTA = "\033[1;35m"
RESET = "\033[0m"
# Remove whitespace on inputs
_input = input


def input(prompt=None):
    """Prompt for a line of input and return it with whitespace stripped.

    Only forwards ``prompt`` to the builtin when one was actually given:
    the builtin writes ``str(prompt)`` for any prompt object passed,
    including None, so the previous unconditional ``_input(prompt)`` printed
    a literal "None" prompt when this wrapper was called with no argument.
    """
    if prompt is None:
        res = _input()
    else:
        res = _input(prompt)
    return res.strip()
|
{
"content_hash": "aaab1cd4d014330a58a6e1588de859d6",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 95,
"avg_line_length": 24,
"alnum_prop": 0.6702898550724637,
"repo_name": "drdoctr/doctr",
"id": "f5736c141df09c6295ed0e5c539d0aa98889ddbb",
"size": "1104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doctr/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "163003"
}
],
"symlink_target": ""
}
|
from gluon import *
from s3 import *
from s3layouts import *
try:
from .layouts import *
except ImportError:
pass
import s3menus as default
# =============================================================================
class S3MainMenu(default.S3MainMenu):
    """ Custom Application Main Menu """
    # -------------------------------------------------------------------------
    @classmethod
    def menu_modules(cls):
        """ Custom Modules Menu """
        # Each MM(...) entry names a menu label plus target kwargs; c/f
        # presumably map to controller/function URLs and "vars" adds URL
        # query variables -- framework (s3layouts) behavior, confirm there.
        # MM("More", link=False)(...) nests a submenu via the call syntax.
        return [
            MM("News", c="cms", f="newsfeed", args="datalist",
               icon="icon-news",
               ),
            MM("Current Needs", c="req", f="site_needs"),
            MM("Facilities", c="org", f="facility"),
            MM("Organizations", c="org", f="organisation"),
            homepage("gis"),
            MM("More", link=False)(
                MM("Requests", c="req", f="req", vars = {"type": "1"}),
                homepage("inv"),
                homepage("hrm"),
                homepage("vol"),
                homepage("project"),
                #MM("Missing Persons", c="mpr", f="person"),
            ),
        ]
# =============================================================================
class S3OptionsMenu(default.S3OptionsMenu):
    """ Custom Application Side Menu """
    # -------------------------------------------------------------------------
    @staticmethod
    def org():
        """ ORG / Organization Registry """
        # restrict=[ADMIN] entries presumably render only for admin users --
        # framework behavior, confirm in s3layouts.
        ADMIN = current.session.s3.system_roles.ADMIN
        # Deployment setting picks the UN "Clusters" label over "Sectors".
        SECTORS = "Clusters" if current.deployment_settings.get_ui_label_cluster() \
                             else "Sectors"
        return M(c="org")(
                    M("Organizations", f="organisation")(
                        M("Create", m="create"),
                        M("Import", m="import")
                    ),
                    M("Facilities", f="facility")(
                        M("Create", m="create"),
                        M("Map", m="map"),
                        M("Import", m="import"),
                    ),
                    M("Offices", f="office")(
                        M("Create", m="create"),
                        M("Map", m="map"),
                        M("Import", m="import")
                    ),
                    M("Resources", f="resource", m="summary")(
                        M("Create", m="create"),
                        M("Import", m="import")
                    ),
                    M("Organization Types", f="organisation_type",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Office Types", f="office_type",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Facility Types", f="facility_type",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M(SECTORS, f="sector", restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                )
    # -------------------------------------------------------------------------
    @staticmethod
    def project():
        """ PROJECT / Project Tracking & Management """
        ADMIN = current.session.s3.system_roles.ADMIN
        menu = M(c="project")(
             M("Projects", f="project", m="summary")(
                M("Create", m="create"),
             ),
             M("Locations", f="location")(
                M("Map", m="map"),
                M("Contacts", f="location_contact"),
             ),
             # "3W" = Who does What Where report on project locations.
             M("Reports", f="location", m="report")(
                M("3W", f="location", m="report"),
                M("Beneficiaries", f="beneficiary", m="report"),
                #M("Indicators", f="indicator", m="report",
                #  check=indicators,
                #  ),
                #M("Indicators over Time", f="indicator", m="timeplot",
                #  check=indicators,
                #  ),
                M("Funding", f="organisation", m="report"),
             ),
             M("Import", f="project", m="import", p="create", restrict=[ADMIN])(
                M("Import Projects", m="import", p="create"),
                M("Import Project Organizations", f="organisation",
                  m="import", p="create"),
                M("Import Project Communities", f="location",
                  m="import", p="create"),
             ),
             M("Activity Types", f="activity_type", restrict=[ADMIN])(
                M("Create", m="create"),
             ),
             M("Beneficiary Types", f="beneficiary_type", restrict=[ADMIN])(
                M("Create", m="create"),
             ),
             M("Sectors", f="sector", restrict=[ADMIN])(
                M("Create", m="create"),
             ),
             M("Themes", f="theme", restrict=[ADMIN])(
                M("Create", m="create"),
             ),
            )
        return menu
    # -------------------------------------------------------------------------
    @staticmethod
    def req():
        """ REQ / Request Management """
        # This menu is hidden entirely for anonymous visitors.
        if not current.auth.s3_logged_in():
            return None
        ADMIN = current.session.s3.system_roles.ADMIN
        settings = current.deployment_settings
        types = settings.get_req_req_type()
        # When only one request type is configured, pre-filter menu links to
        # it: "1" appears to mean Stock and "2" People -- confirm against the
        # req module's type constants.
        get_vars = {}
        if len(types) == 1:
            t = types[0]
            if t == "Stock":
                get_vars = {"type": "1"}
            elif t == "People":
                get_vars = {"type": "2"}
        create_menu = M("Create", m="create", vars=get_vars)
        # check=... callables are evaluated per-request to show/hide entries.
        recurring = lambda i: settings.get_req_recurring()
        use_commit = lambda i: settings.get_req_use_commit()
        req_items = lambda i: "Stock" in types
        req_skills = lambda i: "People" in types
        return M(c="req")(
                    M("Current Needs", f="site_needs")(
                        M("Create", m="create"),
                    ),
                    M("Requests", f="req", vars=get_vars)(
                        create_menu,
                        M("List Recurring Requests", f="req_template", check=recurring),
                        M("Map", m="map"),
                        M("Report", m="report"),
                        M("Search All Requested Items", f="req_item",
                          check=req_items),
                        M("Search All Requested Skills", f="req_skill",
                          check=req_skills),
                    ),
                    M("Commitments", f="commit", check=use_commit)(
                    ),
                    M("Items", c="supply", f="item")(
                        M("Create", m="create"),
                        M("Report", m="report"),
                        M("Import", m="import", p="create"),
                    ),
                    # Catalog Items moved to be next to the Item Categories
                    #M("Catalog Items", c="supply", f="catalog_item")(
                    #M("Create", m="create"),
                    #),
                    M("Catalogs", c="supply", f="catalog")(
                        M("Create", m="create"),
                    ),
                    M("Item Categories", c="supply", f="item_category",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                )
# END =========================================================================
|
{
"content_hash": "970d4b561f239d32330f054952f1c8a8",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 88,
"avg_line_length": 38.728205128205126,
"alnum_prop": 0.375926906779661,
"repo_name": "anurag-ks/eden",
"id": "721c948026084afea6576ff1d7d708ddc5bbb91b",
"size": "7577",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "modules/templates/RW/menus.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "727"
},
{
"name": "CSS",
"bytes": "2378623"
},
{
"name": "HTML",
"bytes": "1343047"
},
{
"name": "JavaScript",
"bytes": "20063127"
},
{
"name": "NSIS",
"bytes": "3934"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "30127285"
},
{
"name": "Ruby",
"bytes": "3611"
},
{
"name": "Shell",
"bytes": "5057"
},
{
"name": "XSLT",
"bytes": "2975882"
}
],
"symlink_target": ""
}
|
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models import SQLTestCase
from mpp.lib.gpConfig import GpConfig
from mpp.lib.gpstop import GpStop
import getpass
import os
class PartitionDDLTests(SQLTestCase):
    """
    @product_version gpdb: [4.3-]
    """
    # Directories (relative to this test module) holding the SQL inputs and
    # their expected outputs.
    sql_dir = 'sql/'
    ans_dir = 'expected/'
    @classmethod
    def setUpClass(cls):
        """
        This suite runs \d on tables, which lists each table's tablespace,
        and used to fail when another suite running alongside had done
        "gpconfig -c default_tablespace".  Clear default_tablespace up front
        (and restart the cluster so it takes effect) to avoid such failures.
        """
        super(PartitionDDLTests, cls).setUpClass()
        GpConfig().removeParameter('default_tablespace')
        GpStop().run_gpstop_cmd(restart = True)
    def get_substitutions(self):
        """Return the placeholder -> value map applied to the SQL files."""
        source_dir = self.get_source_dir()
        user = getpass.getuser()
        return {
            '%PATH%': source_dir,
            '%USER%': user,
            '@out_dir@': self.get_out_dir(),
            "@abs_srcdir@": source_dir,
            "@DBNAME@": os.environ.get('PGDATABASE', getpass.getuser()),
        }
|
{
"content_hash": "55353c2fc3ce65ec506a567befeb5dd6",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 115,
"avg_line_length": 34.107142857142854,
"alnum_prop": 0.6832460732984293,
"repo_name": "xuegang/gpdb",
"id": "c965940e7d35c4a1236e07237151d16c2fbf2611",
"size": "1910",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/test/tinc/tincrepo/mpp/gpdb/tests/storage/basic/partition/test_partition_ddl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5614"
},
{
"name": "Batchfile",
"bytes": "11028"
},
{
"name": "C",
"bytes": "35361773"
},
{
"name": "C++",
"bytes": "8133472"
},
{
"name": "CMake",
"bytes": "47394"
},
{
"name": "CSS",
"bytes": "7068"
},
{
"name": "Csound Score",
"bytes": "179"
},
{
"name": "Cucumber",
"bytes": "927827"
},
{
"name": "DTrace",
"bytes": "1160"
},
{
"name": "FORTRAN",
"bytes": "14777"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Groff",
"bytes": "703079"
},
{
"name": "HTML",
"bytes": "218703"
},
{
"name": "Java",
"bytes": "1011277"
},
{
"name": "Lex",
"bytes": "210708"
},
{
"name": "M4",
"bytes": "106028"
},
{
"name": "Makefile",
"bytes": "497542"
},
{
"name": "Objective-C",
"bytes": "24186"
},
{
"name": "PLSQL",
"bytes": "190951"
},
{
"name": "PLpgSQL",
"bytes": "53337057"
},
{
"name": "Perl",
"bytes": "4082990"
},
{
"name": "Perl6",
"bytes": "14219"
},
{
"name": "Python",
"bytes": "9782036"
},
{
"name": "Ruby",
"bytes": "3301"
},
{
"name": "SQLPL",
"bytes": "1892720"
},
{
"name": "Shell",
"bytes": "504084"
},
{
"name": "XS",
"bytes": "8309"
},
{
"name": "XSLT",
"bytes": "5779"
},
{
"name": "Yacc",
"bytes": "485235"
}
],
"symlink_target": ""
}
|
import importlib
import sys
import warnings
from inspect import isabstract
from unittest import mock
import pytest
from airflow.models.baseoperator import BaseOperator
from tests.deprecated_classes import ALL, RENAMED_ALL
class TestDeprecations:
    """Verify that every renamed/moved class keeps a working deprecation shim."""
    @staticmethod
    def assert_warning(msg: str, warnings):
        # `warnings` here is the recorded-warnings list, not the stdlib module.
        error = f"Text '{msg}' not in warnings"
        assert any(msg in str(w) for w in warnings), error
    def assert_is_subclass(self, clazz, other):
        assert issubclass(clazz, other), f"{clazz} is not subclass of {other}"
    def assert_proper_import(self, old_resource, new_resource):
        # Importing the deprecated module must emit a warning that mentions
        # the replacement module path.
        new_path, _, _ = new_resource.rpartition(".")
        old_path, _, _ = old_resource.rpartition(".")
        with pytest.warns(DeprecationWarning) as warnings:
            # Reload to see deprecation warning each time
            importlib.reload(importlib.import_module(old_path))
        self.assert_warning(new_path, warnings)
    def skip_test_with_mssql_in_py38(self, path_a="", path_b=""):
        # mssql provider paths cannot be imported on 3.8+, so skip them.
        py_38 = sys.version_info >= (3, 8)
        if py_38:
            if "mssql" in path_a or "mssql" in path_b:
                raise pytest.skip("Mssql package not available when Python >= 3.8.")
    @staticmethod
    def get_class_from_path(path_to_class, parent=False):
        """
        Import and return the class at the given dotted path.

        Abstract classes (unless requested as a parent) are wrapped in a
        dynamically built concrete subclass whose abstract methods are
        MagicMocks, so they can be instantiated by the tests.

        :param path_to_class: the path to the class
        :param parent: indicates if "path_to_class" arg is super class
        """
        path, _, class_name = path_to_class.rpartition(".")
        module = importlib.import_module(path)
        class_ = getattr(module, class_name)
        if isabstract(class_) and not parent:
            class_name = f"Mock({class_.__name__})"
            attributes = {a: mock.MagicMock() for a in class_.__abstractmethods__}
            new_class = type(class_name, (class_,), attributes)
            return new_class
        return class_
    @pytest.mark.parametrize("new_module, old_module", RENAMED_ALL)
    def test_is_class_deprecated(self, new_module, old_module):
        self.skip_test_with_mssql_in_py38(new_module, old_module)
        deprecation_warning_msg = "This class is deprecated."
        with pytest.warns(DeprecationWarning, match=deprecation_warning_msg) as warnings:
            old_module_class = self.get_class_from_path(old_module)
            # Drop any warnings raised during import; only instantiation matters.
            warnings.clear()
            with mock.patch(f"{new_module}.__init__") as init_mock:
                init_mock.return_value = None
                klass = old_module_class()
                if isinstance(klass, BaseOperator):
                    # In case of operators we are validating that proper stacklevel
                    # is used (=3)
                    assert len(warnings) >= 1
                    # For nicer error reporting from pytest, create a static
                    # list of filenames
                    files = [warning.filename for warning in warnings]
                    assert __file__ in files, old_module
        # Instantiating the deprecated class must delegate to the new one.
        init_mock.assert_called_once()
    @pytest.mark.parametrize("parent_class_path, sub_class_path", ALL)
    def test_is_subclass(self, parent_class_path, sub_class_path):
        self.skip_test_with_mssql_in_py38(parent_class_path, sub_class_path)
        with mock.patch(f"{parent_class_path}.__init__"), warnings.catch_warnings(record=True):
            parent_class_path = self.get_class_from_path(parent_class_path, parent=True)
            sub_class_path = self.get_class_from_path(sub_class_path)
            self.assert_is_subclass(sub_class_path, parent_class_path)
    @pytest.mark.parametrize("new_path, old_path", ALL)
    def test_warning_on_import(self, new_path, old_path):
        self.skip_test_with_mssql_in_py38(new_path, old_path)
        self.assert_proper_import(old_path, new_path)
    def test_no_redirect_to_deprecated_classes(self):
        """
        Ensure no deprecation target is itself deprecated.

        When we have the following items:
        new_A, old_B
        old_B, old_C
        This will tell us to use new_A instead of old_B.
        """
        all_classes_by_old = {old: new for new, old in ALL}
        for new, old in ALL:
            # Using if statement allows us to create a developer-friendly message only when we need it.
            # Otherwise, it wouldn't always be possible - KeyError
            if new in all_classes_by_old:
                raise AssertionError(
                    f'Deprecation "{old}" to "{new}" is incorrect. '
                    f'Please use \"{all_classes_by_old[new]}\" instead of "{old}".'
                )
|
{
"content_hash": "65518499d842d1d9df6a47003682c9e2",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 103,
"avg_line_length": 42.613207547169814,
"alnum_prop": 0.6096967013504538,
"repo_name": "lyft/incubator-airflow",
"id": "d2dbc220b98646d812cc935400bcb2f4fd033a2d",
"size": "5305",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "tests/always/test_deprecations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17280"
},
{
"name": "HTML",
"bytes": "161328"
},
{
"name": "JavaScript",
"bytes": "25360"
},
{
"name": "Jinja",
"bytes": "8565"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10019710"
},
{
"name": "Shell",
"bytes": "220780"
}
],
"symlink_target": ""
}
|
"""
WSGI config for counselling_site project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already set it.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "counselling_site.settings")
# The WSGI callable that application servers (gunicorn, mod_wsgi, ...) look up.
application = get_wsgi_application()
|
{
"content_hash": "ee7d00c2bedb69b27d6e8188bd3e2a10",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 25.5625,
"alnum_prop": 0.7750611246943765,
"repo_name": "oinopion/cardiff-wellbeeing",
"id": "87b4604f120ab1a04734f4d064d1cef1d8cc2a3d",
"size": "409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "counselling_site/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5383"
},
{
"name": "HTML",
"bytes": "6580"
},
{
"name": "JavaScript",
"bytes": "7105"
},
{
"name": "Python",
"bytes": "20984"
}
],
"symlink_target": ""
}
|
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class K8sIoApiCoreV1ObjectFieldSelector(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> declared swagger type (both fields are plain strings).
    swagger_types = {
        'api_version': 'str',
        'field_path': 'str'
    }
    # Python attribute name -> JSON key used on the wire.
    attribute_map = {
        'api_version': 'apiVersion',
        'field_path': 'fieldPath'
    }
    def __init__(self, api_version=None, field_path=None):
        """
        K8sIoApiCoreV1ObjectFieldSelector - a model defined in Swagger
        """
        self._api_version = None
        self._field_path = None
        # api_version is optional; field_path is assigned unconditionally and
        # its setter raises ValueError when it is None (required field).
        if api_version is not None:
          self.api_version = api_version
        self.field_path = field_path
    @property
    def api_version(self):
        """
        Gets the api_version of this K8sIoApiCoreV1ObjectFieldSelector.
        Version of the schema the FieldPath is written in terms of, defaults to \"v1\".

        :return: The api_version of this K8sIoApiCoreV1ObjectFieldSelector.
        :rtype: str
        """
        return self._api_version
    @api_version.setter
    def api_version(self, api_version):
        """
        Sets the api_version of this K8sIoApiCoreV1ObjectFieldSelector.
        Version of the schema the FieldPath is written in terms of, defaults to \"v1\".

        :param api_version: The api_version of this K8sIoApiCoreV1ObjectFieldSelector.
        :type: str
        """
        self._api_version = api_version
    @property
    def field_path(self):
        """
        Gets the field_path of this K8sIoApiCoreV1ObjectFieldSelector.
        Path of the field to select in the specified API version.

        :return: The field_path of this K8sIoApiCoreV1ObjectFieldSelector.
        :rtype: str
        """
        return self._field_path
    @field_path.setter
    def field_path(self, field_path):
        """
        Sets the field_path of this K8sIoApiCoreV1ObjectFieldSelector.
        Path of the field to select in the specified API version.

        :param field_path: The field_path of this K8sIoApiCoreV1ObjectFieldSelector.
        :type: str
        """
        # field_path is a required property in the swagger spec.
        if field_path is None:
            raise ValueError("Invalid value for `field_path`, must not be `None`")
        self._field_path = field_path
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Recursively convert nested models/lists/dicts via their to_dict().
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, K8sIoApiCoreV1ObjectFieldSelector):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
{
"content_hash": "078f3cf025060f3cd543b1cdf864c210",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 87,
"avg_line_length": 28.282894736842106,
"alnum_prop": 0.5666434054431263,
"repo_name": "kubevirt/client-python",
"id": "28e174bcc9020dddb186ac07fda12a0aa35d7e6a",
"size": "4316",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubevirt/models/k8s_io_api_core_v1_object_field_selector.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4224980"
},
{
"name": "Shell",
"bytes": "2209"
}
],
"symlink_target": ""
}
|
__version__ = "2.7.1"  # compared against the '_virtualenv-burrito' row of versions.csv during selfcheck
import codecs
import sys
import os
import csv
import urllib.request, urllib.error, urllib.parse
import shutil
import glob
import tempfile
import platform
# sha256(): hash constructor used to verify downloads (hashlib on any
# supported Python; the `sha` fallback is legacy Python 2 residue).
try:
    import hashlib
    sha256 = hashlib.sha256
except ImportError:  # Python < 2.5
    # FIXME this is totally broken bec. I don't want to support legacy Python
    # Maybe throw an error?
    import sha
    sha256 = sha.new
# sh(cmd): run a command through the shell, returning its exit status.
try:
    import subprocess
    sh = lambda cmd: subprocess.call(cmd, shell=True)
except ImportError:  # Python < 2.4
    sh = os.system
NAME = os.path.basename(__file__)  # how this script refers to itself in messages
VENVBURRITO = os.path.join(os.environ['HOME'], ".venvburrito")  # install root
VENVBURRITO_LIB = os.path.join(VENVBURRITO, "lib")  # per-python site-packages live here
REPO_SOURCE = "skytreader"  # GitHub account hosting the versions manifest
BRANCH = "master"
VERSIONS_URL = "https://raw.githubusercontent.com/%s/virtualenv-burrito/%s/versions.csv" % (REPO_SOURCE, BRANCH)
def get_python_maj_min_str():
    """Return the running interpreter's "major.minor" version, e.g. "3.9"."""
    major, minor = platform.python_version_tuple()[:2]
    return "%s.%s" % (major, minor)
def get_python_lib_paths():
    """Return burrito python lib dirs, preferring site-packages, newest first."""
    paths = []
    for pydir in glob.glob(os.path.join(VENVBURRITO_LIB, "python*")):
        site = os.path.join(pydir, "site-packages")
        paths.append(site if os.path.exists(site) else pydir)
    # Sort descending so the most recent python version comes first.
    return sorted(paths, reverse=True)
def get_installed_version(name):
    """Return the newest installed egg version of `name`, or None if absent."""
    found = []
    for pydir in get_python_lib_paths():
        for egg_path in glob.glob("%s-*.egg*" % os.path.join(pydir, name)):
            egg = os.path.basename(egg_path)
            version_text = egg.split('-')[1]
            # Compare versions numerically, component by component.
            found.append([int(part) for part in version_text.split('.')])
    if found:
        return ".".join(map(str, max(found)))
def download(url, digest):
    """Return the path of a temp file containing the contents of the URL.

    Downloads the data and verifies that its SHA-256 hex digest matches
    `digest` (note: ``sha256`` is used, despite the original docstring
    mentioning SHA1).  Exits the process with status 1 on download failure
    or digest mismatch.  The caller is responsible for removing the file.
    """
    name = url.split('/')[-1]
    print("  Downloading", name, "...")
    try:
        download_data = urllib.request.urlopen(url).read()
    except Exception as e:
        sys.stderr.write("\nERROR - Unable to download %s: %s %s\n"
                         % (url, type(e), str(e)))
        raise SystemExit(1)
    filehash = sha256()
    filehash.update(download_data)
    if filehash.hexdigest() != digest:
        print(("\nThe file %s didn't look like we expected.\n"
               "It may have been moved or tampered with. You should tell me:"
               " @brainsik." % name))
        raise SystemExit(1)
    # delete=False: the file must outlive this function; callers clean it up.
    downloaded_file = tempfile.NamedTemporaryFile("wb", delete=False)
    downloaded_file.write(download_data)
    downloaded_file.close()
    return downloaded_file.name
def drop_startup_sh():
    """Write $VENVBURRITO/startup.sh, the snippet users source from their shell rc.

    The generated script exports WORKON_HOME, prepends the burrito
    site-packages and bin directories to PYTHONPATH/PATH, pins
    VIRTUALENVWRAPPER_PYTHON to the interpreter running this installer, and
    sources virtualenvwrapper.sh.
    """
    # create the startup script
    script = """
export WORKON_HOME="$HOME/.virtualenvs"
export PIP_VIRTUALENV_BASE="$WORKON_HOME"
export PIP_RESPECT_VIRTUALENV=true
venvb_py_path="$HOME/.venvburrito/lib/python%s/site-packages"
if [ -z "$PYTHONPATH" ]; then
export PYTHONPATH="$venvb_py_path"
elif ! echo $PYTHONPATH | grep -q "$venvb_py_path"; then
export PYTHONPATH="$venvb_py_path:$PYTHONPATH"
fi
venvb_bin_path="$HOME/.venvburrito/bin"
if ! echo $PATH | grep -q "$venvb_bin_path"; then
export PATH="$venvb_bin_path:$PATH"
fi
export VIRTUALENVWRAPPER_PYTHON='%s'
. $HOME/.venvburrito/bin/virtualenvwrapper.sh
if ! [ -e $HOME/.venvburrito/.firstrun ]; then
echo
echo "To create a virtualenv, run:"
echo "mkvirtualenv <cool-name>"
touch $HOME/.venvburrito/.firstrun
fi
""" % (get_python_maj_min_str(), sys.executable)
    # Use a context manager so the file is flushed and closed even on error.
    with open(os.path.join(VENVBURRITO, "startup.sh"), 'w') as startup_sh:
        startup_sh.write(script)
def selfupdate(src):
    """Copy src to our destination and exec the new script.

    This function does not return: os.execl replaces the current process
    with the freshly installed script, re-running "upgrade selfupdated".
    """
    dst = os.path.join(VENVBURRITO, "bin", "virtualenv-burrito")
    shutil.copyfile(src, dst)
    os.remove(src)
    os.chmod(dst, 0o755)  # make the installed script executable
    print("  Restarting!\n")
    # Flush before exec so the message is not lost with the old process image.
    sys.stdout.flush()
    os.execl(dst, "virtualenv-burrito", "upgrade", "selfupdated")
def _getcwd():
try:
return os.getcwd()
except OSError:
return None
def __debug_install(cmd):
    """Echo the shell command about to run, then execute it via sh()."""
    banner = "Running: %s" % cmd
    print(banner)
    sh(cmd)
def upgrade_package(filename, name, version):
    """Install Python package in tarball `filename`.

    Unpacks the archive into a temp dir and installs it into the burrito
    prefix via shell commands; setuptools/distribute and pip get special
    bootstrap handling, everything else is installed with burrito's pip3.
    Mutates os.environ['PYTHONPATH'] so the spawned installers can import
    the already-installed burrito packages.
    """
    pyver = "python%s" % get_python_maj_min_str()
    lib_python = os.path.join(VENVBURRITO_LIB, pyver, "site-packages")
    # Expose every burrito lib dir (oldest last) to the child processes.
    pythonpath = ''
    for pydir in reversed(get_python_lib_paths()):
        pythonpath += "%s:" % pydir
    os.environ['PYTHONPATH'] = pythonpath.rstrip(":")
    realname = "%s-%s" % (name, version)
    print("  Installing", realname)
    owd = _getcwd()
    tmp = tempfile.mkdtemp(prefix='venvburrito.')
    try:
        # unpack the zip or tarball
        if name == 'setuptools':
            sh("unzip %s -d %s" % (filename, tmp))
        else:
            sh("tar xfz %s -C %s" % (filename, tmp))
        os.chdir(os.path.join(tmp, realname))
        if name in ['setuptools', 'distribute']:
            # build and install the egg to avoid patching the system
            __debug_install("%s setup.py bdist_egg" % sys.executable)
            egg = glob.glob(os.path.join(os.getcwd(), "dist", "*egg"))[0]
            __debug_install("%s setup.py easy_install --exclude-scripts --install-dir %s %s >/dev/null"
                            % (sys.executable, lib_python, egg))
        elif name == 'pip':
            # pip bootstraps itself with its own setup.py into the prefix.
            libexec = os.path.join(VENVBURRITO, "libexec")
            __debug_install("%s setup.py install --prefix='' --home='%s' --install-lib %s --install-scripts %s --no-compile >/dev/null"
                            % (sys.executable, VENVBURRITO, lib_python, libexec))
        else:
            # Everything else installs with the pip we just bootstrapped.
            pip = os.path.join(VENVBURRITO, "libexec", "pip3")
            __debug_install("%s install --ignore-installed --prefix='%s' ." % (pip, VENVBURRITO))
    finally:
        # Restore the cwd (it may have been deleted; fall back to the prefix)
        # and clean up the unpack dir.
        os.chdir(owd or VENVBURRITO)
        shutil.rmtree(tmp)
def check_versions(selfcheck=True):
    """Return packages which can be upgraded.

    Fetches the CSV manifest at VERSIONS_URL (rows: name, version, url,
    digest) and compares each row against the locally installed version.
    Rows for '_virtualenv-burrito' describe this script itself and are only
    considered when `selfcheck` is true.  Exits with status 1 when the
    manifest cannot be fetched.
    """
    try:
        fp = urllib.request.urlopen(VERSIONS_URL)
    except Exception as e:
        sys.stderr.write("\nERROR - Couldn't open versions file at %s: %s %s\n"
                         % (VERSIONS_URL, type(e), str(e)))
        raise SystemExit(1)
    reader = list(csv.reader(codecs.iterdecode(fp, "utf-8")))
    has_update = []
    for name, version, url, digest in reader:
        if name == '_virtualenv-burrito':
            if not selfcheck:
                continue
            name = NAME
            current = __version__
        else:
            current = get_installed_version(name)
        if not current or version != current:
            print("+ %s will upgrade (%s -> %s)" % (name, current, version))
            has_update.append((name, version, url, digest))
            # A pending self-update supersedes everything else: the restarted
            # script re-checks the remaining packages.
            if name == NAME:
                break
    return has_update
def handle_upgrade(selfupdated=False, firstrun=False):
    """Handles the upgrade command.

    :param selfupdated: True when re-invoked by selfupdate() after replacing
                        this script; skips the self-version check.
    :param firstrun: True on initial installation; forces startup.sh creation.
    """
    if os.path.exists(VENVBURRITO_LIB):
        if not os.path.exists(os.path.join(VENVBURRITO, "libexec")):
            print("! Removing burrito < 2.7 setup and preparing fresh wrap")
            # nuke old lib and get pip out of the bin PATH
            shutil.rmtree(VENVBURRITO_LIB)
            for pip in glob.glob(os.path.join(VENVBURRITO, "bin", "pip*")):
                os.remove(pip)
            # create versioned python site-packages dir
            pyver = "python%s" % get_python_maj_min_str()
            os.mkdir(VENVBURRITO_LIB)
            os.mkdir(os.path.join(VENVBURRITO_LIB, pyver))
            os.mkdir(os.path.join(VENVBURRITO_LIB, pyver, "site-packages"))
    has_update = check_versions(selfupdated is False)
    # update other packages
    for update in has_update:
        name, version, url, digest = update
        filename = download(url, digest)
        try:
            if name == NAME:
                print("* Upgrading ourself ...")
                selfupdate(filename)  # calls os.exec
            else:
                print("* Upgrading %s ..." % name)
                upgrade_package(filename, name, version)
        finally:
            # Always remove the downloaded temp file, even on failure.
            if filename and os.path.exists(filename):
                os.remove(filename)
    # startup.sh needs to be created after selfupdate AND on install
    if selfupdated or firstrun:
        drop_startup_sh()
        if selfupdated:
            print("\nTo finish the upgrade, run this:")
            print("source %s/startup.sh" % VENVBURRITO)
    elif not has_update:
        print("Everything is up to date.")
        return
    else:
        print("\nFin.")
def usage(returncode=1):
    """Print a short usage hint and exit the process with *returncode*."""
    message = "Use like this:\n\t%s upgrade" % NAME
    print(message)
    raise SystemExit(returncode)
def main(argv):
    """Dispatch on the first CLI argument; exits via usage() on bad input."""
    if len(argv) < 2:
        usage()
    command = argv[1]
    if command in ('help', '--help', '-h', '-?'):
        usage(returncode=0)
    if command in ('version', '--version', '-V'):
        print("virtualenv-burrito %s from %s" % (__version__, __file__))
        raise SystemExit(0)
    if command not in ('upgrade', 'update'):
        usage()
    elif len(argv) == 2:
        # Plain "upgrade": normal interactive run.
        handle_upgrade()
    elif argv[2] in ('selfupdated', 'no-selfcheck'):
        handle_upgrade(selfupdated=True)
    elif argv[2] == 'firstrun':
        handle_upgrade(firstrun=True)
    else:
        usage()
# Script entry point: forward the raw argv so main() sees the program name.
if __name__ == '__main__':
    main(sys.argv)
|
{
"content_hash": "fe13edc5bad96dad0ef2235393d3dadf",
"timestamp": "",
"source": "github",
"line_count": 300,
"max_line_length": 135,
"avg_line_length": 31.3,
"alnum_prop": 0.5970181043663472,
"repo_name": "skytreader/virtualenv-burrito",
"id": "629388cfd8fd4f1a062d0ed547188a71b329f4b5",
"size": "9509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "virtualenv-burrito.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17116"
},
{
"name": "Shell",
"bytes": "3715"
}
],
"symlink_target": ""
}
|
"""The volumes snapshots api."""
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import strutils
import webob
from webob import exc
from cinder.api import common
from cinder.api.openstack import wsgi
from cinder.api.views import snapshots as snapshot_views
from cinder import exception
from cinder.i18n import _, _LI
from cinder import utils
from cinder import volume
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
class SnapshotsController(wsgi.Controller):
"""The Snapshots API controller for the OpenStack API."""
_view_builder_class = snapshot_views.ViewBuilder
def __init__(self, ext_mgr=None):
self.volume_api = volume.API()
self.ext_mgr = ext_mgr
super(SnapshotsController, self).__init__()
def show(self, req, id):
"""Return data about the given snapshot."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
snapshot = self.volume_api.get_snapshot(context, id)
req.cache_db_snapshot(snapshot)
return self._view_builder.detail(req, snapshot)
def delete(self, req, id):
"""Delete a snapshot."""
context = req.environ['cinder.context']
LOG.info(_LI("Delete snapshot with id: %s"), id)
# Not found exception will be handled at the wsgi level
snapshot = self.volume_api.get_snapshot(context, id)
self.volume_api.delete_snapshot(context, snapshot)
return webob.Response(status_int=202)
def index(self, req):
"""Returns a summary list of snapshots."""
return self._items(req, is_detail=False)
def detail(self, req):
"""Returns a detailed list of snapshots."""
return self._items(req, is_detail=True)
def _items(self, req, is_detail=True):
"""Returns a list of snapshots, transformed through view builder."""
context = req.environ['cinder.context']
# Pop out non search_opts and create local variables
search_opts = req.GET.copy()
sort_keys, sort_dirs = common.get_sort_params(search_opts)
marker, limit, offset = common.get_pagination_params(search_opts)
# Filter out invalid options
allowed_search_options = ('status', 'volume_id', 'name')
utils.remove_invalid_filter_options(context, search_opts,
allowed_search_options)
# NOTE(thingee): v2 API allows name instead of display_name
if 'name' in search_opts:
search_opts['display_name'] = search_opts.pop('name')
snapshots = self.volume_api.get_all_snapshots(context,
search_opts=search_opts,
marker=marker,
limit=limit,
sort_keys=sort_keys,
sort_dirs=sort_dirs,
offset=offset)
req.cache_db_snapshots(snapshots.objects)
if is_detail:
snapshots = self._view_builder.detail_list(req, snapshots.objects)
else:
snapshots = self._view_builder.summary_list(req, snapshots.objects)
return snapshots
@wsgi.response(202)
def create(self, req, body):
"""Creates a new snapshot."""
kwargs = {}
context = req.environ['cinder.context']
self.assert_valid_body(body, 'snapshot')
snapshot = body['snapshot']
kwargs['metadata'] = snapshot.get('metadata', None)
try:
volume_id = snapshot['volume_id']
except KeyError:
msg = _("'volume_id' must be specified")
raise exc.HTTPBadRequest(explanation=msg)
volume = self.volume_api.get(context, volume_id)
force = snapshot.get('force', False)
msg = _LI("Create snapshot from volume %s")
LOG.info(msg, volume_id)
self.validate_name_and_description(snapshot)
# NOTE(thingee): v2 API allows name instead of display_name
if 'name' in snapshot:
snapshot['display_name'] = snapshot.pop('name')
try:
force = strutils.bool_from_string(force, strict=True)
except ValueError as error:
err_msg = encodeutils.exception_to_unicode(error)
msg = _("Invalid value for 'force': '%s'") % err_msg
raise exception.InvalidParameterValue(err=msg)
if force:
new_snapshot = self.volume_api.create_snapshot_force(
context,
volume,
snapshot.get('display_name'),
snapshot.get('description'),
**kwargs)
else:
new_snapshot = self.volume_api.create_snapshot(
context,
volume,
snapshot.get('display_name'),
snapshot.get('description'),
**kwargs)
req.cache_db_snapshot(new_snapshot)
return self._view_builder.detail(req, new_snapshot)
def update(self, req, id, body):
    """Update a snapshot.

    Accepts name/description (v2 aliases) or display_name /
    display_description, applies them via the volume API, and returns
    the updated snapshot's detail view.
    """
    context = req.environ['cinder.context']

    if not body:
        raise exc.HTTPBadRequest(explanation=_("Missing request body"))
    if 'snapshot' not in body:
        msg = (_("Missing required element '%s' in request body") %
               'snapshot')
        raise exc.HTTPBadRequest(explanation=msg)

    snapshot = body['snapshot']
    self.validate_name_and_description(snapshot)

    # NOTE(thingee): v2 API allows name instead of display_name
    if 'name' in snapshot:
        snapshot['display_name'] = snapshot.pop('name')
    # NOTE(thingee): v2 API allows description instead of
    # display_description
    if 'description' in snapshot:
        snapshot['display_description'] = snapshot.pop('description')

    valid_update_keys = ('name', 'description',
                         'display_name', 'display_description')
    update_dict = {key: snapshot[key] for key in valid_update_keys
                   if key in snapshot}

    # Not found exception will be handled at the wsgi level
    snapshot = self.volume_api.get_snapshot(context, id)

    volume_utils.notify_about_snapshot_usage(context, snapshot,
                                             'update.start')
    self.volume_api.update_snapshot(context, snapshot, update_dict)
    snapshot.update(update_dict)
    req.cache_db_snapshot(snapshot)
    volume_utils.notify_about_snapshot_usage(context, snapshot,
                                             'update.end')

    return self._view_builder.detail(req, snapshot)
def create_resource(ext_mgr):
    """Return a WSGI resource wrapping the snapshots controller."""
    controller = SnapshotsController(ext_mgr)
    return wsgi.Resource(controller)
|
{
"content_hash": "5f33776179214e0296dd0741e1f4c332",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 79,
"avg_line_length": 35.405,
"alnum_prop": 0.5781669255754837,
"repo_name": "cloudbase/cinder",
"id": "b5ed4a27c69150dc027a1ccd7737ea1d9e3b78f3",
"size": "7717",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "cinder/api/v2/snapshots.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17586629"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
}
|
"Messages used to internally control thesplog settings."
from thespian.actors import ActorSystemMessage
class SetLogging(ActorSystemMessage):
    """Actor-system message carrying new thesplog settings.

    Attributes:
        threshold: logging threshold to apply.
        useLogging: whether output should go through logging.
        useFile: whether output should go to a file.
    """

    def __init__(self, threshold, useLogging, useFile):
        self.threshold, self.useLogging, self.useFile = (
            threshold, useLogging, useFile)
|
{
"content_hash": "e84325f9362aee1d298a46bf3ebf786b",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 56,
"avg_line_length": 30.8,
"alnum_prop": 0.724025974025974,
"repo_name": "godaddy/Thespian",
"id": "7f0792f4fb018355123edf8f009bb3d29b83a8cd",
"size": "308",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "thespian/system/messages/logcontrol.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1111138"
},
{
"name": "Shell",
"bytes": "48"
}
],
"symlink_target": ""
}
|
from datetime import datetime

from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Sequence
from sqlalchemy import Column
from sqlalchemy import BigInteger, Integer, SmallInteger
from sqlalchemy import Date, DateTime, Float, String, Unicode

from mabolab.database.dbsession import Base
class Avl_oee_period(Base):
    """ORM model for the ``avl_oee_period`` table.

    Holds per-station OEE figures for one time period (test counts and
    successes, running time, availability rate) plus audit columns.
    """
    __tablename__ = 'avl_oee_period'

    avl_oee_period_id = Column( BigInteger, primary_key=True )
    station = Column( Unicode(50) )  # character varying
    start_time = Column( Unicode(30) )  # character varying
    end_time = Column( Unicode(30) )  # character varying
    test9_count = Column( BigInteger )  # bigint
    test9_success = Column( BigInteger )  # bigint
    test30_count = Column( BigInteger )  # bigint
    test30_success = Column( BigInteger )  # bigint
    runningtime = Column( Float )  # double precision
    availablerate = Column( Float )  # double precision
    date_ = Column( Date )  # date (Date must be imported from sqlalchemy)
    active = Column( Integer )  # smallint
    # Fixed: original used `Datetime`, a NameError (the import is DateTime).
    createdon = Column( DateTime )  # timestamp without time zone
    createdby = Column( Unicode(30) )  # character varying
    lastupdatedby = Column( Unicode(30) )  # character varying
    lastupdateon = Column( DateTime )  # timestamp without time zone
    rowversionstamp = Column( Integer )  # integer

    def __init__(self):
        """init"""
        pass

    def __repr__(self):
        # NOTE(review): `self.id` is not declared on this model; it is
        # presumably inherited from the project Base class -- verify,
        # otherwise repr() raises AttributeError.
        return "<Avl_oee_period(%s, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')>" \
            % (self.id, self.avl_oee_period_id, self.station, self.start_time, self.end_time, self.test9_count, self.test9_success, self.test30_count, self.test30_success, self.runningtime, self.availablerate, self.date_, self.active, self.createdon, self.createdby, self.lastupdatedby, self.lastupdateon, self.rowversionstamp)
|
{
"content_hash": "b50a16b06eb9e36777372b46f2cdd809",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 327,
"avg_line_length": 44.97872340425532,
"alnum_prop": 0.6017029328287606,
"repo_name": "mabotech/maboss.py",
"id": "461f6479abf78c770987f5a614f1115c765dabbb",
"size": "2138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "maboss/webx/tools/output/models/avl_oee_period.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "14864"
},
{
"name": "JavaScript",
"bytes": "4950"
},
{
"name": "Lua",
"bytes": "683"
},
{
"name": "Python",
"bytes": "433923"
},
{
"name": "Shell",
"bytes": "667"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import datetime
import os
import re
import sys
import types
from django.conf import settings
from django.http import (HttpResponse, HttpResponseServerError,
HttpResponseNotFound, HttpRequest, build_request_repr)
from django.template import Template, Context, TemplateDoesNotExist
from django.template.defaultfilters import force_escape, pprint
from django.utils.datastructures import MultiValueDict
from django.utils.html import escape
from django.utils.encoding import force_bytes, smart_text
from django.utils.module_loading import import_string
from django.utils import six
HIDDEN_SETTINGS = re.compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE')
CLEANSED_SUBSTITUTE = '********************'
def linebreak_iter(template_source):
    """Yield the start offset of every line in *template_source*.

    Always yields 0 first and ``len(template_source) + 1`` last, so
    consecutive pairs bracket each line (including a final empty one).
    """
    yield 0
    for offset, char in enumerate(template_source):
        if char == '\n':
            yield offset + 1
    yield len(template_source) + 1
def cleanse_setting(key, value):
    """Cleanse an individual setting key/value of sensitive content.

    If the value is a dictionary, recursively cleanse the keys in
    that dictionary. Callables are marked so templates won't call them.
    """
    try:
        is_sensitive = bool(HIDDEN_SETTINGS.search(key))
    except TypeError:
        # If the key isn't regex-able, just return as-is.
        cleansed = value
    else:
        if is_sensitive:
            cleansed = CLEANSED_SUBSTITUTE
        elif isinstance(value, dict):
            cleansed = dict((k, cleanse_setting(k, v))
                            for k, v in value.items())
        else:
            cleansed = value

    if callable(cleansed):
        cleansed.do_not_call_in_templates = True

    return cleansed
def get_safe_settings():
    "Returns a dictionary of the settings module, with sensitive settings blurred out."
    # Only upper-case names are real Django settings.
    return dict(
        (k, cleanse_setting(k, getattr(settings, k)))
        for k in dir(settings) if k.isupper()
    )
def technical_500_response(request, exc_type, exc_value, tb):
    """
    Create a technical server error response. The last three arguments are
    the values returned from sys.exc_info() and friends.
    """
    reporter = ExceptionReporter(request, exc_type, exc_value, tb)
    # AJAX clients get a plain-text traceback; browsers get the HTML page.
    if request.is_ajax():
        return HttpResponseServerError(reporter.get_traceback_text(),
                                       content_type='text/plain')
    return HttpResponseServerError(reporter.get_traceback_html(),
                                   content_type='text/html')
# Cache for the default exception reporter filter instance.
default_exception_reporter_filter = None


def get_exception_reporter_filter(request):
    """Return the exception reporter filter to use for *request*.

    A per-request ``exception_reporter_filter`` attribute wins; otherwise
    the lazily-instantiated default from settings is returned.
    """
    global default_exception_reporter_filter
    if default_exception_reporter_filter is None:
        # Load the default filter for the first time and cache it.
        filter_cls = import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)
        default_exception_reporter_filter = filter_cls()
    if request:
        return getattr(request, 'exception_reporter_filter',
                       default_exception_reporter_filter)
    return default_exception_reporter_filter
class ExceptionReporterFilter(object):
    """
    Base for all exception reporter filter classes. All overridable hooks
    contain lenient default behaviors.
    """

    def get_request_repr(self, request):
        """Return a repr of *request* with POST replaced by the filtered params."""
        if request is None:
            return repr(None)
        return build_request_repr(
            request, POST_override=self.get_post_parameters(request))

    def get_post_parameters(self, request):
        """Return the request's POST parameters unfiltered ({} if no request)."""
        return {} if request is None else request.POST

    def get_traceback_frame_variables(self, request, tb_frame):
        """Return the frame's locals as a list of (name, value) pairs."""
        return list(six.iteritems(tb_frame.f_locals))
class SafeExceptionReporterFilter(ExceptionReporterFilter):
    """
    Use annotations made by the sensitive_post_parameters and
    sensitive_variables decorators to filter out sensitive information.
    """

    def is_active(self, request):
        """
        This filter is to add safety in production environments (i.e. DEBUG
        is False). If DEBUG is True then your site is not safe anyway.

        This hook is provided as a convenience to easily activate or
        deactivate the filter on a per request basis.
        """
        return settings.DEBUG is False

    def get_cleansed_multivaluedict(self, request, multivaluedict):
        """
        Replaces the keys in a MultiValueDict marked as sensitive with stars.

        This mitigates leaking sensitive POST parameters if something like
        request.POST['nonexistent_key'] throws an exception (#21098).
        """
        sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
        if self.is_active(request) and sensitive_post_parameters:
            # Copy first so the request's original MultiValueDict is untouched.
            multivaluedict = multivaluedict.copy()
            for param in sensitive_post_parameters:
                if param in multivaluedict:
                    multivaluedict[param] = CLEANSED_SUBSTITUTE
        return multivaluedict

    def get_post_parameters(self, request):
        """
        Replaces the values of POST parameters marked as sensitive with
        stars (*********).
        """
        if request is None:
            return {}
        else:
            sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
            if self.is_active(request) and sensitive_post_parameters:
                cleansed = request.POST.copy()
                if sensitive_post_parameters == '__ALL__':
                    # Cleanse all parameters.
                    for k, v in cleansed.items():
                        cleansed[k] = CLEANSED_SUBSTITUTE
                    return cleansed
                else:
                    # Cleanse only the specified parameters.
                    for param in sensitive_post_parameters:
                        if param in cleansed:
                            cleansed[param] = CLEANSED_SUBSTITUTE
                    return cleansed
            else:
                # Filter inactive or nothing marked sensitive: raw POST.
                return request.POST

    def cleanse_special_types(self, request, value):
        # Cleanse values whose type needs special handling before they are
        # shown in the debug page.
        if isinstance(value, HttpRequest):
            # Cleanse the request's POST parameters.
            value = self.get_request_repr(value)
        elif isinstance(value, MultiValueDict):
            # Cleanse MultiValueDicts (request.POST is the one we usually care about)
            value = self.get_cleansed_multivaluedict(request, value)
        return value

    def get_traceback_frame_variables(self, request, tb_frame):
        """
        Replaces the values of variables marked as sensitive with
        stars (*********).
        """
        # Loop through the frame's callers to see if the sensitive_variables
        # decorator was used.
        current_frame = tb_frame.f_back
        sensitive_variables = None
        while current_frame is not None:
            if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
                    and 'sensitive_variables_wrapper' in current_frame.f_locals):
                # The sensitive_variables decorator was used, so we take note
                # of the sensitive variables' names.
                wrapper = current_frame.f_locals['sensitive_variables_wrapper']
                sensitive_variables = getattr(wrapper, 'sensitive_variables', None)
                break
            current_frame = current_frame.f_back

        cleansed = {}
        if self.is_active(request) and sensitive_variables:
            if sensitive_variables == '__ALL__':
                # Cleanse all variables
                for name, value in tb_frame.f_locals.items():
                    cleansed[name] = CLEANSED_SUBSTITUTE
            else:
                # Cleanse specified variables
                for name, value in tb_frame.f_locals.items():
                    if name in sensitive_variables:
                        value = CLEANSED_SUBSTITUTE
                    else:
                        value = self.cleanse_special_types(request, value)
                    cleansed[name] = value
        else:
            # Potentially cleanse the request and any MultiValueDicts if they
            # are one of the frame variables.
            for name, value in tb_frame.f_locals.items():
                cleansed[name] = self.cleanse_special_types(request, value)

        if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper'
                and 'sensitive_variables_wrapper' in tb_frame.f_locals):
            # For good measure, obfuscate the decorated function's arguments in
            # the sensitive_variables decorator's frame, in case the variables
            # associated with those arguments were meant to be obfuscated from
            # the decorated function's frame.
            cleansed['func_args'] = CLEANSED_SUBSTITUTE
            cleansed['func_kwargs'] = CLEANSED_SUBSTITUTE

        return cleansed.items()
class ExceptionReporter(object):
    """
    A class to organize and coordinate reporting on exceptions.
    """

    def __init__(self, request, exc_type, exc_value, tb, is_email=False):
        # exc_type, exc_value, tb are the values returned by sys.exc_info().
        self.request = request
        self.filter = get_exception_reporter_filter(self.request)
        self.exc_type = exc_type
        self.exc_value = exc_value
        self.tb = tb
        self.is_email = is_email

        # Populated later when the exception relates to template rendering
        # or template loading (see get_traceback_data()).
        self.template_info = None
        self.template_does_not_exist = False
        self.loader_debug_info = None

        # Handle deprecated string exceptions
        if isinstance(self.exc_type, six.string_types):
            self.exc_value = Exception('Deprecated String Exception: %r' % self.exc_type)
            self.exc_type = type(self.exc_value)

    def format_path_status(self, path):
        """Return a human-readable existence/readability status for *path*."""
        if not os.path.exists(path):
            return "File does not exist"
        if not os.path.isfile(path):
            return "Not a file"
        if not os.access(path, os.R_OK):
            return "File is not readable"
        return "File exists"

    def get_traceback_data(self):
        """Return a dictionary containing traceback information."""
        # For TemplateDoesNotExist, collect which loaders tried which paths
        # so the debug page can show a template-loader postmortem.
        if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
            from django.template.loader import template_source_loaders
            self.template_does_not_exist = True
            self.loader_debug_info = []
            # If the template_source_loaders haven't been populated yet, you need
            # to provide an empty list for this for loop to not fail.
            if template_source_loaders is None:
                template_source_loaders = []
            for loader in template_source_loaders:
                try:
                    source_list_func = loader.get_template_sources
                    # NOTE: This assumes exc_value is the name of the template that
                    # the loader attempted to load.
                    template_list = [{
                        'name': t,
                        'status': self.format_path_status(t),
                    } for t in source_list_func(str(self.exc_value))]
                except AttributeError:
                    # Loader without get_template_sources (e.g. cached loader).
                    template_list = []
                loader_name = loader.__module__ + '.' + loader.__class__.__name__
                self.loader_debug_info.append({
                    'loader': loader_name,
                    'templates': template_list,
                })
        # Exceptions raised during template rendering carry a source marker;
        # extract it for the "Error during template rendering" section.
        if (settings.TEMPLATE_DEBUG and
                hasattr(self.exc_value, 'django_template_source')):
            self.get_template_exception_info()

        frames = self.get_traceback_frames()
        for i, frame in enumerate(frames):
            if 'vars' in frame:
                # Pretty-print and HTML-escape each local variable's value.
                frame['vars'] = [(k, force_escape(pprint(v))) for k, v in frame['vars']]
            frames[i] = frame

        unicode_hint = ''
        if self.exc_type and issubclass(self.exc_type, UnicodeError):
            # Show a short window of the offending string around the error.
            start = getattr(self.exc_value, 'start', None)
            end = getattr(self.exc_value, 'end', None)
            if start is not None and end is not None:
                unicode_str = self.exc_value.args[1]
                unicode_hint = smart_text(unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))], 'ascii', errors='replace')
        from django import get_version
        c = {
            'is_email': self.is_email,
            'unicode_hint': unicode_hint,
            'frames': frames,
            'request': self.request,
            'filtered_POST': self.filter.get_post_parameters(self.request),
            'settings': get_safe_settings(),
            'sys_executable': sys.executable,
            'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
            'server_time': datetime.datetime.now(),
            'django_version_info': get_version(),
            'sys_path': sys.path,
            'template_info': self.template_info,
            'template_does_not_exist': self.template_does_not_exist,
            'loader_debug_info': self.loader_debug_info,
        }
        # Check whether exception info is available
        if self.exc_type:
            c['exception_type'] = self.exc_type.__name__
        if self.exc_value:
            c['exception_value'] = smart_text(self.exc_value, errors='replace')
        if frames:
            c['lastframe'] = frames[-1]
        return c

    def get_traceback_html(self):
        "Return HTML version of debug 500 HTTP error page."
        t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template')
        c = Context(self.get_traceback_data(), use_l10n=False)
        return t.render(c)

    def get_traceback_text(self):
        "Return plain text version of debug 500 HTTP error page."
        t = Template(TECHNICAL_500_TEXT_TEMPLATE, name='Technical 500 template')
        c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
        return t.render(c)

    def get_template_exception_info(self):
        """Build self.template_info from the exception's template source marker."""
        # django_template_source is (origin, (start, end)) character offsets.
        origin, (start, end) = self.exc_value.django_template_source
        template_source = origin.reload()
        context_lines = 10
        line = 0
        upto = 0
        source_lines = []
        before = during = after = ""
        # Walk line-start offsets; each iteration covers [upto, next).
        for num, next in enumerate(linebreak_iter(template_source)):
            if start >= upto and end <= next:
                # The error span falls within this line; split it for highlighting.
                line = num
                before = escape(template_source[upto:start])
                during = escape(template_source[start:end])
                after = escape(template_source[end:next])
            source_lines.append((num, escape(template_source[upto:next])))
            upto = next
        total = len(source_lines)

        top = max(1, line - context_lines)
        bottom = min(total, line + 1 + context_lines)

        # In some rare cases, exc_value.args might be empty.
        try:
            message = self.exc_value.args[0]
        except IndexError:
            message = '(Could not get exception message)'

        self.template_info = {
            'message': message,
            'source_lines': source_lines[top:bottom],
            'before': before,
            'during': during,
            'after': after,
            'top': top,
            'bottom': bottom,
            'total': total,
            'line': line,
            'name': origin.name,
        }

    def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
        """
        Returns context_lines before and after lineno from file.
        Returns (pre_context_lineno, pre_context, context_line, post_context).
        """
        source = None
        # Prefer the module loader's view of the source (works for zipped
        # packages etc.); fall back to reading the file from disk.
        if loader is not None and hasattr(loader, "get_source"):
            try:
                source = loader.get_source(module_name)
            except ImportError:
                pass
            if source is not None:
                source = source.splitlines()
        if source is None:
            try:
                with open(filename, 'rb') as fp:
                    source = fp.read().splitlines()
            except (OSError, IOError):
                pass
        if source is None:
            return None, [], None, []

        # If we just read the source from a file, or if the loader did not
        # apply tokenize.detect_encoding to decode the source into a Unicode
        # string, then we should do that ourselves.
        if isinstance(source[0], six.binary_type):
            encoding = 'ascii'
            for line in source[:2]:
                # File coding may be specified. Match pattern from PEP-263
                # (http://www.python.org/dev/peps/pep-0263/)
                match = re.search(br'coding[:=]\s*([-\w.]+)', line)
                if match:
                    encoding = match.group(1).decode('ascii')
                    break
            source = [six.text_type(sline, encoding, 'replace') for sline in source]

        lower_bound = max(0, lineno - context_lines)
        upper_bound = lineno + context_lines

        pre_context = source[lower_bound:lineno]
        context_line = source[lineno]
        post_context = source[lineno + 1:upper_bound]

        return lower_bound, pre_context, context_line, post_context

    def get_traceback_frames(self):
        """Return a list of frame dicts for each visible traceback frame."""
        frames = []
        tb = self.tb
        while tb is not None:
            # Support for __traceback_hide__ which is used by a few libraries
            # to hide internal frames.
            if tb.tb_frame.f_locals.get('__traceback_hide__'):
                tb = tb.tb_next
                continue
            filename = tb.tb_frame.f_code.co_filename
            function = tb.tb_frame.f_code.co_name
            lineno = tb.tb_lineno - 1  # 0-based for source slicing below
            loader = tb.tb_frame.f_globals.get('__loader__')
            module_name = tb.tb_frame.f_globals.get('__name__') or ''
            pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(filename, lineno, 7, loader, module_name)
            if pre_context_lineno is not None:
                frames.append({
                    'tb': tb,
                    'type': 'django' if module_name.startswith('django.') else 'user',
                    'filename': filename,
                    'function': function,
                    'lineno': lineno + 1,  # back to 1-based for display
                    'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame),
                    'id': id(tb),
                    'pre_context': pre_context,
                    'context_line': context_line,
                    'post_context': post_context,
                    'pre_context_lineno': pre_context_lineno + 1,
                })
            tb = tb.tb_next
        return frames

    def format_exception(self):
        """
        Return the same data as from traceback.format_exception.
        """
        import traceback
        frames = self.get_traceback_frames()
        tb = [(f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames]
        # NOTE: `list` shadows the builtin here; kept for byte-compatibility.
        list = ['Traceback (most recent call last):\n']
        list += traceback.format_list(tb)
        list += traceback.format_exception_only(self.exc_type, self.exc_value)
        return list
def technical_404_response(request, exception):
    "Create a technical 404 error response. The exception should be the Http404."
    try:
        error_url = exception.args[0]['path']
    except (IndexError, TypeError, KeyError):
        error_url = request.path_info[1:]  # Trim leading slash

    try:
        tried = exception.args[0]['tried']
    except (IndexError, TypeError, KeyError):
        tried = []
    else:
        # Show the friendly "It worked!" page instead of a resolver dump when
        # the URLconf is empty or is the default (admin-only) one.
        if (not tried  # empty URLconf
            or (request.path == '/'
                and len(tried) == 1  # default URLconf
                and len(tried[0]) == 1
                and getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin')):
            return default_urlconf(request)

    urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
    if isinstance(urlconf, types.ModuleType):
        urlconf = urlconf.__name__

    t = Template(TECHNICAL_404_TEMPLATE, name='Technical 404 template')
    c = Context({
        'urlconf': urlconf,
        'root_urlconf': settings.ROOT_URLCONF,
        'request_path': error_url,
        'urlpatterns': tried,
        # Fixed: force_bytes() produced a bytestring here, which renders
        # as "b'...'" in the template on Python 3; decode to text instead.
        'reason': smart_text(exception, errors='replace'),
        'request': request,
        'settings': get_safe_settings(),
    })
    return HttpResponseNotFound(t.render(c), content_type='text/html')
def default_urlconf(request):
    "Create an empty URLconf 404 error response."
    template = Template(DEFAULT_URLCONF_TEMPLATE, name='Default URLconf template')
    return HttpResponse(template.render(Context({})), content_type='text/html')
#
# Templates are embedded in the file so that we know the error handler will
# always work even if the template loader is broken.
#
TECHNICAL_500_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}{% if request %} at {{ request.path_info|escape }}{% endif %}</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
code, pre { font-size: 100%; white-space: pre-wrap; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; }
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
table.vars { margin:5px 0 2px 40px; }
table.vars td, table.req td { font-family:monospace; }
table td.code { width:100%; }
table td.code pre { overflow:hidden; }
table.source th { color:#666; }
table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; }
ul.traceback { list-style-type:none; color: #222; }
ul.traceback li.frame { padding-bottom:1em; color:#666; }
ul.traceback li.user { background-color:#e0e0e0; color:#000 }
div.context { padding:10px 0; overflow:hidden; }
div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; }
div.context ol li { font-family:monospace; white-space:pre; color:#777; cursor:pointer; }
div.context ol li pre { display:inline; }
div.context ol.context-line li { color:#505050; background-color:#dfdfdf; }
div.context ol.context-line li span { position:absolute; right:32px; }
.user div.context ol.context-line li { background-color:#bbb; color:#000; }
.user div.context ol li { color:#666; }
div.commands { margin-left: 40px; }
div.commands a { color:#555; text-decoration:none; }
.user div.commands a { color: black; }
#summary { background: #ffc; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#template, #template-not-exist { background:#f6f6f6; }
#template-not-exist ul { margin: 0 0 0 20px; }
#unicode-hint { background:#eee; }
#traceback { background:#eee; }
#requestinfo { background:#f6f6f6; padding-left:120px; }
#summary table { border:none; background:transparent; }
#requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; }
#requestinfo h3 { margin-bottom:-1em; }
.error { background: #ffc; }
.specific { color:#cc3300; font-weight:bold; }
h2 span.commands { font-size:.7em;}
span.commands a:link {color:#5E5694;}
pre.exception_value { font-family: sans-serif; color: #666; font-size: 1.5em; margin: 10px 0 10px 0; }
</style>
{% if not is_email %}
<script type="text/javascript">
//<!--
function getElementsByClassName(oElm, strTagName, strClassName){
// Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com
var arrElements = (strTagName == "*" && document.all)? document.all :
oElm.getElementsByTagName(strTagName);
var arrReturnElements = new Array();
strClassName = strClassName.replace(/\-/g, "\\-");
var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)");
var oElement;
for(var i=0; i<arrElements.length; i++){
oElement = arrElements[i];
if(oRegExp.test(oElement.className)){
arrReturnElements.push(oElement);
}
}
return (arrReturnElements)
}
function hideAll(elems) {
for (var e = 0; e < elems.length; e++) {
elems[e].style.display = 'none';
}
}
window.onload = function() {
hideAll(getElementsByClassName(document, 'table', 'vars'));
hideAll(getElementsByClassName(document, 'ol', 'pre-context'));
hideAll(getElementsByClassName(document, 'ol', 'post-context'));
hideAll(getElementsByClassName(document, 'div', 'pastebin'));
}
function toggle() {
for (var i = 0; i < arguments.length; i++) {
var e = document.getElementById(arguments[i]);
if (e) {
e.style.display = e.style.display == 'none' ? 'block': 'none';
}
}
return false;
}
function varToggle(link, id) {
toggle('v' + id);
var s = link.getElementsByTagName('span')[0];
var uarr = String.fromCharCode(0x25b6);
var darr = String.fromCharCode(0x25bc);
s.innerHTML = s.innerHTML == uarr ? darr : uarr;
return false;
}
function switchPastebinFriendly(link) {
s1 = "Switch to copy-and-paste view";
s2 = "Switch back to interactive view";
link.innerHTML = link.innerHTML == s1 ? s2: s1;
toggle('browserTraceback', 'pastebinTraceback');
return false;
}
//-->
</script>
{% endif %}
</head>
<body>
<div id="summary">
<h1>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}{% if request %} at {{ request.path_info|escape }}{% endif %}</h1>
<pre class="exception_value">{% if exception_value %}{{ exception_value|force_escape }}{% else %}No exception message supplied{% endif %}</pre>
<table class="meta">
{% if request %}
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
{% endif %}
<tr>
<th>Django Version:</th>
<td>{{ django_version_info }}</td>
</tr>
{% if exception_type %}
<tr>
<th>Exception Type:</th>
<td>{{ exception_type }}</td>
</tr>
{% endif %}
{% if exception_type and exception_value %}
<tr>
<th>Exception Value:</th>
<td><pre>{{ exception_value|force_escape }}</pre></td>
</tr>
{% endif %}
{% if lastframe %}
<tr>
<th>Exception Location:</th>
<td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td>
</tr>
{% endif %}
<tr>
<th>Python Executable:</th>
<td>{{ sys_executable|escape }}</td>
</tr>
<tr>
<th>Python Version:</th>
<td>{{ sys_version_info }}</td>
</tr>
<tr>
<th>Python Path:</th>
<td><pre>{{ sys_path|pprint }}</pre></td>
</tr>
<tr>
<th>Server time:</th>
<td>{{server_time|date:"r"}}</td>
</tr>
</table>
</div>
{% if unicode_hint %}
<div id="unicode-hint">
<h2>Unicode error hint</h2>
<p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|force_escape }}</strong></p>
</div>
{% endif %}
{% if template_does_not_exist %}
<div id="template-not-exist">
<h2>Template-loader postmortem</h2>
{% if loader_debug_info %}
<p>Django tried loading these templates, in this order:</p>
<ul>
{% for loader in loader_debug_info %}
<li>Using loader <code>{{ loader.loader }}</code>:
<ul>
{% for t in loader.templates %}<li><code>{{ t.name }}</code> ({{ t.status }})</li>{% endfor %}
</ul>
</li>
{% endfor %}
</ul>
{% else %}
<p>Django couldn't find any templates because your <code>TEMPLATE_LOADERS</code> setting is empty!</p>
{% endif %}
</div>
{% endif %}
{% if template_info %}
<div id="template">
<h2>Error during template rendering</h2>
<p>In template <code>{{ template_info.name }}</code>, error at line <strong>{{ template_info.line }}</strong></p>
<h3>{{ template_info.message }}</h3>
<table class="source{% if template_info.top %} cut-top{% endif %}{% ifnotequal template_info.bottom template_info.total %} cut-bottom{% endifnotequal %}">
{% for source_line in template_info.source_lines %}
{% ifequal source_line.0 template_info.line %}
<tr class="error"><th>{{ source_line.0 }}</th>
<td>{{ template_info.before }}<span class="specific">{{ template_info.during }}</span>{{ template_info.after }}</td></tr>
{% else %}
<tr><th>{{ source_line.0 }}</th>
<td>{{ source_line.1 }}</td></tr>
{% endifequal %}
{% endfor %}
</table>
</div>
{% endif %}
{% if frames %}
<div id="traceback">
<h2>Traceback <span class="commands">{% if not is_email %}<a href="#" onclick="return switchPastebinFriendly(this);">Switch to copy-and-paste view</a></span>{% endif %}</h2>
{% autoescape off %}
<div id="browserTraceback">
<ul class="traceback">
{% for frame in frames %}
<li class="frame {{ frame.type }}">
<code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code>
{% if frame.context_line %}
<div class="context" id="c{{ frame.id }}">
{% if frame.pre_context and not is_email %}
<ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}">{% for line in frame.pre_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>{% endfor %}</ol>
{% endif %}
<ol start="{{ frame.lineno }}" class="context-line"><li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ frame.context_line|escape }}</pre>{% if not is_email %} <span>...</span>{% endif %}</li></ol>
{% if frame.post_context and not is_email %}
<ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}">{% for line in frame.post_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>{% endfor %}</ol>
{% endif %}
</div>
{% endif %}
{% if frame.vars %}
<div class="commands">
{% if is_email %}
<h2>Local Vars</h2>
{% else %}
<a href="#" onclick="return varToggle(this, '{{ frame.id }}')"><span>▶</span> Local vars</a>
{% endif %}
</div>
<table class="vars" id="v{{ frame.id }}">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in frame.vars|dictsort:"0" %}
<tr>
<td>{{ var.0|force_escape }}</td>
<td class="code"><pre>{{ var.1 }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
</li>
{% endfor %}
</ul>
</div>
{% endautoescape %}
<form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post">
{% if not is_email %}
<div id="pastebinTraceback" class="pastebin">
<input type="hidden" name="language" value="PythonConsole">
<input type="hidden" name="title" value="{{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}">
<input type="hidden" name="source" value="Django Dpaste Agent">
<input type="hidden" name="poster" value="Django">
<textarea name="content" id="traceback_area" cols="140" rows="25">
Environment:
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.build_absolute_uri|escape }}
{% endif %}
Django Version: {{ django_version_info }}
Python Version: {{ sys_version_info }}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template Loader Error:
{% if loader_debug_info %}Django tried loading these templates, in this order:
{% for loader in loader_debug_info %}Using loader {{ loader.loader }}:
{% for t in loader.templates %}{{ t.name }} ({{ t.status }})
{% endfor %}{% endfor %}
{% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty!
{% endif %}
{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %}
{{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}
{% else %}
{{ source_line.0 }} : {{ source_line.1 }}
{% endifequal %}{% endfor %}{% endif %}
Traceback:
{% for frame in frames %}File "{{ frame.filename|escape }}" in {{ frame.function|escape }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line|escape }}{% endif %}
{% endfor %}
Exception Type: {{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}
Exception Value: {{ exception_value|force_escape }}
</textarea>
<br><br>
<input type="submit" value="Share this traceback on a public Web site">
</div>
</form>
</div>
{% endif %}
{% endif %}
<div id="requestinfo">
<h2>Request information</h2>
{% if request %}
<h3 id="get-info">GET</h3>
{% if request.GET %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.GET.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No GET data</p>
{% endif %}
<h3 id="post-info">POST</h3>
{% if filtered_POST %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in filtered_POST.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No POST data</p>
{% endif %}
<h3 id="files-info">FILES</h3>
{% if request.FILES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.FILES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No FILES data</p>
{% endif %}
<h3 id="cookie-info">COOKIES</h3>
{% if request.COOKIES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.COOKIES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No cookie data</p>
{% endif %}
<h3 id="meta-info">META</h3>
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.META.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>Request data not supplied</p>
{% endif %}
<h3 id="settings-info">Settings</h3>
<h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4>
<table class="req">
<thead>
<tr>
<th>Setting</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in settings.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% if not is_email %}
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in your
Django settings file. Change that to <code>False</code>, and Django will
display a standard 500 page.
</p>
</div>
{% endif %}
</body>
</html>
"""
# Plain-text counterpart of the technical 500 debug page; rendered when the
# client does not accept HTML (and for error e-mails). Template content is
# runtime data and must not be altered.
TECHNICAL_500_TEXT_TEMPLATE = """{% load firstof from future %}{% firstof exception_type 'Report' %}{% if request %} at {{ request.path_info }}{% endif %}
{% firstof exception_value 'No exception message supplied' %}
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.build_absolute_uri }}{% endif %}
Django Version: {{ django_version_info }}
Python Executable: {{ sys_executable }}
Python Version: {{ sys_version_info }}
Python Path: {{ sys_path }}
Server time: {{server_time|date:"r"}}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template loader Error:
{% if loader_debug_info %}Django tried loading these templates, in this order:
{% for loader in loader_debug_info %}Using loader {{ loader.loader }}:
{% for t in loader.templates %}{{ t.name }} ({{ t.status }})
{% endfor %}{% endfor %}
{% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty!
{% endif %}
{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %}
{{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}
{% else %}
{{ source_line.0 }} : {{ source_line.1 }}
{% endifequal %}{% endfor %}{% endif %}{% if frames %}
Traceback:
{% for frame in frames %}File "{{ frame.filename }}" in {{ frame.function }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line }}{% endif %}
{% endfor %}
{% if exception_type %}Exception Type: {{ exception_type }}{% if request %} at {{ request.path_info }}{% endif %}
{% if exception_value %}Exception Value: {{ exception_value }}{% endif %}{% endif %}{% endif %}
{% if request %}Request information:
GET:{% for k, v in request.GET.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No GET data{% endfor %}
POST:{% for k, v in filtered_POST.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No POST data{% endfor %}
FILES:{% for k, v in request.FILES.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No FILES data{% endfor %}
COOKIES:{% for k, v in request.COOKIES.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No cookie data{% endfor %}
META:{% for k, v in request.META.items|dictsort:"0" %}
{{ k }} = {{ v|stringformat:"r" }}{% endfor %}
{% else %}Request data not supplied
{% endif %}
Settings:
Using settings module {{ settings.SETTINGS_MODULE }}{% for k, v in settings.items|dictsort:"0" %}
{{ k }} = {{ v|stringformat:"r" }}{% endfor %}
You're seeing this error because you have DEBUG = True in your
Django settings file. Change that to False, and Django will
display a standard 500 page.
"""
TECHNICAL_404_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Page not found at {{ request.path_info|escape }}</title>
<meta name="robots" content="NONE,NOARCHIVE">
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
table { border:none; border-collapse: collapse; width:100%; }
td, th { vertical-align:top; padding:2px 3px; }
th { width:12em; text-align:right; color:#666; padding-right:.5em; }
#info { background:#f6f6f6; }
#info ol { margin: 0.5em 4em; }
#info ol li { font-family: monospace; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Page not found <span>(404)</span></h1>
<table class="meta">
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
</table>
</div>
<div id="info">
{% if urlpatterns %}
<p>
Using the URLconf defined in <code>{{ urlconf }}</code>,
Django tried these URL patterns, in this order:
</p>
<ol>
{% for pattern in urlpatterns %}
<li>
{% for pat in pattern %}
{{ pat.regex.pattern }}
{% if forloop.last and pat.name %}[name='{{ pat.name }}']{% endif %}
{% endfor %}
</li>
{% endfor %}
</ol>
<p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p>
{% else %}
<p>{{ reason }}</p>
{% endif %}
</div>
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in
your Django settings file. Change that to <code>False</code>, and Django
will display a standard 404 page.
</p>
</div>
</body>
</html>
"""
DEFAULT_URLCONF_TEMPLATE = """
<!DOCTYPE html>
<html lang="en"><head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE"><title>Welcome to Django</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; }
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
#summary { background: #e0ebff; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#instructions { background:#f6f6f6; }
#summary table { border:none; background:transparent; }
</style>
</head>
<body>
<div id="summary">
<h1>It worked!</h1>
<h2>Congratulations on your first Django-powered page.</h2>
</div>
<div id="instructions">
<p>
Of course, you haven't actually done any work yet.
Next, start your first app by running <code>python manage.py startapp [app_label]</code>.
</p>
</div>
<div id="explanation">
<p>
You're seeing this message because you have <code>DEBUG = True</code> in your
Django settings file and you haven't configured any URLs. Get to work!
</p>
</div>
</body></html>
"""
|
{
"content_hash": "3aea1ca2df07c73ad71b5d3a20840758",
"timestamp": "",
"source": "github",
"line_count": 1168,
"max_line_length": 251,
"avg_line_length": 37.97517123287671,
"alnum_prop": 0.5795964378311351,
"repo_name": "errx/django",
"id": "c1acd8191ca4e77d8068bf0a04071d858c6acb94",
"size": "44355",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/views/debug.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52957"
},
{
"name": "JavaScript",
"bytes": "102668"
},
{
"name": "Python",
"bytes": "9469402"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
}
|
import unittest
import os
from test.aiml_tests.client import TestClient
from programy.config.brain import BrainFileConfiguration
class WeathersTestsClient(TestClient):
    """Test client that points the brain at the weather-extension AIML files."""

    def __init__(self):
        # Same as TestClient.__init__(self, debug=True), expressed via super().
        super(WeathersTestsClient, self).__init__(debug=True)

    def load_configuration(self, arguments):
        super(WeathersTestsClient, self).load_configuration(arguments)
        # Override the AIML file set so only the weather extension grammar
        # is loaded for these tests.
        aiml_dir = os.path.dirname(__file__)+"/../../../../aiml/extensions/weather"
        files_config = BrainFileConfiguration(aiml_dir, ".aiml", False)
        self.configuration.brain_configuration._aiml_files = files_config
class WeathersAIMLTests(unittest.TestCase):
    """AIML-level tests for the weather extension.

    Canned Met Office JSON responses (checked in next to this file) are
    injected via license keys, so no live weather API is contacted.
    """
    def setUp (self):
        WeathersAIMLTests.test_client = WeathersTestsClient()
        # Paths to the canned API responses used in place of live data.
        observation = os.path.dirname(__file__) + "/observation.json"
        threehourly = os.path.dirname(__file__) + "/forecast_3hourly.json"
        daily = os.path.dirname(__file__) + "/forecast_daily.json"
        # Inject a dummy API key plus the canned response files into the
        # brain's license-key store (the weather extension reads them from
        # there). The embedded indentation is part of the key data format.
        WeathersAIMLTests.test_client.bot.brain.license_keys.load_license_key_data("""
        METOFFICE_API_KEY=TESTKEY
        CURRENT_OBSERVATION_RESPONSE_FILE=%s
        THREE_HOURLY_FORECAST_RESPONSE_FILE=%s
        DAILY_FORECAST_RESPONSE_FILE=%s
        """%(observation, threehourly, daily))
    def test_weather(self):
        # The expected string's spacing comes from the AIML grammar output
        # and is significant.
        response = WeathersAIMLTests.test_client.bot.ask_question("testid", "WEATHER POSTCODE KY39UR WHEN TODAY")
        self.assertIsNotNone(response)
        self.assertEqual(response, "Today the weather is Partly cloudy (day) , with a temperature of 12 . 3 \'C Partly cloudy (day)")
|
{
"content_hash": "24b9adbd283229d048377c4b1066a23f",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 165,
"avg_line_length": 41.416666666666664,
"alnum_prop": 0.6928236083165661,
"repo_name": "CHT5/program-y",
"id": "4f6bc6b3aeadae92bcfd75dd9517e9f17ebfe4fb",
"size": "1491",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bots/y-bot/src/test/extensions/weather/test_aiml.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "937"
},
{
"name": "HTML",
"bytes": "1580"
},
{
"name": "Python",
"bytes": "1027605"
},
{
"name": "Shell",
"bytes": "2835"
}
],
"symlink_target": ""
}
|
import logging
import struct
from teuthology.exceptions import CommandFailedError
from teuthology.orchestra import run
log = logging.getLogger(__name__)
class DaemonState(object):
    """
    Daemon State. A daemon exists for each instance of each role.

    The daemon is driven through ``self.proc``, the handle returned by
    ``remote.run``; ``proc`` is ``None`` whenever no daemon is (known to be)
    running.
    """
    def __init__(self, remote, role, id_, *command_args, **command_kwargs):
        """
        Pass remote command information as parameters to remote site

        :param remote: Remote site
        :param role: Role (osd, rgw, mon, mds)
        :param id_: Id within role (osd.1, osd.2, for example)
        :param command_args: positional arguments (used in restart commands)
        :param command_kwargs: keyword arguments (used in restart commands)
        """
        self.remote = remote
        self.command_args = command_args
        self.command_kwargs = command_kwargs
        self.role = role
        # Roles look like '<cluster>.<type>...', e.g. 'ceph.osd'.
        self.cluster, self.type_ = self.role.split('.')[0:2]
        self.id_ = id_
        self.log = command_kwargs.get('logger', log)
        self.fsid = command_kwargs.get('fsid')
        self.proc = None

    def check_status(self):
        """
        Check to see if the process has exited.

        :returns: The exit status, if any (None while still running or when
                  no process has been started)
        :raises: CommandFailedError, if the process was run with
                 check_status=True
        """
        if self.proc:
            return self.proc.poll()

    @property
    def pid(self):
        # Subclasses that track the remote daemon's PID must override this.
        raise NotImplementedError

    def reset(self):
        """
        Forget the remote run command handle without touching the process.
        """
        self.proc = None

    def restart(self, *args, **kwargs):
        """
        Restart with a new command passed in the arguments

        :param args: positional arguments passed to remote.run
        :param kwargs: keyword arguments passed to remote.run
        """
        self.log.info('Restarting daemon')
        if self.proc is not None:
            self.log.info('Stopping old one...')
            self.stop()
        cmd_args = list(self.command_args)
        cmd_args.extend(args)
        # NOTE(review): this updates self.command_kwargs in place, so kwargs
        # passed here persist into later restarts (restart_with_args(), by
        # contrast, deliberately copies). Confirm this persistence is wanted.
        cmd_kwargs = self.command_kwargs
        cmd_kwargs.update(kwargs)
        self.proc = self.remote.run(*cmd_args, **cmd_kwargs)
        self.log.info('Started')

    def restart_with_args(self, extra_args):
        """
        Restart, adding new parameters to the current command.

        :param extra_args: Extra keyword arguments to be added.
        """
        self.log.info('Restarting daemon with args')
        if self.proc is not None:
            self.log.info('Stopping old one...')
            self.stop()
        cmd_args = list(self.command_args)
        # we only want to make a temporary mod of the args list
        # so we shallow copy the dict, and deepcopy the args list
        cmd_kwargs = self.command_kwargs.copy()
        from copy import deepcopy
        cmd_kwargs['args'] = deepcopy(self.command_kwargs['args'])
        cmd_kwargs['args'].extend(extra_args)
        self.proc = self.remote.run(*cmd_args, **cmd_kwargs)
        self.log.info('Started')

    def running(self):
        """
        Are we running?

        :return: True if remote run command value is set, False otherwise.
        """
        return self.proc is not None

    def signal(self, sig, silent=False):
        """
        Send a signal to associated remote command.

        The signal number is written to the remote process's stdin as a
        single signed byte; a send failure is logged but not raised.

        :param sig: signal to send
        :param silent: if True, do not log successful delivery
        """
        if self.running():
            try:
                self.proc.stdin.write(struct.pack('!b', sig))
            except IOError as e:
                log.exception('Failed to send signal %d: %s', sig, e.strerror)
            if not silent:
                self.log.info('Sent signal %d', sig)
        else:
            self.log.error('No such daemon running')

    def start(self, timeout=300):
        """
        Start this daemon instance.

        :param timeout: unused here; kept for interface compatibility with
                        subclasses that honour it.
        """
        if self.running():
            # Fix: logging's warn() is a deprecated alias of warning().
            self.log.warning('Restarting a running daemon')
        self.restart()

    def stop(self, timeout=300):
        """
        Stop this daemon instance.

        Note: this can raise a CommandFailedError,
        CommandCrashedError, or ConnectionLostError.

        :param timeout: timeout to pass to orchestra.run.wait()
        """
        if not self.running():
            self.log.error('tried to stop a non-running daemon')
            return
        # Closing stdin is the shutdown request to the remote daemon wrapper.
        self.proc.stdin.close()
        self.log.debug('waiting for process to exit')
        try:
            run.wait([self.proc], timeout=timeout)
        except CommandFailedError:
            # Best-effort stop: log the failure but still mark as stopped.
            log.exception("Error while waiting for process to exit")
        self.proc = None
        self.log.info('Stopped')

    # FIXME why are there two wait methods?
    def wait(self, timeout=300):
        """
        Wait for daemon to exit

        Wait for daemon to stop (but don't trigger the stop). Pass up
        any exception. Mark the daemon as not running.
        """
        self.log.debug('waiting for process to exit')
        try:
            run.wait([self.proc], timeout=timeout)
            self.log.info('Stopped')
        except BaseException:
            # Fix: explicit BaseException instead of a bare 'except:'; same
            # semantics (log then always re-raise), but lint-clean and clear.
            self.log.info('Failed')
            raise
        finally:
            self.proc = None

    def wait_for_exit(self):
        """
        clear remote run command value after waiting for exit.
        """
        if self.proc:
            try:
                run.wait([self.proc])
            finally:
                self.proc = None
|
{
"content_hash": "dc29fc59170b3ea388ecf89815e7ce21",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 78,
"avg_line_length": 31.608187134502923,
"alnum_prop": 0.5715078630897318,
"repo_name": "dmick/teuthology",
"id": "3a6a17213d0c70dac390ba9d483c0a346af90300",
"size": "5405",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "teuthology/orchestra/daemon/state.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1096"
},
{
"name": "Makefile",
"bytes": "4194"
},
{
"name": "Python",
"bytes": "1413171"
},
{
"name": "Shell",
"bytes": "61271"
}
],
"symlink_target": ""
}
|
"""Generates test runner factory and tests for GTests."""
# pylint: disable=W0212
import fnmatch
import glob
import logging
import os
import shutil
import sys
from pylib import cmd_helper
from pylib import constants
from pylib.base import base_test_result
from pylib.base import test_dispatcher
from pylib.gtest import test_package_apk
from pylib.gtest import test_package_exe
from pylib.gtest import test_runner
sys.path.insert(0,
os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
'common'))
import unittest_util # pylint: disable=F0401
# Maps suite name -> .isolate file (relative to the Chromium src/ root)
# declaring that suite's runtime data dependencies.
_ISOLATE_FILE_PATHS = {
    'base_unittests': 'base/base_unittests.isolate',
    'blink_heap_unittests':
      'third_party/WebKit/Source/platform/heap/BlinkHeapUnitTests.isolate',
    'breakpad_unittests': 'breakpad/breakpad_unittests.isolate',
    'cc_perftests': 'cc/cc_perftests.isolate',
    'components_unittests': 'components/components_unittests.isolate',
    'content_browsertests': 'content/content_browsertests.isolate',
    'content_unittests': 'content/content_unittests.isolate',
    'media_perftests': 'media/media_perftests.isolate',
    'media_unittests': 'media/media_unittests.isolate',
    'net_unittests': 'net/net_unittests.isolate',
    'ui_unittests': 'ui/ui_unittests.isolate',
    'unit_tests': 'chrome/unit_tests.isolate',
    'webkit_unit_tests':
      'third_party/WebKit/Source/web/WebKitUnitTests.isolate',
}
# Paths relative to third_party/webrtc/ (kept separate for readability).
_WEBRTC_ISOLATE_FILE_PATHS = {
    'audio_decoder_unittests':
      'modules/audio_coding/neteq/audio_decoder_unittests.isolate',
    'common_audio_unittests': 'common_audio/common_audio_unittests.isolate',
    'common_video_unittests': 'common_video/common_video_unittests.isolate',
    'modules_tests': 'modules/modules_tests.isolate',
    'modules_unittests': 'modules/modules_unittests.isolate',
    'system_wrappers_unittests':
      'system_wrappers/source/system_wrappers_unittests.isolate',
    'test_support_unittests': 'test/test_support_unittests.isolate',
    'tools_unittests': 'tools/tools_unittests.isolate',
    'video_engine_tests': 'video_engine_tests.isolate',
    'video_engine_core_unittests':
      'video_engine/video_engine_core_unittests.isolate',
    'voice_engine_unittests': 'voice_engine/voice_engine_unittests.isolate',
    'webrtc_perf_tests': 'webrtc_perf_tests.isolate',
}
# Append the WebRTC tests with the full path from Chromium's src/ root.
for webrtc_test, isolate_path in _WEBRTC_ISOLATE_FILE_PATHS.items():
  _ISOLATE_FILE_PATHS[webrtc_test] = 'third_party/webrtc/%s' % isolate_path
# Used for filtering large data deps at a finer grain than what's allowed in
# isolate files since pushing deps to devices is expensive.
# Wildcards are allowed.
_DEPS_EXCLUSION_LIST = [
    'chrome/test/data/extensions/api_test',
    'chrome/test/data/extensions/secure_shell',
    'chrome/test/data/firefox*',
    'chrome/test/data/gpu',
    'chrome/test/data/image_decoding',
    'chrome/test/data/import',
    'chrome/test/data/page_cycler',
    'chrome/test/data/perf',
    'chrome/test/data/pyauto_private',
    'chrome/test/data/safari_import',
    'chrome/test/data/scroll',
    'chrome/test/data/third_party',
    'third_party/hunspell_dictionaries/*.dic',
    # crbug.com/258690
    'webkit/data/bmp_decoder',
    'webkit/data/ico_decoder',
]
# Path to the swarming_client isolate.py tool used for the 'remap' step below.
_ISOLATE_SCRIPT = os.path.join(
    constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client', 'isolate.py')
def _GenerateDepsDirUsingIsolate(suite_name, isolate_file_path=None):
  """Generate the dependency dir for the test suite using isolate.

  Runs isolate.py 'remap' to hardlink the suite's data dependencies into
  constants.ISOLATE_DEPS_DIR, prunes oversized deps, then rearranges .pak
  files and PRODUCT_DIR contents into the on-device layout.

  Args:
    suite_name: Name of the test suite (e.g. base_unittests).
    isolate_file_path: .isolate file path to use. If there is a default .isolate
      file path for the suite_name, this will override it.
  """
  # Always start from a clean deps dir.
  if os.path.isdir(constants.ISOLATE_DEPS_DIR):
    shutil.rmtree(constants.ISOLATE_DEPS_DIR)
  if isolate_file_path:
    if os.path.isabs(isolate_file_path):
      isolate_abs_path = isolate_file_path
    else:
      isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT,
                                      isolate_file_path)
  else:
    isolate_rel_path = _ISOLATE_FILE_PATHS.get(suite_name)
    if not isolate_rel_path:
      # No isolate file known for this suite: nothing to remap.
      logging.info('Did not find an isolate file for the test suite.')
      return
    isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT, isolate_rel_path)
  isolated_abs_path = os.path.join(
      constants.GetOutDirectory(), '%s.isolated' % suite_name)
  assert os.path.exists(isolate_abs_path)
  # This needs to be kept in sync with the cmd line options for isolate.py
  # in src/build/isolate.gypi.
  isolate_cmd = [
      'python', _ISOLATE_SCRIPT,
      'remap',
      '--isolate', isolate_abs_path,
      '--isolated', isolated_abs_path,
      '--outdir', constants.ISOLATE_DEPS_DIR,
      '--path-variable', 'DEPTH', constants.DIR_SOURCE_ROOT,
      '--path-variable', 'PRODUCT_DIR', constants.GetOutDirectory(),
      '--config-variable', 'OS', 'android',
      '--config-variable', 'chromeos', '0',
      '--config-variable', 'component', 'static_library',
      '--config-variable', 'icu_use_data_file_flag', '1',
      '--config-variable', 'use_openssl', '0',
  ]
  assert not cmd_helper.RunCmd(isolate_cmd)
  # We're relying on the fact that timestamps are preserved
  # by the remap command (hardlinked). Otherwise, all the data
  # will be pushed to the device once we move to using time diff
  # instead of md5sum. Perform a sanity check here.
  for root, _, filenames in os.walk(constants.ISOLATE_DEPS_DIR):
    if filenames:
      linked_file = os.path.join(root, filenames[0])
      orig_file = os.path.join(
          constants.DIR_SOURCE_ROOT,
          os.path.relpath(linked_file, constants.ISOLATE_DEPS_DIR))
      # Identical inode => hardlink; checking a single file suffices.
      if os.stat(linked_file).st_ino == os.stat(orig_file).st_ino:
        break
  else:
    raise Exception('isolate remap command did not use hardlinks.')
  # Delete excluded files as defined by _DEPS_EXCLUSION_LIST.
  old_cwd = os.getcwd()
  try:
    os.chdir(constants.ISOLATE_DEPS_DIR)
    excluded_paths = [x for y in _DEPS_EXCLUSION_LIST for x in glob.glob(y)]
    if excluded_paths:
      logging.info('Excluding the following from dependency list: %s',
                   excluded_paths)
    for p in excluded_paths:
      if os.path.isdir(p):
        shutil.rmtree(p)
      else:
        os.remove(p)
  finally:
    # Restore the working directory even if deletion fails.
    os.chdir(old_cwd)
  # On Android, all pak files need to be in the top-level 'paks' directory.
  paks_dir = os.path.join(constants.ISOLATE_DEPS_DIR, 'paks')
  os.mkdir(paks_dir)
  deps_out_dir = os.path.join(
      constants.ISOLATE_DEPS_DIR,
      os.path.relpath(os.path.join(constants.GetOutDirectory(), os.pardir),
                      constants.DIR_SOURCE_ROOT))
  for root, _, filenames in os.walk(deps_out_dir):
    for filename in fnmatch.filter(filenames, '*.pak'):
      shutil.move(os.path.join(root, filename), paks_dir)
  # Move everything in PRODUCT_DIR to top level.
  deps_product_dir = os.path.join(deps_out_dir, constants.GetBuildType())
  if os.path.isdir(deps_product_dir):
    for p in os.listdir(deps_product_dir):
      shutil.move(os.path.join(deps_product_dir, p), constants.ISOLATE_DEPS_DIR)
    os.rmdir(deps_product_dir)
  os.rmdir(deps_out_dir)
def _GetDisabledTestsFilterFromFile(suite_name):
"""Returns a gtest filter based on the *_disabled file.
Args:
suite_name: Name of the test suite (e.g. base_unittests).
Returns:
A gtest filter which excludes disabled tests.
Example: '*-StackTrace.*:StringPrintfTest.StringPrintfMisc'
"""
filter_file_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'filter', '%s_disabled' % suite_name)
if not filter_file_path or not os.path.exists(filter_file_path):
logging.info('No filter file found at %s', filter_file_path)
return '*'
filters = [x for x in [x.strip() for x in file(filter_file_path).readlines()]
if x and x[0] != '#']
disabled_filter = '*-%s' % ':'.join(filters)
logging.info('Applying filter "%s" obtained from %s',
disabled_filter, filter_file_path)
return disabled_filter
def _GetTests(test_options, test_package, devices):
  """Get a list of tests.

  Queries the attached devices themselves for the suite's test list by
  dispatching a fake 'gtest_list_tests' test through the normal runner
  machinery.

  Args:
    test_options: A GTestOptions object.
    test_package: A TestPackageApk object.
    devices: A list of attached devices.

  Returns:
    A list of all the tests in the test suite.
  """
  def TestListerRunnerFactory(device, _shard_index):
    # A TestRunner whose single "test" installs the suite on the device and
    # asks it to enumerate its gtests instead of running anything.
    class TestListerRunner(test_runner.TestRunner):
      def RunTest(self, _test):
        result = base_test_result.BaseTestResult(
            'gtest_list_tests', base_test_result.ResultType.PASS)
        self.test_package.Install(self.device)
        # Stash the enumerated test names on the result object so they can
        # be collected below.
        result.test_list = self.test_package.GetAllTests(self.device)
        results = base_test_result.TestRunResults()
        results.AddResult(result)
        # (results, retry) pair expected by the dispatcher; no retry needed.
        return results, None
    return TestListerRunner(test_options, device, test_package)
  results, _no_retry = test_dispatcher.RunTests(
      ['gtest_list_tests'], TestListerRunnerFactory, devices)
  # Merge the per-device lists. NOTE(review): lists from multiple devices
  # are concatenated as-is — presumably identical per device; confirm
  # callers tolerate duplicates.
  tests = []
  for r in results.GetAll():
    tests.extend(r.test_list)
  return tests
def _FilterTestsUsingPrefixes(all_tests, pre=False, manual=False):
"""Removes tests with disabled prefixes.
Args:
all_tests: List of tests to filter.
pre: If True, include tests with PRE_ prefix.
manual: If True, include tests with MANUAL_ prefix.
Returns:
List of tests remaining.
"""
filtered_tests = []
filter_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_']
if not pre:
filter_prefixes.append('PRE_')
if not manual:
filter_prefixes.append('MANUAL_')
for t in all_tests:
test_case, test = t.split('.', 1)
if not any([test_case.startswith(prefix) or test.startswith(prefix) for
prefix in filter_prefixes]):
filtered_tests.append(t)
return filtered_tests
def _FilterDisabledTests(tests, suite_name, has_gtest_filter):
  """Removes disabled tests from |tests|.

  Applies the following filters in order:
    1. Remove tests with disabled prefixes.
    2. Remove tests specified in the *_disabled files in the 'filter' dir

  Args:
    tests: List of tests.
    suite_name: Name of the test suite (e.g. base_unittests).
    has_gtest_filter: Whether a gtest_filter is provided.

  Returns:
    List of tests remaining.
  """
  # When an explicit gtest_filter was supplied, keep PRE_/MANUAL_ tests and
  # let the user's filter decide.
  remaining = _FilterTestsUsingPrefixes(
      tests, pre=has_gtest_filter, manual=has_gtest_filter)
  disabled_filter = _GetDisabledTestsFilterFromFile(suite_name)
  return unittest_util.FilterTestNames(remaining, disabled_filter)
def Setup(test_options, devices):
  """Create the test runner factory and tests.

  Args:
    test_options: A GTestOptions object.
    devices: A list of attached devices.

  Returns:
    A tuple of (TestRunnerFactory, tests).
  """
  # Prefer the APK flavor of the suite; fall back to the executable flavor.
  test_package = test_package_apk.TestPackageApk(test_options.suite_name)
  if not os.path.exists(test_package.suite_path):
    test_package = test_package_exe.TestPackageExecutable(
        test_options.suite_name)
    if not os.path.exists(test_package.suite_path):
      raise Exception(
          'Did not find %s target. Ensure it has been built.'
          % test_options.suite_name)
  logging.warning('Found target %s', test_package.suite_path)

  _GenerateDepsDirUsingIsolate(test_options.suite_name,
                               test_options.isolate_file_path)

  tests = _GetTests(test_options, test_package, devices)

  def TestRunnerFactory(device, _shard_index):
    # Constructs a new TestRunner with the current options.
    return test_runner.TestRunner(test_options, device, test_package)

  if test_options.run_disabled:
    new_args = ('%s --gtest_also_run_disabled_tests' %
                test_options.test_arguments)
    test_options = test_options._replace(test_arguments=new_args)
  else:
    tests = _FilterDisabledTests(tests, test_options.suite_name,
                                 bool(test_options.gtest_filter))

  if test_options.gtest_filter:
    tests = unittest_util.FilterTestNames(tests, test_options.gtest_filter)

  # Coalesce unit tests into a single colon-joined test spec per device.
  if test_options.suite_name != 'content_browsertests':
    device_count = len(devices)
    shards = (':'.join(tests[i::device_count]) for i in xrange(device_count))
    tests = [shard for shard in shards if shard]

  return (TestRunnerFactory, tests)
|
{
"content_hash": "3ee766ebc7d1b8dbf91e9277c63fd577",
"timestamp": "",
"source": "github",
"line_count": 348,
"max_line_length": 80,
"avg_line_length": 35.8448275862069,
"alnum_prop": 0.6805355138688473,
"repo_name": "AndroidOpenDevelopment/android_external_chromium_org",
"id": "e89846e3b1d46b60a7e09372084c73740e32a141",
"size": "12637",
"binary": false,
"copies": "8",
"ref": "refs/heads/lp",
"path": "build/android/pylib/gtest/setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Assembly",
"bytes": "24741"
},
{
"name": "C",
"bytes": "3938821"
},
{
"name": "C++",
"bytes": "200378039"
},
{
"name": "CSS",
"bytes": "942505"
},
{
"name": "Java",
"bytes": "5192594"
},
{
"name": "JavaScript",
"bytes": "11002659"
},
{
"name": "Makefile",
"bytes": "20865646"
},
{
"name": "Objective-C",
"bytes": "1198443"
},
{
"name": "Objective-C++",
"bytes": "7082902"
},
{
"name": "PHP",
"bytes": "61320"
},
{
"name": "Perl",
"bytes": "69392"
},
{
"name": "Python",
"bytes": "6310657"
},
{
"name": "Rebol",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "470717"
},
{
"name": "Standard ML",
"bytes": "1589"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "15206"
}
],
"symlink_target": ""
}
|
from insulaudit import core
from insulaudit.data import glucose
from insulaudit.console import device
import proto
class OnetouchApp(device.LinkCommand):
    """Onetouch compatible lifescan devices."""

    name = 'onetouch'

    def link_factory(self):
        """Return the link class used to talk to the device."""
        return proto.Link

    def setup(self, parser):
        """Add the --output option (defaults to stdout) to the arg parser."""
        import argparse, sys
        # BUG FIX: the original called super(type(self), self), which recurses
        # infinitely if this class is ever subclassed; Python 2 requires
        # naming the class explicitly.
        super(OnetouchApp, self).setup(parser)
        parser.add_argument('--output', type=argparse.FileType('w'),
                            default=sys.stdout)

    def getFlows(self):
        """Return the flows this app can run."""
        return [ HelloFlow, sugars ]

    def title(self):
        return "onetouch - talk with Lifescan OneTouch compatible devices."

    def help(self):
        return "talk with Lifescan OneTouch compatible devices"

    def subcommand_manufacturer(self, flow):
        """Wrap a flow in an OTCommand bound to this handler."""
        return OTCommand(flow, self)
class OTCommand(device.FlowCommand):
    """Flow command that opens a link via the selected handler's factory."""

    def setup_link(self, port):
        # Log the port, then instantiate the selected handler's link class
        # with a 5 second timeout.
        self.log.info('setting up %s' % port)
        link_cls = self.handler.selected.link_factory()
        return link_cls(port, 5)
class HelloFlow(core.Flow):
    """Hello world for Lifescan onetouch compatible devices.

    Can we reliably exchange bytes?
    """
    name = 'hello'

    def flow(self, session):
        # Read and report the meter's serial number, then its firmware
        # revision, over the session's link.  Results go both to stdout
        # and to the session log.
        link = session.link
        serial = link.execute( proto.ReadSerial( ) )
        print "serial number: %s" % serial
        session.log.info("serial number: %s" % serial)
        firmware = link.execute( proto.ReadFirmware( ) )
        print "firmware: %s" % firmware
        session.log.info("firmware: %s" % firmware)
        session.log.info('done')
class sugars(core.Flow):
"""Dump the sugars to stdout
Can we reliably exchange bytes?
"""
#name = 'sugars'
def get_out_file(self):
return self.session.handler.handler.params.output
return sys.stdout
def flow(self, session):
link = session.link
serial = link.execute( proto.ReadSerial( ) )
data = link.read_glucose( )
#print data
print "len glucose: %s" % len( data )
head, body = data
records = glucose.format_records( body )
print head
print records
self.get_out_file( ).write(records)
#####
# EOF
|
{
"content_hash": "f1041b035a3c642e4c5bc801c577202e",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 71,
"avg_line_length": 28,
"alnum_prop": 0.6761583011583011,
"repo_name": "bewest/insulaudit",
"id": "6a44d2033652c488e504dfe76e48dae4cde68e63",
"size": "2073",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/insulaudit/devices/onetouch/console.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "253426"
},
{
"name": "Shell",
"bytes": "897"
},
{
"name": "VimL",
"bytes": "4333"
}
],
"symlink_target": ""
}
|
"""Tests for lead_sheet_pipelines."""
from absl.testing import absltest
from magenta.common import testing_lib as common_testing_lib
from magenta.pipelines import chord_pipelines
from magenta.pipelines import lead_sheet_pipelines
from magenta.pipelines import melody_pipelines
from note_seq import chords_lib
from note_seq import constants
from note_seq import lead_sheets_lib
from note_seq import melodies_lib
from note_seq import sequences_lib
from note_seq import testing_lib as music_testing_lib
from note_seq.protobuf import music_pb2
NOTE_OFF = constants.MELODY_NOTE_OFF
NO_EVENT = constants.MELODY_NO_EVENT
NO_CHORD = constants.NO_CHORD
class LeadSheetPipelinesTest(absltest.TestCase):
  """Unit tests for lead sheet extraction pipelines."""

  def setUp(self):
    # Default quantization resolution used by the extraction tests.
    self.steps_per_quarter = 4
    # Shared 4/4, 60 qpm NoteSequence skeleton that the tests add notes
    # and chord annotations to.
    self.note_sequence = music_testing_lib.parse_test_proto(
        music_pb2.NoteSequence,
        """
        time_signatures: {
          numerator: 4
          denominator: 4
        }
        tempos: {
          qpm: 60
        }
        """)

  def _unit_transform_test(self, unit, input_instance,
                           expected_outputs):
    # Run the pipeline unit and check the produced set of outputs and the
    # declared input/output types.
    outputs = unit.transform(input_instance)
    self.assertIsInstance(outputs, list)
    common_testing_lib.assert_set_equality(self, expected_outputs, outputs)
    self.assertEqual(unit.input_type, type(input_instance))
    if outputs:
      self.assertEqual(unit.output_type, type(outputs[0]))

  def testLeadSheetExtractor(self):
    # Builds a two-track sequence with chord annotations and checks the
    # LeadSheetExtractor unit against manually constructed lead sheets.
    note_sequence = common_testing_lib.parse_test_proto(
        music_pb2.NoteSequence,
        """
        time_signatures: {
          numerator: 4
          denominator: 4}
        tempos: {
          qpm: 60}""")
    music_testing_lib.add_track_to_sequence(
        note_sequence, 0,
        [(12, 100, 2, 4), (11, 1, 6, 7)])
    music_testing_lib.add_track_to_sequence(
        note_sequence, 1,
        [(12, 127, 2, 4), (14, 50, 6, 8)])
    music_testing_lib.add_chords_to_sequence(
        note_sequence,
        [('Cm7', 2), ('F9', 4), ('G7b9', 6)])
    quantized_sequence = sequences_lib.quantize_note_sequence(
        note_sequence, steps_per_quarter=1)
    # One expected melody/chord pair per instrument track.
    expected_melody_events = [
        [NO_EVENT, NO_EVENT, 12, NO_EVENT, NOTE_OFF, NO_EVENT, 11],
        [NO_EVENT, NO_EVENT, 12, NO_EVENT, NOTE_OFF, NO_EVENT, 14, NO_EVENT]]
    expected_chord_events = [
        [NO_CHORD, NO_CHORD, 'Cm7', 'Cm7', 'F9', 'F9', 'G7b9'],
        [NO_CHORD, NO_CHORD, 'Cm7', 'Cm7', 'F9', 'F9', 'G7b9', 'G7b9']]
    expected_lead_sheets = []
    for melody_events, chord_events in zip(expected_melody_events,
                                           expected_chord_events):
      melody = melodies_lib.Melody(
          melody_events, steps_per_quarter=1, steps_per_bar=4)
      chords = chords_lib.ChordProgression(
          chord_events, steps_per_quarter=1, steps_per_bar=4)
      lead_sheet = lead_sheets_lib.LeadSheet(melody, chords)
      expected_lead_sheets.append(lead_sheet)
    unit = lead_sheet_pipelines.LeadSheetExtractor(
        min_bars=1, min_unique_pitches=1, gap_bars=1, all_transpositions=False)
    self._unit_transform_test(unit, quantized_sequence, expected_lead_sheets)

  def testExtractLeadSheetFragments(self):
    # The lead sheet fragments should pair up exactly with the melodies and
    # chord progressions extracted independently.
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, .5, 1), (11, 1, 1.5, 2.75)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, .5, 1), (14, 50, 1.5, 2),
         (50, 100, 8.25, 9.25), (52, 100, 8.5, 9.25)])
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', .5), ('G7', 1.5), ('Cmaj7', 8.25)])
    quantized_sequence = sequences_lib.quantize_note_sequence(
        self.note_sequence, self.steps_per_quarter)
    lead_sheets, _ = lead_sheet_pipelines.extract_lead_sheet_fragments(
        quantized_sequence, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True, require_chords=True)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized_sequence, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, _ = chord_pipelines.extract_chords_for_melodies(
        quantized_sequence, melodies)
    self.assertEqual(list(melodies),
                     list(lead_sheet.melody for lead_sheet in lead_sheets))
    self.assertEqual(list(chord_progressions),
                     list(lead_sheet.chords for lead_sheet in lead_sheets))

  def testExtractLeadSheetFragmentsCoincidentChords(self):
    # Two chords at the same time step ('Cmaj7' and 'F' at 33) should cause
    # the corresponding fragment to be rejected.
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, 2, 4), (11, 1, 6, 11)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, 2, 4), (14, 50, 6, 8),
         (50, 100, 33, 37), (52, 100, 34, 37)])
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', 2), ('G7', 6), ('Cmaj7', 33), ('F', 33)])
    quantized_sequence = sequences_lib.quantize_note_sequence(
        self.note_sequence, steps_per_quarter=1)
    lead_sheets, _ = lead_sheet_pipelines.extract_lead_sheet_fragments(
        quantized_sequence, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True, require_chords=True)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized_sequence, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, _ = chord_pipelines.extract_chords_for_melodies(
        quantized_sequence, melodies)
    # Last lead sheet should be rejected for coincident chords.
    self.assertEqual(list(melodies[:2]),
                     list(lead_sheet.melody for lead_sheet in lead_sheets))
    self.assertEqual(list(chord_progressions[:2]),
                     list(lead_sheet.chords for lead_sheet in lead_sheets))

  def testExtractLeadSheetFragmentsNoChords(self):
    # A fragment whose chord progression is entirely NO_CHORD should be
    # dropped and counted in the 'empty_chord_progressions' stat.
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, 2, 4), (11, 1, 6, 11)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, 2, 4), (14, 50, 6, 8),
         (50, 100, 33, 37), (52, 100, 34, 37)])
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', 2), ('G7', 6), (NO_CHORD, 10)])
    quantized_sequence = sequences_lib.quantize_note_sequence(
        self.note_sequence, steps_per_quarter=1)
    lead_sheets, stats = lead_sheet_pipelines.extract_lead_sheet_fragments(
        quantized_sequence, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True, require_chords=True)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized_sequence, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, _ = chord_pipelines.extract_chords_for_melodies(
        quantized_sequence, melodies)
    stats_dict = dict((stat.name, stat) for stat in stats)
    # Last lead sheet should be rejected for having no chords.
    self.assertEqual(list(melodies[:2]),
                     list(lead_sheet.melody for lead_sheet in lead_sheets))
    self.assertEqual(list(chord_progressions[:2]),
                     list(lead_sheet.chords for lead_sheet in lead_sheets))
    self.assertEqual(stats_dict['empty_chord_progressions'].count, 1)
# Allow running this test module directly.
if __name__ == '__main__':
  absltest.main()
|
{
"content_hash": "5c3a677e2429d445042a8880b6d0b0a3",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 79,
"avg_line_length": 43.714285714285715,
"alnum_prop": 0.6401143790849673,
"repo_name": "magenta/magenta",
"id": "a11a83984d11797149e9613cae3f90436c827ca9",
"size": "7929",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "magenta/pipelines/lead_sheet_pipelines_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2717663"
},
{
"name": "Shell",
"bytes": "22832"
}
],
"symlink_target": ""
}
|
import fixtures
import logging
import mock
import os
import subprocess
import diskimage_builder.block_device.tests.test_base as tb
from diskimage_builder.block_device.level0.localloop import image_create
from diskimage_builder.block_device.level1.mbr import MBR
logger = logging.getLogger(__name__)
class TestMBR(tb.TestBase):
    """Integration tests for the MBR partition writer.

    Each test creates a sparse 1G image file, writes a partition layout
    with MBR, and verifies the resulting table using util-linux's partx.
    """

    disk_size_10M = 10 * 1024 * 1024
    disk_size_1G = 1024 * 1024 * 1024

    def _get_path_for_partx(self):
        """Return the path of the partx binary.

        Because different distributions store the partx binary at
        different places, probe the usual locations; fall back to a bare
        "partx" and rely on $PATH resolution.
        """
        dirs = ["/bin", "/usr/bin", "/sbin", "/usr/sbin"]
        for d in dirs:
            candidate = os.path.join(d, "partx")
            if os.path.exists(candidate):
                return candidate
        # BUG FIX: a stray bare `return` previously made this fallback
        # unreachable, so the method returned None when partx was not
        # found in the probed directories.
        return "partx"

    def setUp(self):
        super(TestMBR, self).setUp()
        self.tmp_dir = fixtures.TempDir()
        self.useFixture(self.tmp_dir)
        self.image_path = os.path.join(self.tmp_dir.path, "image.raw")
        image_create(self.image_path, TestMBR.disk_size_1G)
        logger.debug("Temp image is %s", self.image_path)
        # partx command line reused by every test; "-" output goes to stdout.
        self.partx_args = [self._get_path_for_partx(), "--raw",
                           "--output", "NR,START,END,TYPE,FLAGS,SCHEME",
                           "-g", "-b", "-", self.image_path]

    def _run_partx(self, image_path):
        """Run partx on the test image and return its decoded output.

        NOTE(review): image_path is accepted for symmetry with callers but
        the command line was already built from self.image_path in setUp.
        """
        logger.info("Running command: %s", self.partx_args)
        return subprocess.check_output(self.partx_args).decode("ascii")

    @mock.patch('os.fsync', wraps=os.fsync)
    def test_one_ext_partition(self, mock_os_fsync):
        """Creates one partition and check correctness with partx."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024) as mbr:
            mbr.add_partition(False, False, TestMBR.disk_size_10M, 0x83)
        # the exit handler of MBR should have synced the raw device
        # before exit
        mock_os_fsync.assert_called()
        output = self._run_partx(self.image_path)
        self.assertEqual(
            "1 2048 2097151 0xf 0x0 dos\n"
            "5 4096 24575 0x83 0x0 dos\n", output)

    def test_zero_partitions(self):
        """Creates no partition and check correctness with partx."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024):
            pass
        output = self._run_partx(self.image_path)
        self.assertEqual("", output)

    def test_many_ext_partitions(self):
        """Creates many partition and check correctness with partx."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024) as mbr:
            for _ in range(64):
                mbr.add_partition(False, False, TestMBR.disk_size_10M, 0x83)
        output = self._run_partx(self.image_path)
        lines = output.split("\n")
        # 64 logical partitions + the extended container + trailing newline.
        self.assertEqual(66, len(lines))
        self.assertEqual(
            "1 2048 2097151 0xf 0x0 dos", lines[0])
        start_block = 4096
        # Use floor division: block counts are integers (the original used
        # `/`, which yields a float on Python 3 with the same value).
        end_block = start_block + TestMBR.disk_size_10M // 512 - 1
        for nr in range(1, 65):
            fields = lines[nr].split(" ")
            self.assertEqual(6, len(fields))
            self.assertEqual(nr + 4, int(fields[0]))
            self.assertEqual(start_block, int(fields[1]))
            self.assertEqual(end_block, int(fields[2]))
            self.assertEqual("0x83", fields[3])
            self.assertEqual("0x0", fields[4])
            self.assertEqual("dos", fields[5])
            start_block += 22528
            end_block = start_block + TestMBR.disk_size_10M // 512 - 1

    def test_one_pri_partition(self):
        """Creates one primary partition and check correctness with partx."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024) as mbr:
            mbr.add_partition(True, False, TestMBR.disk_size_10M, 0x83)
        output = self._run_partx(self.image_path)
        self.assertEqual(
            "1 2048 22527 0x83 0x0 dos\n", output)

    def test_three_pri_partition(self):
        """Creates three primary partition and check correctness with partx."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024) as mbr:
            for _ in range(3):
                mbr.add_partition(True, False, TestMBR.disk_size_10M, 0x83)
        output = self._run_partx(self.image_path)
        self.assertEqual(
            "1 2048 22527 0x83 0x0 dos\n"
            "2 22528 43007 0x83 0x0 dos\n"
            "3 43008 63487 0x83 0x0 dos\n", output)

    def test_many_pri_and_ext_partition(self):
        """Creates many primary and extended partitions."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024) as mbr:
            # Create three primary partitions
            for _ in range(3):
                mbr.add_partition(True, False, TestMBR.disk_size_10M, 0x83)
            for _ in range(7):
                mbr.add_partition(False, False, TestMBR.disk_size_10M, 0x83)
        output = self._run_partx(self.image_path)
        self.assertEqual(
            "1 2048 22527 0x83 0x0 dos\n"    # Primary 1
            "2 22528 43007 0x83 0x0 dos\n"   # Primary 2
            "3 43008 63487 0x83 0x0 dos\n"   # Primary 3
            "4 63488 2097151 0xf 0x0 dos\n"  # Extended
            "5 65536 86015 0x83 0x0 dos\n"   # Extended Partition 1
            "6 88064 108543 0x83 0x0 dos\n"  # Extended Partition 2
            "7 110592 131071 0x83 0x0 dos\n" # ...
            "8 133120 153599 0x83 0x0 dos\n"
            "9 155648 176127 0x83 0x0 dos\n"
            "10 178176 198655 0x83 0x0 dos\n"
            "11 200704 221183 0x83 0x0 dos\n", output)

    def test_pri_fat32_lba_partition(self):
        """Creates a partition with a non-default 'type' and verifies."""
        with MBR(self.image_path, TestMBR.disk_size_1G, 1024 * 1024) as mbr:
            mbr.add_partition(True, False, TestMBR.disk_size_10M, 0x0c)
        output = self._run_partx(self.image_path)
        self.assertEqual(
            "1 2048 22527 0xc 0x0 dos\n", output)
|
{
"content_hash": "3309703f18334f0e2c3996771040322b",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 79,
"avg_line_length": 37.58641975308642,
"alnum_prop": 0.594843159796354,
"repo_name": "openstack/diskimage-builder",
"id": "0645ee51560cece8971ce6727e4ce2a0c0291ea3",
"size": "6634",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "diskimage_builder/block_device/tests/test_mbr.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "56"
},
{
"name": "Jinja",
"bytes": "2072"
},
{
"name": "Python",
"bytes": "312033"
},
{
"name": "Shell",
"bytes": "413132"
}
],
"symlink_target": ""
}
|
# NEEDS FIXING
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,hashlib,random,string,json,base64,sys
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import cache
from resources.lib.modules import directstream
from resources.lib.modules import jsunfuck
CODE = '''def retA():
class Infix:
def __init__(self, function):
self.function = function
def __ror__(self, other):
return Infix(lambda x, self=self, other=other: self.function(other, x))
def __or__(self, other):
return self.function(other)
def __rlshift__(self, other):
return Infix(lambda x, self=self, other=other: self.function(other, x))
def __rshift__(self, other):
return self.function(other)
def __call__(self, value1, value2):
return self.function(value1, value2)
def my_add(x, y):
try: return x + y
except Exception: return str(x) + str(y)
x = Infix(my_add)
return %s
param = retA()'''
class source:
    """Scraper for solarmoviez.to: resolves movie/episode pages to
    direct (Google video) stream sources.

    NOTE(review): the scraping logic below is tightly coupled to the
    site's markup and obfuscation schemes; kept byte-identical.
    """

    def __init__(self):
        self.priority = 1
        self.language = ['en']
        self.domains = ['solarmoviez.to']
        self.base_link = 'https://solarmoviez.to'
        # AJAX endpoint templates filled in with a movie/episode id.
        self.search_link = '/movie/search/%s.html'
        self.info_link = '/ajax/movie_info/%s.html?is_login=false'
        self.server_link = '/ajax/v4_movie_episodes/%s'
        self.embed_link = '/ajax/movie_embed/%s'
        self.token_link = '/ajax/movie_token?eid=%s&mid=%s'
        self.source_link = '/ajax/movie_sources/%s?x=%s&y=%s'

    def matchAlias(self, title, aliases):
        # True when `title` matches any alias title after normalization.
        try:
            for alias in aliases:
                if cleantitle.get(title) == cleantitle.get(alias['title']):
                    return True
        except:
            return False

    def movie(self, imdb, title, localtitle, aliases, year):
        # Encode the movie identity as a query string; decoded in sources().
        try:
            aliases.append({'country': 'us', 'title': title})
            url = {'imdb': imdb, 'title': title, 'year': year, 'aliases': aliases}
            url = urllib.urlencode(url)
            return url
        except:
            return

    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        # Encode the show identity as a query string; episode() adds details.
        try:
            aliases.append({'country': 'us', 'title': tvshowtitle})
            url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'aliases': aliases}
            url = urllib.urlencode(url)
            return url
        except:
            return

    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        # Augment the show's query string with episode-level fields.
        try:
            if url == None: return
            url = urlparse.parse_qs(url)
            url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
            url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
            url = urllib.urlencode(url)
            return url
        except:
            return

    def searchShow(self, title, season, aliases, headers):
        # Search "<title> Season <n>" and return the first result whose
        # parsed title matches an alias and whose season number matches.
        try:
            title = cleantitle.normalize(title)
            search = '%s Season %01d' % (title, int(season))
            url = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(cleantitle.getsearch(search)))
            r = client.request(url, headers=headers, timeout='15')
            r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
            r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'))
            r = [(i[0], i[1], re.findall('(.*?)\s+-\s+Season\s+(\d)', i[1])) for i in r]
            r = [(i[0], i[1], i[2][0]) for i in r if len(i[2]) > 0]
            url = [i[0] for i in r if self.matchAlias(i[2][0], aliases) and i[2][1] == season][0]
            return url
        except:
            return

    def searchMovie(self, title, year, aliases, headers):
        # Search by title; prefer a year-matching result, otherwise fall
        # back to the first alias match regardless of year.
        try:
            title = cleantitle.normalize(title)
            url = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(cleantitle.getsearch(title)))
            r = client.request(url, headers=headers, timeout='15')
            r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
            r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'))
            results = [(i[0], i[1], re.findall('\((\d{4})', i[1])) for i in r]
            try:
                r = [(i[0], i[1], i[2][0]) for i in results if len(i[2]) > 0]
                url = [i[0] for i in r if self.matchAlias(i[1], aliases) and (year == i[2])][0]
            except:
                url = None
                pass
            if (url == None):
                url = [i[0] for i in results if self.matchAlias(i[1], aliases)][0]
            return url
        except:
            return

    def sources(self, url, hostDict, hostprDict):
        # Resolve the encoded identity to a page, enumerate its server
        # entries, defeat the token obfuscation, and collect gvideo streams.
        try:
            sources = []
            if url is None: return sources
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
            # SECURITY NOTE: eval() of a (self-produced) urlencoded field;
            # would be unsafe on untrusted input.
            aliases = eval(data['aliases'])
            headers = {}
            if 'tvshowtitle' in data:
                episode = int(data['episode'])
                url = self.searchShow(data['tvshowtitle'], data['season'], aliases, headers)
            else:
                episode = 0
                url = self.searchMovie(data['title'], data['year'], aliases, headers)
            # The numeric movie id is the trailing "-<digits>" of the URL.
            mid = re.findall('-(\d+)', url)[-1]
            try:
                headers = {'Referer': url}
                u = urlparse.urljoin(self.base_link, self.server_link % mid)
                r = client.request(u, headers=headers, XHR=True)
                r = json.loads(r)['html']
                r = client.parseDOM(r, 'div', attrs = {'class': 'pas-list'})
                ids = client.parseDOM(r, 'li', ret='data-id')
                servers = client.parseDOM(r, 'li', ret='data-server')
                labels = client.parseDOM(r, 'a', ret='title')
                r = zip(ids, servers, labels)
                for eid in r:
                    try:
                        try:
                            # Episode number embedded in the entry label.
                            ep = re.findall('episode.*?(\d+).*?', eid[2].lower())[0]
                        except:
                            ep = 0
                        if (episode == 0) or (int(ep) == episode):
                            url = urlparse.urljoin(self.base_link, self.token_link % (eid[0], mid))
                            script = client.request(url)
                            # Three known obfuscation schemes for the x/y
                            # token parameters.
                            if '$_$' in script:
                                params = self.uncensored1(script)
                            elif script.startswith('[]') and script.endswith('()'):
                                params = self.uncensored2(script)
                            elif '_x=' in script:
                                x = re.search('''_x=['"]([^"']+)''', script).group(1)
                                y = re.search('''_y=['"]([^"']+)''', script).group(1)
                                params = {'x': x, 'y': y}
                            else:
                                raise Exception()
                            u = urlparse.urljoin(self.base_link, self.source_link % (eid[0], params['x'], params['y']))
                            r = client.request(u, XHR=True)
                            url = json.loads(r)['playlist'][0]['sources']
                            url = [i['file'] for i in url if 'file' in i]
                            url = [directstream.googletag(i) for i in url]
                            url = [i[0] for i in url if i]
                            for s in url:
                                sources.append({'source': 'gvideo', 'quality': s['quality'], 'language': 'en',
                                                'url': s['url'], 'direct': True, 'debridonly': False})
                    except:
                        pass
            except:
                pass
            return sources
        except:
            return sources

    def resolve(self, url):
        # Turn a collected source URL into a playable one; Google links are
        # retried up to 3 times through directstream.googlepass.
        try:
            if self.embed_link in url:
                result = client.request(url, XHR=True)
                url = json.loads(result)['embed_url']
                return url
            try:
                for i in range(3):
                    u = directstream.googlepass(url)
                    if not u == None: break
                return u
            except:
                return
        except:
            return

    def uncensored(a, b):
        # NOTE(review): defined without `self` and apparently unused by the
        # other methods in this class; kept as-is.
        x = '' ; i = 0
        for i, y in enumerate(a):
            z = b[i % len(b) - 1]
            y = int(ord(str(y)[0])) + int(ord(str(z)[0]))
            x += chr(y)
        x = base64.b64encode(x)
        return x

    def uncensored1(self, script):
        # Decode the "$_$"-style obfuscated token script: normalize its
        # jsfuck-like atoms, evaluate it via the CODE template, and extract
        # the _x/_y token parameters.
        try:
            script = '(' + script.split("(_$$)) ('_');")[0].split("/* `$$` */")[-1].strip()
            script = script.replace('(__$)[$$$]', '\'"\'')
            script = script.replace('(__$)[_$]', '"\\\\"')
            script = script.replace('(o^_^o)', '3')
            script = script.replace('(c^_^o)', '0')
            script = script.replace('(_$$)', '1')
            script = script.replace('($$_)', '4')
            vGlobals = {"__builtins__": None, '__name__': __name__, 'str': str, 'Exception': Exception}
            vLocals = {'param': None}
            exec (CODE % script.replace('+', '|x|'), vGlobals, vLocals)
            data = vLocals['param'].decode('string_escape')
            x = re.search('''_x=['"]([^"']+)''', data).group(1)
            y = re.search('''_y=['"]([^"']+)''', data).group(1)
            return {'x': x, 'y': y}
        except:
            pass

    def uncensored2(self, script):
        # Decode the jsfuck-style obfuscated token script via jsunfuck and
        # extract the _x/_y token parameters.
        try:
            js = jsunfuck.JSUnfuck(script).decode()
            x = re.search('''_x=['"]([^"']+)''', js).group(1)
            y = re.search('''_y=['"]([^"']+)''', js).group(1)
            return {'x': x, 'y': y}
        except:
            pass
|
{
"content_hash": "e2dcb8b209cf92fb3a40b59456c59815",
"timestamp": "",
"source": "github",
"line_count": 259,
"max_line_length": 119,
"avg_line_length": 41.15830115830116,
"alnum_prop": 0.4841463414634146,
"repo_name": "TheWardoctor/Wardoctors-repo",
"id": "67ec76632a772a94e65531d31fe1c4652dab9fc1",
"size": "10660",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "script.module.covenant/lib/resources/lib/sources/en/solarmovie.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3208"
},
{
"name": "JavaScript",
"bytes": "115722"
},
{
"name": "Python",
"bytes": "34405207"
},
{
"name": "Shell",
"bytes": "914"
}
],
"symlink_target": ""
}
|
import unittest
import random, sys, time, re
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_glm, h2o_util, h2o_rf
class Basic(unittest.TestCase):
    """Single-node SpeeDRF smoke test on the MNIST digit CSVs (parse, train, score, predict)."""
    def tearDown(self):
        # Fail the test if H2O wrote anything suspicious to its sandbox logs.
        h2o.check_sandbox_for_errors()
    @classmethod
    def setUpClass(cls):
        # assume we're at 0xdata with it's hdfs namenode
        h2o.init(1, java_heap_GB=14)
    @classmethod
    def tearDownClass(cls):
        # Shut down the single-node H2O cloud started in setUpClass.
        h2o.tear_down_cloud()
    def test_RF_mnist_both(self):
        """Parse MNIST train+test data, run SpeeDRF per trial, and compare tree stats to expected values."""
        importFolderPath = "mnist"
        # Each tuple: (train file, test file, timeoutSecs, rfSeed or None, parse glob).
        csvFilelist = [
            # ("mnist_training.csv.gz", "mnist_testing.csv.gz", 600, 784834182943470027),
            ("mnist_training.csv.gz", "mnist_testing.csv.gz", 600, None, '*mnist*gz'),
            # to see results a 2nd time
            ("mnist_training.csv.gz", "mnist_testing.csv.gz", 600, None, '*mnist*gz'),
        ]
        # IMPORT**********************************************
        # since H2O deletes the source key, we should re-import every iteration if we re-use the src in the list
        (importFolderResult, importPattern) = h2i.import_only(bucket='home-0xdiag-datasets', path=importFolderPath + "/*")
        ### print "importHDFSResult:", h2o.dump_json(importFolderResult)
        # The result key name differs between H2O API versions.
        if 'files' in importFolderResult:
            succeededList = importFolderResult['files']
        else:
            succeededList = importFolderResult['succeeded']
        ### print "succeededList:", h2o.dump_json(succeededList)
        self.assertGreater(len(succeededList),1,"Should see more than 1 files in the import?")
        # why does this hang? can't look at storeview after import?
        print "\nTrying StoreView after the import folder"
        h2o_cmd.runStoreView(timeoutSecs=30)
        trial = 0
        allDelta = []
        for (trainCsvFilename, testCsvFilename, timeoutSecs, rfSeed, parsePattern) in csvFilelist:
            trialStart = time.time()
            # PARSE test****************************************
            testKey2 = testCsvFilename + "_" + str(trial) + ".hex"
            start = time.time()
            parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=importFolderPath+"/"+testCsvFilename,
                hex_key=testKey2, timeoutSecs=timeoutSecs)
            elapsed = time.time() - start
            print "parse end on ", testCsvFilename, 'took', elapsed, 'seconds',\
                "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
            print "parse result:", parseResult['destination_key']
            print "We won't use this pruning of x on test data. See if it prunes the same as the training"
            y = 0 # first column is pixel value
            x = h2o_glm.goodXFromColumnInfo(y, key=parseResult['destination_key'], timeoutSecs=300)
            # PARSE train****************************************
            print "Use multi-file parse to grab both the mnist_testing.csv.gz and mnist_training.csv.gz for training"
            trainKey2 = trainCsvFilename + "_" + str(trial) + ".hex"
            start = time.time()
            parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=importFolderPath+"/"+parsePattern,
                hex_key=trainKey2, timeoutSecs=timeoutSecs)
            elapsed = time.time() - start
            print "parse end on ", trainCsvFilename, 'took', elapsed, 'seconds',\
                "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
            print "parse result:", parseResult['destination_key']
            # RF+RFView (train)****************************************
            # print "This is the 'ignore=' we'll use"
            # no longer use. depend on h2o to get it right.
            ntree = 25
            params = {
                'response': 0,
                'ntrees': ntree,
                # 'data_key='mnist_training.csv.hex'
                'mtries': 28, # fix because we ignore some cols, which will change the srt(cols) calc?
                'max_depth': 2147483647,
                'select_stat_type': 'ENTROPY',
                'sampling_strategy': 'RANDOM',
                'sample_rate': 0.67,
                'oobee': 1,
                # 'model_key': '__RFModel_7055e6cf-a0de-44db-b165-f5994730ac77',
                'destination_key': 'RF_model',
                'nbins': 1024,
                # 'seed': 784834182943470027,
                # 'class_weights': '0=1.0,1=1.0,2=1.0,3=1.0,4=1.0,5=1.0,6=1.0,7=1.0,8=1.0,9=1.0',
            }
            # Use a random seed unless the trial tuple pinned one (rfSeed).
            if rfSeed is None:
                params['seed'] = random.randint(0,sys.maxint)
            else:
                params['seed'] = rfSeed
            print "RF seed:", params['seed']
            kwargs = params.copy()
            print "Trying rf"
            timeoutSecs = 1800
            start = time.time()
            rfView = h2o_cmd.runSpeeDRF(parseResult=parseResult,
                timeoutSecs=timeoutSecs, pollTimeoutSecs=180, retryDelaySecs=2, **kwargs)
            elapsed = time.time() - start
            print "RF completed in", elapsed, "seconds.", \
                "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
            # RFView (score on test)****************************************
            (classification_error, classErrorPctList, totalScores) = h2o_rf.simpleCheckRFView(None, rfView, **params)
            # was 2.84
            # sometimes get 2.87?
            self.assertAlmostEqual(classification_error, 1.6, delta=1.6,
                msg="Classification error %s differs too much" % classification_error)
            treeStats = rfView['speedrf_model']['treeStats']
            leaves = {'min': treeStats['minLeaves'], 'mean': treeStats['meanLeaves'], 'max': treeStats['maxLeaves']}
            # Expected values are from this case:
            # ("mnist_training.csv.gz", "mnist_testing.csv.gz", 600, 784834182943470027),
            leavesExpected = {'min': 4996, 'mean': 5064.1, 'max': 5148}
            for l in leaves:
                # self.assertAlmostEqual(leaves[l], leavesExpected[l], delta=10, msg="leaves %s %s %s differs too much" % (l, leaves[l], leavesExpected[l]))
                delta = ((leaves[l] - leavesExpected[l])/leaves[l]) * 100
                d = "seed: %s %s leaves: %s expected: %s pct. different %s" % (params['seed'], l, leaves[l], leavesExpected[l], delta)
                print d
                allDelta.append(d)
            depth = {'min': treeStats['minDepth'], 'mean': treeStats['meanDepth'], 'max': treeStats['maxDepth']}
            depthExpected = {'min': 21, 'mean': 23.8, 'max': 25}
            for l in depth:
                # self.assertAlmostEqual(depth[l], depthExpected[l], delta=1, msg="depth %s %s %s differs too much" % (l, depth[l], depthExpected[l]))
                # NOTE(review): divides by leaves[l], not depth[l] — looks like a
                # copy/paste slip from the leaves loop; confirm intended.
                delta = ((depth[l] - depthExpected[l])/leaves[l]) * 100
                d = "seed: %s %s depth: %s expected: %s pct. different %s" % (params['seed'], l, depth[l], depthExpected[l], delta)
                print d
                allDelta.append(d)
            # Predict (on test)****************************************
            start = time.time()
            modelKey = rfView['speedrf_model']['_key']
            predict = h2o.nodes[0].generate_predictions(model_key=modelKey, data_key=testKey2, timeoutSecs=timeoutSecs)
            elapsed = time.time() - start
            print "generate_predictions in", elapsed, "secs", \
                "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
        # Done *******************************************************
        print "\nShowing the results again from all the trials, to see variance"
        for d in allDelta:
            print d
# Standard H2O test entry point when the file is run directly.
if __name__ == '__main__':
    h2o.unit_main()
|
{
"content_hash": "6bf87ae2863b300564ac03e0c2b30ba3",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 156,
"avg_line_length": 50.86184210526316,
"alnum_prop": 0.5441728107618679,
"repo_name": "eg-zhang/h2o-2",
"id": "5b6588e35e343b04083ad1a14a5c145b6d0f2737",
"size": "7731",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "py/testdir_single_jvm/test_speedrf_mnist_both.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7065"
},
{
"name": "C",
"bytes": "2461"
},
{
"name": "CSS",
"bytes": "216906"
},
{
"name": "CoffeeScript",
"bytes": "205094"
},
{
"name": "Emacs Lisp",
"bytes": "7446"
},
{
"name": "Groovy",
"bytes": "518"
},
{
"name": "HTML",
"bytes": "177967"
},
{
"name": "Java",
"bytes": "5177683"
},
{
"name": "JavaScript",
"bytes": "42958"
},
{
"name": "Makefile",
"bytes": "50927"
},
{
"name": "PHP",
"bytes": "8490"
},
{
"name": "Perl",
"bytes": "22594"
},
{
"name": "Python",
"bytes": "3244626"
},
{
"name": "R",
"bytes": "1631216"
},
{
"name": "Ruby",
"bytes": "299"
},
{
"name": "Scala",
"bytes": "39365"
},
{
"name": "Shell",
"bytes": "189829"
}
],
"symlink_target": ""
}
|
'''
Runs the set of rules defined in the provided config YAML file.
'''
from __future__ import print_function
import argparse
import os
import yaml
import json
import re
import math
from . import mlp_parser
def is_integer(value):
    """Return True when *value* is within 1e-5 of a whole number."""
    nearest_whole = round(value)
    return abs(nearest_whole - value) < 0.00001
class CCError(Exception):
    """Exception type raised for compliance-checker specific failures."""
    pass
def preety_dict(d):
    """Render *d* as pretty-printed JSON with sorted keys (for messages)."""
    formatted = json.dumps(d, indent=2, sort_keys=True)
    return formatted
# Queue of config file paths still to be checked; rule CODE blocks may append
# to it while they run (via enqueue_config below).
enqueued_configs = []
# this function can be called from yaml
def enqueue_config(config):
    # Schedule another config file for checking.
    enqueued_configs.append(config)
def all_same(l):
    """Return True when every element of *l* equals the first (or *l* is empty)."""
    if not l:
        return True
    return l.count(l[0]) == len(l)
def merge(*dicts):
    """Merge the given dicts into a new dict; later dicts win on duplicate keys."""
    combined = {}
    for mapping in dicts:
        combined.update(mapping)
    return combined
class ComplianceChecker:
    """Runs configured compliance rules over parsed MLPerf log lines.

    Failures are accumulated as messages rather than raised immediately, so
    a whole file can be checked in one pass; see log_messages/has_messages.
    """
    def __init__(self, ruleset, quiet, werror):
        self.ruleset = ruleset
        # key -> warning message (printed, non-fatal unless werror is set)
        self.warnings = {}
        # key -> failure message that a later config may overwrite
        self.overwritable = {}
        # failure messages that can never be overwritten
        self.not_overwritable = []
        self.quiet = quiet
        self.werror = werror
    def raise_exception(self, msg):
        # Helper exposed so rule code can abort checking with a CCError.
        raise CCError(msg)
    def put_warning(self, msg, key):
        """Record a warning for *key*; with --werror it is promoted to a failure."""
        if self.werror:
            self.put_message(msg, key)
        elif not self.quiet:
            print(key, msg)
        self.warnings[key] = msg
    def put_message(self, msg, key=None):
        """Record a failure message; keyed messages can later be overwritten."""
        if key:
            self.overwritable[key] = msg
        else:
            self.not_overwritable.append(msg)
    def overwrite_messages(self, keys):
        # Drop overwritable messages for rules that a later config redefines.
        for key in keys:
            self.overwritable.pop(key, None)
    def log_messages(self):
        """Print all accumulated warnings and failures, separated by dashes."""
        message_separator = '\n' + '-' * 30 + '\n'
        message = message_separator.join([
            *self.warnings.values(),
            *self.overwritable.values(),
            *self.not_overwritable
        ])
        if message:
            print(message)
    def has_messages(self):
        # True when any failure (overwritable or not) was recorded.
        return self.not_overwritable or self.overwritable
    def run_check_eval(self, ll, tests, state):
        """Evaluate each CHECK expression for log line *ll*; record failures."""
        if type(tests) is not list:
            tests = [tests]
        for test in tests:
            try:
                # 'll' is the log line object, 'v' its value; 's' lives in state.
                if not eval(test.strip(), state, {'ll': ll, 'v': ll.value }):
                    self.put_message(
                        f"CHECK for '{ll.key}' failed in line {ll.lineno}:"
                        f"\n{ll.full_string}"
                        f"\nfailed test: {test}"
                        f"\ncurrent context[s]={preety_dict(state['s'])}"
                        f"\ncurrent line[v]={preety_dict(ll.value)}",
                        key=ll.key
                    )
            except:
                self.put_message(
                    f'Failed executing CHECK code:'
                    f'\n{test}'
                    f'\ntriggered by line:'
                    f'\n{ll.full_string}'
                    f"\ncurrent context[s]={preety_dict(state['s'])}",
                    key=ll.key
                )
    def run_check_end(self, tests, state):
        """Evaluate the END block's CHECK expressions against the final state."""
        if type(tests) is not list:
            tests = [tests]
        for test in tests:
            try:
                if not eval(test.strip(), state):
                    self.put_message(
                        f"failed test: {test}"
                        f"\ncurrent context[s]={preety_dict(state['s'])}",
                    )
            except:
                self.put_message(
                    f'Failed executing CHECK code:'
                    f'\n{test}'
                    f'\ncurrent context[s]={preety_dict(state["s"])}'
                )
    def run_check_exec(self, ll, code, state, action):
        """Exec a PRE/POST code snippet for log line *ll*; record failures."""
        if code is None: return
        try:
            exec(code.strip(), state, {'ll': ll, 'v': ll.value})
        except:
            self.put_message(f'Failed executing code {action} code triggered by line :\n{ll.full_string}',
                             key=ll.key)
    def parse_alternatives(self, string):
        # Split "AT_LEAST_ONE_OR(a,b,...)" into its alternative key names.
        in_pharentises = string[len('AT_LEAST_ONE_OR(') : -1]
        alternatives = in_pharentises.split(',')
        return [s.strip() for s in alternatives]
    def configured_checks(self, loglines, config_file):
        """Run all rules from *config_file* (YAML) over *loglines*."""
        with open(config_file) as f:
            checks = yaml.load(f, Loader=yaml.BaseLoader)
        if checks is None:
            return
        s = {} # this would be visible from inside configs
        # Globals available to rule CODE/CHECK snippets via eval/exec.
        state = {'enqueue_config':enqueue_config , 's':s,
                 'is_integer': is_integer,
                 'math': math}
        #execute begin block
        begin_blocks = [x for x in checks if list(x)[0]=='BEGIN']
        assert(len(begin_blocks)<=1) # up to one begin block
        if len(begin_blocks)==1:
            exec(begin_blocks[0]['BEGIN']['CODE'].strip(), state)
        # Map rule key name -> its KEY record from the config.
        key_records = {}
        for k in checks:
            if list(k)[0]=='KEY':
                key_records.update({k['KEY']['NAME']:k['KEY']})
        reported_values = {k:[] for k in key_records.keys()}
        # if config overrides some rules from previous config, corresponding messages are not needed
        self.overwrite_messages(key_records)
        at_least_one_checks = {}
        # executing the rules through log records
        for line in loglines:
            key_record = None
            try:
                reported_values[line.key].append(line.value['value'])
                key_record = key_records[line.key]
            except:
                # unknown key - it's allowed, skip to next record
                continue
            if 'PRE' in key_record: self.run_check_exec(line, key_record['PRE'], state, 'PRE')
            if 'CHECK' in key_record: self.run_check_eval(line, key_record['CHECK'], state)
            if 'POST' in key_record: self.run_check_exec(line, key_record['POST'], state, 'POST')
            if 'ATLEAST_ONE_CHECK' in key_record:
                # Count how many occurrences of this key satisfy the check.
                if line.key not in at_least_one_checks:
                    at_least_one_checks[line.key] = [0, key_record['ATLEAST_ONE_CHECK']]
                check = eval(key_record['ATLEAST_ONE_CHECK'].strip(),
                             state, {'ll': line, 'v': line.value})
                if check:
                    at_least_one_checks[line.key][0] += 1
        for name in at_least_one_checks:
            if at_least_one_checks[name][0] == 0:
                self.put_message('Failed checks for {} : {}'
                                 .format(name, at_least_one_checks[name][1]))
        alternatives = set()
        # verify occurrences requirements
        for k,v in key_records.items():
            if 'REQ' not in v:
                continue
            if v['REQ']=='EXACTLY_ONE':
                # Duplicates with identical values only warn; differing values fail.
                if len(reported_values[k]) !=1:
                    if reported_values[k] and all_same(reported_values[k]):
                        self.put_warning(f"Required EXACTLY_ONE occurrence of '{k}'"
                                         f" but found {len(reported_values[k])}",
                                         key=k)
                    else:
                        self.put_message(f"Required EXACTLY_ONE occurrence of '{k}'"
                                         f" but found {len(reported_values[k])}" +
                                         (f" with different values:"
                                          f" [{', '.join(x for x in set(str(v) for v in reported_values[k]))}]"
                                          if reported_values[k] else ''),
                                         key=k)
            if v['REQ']=='AT_LEAST_ONE':
                if len(reported_values[k])<1:
                    self.put_message(f"Required AT_LEAST_ONE occurrence of '{k}' but found {len(reported_values[k])}",
                                     key=k)
            if v['REQ'].startswith('AT_LEAST_ONE_OR'):
                alternatives.add(tuple({k, *self.parse_alternatives(v['REQ'])}))
        for alts in alternatives:
            if not any(reported_values[k] for k in alts):
                self.put_message("Required AT_LEAST_ONE occurrence of {}".format(' or '.join(f"'{s}'" for s in alts)))
        # execute end block
        end_blocks = [x for x in checks if list(x)[0]=='END']
        assert(len(end_blocks)<=1) # up to one end block
        if len(end_blocks)==1:
            end_record = end_blocks[0]['END']
            if 'PRE' in end_record: exec(end_record['PRE'].strip(), state)
            if 'CHECK' in end_record:
                self.run_check_end(end_record['CHECK'], state)
    def check_loglines(self, loglines, config):
        """Check *loglines* against *config* and any configs it enqueues."""
        if not loglines:
            self.put_message('No log lines detected')
        enqueue_config(config)
        current_dir = os.path.dirname(os.path.abspath(__file__))
        while len(enqueued_configs)>0:
            current_config = enqueued_configs.pop(0)
            config_file = general_file = os.path.join(current_dir, current_config)
            if not os.path.exists(config_file):
                self.put_message('Could not find config file: {}'.format(config_file))
            # processing a config may have a side affect of pushing another config(s) to be checked
            self.configured_checks(loglines, config_file)
    def check_file(self, filename, config_file):
        """Parse *filename*, run all checks, print messages; return True if clean."""
        loglines, errors = mlp_parser.parse_file(filename, ruleset=self.ruleset)
        if len(errors) > 0:
            print('Found parsing errors:')
            for line, error in errors:
                print(line)
                print('  ^^ ', error)
            print()
            self.put_message('Log lines had parsing errors.')
        self.check_loglines(loglines, config_file)
        self.log_messages()
        return not self.has_messages()
def rule_choices():
    """List available ruleset versions: sibling directories named like 'X.Y.Z'.

    Returns a list of entry names from this package's directory that match a
    dotted three-number version pattern.
    """
    # Raw string avoids the invalid '\.' escape warning of the original
    # pattern; abspath matches how check_loglines resolves this directory
    # and protects against a bare relative __file__.
    version_re = re.compile(r'\d+\.\d+\.\d+')
    return [ x for x in os.listdir(os.path.dirname(os.path.abspath(__file__)))
             if version_re.match(x) ]
def get_parser():
    """Build the argparse parser for the compliance-checker CLI.

    Returns an ArgumentParser with the positional log filename and the
    --ruleset/--config/--werror/--quiet options.
    """
    parser = argparse.ArgumentParser(
        prog='mlperf_logging.compliance_checker',
        description='Lint MLPerf Compliance Logs.',
    )
    parser.add_argument('filename', type=str,
                    help='the file to check for compliance')
    parser.add_argument('--ruleset', type=str, default='0.7.0',
                    choices=rule_choices(),
                    help='what version of rules to check the log against')
    parser.add_argument('--config',  type=str,
                    help='mlperf logging config, by default it loads {ruleset}/common.yaml', default=None)
    # Fixed help-text typo: "Treas" -> "Treat".
    parser.add_argument('--werror', action='store_true',
                    help='Treat warnings as errors')
    parser.add_argument('--quiet', action='store_true',
                    help='Suppress warnings. Does nothing if --werror is set')
    return parser
def make_checker(ruleset, quiet, werror):
    """Factory: build a ComplianceChecker configured with the given settings."""
    checker = ComplianceChecker(ruleset, quiet, werror)
    return checker
def main(filename, config_file, checker):
    # Run the checker over one log file; returns (valid, None, None, None).
    # NOTE(review): the three trailing Nones presumably pad the return to a
    # shape shared with other mlperf_logging checkers — confirm against callers.
    valid = checker.check_file(filename, config_file)
    return valid, None, None, None
|
{
"content_hash": "bfbac87f87f8dd5e8192174358ea5817",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 119,
"avg_line_length": 33.626168224299064,
"alnum_prop": 0.5240874559940708,
"repo_name": "mlperf/training_results_v0.7",
"id": "b57fa4a4e5f9ab5fc127db395758d1dc6dc6f679",
"size": "10794",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "SIAT/benchmarks/resnet/implementations/tensorflow_close_src/mlperf_logging/compliance_checker/mlp_compliance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Awk",
"bytes": "14530"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "172914"
},
{
"name": "C++",
"bytes": "13037795"
},
{
"name": "CMake",
"bytes": "113458"
},
{
"name": "CSS",
"bytes": "70255"
},
{
"name": "Clojure",
"bytes": "622652"
},
{
"name": "Cuda",
"bytes": "1974745"
},
{
"name": "Dockerfile",
"bytes": "149523"
},
{
"name": "Groovy",
"bytes": "160449"
},
{
"name": "HTML",
"bytes": "171537"
},
{
"name": "Java",
"bytes": "189275"
},
{
"name": "JavaScript",
"bytes": "98224"
},
{
"name": "Julia",
"bytes": "430755"
},
{
"name": "Jupyter Notebook",
"bytes": "11091342"
},
{
"name": "Lua",
"bytes": "17720"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "215967"
},
{
"name": "Perl",
"bytes": "1551186"
},
{
"name": "PowerShell",
"bytes": "13906"
},
{
"name": "Python",
"bytes": "36943114"
},
{
"name": "R",
"bytes": "134921"
},
{
"name": "Raku",
"bytes": "7280"
},
{
"name": "Ruby",
"bytes": "4930"
},
{
"name": "SWIG",
"bytes": "140111"
},
{
"name": "Scala",
"bytes": "1304960"
},
{
"name": "Shell",
"bytes": "1312832"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "Starlark",
"bytes": "69877"
},
{
"name": "TypeScript",
"bytes": "243012"
}
],
"symlink_target": ""
}
|
import boto3
def get_public_api_base(event):
    """Return the public base URL ("https://host/path") of the invoking API.

    https://forums.aws.amazon.com/thread.jspa?threadID=241370

    For the default execute-api host the base is host + stage. For a custom
    domain name, the base path mapping for this API/stage is looked up via
    the API Gateway API.

    Raises Exception when the mapping is ambiguous or cannot be found.
    """
    host_header_value = event["request-params"]["header"]["Host"]
    if host_header_value.endswith(".amazonaws.com"):
        # Assume this is the default deployment URL.
        return "https://{}/{}".format(
            host_header_value,
            event["stage"]
        )
    # The host header indicates this is invoked through a custom domain name.
    # Look up the base path mapping based on our stage.
    # Note that this will be imperfect because a stage can have multiple base path mappings.
    response_iterator = boto3.client("apigateway").get_paginator("get_base_path_mappings").paginate(
        domainName = host_header_value
    )
    own_mapping = None
    for each_response in response_iterator:
        # Default to [] so a response page without "items" doesn't raise
        # TypeError when iterated (get("items") alone can return None).
        for each_item in each_response.get("items", []):
            if each_item["restApiId"] == event["api-id"] and each_item.get("stage", "") in ["", event["stage"]]:
                if own_mapping is not None:
                    raise Exception("Ambiguous base path mapping. Can't determine base path of API.")
                own_mapping = each_item
    if own_mapping is None:
        raise Exception("Unable to determine API's public URL.")
    base_path = own_mapping["basePath"]
    if own_mapping.get("stage", "") == "":
        # Mapping is not pinned to a stage; append ours explicitly.
        base_path += "/" + event["stage"]
    return "https://{}/{}".format(
        host_header_value,
        base_path
    )
|
{
"content_hash": "c7a5c7e3a739e94f4c4b2b90b7e4b943",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 112,
"avg_line_length": 34.97727272727273,
"alnum_prop": 0.5964912280701754,
"repo_name": "moduspwnens/boa-chat",
"id": "54e6cb5ea96b6c6bea759fcc7e0c4e1d8d52fe36",
"size": "1539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boa-nimbus/lambda-pip-modules/apigateway-helpers/apigateway_helpers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7685"
},
{
"name": "Dockerfile",
"bytes": "1002"
},
{
"name": "HTML",
"bytes": "20463"
},
{
"name": "JavaScript",
"bytes": "64145"
},
{
"name": "Python",
"bytes": "160254"
},
{
"name": "Shell",
"bytes": "781"
}
],
"symlink_target": ""
}
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.WGL import _types as _cs
# End users want this...
from OpenGL.raw.WGL._types import *
from OpenGL.raw.WGL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'WGL_ARB_framebuffer_sRGB'
# Wrap a ctypes function pointer with this extension's name and error checker.
def _f( function ):
    return _p.createFunction( function,_p.PLATFORM.WGL,'WGL_ARB_framebuffer_sRGB',error_checker=_errors._error_checker)
WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB=_C('WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB',0x20A9)
|
{
"content_hash": "d4429de7b57cb9b49dbe148a8e9d1946",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 119,
"avg_line_length": 39.13333333333333,
"alnum_prop": 0.7597955706984668,
"repo_name": "alexus37/AugmentedRealityChess",
"id": "8493ac5a6903d897636ae27361c0567fc298aac3",
"size": "587",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/raw/WGL/ARB/framebuffer_sRGB.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "158062"
},
{
"name": "C++",
"bytes": "267993"
},
{
"name": "CMake",
"bytes": "11319"
},
{
"name": "Fortran",
"bytes": "3707"
},
{
"name": "Makefile",
"bytes": "14618"
},
{
"name": "Python",
"bytes": "12813086"
},
{
"name": "Roff",
"bytes": "3310"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
}
|
import re
class Unit:
"""Implementation of SVG units and conversions between them.
Parameters
----------
measure : str
value with unit (for example, '2cm')
"""
per_inch = {"px": 90, "cm": 2.54, "mm": 25.4, "pt": 72.0}
def __init__(self, measure):
try:
self.value = float(measure)
self.unit = "px"
except ValueError:
m = re.match("([0-9]+\.?[0-9]*)([a-z]+)", measure)
value, unit = m.groups()
self.value = float(value)
self.unit = unit
def to(self, unit):
"""Convert to a given unit.
Parameters
----------
unit : str
Name of the unit to convert to.
Returns
-------
u : Unit
new Unit object with the requested unit and computed value.
"""
u = Unit("0cm")
u.value = self.value / self.per_inch[self.unit] * self.per_inch[unit]
u.unit = unit
return u
def __str__(self):
return "{}{}".format(self.value, self.unit)
def __repr__(self):
return "Unit({})".format(str(self))
def __mul__(self, number):
u = Unit("0cm")
u.value = self.value * number
u.unit = self.unit
return u
def __truediv__(self, number):
return self * (1.0 / number)
def __div__(self, number):
return self * (1.0 / number)
|
{
"content_hash": "6ed8729c18c3d7a70574b28380a1f19c",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 77,
"avg_line_length": 24.16949152542373,
"alnum_prop": 0.4845722300140252,
"repo_name": "btel/svg_utils",
"id": "9928d7277b4e5d8d8b435450a5e34fb7788f990a",
"size": "1426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/svgutils/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31174"
}
],
"symlink_target": ""
}
|
from ctypes.wintypes import *
from ctypes import *
import logging
log = logging.getLogger(__name__)
# ctypes prototypes for the Win32 APIs used by WindowsInterop below.
CreateFileW = windll.kernel32.CreateFileW
CreateFileW.argtypes = (LPCWSTR, DWORD, DWORD, c_void_p, DWORD, DWORD, HANDLE)
CreateFileW.restype = HANDLE
ReadFile = windll.kernel32.ReadFile
ReadFile.argtypes = (HANDLE, c_void_p, DWORD, POINTER(DWORD), HANDLE)
ReadFile.restype = BOOL
# Win32 constants and type aliases.
NULL = 0
MAX_PATH = 260
DEFAULT_BUFFER_SIZE = 4096
LPSECURITY_ATTRIBUTES = c_void_p
class WindowsInterop(object):
    """Thin ctypes wrappers around Win32 file, file-mapping and device APIs."""
    # Class-level buffer reused by read_into() to avoid reallocating per call.
    ri_buffer = None
    @classmethod
    def create_file(cls, path, desired_access, share_mode, creation_disposition, flags_and_attributes):
        """Open *path* via CreateFileW and return the raw handle; raises on error."""
        h = CreateFileW(
            path,
            desired_access,
            share_mode,
            NULL,
            creation_disposition,
            flags_and_attributes,
            NULL
        )
        error = GetLastError()
        if error != 0:
            raise Exception('[WindowsASIO.open] "%s"' % FormatError(error))
        return h
    @classmethod
    def read(cls, handle, buf_size=DEFAULT_BUFFER_SIZE):
        """Read up to *buf_size* bytes from *handle*; return the bytes or None."""
        buf = create_string_buffer(buf_size)
        bytes_read = c_ulong(0)
        success = ReadFile(handle, buf, buf_size, byref(bytes_read), NULL)
        error = GetLastError()
        if error:
            log.debug('read_file - error: (%s) "%s"', error, FormatError(error))
        if not success and error:
            raise Exception('[WindowsInterop.read_file] (%s) "%s"' % (error, FormatError(error)))
        # Return if we have a valid buffer
        if success and bytes_read.value:
            return buf.value
        return None
    @classmethod
    def read_into(cls, handle, b):
        """Read into caller buffer *b* via the shared ri_buffer; return byte count or None."""
        # Grow the shared scratch buffer only when the request exceeds it.
        if cls.ri_buffer is None or len(cls.ri_buffer) < len(b):
            cls.ri_buffer = create_string_buffer(len(b))
        bytes_read = c_ulong(0)
        success = ReadFile(handle, cls.ri_buffer, len(b), byref(bytes_read), NULL)
        bytes_read = int(bytes_read.value)
        b[:bytes_read] = cls.ri_buffer[:bytes_read]
        error = GetLastError()
        if not success and error:
            raise Exception('[WindowsInterop.read_file] (%s) "%s"' % (error, FormatError(error)))
        # Return if we have a valid buffer
        if success and bytes_read:
            return bytes_read
        return None
    @classmethod
    def set_file_pointer(cls, handle, distance, method):
        """Seek *handle* by *distance* using the Win32 move *method* flag."""
        pos_high = DWORD(NULL)
        result = windll.kernel32.SetFilePointer(
            handle,
            c_ulong(distance),
            byref(pos_high),
            DWORD(method)
        )
        if result == -1:
            raise Exception('[WindowsASIO.seek] INVALID_SET_FILE_POINTER: "%s"' % FormatError(GetLastError()))
        return result
    @classmethod
    def get_file_size(cls, handle):
        # Low 32 bits of the file size only (high DWORD pointer not captured).
        return windll.kernel32.GetFileSize(
            handle,
            DWORD(NULL)
        )
    @classmethod
    def close_handle(cls, handle):
        return windll.kernel32.CloseHandle(handle)
    @classmethod
    def create_file_mapping(cls, handle, protect, maximum_size_high=0, maximum_size_low=1):
        """Create a file-mapping object for *handle*; returns its HANDLE."""
        return HANDLE(windll.kernel32.CreateFileMappingW(
            handle,
            LPSECURITY_ATTRIBUTES(NULL),
            DWORD(protect),
            DWORD(maximum_size_high),
            DWORD(maximum_size_low),
            LPCSTR(NULL)
        ))
    @classmethod
    def map_view_of_file(cls, map_handle, desired_access, num_bytes, file_offset_high=0, file_offset_low=0):
        """Map a view of the file mapping into this process; returns its HANDLE."""
        return HANDLE(windll.kernel32.MapViewOfFile(
            map_handle,
            DWORD(desired_access),
            DWORD(file_offset_high),
            DWORD(file_offset_low),
            num_bytes
        ))
    @classmethod
    def unmap_view_of_file(cls, view_handle):
        return windll.kernel32.UnmapViewOfFile(view_handle)
    @classmethod
    def get_mapped_file_name(cls, view_handle, translate_device_name=True):
        """Return the file name backing a mapped view, optionally as a drive path."""
        buf = create_string_buffer(MAX_PATH + 1)
        result = windll.psapi.GetMappedFileNameW(
            cls.get_current_process(),
            view_handle,
            buf,
            MAX_PATH
        )
        # Raise exception on error
        error = GetLastError()
        if result == 0:
            raise Exception(FormatError(error))
        # Retrieve a clean file name (skipping over NUL bytes)
        file_name = cls.clean_buffer_value(buf)
        # If we are not translating the device name return here
        if not translate_device_name:
            return file_name
        drives = cls.get_logical_drive_strings()
        # Find the drive matching the file_name device name
        translated = False
        for drive in drives:
            device_name = cls.query_dos_device(drive)
            if file_name.startswith(device_name):
                file_name = drive + file_name[len(device_name):]
                translated = True
                break
        if not translated:
            raise Exception('Unable to translate device name')
        return file_name
    @classmethod
    def get_logical_drive_strings(cls, buf_size=512):
        """Return the list of logical drive strings (e.g. ['C:', 'D:'])."""
        buf = create_string_buffer(buf_size)
        result = windll.kernel32.GetLogicalDriveStringsW(buf_size, buf)
        error = GetLastError()
        if result == 0:
            raise Exception(FormatError(error))
        drive_strings = cls.clean_buffer_value(buf)
        return [dr for dr in drive_strings.split('\\') if dr != '']
    @classmethod
    def query_dos_device(cls, drive, buf_size=MAX_PATH):
        """Return the NT device name that *drive* maps to."""
        buf = create_string_buffer(buf_size)
        result = windll.kernel32.QueryDosDeviceA(
            drive,
            buf,
            buf_size
        )
        return cls.clean_buffer_value(buf)
    @classmethod
    def get_current_process(cls):
        return HANDLE(windll.kernel32.GetCurrentProcess())
    @classmethod
    def clean_buffer_value(cls, buf):
        # Strip NUL padding bytes from a raw ctypes buffer (Python 2 str chars).
        value = ""
        for ch in buf.raw:
            if ord(ch) != 0:
                value += ch
        return value
|
{
"content_hash": "3acb20c964abed1f12998edcaaee01ed",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 110,
"avg_line_length": 27.935185185185187,
"alnum_prop": 0.5896586012595293,
"repo_name": "pannal/Subliminal.bundle",
"id": "7bce197c241c6aff25bf457ed823eaf322e43bf7",
"size": "6631",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Contents/Libraries/Shared/asio/interfaces/windows/interop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3012769"
},
{
"name": "Python",
"bytes": "3311785"
},
{
"name": "Shell",
"bytes": "273"
}
],
"symlink_target": ""
}
|
from .. import Load, DirFromEnv, Nminus1Cut
# Load the RatioComparison plotter class and create the module-level plotter
# instance used as the default by the functions below.
newRatioComparer = Load('RatioComparison')
plotter = newRatioComparer()
def SetupFromEnv(aPlotter=plotter):
    """A function that sets up a plotter after sourcing a config file.
    @param aPlotter is the plotter to setup. Defaults to plotter in this module.
    """
    from ..CommonTools.FileConfigReader import SetupConfigFromEnv, SetFunctionFromEnv
    SetupConfigFromEnv(aPlotter)
    # Ensure the configured output directory exists, then point the plotter at it.
    DirFromEnv('CrombieOutPlotDir')
    SetFunctionFromEnv([
        (aPlotter.SetOutDirectory, 'CrombieOutPlotDir'),
    ])
def SetCuts(category1, region1, region2, category2=None, aPlotter=plotter, Nminus1=''):
    """ Sets cuts based on category and region.
    @param category1 is the category of the analysis being used.
    @param region1 is the region of the numerator.
    @param region2 is the region of the denominator.
    @param category2 if set, is the category of the denominator.
                     Otherwise, the category is assumed to be the same
    @param aPlotter is the plotter that is having its cuts set.
                    Default is the plotter defined in this module.
    @param Nminus1 is a parameter to not cut on
    """
    from ..LoadConfig import cuts
    category2 = category2 or category1
    aPlotter.Reset()
    # Drop the Nminus1 variable from each selection when one was requested.
    numerator_cut = Nminus1Cut(cuts.cut(category1, region1), Nminus1) if Nminus1 else cuts.cut(category1, region1)
    denominator_cut = Nminus1Cut(cuts.cut(category2, region2), Nminus1) if Nminus1 else cuts.cut(category2, region2)
    # Data entry (index 1): selections combined with '&&'.
    aPlotter.AddRatioCuts(numerator_cut + ' && ' + cuts.dataMCCuts(region1, True),
                          denominator_cut + ' && ' + cuts.dataMCCuts(region2, True),
                          aPlotter.kData, 'Data', 1)
    # MC entry (index 2): per-event weight multiplied in with '*'.
    aPlotter.AddRatioCuts(numerator_cut + ' * ' + cuts.dataMCCuts(region1, False),
                          denominator_cut + ' * ' + cuts.dataMCCuts(region2, False),
                          aPlotter.kBackground, 'MC', 2)
    aPlotter.SetDataIndex(0)
    aPlotter.SetRatioIndex(1)
|
{
"content_hash": "95ea43358dadaac1873544f9d2eb951f",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 116,
"avg_line_length": 38.716981132075475,
"alnum_prop": 0.6681286549707602,
"repo_name": "dabercro/CrombieTools",
"id": "06f5398904eec0ac31d68842ab1b0d786620c561",
"size": "2052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/CrombieTools/PlotTools/RatioComparison.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6339"
},
{
"name": "C++",
"bytes": "744000"
},
{
"name": "HTML",
"bytes": "4719"
},
{
"name": "JavaScript",
"bytes": "1783"
},
{
"name": "Makefile",
"bytes": "723"
},
{
"name": "Objective-C",
"bytes": "184882"
},
{
"name": "PHP",
"bytes": "6820"
},
{
"name": "Perl",
"bytes": "8637"
},
{
"name": "Python",
"bytes": "155475"
},
{
"name": "R",
"bytes": "12216"
},
{
"name": "Shell",
"bytes": "55692"
},
{
"name": "TeX",
"bytes": "2664"
}
],
"symlink_target": ""
}
|
import random
import re
from AISuite.game import Game as Game
import AISuite.player as player
from AISuite.alphabeta import UPPER_BOUND, LOWER_BOUND, shallowest_first
class Pig(Game):
    """The dice game Pig built on the AISuite Game framework (Python 2)."""
    def __init__(self, player1, player2, be_quiet = False, show_game = False):
        super(self.__class__, self).__init__(player1, player2, be_quiet)
        self.show_board = show_game
        # scores[0] = running score of the current turn (float),
        # scores[1] / scores[2] = banked scores of player 1 / player 2.
        self.scores = [0.0,0,0]
    def get_child_states(self):
        """Return [hold_state, roll_state]; rolling uses the die's expected value."""
        root = str(self)
        # Hold: bank the turn score and advance the turn.
        self.scores[self.get_player_num()] += int(self.scores[0])
        self.scores[0] = 0.0
        self.turn += 1
        hold_state = str(self)
        self.load_state_from_string(root)
        #gives expected value of rolling the die:
        self.scores[0] = (5.0 * self.scores[0] + 20.0) / 6.0
        roll_state = str(self)
        self.load_state_from_string(root)
        return [hold_state, roll_state]
    def get_child_moves(self):
        return ['Hold', 'Roll']
    def do_turn(self):
        """Ask the current player for moves until they hold or roll a 1."""
        human = self.is_human_turn()
        if not self.quiet:
            print "Turn " + str(self.turn)
        finished_playing = False
        possible_moves = self.get_child_moves()
        while not finished_playing:
            if human or self.show_board:
                self.opg()
                print "Player" + str(self.get_player_num()) + ", enter a valid move from " + str(possible_moves)
            move = self.current_player().choose_move(self)
            if human and move in self.escapes:
                self.handle_escape(move)
            # NOTE(review): lower() can never equal 'Hold'/'Roll' from
            # possible_moves; only the lowercase aliases match — confirm intended.
            if str(move).lower() in possible_moves + ['h','r','hold','roll']:
                if 'h' in move or 'H' in move:
                    # Hold: bank the turn score and pass the turn.
                    self.scores[self.get_player_num()] += int(self.scores[0])
                    self.scores[0] = 0.0
                    self.turn += 1
                    finished_playing = True
                else:
                    roll = random.choice([1,2,3,4,5,6])
                    if roll == 1:
                        # Rolled a 1: lose the turn score and pass the turn.
                        self.scores[0] = 0.0
                        self.turn += 1
                        finished_playing = True
                    else:
                        self.scores[0] += roll
            else:
                if human:
                    print 'That wasn\'t a valid move.'
                    self.opg()
        self.check_winner()
    def make_new_instance(self):
        return Pig(player.Player(), player.Player())
    def __str__(self):
        # Serialized state: turn_score;p1_score;p2_score;turn
        return str(self.scores[0]) + ';' + str(self.scores[1]) + ';' + str(self.scores[2]) + ';' + str(self.turn)
    @staticmethod
    def parse_state(game_state):
        # Strip the trailing turn counter from a serialized state string.
        split_list = game_state.split(';')
        split_list = split_list[:-1]
        return ';'.join(split_list)
    def load_state_from_string(self, state):
        """Restore scores and turn from a serialized state string."""
        split = state.split(';')
        for x in range(3):
            try:
                self.scores[x] = int(split[x])
            except:
                # Turn score may be fractional (expected-value states).
                self.scores[x] = float(split[x])
        self.turn = int(split[3])
        self.check_winner()
    def opg(self):
        # Print the current scores and whose turn it is.
        print "Player1's score: " + str(self.scores[1])
        print "Player2's score: " + str(self.scores[2])
        print "it is currently player" + str(self.get_player_num()) + "'s turn."
        print "current turn score: " + str(self.scores[0])
        print
    def check_winner(self):
        # First player to bank 100 points wins; -1 means no winner yet.
        if self.scores[1] >= 100:
            self.winner = 1
        elif self.scores[2] >= 100:
            self.winner = 2
        else:
            self.winner = -1
        return self.winner
def pig_heuristic(game_state):
	"""Estimate the value of a serialized Pig state for player 1.

	The state string is "turn_score;p1_total;p2_total;turn".  Terminal
	positions (a banked total >= 100) return the exact search bounds;
	otherwise the value is the score difference plus the truncated
	expected value of one die roll, credited to whichever player is on
	turn, clamped strictly inside (LOWER_BOUND, UPPER_BOUND).
	"""
	fields = game_state.split(';')
	p1_total = int(fields[1])
	p2_total = int(fields[2])
	# Terminal positions: the first player to 100 wins outright.
	if p1_total >= 100:
		return UPPER_BOUND
	if p2_total >= 100:
		return LOWER_BOUND
	# Credit the expected roll value to the player whose turn it is
	# (odd turn counter means player 2 is on turn).
	sign = -1 if int(fields[-1]) % 2 == 1 else 1
	expected_roll = int((5.0 * float(fields[0]) + 20.0) / 6.0)
	value = p1_total - p2_total + sign * expected_roll
	# Non-terminal values must stay strictly inside the search bounds.
	if value >= UPPER_BOUND:
		return UPPER_BOUND - 1
	if value <= LOWER_BOUND:
		return LOWER_BOUND + 1
	return value
#p = Pig(player.Human(), player.Human())
#p.play()
|
{
"content_hash": "1ff1f0209787be65db6bed181844e8ae",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 107,
"avg_line_length": 26.818897637795274,
"alnum_prop": 0.6250733998825602,
"repo_name": "blamedcloud/PythonGames",
"id": "0243644f2699e226aa5411416b059d97d75fba2a",
"size": "3465",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pig.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "71307"
}
],
"symlink_target": ""
}
|
# Fetch the Python homepage and dump the raw HTML to stdout.
# NOTE: Python 2 only -- ``urllib.urlopen`` and the ``print`` statement were
# removed in Python 3 (use ``urllib.request.urlopen`` there).
from urllib import urlopen
doc = urlopen("http://www.python.org").read()
print doc
|
{
"content_hash": "b2ae6f54e5761d959269cb8e0a068745",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 21,
"alnum_prop": 0.7261904761904762,
"repo_name": "ActiveState/code",
"id": "45ebfb353bb59d0edf9dc578f6a651c479e9932c",
"size": "84",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/Python/52199_Grab_a_document_from_the_web/recipe-52199.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35894"
},
{
"name": "C",
"bytes": "56048"
},
{
"name": "C++",
"bytes": "90880"
},
{
"name": "HTML",
"bytes": "11656"
},
{
"name": "Java",
"bytes": "57468"
},
{
"name": "JavaScript",
"bytes": "181218"
},
{
"name": "PHP",
"bytes": "250144"
},
{
"name": "Perl",
"bytes": "37296"
},
{
"name": "Perl 6",
"bytes": "9914"
},
{
"name": "Python",
"bytes": "17387779"
},
{
"name": "Ruby",
"bytes": "40233"
},
{
"name": "Shell",
"bytes": "190732"
},
{
"name": "Tcl",
"bytes": "674650"
}
],
"symlink_target": ""
}
|
"""Tests for the `FilterFusion` optimization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import optimization
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
def _filter_fusion_test_cases():
  """Builds the named parameter tuples for the FilterFusion tests."""
  always = lambda x: constant_op.constant(True)
  zero_only = lambda x: math_ops.equal(x, 0)
  shifted_positive = lambda x: math_ops.greater(x + 5, 0)
  identity = lambda x: x

  predicates = [always, zero_only, shifted_positive]
  cases = []
  # Every ordered pair of predicates, followed by every ordered triple
  # sharing that pair as its prefix.
  for i, first in enumerate(predicates):
    for j, second in enumerate(predicates):
      cases.append(("Mixed{}{}".format(i, j), identity, [first, second]))
      for k, third in enumerate(predicates):
        cases.append(("Mixed{}{}{}".format(i, j, k), identity,
                      [first, second, third]))

  # Map functions that produce multiple outputs.
  always_multi = lambda x, y: constant_op.constant(True)
  cases.append(("Multi1", lambda x: (x, x), [always_multi, always_multi]))
  cases.append(("Multi2", lambda x: (x, 2), [
      always_multi,
      lambda x, y: math_ops.equal(x * math_ops.cast(y, dtypes.int64), 0)
  ]))
  return tuple(cases)
@test_util.run_all_in_graph_and_eager_modes
class FilterFusionTest(test_base.DatasetTestBase, parameterized.TestCase):

  @parameterized.named_parameters(*_filter_fusion_test_cases())
  def testFilterFusion(self, map_function, predicates):
    """Checks that chained filters are fused and produce correct output."""
    # assert_next pins the op sequence after optimization: the chain of
    # Filter datasets must have been fused into a single Filter.
    dataset = dataset_ops.Dataset.range(5).apply(
        optimization.assert_next(["Map", "Filter",
                                  "MemoryCacheImpl"])).map(map_function)
    for predicate in predicates:
      dataset = dataset.filter(predicate)
    dataset = dataset.cache()
    options = dataset_ops.Options()
    options.experimental_optimization.filter_fusion = True
    dataset = dataset.with_options(options)
    # Recompute the expected survivors by applying the map function and
    # predicates element by element in Python.
    expected_output = []
    for x in range(5):
      r = map_function(x)
      filtered = False
      for predicate in predicates:
        if isinstance(r, tuple):
          b = predicate(*r)  # Pass tuple as multiple arguments.
        else:
          b = predicate(r)
        if not self.evaluate(b):
          filtered = True
          break
      if not filtered:
        expected_output.append(r)
    self.assertDatasetProduces(dataset, expected_output=expected_output)
# Run the parameterized tests when executed as a script.
if __name__ == "__main__":
  test.main()
|
{
"content_hash": "ccc35f15754e7f9789fad754ff954553",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 74,
"avg_line_length": 35,
"alnum_prop": 0.6701940035273368,
"repo_name": "asimshankar/tensorflow",
"id": "3ce921b5efe9e870fe1c5fb6406736f8bbb9c09f",
"size": "3524",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/data/experimental/kernel_tests/optimization/filter_fusion_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4882"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "490070"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "52677142"
},
{
"name": "CMake",
"bytes": "207176"
},
{
"name": "Dockerfile",
"bytes": "39454"
},
{
"name": "Go",
"bytes": "1290930"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "890529"
},
{
"name": "Jupyter Notebook",
"bytes": "2618412"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "68402"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102518"
},
{
"name": "PHP",
"bytes": "5172"
},
{
"name": "Pascal",
"bytes": "221"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "43038983"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "497659"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
}
|
class PlaybackState:
    """
    Enumeration of the possible playback states.

    Each state is a plain string constant so it can be compared and
    serialized directly.
    """

    #: Playback is temporarily halted.
    PAUSED = "paused"

    #: Playback is in progress.
    PLAYING = "playing"

    #: Playback is stopped.
    STOPPED = "stopped"
|
{
"content_hash": "c1f158d78d10d2c07ba2ee3d07401a3f",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 47,
"avg_line_length": 20.214285714285715,
"alnum_prop": 0.6431095406360424,
"repo_name": "kingosticks/mopidy",
"id": "c1046f45d5cc4d2736ba34157be7db26777b7ba1",
"size": "283",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "mopidy/audio/constants.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "610"
},
{
"name": "HTML",
"bytes": "805"
},
{
"name": "Python",
"bytes": "743402"
},
{
"name": "Roff",
"bytes": "573"
},
{
"name": "Shell",
"bytes": "741"
}
],
"symlink_target": ""
}
|
import os
import sys

# Make the repository root importable so sphinx.ext.autodoc can import the
# ``pyrep`` package directly from the source tree (docs/source -> repo root).
sys.path.insert(0, os.path.abspath('../../'))


# -- Project information -----------------------------------------------------

project = 'PyRep'
copyright = '2019, Stephen James'
author = 'Stephen James'


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon'
]

# The compiled cffi backend is not available in the docs build environment,
# so mock it out for autodoc.
autodoc_mock_imports = ["pyrep.backend._sim_cffi"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# The document that contains the root toctree directive.
master_doc = 'index'


# -- Extension configuration -------------------------------------------------
|
{
"content_hash": "02b701d8b9729cd372e13cea1dcf9f29",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 78,
"avg_line_length": 30.2,
"alnum_prop": 0.6258278145695364,
"repo_name": "stepjam/PyRep",
"id": "4cf517f5fd5f80763e1d55a449bfe002ed225119",
"size": "2044",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "56307"
},
{
"name": "Lua",
"bytes": "16854"
},
{
"name": "Python",
"bytes": "428818"
}
],
"symlink_target": ""
}
|
"""
A reader for corpora that contain chunked (and optionally tagged)
documents.
"""
import os.path, codecs
import nltk
from nltk.corpus.reader.bracket_parse import BracketParseCorpusReader
from nltk import compat
from nltk.tree import Tree
from nltk.tokenize import *
from nltk.chunk import tagstr2tree
from nltk.corpus.reader.util import *
from nltk.corpus.reader.api import *
class ChunkedCorpusReader(CorpusReader):
    """
    Reader for chunked (and optionally tagged) corpora.  Paragraphs
    are split using a block reader.  They are then tokenized into
    sentences using a sentence tokenizer.  Finally, these sentences
    are parsed into chunk trees using a string-to-chunktree conversion
    function.  Each of these steps can be performed using a default
    function or a custom function.  By default, paragraphs are split
    on blank lines; sentences are listed one per line; and sentences
    are parsed into chunk trees using ``nltk.chunk.tagstr2tree``.

    Each accessor method below builds ``ChunkedCorpusView`` instances
    with a quadruple of flags (tagged, group_by_sent, group_by_para,
    chunked) that selects how much structure to keep in the output.
    """
    def __init__(self, root, fileids, extension='',
                 str2chunktree=tagstr2tree,
                 sent_tokenizer=RegexpTokenizer('\n', gaps=True),
                 para_block_reader=read_blankline_block,
                 encoding='utf8', tagset=None):
        """
        :param root: The root directory for this corpus.
        :param fileids: A list or regexp specifying the fileids in this corpus.
        """
        # NOTE(review): ``extension`` is never used in this method --
        # confirm whether it is kept only for API compatibility.
        CorpusReader.__init__(self, root, fileids, encoding)
        self._cv_args = (str2chunktree, sent_tokenizer, para_block_reader, tagset)
        """Arguments for corpus views generated by this corpus: a tuple
        (str2chunktree, sent_tokenizer, para_block_tokenizer)"""

    def raw(self, fileids=None):
        """
        :return: the given file(s) as a single string.
        :rtype: str
        """
        if fileids is None: fileids = self._fileids
        elif isinstance(fileids, compat.string_types): fileids = [fileids]
        return concat([self.open(f).read() for f in fileids])

    def words(self, fileids=None):
        """
        :return: the given file(s) as a list of words
            and punctuation symbols.
        :rtype: list(str)
        """
        # flags: tagged=0, group_by_sent=0, group_by_para=0, chunked=0
        return concat([ChunkedCorpusView(f, enc, 0, 0, 0, 0, *self._cv_args)
                       for (f, enc) in self.abspaths(fileids, True)])

    def sents(self, fileids=None):
        """
        :return: the given file(s) as a list of
            sentences or utterances, each encoded as a list of word
            strings.
        :rtype: list(list(str))
        """
        # flags: tagged=0, group_by_sent=1, group_by_para=0, chunked=0
        return concat([ChunkedCorpusView(f, enc, 0, 1, 0, 0, *self._cv_args)
                       for (f, enc) in self.abspaths(fileids, True)])

    def paras(self, fileids=None):
        """
        :return: the given file(s) as a list of
            paragraphs, each encoded as a list of sentences, which are
            in turn encoded as lists of word strings.
        :rtype: list(list(list(str)))
        """
        # flags: tagged=0, group_by_sent=1, group_by_para=1, chunked=0
        return concat([ChunkedCorpusView(f, enc, 0, 1, 1, 0, *self._cv_args)
                       for (f, enc) in self.abspaths(fileids, True)])

    def tagged_words(self, fileids=None, tagset=None):
        """
        :return: the given file(s) as a list of tagged
            words and punctuation symbols, encoded as tuples
            ``(word,tag)``.
        :rtype: list(tuple(str,str))
        """
        # flags: tagged=1, group_by_sent=0, group_by_para=0, chunked=0
        return concat([ChunkedCorpusView(f, enc, 1, 0, 0, 0, *self._cv_args, target_tagset=tagset)
                       for (f, enc) in self.abspaths(fileids, True)])

    def tagged_sents(self, fileids=None, tagset=None):
        """
        :return: the given file(s) as a list of
            sentences, each encoded as a list of ``(word,tag)`` tuples.
        :rtype: list(list(tuple(str,str)))
        """
        # flags: tagged=1, group_by_sent=1, group_by_para=0, chunked=0
        return concat([ChunkedCorpusView(f, enc, 1, 1, 0, 0, *self._cv_args, target_tagset=tagset)
                       for (f, enc) in self.abspaths(fileids, True)])

    def tagged_paras(self, fileids=None, tagset=None):
        """
        :return: the given file(s) as a list of
            paragraphs, each encoded as a list of sentences, which are
            in turn encoded as lists of ``(word,tag)`` tuples.
        :rtype: list(list(list(tuple(str,str))))
        """
        # flags: tagged=1, group_by_sent=1, group_by_para=1, chunked=0
        return concat([ChunkedCorpusView(f, enc, 1, 1, 1, 0, *self._cv_args, target_tagset=tagset)
                       for (f, enc) in self.abspaths(fileids, True)])

    def chunked_words(self, fileids=None, tagset=None):
        """
        :return: the given file(s) as a list of tagged
            words and chunks.  Words are encoded as ``(word, tag)``
            tuples (if the corpus has tags) or word strings (if the
            corpus has no tags).  Chunks are encoded as depth-one
            trees over ``(word,tag)`` tuples or word strings.
        :rtype: list(tuple(str,str) and Tree)
        """
        # flags: tagged=1, group_by_sent=0, group_by_para=0, chunked=1
        return concat([ChunkedCorpusView(f, enc, 1, 0, 0, 1, *self._cv_args, target_tagset=tagset)
                       for (f, enc) in self.abspaths(fileids, True)])

    def chunked_sents(self, fileids=None, tagset=None):
        """
        :return: the given file(s) as a list of
            sentences, each encoded as a shallow Tree.  The leaves
            of these trees are encoded as ``(word, tag)`` tuples (if
            the corpus has tags) or word strings (if the corpus has no
            tags).
        :rtype: list(Tree)
        """
        # flags: tagged=1, group_by_sent=1, group_by_para=0, chunked=1
        return concat([ChunkedCorpusView(f, enc, 1, 1, 0, 1, *self._cv_args, target_tagset=tagset)
                       for (f, enc) in self.abspaths(fileids, True)])

    def chunked_paras(self, fileids=None, tagset=None):
        """
        :return: the given file(s) as a list of
            paragraphs, each encoded as a list of sentences, which are
            in turn encoded as a shallow Tree.  The leaves of these
            trees are encoded as ``(word, tag)`` tuples (if the corpus
            has tags) or word strings (if the corpus has no tags).
        :rtype: list(list(Tree))
        """
        # flags: tagged=1, group_by_sent=1, group_by_para=1, chunked=1
        return concat([ChunkedCorpusView(f, enc, 1, 1, 1, 1, *self._cv_args, target_tagset=tagset)
                       for (f, enc) in self.abspaths(fileids, True)])

    def _read_block(self, stream):
        # Parse one blank-line-delimited block into chunk trees.
        return [tagstr2tree(t) for t in read_blankline_block(stream)]
class ChunkedCorpusView(StreamBackedCorpusView):
    """Stream-backed view over a chunked corpus file.

    The four flags (``tagged``, ``group_by_sent``, ``group_by_para``,
    ``chunked``) control how much structure each block keeps: whether
    word/tag tuples or bare words are returned, whether chunk trees are
    flattened to their leaves, and whether output is grouped by sentence
    and/or paragraph.
    """

    def __init__(self, fileid, encoding, tagged, group_by_sent,
                 group_by_para, chunked, str2chunktree, sent_tokenizer,
                 para_block_reader, source_tagset=None, target_tagset=None):
        StreamBackedCorpusView.__init__(self, fileid, encoding=encoding)
        # Output-shaping flags.
        self._tagged = tagged
        self._chunked = chunked
        self._group_by_sent = group_by_sent
        self._group_by_para = group_by_para
        # Parsing helpers: paragraph splitter, sentence tokenizer, and
        # string-to-chunktree converter (with optional tagset mapping).
        self._para_block_reader = para_block_reader
        self._sent_tokenizer = sent_tokenizer
        self._str2chunktree = str2chunktree
        self._source_tagset = source_tagset
        self._target_tagset = target_tagset

    def read_block(self, stream):
        """Read one paragraph block from ``stream``, shaped per the flags."""
        collected = []
        # append keeps one list per group; extend flattens the grouping.
        add_para = collected.append if self._group_by_para else collected.extend
        for para_str in self._para_block_reader(stream):
            sentences = []
            add_sent = (sentences.append if self._group_by_sent
                        else sentences.extend)
            for sent_str in self._sent_tokenizer.tokenize(para_str):
                tree = self._str2chunktree(
                    sent_str, source_tagset=self._source_tagset,
                    target_tagset=self._target_tagset)
                # Strip tags and/or chunk structure if not requested.
                if not self._tagged:
                    tree = self._untag(tree)
                if not self._chunked:
                    tree = tree.leaves()
                add_sent(tree)
            add_para(sentences)
        return collected

    def _untag(self, tree):
        """Replace every ``(word, tag)`` leaf of ``tree`` with ``word``, in place."""
        for idx, child in enumerate(tree):
            if isinstance(child, Tree):
                self._untag(child)
            elif isinstance(child, tuple):
                tree[idx] = child[0]
            else:
                raise ValueError('expected child to be Tree or tuple')
        return tree
|
{
"content_hash": "2435c48fbf5e2442bfde379a865d7152",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 98,
"avg_line_length": 42.11219512195122,
"alnum_prop": 0.5687478281014711,
"repo_name": "MyRookie/SentimentAnalyse",
"id": "c13fef842aba0f1108023fc67d42406144961882",
"size": "8889",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/nltk/corpus/reader/chunked.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "316238"
},
{
"name": "C++",
"bytes": "5171"
},
{
"name": "CSS",
"bytes": "6267"
},
{
"name": "FORTRAN",
"bytes": "3200"
},
{
"name": "HTML",
"bytes": "449"
},
{
"name": "JavaScript",
"bytes": "6187"
},
{
"name": "Prolog",
"bytes": "60188"
},
{
"name": "Python",
"bytes": "13690978"
},
{
"name": "Shell",
"bytes": "8340"
},
{
"name": "TeX",
"bytes": "212"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: move ``external_order_id`` from the Order
        table to the OrderItem table."""
        # Deleting field 'Order.external_order_id'
        db.delete_column('shop_order', 'external_order_id')

        # Adding field 'OrderItem.external_order_id'
        db.add_column('shop_orderitem', 'external_order_id',
                      self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Reverse the migration: restore ``external_order_id`` on Order and
        drop it from OrderItem."""
        # Adding field 'Order.external_order_id'
        db.add_column('shop_order', 'external_order_id',
                      self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
                      keep_default=False)

        # Deleting field 'OrderItem.external_order_id'
        db.delete_column('shop_orderitem', 'external_order_id')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'generic.assignedkeyword': {
'Meta': {'ordering': "('_order',)", 'object_name': 'AssignedKeyword'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': "orm['generic.Keyword']"}),
'object_pk': ('django.db.models.fields.IntegerField', [], {})
},
'generic.keyword': {
'Meta': {'object_name': 'Keyword'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'generic.rating': {
'Meta': {'object_name': 'Rating'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.IntegerField', [], {}),
'value': ('django.db.models.fields.IntegerField', [], {})
},
'pages.page': {
'Meta': {'ordering': "('titles',)", 'object_name': 'Page'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_menus': ('mezzanine.pages.fields.MenusField', [], {'default': '[1, 2, 3]', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': "orm['generic.AssignedKeyword']", 'frozen_by_south': 'True'}),
'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'})
},
'shop.cart': {
'Meta': {'object_name': 'Cart'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'shop.cartitem': {
'Meta': {'object_name': 'CartItem'},
'cart': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['shop.Cart']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'from_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sku': ('cartridge.shop.fields.SKUField', [], {'max_length': '20'}),
'to_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'total_price': ('cartridge.shop.fields.MoneyField', [], {'default': "'0'", 'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'unit_price': ('cartridge.shop.fields.MoneyField', [], {'default': "'0'", 'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'shop.category': {
'Meta': {'ordering': "('_order',)", 'object_name': 'Category', '_ormbases': ['pages.Page']},
'combined': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'featured_image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'product_options'", 'blank': 'True', 'to': "orm['shop.ProductOption']"}),
'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'}),
'price_max': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'price_min': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'sale': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.Sale']", 'null': 'True', 'blank': 'True'})
},
'shop.discountcode': {
'Meta': {'object_name': 'DiscountCode'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'discountcode_related'", 'blank': 'True', 'to': "orm['shop.Category']"}),
'code': ('cartridge.shop.fields.DiscountCodeField', [], {'unique': 'True', 'max_length': '20'}),
'discount_deduct': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_exact': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_percent': ('cartridge.shop.fields.PercentageField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'free_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_purchase': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'uses_remaining': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'valid_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'valid_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'shop.order': {
'Meta': {'ordering': "('-id',)", 'object_name': 'Order'},
'additional_instructions': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'billing_detail_city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'billing_detail_first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'billing_detail_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'billing_detail_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'discount_code': ('cartridge.shop.fields.DiscountCodeField', [], {'max_length': '20', 'blank': 'True'}),
'discount_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'shipping_detail_city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'shipping_detail_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'shipping_detail_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'shipping_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'shop.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'external_order_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'from_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['shop.Order']"}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sku': ('cartridge.shop.fields.SKUField', [], {'max_length': '20'}),
'to_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'total_price': ('cartridge.shop.fields.MoneyField', [], {'default': "'0'", 'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'unit_price': ('cartridge.shop.fields.MoneyField', [], {'default': "'0'", 'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'})
},
'shop.product': {
'Meta': {'object_name': 'Product'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['shop.Category']", 'symmetrical': 'False', 'blank': 'True'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': "orm['generic.AssignedKeyword']", 'frozen_by_south': 'True'}),
'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'rating': ('mezzanine.generic.fields.RatingField', [], {'object_id_field': "'object_pk'", 'to': "orm['generic.Rating']", 'frozen_by_south': 'True'}),
'rating_average': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'rating_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_products_rel_+'", 'blank': 'True', 'to': "orm['shop.Product']"}),
'sale_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sale_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'sale_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'sale_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'sku': ('cartridge.shop.fields.SKUField', [], {'max_length': '20', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'unit_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'upsell_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'upsell_products_rel_+'", 'blank': 'True', 'to': "orm['shop.Product']"})
},
'shop.productaction': {
'Meta': {'unique_together': "(('product', 'timestamp'),)", 'object_name': 'ProductAction'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actions'", 'to': "orm['shop.Product']"}),
'timestamp': ('django.db.models.fields.IntegerField', [], {}),
'total_cart': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'total_purchase': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'shop.productimage': {
'Meta': {'ordering': "('_order',)", 'object_name': 'ProductImage'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'images'", 'to': "orm['shop.Product']"})
},
'shop.productoption': {
'Meta': {'object_name': 'ProductOption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('cartridge.shop.fields.OptionField', [], {'max_length': '50', 'null': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {})
},
'shop.productvariation': {
'Meta': {'ordering': "('-default',)", 'object_name': 'ProductVariation'},
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.ProductImage']", 'null': 'True', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'option1': ('cartridge.shop.fields.OptionField', [], {'max_length': '50', 'null': 'True'}),
'option2': ('cartridge.shop.fields.OptionField', [], {'max_length': '50', 'null': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variations'", 'to': "orm['shop.Product']"}),
'sale_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sale_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'sale_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'sale_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sku': ('cartridge.shop.fields.SKUField', [], {'max_length': '20', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'unit_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'})
},
'shop.reservableproduct': {
'Meta': {'object_name': 'ReservableProduct', '_ormbases': ['shop.Product']},
'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'})
},
'shop.reservableproductcartreservation': {
'Meta': {'object_name': 'ReservableProductCartReservation'},
'cart': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reservations'", 'to': "orm['shop.Cart']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'reservation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'in_carts'", 'to': "orm['shop.ReservableProductReservation']"})
},
'shop.reservableproductorderreservation': {
'Meta': {'object_name': 'ReservableProductOrderReservation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reservations'", 'to': "orm['shop.Order']"}),
'reservation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'in_orders'", 'to': "orm['shop.ReservableProductReservation']"})
},
'shop.reservableproductreservation': {
'Meta': {'object_name': 'ReservableProductReservation'},
'date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reservations'", 'to': "orm['shop.ReservableProduct']"})
},
'shop.sale': {
'Meta': {'object_name': 'Sale'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'sale_related'", 'blank': 'True', 'to': "orm['shop.Category']"}),
'discount_deduct': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_exact': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_percent': ('cartridge.shop.fields.PercentageField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'valid_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'valid_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'shop.specialprice': {
'Meta': {'object_name': 'SpecialPrice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'price_change': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'specialprices'", 'to': "orm['shop.Product']"}),
'special_type': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['shop']
|
{
"content_hash": "2fe9c789a928f53759db905e96c3ca71",
"timestamp": "",
"source": "github",
"line_count": 301,
"max_line_length": 197,
"avg_line_length": 86.60132890365449,
"alnum_prop": 0.5501975678060382,
"repo_name": "jaywink/cartridge-reservable",
"id": "f4c7b82a699c1198bc0a27dfe5e8675066d59ec7",
"size": "26091",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cartridge/shop/migrations/0022_auto__del_field_order_external_order_id__add_field_orderitem_external_.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "5886"
},
{
"name": "HTML",
"bytes": "43072"
},
{
"name": "JavaScript",
"bytes": "8904"
},
{
"name": "Python",
"bytes": "1118270"
}
],
"symlink_target": ""
}
|
import os
import json
from pathlib import Path
from .. import mesonlib
from ..mesonlib import MesonException
from . import ExtensionModule
from mesonbuild.modules import ModuleReturnValue
from ..interpreterbase import (
noPosargs, noKwargs, permittedKwargs,
InterpreterObject, InvalidArguments,
FeatureNew
)
from ..interpreter import ExternalProgramHolder
from ..interpreterbase import flatten
from ..build import known_shmod_kwargs
from .. import mlog
from ..environment import detect_cpu_family
from ..dependencies.base import (
DependencyMethods, ExternalDependency,
ExternalProgram, PkgConfigDependency,
NonExistingExternalProgram
)
# Keyword arguments accepted by extension_module(): everything that a plain
# shared_module() accepts, plus 'subdir', minus the name-mangling options
# (this module decides prefix/suffix itself).
mod_kwargs = (set(known_shmod_kwargs) | {'subdir'}) - {'name_prefix', 'name_suffix'}
def run_command(python, command):
    """Execute *command* with the *python* interpreter and return its
    stdout with surrounding whitespace stripped.

    Errors are not checked here; callers validate the returned text.
    """
    cmdline = python.get_command() + ['-c', command]
    _, stdout, _ = mesonlib.Popen_safe(cmdline)
    return stdout.strip()
class PythonDependency(ExternalDependency):
    """Dependency object for compiling/linking against a Python installation.

    Lookup strategy, in order:
      1. pkg-config using the ``LIBPC`` directory reported by the target
         interpreter's sysconfig (most precise),
      2. pkg-config via the regular ``PKG_CONFIG_LIBDIR``/``PKG_CONFIG_PATH``
         fallback,
      3. sysconfig-based discovery (``_find_libpy`` / ``_find_libpy_windows``).
    """

    def __init__(self, python_holder, environment, kwargs):
        super().__init__('python', environment, None, kwargs)
        self.name = 'python'
        self.static = kwargs.get('static', False)
        self.version = python_holder.version
        self.platform = python_holder.platform
        self.pkgdep = None
        self.variables = python_holder.variables
        self.paths = python_holder.paths
        if mesonlib.version_compare(self.version, '>= 3.0'):
            self.major_version = 3
        else:
            self.major_version = 2
        # We first try to find the necessary python variables using pkgconfig.
        # PyPy is excluded: its sysconfig data does not map onto pkg-config
        # files the same way (see _find_libpy for its special handling).
        if DependencyMethods.PKGCONFIG in self.methods and not python_holder.is_pypy:
            pkg_version = self.variables.get('LDVERSION') or self.version
            pkg_libdir = self.variables.get('LIBPC')
            # If python-X.Y.pc exists in LIBPC, we will try to use it
            if pkg_libdir is not None and Path(os.path.join(pkg_libdir, 'python-{}.pc'.format(pkg_version))).is_file():
                # Temporarily point pkg-config exclusively at LIBPC; the
                # previous environment is restored below regardless of outcome.
                old_pkg_libdir = os.environ.get('PKG_CONFIG_LIBDIR')
                old_pkg_path = os.environ.get('PKG_CONFIG_PATH')
                os.environ.pop('PKG_CONFIG_PATH', None)
                if pkg_libdir:
                    os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
                try:
                    self.pkgdep = PkgConfigDependency('python-{}'.format(pkg_version), environment, kwargs)
                    mlog.debug('Found "python-{}" via pkgconfig lookup in LIBPC ({})'.format(pkg_version, pkg_libdir))
                    py_lookup_method = 'pkgconfig'
                except MesonException as e:
                    mlog.debug('"python-{}" could not be found in LIBPC ({})'.format(pkg_version, pkg_libdir))
                    mlog.debug(e)
                # Restore the caller's pkg-config environment.
                if old_pkg_path is not None:
                    os.environ['PKG_CONFIG_PATH'] = old_pkg_path
                if old_pkg_libdir is not None:
                    os.environ['PKG_CONFIG_LIBDIR'] = old_pkg_libdir
                else:
                    os.environ.pop('PKG_CONFIG_LIBDIR', None)
            else:
                mlog.debug('"python-{}" could not be found in LIBPC ({}), this is likely due to a relocated python installation'.format(pkg_version, pkg_libdir))
            # If lookup via LIBPC failed, try to use fallback PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH mechanisms
            if self.pkgdep is None or not self.pkgdep.found():
                try:
                    self.pkgdep = PkgConfigDependency('python-{}'.format(pkg_version), environment, kwargs)
                    mlog.debug('Found "python-{}" via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH'.format(pkg_version))
                    py_lookup_method = 'pkgconfig-fallback'
                except MesonException as e:
                    mlog.debug('"python-{}" could not be found via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH'.format(pkg_version))
                    mlog.debug(e)
        if self.pkgdep and self.pkgdep.found():
            self.compile_args = self.pkgdep.get_compile_args()
            self.link_args = self.pkgdep.get_link_args()
            self.is_found = True
            self.pcdep = self.pkgdep
        else:
            self.pkgdep = None
            # Finally, try to find python via SYSCONFIG as a final measure
            if DependencyMethods.SYSCONFIG in self.methods:
                if mesonlib.is_windows():
                    self._find_libpy_windows(environment)
                else:
                    self._find_libpy(python_holder, environment)
                if self.is_found:
                    mlog.debug('Found "python-{}" via SYSCONFIG module'.format(self.version))
                    py_lookup_method = 'sysconfig'
        if self.is_found:
            mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES ({})'.format(py_lookup_method)))
        else:
            mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))

    def _find_libpy(self, python_holder, environment):
        """Locate libpython (or the PyPy equivalent) with the C compiler and
        populate is_found / link_args / compile_args."""
        if python_holder.is_pypy:
            # PyPy ships its C API library as pypy-c / pypy3-c under <base>/bin.
            if self.major_version == 3:
                libname = 'pypy3-c'
            else:
                libname = 'pypy-c'
            libdir = os.path.join(self.variables.get('base'), 'bin')
            libdirs = [libdir]
        else:
            # CPython: library name may carry debug/ABI suffixes (e.g. 'm', 'd').
            libname = 'python{}'.format(self.version)
            if 'DEBUG_EXT' in self.variables:
                libname += self.variables['DEBUG_EXT']
            if 'ABIFLAGS' in self.variables:
                libname += self.variables['ABIFLAGS']
            libdirs = []
        largs = self.clib_compiler.find_library(libname, environment, libdirs)
        self.is_found = largs is not None
        self.link_args = largs
        # Deduplicated include paths; falsy entries (missing keys) are dropped.
        inc_paths = mesonlib.OrderedSet([
            self.variables.get('INCLUDEPY'),
            self.paths.get('include'),
            self.paths.get('platinclude')])
        self.compile_args += ['-I' + path for path in inc_paths if path]

    def get_windows_python_arch(self):
        """Return '32' or '64' for the target Python on Windows, or None if
        the platform/compiler cannot be classified."""
        if self.platform == 'mingw':
            pycc = self.variables.get('CC')
            if pycc.startswith('x86_64'):
                return '64'
            elif pycc.startswith(('i686', 'i386')):
                return '32'
            else:
                # BUG FIX: the original adjacent string literals lacked a
                # space and rendered as "please filea bug".
                mlog.log('MinGW Python built with unknown CC {!r}, '
                         'please file a bug'.format(pycc))
                return None
        elif self.platform == 'win32':
            return '32'
        elif self.platform in ('win64', 'win-amd64'):
            return '64'
        mlog.log('Unknown Windows Python platform {!r}'.format(self.platform))
        return None

    def get_windows_link_args(self):
        """Return link arguments for Windows builds, or None if the import
        library / DLL cannot be found.

        Precondition: self.platform is a 'win*' or 'mingw' value — callers
        go through get_windows_python_arch() first, which rejects others.
        """
        if self.platform.startswith('win'):
            vernum = self.variables.get('py_version_nodot')
            if self.static:
                libname = 'libpython{}.a'.format(vernum)
            else:
                libname = 'python{}.lib'.format(vernum)
            lib = Path(self.variables.get('base')) / 'libs' / libname
        elif self.platform == 'mingw':
            if self.static:
                libname = self.variables.get('LIBRARY')
            else:
                libname = self.variables.get('LDLIBRARY')
            lib = Path(self.variables.get('LIBDIR')) / libname
        if not lib.exists():
            mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
            return None
        return [str(lib)]

    def _find_libpy_windows(self, env):
        '''
        Find python3 libraries on Windows and also verify that the arch matches
        what we are building for.
        '''
        pyarch = self.get_windows_python_arch()
        if pyarch is None:
            self.is_found = False
            return
        arch = detect_cpu_family(env.coredata.compilers)
        if arch == 'x86':
            arch = '32'
        elif arch == 'x86_64':
            arch = '64'
        else:
            # We can't cross-compile Python 3 dependencies on Windows yet
            mlog.log('Unknown architecture {!r} for'.format(arch),
                     mlog.bold(self.name))
            self.is_found = False
            return
        # Pyarch ends in '32' or '64'
        if arch != pyarch:
            mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but '
                     'found {}-bit'.format(arch, pyarch))
            self.is_found = False
            return
        # This can fail if the library is not found
        largs = self.get_windows_link_args()
        if largs is None:
            self.is_found = False
            return
        self.link_args = largs
        # Compile args
        inc_paths = mesonlib.OrderedSet([
            self.variables.get('INCLUDEPY'),
            self.paths.get('include'),
            self.paths.get('platinclude')])
        self.compile_args += ['-I' + path for path in inc_paths if path]
        # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/
        if pyarch == '64' and self.major_version == 2:
            self.compile_args += ['-DMS_WIN64']
        self.is_found = True

    @staticmethod
    def get_methods():
        """Return the lookup methods applicable on the current host OS."""
        if mesonlib.is_windows():
            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
        elif mesonlib.is_osx():
            # NOTE(review): EXTRAFRAMEWORK is advertised here but no
            # framework-based lookup is implemented in this class — confirm
            # it is handled by the ExternalDependency machinery.
            return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
        else:
            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]

    def get_pkgconfig_variable(self, variable_name, kwargs):
        """Delegate to the underlying pkg-config dependency when one was
        found; otherwise fall back to the base-class behaviour."""
        if self.pkgdep:
            return self.pkgdep.get_pkgconfig_variable(variable_name, kwargs)
        else:
            return super().get_pkgconfig_variable(variable_name, kwargs)
# Python snippets executed inside the *target* interpreter via run_command().
# Each prints exactly one JSON document on stdout so the caller can
# json.loads() the result.  The snippet text is a runtime payload: do not
# reformat it.

# All sysconfig configuration variables (CC, EXT_SUFFIX, LIBPC, LDVERSION, ...).
VARIABLES_COMMAND = '''
import sysconfig
import json
print (json.dumps (sysconfig.get_config_vars()))
'''

# The interpreter's installation paths (include, platlib, purelib, ...).
PATHS_COMMAND = '''
import sysconfig
import json
print (json.dumps(sysconfig.get_paths()))
'''

# Prefix-relative install paths: blanking base/platbase/installed_base leaves
# strings like '/lib/python3.X/site-packages' for joining onto Meson's prefix.
INSTALL_PATHS_COMMAND = '''
import sysconfig
import json
print (json.dumps(sysconfig.get_paths(scheme='posix_prefix', vars={'base': '', 'platbase': '', 'installed_base': ''})))
'''

# JSON boolean: is the target interpreter PyPy?
IS_PYPY_COMMAND = '''
import sys
import json
print (json.dumps('__pypy__' in sys.builtin_module_names))
'''
class PythonInstallation(ExternalProgramHolder, InterpreterObject):
    """Interpreter-facing object returned by python.find_installation().

    Wraps a concrete Python executable and exposes methods usable from
    meson.build files (extension_module, dependency, install_sources, ...).
    """

    def __init__(self, interpreter, python):
        # Introspect the target interpreter up front by running small
        # scripts in it (see the *_COMMAND constants); this shells out
        # several times, so construction is not free.
        InterpreterObject.__init__(self)
        ExternalProgramHolder.__init__(self, python)
        self.interpreter = interpreter
        prefix = self.interpreter.environment.coredata.get_builtin_option('prefix')
        self.variables = json.loads(run_command(python, VARIABLES_COMMAND))
        self.paths = json.loads(run_command(python, PATHS_COMMAND))
        install_paths = json.loads(run_command(python, INSTALL_PATHS_COMMAND))
        # install_paths entries start with '/', hence the [1:] before joining
        # onto the configured prefix.
        self.platlib_install_path = os.path.join(prefix, install_paths['platlib'][1:])
        self.purelib_install_path = os.path.join(prefix, install_paths['purelib'][1:])
        self.version = run_command(python, "import sysconfig; print (sysconfig.get_python_version())")
        self.platform = run_command(python, "import sysconfig; print (sysconfig.get_platform())")
        self.is_pypy = json.loads(run_command(python, IS_PYPY_COMMAND))

    @permittedKwargs(mod_kwargs)
    def extension_module(self, interpreter, state, args, kwargs):
        """Build a Python extension module: a shared_module() with the
        interpreter's extension suffix and no 'lib' prefix.  'subdir' is a
        platlib-relative install dir, mutually exclusive with 'install_dir'.
        """
        if 'subdir' in kwargs and 'install_dir' in kwargs:
            raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
        if 'subdir' in kwargs:
            subdir = kwargs.pop('subdir', '')
            if not isinstance(subdir, str):
                raise InvalidArguments('"subdir" argument must be a string.')
            kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir)
        # EXT_SUFFIX is the modern sysconfig key; 'SO' is the legacy one.
        # NOTE(review): the '.so' fallback key looks unusual — confirm which
        # interpreters actually expose it.
        suffix = self.variables.get('EXT_SUFFIX') or self.variables.get('SO') or self.variables.get('.so')
        # msys2's python3 has "-cpython-36m.dll", we have to be clever
        # -> keep only the last dot-separated component as the suffix and
        # fold the rest (e.g. '.cpython-36m') into the target name.
        split = suffix.rsplit('.', 1)
        suffix = split.pop(-1)
        args[0] += ''.join(s for s in split)
        kwargs['name_prefix'] = ''
        kwargs['name_suffix'] = suffix
        return interpreter.func_shared_module(None, args, kwargs)

    def dependency(self, interpreter, state, args, kwargs):
        """Return a dependency object for embedding/linking this Python."""
        dep = PythonDependency(self, interpreter.environment, kwargs)
        return interpreter.holderify(dep)

    @permittedKwargs(['pure', 'subdir'])
    def install_sources(self, interpreter, state, args, kwargs):
        """Install source files into purelib (pure=True) or platlib
        (default: pure=False), optionally under 'subdir'."""
        pure = kwargs.pop('pure', False)
        if not isinstance(pure, bool):
            raise InvalidArguments('"pure" argument must be a boolean.')
        subdir = kwargs.pop('subdir', '')
        if not isinstance(subdir, str):
            raise InvalidArguments('"subdir" argument must be a string.')
        if pure:
            kwargs['install_dir'] = os.path.join(self.purelib_install_path, subdir)
        else:
            kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir)
        return interpreter.func_install_data(None, args, kwargs)

    @noPosargs
    @permittedKwargs(['pure', 'subdir'])
    def get_install_dir(self, node, args, kwargs):
        """Return the computed install dir; note pure defaults to True here,
        unlike install_sources() where it defaults to False."""
        pure = kwargs.pop('pure', True)
        if not isinstance(pure, bool):
            raise InvalidArguments('"pure" argument must be a boolean.')
        subdir = kwargs.pop('subdir', '')
        if not isinstance(subdir, str):
            raise InvalidArguments('"subdir" argument must be a string.')
        if pure:
            res = os.path.join(self.purelib_install_path, subdir)
        else:
            res = os.path.join(self.platlib_install_path, subdir)
        return ModuleReturnValue(res, [])

    @noPosargs
    @noKwargs
    def language_version(self, node, args, kwargs):
        """Return the interpreter's X.Y version string."""
        return ModuleReturnValue(self.version, [])

    @noPosargs
    @noKwargs
    def found(self, node, args, kwargs):
        # Always True: this object is only constructed for a found python.
        return ModuleReturnValue(True, [])

    @noKwargs
    def has_path(self, node, args, kwargs):
        """True if the named sysconfig path exists for this interpreter."""
        if len(args) != 1:
            raise InvalidArguments('has_path takes exactly one positional argument.')
        path_name = args[0]
        if not isinstance(path_name, str):
            raise InvalidArguments('has_path argument must be a string.')
        return ModuleReturnValue(path_name in self.paths, [])

    @noKwargs
    def get_path(self, node, args, kwargs):
        """Return the named sysconfig path; the optional second argument is
        a default, otherwise an unknown name raises."""
        if len(args) not in (1, 2):
            raise InvalidArguments('get_path must have one or two arguments.')
        path_name = args[0]
        if not isinstance(path_name, str):
            raise InvalidArguments('get_path argument must be a string.')
        try:
            path = self.paths[path_name]
        except KeyError:
            if len(args) == 2:
                path = args[1]
            else:
                raise InvalidArguments('{} is not a valid path name'.format(path_name))
        return ModuleReturnValue(path, [])

    @noKwargs
    def has_variable(self, node, args, kwargs):
        """True if the named sysconfig variable exists for this interpreter."""
        if len(args) != 1:
            raise InvalidArguments('has_variable takes exactly one positional argument.')
        var_name = args[0]
        if not isinstance(var_name, str):
            raise InvalidArguments('has_variable argument must be a string.')
        return ModuleReturnValue(var_name in self.variables, [])

    @noKwargs
    def get_variable(self, node, args, kwargs):
        """Return the named sysconfig variable; the optional second argument
        is a default, otherwise an unknown name raises."""
        if len(args) not in (1, 2):
            raise InvalidArguments('get_variable must have one or two arguments.')
        var_name = args[0]
        if not isinstance(var_name, str):
            raise InvalidArguments('get_variable argument must be a string.')
        try:
            var = self.variables[var_name]
        except KeyError:
            if len(args) == 2:
                var = args[1]
            else:
                raise InvalidArguments('{} is not a valid variable name'.format(var_name))
        return ModuleReturnValue(var, [])

    def method_call(self, method_name, args, kwargs):
        """Dispatch a meson.build method call onto this object.

        Two calling conventions coexist: module-style methods receive the
        interpreter and have their results holderified, while simple
        queries return a ModuleReturnValue routed through
        module_method_callback.  The two name lists below define which
        convention each method uses — keep them in sync when adding methods.
        """
        try:
            fn = getattr(self, method_name)
        except AttributeError:
            raise InvalidArguments('Python object does not have method %s.' % method_name)
        # Mirrors the interpreter's flattening convention: methods may opt
        # out via a 'no-args-flattening' attribute.
        if not getattr(fn, 'no-args-flattening', False):
            args = flatten(args)
        if method_name in ['extension_module', 'dependency', 'install_sources']:
            value = fn(self.interpreter, None, args, kwargs)
            return self.interpreter.holderify(value)
        elif method_name in ['has_variable', 'get_variable', 'has_path', 'get_path', 'found', 'language_version', 'get_install_dir']:
            value = fn(None, args, kwargs)
            return self.interpreter.module_method_callback(value)
        else:
            raise InvalidArguments('Python object does not have method %s.' % method_name)
class PythonModule(ExtensionModule):
    """The 'python' Meson extension module.

    Its single snippet method, find_installation(), locates a Python
    interpreter and returns a PythonInstallation wrapper for it.
    """

    @FeatureNew('Python Module', '0.46.0')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.snippets.add('find_installation')

    # https://www.python.org/dev/peps/pep-0397/
    def _get_win_pythonpath(self, name_or_path):
        """Resolve 'python2'/'python3' to an executable path using the
        Windows 'py' launcher; return None for any other name or when the
        launcher cannot report a BINDIR."""
        if name_or_path not in ['python2', 'python3']:
            return None
        ver = {'python2': '-2', 'python3': '-3'}[name_or_path]
        cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"]
        _, stdout, _ = mesonlib.Popen_safe(cmd)
        # Renamed from 'dir': avoid shadowing the builtin.
        bindir = stdout.strip()
        if os.path.exists(bindir):
            return os.path.join(bindir, 'python')
        else:
            return None

    @permittedKwargs(['required'])
    def find_installation(self, interpreter, state, args, kwargs):
        """Find a Python installation.

        With no positional argument, Meson's own interpreter is used.
        On Windows the py-launcher is consulted, and as a last resort a
        plain 'python' with a matching major version is accepted for
        'python2'/'python3'.  Raises MesonException when required=True
        and nothing suitable is found.
        """
        required = kwargs.get('required', True)
        if not isinstance(required, bool):
            raise InvalidArguments('"required" argument must be a boolean.')
        if len(args) > 1:
            raise InvalidArguments('find_installation takes zero or one positional argument.')
        if args:
            name_or_path = args[0]
            if not isinstance(name_or_path, str):
                raise InvalidArguments('find_installation argument must be a string.')
        else:
            name_or_path = None
        if not name_or_path:
            mlog.log("Using meson's python {}".format(mesonlib.python_command))
            python = ExternalProgram('python3', mesonlib.python_command, silent=True)
        else:
            python = ExternalProgram(name_or_path, silent=True)
            if not python.found() and mesonlib.is_windows():
                pythonpath = self._get_win_pythonpath(name_or_path)
                if pythonpath is not None:
                    name_or_path = pythonpath
                    python = ExternalProgram(name_or_path, silent=True)
            # Last ditch effort, python2 or python3 can be named python
            # on various platforms, let's not give up just yet, if an executable
            # named python is available and has a compatible version, let's use
            # it
            if not python.found() and name_or_path in ['python2', 'python3']:
                python = ExternalProgram('python', silent=True)
                if python.found():
                    version = run_command(python, "import sysconfig; print (sysconfig.get_python_version())")
                    # Reject a 'python' whose major version contradicts the
                    # explicit python2/python3 request.
                    if not version or \
                            name_or_path == 'python2' and mesonlib.version_compare(version, '>= 3.0') or \
                            name_or_path == 'python3' and not mesonlib.version_compare(version, '>= 3.0'):
                        python = NonExistingExternalProgram()
        if not python.found():
            if required:
                raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
            res = ExternalProgramHolder(NonExistingExternalProgram())
        else:
            # Sanity check, we expect to have something that at least quacks in tune
            version = run_command(python, "import sysconfig; print (sysconfig.get_python_version())")
            if not version:
                res = ExternalProgramHolder(NonExistingExternalProgram())
                if required:
                    raise mesonlib.MesonException('{} is not a valid python'.format(python))
            else:
                res = PythonInstallation(interpreter, python)
        return res
def initialize(*args, **kwargs):
    # Module entry point: Meson calls this to instantiate the 'python'
    # extension module; all arguments are forwarded unchanged.
    return PythonModule(*args, **kwargs)
|
{
"content_hash": "7bb7d55ab6eef7865adbb85d606842fc",
"timestamp": "",
"source": "github",
"line_count": 523,
"max_line_length": 161,
"avg_line_length": 39.170172084130016,
"alnum_prop": 0.5913794786683588,
"repo_name": "thiblahute/meson",
"id": "d9ec56201a15f697df6eee51c4e95f407881e69b",
"size": "21074",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mesonbuild/modules/python.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4190"
},
{
"name": "Batchfile",
"bytes": "868"
},
{
"name": "C",
"bytes": "142464"
},
{
"name": "C#",
"bytes": "949"
},
{
"name": "C++",
"bytes": "26871"
},
{
"name": "CMake",
"bytes": "1780"
},
{
"name": "D",
"bytes": "4111"
},
{
"name": "Dockerfile",
"bytes": "694"
},
{
"name": "Emacs Lisp",
"bytes": "919"
},
{
"name": "Fortran",
"bytes": "4590"
},
{
"name": "Genie",
"bytes": "341"
},
{
"name": "Inno Setup",
"bytes": "372"
},
{
"name": "Java",
"bytes": "2125"
},
{
"name": "JavaScript",
"bytes": "136"
},
{
"name": "LLVM",
"bytes": "75"
},
{
"name": "Lex",
"bytes": "135"
},
{
"name": "Meson",
"bytes": "316300"
},
{
"name": "Objective-C",
"bytes": "1092"
},
{
"name": "Objective-C++",
"bytes": "332"
},
{
"name": "Python",
"bytes": "1817555"
},
{
"name": "Roff",
"bytes": "301"
},
{
"name": "Rust",
"bytes": "1079"
},
{
"name": "Shell",
"bytes": "2083"
},
{
"name": "Swift",
"bytes": "1152"
},
{
"name": "Vala",
"bytes": "10025"
},
{
"name": "Verilog",
"bytes": "709"
},
{
"name": "Vim script",
"bytes": "9480"
},
{
"name": "Yacc",
"bytes": "50"
}
],
"symlink_target": ""
}
|
import json
from secret_sauce.action_models import action_train, action_predict
# Project helpers for training the intent model and predicting an intent.
import torch.nn as nn
# NOTE(review): nn, Variable and torch are imported but never used below —
# presumably leftovers from experimentation; confirm before removing.
from torch.autograd import Variable
import torch

# Load the raw intent-training samples shipped with the app.
training_data = []
with open('app/datasets/action_dataset.json') as data_file:
    data = json.load(data_file)
    for line in data:
        # Collect every record as-is; the expected schema is whatever
        # action_train() consumes — TODO confirm against secret_sauce.
        training_data.append(line)

action_train(20000, training_data)  # training the model (20000 presumably an iteration count — verify)
# Smoke-test the freshly trained model with a new input.
#print("say wahts tinkerhub")
print("intent:" + action_predict("hello"))
"""
accuracy= 0 % input= tell me more about RIT actual= website guess= website
accuracy= 0 % input= give me more information actual= website guess= website
accuracy= 1 % input= who is the principal actual= website guess= website
accuracy= 0 % input= give me the phone number actual= contact guess= contact
accuracy= 31 % input= open college website actual= website guess= website
accuracy= 43 % input= open college website actual= website guess= website
accuracy= 0 % input= navigate me to RIT actual= location guess= location
accuracy= 30 % input= give me more information actual= website guess= website
accuracy= 28 % input= good bye actual= goodbye guess= goodbye
accuracy= 69 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 0 % input= navigate me to RIT actual= location guess= location
accuracy= 76 % input= open college website actual= website guess= website
accuracy= 67 % input= i have to go actual= goodbye guess= goodbye
accuracy= 80 % input= open college website actual= website guess= website
accuracy= 77 % input= i am going actual= goodbye guess= goodbye
accuracy= 78 % input= i am going actual= goodbye guess= goodbye
accuracy= 65 % input= how to go there actual= location guess= location
accuracy= 80 % input= i want to talk with authorities actual= contact guess= contact
accuracy= 80 % input= give me the phone number actual= contact guess= contact
accuracy= 90 % input= see you later actual= goodbye guess= goodbye
accuracy= 95 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 79 % input= how to go there actual= location guess= location
accuracy= 93 % input= show me RIT website actual= website guess= website
accuracy= 94 % input= see you later actual= goodbye guess= goodbye
accuracy= 87 % input= i have to go actual= goodbye guess= goodbye
accuracy= 88 % input= good bye actual= goodbye guess= goodbye
accuracy= 92 % input= give me the phone number actual= contact guess= contact
accuracy= 95 % input= who is the principal actual= website guess= website
accuracy= 80 % input= how can i reach actual= location guess= location
accuracy= 80 % input= how to contact actual= contact guess= contact
accuracy= 91 % input= give me more information actual= website guess= website
accuracy= 98 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 96 % input= who is the principal actual= website guess= website
accuracy= 95 % input= give me the phone number actual= contact guess= contact
accuracy= 97 % input= show me RIT website actual= website guess= website
accuracy= 99 % input= latest news about RIT actual= website guess= website
accuracy= 91 % input= connect me with RIT actual= contact guess= contact
accuracy= 98 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 96 % input= give me the phone number actual= contact guess= contact
accuracy= 99 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 93 % input= how to go there actual= location guess= location
accuracy= 94 % input= good bye actual= goodbye guess= goodbye
accuracy= 93 % input= how to go there actual= location guess= location
accuracy= 98 % input= see you later actual= goodbye guess= goodbye
accuracy= 98 % input= who is the principal actual= website guess= website
accuracy= 95 % input= good bye actual= goodbye guess= goodbye
accuracy= 89 % input= RIT actual= website guess= website
accuracy= 97 % input= give me the phone number actual= contact guess= contact
accuracy= 98 % input= show me RIT website actual= website guess= website
accuracy= 94 % input= how to go there actual= location guess= location
accuracy= 96 % input= give me more information actual= website guess= website
accuracy= 100 % input= tell me more about RIT actual= website guess= website
accuracy= 90 % input= RIT actual= website guess= website
accuracy= 96 % input= good bye actual= goodbye guess= goodbye
accuracy= 97 % input= give me more information actual= website guess= website
accuracy= 97 % input= give me more information actual= website guess= website
accuracy= 98 % input= talk to you soon actual= goodbye guess= goodbye
accuracy= 99 % input= open college website actual= website guess= website
accuracy= 97 % input= i have to go actual= goodbye guess= goodbye
accuracy= 98 % input= give me the phone number actual= contact guess= contact
accuracy= 100 % input= tell me more about RIT actual= website guess= website
accuracy= 96 % input= connect me with RIT actual= contact guess= contact
accuracy= 96 % input= how can i reach actual= location guess= location
accuracy= 97 % input= i have to go actual= goodbye guess= goodbye
accuracy= 95 % input= navigate me to RIT actual= location guess= location
accuracy= 94 % input= how to contact actual= contact guess= contact
accuracy= 97 % input= good bye actual= goodbye guess= goodbye
accuracy= 95 % input= navigate me to RIT actual= location guess= location
accuracy= 99 % input= see you later actual= goodbye guess= goodbye
accuracy= 99 % input= i am going actual= goodbye guess= goodbye
accuracy= 98 % input= i have to go actual= goodbye guess= goodbye
accuracy= 99 % input= i am going actual= goodbye guess= goodbye
accuracy= 99 % input= open college website actual= website guess= website
accuracy= 99 % input= show me RIT website actual= website guess= website
accuracy= 99 % input= i want to talk with authorities actual= contact guess= contact
accuracy= 95 % input= RIT actual= website guess= website
accuracy= 100 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 97 % input= how can i reach actual= location guess= location
accuracy= 99 % input= i want to talk with authorities actual= contact guess= contact
accuracy= 98 % input= how can i reach actual= location guess= location
accuracy= 100 % input= tell me more about RIT actual= website guess= website
accuracy= 100 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 100 % input= tell me more about RIT actual= website guess= website
accuracy= 100 % input= latest news about RIT actual= website guess= website
accuracy= 98 % input= good bye actual= goodbye guess= goodbye
accuracy= 98 % input= i have to go actual= goodbye guess= goodbye
accuracy= 96 % input= navigate me to RIT actual= location guess= location
accuracy= 95 % input= RIT actual= website guess= website
accuracy= 97 % input= navigate me to RIT actual= location guess= location
accuracy= 100 % input= have a nice day actual= goodbye guess= goodbye
accuracy= 98 % input= connect me with RIT actual= contact guess= contact
accuracy= 99 % input= i want to talk with authorities actual= contact guess= contact
accuracy= 100 % input= open college website actual= website guess= website
accuracy= 96 % input= RIT actual= website guess= website
accuracy= 96 % input= RIT actual= website guess= website
accuracy= 96 % input= RIT actual= website guess= website
accuracy= 97 % input= navigate me to RIT actual= location guess= location
accuracy= 100 % input= tell me more about RIT actual= website guess= website
accuracy= 97 % input= how to contact actual= contact guess= contact
accuracy= 100 % input= latest news about RIT actual= website guess= website
go to college website
intent: website
"""
|
{
"content_hash": "26f4302d60aff2c111864c21b45f96dc",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 84,
"avg_line_length": 61.30952380952381,
"alnum_prop": 0.7640129449838188,
"repo_name": "RITct/Rita",
"id": "180994496d88c62b80c9fab84ac3f4ecac51f71f",
"size": "7825",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/action_train.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "20"
},
{
"name": "HTML",
"bytes": "5151"
},
{
"name": "Python",
"bytes": "32107"
},
{
"name": "Shell",
"bytes": "144"
}
],
"symlink_target": ""
}
|
"""
Copyright 2014 Hans Hoogenboom
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os, sys
# TODO: export/output of parms????
# metadata according to the OSL specification
# Shader categories recognised by oslinfo's first output line.
_shaderTypes = ["surface", "displacement", "light", "volume", "shader"]
# Metadata keys that may appear for a shader or parameter (per the OSL spec).
_shaderKeys = ["name", "label", "type", "help", "url", "value", "page", "widget", "float", "int", "units"]
# Values the "widget" metadata entry may take.
_parmWidgets = ["number", "string", "boolean", "checkBox", "popup", "mapper", "filename", "null"]
# Extra metadata keys allowed for float parameters.
_parmFloat = ["min", "max", "sensitivity"]
# Extra metadata keys allowed for integer parameters.
_parmInteger = ["min", "max", "sensitivity", "digits", "slider"]
# Qualifier keywords attached to parameters (e.g. output parameters).
_parmKeyword = ["output"]
def _error( msg, crash = False ):
sys.stderr.write( msg )
sys.stderr.write( '\n' )
if crash:
sys.exit(1)
return False
def _formatVal( st ):
value = st.replace('"','',2)
value = value.strip()
return value
def _getKeyValue( st ):
signPos = st.index('=')
value = st[signPos+1:]
key = st[:signPos-1]
key = key.split()
key = key[-1].strip()
return (key, value)
def parseOslInfo( compiledShader ):
    """Parse ``oslinfo -v`` output for *compiledShader* into a dict.

    Returns a dict with the shader's 'type', 'name', flags
    'hasMetaData'/'hasParmHelp', one sub-dict per parameter (keyed by
    parameter name) and 'parmlist', the parameter names in output order.
    Returns False when the shader cannot be inspected.
    """
    DEBUG = False
    try:
        cmd = 'oslinfo -v %s' % compiledShader
        fp = os.popen(cmd, 'r')
    # BUGFIX: the original error path referenced the undefined name
    # `compileShader`, raising a NameError instead of reporting the file.
    # The bare `except:` is also narrowed to Exception.
    except Exception:
        _error("Invalid shaders in file %s.\n" % compiledShader)
        return False
    lines = fp.readlines()
    if not lines:
        _error('Missing shader definition for %s' % compiledShader)
        return False
    count = 0
    # First line: "<shadertype> <shadername>".
    shaderDef = lines[ count ]
    args = shaderDef.split()
    # tempShader stores all the data
    tempShader = dict()
    # stores the order in which oslinfo outputs its data
    # and separates the parameters from general shader data
    parmlist = list()
    if args[0] not in _shaderTypes:
        _error("Not a valid shader type: %s" % args[0])
        return False
    else:
        tempShader['type'] = _formatVal( args[0] )
        tempShader['name'] = _formatVal( args[1] )
        tempShader['hasMetaData'] = False
        tempShader['hasParmHelp'] = False
    # parse the rest of the file to get parameters
    # number of entries in lines
    length = len( lines ) - 1
    # lines iterator
    count = 1
    while True:
        line = lines[ count ]
        if not line:
            # NOTE(review): this only reports the problem; the loop keeps
            # going with the empty line — confirm whether a return/break
            # was intended here.
            _error( "No more lines to read, invalid shader %s?" % compiledShader )
        args = line.split()
        # A line that is neither a Default nor metadata starts a parameter:
        # "<name> <type>" or "<name> output <type>".
        if args[0] not in ["Default", "metadata:"]:
            tempparm = dict()
            if len( args ) < 3:
                tempparm['name'] = _formatVal( args[0] )
                tempparm['type'] = _formatVal( args[1] )
            else:
                tempparm['output'] = True
                tempparm['name'] = _formatVal( args[0] )
                tempparm['type'] = _formatVal( args[2] )
            # Consume the parameter's Default/metadata lines until the next
            # parameter (or end of output) is reached.
            condition = True
            widget = list()
            while condition:
                # read next line
                count += 1
                if count > length:
                    break
                line = lines[ count ]
                parmargs = line.split()
                if parmargs[0] == "Default":
                    tempparm['value'] = _formatVal( ' '.join(parmargs[2:]) )
                elif parmargs[0] == "metadata:":
                    (key, value) = _getKeyValue( line )
                    value = _formatVal( value )
                    if key != 'widget':
                        tempparm[key] = value
                    else:
                        # A parameter may carry several widget hints.
                        widget.append( value )
                else:
                    condition = False
                    # move one line back
                    count -= 1
            if len(widget) > 0 and 'widget' not in tempparm:
                tempparm['widget'] = widget
            tempShader[tempparm['name']] = tempparm
            parmlist.append(tempparm['name'])
            if 'help' in tempparm:
                tempShader['hasParmHelp'] = True
        # we didn't find a parameter yet, so there must be some general stuff
        else:
            if args[0] == "metadata:":
                (key, value) = _getKeyValue( line )
                value = _formatVal( value )
                # NOTE(review): shader-level metadata is written into the
                # most recent parameter dict (tempparm), and raises a
                # NameError if it appears before any parameter — this looks
                # like it was meant to target tempShader; confirm against
                # real oslinfo output before changing.
                tempparm[key] = value
                tempShader['hasMetaData'] = True
        if count > length:
            break
        else:
            count += 1
    # parsed all lines
    tempShader['parmlist'] = parmlist
    if DEBUG:
        for key in tempShader:
            print( "%s: %s" % ( key, tempShader[key] ) )
    return tempShader
|
{
"content_hash": "11f855903cde5b27f8d056f2e0dcdfef",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 107,
"avg_line_length": 32.91503267973856,
"alnum_prop": 0.5381254964257347,
"repo_name": "appleseedhq/houseed",
"id": "381e72c912bc0e3035b56c34a6e9032097c334a6",
"size": "5060",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "houdini/python2.xlibs/oslparser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "85171"
}
],
"symlink_target": ""
}
|
"""Utilities to calculate batch sizes."""
from lingvo.core import cluster_factory
from lingvo.core import py_utils
def scale_infeed_to_global(infeed_batch_size, use_per_host_infeed):
  """Obtains a global batch size from an infeed batch size and cluster configs.

  Args:
    infeed_batch_size: int: Per-infeed batch size.
    use_per_host_infeed: bool: Whether to use an individual infeed for each
      host.

  Returns:
    int: Global batch size.
  """
  cluster = cluster_factory.Current()
  # Without per-host infeeds (or without TPU hosts) the sizes coincide.
  if not (use_per_host_infeed and cluster.num_tpu_hosts > 0):
    return infeed_batch_size
  if not py_utils.use_tpu():
    raise ValueError('Scaling to TPU hosts without TPUs. {}'.format(
        cluster.num_tpu_hosts))
  return infeed_batch_size * cluster.num_tpu_hosts
def scale_global_to_infeed(global_batch_size, use_per_host_infeed):
  """Obtains infeed batch size from global batch size and cluster configs.

  Args:
    global_batch_size: int: Global batch size.
    use_per_host_infeed: bool: Whether to use an individual infeed for each
      host.

  Returns:
    int: infeed batch size.
  """
  cluster = cluster_factory.Current()
  # Without per-host infeeds (or without TPU hosts) the sizes coincide.
  if not (use_per_host_infeed and cluster.num_tpu_hosts > 0):
    return global_batch_size
  if not py_utils.use_tpu():
    raise ValueError('Scaling to TPU hosts without TPUs. {}'.format(
        cluster.num_tpu_hosts))
  return global_batch_size // cluster.num_tpu_hosts
def scale_split_to_infeed(split_batch_size, use_per_host_infeed):
  """Obtains an infeed batch size from a split batch size and cluster configs.

  Args:
    split_batch_size: int: Per-split batch size.
    use_per_host_infeed: bool: Whether to use an individual infeed for each
      host.

  Returns:
    int: Per-infeed batch size.
  """
  cluster = cluster_factory.Current()
  global_batch_size = split_batch_size * cluster.num_splits_per_client
  # If use_per_host_infeed, each input op is only responsible
  # for generating a subset of the whole batch.
  if use_per_host_infeed and cluster.num_tpu_hosts > 0:
    global_batch_size //= cluster.num_tpu_hosts
  return global_batch_size
def scale_global_to_worker(global_batch_size):
  """Obtains per-worker batch size given a global batch size.

  Args:
    global_batch_size: int: Global batch size.

  Returns:
    int: per-worker batch size.
  """
  cluster = cluster_factory.Cluster.Top()
  if not cluster:
    raise ValueError('Called scale_global_to_worker without a current cluster.')
  per_worker, remainder = divmod(global_batch_size,
                                 cluster.total_worker_devices)
  # The global batch must shard evenly across worker devices.
  if remainder:
    raise ValueError(f'global_batch_size {global_batch_size} did not divide'
                     f' evenly by {cluster.total_worker_devices} workers.')
  return per_worker
|
{
"content_hash": "d69113a251569f4faf4b8793dbebf600",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 80,
"avg_line_length": 31.86046511627907,
"alnum_prop": 0.6985401459854015,
"repo_name": "tensorflow/lingvo",
"id": "1842e3dd86454d4fb62e639c9f9697b7ad4bb186",
"size": "3429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lingvo/core/batch_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5163"
},
{
"name": "C++",
"bytes": "556344"
},
{
"name": "Dockerfile",
"bytes": "8484"
},
{
"name": "Jupyter Notebook",
"bytes": "36721"
},
{
"name": "Python",
"bytes": "9574124"
},
{
"name": "Shell",
"bytes": "50408"
},
{
"name": "Starlark",
"bytes": "182688"
},
{
"name": "TeX",
"bytes": "37275"
}
],
"symlink_target": ""
}
|
import logging
from .. import requests
def get_build_results(conn, project_key=None, expand=''):
    """Fetch build results from the Bamboo REST API as parsed JSON.

    When *project_key* is given, only that project's results are queried;
    *expand* is passed through as the REST "expand" parameter.
    """
    if project_key:
        entity = "result/" + project_key
    else:
        entity = "result"
    query = {
        "expand": expand
    }
    return requests.get_rest_return_json(
        conn,
        conn.baseurl + '/rest/api/latest/' + entity,
        query)
|
{
"content_hash": "5468a3058059ee9bd9f97a23f1f4ac02",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 61,
"avg_line_length": 22.066666666666666,
"alnum_prop": 0.6404833836858006,
"repo_name": "mhellmic/bamboo-automate",
"id": "b4848e5067c12dc9d3d22ee5d351bf919e3aacad",
"size": "331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/bamboo/results.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "36226"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the boolean ``updating`` field to ``AnalysisResults``.

    The field defaults to False, so existing rows are backfilled as
    not-updating.
    """
    # Must run after the preceding myflq migration.
    dependencies = [
        ('myflq', '0012_auto_20141224_1141'),
    ]
    operations = [
        migrations.AddField(
            model_name='analysisresults',
            name='updating',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
    ]
|
{
"content_hash": "e088b177055208fd0a2cfee0c210b22f",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 53,
"avg_line_length": 22.263157894736842,
"alnum_prop": 0.5981087470449172,
"repo_name": "beukueb/myflq",
"id": "b34c4d10c1d63779cc45943466e484ac1f51ec85",
"size": "447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/MyFLsite/myflq/migrations/0013_analysisresults_updating.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6679"
},
{
"name": "HTML",
"bytes": "42283"
},
{
"name": "JavaScript",
"bytes": "27656"
},
{
"name": "Liquid",
"bytes": "2583"
},
{
"name": "Python",
"bytes": "269494"
},
{
"name": "Shell",
"bytes": "4371"
},
{
"name": "XSLT",
"bytes": "10701"
}
],
"symlink_target": ""
}
|
import boto3
import socket
import zeroconf
import z42
import z42.common
import z42.driver
LOG = z42.LOG.getChild(__name__)
class EC2Driver(z42.driver.Driver):
    """Driver that advertises running EC2 instances as zeroconf services."""

    def __init__(self):
        try:
            self.region_names = self.fetch_region_names()
            LOG.debug('Available region names: %s', self.region_names)
        except Exception as e:
            raise RuntimeError(
                'Could not fetch region names. This error is likely to happen when you did not run `aws configure`.', e)

    def fetch_region_names(self):
        """Return the name of every EC2 region visible to this account."""
        response = boto3.client('ec2').describe_regions()
        return [region['RegionName'] for region in response['Regions']]

    def get_service_info_for_instance(self, inst):
        """Build a zeroconf ServiceInfo describing one EC2 instance."""
        ip = inst.public_ip_address
        try:
            # Prefer the instance's "Name" tag as its advertised hostname.
            hostname = next(tag['Value'] for tag in inst.tags
                            if tag['Key'] == 'Name')
        except (StopIteration, TypeError):
            LOG.warning('No "Name" tag is assigned for %s. Using %s as the name.',
                        inst.id, inst.id)
            hostname = inst.id
        # TODO: sanitize name!
        assert isinstance(hostname, str), '%s is not string?' % hostname
        desc = {'model': 'Z42 (driver: %s)' % __name__}
        return zeroconf.ServiceInfo(
            type='_device-info._tcp.local.',
            name='%s._device-info._tcp.local.' % hostname,
            address=socket.inet_aton(ip),
            port=0,
            weight=0,
            priority=0,
            properties=desc,
            server='%s.local.' % hostname)

    def get_service_infos_for_region(self, region_name):
        """Yield a ServiceInfo for every reachable instance in *region_name*."""
        LOG.debug('Scanning %s', region_name)
        resource = boto3.resource('ec2', region_name=region_name)
        for inst in resource.instances.all():
            try:
                srv = self.get_service_info_for_instance(inst)
                LOG.debug('Found %s (%s) on %s',
                          z42.common.Util.pretty(srv),
                          inst, region_name)
            except Exception as e:
                # Skip instances we cannot describe instead of aborting the scan.
                LOG.warning('Could not get information for %s on %s: %s',
                            inst, region_name, e)
                LOG.exception(e)
            else:
                yield srv

    # overrides Driver
    def get_service_infos(self):
        """Yield service infos across every available region."""
        for region in self.region_names:
            yield from self.get_service_infos_for_region(region)
|
{
"content_hash": "eb16c4ce5ea0fbdf4a6909adfd4ff78e",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 120,
"avg_line_length": 34.521126760563384,
"alnum_prop": 0.543859649122807,
"repo_name": "AkihiroSuda/z42",
"id": "414c97c27280282f148eaa3de227178d57800f1a",
"size": "2451",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "z42/drivers/ec2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7258"
},
{
"name": "Shell",
"bytes": "35"
}
],
"symlink_target": ""
}
|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from nose.tools import assert_equal
from streamalert_cli.terraform import rules_engine
class TestTerraformGenerateRuleEngine:
    """CLI Terraform Generate, Rules Engine"""
    # pylint: disable=no-self-use,attribute-defined-outside-init
    def setup(self):
        """CLI Terraform Generate, Rules Engine - Setup"""
        # Minimal StreamAlert config exercising the sections read by
        # generate_rules_engine: global account/infrastructure settings,
        # threat intel, and the rules engine Lambda settings.
        self.config = {
            'global': {
                'account': {
                    'aws_account_id': '123456789012',
                    'prefix': 'unit-test',
                    'region': 'us-east-1'
                },
                'infrastructure': {
                    'monitoring': {
                        'sns_topic_name': 'test_topic'
                    },
                    'rule_staging': {
                        'enabled': False
                    }
                }
            },
            'threat_intel': {
                'dynamodb_table_name': 'test_table',
                'enabled': False
            },
            'lambda': {
                'rules_engine_config': {
                    'log_level': 'info',
                    'log_retention_days': 14,
                    'memory': 128,
                    'metric_alarms': {
                        'errors': {
                            'enabled': True,
                            'evaluation_periods': 1,
                            'period_secs': 120,
                            'threshold': 0
                        },
                        'throttles': {
                            'enabled': True,
                            'evaluation_periods': 1,
                            'period_secs': 120,
                            'threshold': 0
                        }
                    },
                    'timeout': 60,
                    'vpc_config': {
                        'security_group_ids': [],
                        'subnet_ids': []
                    }
                }
            }
        }
    def test_generate_rules_engine(self):
        """CLI - Terraform Generate, Rules Engine"""
        result = rules_engine.generate_rules_engine(self.config)
        # Expected Terraform config: one IAM module and one Lambda module,
        # both wired to values from self.config above.
        expected_result = {
            'module': {
                'rules_engine_iam': {
                    'source': './modules/tf_rules_engine',
                    'account_id': '123456789012',
                    'region': 'us-east-1',
                    'prefix': 'unit-test',
                    'function_alias_arn': '${module.rules_engine_lambda.function_alias_arn}',
                    'function_name': '${module.rules_engine_lambda.function_name}',
                    'function_role_id': '${module.rules_engine_lambda.role_id}',
                    'threat_intel_enabled': self.config['threat_intel']['enabled'],
                    'dynamodb_table_name': self.config['threat_intel']['dynamodb_table_name'],
                    'rules_table_arn': '${module.globals.rules_table_arn}',
                    'enable_rule_staging': False,
                    'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
                    'classifier_sqs_sse_kms_key_arn': (
                        '${module.globals.classifier_sqs_sse_kms_key_arn}'
                    ),
                    'sqs_record_batch_size': 10
                },
                'rules_engine_lambda': {
                    'alarm_actions': ['arn:aws:sns:us-east-1:123456789012:test_topic'],
                    'description': 'Unit-Test Streamalert Rules Engine',
                    'environment_variables': {
                        'ALERTS_TABLE': 'unit-test_streamalert_alerts',
                        'ENABLE_METRICS': '0',
                        'LOGGER_LEVEL': 'info',
                        'STREAMALERT_PREFIX': 'unit-test',
                    },
                    'tags': {},
                    'errors_alarm_enabled': True,
                    'errors_alarm_evaluation_periods': 1,
                    'errors_alarm_period_secs': 120,
                    'errors_alarm_threshold': 0,
                    'function_name': 'unit-test_streamalert_rules_engine',
                    'handler': 'streamalert.rules_engine.main.handler',
                    'log_retention_days': 14,
                    'memory_size_mb': 128,
                    'source': './modules/tf_lambda',
                    'throttles_alarm_enabled': True,
                    'throttles_alarm_evaluation_periods': 1,
                    'throttles_alarm_period_secs': 120,
                    'throttles_alarm_threshold': 0,
                    'timeout_sec': 60,
                    'vpc_security_group_ids': [],
                    'vpc_subnet_ids': []
                }
            }
        }
        assert_equal(result, expected_result)
|
{
"content_hash": "68954421559a7de82b1f3c2fd7e54c0f",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 94,
"avg_line_length": 41.21705426356589,
"alnum_prop": 0.45232273838630804,
"repo_name": "airbnb/streamalert",
"id": "07e8e69becd9b144557e136112a0c28084abb74b",
"size": "5317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/streamalert_cli/terraform/test_generate_rules_engine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HCL",
"bytes": "142275"
},
{
"name": "Python",
"bytes": "2209853"
},
{
"name": "Shell",
"bytes": "2975"
}
],
"symlink_target": ""
}
|
from multiprocessing import pool
import tkinter as tk
import config
class TickerWindow:
    """
    Template class for a ticker window. All/most of the code here
    is "backend" and widgets are not drawn via tk.grid(). This is intended
    to be inherited in the GUI classes where objects will be drawn and
    functions/widgets overridden if needed.
    Args:
        app(the main tkinter object)
    Required Overrides(inside child object):
        on_click_help(needs custom dialogue in help windows)
        timer_update(needs different values/behavior for countdown timers)
        timerValue(set to maximum timer rate value)
    Required widget draw(grid()) calls(inside child object):
        labelName
        labelChange
        labelVol
        labelBuf
        sortByName
        sortByChange
        sortByVol
        notifyBell
        help
        yScroll
        listName
        listChange
        listVol
        timerLabel
        timerFrame
        timerDisp
    """
    def __init__(self, app):
        self.app = app
        # Single worker thread available to subclasses for background work.
        self.pool = pool.ThreadPool(processes=1)
        # BUGFIX: this attribute was previously assigned as ``self.async``;
        # "async" is a reserved keyword since Python 3.7, which made the
        # module a SyntaxError (the class already requires 3.6+ f-strings).
        # Renamed to ``async_result``.
        self.async_result = pool.AsyncResult
        self.ticker_data = []
        self.create_labels()
        self.create_buttons()
        self.create_lists()
        self.create_timer()
    def create_labels(self):
        """Create the column-header labels and a spacer frame (not drawn)."""
        color_options = {'bg': config.MAIN_BG,
                         'fg': config.TEXT_COLOR}
        self.labelName = tk.Label(text='Market Name', **color_options)
        self.labelChange = tk.Label(text='Change', **color_options)
        self.labelVol = tk.Label(text='Volume', **color_options)
        self.labelBuf = tk.Frame(width=120, height=42, bg=config.MAIN_BG)
    def create_buttons(self):
        """Create sort, notification and help buttons plus their state."""
        color_options = {'bg': config.MAIN_BG,
                         'activebackground': config.CLICKED_BG}
        self.sortByName = tk.Button(relief='raised', image=self.app.noArrow,
                                    command=lambda: self.on_click_sort('sortByName'),
                                    **color_options)
        self.sortByChange = tk.Button(relief='raised', image=self.app.downArrow,
                                      command=lambda: self.on_click_sort('sortByChange'),
                                      **color_options)
        self.sortByVol = tk.Button(relief='raised', image=self.app.noArrow,
                                   command=lambda: self.on_click_sort('sortByVol'),
                                   **color_options)
        # Per-button sort state: name -> [direction ('asc'/'desc'/'none'),
        # index of the ticker_data column that button sorts by].
        self.buttons = {'sortByName': ['none', 0],
                        'sortByChange': ['desc', 1],
                        'sortByVol': ['none', 2]}
        self.notifyBell = tk.Button(relief='raised', image=self.app.notifyBell,
                                    command=lambda: self.on_click_notif(),
                                    **color_options)
        self.notifyIsActive = False
        self.help = tk.Button(relief='flat', image=self.app.questionMark,
                              command=lambda: self.on_click_help(),
                              **color_options)
    def on_click_sort(self, pressed_name):
        """Toggle sort direction for the pressed column; reset the others."""
        for b_name in self.buttons:
            if b_name == pressed_name:
                sort_direction = self.buttons[b_name][0]
                if sort_direction == 'desc':
                    self.buttons[b_name][0] = 'asc'
                    getattr(self, b_name).config(image=self.app.upArrow)
                    self.ticker_data.sort(key=lambda x: x[self.buttons[b_name][1]])
                if sort_direction == 'asc' or sort_direction == 'none':
                    self.buttons[b_name][0] = 'desc'
                    getattr(self, b_name).config(image=self.app.downArrow)
                    self.ticker_data.sort(key=lambda x: x[self.buttons[b_name][1]],
                                          reverse=True)
            else:
                # Only one column sorts at a time: clear the other arrows.
                self.buttons[b_name][0] = 'none'
                getattr(self, b_name).config(image=self.app.noArrow)
        self.display_ticker()
    def on_click_notif(self):
        """Toggle the notification bell between active and inactive."""
        if self.notifyBell.cget('relief') == 'raised':
            self.notifyBell.config(relief='sunken')
            self.notifyIsActive = True
        else:
            self.notifyBell.config(relief='raised')
            self.notifyIsActive = False
    def on_click_help(self):
        """Open the help dialogue; must be overridden by subclasses."""
        pass
    def create_lists(self):
        """Create the three synced list boxes and their shared scrollbar."""
        self.yScroll = tk.Scrollbar(orient=tk.VERTICAL, command=self.on_vsb)
        color_options = {'bg': config.LIGHT_BG,
                         'fg': config.TEXT_COLOR,
                         'selectbackground': config.LIGHT_BG,
                         'selectforeground': config.TEXT_COLOR,
                         'highlightcolor': config.LIGHT_BG,
                         'highlightbackground': config.LIGHT_BG}
        self.listName = tk.Listbox(activestyle='none', relief='sunken',
                                   yscrollcommand=self.yScroll.set,
                                   width=40, height=6, **color_options)
        self.listName.bind('<MouseWheel>', self.on_mouse_wheel)
        self.listChange = tk.Listbox(activestyle='none', relief='sunken',
                                     yscrollcommand=self.yScroll.set,
                                     width=8, height=6, **color_options)
        self.listChange.bind('<MouseWheel>', self.on_mouse_wheel)
        self.listVol = tk.Listbox(activestyle='none', relief='sunken',
                                  yscrollcommand=self.yScroll.set,
                                  width=8, height=6, **color_options)
        self.listVol.bind('<MouseWheel>', self.on_mouse_wheel)
    def on_vsb(self, *args):
        """Scrollbar callback: keep all three list boxes in sync."""
        self.listName.yview(*args)
        self.listChange.yview(*args)
        self.listVol.yview(*args)
    def on_mouse_wheel(self, event):
        """Mouse-wheel handler: scroll all three list boxes together."""
        if event.delta < 0:
            self.listName.yview('scroll', 1, 'units')
            self.listChange.yview('scroll', 1, 'units')
            self.listVol.yview('scroll', 1, 'units')
        if event.delta > 0:
            self.listName.yview('scroll', -1, 'units')
            self.listChange.yview('scroll', -1, 'units')
            self.listVol.yview('scroll', -1, 'units')
        # Prevent the default per-widget scroll from firing as well.
        return 'break'
    def create_timer(self):
        """Create the countdown-timer widgets; timerValue is set by subclasses."""
        self.timerLabel = tk.Label(text='Time until update:', bg=config.MAIN_BG, fg=config.TEXT_COLOR)
        self.timerFrame = tk.LabelFrame(width=120, height=120, bg=config.MAIN_BG)
        self.timerDisp = tk.Label(font=('', 20), bg=config.MAIN_BG, fg=config.TEXT_COLOR)
        self.timerValue = 0
    def timer_update(self):
        """Advance the countdown; must be overridden by subclasses."""
        pass
    def sort_ticker(self):
        """Re-apply the currently active column sort, then redraw."""
        if self.ticker_data:
            for b_name in self.buttons:
                if self.buttons[b_name][0] == 'desc':
                    self.ticker_data.sort(key=lambda x: x[self.buttons[b_name][1]],
                                          reverse=True)
                if self.buttons[b_name][0] == 'asc':
                    self.ticker_data.sort(key=lambda x: x[self.buttons[b_name][1]])
            self.display_ticker()
    def display_ticker(self):
        """Clear and refill the three list boxes from ticker_data rows
        of the form (name, change, volume)."""
        self.listName.delete(0, tk.END)
        self.listChange.delete(0, tk.END)
        self.listVol.delete(0, tk.END)
        for i in self.ticker_data:
            self.listName.insert(tk.END, f'{i[0]}')
            # NOTE(review): the '+' is hard-coded, so a negative change would
            # render as "+-1.23%" — presumably this window only shows
            # gainers; confirm with the data source.
            self.listChange.insert(tk.END, f'+{i[1]:.02f}%')
            self.listVol.insert(tk.END, f'{i[2]:.02f}')
        self.app.update()
|
{
"content_hash": "5e5297aac718faa5c40782c787284485",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 102,
"avg_line_length": 39.1578947368421,
"alnum_prop": 0.5388440860215054,
"repo_name": "JevinJ/Bittrex-Notify",
"id": "0147a10990310b288d933278138ecfa14cc0635a",
"size": "7440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tickerwindow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25145"
}
],
"symlink_target": ""
}
|
"""Provide functionality to interact with the vlc telnet interface."""
from __future__ import annotations
from datetime import datetime
from functools import wraps
from typing import Any, Callable, TypeVar, cast
from aiovlc.client import Client
from aiovlc.exceptions import AuthError, CommandError, ConnectError
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_CLEAR_PLAYLIST,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_SHUFFLE_SET,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
STATE_IDLE,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util
from .const import DATA_AVAILABLE, DATA_VLC, DEFAULT_NAME, DEFAULT_PORT, DOMAIN, LOGGER
# Raw VLC volume value treated as 100% by this integration.
MAX_VOLUME = 500
# Feature bitmask advertised by the media player entity.
SUPPORT_VLC = (
    SUPPORT_CLEAR_PLAYLIST
    | SUPPORT_NEXT_TRACK
    | SUPPORT_PAUSE
    | SUPPORT_PLAY
    | SUPPORT_PLAY_MEDIA
    | SUPPORT_PREVIOUS_TRACK
    | SUPPORT_SEEK
    | SUPPORT_SHUFFLE_SET
    | SUPPORT_STOP
    | SUPPORT_VOLUME_MUTE
    | SUPPORT_VOLUME_SET
)
# YAML schema for the deprecated platform-style configuration
# (imported into a config entry by async_setup_platform).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.positive_int,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
# Type variable so decorators preserve the wrapped function's signature.
Func = TypeVar("Func", bound=Callable[..., Any])
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the vlc platform."""
    LOGGER.warning(
        "Loading VLC media player Telnet integration via platform setup is deprecated; "
        "Please remove it from your configuration"
    )
    # Hand the deprecated YAML settings to the import config flow.
    import_flow = hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_IMPORT}, data=config
    )
    hass.async_create_task(import_flow)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the vlc platform."""
    entry_data = hass.data[DOMAIN][entry.entry_id]
    # CONF_NAME is only present in imported YAML.
    name = entry.data.get(CONF_NAME) or DEFAULT_NAME
    device = VlcDevice(entry, entry_data[DATA_VLC], name, entry_data[DATA_AVAILABLE])
    async_add_entities([device], True)
def catch_vlc_errors(func: Func) -> Func:
    """Decorate a ``VlcDevice`` coroutine method to catch aiovlc errors.

    ``CommandError`` is logged and swallowed.  ``ConnectError`` marks the
    entity unavailable (logging only on the first failure to avoid spamming
    the log).  The wrapped coroutine's return value is propagated so the
    decorator also works for methods that return a result; on a caught error
    ``None`` is returned.
    """

    @wraps(func)
    async def wrapper(self, *args: Any, **kwargs: Any) -> Any:
        """Catch VLC errors and modify availability."""
        try:
            # Propagate the result instead of discarding it, so the decorator
            # generalizes beyond None-returning methods.
            return await func(self, *args, **kwargs)
        except CommandError as err:
            LOGGER.error("Command error: %s", err)
        except ConnectError as err:
            # Only log the first connection loss; subsequent failures while
            # already unavailable are silent.
            # pylint: disable=protected-access
            if self._available:
                LOGGER.error("Connection error: %s", err)
                self._available = False
        return None

    return cast(Func, wrapper)
class VlcDevice(MediaPlayerEntity):
    """Representation of a vlc player."""

    def __init__(
        self, config_entry: ConfigEntry, vlc: Client, name: str, available: bool
    ) -> None:
        """Initialize the vlc device."""
        self._config_entry = config_entry
        self._name = name
        # Cached player state; refreshed by async_update().
        self._volume: float | None = None
        self._muted: bool | None = None
        self._state: str | None = None
        self._media_position_updated_at: datetime | None = None
        self._media_position: int | None = None
        self._media_duration: int | None = None
        self._vlc = vlc
        self._available = available
        # Volume level saved before muting so it can be restored on unmute.
        self._volume_bkp = 0.0
        self._media_artist: str | None = None
        self._media_title: str | None = None
        config_entry_id = config_entry.entry_id
        self._attr_unique_id = config_entry_id
        self._attr_device_info = {
            "name": name,
            "identifiers": {(DOMAIN, config_entry_id)},
            "manufacturer": "VideoLAN",
            "entry_type": "service",
        }

    @catch_vlc_errors
    async def async_update(self) -> None:
        """Get the latest details from the device."""
        if not self._available:
            # Not connected: try to (re)connect and authenticate first.
            try:
                await self._vlc.connect()
            except ConnectError as err:
                LOGGER.debug("Connection error: %s", err)
                return
            try:
                await self._vlc.login()
            except AuthError:
                # Bad credentials: reload the config entry so its setup /
                # reauth path can handle the failure.
                LOGGER.debug("Failed to login to VLC")
                self.hass.async_create_task(
                    self.hass.config_entries.async_reload(self._config_entry.entry_id)
                )
                return
            self._state = STATE_IDLE
            self._available = True
            LOGGER.info("Connected to vlc host: %s", self._vlc.host)
        status = await self._vlc.status()
        LOGGER.debug("Status: %s", status)
        # VLC reports volume on a 0..MAX_VOLUME scale; normalize to 0..1.
        self._volume = status.audio_volume / MAX_VOLUME
        state = status.state
        if state == "playing":
            self._state = STATE_PLAYING
        elif state == "paused":
            self._state = STATE_PAUSED
        else:
            self._state = STATE_IDLE
        if self._state != STATE_IDLE:
            self._media_duration = (await self._vlc.get_length()).length
            time_output = await self._vlc.get_time()
            vlc_position = time_output.time
            # Check if current position is stale.
            if vlc_position != self._media_position:
                self._media_position_updated_at = dt_util.utcnow()
                self._media_position = vlc_position
            info = await self._vlc.info()
            data = info.data
            LOGGER.debug("Info data: %s", data)
            self._media_artist = data.get(0, {}).get("artist")
            self._media_title = data.get(0, {}).get("title")
            if not self._media_title:
                # Fall back to filename.
                # NOTE(review): metadata is read via data.get(0, ...) above but
                # via data.get("data") here — confirm which key layout the
                # aiovlc info() response actually uses.
                if data_info := data.get("data"):
                    self._media_title = data_info["filename"]

    @property
    def name(self) -> str:
        """Return the name of the device."""
        return self._name

    @property
    def state(self) -> str | None:
        """Return the state of the device."""
        return self._state

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._available

    @property
    def volume_level(self) -> float | None:
        """Volume level of the media player (0..1)."""
        return self._volume

    @property
    def is_volume_muted(self) -> bool | None:
        """Boolean if volume is currently muted."""
        return self._muted

    @property
    def supported_features(self) -> int:
        """Flag media player features that are supported."""
        return SUPPORT_VLC

    @property
    def media_content_type(self) -> str:
        """Content type of current playing media."""
        return MEDIA_TYPE_MUSIC

    @property
    def media_duration(self) -> int | None:
        """Duration of current playing media in seconds."""
        return self._media_duration

    @property
    def media_position(self) -> int | None:
        """Position of current playing media in seconds."""
        return self._media_position

    @property
    def media_position_updated_at(self) -> datetime | None:
        """When was the position of the current playing media valid."""
        return self._media_position_updated_at

    @property
    def media_title(self) -> str | None:
        """Title of current playing media."""
        return self._media_title

    @property
    def media_artist(self) -> str | None:
        """Artist of current playing media, music track only."""
        return self._media_artist

    @catch_vlc_errors
    async def async_media_seek(self, position: float) -> None:
        """Seek the media to a specific location."""
        await self._vlc.seek(round(position))

    @catch_vlc_errors
    async def async_mute_volume(self, mute: bool) -> None:
        """Mute the volume."""
        assert self._volume is not None
        if mute:
            # Remember the current volume so unmute can restore it.
            self._volume_bkp = self._volume
            await self.async_set_volume_level(0)
        else:
            await self.async_set_volume_level(self._volume_bkp)
        self._muted = mute

    @catch_vlc_errors
    async def async_set_volume_level(self, volume: float) -> None:
        """Set volume level, range 0..1."""
        await self._vlc.set_volume(round(volume * MAX_VOLUME))
        self._volume = volume
        if self._muted and self._volume > 0:
            # This can happen if we were muted and then see a volume_up.
            self._muted = False

    @catch_vlc_errors
    async def async_media_play(self) -> None:
        """Send play command."""
        await self._vlc.play()
        self._state = STATE_PLAYING

    @catch_vlc_errors
    async def async_media_pause(self) -> None:
        """Send pause command."""
        status = await self._vlc.status()
        if status.state != "paused":
            # Make sure we're not already paused since VLCTelnet.pause() toggles
            # pause.
            await self._vlc.pause()
        self._state = STATE_PAUSED

    @catch_vlc_errors
    async def async_media_stop(self) -> None:
        """Send stop command."""
        await self._vlc.stop()
        self._state = STATE_IDLE

    @catch_vlc_errors
    async def async_play_media(
        self, media_type: str, media_id: str, **kwargs: Any
    ) -> None:
        """Play media from a URL or file."""
        if media_type != MEDIA_TYPE_MUSIC:
            LOGGER.error(
                "Invalid media type %s. Only %s is supported",
                media_type,
                MEDIA_TYPE_MUSIC,
            )
            return
        await self._vlc.add(media_id)
        self._state = STATE_PLAYING

    @catch_vlc_errors
    async def async_media_previous_track(self) -> None:
        """Send previous track command."""
        await self._vlc.prev()

    @catch_vlc_errors
    async def async_media_next_track(self) -> None:
        """Send next track command."""
        await self._vlc.next()

    @catch_vlc_errors
    async def async_clear_playlist(self) -> None:
        """Clear players playlist."""
        await self._vlc.clear()

    @catch_vlc_errors
    async def async_set_shuffle(self, shuffle: bool) -> None:
        """Enable/disable shuffle mode."""
        shuffle_command = "on" if shuffle else "off"
        await self._vlc.random(shuffle_command)
|
{
"content_hash": "6fa522817f87e08eb11dcbee0832e2f2",
"timestamp": "",
"source": "github",
"line_count": 352,
"max_line_length": 88,
"avg_line_length": 31.286931818181817,
"alnum_prop": 0.6042858440025425,
"repo_name": "aronsky/home-assistant",
"id": "624234ce712c781a0f67842d09a5a35ee208a02e",
"size": "11013",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/vlc_telnet/media_player.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38448521"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
# Package metadata for the rasterrgb distribution.
__author__ = """Ubirajara Cruz"""
__email__ = 'xvzfbira@yahoo.com.br'
__version__ = '0.1.0'
import numpy as np
from osgeo import gdal
from scipy import interpolate
|
{
"content_hash": "2451e47027ae2aeefa075a725fcac7a1",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 35,
"avg_line_length": 20.625,
"alnum_prop": 0.6787878787878788,
"repo_name": "biracruz/rasterrgb",
"id": "913994cdf138b13942c54f464df10139094c917f",
"size": "190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rasterrgb/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2303"
},
{
"name": "Python",
"bytes": "21363"
}
],
"symlink_target": ""
}
|
from couchdbkit import ResourceNotFound
from crispy_forms.bootstrap import StrictButton, InlineField, FormActions, FieldWithButtons
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field, HTML, Div, Fieldset
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _, ugettext_noop
from casexml.apps.case.models import CommCareCaseGroup
class AddCaseGroupForm(forms.Form):
    """Inline form for creating a new CommCareCaseGroup by name."""
    name = forms.CharField(required=True, label=ugettext_noop("Group Name"))

    def __init__(self, *args, **kwargs):
        super(AddCaseGroupForm, self).__init__(*args, **kwargs)
        # Crispy-forms helper: render as an inline form without labels.
        self.helper = FormHelper()
        self.helper.form_style = 'inline'
        self.helper.form_show_labels = False
        submit_button = StrictButton(
            mark_safe('<i class="icon-plus"></i> %s' % _("Create Group")),
            css_class='btn-success',
            type="submit"
        )
        self.helper.layout = Layout(InlineField('name'), submit_button)

    def create_group(self, domain):
        """Persist and return a new case group in the given domain."""
        new_group = CommCareCaseGroup(
            name=self.cleaned_data['name'],
            domain=domain
        )
        new_group.save()
        return new_group
class UpdateCaseGroupForm(AddCaseGroupForm):
    """Inline form for renaming an existing CommCareCaseGroup.

    Adds the hidden ``item_id`` of the group being edited plus an ``action``
    marker that the handling view can dispatch on.
    """
    item_id = forms.CharField(widget=forms.HiddenInput())
    action = forms.CharField(widget=forms.HiddenInput(), initial="update_case_group")
    def __init__(self, *args, **kwargs):
        super(UpdateCaseGroupForm, self).__init__(*args, **kwargs)
        self.fields['name'].label = ""
        # Replace the parent's "create" layout with an update layout carrying
        # the hidden fields alongside the name input.
        self.helper.form_style = 'inline'
        self.helper.form_method = 'post'
        self.helper.form_show_labels = True
        self.helper.layout = Layout(
            'item_id',
            'action',
            FieldWithButtons(
                Field('name', placeholder="Group Name"),
                StrictButton(
                    _("Update Group Name"),
                    css_class='btn-primary',
                    type="submit",
                )
            ),
        )
    def clean(self):
        cleaned_data = super(UpdateCaseGroupForm, self).clean()
        # Look up the group being edited and stash it for update_group().
        try:
            self.current_group = CommCareCaseGroup.get(self.cleaned_data.get('item_id'))
        except AttributeError:
            # NOTE(review): presumably raised when item_id is missing/None and
            # the couchdbkit lookup fails — confirm the actual failure mode.
            raise forms.ValidationError("You're not passing in the group's id!")
        except ResourceNotFound:
            raise forms.ValidationError("This case group was not found in our database!")
        return cleaned_data
    def update_group(self):
        # clean() must have run first so that self.current_group is set.
        self.current_group.name = self.cleaned_data['name']
        self.current_group.save()
        return self.current_group
class AddCaseToGroupForm(forms.Form):
    """Inline form for adding a single case to a case group by identifier."""
    case_identifier = forms.CharField(label=ugettext_noop("Case ID, External ID, or Phone Number"))

    def __init__(self, *args, **kwargs):
        super(AddCaseToGroupForm, self).__init__(*args, **kwargs)
        # Render inline without labels, with the submit button next to the field.
        helper = FormHelper()
        helper.form_style = 'inline'
        helper.form_show_labels = False
        helper.layout = Layout(
            InlineField('case_identifier', css_class='input-xlarge'),
            StrictButton(
                mark_safe('<i class="icon-plus"></i> %s' % _("Add Case")),
                css_class='btn-success',
                type="submit"
            )
        )
        self.helper = helper
|
{
"content_hash": "0165b72d7eb30d86b28e8d67bfe8f667",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 99,
"avg_line_length": 35.4375,
"alnum_prop": 0.5911228689006467,
"repo_name": "SEL-Columbia/commcare-hq",
"id": "43c3fee70103c168fb08f32a521e07852bc13cc9",
"size": "3402",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "corehq/apps/data_interfaces/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "768322"
},
{
"name": "JavaScript",
"bytes": "2647080"
},
{
"name": "Python",
"bytes": "7806659"
},
{
"name": "Shell",
"bytes": "28569"
}
],
"symlink_target": ""
}
|
from tensorflow.python.ops.rnn_cell import _linear
from tensorflow.python.util import nest
import tensorflow as tf
from my.tensorflow import flatten, reconstruct, add_wd, exp_mask
def linear(args, output_size, bias, bias_start=0.0, scope=None, squeeze=False, wd=0.0, input_keep_prob=1.0,
           is_train=None):
    """Linear map over the last axis of arbitrary-rank tensors.

    Flattens each arg to rank 2, applies tf's ``_linear``, then restores the
    leading axes.  Optionally applies input dropout (train-time only),
    squeezes the output when ``output_size`` is 1, and registers weight decay.
    """
    if args is None or (nest.is_sequence(args) and not args):
        raise ValueError("`args` must be specified")
    if not nest.is_sequence(args):
        args = [args]
    # Collapse all leading axes so _linear sees rank-2 inputs.
    flat_args = [flatten(arg, 1) for arg in args]
    if input_keep_prob < 1.0:
        assert is_train is not None
        # Capturing `arg` in the lambdas is safe: tf.cond invokes them
        # immediately within the same loop iteration while building the graph.
        flat_args = [tf.cond(is_train, lambda: tf.nn.dropout(arg, input_keep_prob), lambda: arg)
                     for arg in flat_args]
    flat_out = _linear(flat_args, output_size, bias, bias_start=bias_start, scope=scope)
    # Restore the original leading axes on the output.
    out = reconstruct(flat_out, args[0], 1)
    if squeeze:
        # Drop the trailing size-1 axis produced when output_size == 1.
        out = tf.squeeze(out, [len(args[0].get_shape().as_list())-1])
    if wd:
        # Registers L2 weight decay on the variables of the current scope.
        add_wd(wd)
    return out
def dropout(x, keep_prob, is_train, noise_shape=None, seed=None, name=None):
    """Apply dropout only when the `is_train` tensor evaluates to True."""
    with tf.name_scope(name or "dropout"):
        if keep_prob >= 1.0:
            # Nothing to drop; pass the input through untouched.
            return x
        dropped = tf.nn.dropout(x, keep_prob, noise_shape=noise_shape, seed=seed)
        return tf.cond(is_train, lambda: dropped, lambda: x)
def softmax(logits, mask=None, scope=None):
    """Softmax over the last axis of `logits`, with optional exp-masking."""
    with tf.name_scope(scope or "Softmax"):
        masked = logits if mask is None else exp_mask(logits, mask)
        flat = flatten(masked, 1)
        # Reshape the flat softmax back to the original leading axes.
        return reconstruct(tf.nn.softmax(flat), masked, 1)
def softsel(target, logits, mask=None, scope=None):
    """
    :param target: [ ..., J, d] dtype=float
    :param logits: [ ..., J], dtype=float
    :param mask: [ ..., J], dtype=bool
    :param scope:
    :return: [..., d], dtype=float
    """
    with tf.name_scope(scope or "Softsel"):
        # Attention-style weighted sum of `target` along its second-to-last axis.
        weights = softmax(logits, mask=mask)
        rank = len(target.get_shape().as_list())
        return tf.reduce_sum(tf.expand_dims(weights, -1) * target, rank - 2)
def double_linear_logits(args, size, bias, bias_start=0.0, scope=None, mask=None, wd=0.0, input_keep_prob=1.0, is_train=None):
    """Two-layer projection (tanh then linear) of `args` to scalar logits."""
    with tf.variable_scope(scope or "Double_Linear_Logits"):
        hidden = linear(args, size, bias, bias_start=bias_start, scope='first',
                        wd=wd, input_keep_prob=input_keep_prob, is_train=is_train)
        logits = linear(tf.tanh(hidden), 1, bias, bias_start=bias_start, squeeze=True, scope='second',
                        wd=wd, input_keep_prob=input_keep_prob, is_train=is_train)
        if mask is not None:
            logits = exp_mask(logits, mask)
        return logits
def linear_logits(args, bias, bias_start=0.0, scope=None, mask=None, wd=0.0, input_keep_prob=1.0, is_train=None):
    """Single linear projection of `args` to scalar logits, with optional mask."""
    with tf.variable_scope(scope or "Linear_Logits"):
        raw = linear(args, 1, bias, bias_start=bias_start, squeeze=True, scope='first',
                     wd=wd, input_keep_prob=input_keep_prob, is_train=is_train)
        return raw if mask is None else exp_mask(raw, mask)
def sum_logits(args, mask=None, name=None):
    """Sum each arg over its last axis and add the results, with optional mask."""
    with tf.name_scope(name or "sum_logits"):
        if args is None or (nest.is_sequence(args) and not args):
            raise ValueError("`args` must be specified")
        if not nest.is_sequence(args):
            args = [args]
        # All args are assumed to share the rank of the first one.
        last_axis = len(args[0].get_shape()) - 1
        logits = sum(tf.reduce_sum(arg, last_axis) for arg in args)
        return logits if mask is None else exp_mask(logits, mask)
def get_logits(args, size, bias, bias_start=0.0, scope=None, mask=None, wd=0.0, input_keep_prob=1.0, is_train=None, func=None):
    """Dispatch to one of the logit functions by name.

    Supported ``func`` values: 'sum' (default), 'linear', 'double', 'dot',
    'mul_linear', 'proj', 'tri_linear'.  The pairwise variants ('dot',
    'mul_linear', 'proj', 'tri_linear') require exactly two args.

    Raises:
        ValueError: if ``func`` is not one of the recognized names.
    """
    if func is None:
        func = "sum"
    if func == 'sum':
        return sum_logits(args, mask=mask, name=scope)
    elif func == 'linear':
        return linear_logits(args, bias, bias_start=bias_start, scope=scope, mask=mask, wd=wd, input_keep_prob=input_keep_prob,
                             is_train=is_train)
    elif func == 'double':
        return double_linear_logits(args, size, bias, bias_start=bias_start, scope=scope, mask=mask, wd=wd, input_keep_prob=input_keep_prob,
                                    is_train=is_train)
    elif func == 'dot':
        assert len(args) == 2
        arg = args[0] * args[1]
        return sum_logits([arg], mask=mask, name=scope)
    elif func == 'mul_linear':
        assert len(args) == 2
        arg = args[0] * args[1]
        return linear_logits([arg], bias, bias_start=bias_start, scope=scope, mask=mask, wd=wd, input_keep_prob=input_keep_prob,
                             is_train=is_train)
    elif func == 'proj':
        assert len(args) == 2
        # Project args[0] into args[1]'s feature dimension before the dot.
        d = args[1].get_shape()[-1]
        proj = linear([args[0]], d, False, bias_start=bias_start, scope=scope, wd=wd, input_keep_prob=input_keep_prob,
                      is_train=is_train)
        return sum_logits([proj * args[1]], mask=mask)
    elif func == 'tri_linear':
        assert len(args) == 2
        new_arg = args[0] * args[1]
        return linear_logits([args[0], args[1], new_arg], bias, bias_start=bias_start, scope=scope, mask=mask, wd=wd, input_keep_prob=input_keep_prob,
                             is_train=is_train)
    else:
        # Fail loudly with the offending name instead of a bare Exception.
        raise ValueError("Unknown logits function: {!r}".format(func))
def highway_layer(arg, bias, bias_start=0.0, scope=None, wd=0.0, input_keep_prob=1.0, is_train=None):
    """Single highway layer: out = gate * relu(W1 x) + (1 - gate) * x.

    The output keeps the input's feature dimension so layers can be stacked.
    """
    with tf.variable_scope(scope or "highway_layer"):
        d = arg.get_shape()[-1]
        trans = linear([arg], d, bias, bias_start=bias_start, scope='trans', wd=wd, input_keep_prob=input_keep_prob, is_train=is_train)
        trans = tf.nn.relu(trans)
        # Sigmoid gate blends the transformed and the original input.
        gate = linear([arg], d, bias, bias_start=bias_start, scope='gate', wd=wd, input_keep_prob=input_keep_prob, is_train=is_train)
        gate = tf.nn.sigmoid(gate)
        out = gate * trans + (1 - gate) * arg
        return out
def highway_network(arg, num_layers, bias, bias_start=0.0, scope=None, wd=0.0, input_keep_prob=1.0, is_train=None):
    """Stack `num_layers` highway layers on top of `arg`.

    Returns the last layer's output (None when num_layers == 0, matching the
    original behavior).
    """
    with tf.variable_scope(scope or "highway_network"):
        inp = arg
        out = None
        for layer_idx in range(num_layers):
            out = highway_layer(inp, bias, bias_start=bias_start,
                                scope="layer_{}".format(layer_idx), wd=wd,
                                input_keep_prob=input_keep_prob, is_train=is_train)
            inp = out
        return out
def conv1d(in_, filter_size, height, padding, is_train=None, keep_prob=1.0, scope=None):
    """1-D convolution (as a width-1 2-D conv) followed by ReLU and max-pool.

    ``filter_size`` is the number of output channels, ``height`` the kernel
    width along axis 2.  Input dropout is applied at train time when
    ``keep_prob`` < 1.
    """
    with tf.variable_scope(scope or "conv1d"):
        num_channels = in_.get_shape()[-1]
        filter_ = tf.get_variable("filter", shape=[1, height, num_channels, filter_size], dtype='float')
        bias = tf.get_variable("bias", shape=[filter_size], dtype='float')
        strides = [1, 1, 1, 1]
        if is_train is not None and keep_prob < 1.0:
            in_ = dropout(in_, keep_prob, is_train)
        xxc = tf.nn.conv2d(in_, filter_, strides, padding) + bias  # [N*M, JX, W/filter_stride, d]
        # Max-pool over the convolved width axis.
        out = tf.reduce_max(tf.nn.relu(xxc), 2)  # [-1, JX, d]
        return out
def multi_conv1d(in_, filter_sizes, heights, padding, is_train=None, keep_prob=1.0, scope=None):
    """Run several conv1d filters of different widths and concatenate outputs."""
    with tf.variable_scope(scope or "multi_conv1d"):
        assert len(filter_sizes) == len(heights)
        # Zero-sized filters are skipped entirely.
        outs = [
            conv1d(in_, fsize, h, padding, is_train=is_train, keep_prob=keep_prob,
                   scope="conv1d_{}".format(h))
            for fsize, h in zip(filter_sizes, heights)
            if fsize != 0
        ]
        return tf.concat(2, outs)
|
{
"content_hash": "5723affc84efdef09b5808838fa62be3",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 150,
"avg_line_length": 43.33519553072626,
"alnum_prop": 0.5946886682996003,
"repo_name": "allenai/bi-att-flow",
"id": "1248dac56e2699d57a05f8928a0fbbd8de3fdcda",
"size": "7757",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "my/tensorflow/nn.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7398"
},
{
"name": "Jupyter Notebook",
"bytes": "84356"
},
{
"name": "Python",
"bytes": "285931"
},
{
"name": "Shell",
"bytes": "2428"
}
],
"symlink_target": ""
}
|
# Python 2 driver script: sweeps alternative sets, empirical priors, and
# wonky-world prior values through a Church model, running `church` on each
# configuration and accumulating its raw output.
import os
import math
import subprocess
########################################
# Load the Church model template as a list of lines.
f = open("wonkyworld-uniform.church")
ch_model = [l.rstrip() for l in f.readlines()]
f.close()
pfile = open("smoothed_15marbles_priors.txt") # get smoothed priors (kernel density estimates)
priors_15 = [l.rstrip() for l in pfile.readlines()[1:]]
pfile.close()
# Candidate wonky-world prior probabilities to sweep over.
wpriors = [.1, .2, .3, .4, .5]
rfile = open("results/model_results/results_uniform.txt","w")
#results = []
# Locate the template lines to rewrite.  The indices stay valid across
# iterations even though the line contents are overwritten below.
w = ch_model.index("(define alternatives '(some all none))")
pline = ch_model.index("(define theprior (list .1 .1 .1 .1 .1 .1 .1 .1 .1 .1 .1 .1 .1 .1 .1 .1))")
wline = ch_model.index(" '(prior)")
wwline = ch_model.index("(define wonkyworld-prior .1)")
# Alternative-set configurations; all but the first are disabled.
alternatives = [["some","all","none"]#,
               # ["some","all","none","one","two","three","four","five","six","seven","eight","nine","ten","eleven","twelve","thirteen","fourteen","fifteen"],
               #["some","all","none","one","two","three","four","five","six","seven","eight","nine","ten"],
               # ["some","all","none","most","many","few", "afew"],
               # ["some","all","none","most","many","few", "afew","one","two","three","four","five","six","seven","eight","nine","ten","eleven","twelve","thirteen","fourteen","fifteen"],
               # ["some","all","none","most","many","few", "afew","one","two","three","four","five","six","seven","eight","nine","ten"],
               # ["some","all","none","most","many","few", "half","several","one","two","three","four","five","six","seven","eight","nine","ten","eleven","twelve","thirteen","fourteen","fifteen","acouple","afew","almostnone","veryfew","almostall","overhalf","alot","notone","onlyone","everyone","notmany","justone"],
               # ["some","all","none","most","many","few", "half","several","one","two","three","four","five","six","seven","eight","nine","ten","acouple","afew","almostnone","veryfew","almostall","overhalf","alot","notone","onlyone","everyone","notmany","justone"],
               # ["some","all","none","most","many","few", "half","several","one","two","three","four","five","six","seven","eight","nine","ten","eleven","twelve","thirteen","fourteen","fifteen","acouple","afew","almostnone","veryfew","almostall","overhalf","alot","notone","onlyone","everyone","notmany","justone","morethanhalf","allbutone","lessthanhalf"],
               # ["some","all","none","most","many","few", "half","several","one","two","three","four","five","six","seven","eight","nine","ten","acouple","afew","almostnone","veryfew","almostall","overhalf","alot","notone","onlyone","everyone","notmany","justone","morethanhalf","allbutone","lessthanhalf"]
                ]
for i,a in enumerate(alternatives):
    print a
    for k,p in enumerate(priors_15):
        print p
        for ww in wpriors:
            # Rewrite the template lines for this configuration.
            alts = " ".join(a)
            ch_model[w] = "(define alternatives '(%s))"%alts
            ch_model[pline] = "(define empiricalprior (list %s))"%p
            ch_model[wline] = "'(prior_15)"
            ch_model[wwline] = "(define wonkyworld-prior %s)"%ww
            ofile = open("m.church","w")
            ofile.write("\n".join(ch_model))
            ofile.close()
            # Run the Church interpreter on the generated model; it writes
            # its results to raw_uniform_results.txt, which we append to the
            # aggregate results file.
            # NOTE(review): the Popen stdout is read but discarded — confirm
            # whether raw_uniform_results.txt is produced by church itself.
            subresults = subprocess.Popen(['church',ofile.name], stdout=subprocess.PIPE)
            subresults.communicate()[0]
            rfile.write("".join(open("results/model_results/raw_uniform_results.txt").readlines()))
rfile.close()
|
{
"content_hash": "079b09a6f81088f6ac5e95420b2ae5c8",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 363,
"avg_line_length": 59.285714285714285,
"alnum_prop": 0.5921686746987952,
"repo_name": "thegricean/sinking-marbles",
"id": "434301425dcf1e5c2ec157600a46c44e8d6e7ef1",
"size": "3320",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/wonky_world/runUniformEmpiricalWonkyWorldModel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8726"
},
{
"name": "HTML",
"bytes": "4235001"
},
{
"name": "JavaScript",
"bytes": "657622"
},
{
"name": "PostScript",
"bytes": "128"
},
{
"name": "Python",
"bytes": "376632"
},
{
"name": "R",
"bytes": "1018292"
},
{
"name": "Rebol",
"bytes": "994"
},
{
"name": "Shell",
"bytes": "39589"
},
{
"name": "Stan",
"bytes": "1173"
},
{
"name": "TeX",
"bytes": "325478"
}
],
"symlink_target": ""
}
|
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_equal, assert_allclose, assert_raises
from statsmodels.tsa.statespace.tools import (
constrain_stationary_univariate as constrain,
unconstrain_stationary_univariate as unconstrain)
from statsmodels.tsa.arima import specification
def check_attributes(spec, order, seasonal_order, enforce_stationarity,
                     enforce_invertibility, concentrate_scale):
    """Verify that `spec` exposes the given orders and flags as attributes."""
    p, d, q = order
    P, D, Q, s = seasonal_order

    def expected_lags(o):
        # A list is an explicit lag list; an integer means lags 1..o.
        return o if isinstance(o, list) else np.arange(1, o + 1)

    def max_order(o):
        return o[-1] if isinstance(o, list) else o

    assert_equal(spec.order, (p, d, q))
    assert_equal(spec.seasonal_order, (P, D, Q, s))
    assert_equal(spec.ar_order, p)
    assert_equal(spec.diff, d)
    assert_equal(spec.ma_order, q)
    assert_equal(spec.seasonal_ar_order, P)
    assert_equal(spec.seasonal_diff, D)
    assert_equal(spec.seasonal_ma_order, Q)
    assert_equal(spec.seasonal_periods, s)

    assert_equal(spec.ar_lags, expected_lags(p))
    assert_equal(spec.ma_lags, expected_lags(q))
    assert_equal(spec.seasonal_ar_lags, expected_lags(P))
    assert_equal(spec.seasonal_ma_lags, expected_lags(Q))

    assert_equal(spec.max_ar_order, max_order(p))
    assert_equal(spec.max_ma_order, max_order(q))
    assert_equal(spec.max_seasonal_ar_order, max_order(P))
    assert_equal(spec.max_seasonal_ma_order, max_order(Q))
    # Reduced orders fold the seasonal polynomial into the non-seasonal one.
    assert_equal(spec.max_reduced_ar_order, max_order(p) + max_order(P) * s)
    assert_equal(spec.max_reduced_ma_order, max_order(q) + max_order(Q) * s)

    assert_equal(spec.enforce_stationarity, enforce_stationarity)
    assert_equal(spec.enforce_invertibility, enforce_invertibility)
    assert_equal(spec.concentrate_scale, concentrate_scale)
def check_properties(spec, order, seasonal_order, enforce_stationarity,
                     enforce_invertibility, concentrate_scale,
                     is_ar_consecutive, is_ma_consecutive, exog_names,
                     ar_names, ma_names, seasonal_ar_names, seasonal_ma_names):
    """Verify `spec`'s derived properties (counts, flags, parameter names)."""
    p, d, q = order
    P, D, Q, s = seasonal_order

    def n_params(o):
        # A list is an explicit lag list; an integer means that many lags.
        return len(o) if isinstance(o, list) else o

    k_exog_params = len(exog_names)
    k_ar_params = n_params(p)
    k_ma_params = n_params(q)
    k_seasonal_ar_params = n_params(P)
    k_seasonal_ma_params = n_params(Q)
    k_variance_params = int(not concentrate_scale)

    expected_names = (exog_names + ar_names + ma_names + seasonal_ar_names +
                      seasonal_ma_names)
    if not concentrate_scale:
        # A non-concentrated scale adds sigma2 as the final parameter.
        expected_names = expected_names + ['sigma2']

    assert_equal(spec.is_ar_consecutive, is_ar_consecutive)
    assert_equal(spec.is_ma_consecutive, is_ma_consecutive)
    assert_equal(spec.is_integrated, d + D > 0)
    assert_equal(spec.is_seasonal, s > 0)

    assert_equal(spec.k_exog_params, k_exog_params)
    assert_equal(spec.k_ar_params, k_ar_params)
    assert_equal(spec.k_ma_params, k_ma_params)
    assert_equal(spec.k_seasonal_ar_params, k_seasonal_ar_params)
    assert_equal(spec.k_seasonal_ma_params, k_seasonal_ma_params)
    assert_equal(spec.k_params,
                 k_exog_params + k_ar_params + k_ma_params +
                 k_seasonal_ar_params + k_seasonal_ma_params +
                 k_variance_params)

    assert_equal(spec.exog_names, exog_names)
    assert_equal(spec.ar_names, ar_names)
    assert_equal(spec.ma_names, ma_names)
    assert_equal(spec.seasonal_ar_names, seasonal_ar_names)
    assert_equal(spec.seasonal_ma_names, seasonal_ma_names)
    assert_equal(spec.param_names, expected_names)
def check_methods(spec, order, seasonal_order, enforce_stationarity,
                  enforce_invertibility, concentrate_scale,
                  exog_params, ar_params, ma_params, seasonal_ar_params,
                  seasonal_ma_params, sigma2):
    """Exercise `spec`'s parameter split/join/validate/(un)constrain methods.

    The full parameter vector is the concatenation of exog, AR, MA, seasonal
    AR, seasonal MA coefficients and (unless concentrated) sigma2.
    """
    params = np.r_[exog_params, ar_params, ma_params, seasonal_ar_params,
                   seasonal_ma_params, sigma2]
    # Test methods
    desired = {
        'exog_params': exog_params,
        'ar_params': ar_params,
        'ma_params': ma_params,
        'seasonal_ar_params': seasonal_ar_params,
        'seasonal_ma_params': seasonal_ma_params}
    if not concentrate_scale:
        desired['sigma2'] = sigma2
    assert_equal(spec.split_params(params), desired)
    assert_equal(spec.join_params(**desired), params)
    assert_equal(spec.validate_params(params), None)
    # Wrong shape
    assert_raises(ValueError, spec.validate_params, [])
    # Wrong dtype
    assert_raises(ValueError, spec.validate_params,
                  ['a'] + params[1:].tolist())
    # NaN / Infinity
    assert_raises(ValueError, spec.validate_params,
                  np.r_[np.inf, params[1:]])
    assert_raises(ValueError, spec.validate_params,
                  np.r_[np.nan, params[1:]])
    # Non-stationary / non-invertible
    # All-ones AR coefficients put a unit root in the AR polynomial, so
    # validation must fail exactly when stationarity is enforced.
    if spec.max_ar_order > 0:
        params = np.r_[exog_params, np.ones_like(ar_params), ma_params,
                       np.zeros_like(seasonal_ar_params),
                       seasonal_ma_params, sigma2]
        if enforce_stationarity:
            assert_raises(ValueError, spec.validate_params, params)
        else:
            assert_equal(spec.validate_params(params), None)
    # All-ones MA coefficients make the MA polynomial non-invertible.
    if spec.max_ma_order > 0:
        params = np.r_[exog_params, ar_params, np.ones_like(ma_params),
                       seasonal_ar_params, np.zeros_like(seasonal_ma_params),
                       sigma2]
        if enforce_invertibility:
            assert_raises(ValueError, spec.validate_params, params)
        else:
            assert_equal(spec.validate_params(params), None)
    # Same unit-root check for the seasonal AR polynomial.
    if spec.max_seasonal_ar_order > 0:
        params = np.r_[exog_params, np.zeros_like(ar_params), ma_params,
                       np.ones_like(seasonal_ar_params), seasonal_ma_params,
                       sigma2]
        if enforce_stationarity:
            assert_raises(ValueError, spec.validate_params, params)
        else:
            assert_equal(spec.validate_params(params), None)
    # Same non-invertibility check for the seasonal MA polynomial.
    if spec.max_seasonal_ma_order > 0:
        params = np.r_[exog_params, ar_params, np.zeros_like(ma_params),
                       seasonal_ar_params, np.ones_like(seasonal_ma_params),
                       sigma2]
        if enforce_invertibility:
            assert_raises(ValueError, spec.validate_params, params)
        else:
            assert_equal(spec.validate_params(params), None)
    # Invalid variances
    if not concentrate_scale:
        params = np.r_[exog_params, ar_params, ma_params, seasonal_ar_params,
                       seasonal_ma_params, 0.]
        assert_raises(ValueError, spec.validate_params, params)
        params = np.r_[exog_params, ar_params, ma_params, seasonal_ar_params,
                       seasonal_ma_params, -1]
        assert_raises(ValueError, spec.validate_params, params)
    # Constrain / unconstrain
    # Build the expected unconstrained vector by hand: AR-type coefficients
    # are unconstrained directly, MA-type coefficients are negated first
    # (matching the statespace sign convention), and sigma2 maps via sqrt.
    unconstrained_ar_params = ar_params
    unconstrained_ma_params = ma_params
    unconstrained_seasonal_ar_params = seasonal_ar_params
    unconstrained_seasonal_ma_params = seasonal_ma_params
    unconstrained_sigma2 = sigma2
    if spec.max_ar_order > 0 and enforce_stationarity:
        unconstrained_ar_params = unconstrain(np.array(ar_params))
    if spec.max_ma_order > 0 and enforce_invertibility:
        unconstrained_ma_params = unconstrain(-np.array(ma_params))
    if spec.max_seasonal_ar_order > 0 and enforce_stationarity:
        unconstrained_seasonal_ar_params = (
            unconstrain(np.array(seasonal_ar_params)))
    if spec.max_seasonal_ma_order > 0 and enforce_invertibility:
        # NOTE(review): this unconstrains unconstrained_seasonal_ma_params
        # (still equal to seasonal_ma_params at this point) rather than
        # seasonal_ma_params directly — equivalent today, but confirm the
        # asymmetry with the non-seasonal MA branch is intentional.
        unconstrained_seasonal_ma_params = (
            unconstrain(-np.array(unconstrained_seasonal_ma_params)))
    if not concentrate_scale:
        unconstrained_sigma2 = unconstrained_sigma2**0.5
    unconstrained_params = np.r_[
        exog_params, unconstrained_ar_params, unconstrained_ma_params,
        unconstrained_seasonal_ar_params, unconstrained_seasonal_ma_params,
        unconstrained_sigma2]
    params = np.r_[exog_params, ar_params, ma_params, seasonal_ar_params,
                   seasonal_ma_params, sigma2]
    assert_allclose(spec.unconstrain_params(params), unconstrained_params)
    assert_allclose(spec.constrain_params(unconstrained_params), params)
    # Round trip must be the identity.
    assert_allclose(
        spec.constrain_params(spec.unconstrain_params(params)), params)
@pytest.mark.parametrize("n,d,D,s,params,which", [
    # AR models
    (0, 0, 0, 0, np.array([1.]), 'p'),
    (1, 0, 0, 0, np.array([0.5, 1.]), 'p'),
    (1, 0, 0, 0, np.array([-0.2, 100.]), 'p'),
    (2, 0, 0, 0, np.array([-0.2, 0.5, 100.]), 'p'),
    (20, 0, 0, 0, np.array([0.0] * 20 + [100.]), 'p'),
    # ARI models
    (0, 1, 0, 0, np.array([1.]), 'p'),
    (0, 1, 1, 4, np.array([1.]), 'p'),
    (1, 1, 0, 0, np.array([0.5, 1.]), 'p'),
    (1, 1, 1, 4, np.array([0.5, 1.]), 'p'),
    # MA models
    (0, 0, 0, 0, np.array([1.]), 'q'),
    (1, 0, 0, 0, np.array([0.5, 1.]), 'q'),
    (1, 0, 0, 0, np.array([-0.2, 100.]), 'q'),
    (2, 0, 0, 0, np.array([-0.2, 0.5, 100.]), 'q'),
    (20, 0, 0, 0, np.array([0.0] * 20 + [100.]), 'q'),
    # IMA models
    (0, 1, 0, 0, np.array([1.]), 'q'),
    (0, 1, 1, 4, np.array([1.]), 'q'),
    (1, 1, 0, 0, np.array([0.5, 1.]), 'q'),
    (1, 1, 1, 4, np.array([0.5, 1.]), 'q'),
])
def test_specification_ar_or_ma(n, d, D, s, params, which):
    """Check pure-AR(I) and pure-(I)MA specifications.

    `which` selects whether `n` is the AR order ('p') or the MA order ('q');
    `params` holds the n lag coefficients followed by sigma2.
    """
    if which == 'p':
        p, d, q = n, d, 0
        ar_names = ['ar.L%d' % i for i in range(1, p + 1)]
        ma_names = []
    else:
        p, d, q = 0, d, n
        ar_names = []
        ma_names = ['ma.L%d' % i for i in range(1, q + 1)]
    # Split `params` into lag coefficients and the trailing sigma2.
    ar_params = params[:p]
    ma_params = params[p:-1]
    sigma2 = params[-1]
    P, D, Q, s = 0, D, 0, s
    args = ((p, d, q), (P, D, Q, s))
    kwargs = {
        'enforce_stationarity': None,
        'enforce_invertibility': None,
        'concentrate_scale': None
    }
    properties_kwargs = kwargs.copy()
    properties_kwargs.update({
        'is_ar_consecutive': True,
        'is_ma_consecutive': True,
        'exog_names': [],
        'ar_names': ar_names,
        'ma_names': ma_names,
        'seasonal_ar_names': [],
        'seasonal_ma_names': []})
    methods_kwargs = kwargs.copy()
    methods_kwargs.update({
        'exog_params': [],
        'ar_params': ar_params,
        'ma_params': ma_params,
        'seasonal_ar_params': [],
        'seasonal_ma_params': [],
        'sigma2': sigma2})
    # Test the spec created with order, seasonal_order
    spec = specification.SARIMAXSpecification(
        order=(p, d, q), seasonal_order=(P, D, Q, s))
    check_attributes(spec, *args, **kwargs)
    check_properties(spec, *args, **properties_kwargs)
    check_methods(spec, *args, **methods_kwargs)
    # Test the spec created with ar_order, etc.
    # Both construction styles must produce an equivalent specification.
    spec = specification.SARIMAXSpecification(
        ar_order=p, diff=d, ma_order=q, seasonal_ar_order=P,
        seasonal_diff=D, seasonal_ma_order=Q, seasonal_periods=s)
    check_attributes(spec, *args, **kwargs)
    check_properties(spec, *args, **properties_kwargs)
    check_methods(spec, *args, **methods_kwargs)
@pytest.mark.parametrize(("endog,exog,p,d,q,P,D,Q,s,"
                          "enforce_stationarity,enforce_invertibility,"
                          "concentrate_scale"), [
    (None, None, 0, 0, 0, 0, 0, 0, 0, True, True, False),
    (None, None, 1, 0, 1, 0, 0, 0, 0, True, True, False),
    (None, None, 1, 1, 1, 0, 0, 0, 0, True, True, False),
    (None, None, 1, 0, 0, 0, 0, 0, 4, True, True, False),
    (None, None, 0, 0, 0, 1, 1, 1, 4, True, True, False),
    (None, None, 1, 0, 0, 1, 0, 0, 4, True, True, False),
    (None, None, 1, 0, 0, 1, 1, 1, 4, True, True, False),
    (None, None, 2, 1, 3, 4, 1, 3, 12, True, True, False),
    # Non-consecutive lag orders
    (None, None, [1, 3], 0, 0, 1, 0, 0, 4, True, True, False),
    (None, None, 0, 0, 0, 0, 0, [1, 3], 4, True, True, False),
    (None, None, [2], 0, [1, 3], [1, 3], 0, [1, 4], 4, True, True, False),
    # Modify enforce / concentrate
    (None, None, 2, 1, 3, 4, 1, 3, 12, False, False, True),
    (None, None, 2, 1, 3, 4, 1, 3, 12, True, False, True),
    (None, None, 2, 1, 3, 4, 1, 3, 12, False, True, True),
    # Endog / exog
    (True, None, 2, 1, 3, 4, 1, 3, 12, False, True, True),
    (None, 2, 2, 1, 3, 4, 1, 3, 12, False, True, True),
    (True, 2, 2, 1, 3, 4, 1, 3, 12, False, True, True),
    ('y', None, 2, 1, 3, 4, 1, 3, 12, False, True, True),
    (None, ['x1'], 2, 1, 3, 4, 1, 3, 12, False, True, True),
    ('y', ['x1'], 2, 1, 3, 4, 1, 3, 12, False, True, True),
    ('y', ['x1', 'x2'], 2, 1, 3, 4, 1, 3, 12, False, True, True),
    (True, ['x1', 'x2'], 2, 1, 3, 4, 1, 3, 12, False, True, True),
    ('y', 2, 2, 1, 3, 4, 1, 3, 12, False, True, True),
])
def test_specification(endog, exog, p, d, q, P, D, Q, s,
                       enforce_stationarity, enforce_invertibility,
                       concentrate_scale):
    """Exhaustive check of SARIMAXSpecification across order/option combos.

    Orders may be ints or lists of non-consecutive lags; endog/exog may be
    absent, unnamed arrays (True / int), or named pandas objects.  Both
    constructor forms must agree with the expected attributes, properties
    and parameter split/join behavior.
    """
    # Assumptions:
    # - p, q, P, Q are either integers or lists of non-consecutive integers
    # (i.e. we are not testing boolean lists or consecutive lists here, which
    # should be tested in the `standardize_lag_order` tests)
    # Construct the specification
    if isinstance(p, list):
        k_ar_params = len(p)
        max_ar_order = p[-1]
    else:
        k_ar_params = max_ar_order = p
    if isinstance(q, list):
        k_ma_params = len(q)
        max_ma_order = q[-1]
    else:
        k_ma_params = max_ma_order = q
    if isinstance(P, list):
        k_seasonal_ar_params = len(P)
        max_seasonal_ar_order = P[-1]
    else:
        k_seasonal_ar_params = max_seasonal_ar_order = P
    if isinstance(Q, list):
        k_seasonal_ma_params = len(Q)
        max_seasonal_ma_order = Q[-1]
    else:
        k_seasonal_ma_params = max_seasonal_ma_order = Q
    # Get endog / exog
    # nobs must cover differencing plus the longest lag the spec can need.
    nobs = d + D * s + max(3 * max_ma_order + 1,
                           3 * max_seasonal_ma_order * s + 1,
                           max_ar_order,
                           max_seasonal_ar_order * s) + 1
    if endog is True:
        endog = np.arange(nobs) * 1.0
    elif isinstance(endog, str):
        endog = pd.Series(np.arange(nobs) * 1.0, name=endog)
    elif endog is not None:
        raise ValueError('Invalid `endog` in test setup.')
    if isinstance(exog, int):
        exog_names = ['x%d' % (i + 1) for i in range(exog)]
        exog = np.arange(nobs * len(exog_names)).reshape(nobs, len(exog_names))
    elif isinstance(exog, list):
        exog_names = exog
        exog = np.arange(nobs * len(exog_names)).reshape(nobs, len(exog_names))
        exog = pd.DataFrame(exog, columns=exog_names)
    elif exog is None:
        exog_names = []
    else:
        raise ValueError('Invalid `exog` in test setup.')
    # Setup args, kwargs
    args = ((p, d, q), (P, D, Q, s))
    kwargs = {
        'enforce_stationarity': enforce_stationarity,
        'enforce_invertibility': enforce_invertibility,
        'concentrate_scale': concentrate_scale
    }
    properties_kwargs = kwargs.copy()
    is_ar_consecutive = not isinstance(p, list) and max_seasonal_ar_order == 0
    is_ma_consecutive = not isinstance(q, list) and max_seasonal_ma_order == 0
    properties_kwargs.update({
        'is_ar_consecutive': is_ar_consecutive,
        'is_ma_consecutive': is_ma_consecutive,
        'exog_names': exog_names,
        'ar_names': [
            'ar.L%d' % i
            for i in (p if isinstance(p, list) else range(1, p + 1))],
        'ma_names': [
            'ma.L%d' % i
            for i in (q if isinstance(q, list) else range(1, q + 1))],
        'seasonal_ar_names': [
            'ar.S.L%d' % (i * s)
            for i in (P if isinstance(P, list) else range(1, P + 1))],
        'seasonal_ma_names': [
            'ma.S.L%d' % (i * s)
            for i in (Q if isinstance(Q, list) else range(1, Q + 1))]})
    methods_kwargs = kwargs.copy()
    # Coefficients are pushed through `constrain` so they are valid
    # (stationary / invertible) parameter values.
    methods_kwargs.update({
        'exog_params': np.arange(len(exog_names)),
        'ar_params': (
            [] if k_ar_params == 0 else
            constrain(np.arange(k_ar_params) / 10)),
        'ma_params': (
            [] if k_ma_params == 0 else
            constrain((np.arange(k_ma_params) + 10) / 100)),
        'seasonal_ar_params': (
            [] if k_seasonal_ar_params == 0 else
            constrain(np.arange(k_seasonal_ar_params) - 4)),
        'seasonal_ma_params': (
            [] if k_seasonal_ma_params == 0 else
            constrain((np.arange(k_seasonal_ma_params) - 10) / 100)),
        'sigma2': [] if concentrate_scale else 2.3424})
    # Test the spec created with order, seasonal_order
    spec = specification.SARIMAXSpecification(
        endog, exog=exog,
        order=(p, d, q), seasonal_order=(P, D, Q, s),
        enforce_stationarity=enforce_stationarity,
        enforce_invertibility=enforce_invertibility,
        concentrate_scale=concentrate_scale)
    check_attributes(spec, *args, **kwargs)
    check_properties(spec, *args, **properties_kwargs)
    check_methods(spec, *args, **methods_kwargs)
    # Test the spec created with ar_order, etc.
    spec = specification.SARIMAXSpecification(
        endog, exog=exog,
        ar_order=p, diff=d, ma_order=q, seasonal_ar_order=P,
        seasonal_diff=D, seasonal_ma_order=Q, seasonal_periods=s,
        enforce_stationarity=enforce_stationarity,
        enforce_invertibility=enforce_invertibility,
        concentrate_scale=concentrate_scale)
    check_attributes(spec, *args, **kwargs)
    check_properties(spec, *args, **properties_kwargs)
    check_methods(spec, *args, **methods_kwargs)
def test_misc():
    """Spot-check default orders and the repr of a fully-specified model."""
    # With no arguments, both order tuples should be all zeros.
    default_spec = specification.SARIMAXSpecification()
    assert_equal(default_spec.order, (0, 0, 0))
    assert_equal(default_spec.seasonal_order, (0, 0, 0, 0))
    # The repr should reflect every argument passed to the constructor.
    full_spec = specification.SARIMAXSpecification(
        endog=pd.Series([0], name='y'),
        exog=pd.DataFrame([[0, 0]], columns=['x1', 'x2']),
        order=(1, 1, 2), seasonal_order=(2, 1, 0, 12),
        enforce_stationarity=False, enforce_invertibility=False,
        concentrate_scale=True)
    expected_repr = (
        "SARIMAXSpecification(endog=y, exog=['x1', 'x2'],"
        " order=(1, 1, 2), seasonal_order=(2, 1, 0, 12),"
        " enforce_stationarity=False, enforce_invertibility=False,"
        " concentrate_scale=True)")
    assert_equal(repr(full_spec), expected_repr)
def test_invalid():
    """Invalid constructor arguments and parameter joins raise ValueError."""
    # Each kwargs dict below is an invalid way to construct a specification:
    # redundant order arguments, negative / fractional orders, wrong tuple
    # lengths, bad seasonal periods, or a multivariate endog.
    bad_constructions = [
        dict(order=(1, 0, 0), ar_order=1),
        dict(seasonal_order=(1, 0, 0), seasonal_ar_order=1),
        dict(order=(-1, 0, 0)),
        dict(order=(1.5, 0, 0)),
        dict(order=(0, -1, 0)),
        dict(order=(0, 1.5, 0)),
        dict(order=(0,)),
        dict(seasonal_order=(0, 1.5, 0, 4)),
        dict(seasonal_order=(-1, 0, 0, 4)),
        dict(seasonal_order=(1.5, 0, 0, 4)),
        dict(seasonal_order=(0, -1, 0, 4)),
        dict(seasonal_order=(0, 1.5, 0, 4)),
        dict(seasonal_order=(1, 0, 0, 0)),
        dict(seasonal_order=(1, 0, 0, -1)),
        dict(seasonal_order=(1, 0, 0, 1)),
        dict(seasonal_order=(1,)),
        dict(order=(1, 0, 0), endog=np.zeros((10, 2))),
    ]
    for bad_kwargs in bad_constructions:
        assert_raises(ValueError, specification.SARIMAXSpecification,
                      **bad_kwargs)
    # join_params must be given exactly the right number of AR parameters.
    spec = specification.SARIMAXSpecification(ar_order=1)
    assert_raises(ValueError, spec.join_params)
    assert_raises(ValueError, spec.join_params, ar_params=[0.2, 0.3])
@pytest.mark.parametrize(
    "order,seasonal_order,enforce_stationarity,"
    "enforce_invertibility,concentrate_scale,valid", [
        # Different orders
        ((0, 0, 0), (0, 0, 0, 0), None, None, None,
         ['yule_walker', 'burg', 'innovations', 'hannan_rissanen',
          'innovations_mle', 'statespace']),
        ((1, 0, 0), (0, 0, 0, 0), None, None, None,
         ['yule_walker', 'burg', 'hannan_rissanen',
          'innovations_mle', 'statespace']),
        ((0, 0, 1), (0, 0, 0, 0), None, None, None,
         ['innovations', 'hannan_rissanen', 'innovations_mle',
          'statespace']),
        ((1, 0, 1), (0, 0, 0, 0), None, None, None,
         ['hannan_rissanen', 'innovations_mle', 'statespace']),
        ((0, 0, 0), (1, 0, 0, 4), None, None, None,
         ['innovations_mle', 'statespace']),
        # Different options
        ((1, 0, 0), (0, 0, 0, 0), True, None, None,
         ['innovations_mle', 'statespace']),
        ((1, 0, 0), (0, 0, 0, 0), False, None, None,
         ['yule_walker', 'burg', 'hannan_rissanen', 'statespace']),
        ((1, 0, 0), (0, 0, 0, 0), None, True, None,
         ['yule_walker', 'burg', 'hannan_rissanen', 'innovations_mle',
          'statespace']),
        ((1, 0, 0), (0, 0, 0, 0), None, False, None,
         ['yule_walker', 'burg', 'hannan_rissanen', 'innovations_mle',
          'statespace']),
        ((1, 0, 0), (0, 0, 0, 0), None, None, True,
         ['yule_walker', 'burg', 'hannan_rissanen', 'statespace']),
    ])
def test_valid_estimators(order, seasonal_order, enforce_stationarity,
                          enforce_invertibility, concentrate_scale, valid):
    """Check `valid_estimators` / `validate_estimator` for each spec.

    `valid` lists the estimators expected to be usable for the given
    specification; every other known estimator must be rejected with
    ValueError.  A spec with missing values in endog must only allow the
    state space estimator.
    """
    # Basic specification
    spec = specification.SARIMAXSpecification(
        order=order, seasonal_order=seasonal_order,
        enforce_stationarity=enforce_stationarity,
        enforce_invertibility=enforce_invertibility,
        concentrate_scale=concentrate_scale)
    estimators = set(['yule_walker', 'burg', 'innovations',
                      'hannan_rissanen', 'innovations_mle', 'statespace'])
    desired = set(valid)
    assert_equal(spec.valid_estimators, desired)
    for estimator in desired:
        assert_equal(spec.validate_estimator(estimator), None)
    # NOTE: removed a leftover debug `print` that cluttered pytest output.
    for estimator in estimators.difference(desired):
        assert_raises(ValueError, spec.validate_estimator, estimator)
    # Now try specification with missing values in endog
    spec = specification.SARIMAXSpecification(
        endog=[np.nan],
        order=order, seasonal_order=seasonal_order,
        enforce_stationarity=enforce_stationarity,
        enforce_invertibility=enforce_invertibility,
        concentrate_scale=concentrate_scale)
    assert_equal(spec.valid_estimators, set(['statespace']))
    assert_equal(spec.validate_estimator('statespace'), None)
    for estimator in estimators.difference(['statespace']):
        assert_raises(ValueError, spec.validate_estimator, estimator)
def test_invalid_estimator():
    """An unknown estimator name must be rejected with ValueError."""
    empty_spec = specification.SARIMAXSpecification()
    assert_raises(ValueError, empty_spec.validate_estimator,
                  'not_an_estimator')
|
{
"content_hash": "5c242ec10680d2ac7b4ce921d17321d0",
"timestamp": "",
"source": "github",
"line_count": 586,
"max_line_length": 79,
"avg_line_length": 41.21331058020478,
"alnum_prop": 0.5935157964473521,
"repo_name": "josef-pkt/statsmodels",
"id": "8f8164e15b024d20b2862716f180be35edda602d",
"size": "24151",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "statsmodels/tsa/arima/tests/test_specification.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10035"
},
{
"name": "Batchfile",
"bytes": "625"
},
{
"name": "C",
"bytes": "381"
},
{
"name": "Cython",
"bytes": "225838"
},
{
"name": "Fortran",
"bytes": "16671"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "MATLAB",
"bytes": "100525"
},
{
"name": "Python",
"bytes": "14428857"
},
{
"name": "R",
"bytes": "106569"
},
{
"name": "Shell",
"bytes": "25322"
},
{
"name": "Stata",
"bytes": "50129"
}
],
"symlink_target": ""
}
|
<<<<<<< HEAD
<<<<<<< HEAD
doctests = """
Unpack tuple
>>> t = (1, 2, 3)
>>> a, b, c = t
>>> a == 1 and b == 2 and c == 3
True
Unpack list
>>> l = [4, 5, 6]
>>> a, b, c = l
>>> a == 4 and b == 5 and c == 6
True
Unpack implied tuple
>>> a, b, c = 7, 8, 9
>>> a == 7 and b == 8 and c == 9
True
Unpack string... fun!
>>> a, b, c = 'one'
>>> a == 'o' and b == 'n' and c == 'e'
True
Unpack generic sequence
>>> class Seq:
... def __getitem__(self, i):
... if i >= 0 and i < 3: return i
... raise IndexError
...
>>> a, b, c = Seq()
>>> a == 0 and b == 1 and c == 2
True
Single element unpacking, with extra syntax
>>> st = (99,)
>>> sl = [100]
>>> a, = st
>>> a
99
>>> b, = sl
>>> b
100
Now for some failures
Unpacking non-sequence
>>> a, b, c = 7
Traceback (most recent call last):
...
TypeError: 'int' object is not iterable
Unpacking tuple of wrong size
>>> a, b = t
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking tuple of wrong size
>>> a, b = l
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking sequence too short
>>> a, b, c, d = Seq()
Traceback (most recent call last):
...
ValueError: need more than 3 values to unpack
Unpacking sequence too long
>>> a, b = Seq()
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking a sequence where the test for too long raises a different kind of
error
>>> class BozoError(Exception):
... pass
...
>>> class BadSeq:
... def __getitem__(self, i):
... if i >= 0 and i < 3:
... return i
... elif i == 3:
... raise BozoError
... else:
... raise IndexError
...
Trigger code while not expecting an IndexError (unpack sequence too long, wrong
error)
>>> a, b, c, d, e = BadSeq()
Traceback (most recent call last):
...
test.test_unpack.BozoError
Trigger code while expecting an IndexError (unpack sequence too short, wrong
error)
>>> a, b, c = BadSeq()
Traceback (most recent call last):
...
test.test_unpack.BozoError
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=False):
from test import support
from test import test_unpack
support.run_doctest(test_unpack, verbose)
if __name__ == "__main__":
test_main(verbose=True)
=======
doctests = """
Unpack tuple
>>> t = (1, 2, 3)
>>> a, b, c = t
>>> a == 1 and b == 2 and c == 3
True
Unpack list
>>> l = [4, 5, 6]
>>> a, b, c = l
>>> a == 4 and b == 5 and c == 6
True
Unpack implied tuple
>>> a, b, c = 7, 8, 9
>>> a == 7 and b == 8 and c == 9
True
Unpack string... fun!
>>> a, b, c = 'one'
>>> a == 'o' and b == 'n' and c == 'e'
True
Unpack generic sequence
>>> class Seq:
... def __getitem__(self, i):
... if i >= 0 and i < 3: return i
... raise IndexError
...
>>> a, b, c = Seq()
>>> a == 0 and b == 1 and c == 2
True
Single element unpacking, with extra syntax
>>> st = (99,)
>>> sl = [100]
>>> a, = st
>>> a
99
>>> b, = sl
>>> b
100
Now for some failures
Unpacking non-sequence
>>> a, b, c = 7
Traceback (most recent call last):
...
TypeError: 'int' object is not iterable
Unpacking tuple of wrong size
>>> a, b = t
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking tuple of wrong size
>>> a, b = l
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking sequence too short
>>> a, b, c, d = Seq()
Traceback (most recent call last):
...
ValueError: need more than 3 values to unpack
Unpacking sequence too long
>>> a, b = Seq()
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking a sequence where the test for too long raises a different kind of
error
>>> class BozoError(Exception):
... pass
...
>>> class BadSeq:
... def __getitem__(self, i):
... if i >= 0 and i < 3:
... return i
... elif i == 3:
... raise BozoError
... else:
... raise IndexError
...
Trigger code while not expecting an IndexError (unpack sequence too long, wrong
error)
>>> a, b, c, d, e = BadSeq()
Traceback (most recent call last):
...
test.test_unpack.BozoError
Trigger code while expecting an IndexError (unpack sequence too short, wrong
error)
>>> a, b, c = BadSeq()
Traceback (most recent call last):
...
test.test_unpack.BozoError
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=False):
from test import support
from test import test_unpack
support.run_doctest(test_unpack, verbose)
if __name__ == "__main__":
test_main(verbose=True)
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
=======
doctests = """
Unpack tuple
>>> t = (1, 2, 3)
>>> a, b, c = t
>>> a == 1 and b == 2 and c == 3
True
Unpack list
>>> l = [4, 5, 6]
>>> a, b, c = l
>>> a == 4 and b == 5 and c == 6
True
Unpack implied tuple
>>> a, b, c = 7, 8, 9
>>> a == 7 and b == 8 and c == 9
True
Unpack string... fun!
>>> a, b, c = 'one'
>>> a == 'o' and b == 'n' and c == 'e'
True
Unpack generic sequence
>>> class Seq:
... def __getitem__(self, i):
... if i >= 0 and i < 3: return i
... raise IndexError
...
>>> a, b, c = Seq()
>>> a == 0 and b == 1 and c == 2
True
Single element unpacking, with extra syntax
>>> st = (99,)
>>> sl = [100]
>>> a, = st
>>> a
99
>>> b, = sl
>>> b
100
Now for some failures
Unpacking non-sequence
>>> a, b, c = 7
Traceback (most recent call last):
...
TypeError: 'int' object is not iterable
Unpacking tuple of wrong size
>>> a, b = t
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking tuple of wrong size
>>> a, b = l
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking sequence too short
>>> a, b, c, d = Seq()
Traceback (most recent call last):
...
ValueError: need more than 3 values to unpack
Unpacking sequence too long
>>> a, b = Seq()
Traceback (most recent call last):
...
ValueError: too many values to unpack (expected 2)
Unpacking a sequence where the test for too long raises a different kind of
error
>>> class BozoError(Exception):
... pass
...
>>> class BadSeq:
... def __getitem__(self, i):
... if i >= 0 and i < 3:
... return i
... elif i == 3:
... raise BozoError
... else:
... raise IndexError
...
Trigger code while not expecting an IndexError (unpack sequence too long, wrong
error)
>>> a, b, c, d, e = BadSeq()
Traceback (most recent call last):
...
test.test_unpack.BozoError
Trigger code while expecting an IndexError (unpack sequence too short, wrong
error)
>>> a, b, c = BadSeq()
Traceback (most recent call last):
...
test.test_unpack.BozoError
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=False):
from test import support
from test import test_unpack
support.run_doctest(test_unpack, verbose)
if __name__ == "__main__":
test_main(verbose=True)
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
|
{
"content_hash": "2358939653bb6e86a005b9d859fc18ee",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 79,
"avg_line_length": 20.194444444444443,
"alnum_prop": 0.5279479804926848,
"repo_name": "ArcherSys/ArcherSys",
"id": "abfbbf55885022a1c4134124e9d6e83f8e09fcd5",
"size": "7997",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Lib/test/test_unpack.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
from __future__ import absolute_import
import atexit
import functools
import logging
import os
import re
import shutil
import socket
import sys
import warnings
import fixtures
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_log import log
import oslotest.base as oslotest
from oslotest import mockpatch
import six
from sqlalchemy import exc
from testtools import testcase
import webob
# NOTE(ayoung)
# environment.use_eventlet must run before any of the code that will
# call the eventlet monkeypatching.
from keystone.common import environment # noqa
environment.use_eventlet()
from keystone import auth
from keystone.common import config as common_cfg
from keystone.common import dependency
from keystone.common import kvs
from keystone.common.kvs import core as kvs_core
from keystone import config
from keystone import controllers
from keystone import exception
from keystone import notifications
from keystone.policy.backends import rules
from keystone.server import common
from keystone import service
from keystone.tests.unit import ksfixtures
# One-time global setup for the unit-test suite.
config.configure()
LOG = log.getLogger(__name__)
# Process id as text; used to build a per-process temp directory below.
PID = six.text_type(os.getpid())
# Well-known directories, all derived from this file's location.
TESTSDIR = os.path.dirname(os.path.abspath(__file__))
TESTCONF = os.path.join(TESTSDIR, 'config_files')
ROOTDIR = os.path.normpath(os.path.join(TESTSDIR, '..', '..', '..'))
VENDOR = os.path.join(ROOTDIR, 'vendor')
ETCDIR = os.path.join(ROOTDIR, 'etc')
def _calc_tmpdir():
    """Return the per-process temp directory, honoring KEYSTONE_TEST_TEMP_DIR.

    If the environment variable is unset or empty, fall back to a `tmp`
    directory under the test tree.
    """
    base = os.environ.get('KEYSTONE_TEST_TEMP_DIR') or os.path.join(TESTSDIR,
                                                                    'tmp')
    return os.path.join(base, PID)
TMPDIR = _calc_tmpdir()
CONF = cfg.CONF
log.register_options(CONF)
rules.init()
# In-memory SQLite connection string used by SQL-backed tests.
IN_MEM_DB_CONN_STRING = 'sqlite://'
# Make formatting errors in exception messages fatal while testing.
exception._FATAL_EXCEPTION_FORMAT_ERRORS = True
# Create the per-process temp dir now; remove it at interpreter exit.
os.makedirs(TMPDIR)
atexit.register(shutil.rmtree, TMPDIR)
class dirs(object):
    """Namespace of helpers building paths under well-known test roots."""
    @staticmethod
    def root(*p):
        # Path under the repository root.
        return os.path.join(ROOTDIR, *p)
    @staticmethod
    def etc(*p):
        # Path under the repository's etc/ directory.
        return os.path.join(ETCDIR, *p)
    @staticmethod
    def tests(*p):
        # Path under the unit-test directory.
        return os.path.join(TESTSDIR, *p)
    @staticmethod
    def tmp(*p):
        # Path under this process's temporary directory.
        return os.path.join(TMPDIR, *p)
    @staticmethod
    def tests_conf(*p):
        # Path under the test config_files directory.
        return os.path.join(TESTCONF, *p)
# keystone.common.sql.initialize() for testing.
DEFAULT_TEST_DB_FILE = dirs.tmp('test.db')


@atexit.register
def remove_test_databases():
    """Delete the test SQLite database files, if they exist."""
    for candidate in (dirs.tmp('test.db'), dirs.tmp('test.db.pristine')):
        if os.path.exists(candidate):
            os.unlink(candidate)
def generate_paste_config(extension_name):
    """Write a paste config named `extension_name`.ini into the temp dir.

    The file is a copy of keystone-paste.ini with `extension_name` inserted
    into the service_v3 pipeline.  Returns the path of the generated file.
    """
    with open(dirs.etc('keystone-paste.ini'), 'r') as source:
        contents = source.read()
    updated = contents.replace(' service_v3',
                               ' %s service_v3' % (extension_name))
    new_paste_file = dirs.tmp(extension_name + '.ini')
    with open(new_paste_file, 'w') as target:
        target.write(updated)
    return new_paste_file
def remove_generated_paste_config(extension_name):
    """Delete the paste config file created by generate_paste_config."""
    os.remove(dirs.tmp(extension_name + '.ini'))
def skip_if_cache_disabled(*sections):
    """Skip the decorated test when caching is disabled.

    The test is skipped if caching is disabled globally (the `enabled`
    option of the `cache` config section) or if any of the named config
    `sections` sets its `caching` option to false.  With no arguments only
    the global switch is checked.  A section that does not define `caching`
    is treated as cache-enabled, matching `should_cache_fn` in
    keystone.common.cache.
    """
    def wrapper(f):
        @functools.wraps(f)
        def inner(*args, **kwargs):
            if not CONF.cache.enabled:
                raise testcase.TestSkipped('Cache globally disabled.')
            for section in sections:
                conf_sec = getattr(CONF, section, None)
                if conf_sec is not None and not getattr(conf_sec, 'caching',
                                                        True):
                    raise testcase.TestSkipped('%s caching disabled.' %
                                               section)
            return f(*args, **kwargs)
        return inner
    return wrapper
def skip_if_no_multiple_domains_support(f):
    """Skip the decorated test when the identity driver is single-domain.

    Assumes the first positional argument is the test instance, whose
    `identity_api` exposes `multiple_domains_supported`.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        test_instance = args[0]
        if not test_instance.identity_api.multiple_domains_supported:
            raise testcase.TestSkipped('No multiple domains support')
        return f(*args, **kwargs)
    return wrapper
class UnexpectedExit(Exception):
    """Raised in place of sys.exit() (patched in BaseTestCase.setUp)."""
    pass
class BadLog(Exception):
    """Raised on invalid call to logging (parameter mismatch).

    Used as the side effect for the patched logging.Handler.handleError.
    """
    pass
class TestClient(object):
    """Minimal HTTP client driving a WSGI app via webob blank requests.

    If a token is supplied it is sent as X-Auth-Token unless the caller
    already provided one.
    """
    def __init__(self, app=None, token=None):
        self.app = app
        self.token = token

    def request(self, method, path, headers=None, body=None):
        headers = {} if headers is None else headers
        if self.token:
            headers.setdefault('X-Auth-Token', self.token)
        request = webob.Request.blank(path)
        request.method = method
        for name, value in headers.items():
            request.headers[name] = value
        if body:
            request.body = body
        return request.get_response(self.app)

    def get(self, path, headers=None):
        return self.request('GET', path=path, headers=headers)

    def post(self, path, headers=None, body=None):
        return self.request('POST', path=path, headers=headers, body=body)

    def put(self, path, headers=None, body=None):
        return self.request('PUT', path=path, headers=headers, body=body)
class BaseTestCase(oslotest.BaseTestCase):
    """Light weight base test class.

    This is a placeholder that will eventually go away once the
    setup/teardown in TestCase is properly trimmed down to the bare
    essentials. This is really just a play to speed up the tests by
    eliminating unnecessary work.
    """

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Turn any sys.exit() call during a test into a loud failure.
        exit_patch = mockpatch.PatchObject(sys, 'exit',
                                           side_effect=UnexpectedExit)
        self.useFixture(exit_patch)

    def cleanup_instance(self, *names):
        """Create a function suitable for use with self.addCleanup.

        :returns: a callable that uses a closure to delete instance
                  attributes
        """
        def cleanup():
            for name in names:
                # Guard with hasattr because load_backend in
                # test_backend_ldap may run this more than once per test.
                if hasattr(self, name):
                    delattr(self, name)
        return cleanup
@dependency.requires('revoke_api')
class TestCase(BaseTestCase):
    def config_files(self):
        # Config files to load into CONF; subclasses override to add files.
        return []
    def config_overrides(self):
        """Apply test-wide config overrides through the config fixture.

        Selects lightweight test backends (in-memory dogpile cache,
        templated catalog, KVS token/revoke drivers) and example PKI
        certificates, then applies the auth-plugin overrides.  Relies on
        self.config_fixture created in setUp().
        """
        signing_certfile = 'examples/pki/certs/signing_cert.pem'
        signing_keyfile = 'examples/pki/private/signing_key.pem'
        self.config_fixture.config(group='oslo_policy',
                                   policy_file=dirs.etc('policy.json'))
        self.config_fixture.config(
            # TODO(morganfainberg): Make Cache Testing a separate test case
            # in tempest, and move it out of the base unit tests.
            group='cache',
            backend='dogpile.cache.memory',
            enabled=True,
            proxies=['keystone.tests.unit.test_cache.CacheIsolatingProxy'])
        self.config_fixture.config(
            group='catalog',
            driver='keystone.catalog.backends.templated.Catalog',
            template_file=dirs.tests('default_catalog.templates'))
        self.config_fixture.config(
            group='identity',
            driver='keystone.identity.backends.sql.Identity')
        self.config_fixture.config(
            group='kvs',
            backends=[
                ('keystone.tests.unit.test_kvs.'
                 'KVSBackendForcedKeyMangleFixture'),
                'keystone.tests.unit.test_kvs.KVSBackendFixture'])
        self.config_fixture.config(
            group='revoke',
            driver='keystone.contrib.revoke.backends.kvs.Revoke')
        self.config_fixture.config(
            group='signing', certfile=signing_certfile,
            keyfile=signing_keyfile,
            ca_certs='examples/pki/certs/cacert.pem')
        self.config_fixture.config(
            group='token',
            driver='keystone.token.persistence.backends.kvs.Token')
        self.config_fixture.config(
            group='trust',
            driver='keystone.trust.backends.sql.Trust')
        self.config_fixture.config(
            group='saml', certfile=signing_certfile, keyfile=signing_keyfile)
        # Quiet noisy third-party loggers so test logs stay readable.
        self.config_fixture.config(
            default_log_levels=[
                'amqp=WARN',
                'amqplib=WARN',
                'boto=WARN',
                'qpid=WARN',
                'sqlalchemy=WARN',
                'suds=INFO',
                'oslo.messaging=INFO',
                'iso8601=WARN',
                'requests.packages.urllib3.connectionpool=WARN',
                'routes.middleware=INFO',
                'stevedore.extension=INFO',
                'keystone.notifications=INFO',
                'keystone.common._memcache_pool=INFO',
                'keystone.common.ldap=INFO',
            ])
        self.auth_plugin_config_override()
def auth_plugin_config_override(self, methods=None, **method_classes):
if methods is None:
methods = ['external', 'password', 'token', ]
if not method_classes:
method_classes = dict(
external='keystone.auth.plugins.external.DefaultDomain',
password='keystone.auth.plugins.password.Password',
token='keystone.auth.plugins.token.Token',
)
self.config_fixture.config(group='auth', methods=methods)
common_cfg.setup_authentication()
if method_classes:
self.config_fixture.config(group='auth', **method_classes)
    def setUp(self):
        """Set up config fixture, logging, warnings-as-errors and cleanups.

        Ordering matters here: the config fixture must exist before
        config_overrides() runs, and the auth-plugin option registration is
        patched first so overridden options are unregistered on cleanup.
        """
        super(TestCase, self).setUp()
        self.addCleanup(self.cleanup_instance('config_fixture', 'logger'))
        self.addCleanup(CONF.reset)
        # Any malformed logging call (parameter mismatch) fails the test.
        self.useFixture(mockpatch.PatchObject(logging.Handler, 'handleError',
                                              side_effect=BadLog))
        self.config_fixture = self.useFixture(config_fixture.Config(CONF))
        self.config(self.config_files())
        # NOTE(morganfainberg): mock the auth plugin setup to use the config
        # fixture which automatically unregisters options when performing
        # cleanup.
        def mocked_register_auth_plugin_opt(conf, opt):
            self.config_fixture.register_opt(opt, group='auth')
        self.register_auth_plugin_opt_patch = self.useFixture(
            mockpatch.PatchObject(common_cfg, '_register_auth_plugin_opt',
                                  new=mocked_register_auth_plugin_opt))
        self.config_overrides()
        self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
        # NOTE(morganfainberg): This code is a copy from the oslo-incubator
        # log module. This is not in a function or otherwise available to use
        # without having a CONF object to setup logging. This should help to
        # reduce the log size by limiting what we log (similar to how Keystone
        # would run under mod_wsgi or eventlet).
        for pair in CONF.default_log_levels:
            mod, _sep, level_name = pair.partition('=')
            logger = logging.getLogger(mod)
            logger.setLevel(level_name)
        # Promote keystone deprecation and SQLAlchemy warnings to errors.
        warnings.filterwarnings('error', category=DeprecationWarning,
                                module='^keystone\\.')
        warnings.simplefilter('error', exc.SAWarning)
        self.addCleanup(warnings.resetwarnings)
        self.useFixture(ksfixtures.Cache())
        # Clear the registry of providers so that providers from previous
        # tests aren't used.
        self.addCleanup(dependency.reset)
        self.addCleanup(kvs.INMEMDB.clear)
        # Ensure Notification subscriptions and resource types are empty
        self.addCleanup(notifications.clear_subscribers)
        self.addCleanup(notifications.reset_notifier)
        # Reset the auth-plugin registry
        self.addCleanup(self.clear_auth_plugin_registry)
        self.addCleanup(setattr, controllers, '_VERSIONS', [])
    def config(self, config_files):
        """(Re)load CONF for project keystone from the given config files."""
        CONF(args=[], project='keystone', default_config_files=config_files)
    def load_backends(self):
        """Initializes each manager and assigns them to an attribute.

        Each manager is set on self under its driver name and scheduled for
        removal via addCleanup.
        """
        # TODO(blk-u): Shouldn't need to clear the registry here, but some
        # tests call load_backends multiple times. These should be fixed to
        # only call load_backends once.
        dependency.reset()
        # TODO(morganfainberg): Shouldn't need to clear the registry here, but
        # some tests call load_backends multiple times.  Since it is not
        # possible to re-configure a backend, we need to clear the list. This
        # should eventually be removed once testing has been cleaned up.
        kvs_core.KEY_VALUE_STORE_REGISTRY.clear()
        self.clear_auth_plugin_registry()
        drivers, _unused = common.setup_backends(
            load_extra_backends_fn=self.load_extra_backends)
        for manager_name, manager in six.iteritems(drivers):
            setattr(self, manager_name, manager)
        self.addCleanup(self.cleanup_instance(*drivers.keys()))
    def load_extra_backends(self):
        """Override to load managers that aren't loaded by default.

        This is useful to load managers initialized by extensions. No extra
        backends are loaded by default.
        :return: dict of name -> manager
        """
        return {}
    def load_fixtures(self, fixtures):
        """Hacky basic and naive fixture loading based on a python module.

        Expects that the various APIs into the various services are already
        defined on `self`.  Each created domain/tenant/role/user is also set
        on self as e.g. `tenant_<id>` and removed again at cleanup.
        """
        # NOTE(dstanek): create a list of attribute names to be removed
        # from this instance during cleanup
        fixtures_to_cleanup = []
        # TODO(termie): doing something from json, probably based on Django's
        # loaddata will be much preferred.
        if (hasattr(self, 'identity_api') and
                hasattr(self, 'assignment_api') and
                hasattr(self, 'resource_api')):
            for domain in fixtures.DOMAINS:
                # Existing domains (or drivers without domain support) are
                # tolerated: fall back to the current/record value.
                try:
                    rv = self.resource_api.create_domain(domain['id'], domain)
                except exception.Conflict:
                    rv = self.resource_api.get_domain(domain['id'])
                except exception.NotImplemented:
                    rv = domain
                attrname = 'domain_%s' % domain['id']
                setattr(self, attrname, rv)
                fixtures_to_cleanup.append(attrname)
            for tenant in fixtures.TENANTS:
                if hasattr(self, 'tenant_%s' % tenant['id']):
                    try:
                        # This will clear out any roles on the project as well
                        self.resource_api.delete_project(tenant['id'])
                    except exception.ProjectNotFound:
                        pass
                rv = self.resource_api.create_project(
                    tenant['id'], tenant)
                attrname = 'tenant_%s' % tenant['id']
                setattr(self, attrname, rv)
                fixtures_to_cleanup.append(attrname)
            for role in fixtures.ROLES:
                try:
                    rv = self.role_api.create_role(role['id'], role)
                except exception.Conflict:
                    rv = self.role_api.get_role(role['id'])
                attrname = 'role_%s' % role['id']
                setattr(self, attrname, rv)
                fixtures_to_cleanup.append(attrname)
            for user in fixtures.USERS:
                user_copy = user.copy()
                tenants = user_copy.pop('tenants')
                try:
                    existing_user = getattr(self, 'user_%s' % user['id'], None)
                    if existing_user is not None:
                        self.identity_api.delete_user(existing_user['id'])
                except exception.UserNotFound:
                    pass
                # For users, the manager layer will generate the ID
                user_copy = self.identity_api.create_user(user_copy)
                # Our tests expect that the password is still in the user
                # record so that they can reference it, so put it back into
                # the dict returned.
                user_copy['password'] = user['password']
                for tenant_id in tenants:
                    try:
                        self.assignment_api.add_user_to_project(
                            tenant_id, user_copy['id'])
                    except exception.Conflict:
                        pass
                # Use the ID from the fixture as the attribute name, so
                # that our tests can easily reference each user dict, while
                # the ID in the dict will be the real public ID.
                attrname = 'user_%s' % user['id']
                setattr(self, attrname, user_copy)
                fixtures_to_cleanup.append(attrname)
            self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup))
def _paste_config(self, config):
    """Resolve a config name to a paste config URI.

    Names already in ``config:`` URI form are returned untouched.
    Otherwise the tests dir and then the repo ``etc`` dir are searched
    for a matching ``<name>-paste.ini`` file; if neither exists the
    name is returned unchanged.
    """
    if config.startswith('config:'):
        return config
    candidates = (os.path.join(TESTSDIR, config),
                  os.path.join(ROOTDIR, 'etc', config))
    for candidate in candidates:
        if os.path.exists('%s-paste.ini' % candidate):
            return 'config:%s-paste.ini' % candidate
    return config
def loadapp(self, config, name='main'):
    """Load the WSGI app *name* from the resolved paste config."""
    paste_uri = self._paste_config(config)
    return service.loadapp(paste_uri, name=name)
def clear_auth_plugin_registry(self):
    # Reset the module-level auth plugin registry so each test starts
    # from a clean slate: drop all registered auth methods and mark the
    # plugins as not-yet-loaded so they are re-discovered on next use.
    auth.controllers.AUTH_METHODS.clear()
    auth.controllers.AUTH_PLUGINS_LOADED = False
def assertCloseEnoughForGovernmentWork(self, a, b, delta=3):
    """Asserts that two datetimes are nearly equal within a small delta.

    :param a: first datetime.
    :param b: second datetime.
    :param delta: Maximum allowable time delta, defined in seconds.
    """
    msg = '%s != %s within %s delta' % (a, b, delta)
    # FIX: use total_seconds() rather than .seconds -- the latter only
    # holds the seconds *component* of the timedelta, so a difference of
    # an exact number of days (.seconds == 0) would wrongly pass.
    self.assertTrue(abs(a - b).total_seconds() <= delta, msg)
def assertNotEmpty(self, l):
    # Passes when the container has at least one element (len() > 0 is
    # truthy); fails on an empty container.
    self.assertTrue(len(l))
def assertDictEqual(self, d1, d2, msg=None):
    # Override of unittest's assertDictEqual: require both operands to
    # actually be dicts before comparing them for equality.
    self.assertIsInstance(d1, dict)
    self.assertIsInstance(d2, dict)
    self.assertEqual(d1, d2, msg)
def assertRaisesRegexp(self, expected_exception, expected_regexp,
                       callable_obj, *args, **kwargs):
    """Asserts that the message in a raised exception matches a regexp.

    :param expected_exception: exception class expected to be raised.
    :param expected_regexp: pattern string or compiled regexp that the
        exception's message must match (via ``search``).
    :param callable_obj: callable invoked with ``*args``/``**kwargs``.
    :raises: ``self.failureException`` when no exception is raised or
        the message does not match.
    """
    try:
        callable_obj(*args, **kwargs)
    except expected_exception as exc_value:
        if isinstance(expected_regexp, six.string_types):
            expected_regexp = re.compile(expected_regexp)
        # NOTE(review): `unicode` exists on Python 2 only -- on Python 3
        # this branch would raise NameError. Matches against the unicode
        # form when the exception's first arg is unicode, otherwise
        # against the byte-string form.
        if isinstance(exc_value.args[0], unicode):
            if not expected_regexp.search(unicode(exc_value)):
                raise self.failureException(
                    '"%s" does not match "%s"' %
                    (expected_regexp.pattern, unicode(exc_value)))
        else:
            if not expected_regexp.search(str(exc_value)):
                raise self.failureException(
                    '"%s" does not match "%s"' %
                    (expected_regexp.pattern, str(exc_value)))
    else:
        # The callable returned without raising anything at all.
        if hasattr(expected_exception, '__name__'):
            excName = expected_exception.__name__
        else:
            excName = str(expected_exception)
        raise self.failureException("%s not raised" % excName)
def assertDictContainsSubset(self, expected, actual, msg=None):
    """Checks whether actual is a superset of expected.

    Fails (via ``self.fail``) listing keys missing from *actual* and
    keys whose values differ; returns silently when *expected* is a
    subset of *actual*.
    """
    def safe_repr(obj, short=False):
        # Bounded repr so assertion messages stay readable.
        _MAX_LENGTH = 80
        try:
            result = repr(obj)
        except Exception:
            result = object.__repr__(obj)
        if not short or len(result) < _MAX_LENGTH:
            return result
        return result[:_MAX_LENGTH] + ' [truncated]...'

    missing = []
    mismatched = []
    # FIX: plain dict.items() iterates correctly on both Python 2 and 3;
    # the six.iteritems() indirection was an unnecessary dependency.
    for key, value in expected.items():
        if key not in actual:
            missing.append(key)
        elif value != actual[key]:
            mismatched.append('%s, expected: %s, actual: %s' %
                              (safe_repr(key), safe_repr(value),
                               safe_repr(actual[key])))
    if not (missing or mismatched):
        return
    standardMsg = ''
    if missing:
        standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
                                               missing)
    if mismatched:
        if standardMsg:
            standardMsg += '; '
        standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
    self.fail(self._formatMessage(msg, standardMsg))
@property
def ipv6_enabled(self):
    """True when an IPv6 socket can be bound to the loopback address."""
    if not socket.has_ipv6:
        return False
    sock = None
    try:
        sock = socket.socket(socket.AF_INET6)
        # NOTE(Mouad): binding to ::1 proves IPv6 is actually usable on
        # this host, not merely compiled into the interpreter.
        sock.bind(("::1", 0))
        return True
    except socket.error:
        return False
    finally:
        if sock:
            sock.close()
def skip_if_no_ipv6(self):
    # Skip the calling test unless the host can bind IPv6 sockets
    # (see the ipv6_enabled property).
    if not self.ipv6_enabled:
        raise self.skipTest("IPv6 is not enabled in the system")
def skip_if_env_not_set(self, env_var):
    """Skip the calling test when *env_var* is unset or empty."""
    value = os.environ.get(env_var)
    if not value:
        self.skipTest('Env variable %s is not set.' % env_var)
class SQLDriverOverrides(object):
    """A mixin for consolidating sql-specific test overrides."""

    def config_overrides(self):
        super(SQLDriverOverrides, self).config_overrides()
        # SQL specific driver overrides, applied one config group at a
        # time in the same order the originals were written.
        sql_drivers = (
            ('catalog', 'keystone.catalog.backends.sql.Catalog'),
            ('identity', 'keystone.identity.backends.sql.Identity'),
            ('policy', 'keystone.policy.backends.sql.Policy'),
            ('revoke', 'keystone.contrib.revoke.backends.sql.Revoke'),
            ('token', 'keystone.token.persistence.backends.sql.Token'),
            ('trust', 'keystone.trust.backends.sql.Trust'),
        )
        for group, driver in sql_drivers:
            self.config_fixture.config(group=group, driver=driver)
|
{
"content_hash": "947150caef9bf41dcf8bd2a73f74c5c4",
"timestamp": "",
"source": "github",
"line_count": 646,
"max_line_length": 79,
"avg_line_length": 36.61764705882353,
"alnum_prop": 0.5951807228915663,
"repo_name": "rushiagr/keystone",
"id": "caca7dbdab3fea8c8065a67ec2c4e78b2926da17",
"size": "24241",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "keystone/tests/unit/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Python",
"bytes": "3739901"
},
{
"name": "Shell",
"bytes": "10877"
}
],
"symlink_target": ""
}
|
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.get_programs_response import GetProgramsResponse # noqa: E501
from swagger_client.rest import ApiException
class TestGetProgramsResponse(unittest.TestCase):
    """GetProgramsResponse unit test stubs (swagger-codegen generated)."""

    def setUp(self):
        # No per-test fixtures are needed for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testGetProgramsResponse(self):
        """Test GetProgramsResponse"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.get_programs_response.GetProgramsResponse()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
{
"content_hash": "413757cf792dfcda513bc666908e9fd5",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 119,
"avg_line_length": 25.342105263157894,
"alnum_prop": 0.7040498442367601,
"repo_name": "mindbody/API-Examples",
"id": "4ab276e64326e4fbd6090e04ad350fe21ed92508",
"size": "980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SDKs/Python/test/test_get_programs_response.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "PHP",
"bytes": "3610259"
},
{
"name": "Python",
"bytes": "2338642"
},
{
"name": "Ruby",
"bytes": "2284441"
},
{
"name": "Shell",
"bytes": "5058"
}
],
"symlink_target": ""
}
|
"""
fizzbuzz_functional_2.py (Python 2.7.5)
A second version of a functional approach.
This one is essentially the same idea
as the first functional code, but with named
rather than anonymous functions.
One addition is a "function generator" (fb_func)
for the fizzbuzz functions, allowing a more
perhaps more general form for the mod arithmetic.
Be clear that fb_func returns a function, so
for example fb_func(3, 'Fizz')(9) would be 'Fizz'.
And one difference is that this time all the functions
are called in succession on each integer, rather
than transforming all of the integers. In other words,
there is only one map() call here, not many as in
the first functional program.
All of these are "pure" functions,
without sideffects or global state changes.
Each takes a clearly defined input and produces
a clearly defined output, with no dependency
on or modification of a "state" anywhere else.
A python subtlety that I've used here which you may not
have seen before is the * in argument lists. This
is used to convert a collection of arguments to their
components before passing into a function. Like this :
def printem(a,b): # takes two arguments
print a, b
pair = (3,4)
printem(pair) # error
printem(*pair) # works - same as printem(3,4)
In this case, each number (i.e. 19) has been turned into
a tuple (i.e. (19, '')) where the 2nd element carries
around the string which the number might be turning into.
This lets us do the 'Fizz' and 'Buzz' checks one at a time,
without losing the number along the way.
Since the functions return by fb_func and contract all
take two arguments, the * are needed to turn for example
contract((19,'')) into contract(*(19,'')) which is contract(19,'').
Well, anyway, it works.
I haven't put docstrings on any of these functions. Nor tests.
Can you add them?
Jim Mahoney | cs.marlboro.edu | Jan 2014 | opensource.org/licenses/MIT
"""
def expand(number):
    """Wrap an integer as a (number, accumulated_string) pair."""
    return number, ''
def contract(number, string):
    """Collapse a (number, string) pair: the string wins if non-empty,
    otherwise fall back to the number rendered as a string."""
    return string if string else str(number)
def fizzbuzz(i, fb_value, fb_string):
    """Return fb_string when i is divisible by fb_value, else ''."""
    return fb_string if i % fb_value == 0 else ''
def fb_func(fb_value, fb_string):
    """Build a pair-transformer for the (fb_value, fb_string) rule.

    The returned function maps (i, s) -> (i, s + fizzbuzz result), so
    the number is carried along while the label string accumulates.
    """
    def step(i, s):
        return (i, s + fizzbuzz(i, fb_value, fb_string))
    return step
def transform(i):
    """Run one integer through the Fizz then Buzz stages and collapse
    the resulting (number, string) pair to its final text."""
    pair = expand(i)
    pair = fb_func(3, 'Fizz')(*pair)
    pair = fb_func(5, 'Buzz')(*pair)
    return contract(*pair)
print '\n'.join(map(transform, range(1, 101)))
|
{
"content_hash": "358f32c16d6e9451faca3edb7adb4e51",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 73,
"avg_line_length": 30.82716049382716,
"alnum_prop": 0.6900280336403685,
"repo_name": "CorySpitzer/FizzBuzz",
"id": "ea064c643b7c9f58875d71d4c8cb51efc877151b",
"size": "2497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jim/fizzbuzz_functional_2.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
import json
import logging
import sys
import tempfile
from collections import namedtuple
from multiprocessing import Process, Queue, current_process
from operator import attrgetter
from urllib.parse import urlparse
import click
import click_log
from detectem.core import Detector
from detectem.exceptions import DockerStartError, NoPluginsError, SplashError
from detectem.plugin import load_plugins
from detectem.response import get_response
from detectem.settings import (
CMD_OUTPUT,
JSON_OUTPUT,
NUMBER_OF_SPLASH_INSTANCES,
SPLASH_MAX_RETRIES,
SPLASH_TIMEOUT,
)
from detectem.splash import get_splash_manager
from detectem.utils import create_printer
# Set up logging
logger = logging.getLogger("detectem")
# Render INFO-level messages in green in terminal output.
click_log.ColorFormatter.colors["info"] = dict(fg="green")
click_log.basic_config(logger)

# One unit of work for the worker pool: the detection call arguments
# plus how many times this URL has been retried after Splash errors.
TaskItem = namedtuple("TaskItem", ["args", "retries"])
@click.command()
@click.option(
    "--timeout",
    default=SPLASH_TIMEOUT,
    type=click.INT,
    help="Timeout for Splash (in seconds).",
)
@click.option(
    "--format",
    default=CMD_OUTPUT,
    type=click.Choice([CMD_OUTPUT, JSON_OUTPUT]),
    help="Set the format of the results.",
)
@click.option(
    "--metadata",
    default=False,
    is_flag=True,
    help="Include this flag to return plugin metadata.",
)
@click.option("--list-plugins", is_flag=True, help="List registered plugins")
@click.option("--save-har", is_flag=True, help="Save har to file")
@click.option("-i", "--input-file", type=click.File("r"), help="Read URLs from file")
@click_log.simple_verbosity_option(logger, default="error")
@click.argument("input_url", required=False)
def main(timeout, format, metadata, list_plugins, save_har, input_file, input_url):
    """CLI entry point: run detection over the given URLs using a pool
    of worker processes, each borrowing a Splash instance from the
    Splash manager, then print the pooled results.
    """
    # Gather urls from the optional input file and/or positional argument
    urls = []
    if input_file:
        urls += input_file.read().splitlines()
    if input_url:
        urls.append(input_url)

    # Check that `urls` contains valid URLs
    if not all(map(lambda u: urlparse(u).scheme in ["http", "https"], urls)):
        raise click.BadParameter("Check that all provided URLs are valid URLS")

    OPTIONS_WITHOUT_URLS = [list_plugins]
    # Exit if neither urls were defined nor an option that works without urls
    if not urls and not any(OPTIONS_WITHOUT_URLS):
        click.echo(click.get_current_context().get_help())
        sys.exit(1)

    printer = create_printer(format)

    # --list-plugins option: print the plugin listing and always exit
    if list_plugins:
        try:
            printer(get_plugins(metadata))
        except NoPluginsError as e:
            printer(str(e))
        finally:
            sys.exit(1)

    # Queues shared between this process and the workers
    task_queue = Queue()
    result_queue = Queue()

    # Init splash manager
    splash_manager = get_splash_manager()
    logger.info(f"[+] Using {splash_manager.__class__.__name__} as Splash manager")

    # Change number of instances if there are fewer urls to analyze
    n_instances = NUMBER_OF_SPLASH_INSTANCES
    if n_instances > len(urls):
        n_instances = len(urls)
    logger.info(f"[+] Using {n_instances} Splash instances")

    logger.info(f"[+] Setting up Splash manager")
    splash_manager.setup(n_instances)

    # Number of available instances could be different to `n_instances`
    # because of issues starting instances
    n_available_instances = splash_manager.get_number_of_available_instances()
    if n_available_instances != n_instances:
        logger.info(f"[+] Only {n_available_instances} instances are going to be used")
    logger.info(f"[+] Setting up done")

    # Create pool of workers, one per available Splash instance
    processes = [
        Process(
            target=process_url_worker,
            args=(splash_manager, task_queue, result_queue),
        )
        for _ in range(n_available_instances)
    ]

    # Start the workers
    for p in processes:
        p.start()

    # Send the provided urls to the input queue
    for url in urls:
        task_queue.put(TaskItem(args=[url, timeout, metadata, save_har], retries=0))

    # Wait until processing on all workers is done
    for p in processes:
        p.join()

    # Drain the result queue and print everything at once
    results = []
    while not result_queue.empty():
        result = result_queue.get()
        results.append(result)
    printer(results)

    splash_manager.teardown()
def process_url_worker(splash_manager, task_queue, result_queue):
    """Worker-process loop: pull TaskItems off *task_queue*, run the
    detection against a Splash instance borrowed from *splash_manager*,
    and push result dicts onto *result_queue*.

    Splash errors re-queue the task (up to SPLASH_MAX_RETRIES times);
    other known errors produce an error result immediately. The loop
    returns once the task queue is observed empty.
    """
    process_name = current_process().name

    with splash_manager.sem:
        task_item: TaskItem
        for task_item in iter(task_queue.get, "STOP"):
            args = task_item.args
            url = args[0]

            # Get a Splash instance from pool of Splash servers
            with splash_manager.assign_instance() as (container_name, splash_url):
                result = None
                logger.info(
                    f"[+] Processing {url} @ {process_name} [retry: {task_item.retries} | instance: {container_name}]"
                )
                try:
                    result = get_detection_results(*args + [splash_url])
                except SplashError:
                    # FIX: the exception was bound to an unused name
                    # (`as e`); dropped the dead binding.
                    # Handle limit of retries
                    retries = task_item.retries + 1
                    if retries == SPLASH_MAX_RETRIES:
                        result = {
                            "url": url,
                            "error": "Maximum number of retries reached.",
                        }
                    else:
                        # Put back in `task_queue` with incremented `retries`
                        task_queue.put(TaskItem(args=task_item.args, retries=retries))

                    # Notify error to the manager
                    if splash_manager.handles_errors:
                        splash_manager.handle_error(container_name)
                except (NoPluginsError, DockerStartError) as e:
                    result = {"url": url, "error": str(e)}

                if result:
                    result_queue.put(result)

            # Finish if there aren't any more tasks in the queue
            if task_queue.empty():
                logger.info(f"[+] Processing is done @ {process_name}")
                return
def get_detection_results(
    url,
    timeout,
    metadata=False,
    save_har=False,
    splash_url="",
):
    """Return results from detector.

    This function prepares the environment loading the plugins,
    getting the response and passing it to the detector.
    In case of errors, it raises exceptions to be handled externally.

    :param url: URL to analyze.
    :param timeout: Splash timeout in seconds.
    :param metadata: when True, include plugin metadata in the results.
    :param save_har: when True, dump the HAR capture to a temp file.
    :param splash_url: base URL of the Splash instance to use.
    :raises NoPluginsError: when no detection plugins are registered.
    """
    plugins = load_plugins()
    if not plugins:
        raise NoPluginsError("No plugins found")

    logger.debug("[+] Starting detection with %(n)d plugins", {"n": len(plugins)})

    response = get_response(url, plugins, timeout, splash_url)

    # Save HAR
    if save_har:
        fd, path = tempfile.mkstemp(suffix=".har")
        logger.info(f"Saving HAR file to {path}")
        # open() takes ownership of the raw fd from mkstemp, so the
        # descriptor is closed when the `with` block exits.
        with open(fd, "w") as f:
            json.dump(response["har"], f)

    det = Detector(response, plugins, url)
    softwares = det.get_results(metadata=metadata)

    output = {"url": url, "softwares": softwares}

    return output
def get_plugins(metadata):
    """Return the registered plugins, sorted by name.

    With *metadata* truthy each entry is a dict of name/homepage (plus
    hints when present); otherwise entries are plain name strings.
    """
    plugins = load_plugins()
    if not plugins:
        raise NoPluginsError("No plugins found")

    entries = []
    for plugin in sorted(plugins.get_all(), key=attrgetter("name")):
        if not metadata:
            entries.append(plugin.name)
            continue
        entry = {"name": plugin.name, "homepage": plugin.homepage}
        hints = getattr(plugin, "hints", [])
        if hints:
            entry["hints"] = hints
        entries.append(entry)
    return entries
if __name__ == "__main__":
    # Click parses CLI arguments when invoked as a script.
    main()
|
{
"content_hash": "97e5a604db0909c275117f2b07002db8",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 118,
"avg_line_length": 30.109375,
"alnum_prop": 0.6105345096004151,
"repo_name": "spectresearch/detectem",
"id": "1245c44f18bde67322910087aa9b1f9d05da880d",
"size": "7708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "detectem/cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "1226"
},
{
"name": "Makefile",
"bytes": "1916"
},
{
"name": "Python",
"bytes": "81582"
}
],
"symlink_target": ""
}
|
"""Utilities for working with ECS tasks."""
from .. import client as boto3client
def create(profile, cluster, task_definition, started_by=None, count=None):
    """Run a task in a cluster.

    Args:

        profile
            A profile to connect to AWS with.

        cluster
            The name of the cluster to run the task in.

        task_definition
            The full name of the task to run, i.e., family:revision.

        started_by
            A string to help identify the task later.

        count
            The number of copies of the task to run.

    Returns:
        The data returned by boto3.

    """
    client = boto3client.get("ecs", profile)
    params = {"cluster": cluster, "taskDefinition": task_definition}
    if started_by:
        params["startedBy"] = started_by
    if count:
        params["count"] = count
    return client.run_task(**params)
def delete(profile, cluster, task_id):
    """Stop a task in a cluster.

    Args:

        profile
            A profile to connect to AWS with.

        cluster
            The name of the cluster the task is running in.

        task_id
            The ID of the task to stop.

    Returns:
        The data returned by boto3.

    """
    client = boto3client.get("ecs", profile)
    return client.stop_task(cluster=cluster, task=task_id)
def get_arns(profile, cluster, started_by=None):
    """Get all ECS task ARNs for a cluster.

    Args:

        profile
            A profile to connect to AWS with.

        cluster
            The name of a cluster.

        started_by
            Get tasks started with this value.

    Returns:
        The data returned by boto3.

    """
    # FIX: dropped the dead `result = None` local; it was assigned and
    # never used.
    client = boto3client.get("ecs", profile)
    params = {}
    params["cluster"] = cluster
    if started_by:
        params["startedBy"] = started_by
    return client.list_tasks(**params)
def get(profile, cluster, tasks):
    """Get the info for tasks in a cluster.

    Args:

        profile
            A profile to connect to AWS with.

        cluster
            The name of a cluster.

        tasks
            The list of task ARNs to fetch.

    Returns:
        The data returned by boto3.

    """
    client = boto3client.get("ecs", profile)
    return client.describe_tasks(cluster=cluster, tasks=tasks)
|
{
"content_hash": "2c3fec25a5d77bdaf5d1a09d5a2b445b",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 75,
"avg_line_length": 21.417391304347827,
"alnum_prop": 0.5789687373122209,
"repo_name": "jtpaasch/armyguys",
"id": "bb3bb714b34da0eec0101183d7049e2053859d1a",
"size": "2488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "armyguys/aws/ecs/task.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "334826"
},
{
"name": "Shell",
"bytes": "3654"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import socket
from six.moves.socketserver import ThreadingUnixStreamServer
# from threading import Thread
# import select
# ============= local library imports ==========================
from .messaging_server import MessagingServer
from pychron.messaging.handlers.ipc_handler import IPCHandler
class IPCServer(ThreadingUnixStreamServer, MessagingServer):
    """Unix-domain-socket (IPC) command server.

    Combines the stdlib threading unix stream server with pychron's
    MessagingServer mixin; each connection is handled by IPCHandler.
    """

    def __init__(self, parent, processor_type, datasize, *args, **kw):
        """Bind the server socket, recording the parent and its repeater.

        :param parent: owning object; must expose a ``repeater``.
        :param processor_type: stored for later use -- presumably by the
            handler/processor machinery; TODO confirm.
        :param datasize: stored message/read size for the handler.

        Remaining positional args are forwarded (with IPCHandler appended
        as the request-handler class) to ThreadingUnixStreamServer.
        """
        self.parent = parent
        self.repeater = parent.repeater
        self.datasize = datasize
        self.processor_type = processor_type

        # Assume connected; flipped to False if binding the socket fails.
        self.connected = True
        try:
            # ThreadingUnixStreamServer expects the handler class as the
            # last positional argument after the socket address.
            args += (IPCHandler,)
            super(IPCServer, self).__init__(*args, **kw)
        except socket.error as e:
            # NOTE(review): `warning` is not defined here; it is expected
            # to come from the MessagingServer side of the MRO -- confirm.
            self.warning(e)
            self.connected = False
# ============= EOF ====================================
|
{
"content_hash": "c656ee0f6e339ebc6a79d5749ef08bab",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 70,
"avg_line_length": 27.647058823529413,
"alnum_prop": 0.5872340425531914,
"repo_name": "NMGRL/pychron",
"id": "81f6346bb17d86bd94f62fbf3a9e2058ee1dbbe8",
"size": "1809",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/messaging/ipc_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
}
|
'''
Una tienda hace un descuento del 10% por compras menores de 20,
un 20% por compras entre 20 y 50 y un 25% si la compra es mayor.
Escribe un programa que pida el precio de un producto
y muestre su precio final en las rebajas.
'''
from easygui import *

# Ask the user for the product price via a GUI dialog.
precio = float(enterbox('Introduce el precio'))

# Apply the discount tier and show the final price with two decimals.
if precio < 20:
    precio = precio * 0.9
    #msgbox('El precio final con 10% de descuento es: '+ str(precio))
    msgbox('El precio final con 10%% de descuento es: %.2f' % precio)
elif precio >= 20 and precio <= 50:
    precio = precio * 0.8
    msgbox('El precio final con 20%% de descuento es: %.2f' % precio)
else:
    precio = precio * 0.75
    msgbox('El precio final con 25%% de descuento es: %.2f' % precio)

# Alternative solution kept for reference (never executed):
'''
consumo = float(enterbox('Introduce el precio'))
if consumo < 20:
    descuento = consumo * 0.1
elif consumo < 50:
    descuento = consumo * 0.2
else:
    descuento = consumo * 0.25
precio = consumo - descuento
msgbox('El precio final es %f' precio)
'''
# FIX: removed a dangling unmatched triple-quote that followed the
# commented-out block above -- it opened an unterminated string literal
# and made the whole file a SyntaxError.
|
{
"content_hash": "b9907f6e03a60a92331fa7c84f0f4e74",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 69,
"avg_line_length": 23.11627906976744,
"alnum_prop": 0.6609657947686117,
"repo_name": "txtbits/daw-python",
"id": "0470d35b61ab7a20b9c00a6f47336f241b96ebce",
"size": "1019",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "control de flujo/ejercicio2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "108479"
},
{
"name": "HTML",
"bytes": "329723"
},
{
"name": "JavaScript",
"bytes": "173357"
},
{
"name": "Python",
"bytes": "1368269"
}
],
"symlink_target": ""
}
|
class GetAttr:
    """Demo class: __getattr__ fires only for attribute lookups that
    fail through the normal instance/class search."""

    eggs = 88  # eggs stored on class, spam on instance

    def __init__(self):
        self.spam = 77

    def __len__(self):  # len here, else __getattr__ called with __len__
        print('__len__: 42')
        return 42

    def __getattr__(self, attr):  # Provide __str__ if asked, else dummy func
        print('getattr: ' + attr)
        return (lambda *args: '[Getattr str]') if attr == '__str__' else (lambda *args: None)
class GetAttribute(object):  # object required in 2.X, implied in 3.X
    """Demo class: __getattribute__ intercepts EVERY attribute fetch,
    not just failed lookups (contrast with GetAttr above)."""

    eggs = 88  # In 2.X all are isinstance(object) auto

    def __init__(self):  # But must derive to get new-style tools,
        self.spam = 77  # incl __getattribute__, some __X__ defaults

    def __len__(self):
        print('__len__: 42')
        return 42

    def __getattribute__(self, attr):
        print('getattribute: ' + attr)
        return (lambda *args: '[GetAttribute str]') if attr == '__str__' else (lambda *args: None)
# Exercise both classes side by side: each attribute access and built-in
# operation prints which hook (if any) intercepted it.
for Class in GetAttr, GetAttribute:
    print('\n' + Class.__name__.ljust(50, '='))

    X = Class()
    X.eggs  # Class attr
    X.spam  # Instance attr
    X.other  # Missing attr
    len(X)  # __len__ defined explicitly

    # New-styles must support [], +, call directly: redefine
    # (implicit special-method lookups go to the type, bypassing the
    # instance-attribute hooks above)
    try: X[0]  # __getitem__?
    except: print('fail []')

    try: X + 99  # __add__?
    except: print('fail +')

    try: X()  # __call__? (implicit via built-in)
    except: print('fail ()')

    X.__call__()  # __call__? (explicit, not inherited)
    print(X.__str__())  # __str__? (explicit, inherited from type)
    print(X)  # __str__? (implicit via built-in)
|
{
"content_hash": "cfdef2120447de4c084b552148ce0352",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 81,
"avg_line_length": 37.411764705882355,
"alnum_prop": 0.47746331236897277,
"repo_name": "dreadrel/UWF_2014_spring_COP3990C-2507",
"id": "71fe06b11b75caed2f7bd0bb5c4676bb18c85b71",
"size": "1908",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notebooks/scripts/book_code/code/getattr-builtins.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1796"
},
{
"name": "Python",
"bytes": "493591"
}
],
"symlink_target": ""
}
|
'''
Mimics a neopets websession, handles cookies, headers, updating referer
header and human-like pauses.
Part of naps (neopets automation program suite)
'''
import requests
import pickle
import json
import os
import sys
import random
import time
import datetime
import configparser as cp
class NeoSession:
    '''Mimics a logged-in neopets.com browser session.

    All state (config, the shared requests session, cookie jar path)
    lives at class level and is shared by every instance; __init__ only
    loads persisted cookies and headers into the shared session.
    '''

    conf = cp.ConfigParser()
    conf.read('settings.conf')
    session = requests.Session()
    username = conf['USER-SETTINGS']['USERNAME']
    login_data = {'username': username, 'password': conf['USER-SETTINGS']
                  ['PASSWORD'], 'destination': '%2Findex.phtml'}
    jar = conf['PROGRAM-SETTINGS']['COOKIE_JAR']
    # Human-like random pause range (seconds) applied before requests.
    pause_tuple = (2, 3)

    def __init__(self):
        self.load_cookies()
        self.load_headers()

    @staticmethod
    def current_time():
        # Timestamp prefix used by all log prints.
        return str(datetime.datetime.now().strftime('%r %D'))

    def get(self, url, pause=pause_tuple, login_check=True, referer=None):
        '''GET *url*; update Referer/cookies and re-login if logged out.'''
        if pause:
            time.sleep(random.randint(pause[0], pause[1]))
        resp = self.session.get(url)
        if referer is None:
            self.session.headers.update({'Referer': url})
        else:
            self.session.headers.update({'Referer': referer})
        self.update_cookies()
        try:
            resp.raise_for_status()
        except requests.exceptions.HTTPError:
            print('{}: ERROR - Connection error.'.format(self.current_time()))
        if login_check is True:
            if self.login_status(resp) is False:
                self.login()
                # NOTE(review): the retry after re-login issues a POST
                # even though this is get() -- looks unintended; confirm.
                resp = self.session.post(url)
                self.session.headers.update({'Referer': url})
                self.update_cookies()
                return resp
            else:
                return resp
        else:
            return resp

    def post(self, url, data=None, pause=pause_tuple, login_check=True, referer=None):
        '''POST *data* to *url*; update Referer/cookies, re-login if needed.'''
        if pause:
            time.sleep(random.randint(pause[0], pause[1]))
        if data is None:
            resp = self.session.post(url)
            if referer is None:
                self.session.headers.update({'Referer': url})
            else:
                self.session.headers.update({'Referer': referer})
            self.update_cookies()
        else:
            resp = self.session.post(url, data)
            self.session.headers.update({'Referer': url})
            self.update_cookies()
        try:
            resp.raise_for_status()
        except requests.exceptions.HTTPError:
            print('{}: ERROR - Connection error.'.format(self.current_time()))
        if login_check is True:
            if self.login_status(resp) is False:
                self.login()
                resp = self.session.post(url, data)
                self.session.headers.update({'Referer': url})
                self.update_cookies()
                return resp
            else:
                return resp
        else:
            return resp

    def update_cookies(self):
        # Persist the current session cookies when the jar file exists.
        if os.path.isfile(self.jar):
            with open(self.jar, 'wb') as jar:
                pickle.dump(self.session.cookies, jar)

    def load_cookies(self):
        # Restore cookies from the jar file, skipping empty files.
        if os.path.isfile(self.jar):
            # FIX: the original used `is not 0`, an identity comparison
            # against an int literal whose result is implementation
            # dependent; compare by value instead.
            if os.path.getsize(self.jar) != 0:
                with open(self.jar, 'rb') as jar:
                    session_cookies = pickle.load(jar)
                self.session.cookies.update(session_cookies)

    def load_headers(self):
        # Load the default request headers from the configured JSON file.
        with open(self.conf['PROGRAM-SETTINGS']['HEADERS'], 'r') as headers:
            session_headers = json.load(headers)
        self.session.headers.update(session_headers)

    def login_status(self, resp):
        '''Return True/False depending on whether the "Welcome" banner
        for the configured user appears in the response body.'''
        if 'Welcome, <a href="/userlookup.phtml?user={}">'.format(
                self.username) not in resp.text:
            print('{}: Session - Not Logged in. [{}] \n{}: Session - Logging in.'.format(self.current_time(), resp.url, self.current_time()))
            return False
        if 'Welcome, <a href="/userlookup.phtml?user={}">'.format(
                self.username) in resp.text:
            print('{}: Session - Login check passed. [{}]'.format(self.current_time(), resp.url))
            return True

    def login(self):
        '''Log-in to neopets.com'''
        url = 'http://www.neopets.com/login.phtml'
        resp = self.post(url, data=self.login_data)
        self.session.headers.update({'Referer': resp.url})
        if not os.path.isfile(self.jar):
            # FIX: the original shelled out to `touch neopets.cookies`,
            # a hard-coded name that can disagree with the configured
            # COOKIE_JAR path; create the configured jar file instead.
            open(self.jar, 'ab').close()
        self.session.cookies.update(resp.cookies)
        self.update_cookies()
        print('{}: Session - Login successful.'.format(self.current_time()))

    def download_image(self, image_url):
        '''Stream *image_url* to captcha.jpeg in the working directory.'''
        resp = self.session.get(image_url, stream=True)
        if resp.status_code == 200:
            with open("captcha.jpeg", 'wb') as f:
                f.write(resp.content)
def main():
    # Instantiating NeoSession loads persisted cookies and headers
    # into the shared requests session.
    NeoSession()


if __name__ == '__main__':
    main()
|
{
"content_hash": "5849e378a3e0fb9a418018f9ad8411b4",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 141,
"avg_line_length": 32.46052631578947,
"alnum_prop": 0.5674908796108634,
"repo_name": "jameseh/naps",
"id": "09c561cef554e136f60fadb8c7f51b804174c561",
"size": "4957",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NeoSession.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16129"
}
],
"symlink_target": ""
}
|
"""Defines a command message that sets PENDING status for job models"""
from __future__ import unicode_literals
import logging
from django.db import transaction
from job.models import Job
from messaging.messages.message import CommandMessage
from util.parse import datetime_to_string, parse_datetime
# This is the maximum number of job models that can fit in one message. This maximum ensures that every message of this
# type is less than 25 KiB long.
MAX_NUM = 1000

# Module-level logger for status-change reporting.
logger = logging.getLogger(__name__)
def create_pending_jobs_messages(pending_job_ids, when):
    """Creates messages to update the given job IDs to PENDING

    Job IDs are packed into as few messages as possible, starting a new
    message whenever the current one reaches its capacity.

    :param pending_job_ids: The job IDs
    :type pending_job_ids: :func:`list`
    :param when: The current time
    :type when: :class:`datetime.datetime`
    :return: The list of messages
    :rtype: :func:`list`
    """
    messages = []
    current = None
    for job_id in pending_job_ids:
        if current is None or not current.can_fit_more():
            if current is not None:
                messages.append(current)
            current = PendingJobs()
            current.status_change = when
        current.add_job(job_id)
    if current:
        messages.append(current)

    return messages
class PendingJobs(CommandMessage):
    """Command message that sets PENDING status for job models
    """

    def __init__(self):
        """Constructor
        """

        super(PendingJobs, self).__init__('pending_jobs')

        self._count = 0  # number of job IDs currently in this message
        self._pending_job_ids = []  # job IDs to move to PENDING
        self.status_change = None  # datetime when the status change occurred

    def add_job(self, job_id):
        """Adds the given job ID to this message

        :param job_id: The job ID
        :type job_id: int
        """

        self._count += 1
        self._pending_job_ids.append(job_id)

    def can_fit_more(self):
        """Indicates whether more jobs can fit in this message

        :return: True if more jobs can fit, False otherwise
        :rtype: bool
        """

        return self._count < MAX_NUM

    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        return {'status_change': datetime_to_string(self.status_change), 'job_ids': self._pending_job_ids}

    @staticmethod
    def from_json(json_dict):
        """See :meth:`messaging.messages.message.CommandMessage.from_json`
        """

        status_change = parse_datetime(json_dict['status_change'])

        message = PendingJobs()
        message.status_change = status_change
        for job_id in json_dict['job_ids']:
            message.add_job(job_id)
        return message

    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """

        with transaction.atomic():
            jobs_to_pending = []

            # Retrieve locked job models
            for job_model in Job.objects.get_locked_jobs(self._pending_job_ids):
                # Skip jobs whose recorded status change is newer than this
                # message's timestamp (stale update).
                if not job_model.last_status_change or job_model.last_status_change < self.status_change:
                    # Status update is not old, so perform the update
                    jobs_to_pending.append(job_model)

            # Update jobs that need status set to PENDING
            if jobs_to_pending:
                job_ids = Job.objects.update_jobs_to_pending(jobs_to_pending, self.status_change)
                logger.info('Set %d job(s) to PENDING status', len(job_ids))

        # Send messages to update recipe metrics
        # (local import avoids a circular dependency at module load time)
        from recipe.messages.update_recipe_metrics import create_update_recipe_metrics_messages_from_jobs
        self.new_messages.extend(create_update_recipe_metrics_messages_from_jobs(self._pending_job_ids))

        return True
|
{
"content_hash": "47b937ae2e8eb9301452fe798fd7a173",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 119,
"avg_line_length": 30.422764227642276,
"alnum_prop": 0.6269374665954035,
"repo_name": "ngageoint/scale",
"id": "c609977ad52e39f96e41730374e40998550506a8",
"size": "3742",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scale/job/messages/pending_jobs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7219"
},
{
"name": "CSS",
"bytes": "12193"
},
{
"name": "Dockerfile",
"bytes": "14853"
},
{
"name": "HCL",
"bytes": "301"
},
{
"name": "HTML",
"bytes": "48818"
},
{
"name": "JavaScript",
"bytes": "503"
},
{
"name": "Makefile",
"bytes": "5852"
},
{
"name": "Python",
"bytes": "5295677"
},
{
"name": "Shell",
"bytes": "26650"
}
],
"symlink_target": ""
}
|
import numpy as np
class MCMCSet(object):
    """Class for storage and management of multiple MCMC objects representing
    repeated runs of the MCMC."""

    def __init__(self, name):
        """Create the MCMCSet object and assign a name.

        Parameters
        ----------
        name : string
            The string describing the model/name/data/mcmc parameters that
            is used to identify the set of chains.
        """
        # Name associated with this set of chains (model name, fit
        # parameters, etc.)
        self.name = name
        # The list of chains in the MCMC set.
        self.chains = []
        # numpy array of pooled positions; filled in by pool_chains().
        self.pooled_positions = None

    def add_chain(self, chain):
        """Add an MCMC chain to the set."""
        self.chains.append(chain)

    def prune_all_chains(self, burn, thin=1):
        """Prune every chain with the given burn/thin settings, then drop
        any chain left with no positions (i.e., no accepted moves).

        Parameters
        ----------
        burn : int
            Number of initial steps to discard from each chain.
        thin : int
            Keep every thin-th step after the burn-in (default 1).
        """
        for chain in self.chains:
            chain.prune(burn, thin)
        # Bug fix: rebuild the list instead of calling remove() while
        # iterating over self.chains, which silently skips the element
        # following each removal.
        surviving = []
        for chain in self.chains:
            if len(chain.positions) == 0:
                # TODO: Should this be an exception?
                print("WARNING: Chain had no steps after pruning "
                      "(probably because no moves were accepted) "
                      "and is being removed.")
            else:
                surviving.append(chain)
        self.chains = surviving

    def all_pruned(self):
        """Indicates whether all chains have been pruned already.

        Raises an Exception if the set contains no chains.
        """
        if not self.chains:
            raise Exception("There are no chains in the MCMCSet.")
        for chain in self.chains:
            if not chain.pruned:
                return False
        return True

    def pool_chains(self):
        """Pool the chains into a single set of pooled positions stored along
        with the MCMCSet.

        Raises an Exception if the set is empty or any chain is unpruned.
        """
        if not self.chains:
            raise Exception("There are no chains in the MCMCSet.")
        # First, count the total number of steps after pruning and make sure
        # all chains have been pruned.
        total_positions = 0
        for chain in self.chains:
            if not chain.pruned:
                raise Exception("The chains have not yet been pruned.")
            total_positions += len(chain.positions)
        # Allocate enough space for the pooled positions
        self.pooled_positions = np.zeros((total_positions,
                                          self.chains[0].num_estimate))
        # Iterate again, filling in the pooled positions
        start_index = 0
        for chain in self.chains:
            last_index = start_index + len(chain.positions)
            self.pooled_positions[start_index:last_index, :] = chain.positions
            start_index = last_index

    def get_sample_position(self):
        """Returns a position sampled uniformly at random from the pooled
        chains. Requires that the chains have already been pooled.
        """
        if not self.chains:
            raise Exception("There are no chains in the MCMCSet.")
        if self.pooled_positions is None:
            raise Exception("Cannot get a sample position until the chains "
                            "have been pooled.")
        if len(self.pooled_positions) == 0:
            raise NoPositionsException('There are no positions in the combined '
                                       'pool of positions.')
        rand_index = np.random.randint(len(self.pooled_positions))
        return self.pooled_positions[rand_index]

    def get_sample_simulation(self, observables=True):
        """Uses the model in the first chain in the set to run a simulation for
        a randomly sampled position from the pooled chains.
        """
        position = self.get_sample_position()
        # Bug fix: previously observables was hard-coded to True here,
        # silently ignoring the caller's argument.
        return self.chains[0].simulate(position=position,
                                       observables=observables)

    def initialize_and_pool(self, chains, burn, thin=1):
        """Adds the chains to the MCMCSet and prunes and pools them."""
        for chain in chains:
            self.add_chain(chain)
        self.prune_all_chains(burn, thin)
        self.pool_chains()

    def maximum_likelihood(self):
        """Returns the maximum log likelihood (minimum negative log likelihood)
        from the set of chains, along with the position giving the maximum
        likelihood.

        Raises NoPositionsException if no chain has any accepted positions.
        """
        if not self.chains:
            raise Exception("There are no chains in the MCMCSet.")
        max_likelihood = np.inf
        max_likelihood_position = None
        for chain in self.chains:
            # Make sure the chain is not empty!
            if len(chain.likelihoods) > 0:
                # nanargmin skips NaN entries from failed likelihood
                # evaluations.
                chain_max_likelihood_index = np.nanargmin(chain.likelihoods)
                chain_max_likelihood = \
                        chain.likelihoods[chain_max_likelihood_index]
                if chain_max_likelihood < max_likelihood:
                    max_likelihood = chain_max_likelihood
                    max_likelihood_position = \
                            chain.positions[chain_max_likelihood_index]
        # Check if there are no positions
        if max_likelihood_position is None:
            raise NoPositionsException('The maximum likelihood could not be '
                    'determined because there are no accepted positions.')
        return (max_likelihood, max_likelihood_position)

    def maximum_posterior(self):
        """Returns the maximum log posterior (minimum negative log posterior)
        from the set of chains, along with the position giving the maximum
        posterior.

        Raises NoPositionsException if no chain has any accepted positions.
        """
        if not self.chains:
            raise Exception("There are no chains in the MCMCSet.")
        max_posterior = np.inf
        max_posterior_position = None
        for chain in self.chains:
            # Make sure the chain is not empty!
            if len(chain.posteriors) > 0:
                chain_max_posterior_index = np.nanargmin(chain.posteriors)
                chain_max_posterior = \
                        chain.posteriors[chain_max_posterior_index]
                if chain_max_posterior < max_posterior:
                    max_posterior = chain_max_posterior
                    max_posterior_position = \
                            chain.positions[chain_max_posterior_index]
        # Check if there are no positions
        if max_posterior_position is None:
            raise NoPositionsException('The maximum posterior could not be determined '
                    'because there are no accepted positions.')
        return (max_posterior, max_posterior_position)
class NoPositionsException(Exception):
    """Raised when an operation requires accepted positions but the chains
    (or the pooled position array) contain none."""
    pass
|
{
"content_hash": "4fb20450bf3a853a8063345e31de9d66",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 87,
"avg_line_length": 38.52486187845304,
"alnum_prop": 0.585974472967159,
"repo_name": "jmuhlich/bayessb",
"id": "1be72487ca61d3855b02a7337b28c139d1187110",
"size": "6973",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bayessb/multichain.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "187314"
}
],
"symlink_target": ""
}
|
from django.conf.urls import include, url
from django.contrib import admin
# Route table for the remindme app.
urlpatterns = [
    # NOTE(review): dotted-string view references are the pre-Django-1.10
    # style; presumably this project targets Django <= 1.9 -- confirm
    # before any framework upgrade.
    url(r'^$', 'remindme.views.home', name='home'),
    url(r'^dashboard$', 'remindme.views.dashboard', name='dashboard'),
    url(r'^create$', 'remindme.views.create', name='create'),
    # Built-in admin site and django-allauth account URLs.
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('allauth.urls')),
]
|
{
"content_hash": "c2e38f8f73934ea66f03aa11d8101ee7",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 70,
"avg_line_length": 37.5,
"alnum_prop": 0.6613333333333333,
"repo_name": "hitchtest/django-remindme",
"id": "72398e14a2c39f2d0da3ba2847e49601a5fa048a",
"size": "375",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "remindme/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3321"
},
{
"name": "Python",
"bytes": "12257"
}
],
"symlink_target": ""
}
|
import multiprocessing
#
# Server socket
#
# bind - The socket to bind.
#
# A string of the form: 'HOST', 'HOST:PORT', 'unix:PATH'.
# An IP is a valid HOST.
#
# backlog - The number of pending connections. This refers
# to the number of clients that can be waiting to be
# served. Exceeding this number results in the client
# getting an error when attempting to connect. It should
# only affect servers under significant load.
#
# Must be a positive integer. Generally set in the 64-2048
# range.
#
# Listen on all interfaces, port 8000.
bind = '0.0.0.0:8000'
# backlog = 2048
#
# Worker processes
#
# workers - The number of worker processes that this server
# should keep alive for handling requests.
#
# A positive integer generally in the 2-4 x $(NUM_CORES)
# range. You'll want to vary this a bit to find the best
# for your particular application's work load.
#
# worker_class - The type of workers to use. The default
# sync class should handle most 'normal' types of work
# loads. You'll want to read
# http://docs.gunicorn.org/en/latest/design.html#choosing-a-worker-type
# for information on when you might want to choose one
# of the other worker classes.
#
# An string referring to a 'gunicorn.workers' entry point
# or a python path to a subclass of
# gunicorn.workers.base.Worker. The default provided values
# are:
#
# egg:gunicorn#sync
# egg:gunicorn#eventlet - Requires eventlet >= 0.9.7
# egg:gunicorn#gevent - Requires gevent >= 0.12.2 (?)
# egg:gunicorn#tornado - Requires tornado >= 0.2
#
# worker_connections - For the eventlet and gevent worker classes
# this limits the maximum number of simultaneous clients that
# a single process can handle.
#
# A positive integer generally set to around 1000.
#
# timeout - If a worker does not notify the master process in this
# number of seconds it is killed and a new worker is spawned
# to replace it.
#
# Generally set to thirty seconds. Only set this noticeably
# higher if you're sure of the repercussions for sync workers.
# For the non sync workers it just means that the worker
# process is still communicating and is not tied to the length
# of time required to handle a single request.
#
# keepalive - The number of seconds to wait for the next request
# on a Keep-Alive HTTP connection.
#
# A positive integer. Generally set in the 1-5 seconds range.
#
# (2 x cores) + 1 -- the rule of thumb from the comment block above.
workers = multiprocessing.cpu_count() * 2 + 1
# worker_class = 'sync'
# worker_connections = 1000
# Workers silent for more than this many seconds are killed and respawned.
# NOTE(review): 180s is well above the usual 30s -- presumably some requests
# are slow here; confirm before lowering.
timeout = 180
# keepalive = 2
#
# spew - Install a trace function that spews every line of Python
# that is executed when running the server. This is the
# nuclear option.
#
# True or False
#
# spew = False
#
# Server mechanics
#
# daemon - Detach the main Gunicorn process from the controlling
# terminal with a standard fork/fork sequence.
#
# True or False
#
# pidfile - The path to a pid file to write
#
# A path string or None to not write a pid file.
#
# user - Switch worker processes to run as this user.
#
# A valid user id (as an integer) or the name of a user that
# can be retrieved with a call to pwd.getpwnam(value) or None
# to not change the worker process user.
#
# group - Switch worker process to run as this group.
#
# A valid group id (as an integer) or the name of a user that
# can be retrieved with a call to pwd.getgrnam(value) or None
# to change the worker processes group.
#
# umask - A mask for file permissions written by Gunicorn. Note that
# this affects unix socket permissions.
#
# A valid value for the os.umask(mode) call or a string
# compatible with int(value, 0) (0 means Python guesses
# the base, so values like "0", "0xFF", "0022" are valid
# for decimal, hex, and octal representations)
#
# tmp_upload_dir - A directory to store temporary request data when
# requests are read. This will most likely be disappearing soon.
#
# A path to a directory where the process owner can write. Or
# None to signal that Python should choose one on its own.
#
# daemon = False
# pidfile = None
# umask = 0
# user = None
# group = None
# tmp_upload_dir = None
#
# Logging
#
# logfile - The path to a log file to write to.
#
# A path string. "-" means log to stdout.
#
# loglevel - The granularity of log output
#
# A string of "debug", "info", "warning", "error", "critical"
#
# errorlog = '-'
# loglevel = 'info'
# accesslog = '-'
#
# Process naming
#
# proc_name - A base to use with setproctitle to change the way
# that Gunicorn processes are reported in the system process
# table. This affects things like 'ps' and 'top'. If you're
# going to be running more than one instance of Gunicorn you'll
# probably want to set a name to tell them apart. This requires
# that you install the setproctitle module.
#
# A string or None to choose a default of something like 'gunicorn'.
#
# proc_name = None
#
# Server hooks
#
# post_fork - Called just after a worker has been forked.
#
# A callable that takes a server and worker instance
# as arguments.
#
# pre_fork - Called just prior to forking the worker subprocess.
#
# A callable that accepts the same arguments as after_fork
#
# pre_exec - Called just prior to forking off a secondary
# master process during things like config reloading.
#
# A callable that takes a server instance as the sole argument.
#
def post_fork(server, worker):
    """Gunicorn server hook: called just after a worker has been forked."""
    server.log.info("Worker spawned (pid: %s)", worker.pid)
def pre_fork(server, worker):
    """Gunicorn server hook: called just prior to forking a worker. No-op."""
    pass
def pre_exec(server):
    """Gunicorn server hook: called just prior to forking off a secondary
    master process (e.g. during a config reload)."""
    server.log.info("Forked child, re-executing.")
def when_ready(server):
    """Gunicorn server hook: called once the master is initialized and is
    about to spawn its workers."""
    server.log.info("Server is ready. Spawning workers")
def worker_int(worker):
    """Gunicorn server hook: called when a worker receives SIGINT or SIGQUIT.

    Logs the event, then dumps a formatted stack trace for every live
    thread to the debug log to help diagnose what the worker was doing
    when it was interrupted.
    """
    worker.log.info("worker received INT or QUIT signal")

    ## get traceback info
    # Imported locally so the modules are only loaded when the hook fires.
    import threading, sys, traceback
    # Map thread ids back to human-readable thread names.
    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    for threadId, stack in sys._current_frames().items():
        code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""),
            threadId))
        for filename, lineno, name, line in traceback.extract_stack(stack):
            code.append('File: "%s", line %d, in %s' % (filename,
                lineno, name))
            if line:
                code.append(" %s" % (line.strip()))
    worker.log.debug("\n".join(code))
def worker_abort(worker):
    """Gunicorn server hook: called when a worker receives SIGABRT
    (typically sent by the master on timeout)."""
    worker.log.info("worker received SIGABRT signal")
|
{
"content_hash": "1c7a4992d28b4e5d4a6174d96551f2f1",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 77,
"avg_line_length": 30.55909090909091,
"alnum_prop": 0.6569983638256731,
"repo_name": "Chumbak/RetailstoreTV-Content-Player",
"id": "c7baf209e87259ae07e8c2a1c0253f9d05cc0225",
"size": "6761",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django_project/store_cms/store_cms/production_gunicorn.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "2316"
},
{
"name": "JavaScript",
"bytes": "40545"
},
{
"name": "Python",
"bytes": "25633"
}
],
"symlink_target": ""
}
|
from triangle import load_triangles, is_possible, load_triangles_from_cols
import unittest
class TestTriangle(unittest.TestCase):
    """Unit tests for the triangle-parsing helpers in triangle.py."""

    def setUp(self):
        # Expected parses of example.txt: row-wise and column-wise readings.
        self.triangles = [(1, 2, 3), (2, 2, 4), (2, 2, 3)]
        self.col_triangles = [(1, 2, 2), (2, 2, 2), (3, 4, 3)]

    def test_load_triangles(self):
        # Use unittest's assert methods (rather than bare `assert x == ...`)
        # so failures report the actual vs. expected values.
        self.assertEqual(load_triangles('example.txt'), self.triangles)

    def test_is_possible(self):
        # Impossible when the longest side >= the sum of the other two.
        self.assertFalse(is_possible(1, 2, 3))
        self.assertFalse(is_possible(2, 2, 5))
        self.assertTrue(is_possible(2, 2, 3))

    def test_load_triangles_from_cols(self):
        self.assertEqual(load_triangles_from_cols('example.txt'),
                         self.col_triangles)
|
{
"content_hash": "813bfdf5f3ac554400a6b9c9e4d122ee",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 76,
"avg_line_length": 34.94736842105263,
"alnum_prop": 0.6189759036144579,
"repo_name": "machinelearningdeveloper/aoc_2016",
"id": "6c601f7476da65be2f00afd66f9520a627efe8af",
"size": "664",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "03/test_triangle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "47387"
}
],
"symlink_target": ""
}
|
'''
pluginPackages test case
(c) Copyright 2012 Mark V Systems Limited, All rights reserved.
'''
def foo():
    """Entry point registered under 'Import.Unpackaged.Entry4' in
    __pluginInfo__ below; prints a marker when the plug-in is invoked."""
    print ("imported unpackaged plug-in grandchild 1")
# Plug-in registration dict consumed by the host application's plug-in
# loader (part of the pluginPackages test cases).
__pluginInfo__ = {
    'name': 'Unpackaged Listed Import Grandchild 1.1',
    'version': '0.9',
    'description': "This is a packages-containing unpackaged child plugin.",
    'license': 'Apache-2',
    'author': 'Mark V Systems',
    'copyright': '(c) Copyright 2015 Mark V Systems Limited, All rights reserved.',
    # classes of mount points (required)
    'Import.Unpackaged.Entry4': foo,
    # imported plugins
}
|
{
"content_hash": "39e583f3a186e2cca36a5911c6fcdb6f",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 83,
"avg_line_length": 30.1,
"alnum_prop": 0.6495016611295681,
"repo_name": "sternshus/Arelle",
"id": "8c94841fc492897fe4165ea35f2ed28972d6610d",
"size": "602",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "arelle/examples/plugin/importTestGrandchild1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "31873"
},
{
"name": "C#",
"bytes": "850"
},
{
"name": "HTML",
"bytes": "8640"
},
{
"name": "Java",
"bytes": "4663"
},
{
"name": "Makefile",
"bytes": "5565"
},
{
"name": "NSIS",
"bytes": "9050"
},
{
"name": "PLSQL",
"bytes": "1056360"
},
{
"name": "Python",
"bytes": "5523072"
},
{
"name": "Shell",
"bytes": "13921"
}
],
"symlink_target": ""
}
|
import ast
import token
import tokenize
from os.path import islink
from StringIO import StringIO
from itertools import izip
from dxr.build import unignored
from dxr.filters import FILE, LINE
from dxr.indexers import (Extent, FileToIndex as FileToIndexBase,
iterable_per_line, Position, split_into_lines,
TreeToIndex as TreeToIndexBase,
QUALIFIED_FILE_NEEDLE, QUALIFIED_LINE_NEEDLE,
with_start_and_end)
from dxr.lines import Ref
from dxr.plugins.python.analysis import TreeAnalysis
from dxr.plugins.python.menus import ClassRef
from dxr.plugins.python.utils import (ClassFunctionVisitorMixin,
convert_node_to_name, local_name,
path_to_module, ast_parse)
# Index mapping fragments contributed by the Python plugin -- presumably
# Elasticsearch property mappings keyed by doctype (see dxr.filters for
# FILE/LINE); confirm against the core indexing code.
mappings = {
    FILE: {
        'properties': {
            # Dotted module path of the file, backing the module: filter.
            'py_module': QUALIFIED_FILE_NEEDLE,
        },
    },
    LINE: {
        'properties': {
            # Per-line needles backing type:, function:, derived:, bases:,
            # callers:, overrides: and overridden: filters.
            'py_type': QUALIFIED_LINE_NEEDLE,
            'py_function': QUALIFIED_LINE_NEEDLE,
            'py_derived': QUALIFIED_LINE_NEEDLE,
            'py_bases': QUALIFIED_LINE_NEEDLE,
            'py_callers': QUALIFIED_LINE_NEEDLE,
            'py_overrides': QUALIFIED_LINE_NEEDLE,
            'py_overridden': QUALIFIED_LINE_NEEDLE,
        },
    },
}
class _FileToIgnore(object):
    """A file that we don't want to bother indexing, usually due to
    syntax errors.
    """
    def is_interesting(self):
        # Never interesting: short-circuits indexing for this file.
        return False


# Shared sentinel instance handed out for any path the analysis marked as
# unindexable (see TreeToIndex.file_to_index).
FILE_TO_IGNORE = _FileToIgnore()
class TreeToIndex(TreeToIndexBase):
    """Python plugin's tree indexer: runs the whole-tree static analysis
    after the build, then hands out per-file indexers."""

    @property
    def unignored_files(self):
        """Iterable of source paths that survive the tree's ignore rules."""
        return unignored(self.tree.source_folder, self.tree.ignore_paths,
                         self.tree.ignore_filenames)

    def post_build(self):
        """Run the post-build static analysis over every interesting file.

        Both generators below stay lazy so paths are produced on demand
        while TreeAnalysis consumes them.
        """
        interesting = (p for p in self.unignored_files if is_interesting(p))
        pairs = ((p, self.tree.source_encoding) for p in interesting)
        self.tree_analysis = TreeAnalysis(
            python_path=self.plugin_config.python_path,
            source_folder=self.tree.source_folder,
            paths=pairs)

    def file_to_index(self, path, contents):
        """Return a per-file indexer, or the ignore sentinel for files the
        analysis could not parse."""
        if path in self.tree_analysis.ignore_paths:
            return FILE_TO_IGNORE
        return FileToIndex(path, contents, self.plugin_name, self.tree,
                           tree_analysis=self.tree_analysis)
class IndexingNodeVisitor(ast.NodeVisitor, ClassFunctionVisitorMixin):
    """Node visitor that walks through the nodes in an abstract syntax
    tree and finds interesting things to index.

    Accumulates two result lists during the walk:
    * ``needles`` -- (needle_type, payload, extent) tuples for search filters.
    * ``refs`` -- (start_offset, end_offset, Ref) tuples for hover menus.
    """

    def __init__(self, file_to_index, tree_analysis):
        super(IndexingNodeVisitor, self).__init__()
        self.file_to_index = file_to_index
        self.tree_analysis = tree_analysis
        self.needles = []
        self.refs = []

    def visit_FunctionDef(self, node):
        # Index the function itself for the function: filter.
        start, end = self.file_to_index.get_node_start_end(node)
        if start is not None:
            self.yield_needle('py_function', node.name, start, end)

        # Delegate to the superclass so the ClassFunctionVisitorMixin can
        # also see this node and continue the traversal.
        super(IndexingNodeVisitor, self).visit_FunctionDef(node)

    def visit_Call(self, node):
        # Index function/method call sites
        name = convert_node_to_name(node.func)
        if name:
            start, end = self.file_to_index.get_node_start_end(node)
            if start is not None:
                self.yield_needle('py_callers', name, start, end)

        self.generic_visit(node)

    def visit_ClassDef(self, node):
        # Index the class itself for the type: filter.
        start, end = self.file_to_index.get_node_start_end(node)
        if start is not None:
            self.yield_needle('py_type', node.name, start, end)

        # Index the class hierarchy for classes for the derived: and
        # bases: filters.
        # NOTE(review): the hierarchy needles and the ref below still use
        # `start`/`end` even when `start` is None -- confirm whether that
        # path can actually occur for class defs.
        class_name = self.get_class_name(node)

        bases = self.tree_analysis.get_base_classes(class_name,
                                                    set([class_name]))
        for qualname in bases:
            self.yield_needle(needle_type='py_derived',
                              name=local_name(qualname), qualname=qualname,
                              start=start, end=end)

        derived_classes = self.tree_analysis.get_derived_classes(class_name,
                                                                 set([class_name]))
        for qualname in derived_classes:
            self.yield_needle(needle_type='py_bases',
                              name=local_name(qualname), qualname=qualname,
                              start=start, end=end)

        # Show a menu when hovering over this class.
        self.yield_ref(start, end,
                       ClassRef(self.file_to_index.tree, class_name))

        super(IndexingNodeVisitor, self).visit_ClassDef(node)

    def visit_ClassFunction(self, class_node, function_node):
        # Called by ClassFunctionVisitorMixin for each method of a class.
        class_name = self.get_class_name(class_node)
        function_qualname = class_name + '.' + function_node.name

        start, end = self.file_to_index.get_node_start_end(function_node)
        if start is None:
            return

        # Index this function as being overridden by other functions for
        # the overridden: filter.
        for qualname in self.tree_analysis.overridden_functions[function_qualname]:
            name = qualname.rsplit('.')[-1]
            self.yield_needle(needle_type='py_overridden',
                              name=name, qualname=qualname,
                              start=start, end=end)

        # Index this function as overriding other functions for the
        # overrides: filter.
        for qualname in self.tree_analysis.overriding_functions[function_qualname]:
            name = qualname.rsplit('.')[-1]
            self.yield_needle(needle_type='py_overrides',
                              name=name, qualname=qualname,
                              start=start, end=end)

    def get_class_name(self, class_node):
        # Fully qualified class name: <module path>.<class name>.
        return self.file_to_index.abs_module_name + '.' + class_node.name

    def yield_needle(self, *args, **kwargs):
        # Thin wrapper so every needle goes through line_needle().
        needle = line_needle(*args, **kwargs)
        self.needles.append(needle)

    def yield_ref(self, start, end, ref):
        # Convert (row, col) positions to flat character offsets for refs.
        self.refs.append((
            self.file_to_index.char_offset(*start),
            self.file_to_index.char_offset(*end),
            ref,
        ))
class FileToIndex(FileToIndexBase):
    """Per-file indexer: tokenizes and AST-walks one Python source file to
    produce needles and refs."""

    def __init__(self, path, contents, plugin_name, tree, tree_analysis):
        """
        :arg tree_analysis: TreeAnalysisResult object with the results
            from the post-build analysis.
        """
        super(FileToIndex, self).__init__(path, contents, plugin_name, tree)
        self.tree_analysis = tree_analysis
        self.abs_module_name = path_to_module(tree_analysis.python_path, self.path)
        # Built lazily by the `visitor` property.
        self._visitor = None

    def is_interesting(self):
        # Combine the base-class check with the module-level .py/symlink test.
        return super(FileToIndex, self).is_interesting() and is_interesting(self.path)

    @property
    def visitor(self):
        """Return IndexingNodeVisitor for this file, lazily creating and
        running it if it doesn't exist yet.
        """
        if not self._visitor:
            # Token analysis must run first: the visitor resolves AST node
            # positions through the tables it produces.
            self.node_start_table, self.call_start_table = self.analyze_tokens()
            self._visitor = IndexingNodeVisitor(self, self.tree_analysis)
            syntax_tree = ast_parse(self.contents)
            self._visitor.visit(syntax_tree)
        return self._visitor

    def needles(self):
        # Index module name. For practical purposes, this includes
        # __init__.py files for packages even though that's not
        # _technically_ a module.
        yield file_needle('py_module',
                          name=local_name(self.abs_module_name),
                          qualname=self.abs_module_name)

    def needles_by_line(self):
        # Adapt the visitor's multi-line needles into per-line form.
        return iterable_per_line(
            with_start_and_end(
                split_into_lines(
                    self.visitor.needles
                )
            )
        )

    def refs(self):
        return self.visitor.refs

    def analyze_tokens(self):
        """Split the file into tokens and analyze them for data needed
        for indexing.

        Returns ``(node_start_table, call_start_table)``, mapping utf-8
        token starting points to (start, end) character-position pairs.
        """
        # Run the file contents through the tokenizer, both as unicode
        # and as a utf-8 encoded string. This will allow us to build
        # up a mapping between the byte offset and the character offset.
        token_gen = tokenize.generate_tokens(StringIO(self.contents).readline)
        utf8_token_gen = tokenize.generate_tokens(
            StringIO(self.contents.encode('utf-8')).readline)

        # These are a mapping from the utf-8 byte starting points provided by
        # the ast nodes, to the unicode character offset tuples for both the
        # start and the end points.
        node_start_table = {}
        call_start_table = {}
        node_type, node_start = None, None
        paren_level, paren_stack = 0, {}
        for unicode_token, utf8_token in izip(token_gen, utf8_token_gen):
            tok_type, tok_name, start, end, _ = unicode_token
            utf8_start = utf8_token[2]

            if tok_type == token.NAME:
                # AST nodes for classes and functions point to the position of
                # their 'def' and 'class' tokens. To get the position of their
                # names, we look for 'def' and 'class' tokens and store the
                # position of the token immediately following them.
                if node_start and node_type == 'definition':
                    node_start_table[node_start[0]] = (start, end)
                    node_type, node_start = None, None
                    continue

                if tok_name in ('def', 'class'):
                    node_type, node_start = 'definition', (utf8_start, start)
                    continue

                # Record all name nodes in the token table. Currently unused,
                # but will be needed for recording variable references.
                node_start_table[utf8_start] = (start, end)
                node_type, node_start = 'name', (utf8_start, start)
            elif tok_type == token.OP:
                # In order to properly capture the start and end of function
                # calls, we need to keep track of the parens. Put the
                # starting positions on a stack (here implemented with a dict
                # so that it can be sparse), but only if the previous node was
                # a name.
                if tok_name == '(':
                    if node_type == 'name':
                        paren_stack[paren_level] = node_start
                    paren_level += 1
                elif tok_name == ')':
                    paren_level -= 1
                    if paren_level in paren_stack:
                        call_start = paren_stack.pop(paren_level)
                        call_start_table[call_start[0]] = (call_start[1], end)
                    node_type, node_start = None, None
            else:
                # Any other token breaks a pending name/definition sequence.
                node_type, node_start = None, None

        return node_start_table, call_start_table

    def get_node_start_end(self, node):
        """Return start and end positions within the file for the given
        AST Node.

        Returns (None, None) for node kinds without a recorded position.
        """
        loc = node.lineno, node.col_offset
        if isinstance(node, ast.ClassDef) or isinstance(node, ast.FunctionDef):
            start, end = self.node_start_table.get(loc, (None, None))
        elif isinstance(node, ast.Call):
            start, end = self.call_start_table.get(loc, (None, None))
        else:
            start, end = None, None
        return start, end
def file_needle(needle_type, name, qualname=None):
    """Build a per-file needle: a (needle_type, payload) pair whose payload
    always carries the name and, when supplied, the qualified name."""
    payload = dict(name=name)
    if qualname:
        payload['qualname'] = qualname
    return needle_type, payload
def line_needle(needle_type, name, start, end, qualname=None):
    """Build a per-line needle: (needle_type, payload, extent).

    *start* and *end* are (row, col) pairs; the payload records only the
    columns, while the Extent carries both rows and columns.
    """
    payload = {
        'name': name,
        'start': start[1],
        'end': end[1],
    }
    if qualname:
        payload['qualname'] = qualname
    extent = Extent(Position(row=start[0], col=start[1]),
                    Position(row=end[0], col=end[1]))
    return needle_type, payload, extent
def is_interesting(path):
    """Decide whether the file at *path* is worth analyzing.

    Only regular (non-symlink) ``.py`` files qualify.
    """
    if not path.endswith('.py'):
        return False
    return not islink(path)
|
{
"content_hash": "1acbce33e95759f2bde87877a605d525",
"timestamp": "",
"source": "github",
"line_count": 342,
"max_line_length": 87,
"avg_line_length": 36.538011695906434,
"alnum_prop": 0.5698623559539052,
"repo_name": "jay-z007/dxr",
"id": "a6e9147122c56e3815e5b3c98e772a036414bffa",
"size": "12496",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dxr/plugins/python/indexers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1593"
},
{
"name": "C++",
"bytes": "81227"
},
{
"name": "CSS",
"bytes": "23441"
},
{
"name": "HTML",
"bytes": "42042"
},
{
"name": "IDL",
"bytes": "8448"
},
{
"name": "JavaScript",
"bytes": "74891"
},
{
"name": "Makefile",
"bytes": "9444"
},
{
"name": "Python",
"bytes": "720044"
},
{
"name": "Rust",
"bytes": "11607"
},
{
"name": "Shell",
"bytes": "2524"
}
],
"symlink_target": ""
}
|
from __future__ import generators
import logging
_logger = logging.getLogger(__name__)
import os
from urlparse import urljoin, urldefrag
from urllib import pathname2url
from rdflib.term import URIRef, Variable, _XSD_PFX
class Namespace(URIRef):
    """A URIRef that mints terms inside its namespace via attribute or
    item access (e.g. ``ns.title`` or ``ns['title']``)."""

    @property
    def title(self):
        # Explicit property so `ns.title` produces the namespace term
        # instead of falling through to str.title().
        return URIRef(self + 'title')

    def term(self, name):
        """Return the URIRef for *name* within this namespace."""
        return URIRef(self + name)

    def __getitem__(self, key, default=None):
        return self.term(key)

    def __getattr__(self, name):
        # Leave dunder lookups to the normal machinery (pickling, copying,
        # etc. probe for special names).
        if name.startswith("__"):
            raise AttributeError
        return self.term(name)
class NamespaceDict(dict):
    """Namespace that memoises generated terms in itself (a dict subclass).

    When a *context* graph is supplied, a warning is logged the first time
    a term is generated that does not appear as a subject in that graph.
    """

    def __new__(cls, uri=None, context=None):
        inst = dict.__new__(cls)
        inst.uri = uri  # TODO: do we need to set these both here and in __init__ ??
        inst.__context = context
        return inst

    def __init__(self, uri, context=None):
        self.uri = uri
        self.__context = context

    def term(self, name):
        # Memoised lookup: generate and cache the URIRef on first use.
        uri = self.get(name)
        if uri is None:
            uri = URIRef(self.uri + name)
            if self.__context and (uri, None, None) not in self.__context:
                _logger.warning("%s not defined" % uri)
            self[name] = uri
        return uri

    def __getattr__(self, name):
        return self.term(name)

    def __getitem__(self, key, default=None):
        return self.term(key) or default

    def __str__(self):
        return self.uri

    def __repr__(self):
        return """rdflib.namespace.NamespaceDict('%s')""" % str(self.uri)
class ClosedNamespace(object):
    """A namespace whose vocabulary is fixed up front.

    Terms are resolved from a table built at construction time, so asking
    for anything outside the original *terms* list raises instead of
    silently minting a new URI.
    """

    def __init__(self, uri, terms):
        self.uri = uri
        self.__uris = dict((term, URIRef(uri + term)) for term in terms)

    def term(self, name):
        """Look up *name*, raising if it is not part of this namespace."""
        if name not in self.__uris:
            raise Exception("term '%s' not in namespace '%s'" % (name, self.uri))
        return self.__uris[name]

    def __getitem__(self, key, default=None):
        return self.term(key)

    def __getattr__(self, name):
        # Dunder lookups must fall through to the default machinery.
        if name.startswith("__"):
            raise AttributeError
        return self.term(name)

    def __str__(self):
        return self.uri

    def __repr__(self):
        return """rdf.namespace.ClosedNamespace('%s')""" % str(self.uri)
class _RDFNamespace(ClosedNamespace):
    """Closed namespace for the RDF vocabulary.

    Besides the fixed terms, any integer-like name ``n`` resolves to the
    container-membership property ``rdf:_n``.
    """

    def __init__(self):
        super(_RDFNamespace, self).__init__(
            URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
            terms=[
                # Syntax Names
                "RDF", "Description", "ID", "about", "parseType", "resource", "li", "nodeID", "datatype",

                # RDF Classes
                "Seq", "Bag", "Alt", "Statement", "Property", "XMLLiteral", "List", "PlainLiteral",

                # RDF Properties
                "subject", "predicate", "object", "type", "value", "first", "rest",
                # and _n where n is a non-negative integer

                # RDF Resources
                "nil"]
        )

    def term(self, name):
        """Resolve *name*, treating integer names as rdf:_n membership
        properties."""
        try:
            i = int(name)
        except ValueError:
            # Fix: was the Python-2-only `except ValueError, e:` form with
            # an unused binding; `except ValueError:` works on py2 and py3.
            return super(_RDFNamespace, self).term(name)
        return URIRef("%s_%s" % (self.uri, i))
# Singleton: the RDF syntax namespace (handles rdf:_n membership terms).
RDF = _RDFNamespace()

# RDFS is closed: only the listed vocabulary terms may be resolved.
RDFS = ClosedNamespace(
    uri = URIRef("http://www.w3.org/2000/01/rdf-schema#"),
    terms = [
        "Resource", "Class", "subClassOf", "subPropertyOf", "comment", "label",
        "domain", "range", "seeAlso", "isDefinedBy", "Literal", "Container",
        "ContainerMembershipProperty", "member", "Datatype"]
    )

# OWL and XSD are open namespaces: any term may be derived from them.
OWL = Namespace('http://www.w3.org/2002/07/owl#')
XSD = Namespace(_XSD_PFX)
class NamespaceManager(object):
    """Tracks prefix <-> namespace bindings in a graph's store and computes
    QNames for URIs, caching the results."""

    def __init__(self, graph):
        self.graph = graph
        # Cache: uri -> (prefix, namespace, name), filled by compute_qname.
        self.__cache = {}
        self.__log = None
        # Standard bindings every manager starts with.
        self.bind("xml", u"http://www.w3.org/XML/1998/namespace")
        self.bind("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
        self.bind("rdfs", "http://www.w3.org/2000/01/rdf-schema#")

    def reset(self):
        # Drop memoised qname computations (e.g. after rebinding prefixes).
        self.__cache = {}

    def __get_store(self):
        return self.graph.store
    store = property(__get_store)

    def qname(self, uri):
        """Return *uri* as 'prefix:name' (just 'name' for the empty prefix)."""
        prefix, namespace, name = self.compute_qname(uri)
        if prefix=="":
            return name
        else:
            return ":".join((prefix, name))

    def normalizeUri(self,rdfTerm):
        """
        Takes an RDF Term and 'normalizes' it into a QName (using the registered prefix)
        or (unlike compute_qname) the Notation 3 form for URIs: <...URI...>
        """
        try:
            namespace, name = split_uri(rdfTerm)
            namespace = URIRef(namespace)
        except:
            # NOTE(review): bare except -- any failure to split the URI
            # falls through to the N3 forms below.
            if isinstance(rdfTerm,Variable):
                return "?%s"%rdfTerm
            else:
                return "<%s>"%rdfTerm
        prefix = self.store.prefix(namespace)
        if prefix is None and isinstance(rdfTerm,Variable):
            return "?%s"%rdfTerm
        elif prefix is None:
            return "<%s>"%rdfTerm
        else:
            qNameParts = self.compute_qname(rdfTerm)
            return ':'.join([qNameParts[0],qNameParts[-1]])

    def compute_qname(self, uri):
        # Memoised. Unknown namespaces get a generated '_N' prefix bound
        # into the store (N = current number of bindings).
        if not uri in self.__cache:
            namespace, name = split_uri(uri)
            namespace = URIRef(namespace)
            prefix = self.store.prefix(namespace)
            if prefix is None:
                prefix = "_%s" % len(list(self.store.namespaces()))
                self.bind(prefix, namespace)
            self.__cache[uri] = (prefix, namespace, name)
        return self.__cache[uri]

    def bind(self, prefix, namespace, override=True):
        """Bind *prefix* to *namespace* in the store.

        If the prefix is already taken by a different namespace, a numbered
        variant ('prefix1', 'prefix2', ...) is bound instead. If the
        namespace already has a prefix, that binding is kept unless
        *override* is set or the old prefix was auto-generated ('_N').
        """
        namespace = URIRef(namespace)
        # When documenting explain that override only applies in what cases
        if prefix is None:
            prefix = ''
        bound_namespace = self.store.namespace(prefix)
        if bound_namespace and bound_namespace!=namespace:
            # prefix already in use for different namespace
            #
            # append number to end of prefix until we find one
            # that's not in use.
            if not prefix:
                prefix = "default"
            num = 1
            while 1:
                new_prefix = "%s%s" % (prefix, num)
                if not self.store.namespace(new_prefix):
                    break
                num +=1
            self.store.bind(new_prefix, namespace)
        else:
            bound_prefix = self.store.prefix(namespace)
            if bound_prefix is None:
                self.store.bind(prefix, namespace)
            elif bound_prefix == prefix:
                pass # already bound
            else:
                if override or bound_prefix.startswith("_"): # or a generated prefix
                    self.store.bind(prefix, namespace)

    def namespaces(self):
        """Generator of (prefix, namespace URIRef) pairs from the store."""
        for prefix, namespace in self.store.namespaces():
            namespace = URIRef(namespace)
            yield prefix, namespace

    def absolutize(self, uri, defrag=1):
        """Resolve *uri* against a file: URL of the current working
        directory; strip the fragment when *defrag* is true."""
        base = urljoin("file:", pathname2url(os.getcwd()))
        result = urljoin("%s/" % base, uri, allow_fragments=not defrag)
        if defrag:
            result = urldefrag(result)[0]
        if not defrag:
            # Preserve a trailing '#' that urljoin may have dropped.
            if uri and uri[-1]=="#" and result[-1]!="#":
                result = "%s#" % result
        return URIRef(result)
# From: http://www.w3.org/TR/REC-xml#NT-CombiningChar
#
# * Name start characters must have one of the categories Ll, Lu, Lo,
# Lt, Nl.
#
# * Name characters other than Name-start characters must have one of
# the categories Mc, Me, Mn, Lm, or Nd.
#
# * Characters in the compatibility area (i.e. with character code
# greater than #xF900 and less than #xFFFE) are not allowed in XML
# names.
#
# * Characters which have a font or compatibility decomposition
# (i.e. those with a "compatibility formatting tag" in field 5 of the
# database -- marked by field 5 beginning with a "<") are not allowed.
#
# * The following characters are treated as name-start characters rather
# than name characters, because the property file classifies them as
# Alphabetic: [#x02BB-#x02C1], #x0559, #x06E5, #x06E6.
#
# * Characters #x20DD-#x20E0 are excluded (in accordance with Unicode
# 2.0, section 5.14).
#
# * Character #x00B7 is classified as an extender, because the property
# list so identifies it.
#
# * Character #x0387 is added as a name character, because #x00B7 is its
# canonical equivalent.
#
# * Characters ':' and '_' are allowed as name-start characters.
#
# * Characters '-' and '.' are allowed as name characters.
from unicodedata import category, decomposition
# Unicode general categories allowed for the first character of an XML name,
# for subsequent characters, and explicitly allowed extra characters.
NAME_START_CATEGORIES = ["Ll", "Lu", "Lo", "Lt", "Nl"]
NAME_CATEGORIES = NAME_START_CATEGORIES + ["Mc", "Me", "Mn", "Lm", "Nd"]
ALLOWED_NAME_CHARS = [u"\u00B7", u"\u0387", u"-", u".", u"_"]


# http://www.w3.org/TR/REC-xml-names/#NT-NCName
#  [4] NCName ::= (Letter | '_') (NCNameChar)* /* An XML Name, minus
#      the ":" */
#  [5] NCNameChar ::= Letter | Digit | '.' | '-' | '_' | CombiningChar
#      | Extender
def is_ncname(name):
    """Return 1 if *name* is a valid XML NCName, otherwise 0.

    A valid NCName starts with "_" or a character in a name-start Unicode
    category, and every following character is in a name category or is one
    of the explicitly allowed characters ("-", ".", "_", middle dot, #x0387).

    Fix: an empty string previously raised IndexError; it now returns 0,
    since an NCName must contain at least one character.
    """
    if not name:
        return 0
    first = name[0]
    if first != "_" and category(first) not in NAME_START_CATEGORIES:
        return 0
    for c in name[1:]:
        if category(c) not in NAME_CATEGORIES and c not in ALLOWED_NAME_CHARS:
            return 0
    # NOTE: characters in the Unicode compatibility area are not rejected
    # here, although the XML spec excludes them (see module header comment).
    return 1
XMLNS = "http://www.w3.org/XML/1998/namespace"


def split_uri(uri):
    """Split *uri* into a ``(namespace, localname)`` pair.

    The split point is chosen so that the local name is the longest suffix
    of *uri* that forms a valid name (scanning backwards until a character
    that cannot appear in a name, then forwards to a valid name-start
    character). The special XML namespace is handled explicitly.

    Raises a plain ``Exception`` when no valid split point exists.
    """
    if uri.startswith(XMLNS):
        return (XMLNS, uri.split(XMLNS)[1])
    length = len(uri)
    # Scan backwards from the end of the URI. (range() replaces the
    # Python-2-only xrange(); behavior is identical.)
    for i in range(0, length):
        c = uri[-i - 1]
        if not category(c) in NAME_CATEGORIES:
            if c in ALLOWED_NAME_CHARS:
                continue
            # Found a non-name character: scan forward for the first
            # name-start character after it.
            for j in range(-1 - i, length):
                if category(uri[j]) in NAME_START_CATEGORIES or uri[j] == "_":
                    ns = uri[:j]
                    if not ns:
                        break
                    ln = uri[j:]
                    return (ns, ln)
            break
    raise Exception("Can't split '%s'" % uri)
|
{
"content_hash": "3fd360f687a2c62d45dbd42e2cbe64b1",
"timestamp": "",
"source": "github",
"line_count": 325,
"max_line_length": 97,
"avg_line_length": 31.476923076923075,
"alnum_prop": 0.5586510263929618,
"repo_name": "robdennis/sideboard",
"id": "935f1a1b3f09c95470c36c8500ac52534bb6b156",
"size": "10230",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/plugins/different_versions/rdflib3_0_0/env/lib/python2.7/site-packages/rdflib/namespace.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "16435"
},
{
"name": "JavaScript",
"bytes": "900051"
},
{
"name": "Python",
"bytes": "7132717"
},
{
"name": "Shell",
"bytes": "26299"
}
],
"symlink_target": ""
}
|
"""
.. moduleauthor:: Jonas Berg
test_minimalmodbus: Unittests for the :mod:`minimalmodbus` module.
For each function, the following tests are performed:
* Known results
* Invalid input value
* Invalid input type
This unittest suite uses a mock/dummy serial port from the module :mod:`dummy_serial`,
so it is possible to test the functionality using previously recorded communication data.
With dummy responses, it is also possible to simulate errors in the communication
from the slave. A few different types of communication errors are tested, as seen in this table.
===================================== ===================== =================================
Simulated response error Tested using function Tested using Modbus function code
===================================== ===================== =================================
No response read_bit 2
Wrong CRC in response write_register 16
Wrong slave address in response write_register 16
Wrong function code in response write_register 16
Slave indicates an error write_register 16
Wrong byte count in response read_bit 2
Wrong register address in response write_register 16
Wrong number of registers in response write_bit 15
Wrong number of registers in response write_register 16
Wrong write data in response write_bit 5
Wrong write data in response write_register 6
===================================== ===================== =================================
"""
__author__ = "Jonas Berg"
__license__ = "Apache License, Version 2.0"
import sys
import time
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
import unittest
sys.path.append(".")
import tests.dummy_serial as dummy_serial
import minimalmodbus
from minimalmodbus import IllegalRequestError
from minimalmodbus import InvalidResponseError
from minimalmodbus import LocalEchoError
from minimalmodbus import MasterReportedException
from minimalmodbus import ModbusException
from minimalmodbus import NegativeAcknowledgeError
from minimalmodbus import NoResponseError
from minimalmodbus import SlaveDeviceBusyError
from minimalmodbus import SlaveReportedException
from minimalmodbus import _Payloadformat
from minimalmodbus import BYTEORDER_BIG
from minimalmodbus import BYTEORDER_LITTLE
from minimalmodbus import BYTEORDER_BIG_SWAP
from minimalmodbus import BYTEORDER_LITTLE_SWAP
VERBOSITY = 0
"""Verbosity level for the unit testing. Use value 0 or 2. Note that it only has an effect for Python 2.7 and above."""
SHOW_ERROR_MESSAGES_FOR_ASSERTRAISES = False
"""Set this to :const:`True` for printing the error messages caught by assertRaises().
If set to :const:`True`, any unintentional error messages raised during the processing of the command in :meth:`.assertRaises` are also caught (not counted). It will be printed in the short form, and will show no traceback. It can also be useful to set :data:`VERBOSITY` = 2.
"""
_LARGE_NUMBER_OF_BYTES = 1000
###########################################################
# For showing the error messages caught by assertRaises() #
# and to implement a better assertAlmostEqual() #
###########################################################
class _NonexistantError(Exception):
    """Sentinel exception type that is never raised.

    Passing this to the stock ``assertRaises()`` guarantees a mismatch, so
    the real exception escapes and can be caught and printed by
    :meth:`ExtendedTestCase.assertRaises` when
    :data:`SHOW_ERROR_MESSAGES_FOR_ASSERTRAISES` is enabled.
    """

    pass
class ExtendedTestCase(unittest.TestCase):
    """TestCase with error-message printing and ratio-based float comparison.

    Use :data:`test_minimalmodbus.SHOW_ERROR_MESSAGES_FOR_ASSERTRAISES` = :const:`True`
    in order to print the error messages caught by assertRaises(). It can also
    be useful to set :data:`test_minimalmodbus.VERBOSITY` = 2.

    Based on https://stackoverflow.com/questions/8672754/how-to-show-the-error-messages-caught-by-assertraises-in-unittest-in-python2-7
    """

    def assertRaises(  # type: ignore
        self,
        excClass: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
        callableObj: Callable[..., Any],
        *args: Any,
        **kwargs: Any
    ) -> None:
        """Prints the caught error message (if :data:`SHOW_ERROR_MESSAGES_FOR_ASSERTRAISES` is :const:`True`)."""
        if SHOW_ERROR_MESSAGES_FOR_ASSERTRAISES:
            try:
                # Expecting an exception type that can never be raised lets the
                # actual exception propagate here, where it is printed.
                unittest.TestCase.assertRaises(
                    self, _NonexistantError, callableObj, *args, **kwargs
                )
            except Exception:
                # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
                # still abort a debug test run.
                minimalmodbus._print_out("\n    " + repr(sys.exc_info()[1]))
        else:
            unittest.TestCase.assertRaises(self, excClass, callableObj, *args, **kwargs)

    def assertAlmostEqualRatio(
        self, first: float, second: float, epsilon: float = 1.000001
    ) -> None:
        """A function to compare floats, with ratio instead of "number_of_places".

        This is slightly different than the standard unittest.assertAlmostEqual()

        Args:
            * first: Input argument for comparison
            * second: Input argument for comparison
            * epsilon: Largest allowed ratio of largest to smallest of the two input arguments

        Raises AssertionError if the signs differ, if exactly one argument is
        zero, or if the ratio of magnitudes exceeds *epsilon*.
        """
        if first == second:
            return
        if (first < 0 and second >= 0) or (first >= 0 and second < 0):
            raise AssertionError(
                "The arguments have different signs: {0!r} and {1!r}".format(
                    first, second
                )
            )
        if first == 0 or second == 0:
            # Bug fix: this case previously crashed with ZeroDivisionError.
            # Zero vs non-zero means "not almost equal" for a ratio comparison.
            raise AssertionError(
                "The arguments are not equal: {0!r} and {1!r}. Epsilon is {2!r}.".format(
                    first, second, epsilon
                )
            )
        # Bug fix: compare magnitudes. The previous max()/min() on the signed
        # values gave a ratio below 1 for two negative numbers, so clearly
        # different negative values were accepted as equal.
        ratio = max(abs(first), abs(second)) / float(min(abs(first), abs(second)))
        if ratio > epsilon:
            raise AssertionError(
                "The arguments are not equal: {0!r} and {1!r}. Epsilon is {2!r}.".format(
                    first, second, epsilon
                )
            )
##############################
# Constants for type testing #
##############################
# Values that are not integers, used to probe type checking.
# (The misspelling "INTERGERS" is kept: these names are referenced throughout
# the test module.)
_NOT_INTERGERS_OR_NONE = [
    0.0,
    1.0,
    "1",
    b"1",
    ["1"],
    [b"1"],
    [1],
    ["\x00\x2d\x00\x58"],
    ["A", "B", "C"],
]
_NOT_INTERGERS = _NOT_INTERGERS_OR_NONE + [None]

# Values that are neither int nor float.
_NOT_NUMERICALS_OR_NONE = [
    "1",
    b"1",
    ["1"],
    [b"1"],
    [1],
    ["\x00\x2d\x00\x58"],
    ["A", "B", "C"],
]
_NOT_NUMERICALS = _NOT_NUMERICALS_OR_NONE + [None]

# Values that are not str (note: booleans are included here, unlike in the
# integer lists above, where True/False would count as ints).
_NOT_STRINGS_OR_NONE = [
    1,
    0.0,
    1.0,
    b"1",
    ["1"],
    [b"1"],
    [1],
    ["\x00\x2d\x00\x58"],
    ["A", "B", "C"],
    True,
    False,
]
_NOT_STRINGS = _NOT_STRINGS_OR_NONE + [None]

# Values that are not bytes.
_NOT_BYTES_OR_NONE = [
    1,
    0.0,
    1.0,
    "1",
    ["1"],
    [1],
    "ABC",
    ["\x00\x2d\x00\x58"],
    ["A", "B", "C"],
    True,
    False,
]
_NOT_BYTES = _NOT_BYTES_OR_NONE + [None]

# Values that are not booleans (ints 0/1 deliberately included).
_NOT_BOOLEANS = [
    "True",
    "False",
    b"1",
    [b"1"],
    -1,
    1,
    2,
    0,
    8,
    9999999,
    -1.0,
    1.0,
    0.0,
    [True],
    [False],
    [1],
    [1.0],
]

# Values that are not lists of integers.
_NOT_INTLISTS = [
    0,
    1,
    2,
    -1,
    True,
    False,
    0.0,
    1.0,
    "1",
    ["1"],
    b"1",
    [b"1"],
    None,
    ["\x00\x2d\x00\x58"],
    ["A", "B", "C"],
    [1.0],
    [1.0, 2.0],
]
####################
# Payload handling #
####################
class TestCreatePayload(ExtendedTestCase):
    """Test _create_payload(): building the request payload for each call type.

    Each entry in KNOWN_REQUESTS holds the positional arguments for
    _create_payload() and the expected request payload. The comment above
    each entry shows the corresponding high-level API call.
    """

    KNOWN_REQUESTS = [
        # read_bit(61, functioncode=2)
        ((2, 61, None, 0, 0, 1, False, False, _Payloadformat.BIT), "\x00\x3D\x00\x01"),
        # read_bit(62, functioncode=1)
        ((1, 62, None, 0, 0, 1, False, False, _Payloadformat.BIT), "\x00\x3E\x00\x01"),
        # write_bit(71, 1, functioncode=5)
        ((5, 71, 1, 0, 0, 1, False, False, _Payloadformat.BIT), "\x00\x47\xFF\x00"),
        # read_bits(196, 22, functioncode=2)
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        ((2, 196, None, 0, 0, 22, False, False, _Payloadformat.BITS), "\x00\xC4\x00\x16"),
        # read_bits(19, 19, functioncode=1)
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        ((1, 19, None, 0, 0, 19, False, False, _Payloadformat.BITS), "\x00\x13\x00\x13"),
        # write_bits(19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0])
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        (
            (15, 19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0], 0, 0, 10, False, False, _Payloadformat.BITS),
            "\x00\x13\x00\x0A\x02\xCD\x01",
        ),
        # read_register(289, 0, functioncode=3)
        ((3, 289, None, 0, 1, 0, False, False, _Payloadformat.REGISTER), "\x01\x21\x00\x01"),
        # read_register(14, 0, functioncode=4)
        ((4, 14, None, 0, 1, 0, False, False, _Payloadformat.REGISTER), "\x00\x0E\x00\x01"),
        # write_register(35, 20, functioncode=16)
        ((16, 35, 20, 0, 1, 0, False, False, _Payloadformat.REGISTER), "\x00\x23\x00\x01\x02\x00\x14"),
        # write_register(45, 88, functioncode=6)
        ((6, 45, 88, 0, 1, 0, False, False, _Payloadformat.REGISTER), "\x00\x2D\x00\x58"),
        # write_register(101, -5, signed=True)
        ((16, 101, -5, 0, 1, 0, True, False, _Payloadformat.REGISTER), "\x00\x65\x00\x01\x02\xFF\xFB"),
        # write_register(101, -5, 1, signed=True)
        ((16, 101, -5, 1, 1, 0, True, False, _Payloadformat.REGISTER), "\x00\x65\x00\x01\x02\xFF\xCE"),
        # read_long(102)
        ((3, 102, None, 0, 2, 0, False, False, _Payloadformat.LONG), "\x00\x66\x00\x02"),
        # read_long(102, functioncode=4)
        ((4, 102, None, 0, 2, 0, False, False, _Payloadformat.LONG), "\x00\x66\x00\x02"),
        # read_long(256)
        ((3, 256, None, 0, 2, 0, False, False, _Payloadformat.LONG), "\x01\x00\x00\x02"),
        # write_long(102, 5)
        ((16, 102, 5, 0, 2, 0, False, False, _Payloadformat.LONG), "\x00\x66\x00\x02\x04\x00\x00\x00\x05"),
        # write_long(102, 5, signed=True)
        ((16, 102, 5, 0, 2, 0, True, False, _Payloadformat.LONG), "\x00\x66\x00\x02\x04\x00\x00\x00\x05"),
        # write_long(102, -5, signed=True)
        ((16, 102, -5, 0, 2, 0, True, False, _Payloadformat.LONG), "\x00\x66\x00\x02\x04\xFF\xFF\xFF\xFB"),
        # read_float(103, functioncode=3, number_of_registers=2)
        ((3, 103, None, 0, 2, 0, False, False, _Payloadformat.FLOAT), "\x00\x67\x00\x02"),
        # read_float(103, functioncode=3, number_of_registers=4)
        ((3, 103, None, 0, 4, 0, False, False, _Payloadformat.FLOAT), "\x00\x67\x00\x04"),
        # read_float(103, functioncode=4, number_of_registers=2)
        ((4, 103, None, 0, 2, 0, False, False, _Payloadformat.FLOAT), "\x00\x67\x00\x02"),
        # write_float(103, 1.1, number_of_registers=2) OK compare to recorded data
        ((16, 103, 1.1, 0, 2, 0, False, False, _Payloadformat.FLOAT), "\x00\x67\x00\x02\x04\x3F\x8C\xCC\xCD"),
        # write_float(103, 1.1, number_of_registers=4) OK compare to recorded data
        (
            (16, 103, 1.1, 0, 4, 0, False, False, _Payloadformat.FLOAT),
            "\x00\x67\x00\x04\x08\x3F\xF1\x99\x99\x99\x99\x99\x9A",
        ),
        # read_string(104, 1)
        ((3, 104, None, 0, 1, 0, False, False, _Payloadformat.STRING), "\x00\x68\x00\x01"),
        # read_string(104, 4)
        ((3, 104, None, 0, 4, 0, False, False, _Payloadformat.STRING), "\x00\x68\x00\x04"),
        # read_string(104, 4, functioncode=4)
        ((4, 104, None, 0, 4, 0, False, False, _Payloadformat.STRING), "\x00\x68\x00\x04"),
        # write_string(104, 'A', 1)  (space-padded to the register count)
        ((16, 104, "A", 0, 1, 0, False, False, _Payloadformat.STRING), "\x00\x68\x00\x01\x02A "),
        # write_string(104, 'A', 4)
        ((16, 104, "A", 0, 4, 0, False, False, _Payloadformat.STRING), "\x00\x68\x00\x04\x08A       "),
        # write_string(104, 'ABCDEFGH', 4)
        ((16, 104, "ABCDEFGH", 0, 4, 0, False, False, _Payloadformat.STRING), "\x00\x68\x00\x04\x08ABCDEFGH"),
        # read_registers(105, 1)
        ((3, 105, None, 0, 1, 0, False, False, _Payloadformat.REGISTERS), "\x00\x69\x00\x01"),
        # read_registers(105, 3)
        ((3, 105, None, 0, 3, 0, False, False, _Payloadformat.REGISTERS), "\x00\x69\x00\x03"),
        # read_registers(105, 7, functioncode=4)
        ((4, 105, None, 0, 7, 0, False, False, _Payloadformat.REGISTERS), "\x00\x69\x00\x07"),
        # write_registers(105, [2])
        ((16, 105, [2], 0, 1, 0, False, False, _Payloadformat.REGISTERS), "\x00\x69\x00\x01\x02\x00\x02"),
        # write_registers(105, [2, 4, 8])
        (
            (16, 105, [2, 4, 8], 0, 3, 0, False, False, _Payloadformat.REGISTERS),
            "\x00\x69\x00\x03\x06\x00\x02\x00\x04\x00\x08",
        ),
    ]

    def testKnownValues(self) -> None:
        for arguments, expected_payload in self.KNOWN_REQUESTS:
            self.assertEqual(
                minimalmodbus._create_payload(*arguments), expected_payload
            )

    def testWrongValues(self) -> None:
        # NOTE: Most of the error checking is done in other methods.
        # Function code 25 is not a supported Modbus function code.
        self.assertRaises(
            ValueError,
            minimalmodbus._create_payload,
            25,
            104,
            "A",
            0,
            4,
            0,
            False,
            False,
            _Payloadformat.STRING,
        )
class TestParsePayload(ExtendedTestCase):
    """Test _parse_payload(): interpreting response payloads from the slave.

    KNOWN_EXACT_RESPONSES holds (arguments, expected result) pairs for cases
    with exact expected values; float reads are compared approximately inside
    testKnownValues(). INVALID_RESPONSES holds argument tuples that must
    raise InvalidResponseError. The comment above each entry shows the
    corresponding high-level API call.
    """

    KNOWN_EXACT_RESPONSES = [
        # read_bit(61, functioncode=2)
        (("\x01\x01", 2, 61, None, 0, 0, 1, False, False, _Payloadformat.BIT), 1),
        # read_bit(62, functioncode=1)
        (("\x01\x00", 1, 62, None, 0, 0, 1, False, False, _Payloadformat.BIT), 0),
        # write_bit(71, 1, functioncode=5)
        (("\x00\x47\xff\x00", 5, 71, 1, 0, 0, 1, False, False, _Payloadformat.BIT), None),
        # write_bit(72, 1, functioncode=15)
        (("\x00\x48\x00\x01", 15, 72, 1, 0, 0, 1, False, False, _Payloadformat.BIT), None),
        # read_bits(196, 22, functioncode=2)
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        (
            ("\x03\xAC\xDB\x35", 2, 196, None, 0, 0, 22, False, False, _Payloadformat.BITS),
            [0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1],
        ),
        # read_bits(19, 19, functioncode=1)
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        (
            ("\x03\xCD\x6B\x05", 1, 19, None, 0, 0, 19, False, False, _Payloadformat.BITS),
            [1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1],
        ),
        # write_bits(19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0])
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        (
            ("\x00\x13\x00\x0A", 15, 19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0], 0, 0, 10, False, False, _Payloadformat.BITS),
            None,
        ),
        # read_register(289, 0, functioncode=3)
        (("\x02\x03\x02", 3, 289, None, 0, 1, 0, False, False, _Payloadformat.REGISTER), 770),
        # read_register(14, 0, functioncode=4)
        (("\x02\x03\x70", 4, 14, None, 0, 1, 0, False, False, _Payloadformat.REGISTER), 880),
        # write_register(35, 20, functioncode=16)
        (("\x00#\x00\x01", 16, 35, 20, 0, 1, 0, False, False, _Payloadformat.REGISTER), None),
        # write_register(45, 88, functioncode=6)
        (("\x00\x2d\x00\x58", 6, 45, 88, 0, 1, 0, False, False, _Payloadformat.REGISTER), None),
        # write_register(101, -5, signed=True)
        (("\x00e\x00\x01", 16, 101, -5, 0, 1, 0, True, False, _Payloadformat.REGISTER), None),
        # read_long(102)
        (("\x04\xff\xff\xff\xff", 3, 102, None, 0, 2, 0, False, False, _Payloadformat.LONG), 4294967295),
        # read_long(102, signed=True)
        (("\x04\xff\xff\xff\xff", 3, 102, None, 0, 2, 0, True, False, _Payloadformat.LONG), -1),
        # write_long(102, 5)
        (("\x00f\x00\x02", 16, 102, 5, 0, 2, 0, False, False, _Payloadformat.LONG), None),
        # write_long(102, -5, signed=True)
        (("\x00f\x00\x02", 16, 102, -5, 0, 2, 0, True, False, _Payloadformat.LONG), None),
        # write_float(103, 1.1, number_of_registers=2)
        (("\x00g\x00\x02", 16, 103, 1.1, 0, 2, 0, False, False, _Payloadformat.FLOAT), None),
        # write_float(103, 1.1, number_of_registers=4)
        (("\x00g\x00\x04", 16, 103, 1.1, 0, 4, 0, False, False, _Payloadformat.FLOAT), None),
        # read_string(104, 1)
        (("\x02AB", 3, 104, None, 0, 1, 0, False, False, _Payloadformat.STRING), "AB"),
        # read_string(104, 4)
        (("\x08ABCDEFGH", 3, 104, None, 0, 4, 0, False, False, _Payloadformat.STRING), "ABCDEFGH"),
        # write_string(104, 'A', 1)
        (("\x00h\x00\x01", 16, 104, "A", 0, 1, 0, False, False, _Payloadformat.STRING), None),
        # write_string(104, 'A', 4)
        (("\x00h\x00\x04", 16, 104, "A", 0, 4, 0, False, False, _Payloadformat.STRING), None),
        # write_string(104, 'ABCDEFGH', 4)
        (("\x00h\x00\x04", 16, 104, "ABCDEFGH", 0, 4, 0, False, False, _Payloadformat.STRING), None),
        # read_registers(105, 1)
        (("\x02\x00\x10", 3, 105, None, 0, 1, 0, False, False, _Payloadformat.REGISTERS), [16]),
        # read_registers(105, 3)
        (("\x06\x00\x10\x00\x20\x00\x40", 3, 105, None, 0, 3, 0, False, False, _Payloadformat.REGISTERS), [16, 32, 64]),
        # write_registers(105, [2])
        (("\x00i\x00\x01", 16, 105, [2], 0, 1, 0, False, False, _Payloadformat.REGISTERS), None),
        # write_registers(105, [2, 4, 8])
        (("\x00i\x00\x03", 16, 105, [2, 4, 8], 0, 3, 0, False, False, _Payloadformat.REGISTERS), None),
    ]

    INVALID_RESPONSES = [
        # read_bit(63, functioncode=2): slave gives wrong byte count
        ("\x02\x01", 2, 63, None, 0, 0, 1, False, False, _Payloadformat.BIT),
        # write_bit(73, 1, functioncode=15): wrong number of registers
        ("\x00\x49\x00\x02", 15, 73, 1, 0, 0, 1, False, False, _Payloadformat.BIT),
        # write_bit(74, 1, functioncode=5): wrong write data
        ("\x00\x47\x00\x00", 5, 74, 1, 0, 0, 1, False, False, _Payloadformat.BIT),
        # write_bit(73, 1, functioncode=15): wrong number of registers
        # (duplicate kept from the original test set)
        ("\x00\x49\x00\x02", 15, 73, 1, 0, 0, 1, False, False, _Payloadformat.BIT),
        # write_bit(74, 1, functioncode=5): wrong write data (address)
        # (duplicate kept from the original test set)
        ("\x00\x47\x00\x00", 5, 74, 1, 0, 0, 1, False, False, _Payloadformat.BIT),
        # read_bits(196, 22, functioncode=2): wrong number of bits
        ("\x03\xAC\xDB\x35", 2, 196, None, 0, 0, 7, False, False, _Payloadformat.REGISTER),
        # read_register(202, 0, functioncode=3): too long response
        ("\x02\x00\x00\x09", 3, 202, None, 0, 1, 0, False, False, _Payloadformat.REGISTER),
        # read_register(203, 0, functioncode=3): too short response
        ("\x02\x09", 3, 203, None, 0, 1, 0, False, False, _Payloadformat.REGISTER),
        # write_register(52, 99, functioncode=16): wrong number of registers
        ("\x00\x34\x00\x02", 16, 52, 99, 0, 1, 0, False, False, _Payloadformat.REGISTER),
        # write_register(53, 99, functioncode=16): wrong register address
        ("\x00\x36\x00\x01", 16, 53, 99, 0, 1, 0, False, False, _Payloadformat.REGISTER),
        # write_register(55, 99, functioncode=6): wrong write data
        ("\x00\x36\x00\x01", 6, 55, 99, 0, 1, 0, False, False, _Payloadformat.REGISTER),
        # read_registers(105, 3): wrong number of registers
        ("\x06\x00\x10\x00\x20\x00\x40", 3, 105, None, 0, 4, 0, False, False, _Payloadformat.REGISTERS),
    ]

    def testKnownValues(self) -> None:
        for arguments, expected in self.KNOWN_EXACT_RESPONSES:
            self.assertEqual(minimalmodbus._parse_payload(*arguments), expected)

        # Float reads require approximate comparison.
        # read_float(103, functioncode=3, number_of_registers=2)
        parsed = minimalmodbus._parse_payload(
            "\x04\x3f\x80\x00\x00", 3, 103, None, 0, 2, 0, False, False, _Payloadformat.FLOAT
        )
        assert isinstance(parsed, float)
        self.assertAlmostEqual(parsed, 1.0)

        # read_float(103, functioncode=3, number_of_registers=4)
        parsed = minimalmodbus._parse_payload(
            "\x08\xc0\x00\x00\x00\x00\x00\x00\x00", 3, 103, None, 0, 4, 0, False, False, _Payloadformat.FLOAT
        )
        assert isinstance(parsed, float)
        self.assertAlmostEqual(parsed, -2.0)

        # read_float(103, functioncode=4, number_of_registers=2)
        parsed = minimalmodbus._parse_payload(
            "\x04\x72\x38\x47\x25", 4, 103, None, 0, 2, 0, False, False, _Payloadformat.FLOAT
        )
        assert isinstance(parsed, float)
        self.assertAlmostEqualRatio(parsed, 3.65e30)

    def testInvalidPayloads(self) -> None:
        for arguments in self.INVALID_RESPONSES:
            self.assertRaises(
                InvalidResponseError, minimalmodbus._parse_payload, *arguments
            )
class TestEmbedPayload(ExtendedTestCase):
    """Test _embed_payload(): wrapping a payload with address, functioncode and checksum.

    The ``knownValues`` table is also reused by TestExtractPayload and
    TestSanityEmbedExtractPayload, so its name and layout must not change.
    """

    # (slaveaddress, functioncode, mode, payload, framed message)
    knownValues = [
        (2, 2, "rtu", "123", "\x02\x02123X\xc2"),
        (1, 16, "rtu", "ABC", "\x01\x10ABC<E"),
        (0, 5, "rtu", "hjl", "\x00\x05hjl\x8b\x9d"),
        (1, 3, "rtu", "\x01\x02\x03", "\x01\x03\x01\x02\x03\t%"),
        (1, 3, "ascii", "123", ":010331323366\r\n"),
        (4, 5, "ascii", "\x01\x02\x03", ":0405010203F1\r\n"),
        (2, 2, "ascii", "123", ":020231323366\r\n"),
    ]

    def testKnownValues(self) -> None:
        for address, functioncode, mode, payload, expected in self.knownValues:
            self.assertEqual(
                minimalmodbus._embed_payload(address, mode, functioncode, payload),
                expected,
            )

    def testWrongInputValue(self) -> None:
        # Slave addresses outside 0..255
        for address, mode in [(256, "rtu"), (-1, "rtu"), (256, "ascii"), (-1, "ascii")]:
            self.assertRaises(
                ValueError, minimalmodbus._embed_payload, address, mode, 16, "ABC"
            )
        # Invalid Modbus modes (only lowercase "rtu"/"ascii" are accepted)
        for mode in ["rtuu", "RTU", "ASCII", "asci"]:
            self.assertRaises(
                ValueError, minimalmodbus._embed_payload, 1, mode, 16, "ABC"
            )
        # Invalid function codes
        for functioncode, mode in [(222, "rtu"), (-1, "rtu"), (222, "ascii"), (-1, "ascii")]:
            self.assertRaises(
                ValueError, minimalmodbus._embed_payload, 1, mode, functioncode, "ABC"
            )

    def testWrongInputType(self) -> None:
        for bad in _NOT_INTERGERS:
            for mode in ("rtu", "ascii"):
                # Bad slave address type, then bad function code type
                self.assertRaises(
                    TypeError, minimalmodbus._embed_payload, bad, mode, 16, "ABC"
                )
                self.assertRaises(
                    TypeError, minimalmodbus._embed_payload, 1, mode, bad, "ABC"
                )
        for bad in _NOT_STRINGS:
            # Bad mode type, then bad payload type
            self.assertRaises(
                TypeError, minimalmodbus._embed_payload, 1, bad, 16, "ABC"
            )
            for mode in ("rtu", "ascii"):
                self.assertRaises(
                    TypeError, minimalmodbus._embed_payload, 1, mode, 16, bad
                )
class TestExtractPayload(ExtendedTestCase):
    """Test _extract_payload(): stripping address, functioncode and checksum.

    Reuses the frame table from TestEmbedPayload, with message and payload
    roles swapped.
    """

    knownValues = TestEmbedPayload.knownValues

    def testKnownValues(self) -> None:
        for address, functioncode, mode, expected_payload, message in self.knownValues:
            self.assertEqual(
                minimalmodbus._extract_payload(message, address, mode, functioncode),
                expected_payload,
            )

    def testWrongInputValue(self) -> None:
        # (response, slaveaddress, mode, functioncode, expected exception)
        bad_responses = [
            # Wrong CRC from slave
            ("\x02\x02123X\xc3", 2, "rtu", 2, InvalidResponseError),
            # Wrong LRC from slave
            (":0202313233F1\r\n", 2, "ascii", 2, InvalidResponseError),
            # Error indication from slave (functioncode with high bit set)
            ("\x02\x82123q\x02", 2, "rtu", 2, SlaveReportedException),
            (":0282313233E6\r\n", 2, "ascii", 2, SlaveReportedException),
            # Too short message from slave
            ("ABC", 2, "rtu", 2, InvalidResponseError),
            ("ABCDEFGH", 2, "ascii", 2, InvalidResponseError),
            # Wrong functioncode from slave
            ("\x02\x72123B\x02", 2, "rtu", 2, InvalidResponseError),
            (":020431323364\r\n", 2, "ascii", 2, InvalidResponseError),
            # Missing ASCII header
            ("020231323366\r\n", 2, "ascii", 2, InvalidResponseError),
            # Wrong or incomplete ASCII footer
            (":020231323366", 2, "ascii", 2, InvalidResponseError),
            (":020231323366\r", 2, "ascii", 2, InvalidResponseError),
            (":020231323366\n", 2, "ascii", 2, InvalidResponseError),
            # Odd number of ASCII payload characters
            (":02023132366\r\n", 2, "ascii", 2, InvalidResponseError),
        ]
        for response, address, mode, functioncode, exception in bad_responses:
            self.assertRaises(
                exception,
                minimalmodbus._extract_payload,
                response,
                address,
                mode,
                functioncode,
            )

        # Invalid slave address values
        for address in [256, -1]:
            self.assertRaises(
                ValueError,
                minimalmodbus._extract_payload,
                "\x02\x02123X\xc2",
                address,
                "rtu",
                2,
            )
        # Valid slave addresses that do not match the response
        for address in [3, 95, 128]:
            self.assertRaises(
                InvalidResponseError,
                minimalmodbus._extract_payload,
                "\x02\x02123X\xc2",
                address,
                "rtu",
                2,
            )
        # Invalid functioncode values
        for functioncode in [128, 256, -1]:
            self.assertRaises(
                ValueError,
                minimalmodbus._extract_payload,
                "\x02\x02123X\xc2",
                2,
                "rtu",
                functioncode,
            )
        # Valid functioncodes that do not match the response
        for functioncode in [3, 95, 127]:
            self.assertRaises(
                InvalidResponseError,
                minimalmodbus._extract_payload,
                "\x02\x02123X\xc2",
                2,
                "rtu",
                functioncode,
            )
        # Invalid mode strings
        for mode in ["RTU", "ASCII", "asc", "", " "]:
            self.assertRaises(
                ValueError,
                minimalmodbus._extract_payload,
                "\x02\x02123X\xc2",
                2,
                mode,
                2,
            )

    def testWrongInputType(self) -> None:
        for bad in _NOT_INTERGERS:
            for mode in ("rtu", "ascii"):
                # Bad slaveaddress type, then bad functioncode type
                self.assertRaises(
                    TypeError,
                    minimalmodbus._extract_payload,
                    "\x02\x02123X\xc2",
                    bad,
                    mode,
                    2,
                )
                self.assertRaises(
                    TypeError,
                    minimalmodbus._extract_payload,
                    "\x02\x02123X\xc2",
                    2,
                    mode,
                    bad,
                )
        for bad in _NOT_STRINGS:
            # Bad message type, then bad mode type
            for mode in ("rtu", "ascii"):
                self.assertRaises(
                    TypeError, minimalmodbus._extract_payload, bad, 2, mode, 2
                )
            self.assertRaises(
                TypeError,
                minimalmodbus._extract_payload,
                "\x02\x02123X\xc2",
                2,
                bad,
                2,
            )
class TestSanityEmbedExtractPayload(ExtendedTestCase):
    """Round-trip sanity check: _extract_payload(_embed_payload(x)) == x."""

    knownValues = TestEmbedPayload.knownValues

    def testKnownValues(self) -> None:
        for slaveaddress, functioncode, mode, payload, _message in self.knownValues:
            framed = minimalmodbus._embed_payload(
                slaveaddress, mode, functioncode, payload
            )
            self.assertEqual(
                minimalmodbus._extract_payload(
                    framed, slaveaddress, mode, functioncode
                ),
                payload,
            )

    def testRange(self) -> None:
        # Round-trip a range of numeric payloads in both framing modes.
        for number in range(110):
            payload = str(number)
            for mode in ("rtu", "ascii"):
                framed = minimalmodbus._embed_payload(2, mode, 6, payload)
                self.assertEqual(
                    minimalmodbus._extract_payload(framed, 2, mode, 6), payload
                )
############################################
## Serial communication utility functions ##
############################################
class TestPredictResponseSize(ExtendedTestCase):
    """Test prediction of the expected response length for a given request.

    Each known value is (mode, functioncode, payload_to_slave, expected_size).
    """

    knownValues = [
        ("rtu", 1, "\x00\x3e\x00\x01", 6),
        ("rtu", 1, "\x00\x3e\x00\x07", 6),
        ("rtu", 1, "\x00\x3e\x00\x08", 6),
        ("rtu", 1, "\x00\x3e\x00\x09", 7),
        ("rtu", 2, "\x00\x3e\x00\x09", 7),
        ("rtu", 3, "AB\x00\x07", 19),
        ("rtu", 4, "AB\x00\x07", 19),
        ("rtu", 4, "AB\x01\x07", 531),
        ("rtu", 5, "\x00\x47\xff\x00", 8),
        ("rtu", 6, "\x00\x47\xFF\xFF", 8),
        ("rtu", 16, "\x00\x48\x00\x01\x01\x01", 8),
        ("ascii", 1, "\x00\x3e\x00\x01", 13),
        ("ascii", 1, "\x00\x3e\x00\x07", 13),
        ("ascii", 1, "\x00\x3e\x00\x08", 13),
        ("ascii", 1, "\x00\x3e\x00\x09", 15),
        ("ascii", 3, "AB\x00\x07", 39),
        ("ascii", 4, "AB\x00\x07", 39),
        ("ascii", 4, "AB\x01\x07", 1063),
        ("ascii", 5, "\x00\x47\xff\x00", 17),
        ("ascii", 16, "\x00\x48\x00\x01\x01\x01", 17),
    ]

    def testKnownValues(self) -> None:
        """Each known request should yield the expected predicted response size."""
        for mode, functioncode, payload_to_slave, knownvalue in self.knownValues:
            resultvalue = minimalmodbus._predict_response_size(
                mode, functioncode, payload_to_slave
            )
            self.assertEqual(resultvalue, knownvalue)

    def testRecordedRtuMessages(self) -> None:
        """Predicted sizes should match the lengths of recorded RTU responses."""
        ## Use the dictionary where the key is the 'message', and the item is the 'response'
        for message in GOOD_RTU_RESPONSES:
            slaveaddress = message[0]
            functioncode = message[1]
            messagestring = str(message, encoding="latin1")
            payload_to_slave = minimalmodbus._extract_payload(
                messagestring, slaveaddress, "rtu", functioncode
            )
            result = minimalmodbus._predict_response_size(
                "rtu", functioncode, payload_to_slave
            )
            responseFromSlave = GOOD_RTU_RESPONSES[message]
            self.assertEqual(result, len(responseFromSlave))

    def testRecordedAsciiMessages(self) -> None:
        """Predicted sizes should match the lengths of recorded ASCII responses."""
        ## Use the dictionary where the key is the 'message', and the item is the 'response'
        for message in GOOD_ASCII_RESPONSES:
            slaveaddress = int(message[1:3])
            functioncode = int(message[3:5])
            messagestring = str(message, encoding="latin1")
            payload_to_slave = minimalmodbus._extract_payload(
                messagestring, slaveaddress, "ascii", functioncode
            )
            result = minimalmodbus._predict_response_size(
                "ascii", functioncode, payload_to_slave
            )
            responseFromSlave = GOOD_ASCII_RESPONSES[message]
            self.assertEqual(result, len(responseFromSlave))

    def testWrongInputValue(self) -> None:
        """Invalid modes, function codes and too-short payloads should raise ValueError."""
        # Wrong mode
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "asciiii", 6, "ABCD"
        )
        # Wrong function code
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "ascii", 35, "ABCD"
        )
        # Wrong function code
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "rtu", 35, "ABCD"
        )
        # Too short message
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "ascii", 1, "ABC"
        )
        # Too short message
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "rtu", 1, "ABC"
        )
        # Too short message
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "ascii", 1, "AB"
        )
        # Too short message
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "ascii", 1, "A"
        )
        # Too short message
        self.assertRaises(
            ValueError, minimalmodbus._predict_response_size, "ascii", 1, ""
        )

    def testWrongInputType(self) -> None:
        """Non-string mode/payload and non-integer function code should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._predict_response_size, value, 1, "ABCD"
            )
            self.assertRaises(
                TypeError, minimalmodbus._predict_response_size, "rtu", 1, value
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._predict_response_size, "rtu", value, "ABCD"
            )
class TestCalculateMinimumSilentPeriod(ExtendedTestCase):
    """Test calculation of the minimum silent period (seconds) for a baudrate.

    Each known value is (baudrate, expected_silent_period_seconds).
    """

    knownValues = [
        (2400, 0.016),
        (2400.0, 0.016),
        (4800, 0.008),
        (9600, 0.004),
        (19200, 0.002),
        (38400, 0.00175),
        (57600, 0.00175),
        (115200, 0.00175),
        (128000, 0.00175),
        (230400, 0.00175),
        (4000000, 0.00175),
    ]

    def testKnownValues(self) -> None:
        """Each baudrate should map to (approximately) the known silent period."""
        for baudrate, knownresult in self.knownValues:
            result = minimalmodbus._calculate_minimum_silent_period(baudrate)
            self.assertAlmostEqualRatio(
                result, knownresult, 1.02
            )  # Allow 2% deviation from listed known values

    def testWrongInputValue(self) -> None:
        """Zero, negative and sub-1 baudrates should raise ValueError."""
        for value in [-2400, -2400.0, -1, -0.5, 0, 0.5, 0.9]:
            self.assertRaises(
                ValueError, minimalmodbus._calculate_minimum_silent_period, value
            )

    def testWrongInputType(self) -> None:
        """Non-numerical baudrates should raise TypeError."""
        for value in _NOT_NUMERICALS:
            self.assertRaises(
                TypeError, minimalmodbus._calculate_minimum_silent_period, value
            )
##############################
# String and num conversions #
##############################
class TestNumToOneByteString(ExtendedTestCase):
    """Test conversion of an integer (0-255) to a one-byte string."""

    knownValues = [
        (0, "\x00"),
        (7, "\x07"),
        (255, "\xff"),
    ]

    def testKnownValues(self) -> None:
        """Each known integer should map to the expected one-character string."""
        for inputvalue, knownstring in self.knownValues:
            resultstring = minimalmodbus._num_to_onebyte_string(inputvalue)
            self.assertEqual(resultstring, knownstring)

    def testKnownLoop(self) -> None:
        """Every value 0-255 should map to the corresponding chr() character."""
        for value in range(256):
            knownstring = chr(value)
            resultstring = minimalmodbus._num_to_onebyte_string(value)
            self.assertEqual(resultstring, knownstring)

    def testWrongInput(self) -> None:
        """Out-of-range values (below 0 or above 255) should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._num_to_onebyte_string, -1)
        self.assertRaises(ValueError, minimalmodbus._num_to_onebyte_string, 256)

    def testWrongType(self) -> None:
        """Non-integer inputs should raise TypeError."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._num_to_onebyte_string, value)
class TestNumToTwoByteString(ExtendedTestCase):
    """Test conversion of a number to a two-byte string.

    Each known value is
    (inputvalue, number_of_decimals, lsb_first, signed, expected_string).
    """

    knownValues = [
        (0.0, 0, False, False, "\x00\x00"),  # Range 0-65535
        (0, 0, False, False, "\x00\x00"),
        (0, 0, True, False, "\x00\x00"),
        (77.0, 1, False, False, "\x03\x02"),
        (77.0, 1, True, False, "\x02\x03"),
        (770, 0, False, False, "\x03\x02"),
        (770, 0, True, False, "\x02\x03"),
        (65535, 0, False, False, "\xff\xff"),
        (65535, 0, True, False, "\xff\xff"),
        (770, 0, False, True, "\x03\x02"),  # Range -32768 to 32767
        (77.0, 1, False, True, "\x03\x02"),
        (0.0, 0, False, True, "\x00\x00"),
        (0.0, 3, False, True, "\x00\x00"),
        (-1, 0, False, True, "\xff\xff"),
        (-1, 1, False, True, "\xff\xf6"),
        (-77, 0, False, True, "\xff\xb3"),
        (-770, 0, False, True, "\xfc\xfe"),
        (-77, 1, False, True, "\xfc\xfe"),
        (-32768, 0, False, True, "\x80\x00"),
        (32767, 0, False, True, "\x7f\xff"),
    ]

    def testKnownValues(self) -> None:
        """Each known input combination should yield the expected two-byte string."""
        for (
            inputvalue,
            number_of_decimals,
            lsb_first,
            signed,
            knownstring,
        ) in self.knownValues:
            resultstring = minimalmodbus._num_to_twobyte_string(
                inputvalue, number_of_decimals, lsb_first, signed
            )
            self.assertEqual(resultstring, knownstring)

    def testWrongInputValue(self) -> None:
        """Out-of-range values and decimal counts should raise ValueError.

        Checked for both byte orders, and for both the unsigned (0-65535)
        and the signed (-32768 to 32767) value ranges.
        """
        for lsb_first in [False, True]:
            # Range 0-65535
            self.assertRaises(
                ValueError, minimalmodbus._num_to_twobyte_string, 77, -1, lsb_first
            )
            self.assertRaises(
                ValueError, minimalmodbus._num_to_twobyte_string, 77, 11, lsb_first
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                77000,
                0,
                lsb_first,
            )  # Gives DeprecationWarning instead of ValueError for Python 2.6
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                65536,
                0,
                lsb_first,
            )
            self.assertRaises(
                ValueError, minimalmodbus._num_to_twobyte_string, 77, 4, lsb_first
            )
            self.assertRaises(
                ValueError, minimalmodbus._num_to_twobyte_string, -1, 0, lsb_first
            )
            self.assertRaises(
                ValueError, minimalmodbus._num_to_twobyte_string, -77, 1, lsb_first
            )
            # Range -32768 to 32767
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                77,
                -1,
                lsb_first,
                True,
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                -77000,
                0,
                lsb_first,
                True,
            )  # Gives DeprecationWarning instead of ValueError for Python 2.6
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                -32769,
                0,
                lsb_first,
                True,
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                32768,
                0,
                lsb_first,
                True,
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                77000,
                0,
                lsb_first,
                True,
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                77,
                4,
                lsb_first,
                True,
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._num_to_twobyte_string,
                -77,
                4,
                lsb_first,
                True,
            )

    def testWrongInputType(self) -> None:
        """Wrong types for value, decimals and boolean flags should raise TypeError."""
        for value in _NOT_NUMERICALS:
            self.assertRaises(
                TypeError, minimalmodbus._num_to_twobyte_string, value, 1, False, False
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._num_to_twobyte_string, 77, value, False, False
            )
        for value in _NOT_BOOLEANS:
            self.assertRaises(
                TypeError, minimalmodbus._num_to_twobyte_string, 77, 1, value, False
            )
            self.assertRaises(
                TypeError, minimalmodbus._num_to_twobyte_string, 77, 1, False, value
            )
class TestTwoByteStringToNum(ExtendedTestCase):
    """Test conversion of a two-byte string back to a number.

    Reuses the known-value table from TestNumToTwoByteString; only the
    MSB-first (lsb_first=False) entries apply here.
    """

    knownValues = TestNumToTwoByteString.knownValues

    def testKnownValues(self) -> None:
        """Each known MSB-first bytestring should decode to the known value."""
        for (
            knownvalue,
            number_of_decimals,
            lsb_first,
            signed,
            bytestring,
        ) in self.knownValues:
            if not lsb_first:
                resultvalue = minimalmodbus._twobyte_string_to_num(
                    bytestring, number_of_decimals, signed
                )
                self.assertEqual(resultvalue, knownvalue)

    def testWrongInputValue(self) -> None:
        """Wrong string lengths and out-of-range decimal counts should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._twobyte_string_to_num, "ABC", 1)
        self.assertRaises(ValueError, minimalmodbus._twobyte_string_to_num, "A", 1)
        self.assertRaises(ValueError, minimalmodbus._twobyte_string_to_num, "AB", -1)
        self.assertRaises(ValueError, minimalmodbus._twobyte_string_to_num, "AB", 11)

    def testWrongInputType(self) -> None:
        """Wrong types for bytestring, decimals and signed flag should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._twobyte_string_to_num, value, 1)
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._twobyte_string_to_num, "AB", value
            )
        for value in _NOT_BOOLEANS:
            self.assertRaises(
                TypeError, minimalmodbus._twobyte_string_to_num, "\x03\x02", 1, value
            )
class TestSanityTwoByteString(ExtendedTestCase):
    """Round-trip sanity check: number -> two-byte string -> number."""

    knownValues = TestNumToTwoByteString.knownValues

    def testSanity(self) -> None:
        """Round-trip known values (MSB-first only) and all 16-bit integers."""
        for (
            value,
            number_of_decimals,
            lsb_first,
            signed,
            bytestring,
        ) in self.knownValues:
            if not lsb_first:
                resultvalue = minimalmodbus._twobyte_string_to_num(
                    minimalmodbus._num_to_twobyte_string(
                        value, number_of_decimals, lsb_first, signed
                    ),
                    number_of_decimals,
                    signed,
                )
                self.assertEqual(resultvalue, value)
        for value in range(0x10000):
            resultvalue = minimalmodbus._twobyte_string_to_num(
                minimalmodbus._num_to_twobyte_string(value)
            )
            self.assertEqual(resultvalue, value)
class TestBytestringToBits(ExtendedTestCase):
    """Test conversion of a bytestring to a list of bits.

    Each known value is (bytestring, number_of_bits, expected_bit_list).
    Bits are listed least-significant first within each byte.
    """

    knownValues = [
        ("\x00", 1, [0]),
        ("\x01", 1, [1]),
        ("\x02", 2, [0, 1]),
        ("\x04", 3, [0, 0, 1]),
        ("\x08", 4, [0, 0, 0, 1]),
        ("\x10", 5, [0, 0, 0, 0, 1]),
        ("\x20", 6, [0, 0, 0, 0, 0, 1]),
        ("\x40", 7, [0, 0, 0, 0, 0, 0, 1]),
        ("\x80", 8, [0, 0, 0, 0, 0, 0, 0, 1]),
        ("\x00\x01", 9, [0, 0, 0, 0, 0, 0, 0, 0, 1]),
        ("\x00\x02", 10, [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
        ("\x00\x00\x01", 17, [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
        ("\x00\x00\x02", 18, [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
        ("\x00\x00\x02", 19, [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0]),
        ("\x01", 1, [1]),
        ("\x01", 2, [1, 0]),
        ("\x01", 3, [1, 0, 0]),
        ("\x01", 4, [1, 0, 0, 0]),
        ("\x01", 5, [1, 0, 0, 0, 0]),
        ("\x01", 6, [1, 0, 0, 0, 0, 0]),
        ("\x01", 7, [1, 0, 0, 0, 0, 0, 0]),
        ("\x01", 8, [1, 0, 0, 0, 0, 0, 0, 0]),
        ("\x01\x00", 9, [1, 0, 0, 0, 0, 0, 0, 0, 0]),
        ("\x01\x00", 10, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
        ("\x01\x00", 16, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
        ("\x01\x00\x00", 17, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
        ("\x01\x00\x00", 18, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        ("\xCD\x6B\x05", 19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1]),
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        (
            "\xAC\xDB\x35",
            22,
            [0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1],
        ),
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        ("\xCD\x01", 10, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0]),
    ]

    def testKnownValues(self) -> None:
        """Each known bytestring should decode to the expected bit list."""
        for bytestring, number_of_bits, expected_result in self.knownValues:
            # Guard against typos in the test table itself
            assert len(expected_result) == number_of_bits
            result = minimalmodbus._bytestring_to_bits(bytestring, number_of_bits)
            self.assertEqual(result, expected_result)

    def testWrongValues(self) -> None:
        """A bit count that does not fit the bytestring length should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_bits, "\x01\x02", 3)
class TestBitsToBytestring(ExtendedTestCase):
    """Test conversion of a list of bits to a bytestring."""

    knownValues = TestBytestringToBits.knownValues

    def testKnownValues(self) -> None:
        """Each known bit list should encode to the expected bytestring."""
        for expected_bytestring, _numbits, bits in self.knownValues:
            self.assertEqual(
                minimalmodbus._bits_to_bytestring(bits), expected_bytestring
            )

    def testWrongValues(self) -> None:
        """Invalid bit values should raise ValueError; a non-list should raise TypeError."""
        self.assertRaises(ValueError, minimalmodbus._bits_to_bytestring, [1, 0, 3])
        self.assertRaises(TypeError, minimalmodbus._bits_to_bytestring, 1)
class TestBitToBytestring(ExtendedTestCase):
    """Test conversion of a single bit value (0 or 1) to a two-byte string."""

    knownValues = [
        (0, "\x00\x00"),
        (1, "\xff\x00"),
    ]

    def testKnownValues(self) -> None:
        """Bit 0 and bit 1 should map to their known two-byte encodings."""
        for value, knownresult in self.knownValues:
            resultvalue = minimalmodbus._bit_to_bytestring(value)
            self.assertEqual(resultvalue, knownresult)

    def testWrongValue(self) -> None:
        """Integers other than 0 and 1 should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._bit_to_bytestring, 2)
        self.assertRaises(ValueError, minimalmodbus._bit_to_bytestring, 222)
        self.assertRaises(ValueError, minimalmodbus._bit_to_bytestring, -1)

    def testValueNotInteger(self) -> None:
        """Non-integer inputs should raise TypeError."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._bit_to_bytestring, value)
class TestCalculateNumberOfBytesForBits(ExtendedTestCase):
    """Test the helper that rounds a bit count up to whole bytes."""

    # Equivalent to the explicit table 0->0, 1-8 -> 1, 9-16 -> 2, 17 -> 3
    knownValues = [(numbits, (numbits + 7) // 8) for numbits in range(18)]

    def testKnownValues(self) -> None:
        """Each bit count should map to the expected number of bytes."""
        for numbits, expected in self.knownValues:
            self.assertEqual(
                minimalmodbus._calculate_number_of_bytes_for_bits(numbits), expected
            )
class TestLongToBytestring(ExtendedTestCase):
    """Test conversion of a 32-bit long to a four-byte string.

    Each known value is (value, signed, byteorder, expected_bytestring).
    """

    knownValues = [
        (0, True, BYTEORDER_BIG, "\x00\x00\x00\x00"),
        (1, False, BYTEORDER_BIG, "\x00\x00\x00\x01"),
        (1, True, BYTEORDER_BIG, "\x00\x00\x00\x01"),
        (2, False, BYTEORDER_BIG, "\x00\x00\x00\x02"),
        (2, True, BYTEORDER_BIG, "\x00\x00\x00\x02"),
        (75000, False, BYTEORDER_BIG, "\x00\x01\x24\xf8"),
        (75000, True, BYTEORDER_BIG, "\x00\x01\x24\xf8"),
        (1000000, False, BYTEORDER_BIG, "\x00\x0f\x42\x40"),
        (1000000, True, BYTEORDER_BIG, "\x00\x0f\x42\x40"),
        (2147483647, False, BYTEORDER_BIG, "\x7f\xff\xff\xff"),
        (2147483647, True, BYTEORDER_BIG, "\x7f\xff\xff\xff"),
        (2147483648, False, BYTEORDER_BIG, "\x80\x00\x00\x00"),
        (4294967295, False, BYTEORDER_BIG, "\xff\xff\xff\xff"),
        (-1, True, BYTEORDER_BIG, "\xff\xff\xff\xff"),
        (-2147483648, True, BYTEORDER_BIG, "\x80\x00\x00\x00"),
        (-200000000, True, BYTEORDER_BIG, "\xf4\x14\x3e\x00"),
        # Example from https://www.simplymodbus.ca/FAQ.htm
        (2923517522, False, BYTEORDER_BIG, "\xAE\x41\x56\x52"),
        # Example from https://www.simplymodbus.ca/FAQ.htm
        (-1371449774, True, BYTEORDER_BIG, "\xAE\x41\x56\x52"),
        # Example from https://www.simplymodbus.ca/FAQ.htm
        (2923517522, False, BYTEORDER_LITTLE, "\x52\x56\x41\xAE"),
        # Example from https://www.simplymodbus.ca/FAQ.htm (the byteorder is not named)
        (2923517522, False, BYTEORDER_LITTLE_SWAP, "\x56\x52\xAE\x41"),
        # Example from https://www.simplymodbus.ca/FAQ.htm (the byteorder is not named)
        (2923517522, False, BYTEORDER_BIG_SWAP, "\x41\xAE\x52\x56"),
    ]

    def testKnownValues(self) -> None:
        """Each known long should encode to the expected bytestring (2 registers)."""
        for value, signed, byteorder, knownstring in self.knownValues:
            resultstring = minimalmodbus._long_to_bytestring(
                value, signed, 2, byteorder
            )
            self.assertEqual(resultstring, knownstring)

    def testWrongInputValue(self) -> None:
        """Out-of-range values and wrong register counts should raise ValueError."""
        self.assertRaises(
            ValueError, minimalmodbus._long_to_bytestring, -1, False, 2
        )  # Range 0 to 4294967295
        self.assertRaises(
            ValueError, minimalmodbus._long_to_bytestring, 4294967296, False, 2
        )
        self.assertRaises(
            ValueError, minimalmodbus._long_to_bytestring, -2147483649, True, 2
        )  # Range -2147483648 to 2147483647
        self.assertRaises(
            ValueError, minimalmodbus._long_to_bytestring, 2147483648, True, 2
        )
        self.assertRaises(
            ValueError, minimalmodbus._long_to_bytestring, 222222222222222, True, 2
        )
        # Only number_of_registers == 2 is valid for a 32-bit long
        for number_of_registers in [0, 1, 3, 4, 5, 6, 7, 8, 16]:
            self.assertRaises(
                ValueError,
                minimalmodbus._long_to_bytestring,
                1,
                True,
                number_of_registers,
            )

    def testWrongInputType(self) -> None:
        """Wrong types for value, register count and signed flag should raise TypeError."""
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._long_to_bytestring, value, True, 2
            )
            self.assertRaises(
                TypeError, minimalmodbus._long_to_bytestring, 1, True, value
            )
        for value in _NOT_BOOLEANS:
            self.assertRaises(TypeError, minimalmodbus._long_to_bytestring, 1, value, 2)
class TestBytestringToLong(ExtendedTestCase):
    """Test conversion of a four-byte string to a 32-bit long."""

    knownValues = TestLongToBytestring.knownValues

    def testKnownValues(self) -> None:
        """Each known bytestring should decode to the known long value."""
        for knownvalue, signed, byteorder, bytestring in self.knownValues:
            resultvalue = minimalmodbus._bytestring_to_long(
                bytestring, signed, 2, byteorder
            )
            self.assertEqual(resultvalue, knownvalue)

    def testWrongInputValue(self) -> None:
        """Wrong string lengths and wrong register counts should raise ValueError."""
        for inputstring in ["", "A", "AA", "AAA", "AAAAA"]:
            self.assertRaises(
                ValueError, minimalmodbus._bytestring_to_long, inputstring, True, 2
            )
        # Only number_of_registers == 2 is valid for a 32-bit long
        for number_of_registers in [0, 1, 3, 4, 5, 6, 7, 8, 16]:
            self.assertRaises(
                ValueError,
                minimalmodbus._bytestring_to_long,
                "AAAA",
                True,
                number_of_registers,
            )

    def testWrongInputType(self) -> None:
        """Wrong types for bytestring, signed flag and register count should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_long, value, True, 2
            )
        for value in _NOT_BOOLEANS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_long, "AAAA", value, 2
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_long, "AAAA", True, value
            )
class TestSanityLong(ExtendedTestCase):
    """Round-trip sanity check: long -> bytestring -> long."""

    knownValues = TestLongToBytestring.knownValues

    def testSanity(self) -> None:
        """Encoding then decoding a long should reproduce the original value."""
        for original, signed, byteorder, _bytestring in self.knownValues:
            packed = minimalmodbus._long_to_bytestring(original, signed, 2, byteorder)
            roundtripped = minimalmodbus._bytestring_to_long(
                packed, signed, 2, byteorder
            )
            self.assertEqual(roundtripped, original)
class TestFloatToBytestring(ExtendedTestCase):
    """Test conversion of a float to an IEEE 754 bytestring.

    Each known value is (value, number_of_registers, byteorder, expected_string).
    Two registers encode single precision, four registers double precision.
    """

    # Use this online calculator:
    # https://www.h-schmidt.net/FloatConverter/IEEE754.html
    # See also examples in
    # http://en.wikipedia.org/wiki/Single-precision_floating-point_format
    # http://en.wikipedia.org/wiki/Double-precision_floating-point_format
    knownValues = [
        (1, 2, BYTEORDER_BIG, "\x3f\x80\x00\x00"),
        (1.0, 2, BYTEORDER_BIG, "\x3f\x80\x00\x00"),  # wikipedia
        (1.0, 2, BYTEORDER_BIG, "?\x80\x00\x00"),
        (1.1, 2, BYTEORDER_BIG, "\x3f\x8c\xcc\xcd"),
        (100, 2, BYTEORDER_BIG, "\x42\xc8\x00\x00"),
        (100.0, 2, BYTEORDER_BIG, "\x42\xc8\x00\x00"),
        (1.0e5, 2, BYTEORDER_BIG, "\x47\xc3\x50\x00"),
        (1.1e9, 2, BYTEORDER_BIG, "\x4e\x83\x21\x56"),
        (1.0e16, 2, BYTEORDER_BIG, "\x5a\x0e\x1b\xca"),
        (1.5e16, 2, BYTEORDER_BIG, "\x5a\x55\x29\xaf"),
        (3.65e30, 2, BYTEORDER_BIG, "\x72\x38\x47\x25"),
        (-1.1, 2, BYTEORDER_BIG, "\xbf\x8c\xcc\xcd"),
        (-2, 2, BYTEORDER_BIG, "\xc0\x00\x00\x00"),
        (-3.6e30, 2, BYTEORDER_BIG, "\xf2\x35\xc0\xe9"),
        (1.0, 4, BYTEORDER_BIG, "\x3f\xf0\x00\x00\x00\x00\x00\x00"),
        (2, 4, BYTEORDER_BIG, "\x40\x00\x00\x00\x00\x00\x00\x00"),
        (1.1e9, 4, BYTEORDER_BIG, "\x41\xd0\x64\x2a\xc0\x00\x00\x00"),
        (3.65e30, 4, BYTEORDER_BIG, "\x46\x47\x08\xe4\x9e\x2f\x4d\x62"),
        (2.42e300, 4, BYTEORDER_BIG, "\x7e\x4c\xe8\xa5\x67\x1f\x46\xa0"),
        (-1.1, 4, BYTEORDER_BIG, "\xbf\xf1\x99\x99\x99\x99\x99\x9a"),
        (-2, 4, BYTEORDER_BIG, "\xc0\x00\x00\x00\x00\x00\x00\x00"),
        (-3.6e30, 4, BYTEORDER_BIG, "\xc6\x46\xb8\x1d\x1a\x43\xb2\x06"),
        (-3.6e30, 4, BYTEORDER_LITTLE, "\x06\xb2\x43\x1a\x1d\xb8\x46\xc6"),
        (-3.6e30, 4, BYTEORDER_BIG_SWAP, "\x46\xc6\x1d\xb8\x43\x1a\x06\xb2"),
        (-3.6e30, 4, BYTEORDER_LITTLE_SWAP, "\xb2\x06\x1a\x43\xb8\x1d\xc6\x46"),
        # Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page)
        (-4.3959787e-11, 2, BYTEORDER_BIG, "\xAE\x41\x56\x52"),
        # Shifted byte positions manually
        (-4.3959787e-11, 2, BYTEORDER_LITTLE, "\x52\x56\x41\xAE"),
        # Shifted byte positions manually
        (-4.3959787e-11, 2, BYTEORDER_BIG_SWAP, "\x41\xAE\x52\x56"),
        # Shifted byte positions manually
        (-4.3959787e-11, 2, BYTEORDER_LITTLE_SWAP, "\x56\x52\xAE\x41"),
        # Calculated by https://www.h-schmidt.net/FloatConverter/IEEE754.html
        (123456.00, 2, BYTEORDER_BIG, "\x47\xF1\x20\x00"),
        # Example from https://store.chipkin.com/articles/how-real-floating-point-
        # and-32-bit-data-is-encoded-in-modbus-rtu-messages
        # Byte order = "No swap"
        (123456.00, 2, BYTEORDER_LITTLE, "\x00\x20\xF1\x47"),
    ]

    def testKnownValues(self) -> None:
        """Each known float should encode to the expected bytestring; also check +inf."""
        for value, number_of_registers, byteorder, knownstring in self.knownValues:
            resultstring = minimalmodbus._float_to_bytestring(
                value, number_of_registers, byteorder
            )
            self.assertEqual(resultstring, knownstring)
        self.assertEqual(
            minimalmodbus._float_to_bytestring(1.5e999, 2), "\x7f\x80\x00\x00"
        )  # +inf

    def testWrongInputValue(self) -> None:
        """Register counts other than 2 and 4 should raise ValueError."""
        # Note: Out of range will not necessarily raise any error, instead it will indicate +inf etc.
        for number_of_registers in [0, 1, 3, 5, 6, 7, 8, 16]:
            self.assertRaises(
                ValueError, minimalmodbus._float_to_bytestring, 1.1, number_of_registers
            )

    def testWrongInputType(self) -> None:
        """Non-numerical values and non-integer register counts should raise TypeError."""
        for value in _NOT_NUMERICALS:
            self.assertRaises(TypeError, minimalmodbus._float_to_bytestring, value, 2)
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._float_to_bytestring, 1.1, value)
class TestBytestringToFloat(ExtendedTestCase):
    """Test conversion of an IEEE 754 bytestring to a float."""

    knownValues = TestFloatToBytestring.knownValues

    def testKnownValues(self) -> None:
        """Each known bytestring should decode to (approximately) the known float."""
        for knownvalue, number_of_registers, byteorder, bytestring in self.knownValues:
            resultvalue = minimalmodbus._bytestring_to_float(
                bytestring, number_of_registers, byteorder
            )
            self.assertAlmostEqualRatio(resultvalue, knownvalue)

    def testWrongInputValue(self) -> None:
        """Wrong string lengths and register counts should raise ValueError."""
        for bytestring in [
            "",
            "A",
            "AB",
            "ABC",
            "ABCDE",
            "ABCDEF",
            "ABCDEFG",
            "ABCDEFGHI",
        ]:
            self.assertRaises(
                ValueError, minimalmodbus._bytestring_to_float, bytestring, 2
            )
            self.assertRaises(
                ValueError, minimalmodbus._bytestring_to_float, bytestring, 4
            )
        for number_of_registers in [0, 1, 3, 5, 6, 7, 8, 16]:
            self.assertRaises(
                ValueError,
                minimalmodbus._bytestring_to_float,
                "ABCD",
                number_of_registers,
            )
            self.assertRaises(
                ValueError,
                minimalmodbus._bytestring_to_float,
                "ABCDEFGH",
                number_of_registers,
            )

    def testWrongInputType(self) -> None:
        """Wrong types for bytestring and register count should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._bytestring_to_float, value, 2)
        for value in _NOT_INTERGERS:
            # Bugfix: pass a valid 4-byte payload here. The previous version
            # passed the float 1.1 as the bytestring, so the TypeError was
            # triggered by the wrong first argument and the register-count
            # type check was never actually exercised.
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_float, "ABCD", value
            )
class TestSanityFloat(ExtendedTestCase):
    """Round-trip sanity check: float -> bytestring -> float."""

    knownValues = TestFloatToBytestring.knownValues

    def testSanity(self) -> None:
        """Encoding then decoding a float should reproduce it (within float tolerance)."""
        for original, register_count, byteorder, _bytestring in self.knownValues:
            packed = minimalmodbus._float_to_bytestring(
                original, register_count, byteorder
            )
            roundtripped = minimalmodbus._bytestring_to_float(
                packed, register_count, byteorder
            )
            self.assertAlmostEqualRatio(roundtripped, original)
class TestValuelistToBytestring(ExtendedTestCase):
    """Test conversion of a list of register values to a bytestring.

    Each known value is (value_list, number_of_registers, expected_bytestring).
    """

    knownValues = [
        ([1], 1, "\x00\x01"),
        ([0, 0], 2, "\x00\x00\x00\x00"),
        ([1, 2], 2, "\x00\x01\x00\x02"),
        ([1, 256], 2, "\x00\x01\x01\x00"),
        ([1, 2, 3, 4], 4, "\x00\x01\x00\x02\x00\x03\x00\x04"),
        ([1, 2, 3, 4, 5], 5, "\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05"),
    ]

    def testKnownValues(self) -> None:
        """Each known value list should encode to the expected bytestring."""
        for value, number_of_registers, knownstring in self.knownValues:
            resultstring = minimalmodbus._valuelist_to_bytestring(
                value, number_of_registers
            )
            self.assertEqual(resultstring, knownstring)

    def testWrongInputValue(self) -> None:
        """Register counts not matching the list length should raise ValueError."""
        self.assertRaises(
            ValueError, minimalmodbus._valuelist_to_bytestring, [1, 2, 3, 4], 1
        )
        self.assertRaises(
            ValueError, minimalmodbus._valuelist_to_bytestring, [1, 2, 3, 4], -4
        )

    def testWrongInputType(self) -> None:
        """Non-integer-lists and non-integer register counts should raise TypeError."""
        for value in _NOT_INTLISTS:
            self.assertRaises(
                TypeError, minimalmodbus._valuelist_to_bytestring, value, 4
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._valuelist_to_bytestring, [1, 2, 3, 4], value
            )
class TestBytestringToValuelist(ExtendedTestCase):
    """Test conversion of a bytestring to a list of register values."""

    knownValues = TestValuelistToBytestring.knownValues

    def testKnownValues(self) -> None:
        """Each known bytestring should decode to the expected value list."""
        for knownlist, number_of_registers, bytestring in self.knownValues:
            resultlist = minimalmodbus._bytestring_to_valuelist(
                bytestring, number_of_registers
            )
            self.assertEqual(resultlist, knownlist)

    def testWrongInputValue(self) -> None:
        """Mismatched lengths and non-positive register counts should raise ValueError."""
        self.assertRaises(
            ValueError, minimalmodbus._bytestring_to_valuelist, "\x00\x01\x00\x02", 1
        )
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_valuelist, "", 1)
        self.assertRaises(
            ValueError, minimalmodbus._bytestring_to_valuelist, "\x00\x01", 0
        )
        self.assertRaises(
            ValueError, minimalmodbus._bytestring_to_valuelist, "\x00\x01", -1
        )

    def testWrongInputType(self) -> None:
        """Wrong types for bytestring and register count should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_valuelist, value, 1
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_valuelist, "A", value
            )
class TestSanityValuelist(ExtendedTestCase):
    """Round-trip sanity check: value list -> bytestring -> value list."""

    knownValues = TestValuelistToBytestring.knownValues

    def testSanity(self) -> None:
        """Encoding then decoding a value list should reproduce it exactly."""
        for original_list, register_count, _bytestring in self.knownValues:
            packed = minimalmodbus._valuelist_to_bytestring(
                original_list, register_count
            )
            self.assertEqual(
                minimalmodbus._bytestring_to_valuelist(packed, register_count),
                original_list,
            )
class TestTextstringToBytestring(ExtendedTestCase):
    """Test conversion of a text string to a space-padded bytestring.

    Each known value is (textstring, number_of_registers, expected_bytestring);
    the output is padded with spaces to fill 2 characters per register.
    """

    knownValues = [
        ("A", 1, "A "),
        ("AB", 1, "AB"),
        ("ABC", 2, "ABC "),
        ("ABCD", 2, "ABCD"),
        ("A", 16, "A" + " " * 31),
        ("A", 32, "A" + " " * 63),
        ("A" * 246, 123, "A" * 246),
    ]

    def testKnownValues(self) -> None:
        """Each known text string should encode to the expected padded bytestring."""
        for textstring, number_of_registers, knownstring in self.knownValues:
            resultstring = minimalmodbus._textstring_to_bytestring(
                textstring, number_of_registers
            )
            self.assertEqual(resultstring, knownstring)

    def testWrongInputValue(self) -> None:
        """Too-long/empty strings and invalid register counts should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._textstring_to_bytestring, "ABC", 1)
        self.assertRaises(ValueError, minimalmodbus._textstring_to_bytestring, "", 1)
        self.assertRaises(ValueError, minimalmodbus._textstring_to_bytestring, "A", -1)
        self.assertRaises(ValueError, minimalmodbus._textstring_to_bytestring, "A", 124)

    def testWrongInputType(self) -> None:
        """Wrong types for text string and register count should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._textstring_to_bytestring, value, 1
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._textstring_to_bytestring, "AB", value
            )
class TestBytestringToTextstring(ExtendedTestCase):
    """Test conversion of a bytestring back to a text string."""

    knownValues = TestTextstringToBytestring.knownValues

    def testKnownValues(self) -> None:
        """Each known bytestring should decode (after stripping padding) to the text."""
        for knownstring, number_of_registers, bytestring in self.knownValues:
            resultstring = minimalmodbus._bytestring_to_textstring(
                bytestring, number_of_registers
            )
            self.assertEqual(resultstring.strip(), knownstring)

    def testWrongInputValue(self) -> None:
        """Mismatched lengths and invalid register counts should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_textstring, "A", 1)
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_textstring, "", 1)
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_textstring, "", 0)
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_textstring, "ABC", 1)
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_textstring, "AB", 0)
        self.assertRaises(ValueError, minimalmodbus._bytestring_to_textstring, "AB", -1)
        self.assertRaises(
            ValueError, minimalmodbus._bytestring_to_textstring, "AB", 126
        )

    def testWrongInputType(self) -> None:
        """Wrong types for bytestring and register count should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_textstring, value, 1
            )
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._bytestring_to_textstring, "AB", value
            )
class TestSanityTextstring(ExtendedTestCase):
    """Round-trip sanity check: text string -> bytestring -> text string."""

    knownValues = TestTextstringToBytestring.knownValues

    def testSanity(self) -> None:
        """Encoding then decoding should reproduce the text, modulo space padding."""
        for text, register_count, _bytestring in self.knownValues:
            packed = minimalmodbus._textstring_to_bytestring(text, register_count)
            unpacked = minimalmodbus._bytestring_to_textstring(packed, register_count)
            self.assertEqual(unpacked.strip(), text)
class TestPack(ExtendedTestCase):
    """Test packing of a value via a struct format string.

    Each known value is (value, struct_formatstring, expected_bytestring).
    """

    knownValues = [
        (-77, ">h", "\xff\xb3"),  # (Signed) short (2 bytes)
        (-1, ">h", "\xff\xff"),
        (-770, ">h", "\xfc\xfe"),
        (-32768, ">h", "\x80\x00"),
        (32767, ">h", "\x7f\xff"),
        (770, ">H", "\x03\x02"),  # Unsigned short (2 bytes)
        (65535, ">H", "\xff\xff"),
        (75000, ">l", "\x00\x01\x24\xf8"),  # (Signed) long (4 bytes)
        (-1, ">l", "\xff\xff\xff\xff"),
        (-2147483648, ">l", "\x80\x00\x00\x00"),
        (-200000000, ">l", "\xf4\x14\x3e\x00"),
        (1, ">L", "\x00\x00\x00\x01"),  # Unsigned long (4 bytes)
        (75000, ">L", "\x00\x01\x24\xf8"),
        (2147483648, ">L", "\x80\x00\x00\x00"),
        (2147483647, ">L", "\x7f\xff\xff\xff"),
        (1.0, ">f", "\x3f\x80\x00\x00"),  # Float (4 bytes)
        (1.0e5, ">f", "\x47\xc3\x50\x00"),
        (1.0e16, ">f", "\x5a\x0e\x1b\xca"),
        (3.65e30, ">f", "\x72\x38\x47\x25"),
        (-2, ">f", "\xc0\x00\x00\x00"),
        (-3.6e30, ">f", "\xf2\x35\xc0\xe9"),
        (1.0, ">d", "\x3f\xf0\x00\x00\x00\x00\x00\x00"),  # Double (8 bytes)
        (2, ">d", "\x40\x00\x00\x00\x00\x00\x00\x00"),
        (1.1e9, ">d", "\x41\xd0\x64\x2a\xc0\x00\x00\x00"),
        (3.65e30, ">d", "\x46\x47\x08\xe4\x9e\x2f\x4d\x62"),
        (2.42e300, ">d", "\x7e\x4c\xe8\xa5\x67\x1f\x46\xa0"),
        (-1.1, ">d", "\xbf\xf1\x99\x99\x99\x99\x99\x9a"),
        (-2, ">d", "\xc0\x00\x00\x00\x00\x00\x00\x00"),
    ]

    def testKnownValues(self) -> None:
        """Each known value/format pair should pack to the expected bytestring."""
        for value, formatstring, knownstring in self.knownValues:
            resultstring = minimalmodbus._pack(formatstring, value)
            self.assertEqual(resultstring, knownstring)

    def testWrongInputValue(self) -> None:
        """Invalid format strings and out-of-range values should raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._pack, "ABC", 35)
        self.assertRaises(ValueError, minimalmodbus._pack, "", 35)
        self.assertRaises(ValueError, minimalmodbus._pack, ">H", -35)
        self.assertRaises(ValueError, minimalmodbus._pack, ">L", -35)

    def testWrongInputType(self) -> None:
        """Non-string format strings and non-packable values should raise errors."""
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._pack, value, 1)
        for value in ["1", ["1"], [1], ["\x00\x2d\x00\x58"], ["A", "B", "C"], "ABC"]:
            self.assertRaises(ValueError, minimalmodbus._pack, ">h", value)
class TestUnpack(ExtendedTestCase):
    """Test unpacking of a bytestring via a struct format string."""

    knownValues = TestPack.knownValues

    def testKnownValues(self) -> None:
        """Each known bytestring/format pair should unpack to the known value."""
        for knownvalue, formatstring, bytestring in self.knownValues:
            resultvalue = minimalmodbus._unpack(formatstring, bytestring)
            self.assertAlmostEqualRatio(resultvalue, knownvalue)

    def testWrongInputValue(self) -> None:
        """Bad format strings and empty payloads should raise the appropriate errors."""
        self.assertRaises(
            InvalidResponseError, minimalmodbus._unpack, "ABC", "\xff\xb3"
        )
        self.assertRaises(ValueError, minimalmodbus._unpack, "", "\xff\xb3")
        self.assertRaises(ValueError, minimalmodbus._unpack, ">h", "")

    def testWrongInputType(self) -> None:
        """Non-string format strings and payloads should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._unpack, value, "\xff\xb3")
            self.assertRaises(TypeError, minimalmodbus._unpack, ">h", value)
class TestSwap(ExtendedTestCase):
    """Test byte-pair swapping of strings of even length."""

    knownValues = [
        ("", ""),
        ("AB", "BA"),
        ("ABCD", "BADC"),
        ("ABCDEF", "BADCFE"),
        ("ABCDEFGH", "BADCFEHG"),
        ("ABCDEFGHIJ", "BADCFEHGJI"),
        ("ABCDEFGHIJKL", "BADCFEHGJILK"),
    ]
    # Odd-length inputs are invalid, since swapping operates on byte pairs.
    wrongValues = ["A", "ABC", "ABCDE", "A" * 123]

    def testKnownValues(self) -> None:
        """Each known input should swap to the expected output."""
        for original, expected in self.knownValues:
            self.assertEqual(minimalmodbus._swap(original), expected)

    def testWrongValues(self) -> None:
        """Odd-length inputs should raise ValueError."""
        for bad in self.wrongValues:
            self.assertRaises(ValueError, minimalmodbus._swap, bad)
class TestSanityPackUnpack(ExtendedTestCase):
    """Round-trip sanity check: unpack a bytestring, then pack it again."""

    knownValues = TestPack.knownValues

    def testSanity(self) -> None:
        """Unpacking then re-packing should reproduce the original bytestring."""
        for _value, formatstring, bytestring in self.knownValues:
            unpacked = minimalmodbus._unpack(formatstring, bytestring)
            self.assertEqual(minimalmodbus._pack(formatstring, unpacked), bytestring)
class TestHexencode(ExtendedTestCase):
    """Test hex-encoding of bytestrings.

    Each known value is (input_string, insert_spaces, expected_hex_string).
    """

    knownValues = [
        ("", False, ""),
        ("7", False, "37"),
        ("J", False, "4A"),
        ("\x5d", False, "5D"),
        ("\x04", False, "04"),
        ("\x04\x5d", False, "045D"),
        ("mn", False, "6D6E"),
        ("Katt1", False, "4B61747431"),
        ("", True, ""),
        ("7", True, "37"),
        ("J", True, "4A"),
        ("\x5d", True, "5D"),
        ("\x04", True, "04"),
        ("\x04\x5d", True, "04 5D"),
        ("mn", True, "6D 6E"),
        ("Katt1", True, "4B 61 74 74 31"),
    ]

    def testKnownValues(self) -> None:
        """Each known input should hex-encode to the expected string."""
        for value, insert_spaces, knownstring in self.knownValues:
            resultstring = minimalmodbus._hexencode(value, insert_spaces)
            self.assertEqual(resultstring, knownstring)

    def testWrongInputValue(self) -> None:
        """No invalid string values exist for hexencode; nothing to check."""
        pass

    def testWrongInputType(self) -> None:
        """Non-string inputs should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._hexencode, value)
class TestHexdecode(ExtendedTestCase):
    """Test hex-decoding of strings back to bytestrings."""

    knownValues = TestHexencode.knownValues

    def testKnownValues(self) -> None:
        """Each known hex string (without spaces) should decode to the original."""
        for knownstring, insert_spaces, value in self.knownValues:
            if not insert_spaces:
                resultstring = minimalmodbus._hexdecode(value)
                self.assertEqual(resultstring, knownstring)
        self.assertEqual(minimalmodbus._hexdecode("4A"), "J")
        self.assertEqual(minimalmodbus._hexdecode("4a"), "J")

    def testAllowLowercase(self) -> None:
        """Lowercase hex digits should be accepted without raising."""
        minimalmodbus._hexdecode("Aa")
        minimalmodbus._hexdecode("aa23")

    def testWrongInputValue(self) -> None:
        """Odd-length strings raise ValueError; non-hex characters raise TypeError."""
        self.assertRaises(ValueError, minimalmodbus._hexdecode, "A")
        self.assertRaises(ValueError, minimalmodbus._hexdecode, "AAA")
        self.assertRaises(TypeError, minimalmodbus._hexdecode, "AG")

    def testWrongInputType(self) -> None:
        """Non-string inputs should raise TypeError."""
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._hexdecode, value)
class TestSanityHexencodeHexdecode(ExtendedTestCase):
    """Round-trip sanity check for _hexencode()/_hexdecode()."""

    knownValues = TestHexencode.knownValues

    def testKnownValues(self) -> None:
        for original, spaced, _encoded in self.knownValues:
            if spaced:
                continue  # Only the space-free encoding is decodable
            roundtripped = minimalmodbus._hexdecode(minimalmodbus._hexencode(original))
            self.assertEqual(roundtripped, original)

    def testKnownValuesLoop(self) -> None:
        """Loop through all bytestrings of length two."""
        RANGE_VALUE = 256
        for high in range(RANGE_VALUE):
            for low in range(RANGE_VALUE):
                original = chr(high) + chr(low)
                roundtripped = minimalmodbus._hexdecode(
                    minimalmodbus._hexencode(original)
                )
                self.assertEqual(roundtripped, original)
class TestDescribeBytes(ExtendedTestCase):
    """Tests for _describe_bytes()."""

    def testKnownValues(self) -> None:
        described = minimalmodbus._describe_bytes(b"\x01\x02\x03")
        self.assertEqual(described, "01 02 03 (3 bytes)")
############################
# Test number manipulation #
############################
class TestTwosComplement(ExtendedTestCase):
    """Tests for _twos_complement().

    Each knownValues row is (signed value, number of bits, unsigned result).
    """

    knownValues = [
        (0, 8, 0),
        (1, 8, 1),
        (127, 8, 127),
        (-128, 8, 128),
        (-127, 8, 129),
        (-1, 8, 255),
        (0, 16, 0),
        (1, 16, 1),
        (32767, 16, 32767),
        (-32768, 16, 32768),
        (-32767, 16, 32769),
        (-1, 16, 65535),
    ]

    def testKnownValues(self) -> None:
        for number, bits, expected in self.knownValues:
            self.assertEqual(minimalmodbus._twos_complement(number, bits), expected)

    def testOutOfRange(self) -> None:
        """Values outside the representable range, or invalid bit counts, must raise."""
        for number, bits in [
            (128, 8),
            (1000000, 8),
            (-129, 8),
            (32768, 16),
            (1000000, 16),
            (-32769, 16),
            (1, 0),
            (1, -1),
            (1, -2),
            (1, -100),
        ]:
            self.assertRaises(ValueError, minimalmodbus._twos_complement, number, bits)

    def testWrongInputType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._twos_complement, value, 8)
class TestFromTwosComplement(ExtendedTestCase):
    """Tests for _from_twos_complement(), reusing the forward table in reverse."""

    knownValues = TestTwosComplement.knownValues

    def testKnownValues(self) -> None:
        for expected, bits, number in self.knownValues:
            self.assertEqual(minimalmodbus._from_twos_complement(number, bits), expected)

    def testOutOfRange(self) -> None:
        """Unsigned values outside the bit width, or invalid bit counts, must raise."""
        for number, bits in [
            (256, 8),
            (1000000, 8),
            (-1, 8),
            (65536, 16),
            (1000000, 16),
            (-1, 16),
            (1, 0),
            (1, -1),
            (1, -2),
            (1, -100),
        ]:
            self.assertRaises(
                ValueError, minimalmodbus._from_twos_complement, number, bits
            )

    def testWrongInputType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._from_twos_complement, value, 8)
            self.assertRaises(TypeError, minimalmodbus._from_twos_complement, 1, value)
class TestSanityTwosComplement(ExtendedTestCase):
    """Round-trip sanity between _twos_complement() and _from_twos_complement()."""

    knownValues = [1, 2, 4, 8, 12, 16]

    def testSanity(self) -> None:
        for bits in self.knownValues:
            for candidate in range(2 ** bits):
                roundtripped = minimalmodbus._twos_complement(
                    minimalmodbus._from_twos_complement(candidate, bits), bits
                )
                self.assertEqual(roundtripped, candidate)
#########################
# Test bit manipulation #
#########################
class TestSetBitOn(ExtendedTestCase):
    """Tests for _set_bit_on()."""

    knownValues = [
        (4, 0, 5),
        (4, 1, 6),
        (1, 1, 3),
    ]

    def testKnownValues(self) -> None:
        for number, bitposition, expected in self.knownValues:
            self.assertEqual(minimalmodbus._set_bit_on(number, bitposition), expected)

    def testWrongInputValue(self) -> None:
        """Negative bit numbers or negative values must raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._set_bit_on, 1, -1)
        self.assertRaises(ValueError, minimalmodbus._set_bit_on, -2, 1)

    def testWrongInputType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._set_bit_on, value, 1)
            self.assertRaises(TypeError, minimalmodbus._set_bit_on, 1, value)
class TestCheckBit(ExtendedTestCase):
    """Tests for _check_bit().

    Each knownValues row is (value, bit number, expected truth of that bit).
    """

    knownValues = [
        (0, 0, False),
        (0, 1, False),
        (0, 2, False),
        (0, 3, False),
        (0, 4, False),
        (0, 5, False),
        (0, 6, False),
        (4, 0, False),
        (4, 1, False),
        (4, 2, True),
        (4, 3, False),
        (4, 4, False),
        (4, 5, False),
        (4, 6, False),  # Was a duplicate of (4, 5, False); bit 6 of 0b100 is also clear
    ]

    def testKnownValues(self) -> None:
        for x, bitnum, knownresult in self.knownValues:
            result = minimalmodbus._check_bit(x, bitnum)
            self.assertEqual(result, knownresult)

    def testWrongInputValue(self) -> None:
        """Negative bit numbers or negative values must raise ValueError."""
        self.assertRaises(ValueError, minimalmodbus._check_bit, 1, -1)
        self.assertRaises(ValueError, minimalmodbus._check_bit, -2, 1)

    def testWrongInputType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._check_bit, value, 1)
            self.assertRaises(TypeError, minimalmodbus._check_bit, 1, value)
############################
# Error checking functions #
############################
class TestCalculateCrcString(ExtendedTestCase):
    """Tests for _calculate_crc_string() (Modbus RTU CRC-16)."""

    knownValues = [
        (
            "\x02\x07",
            "\x41\x12",
        ),  # Example from MODBUS over Serial Line Specification and Implementation Guide V1.02
        ("ABCDE", "\x0fP"),
    ]

    def testKnownValues(self) -> None:
        for message, expected_crc in self.knownValues:
            self.assertEqual(minimalmodbus._calculate_crc_string(message), expected_crc)

    def testCalculationTime(self) -> None:
        """Measure (and print) the CRC calculation time over all 16-bit values."""
        probes = [minimalmodbus._num_to_twobyte_string(i) for i in range(2 ** 16)]
        minimalmodbus._print_out(
            "\n\n Measuring CRC calculation time. Running {} calculations ...".format(
                len(probes)
            )
        )
        started = time.time()
        for probe in probes:
            minimalmodbus._calculate_crc_string(probe)
        elapsed = time.time() - started
        minimalmodbus._print_out(
            "CRC calculation time: {} calculations took {:.3f} s ({} s per calculation)\n\n".format(
                len(probes),
                elapsed,
                elapsed / float(len(probes)),
            )
        )

    def testNotStringInput(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._calculate_crc_string, value)
class TestCalculateLrcString(ExtendedTestCase):
    """Tests for _calculate_lrc_string() (Modbus ASCII LRC)."""

    knownValues = [
        ("ABCDE", "\xb1"),
        (
            "\x02\x30\x30\x31\x23\x03",
            "\x47",
        ),  # From C# example on http://en.wikipedia.org/wiki/Longitudinal_redundancy_check
    ]

    def testKnownValues(self) -> None:
        for message, expected_lrc in self.knownValues:
            self.assertEqual(minimalmodbus._calculate_lrc_string(message), expected_lrc)

    def testNotStringInput(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._calculate_lrc_string, value)
class TestCheckFunctioncode(ExtendedTestCase):
    """Tests for _check_functioncode()."""

    def testCorrectFunctioncode(self) -> None:
        minimalmodbus._check_functioncode(4, [4, 5])

    def testCorrectFunctioncodeNoRange(self) -> None:
        """Passing None as the list of allowed codes skips the membership check."""
        minimalmodbus._check_functioncode(4, None)
        minimalmodbus._check_functioncode(75, None)

    def testWrongFunctioncode(self) -> None:
        self.assertRaises(ValueError, minimalmodbus._check_functioncode, 3, [4, 5])
        self.assertRaises(ValueError, minimalmodbus._check_functioncode, 3, [])

    def testWrongFunctioncodeNoRange(self) -> None:
        for functioncode in [1000, -1]:
            self.assertRaises(
                ValueError, minimalmodbus._check_functioncode, functioncode, None
            )

    def testWrongFunctioncodeType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, minimalmodbus._check_functioncode, value, [4, 5]
            )

    def testWrongFunctioncodeListValues(self) -> None:
        self.assertRaises(ValueError, minimalmodbus._check_functioncode, -1, [-1, 5])
        self.assertRaises(ValueError, minimalmodbus._check_functioncode, 128, [4, 128])

    def testWrongListType(self) -> None:
        """Bad container types or bad list elements must raise."""
        for expected_exception, bad_list in [
            (TypeError, 4),
            (TypeError, "ABC"),
            (TypeError, (4, 5)),
            (ValueError, [4, -23]),
            (ValueError, [4, 128]),
            (TypeError, [4, "5"]),
            (TypeError, [4, None]),
            (TypeError, [4, [5]]),
            (TypeError, [4.0, 5]),
        ]:
            self.assertRaises(
                expected_exception, minimalmodbus._check_functioncode, 4, bad_list
            )
class TestCheckSlaveaddress(ExtendedTestCase):
    """Tests for _check_slaveaddress()."""

    def testKnownValues(self) -> None:
        # 0 is broadcast; 255 is in the reserved range but still accepted
        for address in [0, 1, 10, 247, 255]:
            minimalmodbus._check_slaveaddress(address)

    def testWrongValues(self) -> None:
        for address in [-1, 256]:
            self.assertRaises(ValueError, minimalmodbus._check_slaveaddress, address)

    def testNotIntegerInput(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._check_slaveaddress, value)
class TestCheckMode(ExtendedTestCase):
    """Tests for _check_mode()."""

    def testKnownValues(self) -> None:
        minimalmodbus._check_mode("ascii")
        minimalmodbus._check_mode("rtu")

    def testWrongValues(self) -> None:
        """Only exact lowercase 'ascii'/'rtu' (no padding) are valid."""
        for mode in ["asc", "ASCII", "RTU", "", "ascii ", " rtu"]:
            self.assertRaises(ValueError, minimalmodbus._check_mode, mode)

    def testNotStringInput(self) -> None:
        # Renamed from testNotIntegerInput: the loop feeds non-STRING values.
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._check_mode, value)
class TestCheckRegisteraddress(ExtendedTestCase):
    """Tests for _check_registeraddress()."""

    def testKnownValues(self) -> None:
        for address in [0, 1, 10, 65535]:
            minimalmodbus._check_registeraddress(address)

    def testWrongValues(self) -> None:
        for address in [-1, 65536]:
            self.assertRaises(ValueError, minimalmodbus._check_registeraddress, address)

    def testWrongType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._check_registeraddress, value)
class TestCheckResponseSlaveErrorCode(ExtendedTestCase):
    """Tests for _check_response_slaveerrorcode()."""

    def testResponsesWithoutErrors(self) -> None:
        """Responses without the error bit, or with a non-error code, pass silently."""
        for response in ["\x01\x01\x01\x00Q\x88", "\x01\x01\x05", "\x01\x81\x05"]:
            minimalmodbus._check_response_slaveerrorcode(response)

    def testResponsesWithErrors(self) -> None:
        """Each Modbus slave error code maps to a specific exception type."""
        for errorcode_byte, expected_exception in [
            ("\x01", IllegalRequestError),
            ("\x02", IllegalRequestError),
            ("\x03", IllegalRequestError),
            ("\x04", SlaveReportedException),
            ("\x06", SlaveDeviceBusyError),
            ("\x07", NegativeAcknowledgeError),
            ("\x08", SlaveReportedException),
            ("\x09", SlaveReportedException),
            ("\x0A", SlaveReportedException),
            ("\x0B", SlaveReportedException),
            ("\x0C", SlaveReportedException),
            ("\xFF", SlaveReportedException),
        ]:
            self.assertRaises(
                expected_exception,
                minimalmodbus._check_response_slaveerrorcode,
                "\x01\x81" + errorcode_byte,
            )

    def testTooShortResponses(self) -> None:
        """Responses shorter than three bytes are left for later validation steps."""
        for short_response in ["", "A", "AB"]:
            minimalmodbus._check_response_slaveerrorcode(short_response)
class TestCheckResponseNumberOfBytes(ExtendedTestCase):
    """Tests for _check_response_bytecount()."""

    def testCorrectNumberOfBytes(self) -> None:
        """The first byte must equal the number of following payload bytes."""
        minimalmodbus._check_response_bytecount("\x02\x03\x02")
        minimalmodbus._check_response_bytecount(
            "\x0C\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C"
        )

    def testWrongNumberOfBytes(self) -> None:
        for payload in ["\x03\x03\x02", "ABC", ""]:
            self.assertRaises(
                InvalidResponseError, minimalmodbus._check_response_bytecount, payload
            )

    def testNotStringInput(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._check_response_bytecount, value
            )
class TestCheckResponseRegisterAddress(ExtendedTestCase):
    """Tests for _check_response_registeraddress()."""

    def testCorrectResponseRegisterAddress(self) -> None:
        for payload, address in [
            ("\x00\x2d\x00\x58", 45),
            ("\x00\x18\x00\x01", 24),
            ("\x00\x47\xff\x00", 71),
            ("\x00\x48\x00\x01", 72),
        ]:
            minimalmodbus._check_response_registeraddress(payload, address)

    def testTooShortString(self) -> None:
        self.assertRaises(
            InvalidResponseError,
            minimalmodbus._check_response_registeraddress,
            "\x00",
            46,
        )

    def testNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._check_response_registeraddress, value, 45
            )

    def testWrongResponseRegisterAddress(self) -> None:
        self.assertRaises(
            InvalidResponseError,
            minimalmodbus._check_response_registeraddress,
            "\x00\x2d\x00\x58",
            46,
        )

    def testInvalidAddress(self) -> None:
        for address in [-2, 65536]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_response_registeraddress,
                "\x00\x2d\x00\x58",
                address,
            )

    def testAddressNotInteger(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_response_registeraddress,
                "\x00\x2d\x00\x58",
                value,
            )
class TestCheckResponsenumber_of_registers(ExtendedTestCase):
    """Tests for _check_response_number_of_registers()."""

    def testCorrectResponsenumber_of_registers(self) -> None:
        for payload, count in [
            ("\x00\x18\x00\x01", 1),
            ("\x00#\x00\x01", 1),
            ("\x00\x34\x00\x02", 2),
        ]:
            minimalmodbus._check_response_number_of_registers(payload, count)

    def testTooShortString(self) -> None:
        self.assertRaises(
            InvalidResponseError,
            minimalmodbus._check_response_number_of_registers,
            "\x00",
            1,
        )

    def testNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._check_response_number_of_registers, value, 1
            )

    def testWrongResponsenumber_of_registers(self) -> None:
        self.assertRaises(
            InvalidResponseError,
            minimalmodbus._check_response_number_of_registers,
            "\x00#\x00\x01",
            4,
        )

    def testInvalidResponsenumber_of_registersRange(self) -> None:
        for payload, count in [
            ("\x00\x18\x00\x00", 0),
            ("\x00\x18\x00\x01", -1),
            ("\x00\x18\x00\x01", 65536),
        ]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_response_number_of_registers,
                payload,
                count,
            )

    def testnumber_of_registersNotInteger(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_response_number_of_registers,
                "\x00\x18\x00\x01",
                value,
            )
class TestCheckResponseWriteData(ExtendedTestCase):
    """Tests for _check_response_writedata()."""

    def testCorrectResponseWritedata(self) -> None:
        minimalmodbus._check_response_writedata("\x00\x2d\x00\x58", "\x00\x58")
        minimalmodbus._check_response_writedata(
            "\x00\x2d\x00\x58", minimalmodbus._num_to_twobyte_string(88)
        )
        minimalmodbus._check_response_writedata("\x00\x47\xff\x00", "\xff\x00")
        minimalmodbus._check_response_writedata(
            "\x00\x47\xff\x00", minimalmodbus._num_to_twobyte_string(65280)
        )
        # Extra trailing payload bytes are ignored
        minimalmodbus._check_response_writedata(
            "\x00\x2d\x00\x58ABCDEFGHIJKLMNOP", "\x00\x58"
        )

    def testWrongResponseWritedata(self) -> None:
        for payload, writedata in [
            ("\x00\x2d\x00\x58", "\x00\x59"),
            ("\x00\x2d\x00\x58", minimalmodbus._num_to_twobyte_string(89)),
            ("\x00\x47\xff\x00", "\xff\x01"),
        ]:
            self.assertRaises(
                InvalidResponseError,
                minimalmodbus._check_response_writedata,
                payload,
                writedata,
            )

    def testNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._check_response_writedata, value, "\x00\x58"
            )
            self.assertRaises(
                TypeError,
                minimalmodbus._check_response_writedata,
                "\x00\x2d\x00\x58",
                value,
            )

    def testTooShortPayload(self) -> None:
        for payload in ["\x00\x58", ""]:
            self.assertRaises(
                InvalidResponseError,
                minimalmodbus._check_response_writedata,
                payload,
                "\x00\x58",
            )

    def testInvalidReferenceData(self) -> None:
        """The reference write data must be exactly two bytes."""
        for writedata in ["\x00\x58\x00", "\x58", ""]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_response_writedata,
                "\x00\x2d\x00\x58",
                writedata,
            )
class TestCheckString(ExtendedTestCase):
    """Tests for the _check_string() argument validator."""

    def testKnownValues(self) -> None:
        minimalmodbus._check_string("DEF", minlength=3, maxlength=3, description="ABC")
        minimalmodbus._check_string(
            "DEF", minlength=3, maxlength=3, description="ABC", force_ascii=True
        )
        minimalmodbus._check_string("DEF", minlength=0, maxlength=100, description="ABC")

    def testTooShort(self) -> None:
        for text, low in [("DE", 3), ("DEF", 10)]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_string,
                text,
                minlength=low,
                maxlength=3,
                description="ABC",
            )

    def testTooLong(self) -> None:
        self.assertRaises(
            ValueError,
            minimalmodbus._check_string,
            "DEFG",
            minlength=1,
            maxlength=3,
            description="ABC",
        )

    def testNotAscii(self) -> None:
        """Non-ASCII input must raise when force_ascii is set."""
        self.assertRaises(
            ValueError,
            minimalmodbus._check_string,
            "\u0394P",
            minlength=2,
            maxlength=2,
            description="ABC",
            force_ascii=True,
        )

    def testInconsistentLengthlimits(self) -> None:
        for text, low, high in [("DEFG", 4, 3), ("DEF", -3, 3), ("DEF", 3, -3)]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_string,
                text,
                minlength=low,
                maxlength=high,
                description="ABC",
            )

    def testInputNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_string,
                value,
                minlength=3,
                maxlength=3,
                description="ABC",
            )

    def testNotIntegerInput(self) -> None:
        for value in _NOT_INTERGERS_OR_NONE:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_string,
                "DEF",
                minlength=value,
                maxlength=3,
                description="ABC",
            )
            self.assertRaises(
                TypeError,
                minimalmodbus._check_string,
                "DEF",
                minlength=3,
                maxlength=value,
                description="ABC",
            )

    def testDescriptionNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_string,
                "DEF",
                minlength=3,
                maxlength=3,
                description=value,
            )

    def testWrongCustomError(self) -> None:
        """exception_type must itself be an exception class."""
        for bad_exception_type in [list, 7]:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_string,
                "DEF",
                minlength=3,
                maxlength=3,
                description="ABC",
                exception_type=bad_exception_type,
            )

    def testCustomError(self) -> None:
        for ex in [NotImplementedError, MemoryError, InvalidResponseError]:
            self.assertRaises(
                ex,
                minimalmodbus._check_string,
                "DE",
                minlength=3,
                description="ABC",
                exception_type=ex,
            )
class TestCheckBytes(ExtendedTestCase):
    """Tests for the _check_bytes() argument validator."""

    def testKnownValues(self) -> None:
        minimalmodbus._check_bytes(b"DEF", minlength=3, maxlength=3, description="ABC")
        minimalmodbus._check_bytes(b"DEF", minlength=0, maxlength=100, description="ABC")

    def testTooShort(self) -> None:
        for payload, low in [(b"DE", 3), (b"DEF", 10)]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_bytes,
                payload,
                minlength=low,
                maxlength=3,
                description="ABC",
            )

    def testTooLong(self) -> None:
        self.assertRaises(
            ValueError,
            minimalmodbus._check_bytes,
            b"DEFG",
            minlength=1,
            maxlength=3,
            description="ABC",
        )

    def testInconsistentLengthlimits(self) -> None:
        for payload, low, high in [(b"DEFG", 4, 3), (b"DEF", -3, 3), (b"DEF", 3, -3)]:
            self.assertRaises(
                ValueError,
                minimalmodbus._check_bytes,
                payload,
                minlength=low,
                maxlength=high,
                description="ABC",
            )

    def testInputNotBytes(self) -> None:
        for value in _NOT_BYTES:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_bytes,
                value,
                minlength=3,
                maxlength=3,
                description="ABC",
            )

    def testNotIntegerInput(self) -> None:
        for value in _NOT_INTERGERS_OR_NONE:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_bytes,
                b"DEF",
                minlength=value,
                maxlength=3,
                description="ABC",
            )
            self.assertRaises(
                TypeError,
                minimalmodbus._check_bytes,
                b"DEF",
                minlength=3,
                maxlength=value,
                description="ABC",
            )

    def testDescriptionNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_bytes,
                b"DEF",
                minlength=3,
                maxlength=3,
                description=value,
            )
class TestCheckInt(ExtendedTestCase):
    """Tests for the _check_int() argument validator."""

    def testKnownValues(self) -> None:
        minimalmodbus._check_int(47, minvalue=None, maxvalue=None, description="ABC")
        minimalmodbus._check_int(47, minvalue=40, maxvalue=50, description="ABC")
        minimalmodbus._check_int(47, minvalue=-40, maxvalue=50, description="ABC")
        minimalmodbus._check_int(47, description="ABC", maxvalue=50, minvalue=40)
        minimalmodbus._check_int(47, minvalue=None, maxvalue=50, description="ABC")
        minimalmodbus._check_int(47, minvalue=40, maxvalue=None, description="ABC")

    def testTooLargeValue(self) -> None:
        self.assertRaises(
            ValueError,
            minimalmodbus._check_int,
            47,
            minvalue=30,
            maxvalue=40,
            description="ABC",
        )
        self.assertRaises(ValueError, minimalmodbus._check_int, 47, maxvalue=46)

    def testTooSmallValue(self) -> None:
        self.assertRaises(ValueError, minimalmodbus._check_int, 47, minvalue=48)
        self.assertRaises(
            ValueError,
            minimalmodbus._check_int,
            47,
            minvalue=48,
            maxvalue=None,
            description="ABC",
        )

    def testInconsistentLimits(self) -> None:
        self.assertRaises(
            ValueError,
            minimalmodbus._check_int,
            47,
            minvalue=47,
            maxvalue=45,
            description="ABC",
        )

    def testWrongInputType(self) -> None:
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, minimalmodbus._check_int, value, minvalue=40)
        for value in _NOT_INTERGERS_OR_NONE:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_int,
                47,
                minvalue=value,
                maxvalue=50,
                description="ABC",
            )
            self.assertRaises(
                TypeError,
                minimalmodbus._check_int,
                47,
                minvalue=40,
                maxvalue=value,
                description="ABC",
            )
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_int,
                47,
                minvalue=40,
                maxvalue=50,
                description=value,
            )
class TestCheckNumerical(ExtendedTestCase):
    """Tests for the _check_numerical() argument validator.

    The int/float mix of the literals is deliberate: both types must be accepted
    and compared correctly.
    """

    def testKnownValues(self) -> None:
        minimalmodbus._check_numerical(
            47, minvalue=None, maxvalue=None, description="ABC"
        )
        minimalmodbus._check_numerical(47, minvalue=40, maxvalue=50, description="ABC")
        minimalmodbus._check_numerical(47, minvalue=-40, maxvalue=50, description="ABC")
        minimalmodbus._check_numerical(47, description="ABC", maxvalue=50, minvalue=40)
        minimalmodbus._check_numerical(
            47, minvalue=None, maxvalue=50, description="ABC"
        )
        minimalmodbus._check_numerical(
            47, minvalue=40, maxvalue=None, description="ABC"
        )
        minimalmodbus._check_numerical(47.0, minvalue=40)
        minimalmodbus._check_numerical(
            47, minvalue=40.0, maxvalue=50, description="ABC"
        )
        minimalmodbus._check_numerical(
            47.0, minvalue=40, maxvalue=None, description="ABC"
        )
        minimalmodbus._check_numerical(
            47.0, minvalue=40.0, maxvalue=50.0, description="ABC"
        )

    def testTooLargeValue(self) -> None:
        self.assertRaises(
            ValueError,
            minimalmodbus._check_numerical,
            47.0,
            minvalue=30,
            maxvalue=40,
            description="ABC",
        )
        self.assertRaises(
            ValueError,
            minimalmodbus._check_numerical,
            47.0,
            minvalue=30.0,
            maxvalue=40.0,
            description="ABC",
        )
        self.assertRaises(ValueError, minimalmodbus._check_numerical, 47, maxvalue=46.0)
        self.assertRaises(
            ValueError, minimalmodbus._check_numerical, 47.0, maxvalue=46.0
        )
        self.assertRaises(ValueError, minimalmodbus._check_numerical, 47.0, maxvalue=46)

    def testTooSmallValue(self) -> None:
        self.assertRaises(ValueError, minimalmodbus._check_numerical, 47.0, minvalue=48)
        self.assertRaises(
            ValueError, minimalmodbus._check_numerical, 47.0, minvalue=48.0
        )
        self.assertRaises(ValueError, minimalmodbus._check_numerical, 47, minvalue=48.0)
        self.assertRaises(
            ValueError,
            minimalmodbus._check_numerical,
            47,
            minvalue=48,
            maxvalue=None,
            description="ABC",
        )

    def testInconsistentLimits(self) -> None:
        self.assertRaises(
            ValueError,
            minimalmodbus._check_numerical,
            47,
            minvalue=47,
            maxvalue=45,
            description="ABC",
        )
        self.assertRaises(
            ValueError,
            minimalmodbus._check_numerical,
            47.0,
            minvalue=47.0,
            maxvalue=45.0,
            description="ABC",
        )

    def testNotNumericInput(self) -> None:
        for value in _NOT_NUMERICALS:
            self.assertRaises(
                TypeError, minimalmodbus._check_numerical, value, minvalue=40.0
            )
        for value in _NOT_NUMERICALS_OR_NONE:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_numerical,
                47.0,
                minvalue=value,
                maxvalue=50.0,
                description="ABC",
            )
            self.assertRaises(
                TypeError,
                minimalmodbus._check_numerical,
                47.0,
                minvalue=40.0,
                maxvalue=value,
                description="ABC",
            )

    def testDescriptionNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError,
                minimalmodbus._check_numerical,
                47.0,
                minvalue=40,
                maxvalue=50,
                description=value,
            )
class TestCheckBool(ExtendedTestCase):
    """Tests for _check_bool()."""

    def testKnownValues(self) -> None:
        minimalmodbus._check_bool(True, description="ABC")
        minimalmodbus._check_bool(False, description="ABC")

    def testWrongType(self) -> None:
        for value in _NOT_BOOLEANS:
            self.assertRaises(
                TypeError, minimalmodbus._check_bool, value, description="ABC"
            )
        for value in _NOT_STRINGS:
            self.assertRaises(
                TypeError, minimalmodbus._check_bool, True, description=value
            )
#####################
# Development tools #
#####################
class TestGetDiagnosticString(ExtendedTestCase):
    """Tests for _get_diagnostic_string()."""

    def testReturnsString(self) -> None:
        diagnostics = minimalmodbus._get_diagnostic_string()
        self.assertTrue(len(diagnostics) > 100)  # For Python 2.6 compatibility
class TestPrintOut(ExtendedTestCase):
    """Tests for _print_out()."""

    def testKnownValues(self) -> None:
        minimalmodbus._print_out("ABCDEFGHIJKL")

    def testInputNotString(self) -> None:
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, minimalmodbus._print_out, value)
# TODO: TestInterpretRawMessage
# TODO: TestInterpretPayload
###########################################
# Communication using a dummy serial port #
###########################################
class TestDummyCommunication(ExtendedTestCase):
## Test fixture ##
    def setUp(self) -> None:
        """Patch minimalmodbus to use the dummy serial port, and create a test instrument."""
        # Prepare a dummy serial port to have proper responses,
        # and monkey-patch minimalmodbus to use it
        # Note that mypy is unhappy about this: https://github.com/python/mypy/issues/1152
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 1)
## Read bit ##
    def testReadBit(self) -> None:
        """Read single bits with function codes 1 and 2, positionally and by keyword."""
        # Functioncode 2
        self.assertEqual(self.instrument.read_bit(61), 1)
        self.assertEqual(self.instrument.read_bit(61, functioncode=2), 1)
        self.assertEqual(self.instrument.read_bit(61, 2), 1)
        # Functioncode 1
        self.assertEqual(self.instrument.read_bit(62, functioncode=1), 0)
        self.assertEqual(self.instrument.read_bit(62, 1), 0)
    def testReadBitWrongValue(self) -> None:
        """Out-of-range register addresses or function codes must raise ValueError."""
        # Wrong register address
        self.assertRaises(ValueError, self.instrument.read_bit, -1)
        self.assertRaises(ValueError, self.instrument.read_bit, 65536)
        # Wrong function code
        self.assertRaises(ValueError, self.instrument.read_bit, 62, 0)
        self.assertRaises(ValueError, self.instrument.read_bit, 62, -1)
        self.assertRaises(ValueError, self.instrument.read_bit, 62, 128)
    def testReadBitWrongType(self) -> None:
        """Non-integer addresses or function codes must raise TypeError."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.read_bit, value)
            self.assertRaises(TypeError, self.instrument.read_bit, 62, value)
    def testReadBitWithWrongByteCountResponse(self) -> None:
        """A response with a wrong byte count must raise InvalidResponseError."""
        # Functioncode 2. Slave gives wrong byte count.
        self.assertRaises(InvalidResponseError, self.instrument.read_bit, 63)
    def testReadBitWithNoResponse(self) -> None:
        """A silent slave must raise NoResponseError."""
        # Functioncode 2. Slave gives no response.
        self.assertRaises(NoResponseError, self.instrument.read_bit, 64)
## Write bit ##
    def testWriteBit(self) -> None:
        """Write single bits with function codes 5 and 15; int and bool values accepted."""
        self.instrument.write_bit(71, 0)
        self.instrument.write_bit(71, False)
        self.instrument.write_bit(71, 1)
        self.instrument.write_bit(71, True)
        self.instrument.write_bit(71, 1, 5)
        self.instrument.write_bit(71, True, 5)
        self.instrument.write_bit(71, 1, functioncode=5)
        self.instrument.write_bit(72, 1, 15)
        self.instrument.write_bit(72, 1, functioncode=15)
    def testWriteBitWrongValue(self) -> None:
        """Bad addresses, bit values, or function codes must raise ValueError."""
        # Wrong register address
        self.assertRaises(ValueError, self.instrument.write_bit, 65536, 1)
        self.assertRaises(ValueError, self.instrument.write_bit, -1, 1)
        # Wrong bit value
        self.assertRaises(ValueError, self.instrument.write_bit, 71, 10)
        self.assertRaises(ValueError, self.instrument.write_bit, 71, -5)
        self.assertRaises(ValueError, self.instrument.write_bit, 71, 10, 5)
        # Wrong function code
        self.assertRaises(ValueError, self.instrument.write_bit, 71, 1, 6)
        self.assertRaises(ValueError, self.instrument.write_bit, 71, 1, -1)
        self.assertRaises(ValueError, self.instrument.write_bit, 71, 1, 0)
        self.assertRaises(ValueError, self.instrument.write_bit, 71, 1, 128)
    def testWriteBitWrongType(self) -> None:
        """Non-integer addresses, values, or function codes must raise TypeError."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.write_bit, value, 1)
            self.assertRaises(TypeError, self.instrument.write_bit, 71, value)
            self.assertRaises(TypeError, self.instrument.write_bit, 71, 1, value)
    def testWriteBitWithWrongRegisternumbersResponse(self) -> None:
        """A response reporting a wrong register count must raise InvalidResponseError."""
        # Slave gives wrong number of registers
        self.assertRaises(
            InvalidResponseError, self.instrument.write_bit, 73, 1, functioncode=15
        )
    def testWriteBitWithWrongWritedataResponse(self) -> None:
        """A response echoing wrong write data must raise InvalidResponseError."""
        # Slave gives wrong write data
        self.assertRaises(InvalidResponseError, self.instrument.write_bit, 74, 1)
## Read bits ##
    def testReadBits(self) -> None:
        """Read multiple bits with function codes 1 and 2 (spec examples and a recording)."""
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        self.assertEqual(
            self.instrument.read_bits(196, 22, functioncode=2),
            [0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1],
        )
        # Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
        self.assertEqual(
            self.instrument.read_bits(19, 19, functioncode=1),
            [1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1],
        )
        # Recorded on Delta DTB4824
        self.assertEqual(
            self.instrument.read_bits(0x800, 16),
            [0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
        )
def testReadBitsWrongValue(self) -> None:
self.assertRaises(ValueError, self.instrument.read_bits, -1, 4)
## Write bits ##
def testWriteBits(self) -> None:
# Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
self.instrument.write_bits(19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0])
self.instrument.write_bits(
19, [True, False, True, True, False, False, True, True, True, False]
)
    def testWriteBitsWrongValue(self) -> None:
        """Bad address gives ValueError; a non-list bit argument gives TypeError."""
        self.assertRaises(ValueError, self.instrument.write_bits, -1, [0, 1])
        self.assertRaises(TypeError, self.instrument.write_bits, 122, 1)
## Read register ##
    def testReadRegister(self) -> None:
        """Read single registers with both function codes, decimals and signedness."""
        # functioncode 3
        self.assertEqual(self.instrument.read_register(289), 770)
        self.assertEqual(self.instrument.read_register(5), 184)
        self.assertEqual(self.instrument.read_register(289, 0), 770)
        self.assertEqual(self.instrument.read_register(289, 0, 3), 770)
        # functioncode 4
        self.assertEqual(self.instrument.read_register(14, 0, 4), 880)
        # Decimal scaling: raw 770 -> 77.0 (1 decimal) -> 7.7 (2 decimals)
        self.assertAlmostEqual(self.instrument.read_register(289, 1), 77.0)
        self.assertAlmostEqual(self.instrument.read_register(289, 2), 7.7)
        # Raw 65531 is -5 when interpreted as a signed 16-bit value
        self.assertEqual(self.instrument.read_register(101), 65531)
        self.assertEqual(self.instrument.read_register(101, signed=True), -5)
def testReadRegisterWrongValue(self) -> None:
# Wrong register address
self.assertRaises(ValueError, self.instrument.read_register, -1)
self.assertRaises(ValueError, self.instrument.read_register, -1, 0, 3)
self.assertRaises(ValueError, self.instrument.read_register, 65536)
# Wrong number of decimals
self.assertRaises(ValueError, self.instrument.read_register, 289, -1)
self.assertRaises(ValueError, self.instrument.read_register, 289, 11)
# Wrong function code
self.assertRaises(ValueError, self.instrument.read_register, 289, 0, 5)
self.assertRaises(ValueError, self.instrument.read_register, 289, 0, -4)
    def testReadRegisterWrongType(self) -> None:
        """Verify TypeError for non-integer address, decimals or function code."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.read_register, value, 0, 3)
            self.assertRaises(TypeError, self.instrument.read_register, 289, value)
            self.assertRaises(TypeError, self.instrument.read_register, 289, 0, value)
## Write register ##
    def testWriteRegister(self) -> None:
        """Write single registers using both function codes and signed values."""
        self.instrument.write_register(35, 20)
        self.instrument.write_register(35, 20, functioncode=16)
        self.instrument.write_register(35, 20.0)  # float value, zero decimals
        self.instrument.write_register(24, 50)
        self.instrument.write_register(45, 88, functioncode=6)
        self.instrument.write_register(101, 5)
        self.instrument.write_register(101, 5, signed=True)
        self.instrument.write_register(101, 5, 1)
        # Negative values require signed=True
        self.instrument.write_register(101, -5, signed=True)
        self.instrument.write_register(101, -5, 1, signed=True)
    def testWriteRegisterWithDecimals(self) -> None:
        """Write float values scaled by one decimal (2.0 -> 20, 8.8 -> 88)."""
        self.instrument.write_register(35, 2.0, 1)
        self.instrument.write_register(45, 8.8, 1, functioncode=6)
def testWriteRegisterWrongValue(self) -> None:
# Wrong address
self.assertRaises(ValueError, self.instrument.write_register, -1, 20)
self.assertRaises(ValueError, self.instrument.write_register, 65536, 20)
# Wrong register value
self.assertRaises(ValueError, self.instrument.write_register, 35, -1)
self.assertRaises(ValueError, self.instrument.write_register, 35, 65536)
# Wrong number of decimals
self.assertRaises(ValueError, self.instrument.write_register, 35, 20, -1)
self.assertRaises(ValueError, self.instrument.write_register, 35, 20, 100)
# Wrong function code
self.assertRaises(
ValueError, self.instrument.write_register, 35, 20, functioncode=12
)
self.assertRaises(
ValueError, self.instrument.write_register, 35, 20, functioncode=-4
)
self.assertRaises(
ValueError, self.instrument.write_register, 35, 20, functioncode=129
)
    def testWriteRegisterWrongType(self) -> None:
        """Verify TypeError for non-numerical arguments to write_register()."""
        for value in _NOT_NUMERICALS:
            self.assertRaises(TypeError, self.instrument.write_register, value, 20)
            self.assertRaises(TypeError, self.instrument.write_register, 35, value)
            self.assertRaises(TypeError, self.instrument.write_register, 35, 20, value)
            self.assertRaises(
                TypeError, self.instrument.write_register, 35, 20, functioncode=value
            )
    def testWriteRegisterWithWrongCrcResponse(self) -> None:
        """The dummy slave answers address 51 with a wrong CRC."""
        # Slave gives wrong CRC
        self.assertRaises(InvalidResponseError, self.instrument.write_register, 51, 99)
    def testWriteRegisterSuppressErrorMessageAtWrongCRC(self) -> None:
        """Catch the CRC error explicitly instead of letting it propagate."""
        try:
            self.instrument.write_register(51, 99)  # Slave gives wrong CRC
        except InvalidResponseError:
            minimalmodbus._print_out("Minimalmodbus: An error was suppressed.")
    def testWriteRegisterWithWrongSlaveaddressResponse(self) -> None:
        """The dummy slave answers address 54 with a wrong slave address."""
        # Slave gives wrong slaveaddress
        self.assertRaises(InvalidResponseError, self.instrument.write_register, 54, 99)
    def testWriteRegisterWithWrongFunctioncodeResponse(self) -> None:
        """The dummy slave answers with a wrong function code or an error flag."""
        # Slave gives wrong functioncode
        self.assertRaises(InvalidResponseError, self.instrument.write_register, 55, 99)
        # Slave indicates an error
        self.assertRaises(
            SlaveReportedException, self.instrument.write_register, 56, 99
        )
    def testWriteRegisterWithWrongRegisteraddressResponse(self) -> None:
        """The dummy slave answers address 53 with a wrong register address."""
        # Slave gives wrong registeraddress
        self.assertRaises(InvalidResponseError, self.instrument.write_register, 53, 99)
    def testWriteRegisterWithWrongRegisternumbersResponse(self) -> None:
        """The dummy slave answers address 52 with a wrong register count."""
        # Slave gives wrong number of registers
        self.assertRaises(InvalidResponseError, self.instrument.write_register, 52, 99)
    def testWriteRegisterWithWrongWritedataResponse(self) -> None:
        """The dummy slave echoes wrong write data for function code 6."""
        # Functioncode 6. Slave gives wrong write data.
        self.assertRaises(
            InvalidResponseError, self.instrument.write_register, 55, 99, functioncode=6
        )
## Read Long ##
    def testReadLong(self) -> None:
        """Read 32-bit values, with signedness and all four byte orders."""
        # Raw 0xFFFFFFFF: 4294967295 unsigned, -1 signed
        self.assertEqual(self.instrument.read_long(102), 4294967295)
        self.assertEqual(self.instrument.read_long(102, signed=True), -1)
        # The same value stored at different addresses in different byte orders
        self.assertEqual(
            self.instrument.read_long(223, byteorder=BYTEORDER_BIG), 2923517522
        )
        self.assertEqual(
            self.instrument.read_long(224, byteorder=BYTEORDER_BIG_SWAP), 2923517522
        )
        self.assertEqual(
            self.instrument.read_long(225, byteorder=BYTEORDER_LITTLE_SWAP), 2923517522
        )
        self.assertEqual(
            self.instrument.read_long(226, byteorder=BYTEORDER_LITTLE), 2923517522
        )
def testReadLongWrongValue(self) -> None:
# Wrong register address
self.assertRaises(ValueError, self.instrument.read_long, -1)
self.assertRaises(ValueError, self.instrument.read_long, 65536)
# Wrong function code
self.assertRaises(ValueError, self.instrument.read_long, 102, 1)
self.assertRaises(ValueError, self.instrument.read_long, 102, -1)
self.assertRaises(ValueError, self.instrument.read_long, 102, 256)
    def testReadLongWrongType(self) -> None:
        """Verify TypeError for non-integer arguments and non-boolean 'signed'."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.read_long, value)
            self.assertRaises(TypeError, self.instrument.read_long, 102, value)
        for value in _NOT_BOOLEANS:
            self.assertRaises(TypeError, self.instrument.read_long, 102, signed=value)
## Write Long ##
    def testWriteLong(self) -> None:
        """Write 32-bit values, with signedness and all four byte orders."""
        self.instrument.write_long(102, 5)
        self.instrument.write_long(102, 5, signed=True)
        self.instrument.write_long(102, -5, signed=True)
        self.instrument.write_long(102, 3, False)
        self.instrument.write_long(102, -3, True)
        self.instrument.write_long(222, 2923517522)  # BYTEORDER_BIG
        self.instrument.write_long(222, 2923517522, byteorder=BYTEORDER_BIG_SWAP)
        self.instrument.write_long(222, 2923517522, byteorder=BYTEORDER_LITTLE_SWAP)
        self.instrument.write_long(222, 2923517522, byteorder=BYTEORDER_LITTLE)
    def testWriteLongWrongValue(self) -> None:
        """Verify ValueError for bad address or out-of-range values."""
        # Wrong register address
        self.assertRaises(ValueError, self.instrument.write_long, -1, 5)
        self.assertRaises(ValueError, self.instrument.write_long, 65536, 5)
        # Value too large for 32 bits
        self.assertRaises(
            ValueError, self.instrument.write_long, 102, 888888888888888888888
        )
        # Negative value without signed=True
        self.assertRaises(ValueError, self.instrument.write_long, 102, -5, signed=False)
def testWriteLongWrongType(self) -> None:
for value in _NOT_INTERGERS:
self.assertRaises(TypeError, self.instrument.write_long, value, 5)
self.assertRaises(TypeError, self.instrument.write_long, 102, value)
for value in _NOT_BOOLEANS:
self.assertRaises(
TypeError, self.instrument.write_long, 102, 5, signed=value
)
## Read Float ##
    def testReadFloat(self) -> None:
        """Read 32-bit floats with all byte orders, register counts and both
        function codes."""
        # BYTEORDER_BIG
        self.assertAlmostEqual(self.instrument.read_float(241), -4.3959787e-11)
        self.assertAlmostEqual(
            self.instrument.read_float(242, byteorder=BYTEORDER_BIG_SWAP),
            -4.3959787e-11,
        )
        self.assertAlmostEqual(
            self.instrument.read_float(243, byteorder=BYTEORDER_LITTLE_SWAP),
            -4.3959787e-11,
        )
        self.assertAlmostEqual(
            self.instrument.read_float(244, byteorder=BYTEORDER_LITTLE), -4.3959787e-11
        )
        self.assertEqual(self.instrument.read_float(103), 1.0)
        self.assertEqual(self.instrument.read_float(103, 3), 1.0)
        self.assertEqual(self.instrument.read_float(103, 3, 2), 1.0)
        self.assertEqual(self.instrument.read_float(103, 3, 4), -2.0)  # 4 registers
        # Function code 4
        self.assertAlmostEqualRatio(self.instrument.read_float(103, 4, 2), 3.65e30)
def testReadFloatWrongValue(self) -> None:
# Wrong register address
self.assertRaises(ValueError, self.instrument.read_float, -1)
self.assertRaises(ValueError, self.instrument.read_float, -1, 3)
self.assertRaises(ValueError, self.instrument.read_float, -1, 3, 2)
self.assertRaises(ValueError, self.instrument.read_float, 65536)
# Wrong function code
self.assertRaises(ValueError, self.instrument.read_float, 103, 1)
self.assertRaises(ValueError, self.instrument.read_float, 103, -1)
self.assertRaises(ValueError, self.instrument.read_float, 103, 256)
# Wrong number of registers
for value in [-1, 0, 1, 5, 6, 7, 8, 16]:
self.assertRaises(ValueError, self.instrument.read_float, 103, 3, value)
self.assertRaises(InvalidResponseError, self.instrument.read_float, 103, 3, 3)
    def testReadFloatWrongType(self) -> None:
        """Verify TypeError for non-integer address, function code or count."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.read_float, value, 3, 2)
            self.assertRaises(TypeError, self.instrument.read_float, 103, value, 2)
            self.assertRaises(TypeError, self.instrument.read_float, 103, 3, value)
## Write Float ##
    def testWriteFloat(self) -> None:
        """Write 32-bit floats with different register counts and byte orders."""
        self.instrument.write_float(103, 1.1)
        self.instrument.write_float(103, 1.1, 4)  # 4 registers
        self.instrument.write_float(240, -4.3959787e-11)  # BYTEORDER_BIG
        self.instrument.write_float(240, -4.3959787e-11, byteorder=BYTEORDER_BIG_SWAP)
        self.instrument.write_float(
            240, -4.3959787e-11, byteorder=BYTEORDER_LITTLE_SWAP
        )
        self.instrument.write_float(240, -4.3959787e-11, byteorder=BYTEORDER_LITTLE)
def testWriteFloatWrongValue(self) -> None:
# Wrong register address
self.assertRaises(ValueError, self.instrument.write_float, -1, 1.1)
self.assertRaises(ValueError, self.instrument.write_float, 65536, 1.1)
# Wrong number of registers
for value in [-1, 0, 1, 3, 5, 6, 7, 8, 16]:
self.assertRaises(ValueError, self.instrument.write_float, 103, 1.1, value)
    def testWriteFloatWrongType(self) -> None:
        """Verify TypeError for non-integer address/count or non-numeric value."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.write_float, value, 1.1)
            self.assertRaises(TypeError, self.instrument.write_float, 103, 1.1, value)
        for value in _NOT_NUMERICALS:
            self.assertRaises(TypeError, self.instrument.write_float, 103, value)
## Read String ##
    def testReadString(self) -> None:
        """Read strings of different lengths (two characters per register)."""
        self.assertEqual(self.instrument.read_string(104, 1), "AB")
        self.assertEqual(self.instrument.read_string(104, 4), "ABCDEFGH")
        self.assertEqual(self.instrument.read_string(104, 4, 3), "ABCDEFGH")
        # TODO test with function code 4
def testReadStringWrongValue(self) -> None:
# Wrong register address
self.assertRaises(ValueError, self.instrument.read_string, -1)
self.assertRaises(ValueError, self.instrument.read_string, 65536)
# Wrong number of registers
self.assertRaises(ValueError, self.instrument.read_string, 104, -1)
self.assertRaises(ValueError, self.instrument.read_string, 104, 126)
# Wrong function code
self.assertRaises(ValueError, self.instrument.read_string, 104, 4, 1)
self.assertRaises(ValueError, self.instrument.read_string, 104, 4, -1)
self.assertRaises(ValueError, self.instrument.read_string, 104, 4, 256)
    def testReadStringWrongType(self) -> None:
        """Verify TypeError for non-integer address, count or function code."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.read_string, value, 1)
            self.assertRaises(TypeError, self.instrument.read_string, value, 4)
            self.assertRaises(TypeError, self.instrument.read_string, 104, value)
            self.assertRaises(TypeError, self.instrument.read_string, 104, 4, value)
## Write String ##
def testWriteString(self) -> None:
self.instrument.write_string(104, "A", 1)
self.instrument.write_string(104, "A", 4)
self.instrument.write_string(104, "ABCDEFGH", 4)
    def testWriteStringWrongValue(self) -> None:
        """Verify ValueError for bad address, overlong string, bad count or
        non-ASCII characters."""
        # Wrong register address
        self.assertRaises(ValueError, self.instrument.write_string, -1, "A")
        self.assertRaises(ValueError, self.instrument.write_string, 65536, "A")
        # Too long string for the given register count (2 chars per register)
        self.assertRaises(ValueError, self.instrument.write_string, 104, "AAA", 1)
        self.assertRaises(ValueError, self.instrument.write_string, 104, "ABCDEFGHI", 4)
        # Wrong number of registers
        self.assertRaises(ValueError, self.instrument.write_string, 104, "A", -1)
        self.assertRaises(ValueError, self.instrument.write_string, 104, "A", 124)
        # Non-ASCII
        self.assertRaises(ValueError, self.instrument.write_string, 104, "\u0394P", 1)
    def testWriteStringWrongType(self) -> None:
        """Verify TypeError for non-integer address/count or non-string value."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.write_string, value, "A")
            self.assertRaises(TypeError, self.instrument.write_string, 104, "A", value)
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, self.instrument.write_string, 104, value, 4)
## Read Registers ##
    def testReadRegisters(self) -> None:
        """Read one and several consecutive registers from the dummy slave."""
        self.assertEqual(self.instrument.read_registers(105, 1), [16])
        self.assertEqual(self.instrument.read_registers(105, 3), [16, 32, 64])
        # TODO test with function code 4
def testReadRegistersWrongValue(self) -> None:
# Wrong register address
self.assertRaises(ValueError, self.instrument.read_registers, -1, 1)
self.assertRaises(ValueError, self.instrument.read_registers, 65536, 1)
# Wrong number of registers
self.assertRaises(ValueError, self.instrument.read_registers, 105, -1)
self.assertRaises(ValueError, self.instrument.read_registers, 105, 126)
# Wrong function code
self.assertRaises(ValueError, self.instrument.read_registers, 105, 1, 1)
self.assertRaises(ValueError, self.instrument.read_registers, 105, 1, 256)
self.assertRaises(ValueError, self.instrument.read_registers, 105, 1, -1)
    def testReadRegistersWrongType(self) -> None:
        """Verify TypeError for non-integer address, count or function code."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.read_registers, value, 1)
            self.assertRaises(TypeError, self.instrument.read_registers, 105, value)
            self.assertRaises(TypeError, self.instrument.read_registers, 105, 1, value)
## Write Registers ##
def testWriteRegisters(self) -> None:
self.instrument.write_registers(105, [2])
self.instrument.write_registers(105, [2, 4, 8])
# self.instrument.write_registers(105, [2]*123) # Todo create suitable response
    def testWriteRegistersWrongValue(self) -> None:
        """Verify ValueError for bad address, bad list contents or list length."""
        # Wrong register address
        self.assertRaises(ValueError, self.instrument.write_registers, -1, [2])
        self.assertRaises(ValueError, self.instrument.write_registers, 65536, [2])
        # Empty list, or negative register value in the list
        self.assertRaises(ValueError, self.instrument.write_registers, 105, [])
        self.assertRaises(ValueError, self.instrument.write_registers, 105, [-1])
        # Too many registers
        self.assertRaises(ValueError, self.instrument.write_registers, 105, [2] * 124)
    def testWriteRegistersWrongType(self) -> None:
        """Verify TypeError for non-integer address or non-list values argument."""
        for value in _NOT_INTERGERS:
            self.assertRaises(TypeError, self.instrument.write_registers, value, [2])
        for value in _NOT_INTLISTS:
            self.assertRaises(TypeError, self.instrument.write_registers, 105, value)
## Generic command ##
    def testGenericCommand(self) -> None:
        """Drive _generic_command() directly with the same argument combinations
        that the public read_*/write_* wrappers would pass, and compare against
        the results expected from the dummy responses.

        Each call is preceded by a comment showing the equivalent public call.
        """
        # read_bit(61)
        self.assertEqual(
            self.instrument._generic_command(
                2, 61, number_of_bits=1, payloadformat=_Payloadformat.BIT
            ),
            1,
        )
        # write_bit(71, 1)
        self.instrument._generic_command(
            5, 71, 1, number_of_bits=1, payloadformat=_Payloadformat.BIT
        )
        # read_bits(196, 22, functioncode=2)
        self.assertEqual(
            self.instrument._generic_command(
                2, 196, number_of_bits=22, payloadformat=_Payloadformat.BITS
            ),
            [0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1],
        )
        # read_bits(19, 19, functioncode=1)
        self.assertEqual(
            self.instrument._generic_command(
                1, 19, number_of_bits=19, payloadformat=_Payloadformat.BITS
            ),
            [1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1],
        )
        # write_bits(19, [1, 0, 1, 1, 0, 0, 1, 1, 1, 0])
        self.instrument._generic_command(
            15,
            19,
            [1, 0, 1, 1, 0, 0, 1, 1, 1, 0],
            number_of_bits=10,
            payloadformat=_Payloadformat.BITS,
        )
        # read_register(289)
        self.assertEqual(
            self.instrument._generic_command(
                3, 289, number_of_registers=1, payloadformat=_Payloadformat.REGISTER
            ),
            770,
        )
        # read_register(101, signed = True)
        self.assertEqual(
            self.instrument._generic_command(
                3,
                101,
                number_of_registers=1,
                signed=True,
                payloadformat=_Payloadformat.REGISTER,
            ),
            -5,
        )
        # read_register(289, 1)
        self.assertAlmostEqual(
            self.instrument._generic_command(
                3,
                289,
                number_of_decimals=1,
                number_of_registers=1,
                payloadformat=_Payloadformat.REGISTER,
            ),
            77.0,
        )
        # write_register(35, 20)
        self.instrument._generic_command(
            16, 35, 20, number_of_registers=1, payloadformat=_Payloadformat.REGISTER
        )
        # write_register(45, 88)
        self.instrument._generic_command(
            6, 45, 88, number_of_registers=1, payloadformat=_Payloadformat.REGISTER
        )
        # read_long(102)
        self.assertEqual(
            self.instrument._generic_command(
                3, 102, number_of_registers=2, payloadformat=_Payloadformat.LONG
            ),
            4294967295,
        )
        # write_long(102, 5)
        self.instrument._generic_command(
            16, 102, 5, number_of_registers=2, payloadformat=_Payloadformat.LONG
        )
        # read_float(103)
        self.assertAlmostEqual(
            self.instrument._generic_command(
                3, 103, number_of_registers=2, payloadformat=_Payloadformat.FLOAT
            ),
            1.0,
        )
        # write_float(103, 1.1)
        self.instrument._generic_command(
            16, 103, 1.1, number_of_registers=2, payloadformat=_Payloadformat.FLOAT
        )
        # read_string(104, 1)
        self.assertEqual(
            self.instrument._generic_command(
                3, 104, number_of_registers=1, payloadformat=_Payloadformat.STRING
            ),
            "AB",
        )
        # write_string(104, 'A', 1)
        self.instrument._generic_command(
            16, 104, "A", number_of_registers=1, payloadformat=_Payloadformat.STRING
        )
        # read_registers(105, 3)
        self.assertEqual(
            self.instrument._generic_command(
                3, 105, number_of_registers=3, payloadformat=_Payloadformat.REGISTERS
            ),
            [16, 32, 64],
        )
        # write_registers(105, [2, 4, 8])
        self.instrument._generic_command(
            16,
            105,
            [2, 4, 8],
            number_of_registers=3,
            payloadformat=_Payloadformat.REGISTERS,
        )
    def testGenericCommandWrongValue(
        self,
    ) -> None:
        """Out-of-range single arguments to _generic_command() raise ValueError."""
        # Detected without looking at parameter combinations
        for functioncode in [-1, 0, 23, 35, 128, 255, 1234567]:
            self.assertRaises(
                ValueError,
                self.instrument._generic_command,
                functioncode,
                1,
                number_of_registers=1,
                payloadformat=_Payloadformat.REGISTER,
            )
        for registeraddress in [-1, 65536]:
            self.assertRaises(
                ValueError, self.instrument._generic_command, 3, registeraddress
            )
        for number_of_decimals in [-1, 11]:
            self.assertRaises(
                ValueError,
                self.instrument._generic_command,
                3,
                289,
                number_of_decimals=number_of_decimals,
            )
        for number_of_registers in [-1, 126]:
            self.assertRaises(
                ValueError,
                self.instrument._generic_command,
                3,
                289,
                number_of_registers=number_of_registers,
            )
        for number_of_bits in [-1, 2001]:
            self.assertRaises(
                ValueError,
                self.instrument._generic_command,
                3,
                289,
                number_of_bits=number_of_bits,
            )
        # A plain string is not a _Payloadformat member
        self.assertRaises(
            TypeError, self.instrument._generic_command, 3, 289, payloadformat="ABC"
        )
    def testGenericCommandWrongType(
        self,
    ) -> None:
        """Wrongly typed single arguments to _generic_command() raise TypeError."""
        # Detected without looking at parameter combinations
        # Note: The parameter 'value' type is dependent on the other parameters. See tests above.
        for value in _NOT_INTERGERS:
            # Function code
            self.assertRaises(TypeError, self.instrument._generic_command, value, 289)
            # Register address
            self.assertRaises(TypeError, self.instrument._generic_command, 3, value)
            self.assertRaises(
                TypeError,
                self.instrument._generic_command,
                3,
                289,
                number_of_decimals=value,
            )
            self.assertRaises(
                TypeError,
                self.instrument._generic_command,
                3,
                289,
                number_of_registers=value,
            )
            self.assertRaises(
                TypeError,
                self.instrument._generic_command,
                3,
                289,
                number_of_bits=value,
            )
            self.assertRaises(
                TypeError, self.instrument._generic_command, 3, 289, byteorder=value
            )
        for value in _NOT_BOOLEANS:
            self.assertRaises(
                TypeError, self.instrument._generic_command, 3, 289, signed=value
            )
        for value in _NOT_STRINGS_OR_NONE:
            self.assertRaises(
                TypeError, self.instrument._generic_command, 3, 289, payloadformat=value
            )
    def testGenericCommandWrongValueCombinations(self) -> None:
        """Inconsistent argument combinations raise ValueError or TypeError.

        Each individual argument is valid on its own; the combination with the
        given payload format is what is rejected. Grouped per payload format.
        """
        # Bit
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            1,
            number_of_bits=2,
            payloadformat=_Payloadformat.BIT,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            1,
            number_of_bits=1,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            1,
            number_of_bits=1,
            number_of_decimals=1,
            payloadformat=_Payloadformat.BIT,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            1,
            number_of_bits=1,
            number_of_registers=1,
            payloadformat=_Payloadformat.BIT,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            1,
            number_of_bits=1,
            signed=True,
            payloadformat=_Payloadformat.BIT,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            1,
            number_of_bits=1,
            byteorder=BYTEORDER_LITTLE,
            payloadformat=_Payloadformat.BIT,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            5,
            71,
            2,
            number_of_bits=1,
            payloadformat=_Payloadformat.BIT,
        )
        self.assertRaises(
            TypeError,
            self.instrument._generic_command,
            5,
            71,
            "abc",
            number_of_bits=1,
            payloadformat=_Payloadformat.BIT,
        )
        # Bits
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            2,
            71,
            number_of_bits=-1,
            payloadformat=_Payloadformat.BITS,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            2,
            71,
            number_of_bits=0,
            payloadformat=_Payloadformat.BITS,
        )
        self.assertRaises(
            TypeError,
            self.instrument._generic_command,
            15,
            71,
            1,
            number_of_bits=1,
            payloadformat=_Payloadformat.BITS,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            15,
            71,
            [1, 2],
            number_of_bits=1,
            payloadformat=_Payloadformat.BITS,
        )
        # Register
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            3,
            289,
            number_of_registers=1,
            number_of_bits=1,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            3,
            289,
            number_of_registers=0,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            16,
            289,
            number_of_registers=5,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            16,
            289,
            number_of_registers=1,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            6,
            45,
            88,
            number_of_registers=7,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            3,
            289,
            88,
            number_of_registers=1,
            payloadformat=_Payloadformat.REGISTER,
        )
        self.assertRaises(
            TypeError,
            self.instrument._generic_command,
            6,
            123,
            "abc",
            number_of_registers=1,
            payloadformat=_Payloadformat.REGISTER,
        )
        # Registers
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            1,
            105,
            number_of_registers=3,
            payloadformat=_Payloadformat.REGISTERS,
        )
        self.assertRaises(
            TypeError,
            self.instrument._generic_command,
            16,
            105,
            2,
            number_of_registers=3,
            payloadformat=_Payloadformat.REGISTERS,
        )
        self.assertRaises(
            ValueError,
            self.instrument._generic_command,
            16,
            105,
            [2, 4],
            number_of_registers=3,
            payloadformat=_Payloadformat.REGISTERS,
        )
        # String
        self.assertRaises(
            TypeError,
            self.instrument._generic_command,
            16,
            123,
            1.0,
            number_of_registers=1,
            payloadformat=_Payloadformat.STRING,
        )
## Perform command ##
    def testPerformcommandKnownResponse(self) -> None:
        """Send raw payloads and compare against the recorded dummy responses."""
        # Total response length should be 8 bytes
        self.assertEqual(self.instrument._perform_command(16, "TESTCOMMAND"), "TRsp")
        self.assertEqual(
            self.instrument._perform_command(75, "TESTCOMMAND2"), "TESTCOMMANDRESPONSE2"
        )
        # Read bit register 61 on slave 1 using function code 2.
        self.assertEqual(
            self.instrument._perform_command(2, "\x00\x3d\x00\x01"), "\x01\x01"
        )
def testPerformcommandWrongSlaveResponse(self) -> None:
# Wrong slave address in response
self.assertRaises(
InvalidResponseError, self.instrument._perform_command, 1, "TESTCOMMAND"
)
# Wrong function code in response
self.assertRaises(
InvalidResponseError, self.instrument._perform_command, 2, "TESTCOMMAND"
)
# Wrong CRC in response
self.assertRaises(
InvalidResponseError, self.instrument._perform_command, 3, "TESTCOMMAND"
)
# Too short response message from slave
self.assertRaises(
InvalidResponseError, self.instrument._perform_command, 4, "TESTCOMMAND"
)
# Error indication from slave
self.assertRaises(
InvalidResponseError, self.instrument._perform_command, 5, "TESTCOMMAND"
)
def testPerformcommandWrongInputValue(self) -> None:
# Wrong function code
self.assertRaises(
ValueError, self.instrument._perform_command, -1, "TESTCOMMAND"
)
self.assertRaises(
ValueError, self.instrument._perform_command, 128, "TESTCOMMAND"
)
    def testPerformcommandWrongInputType(self) -> None:
        """Verify TypeError for non-integer function code or non-string payload."""
        for value in _NOT_INTERGERS:
            self.assertRaises(
                TypeError, self.instrument._perform_command, value, "TESTCOMMAND"
            )
        for value in _NOT_STRINGS:
            self.assertRaises(TypeError, self.instrument._perform_command, 16, value)
## Communicate ##
    def testCommunicateKnownResponse(self) -> None:
        """A raw request bytes message returns the recorded dummy response."""
        self.assertEqual(
            self.instrument._communicate(b"TESTMESSAGE", _LARGE_NUMBER_OF_BYTES),
            b"TESTRESPONSE",
        )
    def testCommunicateWrongType(self) -> None:
        """A non-bytes request message must raise TypeError."""
        for value in _NOT_BYTES:
            self.assertRaises(
                TypeError, self.instrument._communicate, value, _LARGE_NUMBER_OF_BYTES
            )
def testCommunicateNoMessage(self) -> None:
self.assertRaises(
ValueError, self.instrument._communicate, b"", _LARGE_NUMBER_OF_BYTES
)
    def testCommunicateNoResponse(self) -> None:
        """An empty answer from the dummy port must raise NoResponseError."""
        self.assertRaises(
            NoResponseError,
            self.instrument._communicate,
            b"MessageForEmptyResponse",
            _LARGE_NUMBER_OF_BYTES,
        )
    def testCommunicateLocalEcho(self) -> None:
        """With local echo handling enabled, the echo is stripped from the reply."""
        self.instrument.handle_local_echo = True
        self.assertEqual(
            self.instrument._communicate(b"TESTMESSAGE2", _LARGE_NUMBER_OF_BYTES),
            b"TESTRESPONSE2",
        )
    def testCommunicateWrongLocalEcho(self) -> None:
        """A local echo that does not match the sent message raises IOError."""
        self.instrument.handle_local_echo = True
        self.assertRaises(
            IOError,
            self.instrument._communicate,
            b"TESTMESSAGE3",
            _LARGE_NUMBER_OF_BYTES,
        )  # TODO is this correct?
    def testPortWillBeOpened(self) -> None:
        """A closed port is reopened automatically on the next access."""
        assert self.instrument.serial is not None
        self.instrument.serial.close()
        self.instrument.write_bit(71, 1)  # must not raise despite the closed port
    def testMeasureRoundtriptime(self) -> None:
        """roundtrip_time is None before and set after a transaction."""
        self.instrument.debug = True
        self.assertIsNone(self.instrument.roundtrip_time)
        self.instrument.write_bit(71, 1)
        self.assertIsNotNone(self.instrument.roundtrip_time)
        # Measured round trip time in seconds, see dummy_serial
        self.assertGreater(self.instrument.roundtrip_time, 0.001)
## __repr__ ##
    def testRepresentation(self) -> None:
        """repr(instrument) contains the expected settings fragments."""
        representation = repr(self.instrument)
        self.assertTrue("minimalmodbus.Instrument<id=" in representation)
        self.assertTrue(
            ", address=1, mode=rtu, close_port_after_each_call=False, "
            in representation
        )
        self.assertTrue(
            ", precalculate_read_size=True, clear_buffers_before_each_transaction=True, "
            in representation
        )
        self.assertTrue(", handle_local_echo=False, debug=False, " in representation)
        self.assertTrue(", open=True>(port=" in representation)
## Test the dummy serial port itself ##
    def testReadPortClosed(self) -> None:
        """Reading from a closed dummy port raises IOError."""
        assert self.instrument.serial is not None
        self.instrument.serial.close()
        # Error raised by dummy_serial
        self.assertRaises(IOError, self.instrument.serial.read, 1000)
    def testPortAlreadyOpen(self) -> None:
        """Opening an already open dummy port raises IOError."""
        assert self.instrument.serial is not None
        # Error raised by dummy_serial
        self.assertRaises(IOError, self.instrument.serial.open)
    def testPortAlreadyClosed(self) -> None:
        """Closing an already closed dummy port raises IOError."""
        assert self.instrument.serial is not None
        self.instrument.serial.close()
        # Error raised by dummy_serial
        self.assertRaises(IOError, self.instrument.serial.close)
## Tear down test fixture ##
def tearDown(self) -> None:
if self.instrument.serial is not None:
try:
self.instrument.serial.close()
except:
pass
del self.instrument
class TestDummyCommunicationOmegaSlave1(ExtendedTestCase):
    """Recorded RTU communication with an Omega controller at slave address 1."""

    def setUp(self) -> None:
        """Route minimalmodbus's serial traffic to the dummy serial port."""
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 1)

    def testReadBit(self) -> None:
        self.assertEqual(self.instrument.read_bit(2068), 1)

    def testWriteBit(self) -> None:
        # Integer and boolean bit values are both accepted
        self.instrument.write_bit(2068, 0)
        self.instrument.write_bit(2068, 1)
        self.instrument.write_bit(2068, True)
        self.instrument.write_bit(2068, False)

    def testReadRegister(self) -> None:
        self.assertAlmostEqual(self.instrument.read_register(4097, 1), 823.6)

    def testWriteRegister(self) -> None:
        self.instrument.write_register(4097, 700.0, 1)
        self.instrument.write_register(4097, 823.6, 1)

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:  # was a bare 'except:'; keep cleanup best-effort
                pass
        del self.instrument
class TestDummyCommunicationOmegaSlave10(ExtendedTestCase):
    """Recorded RTU communication with an Omega controller at slave address 10."""

    def setUp(self) -> None:
        """Route minimalmodbus's serial traffic to the dummy serial port."""
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 10)

    def testReadBit(self) -> None:
        self.assertEqual(self.instrument.read_bit(2068), 1)

    def testWriteBit(self) -> None:
        self.instrument.write_bit(2068, 0)
        self.instrument.write_bit(2068, 1)

    def testReadRegister(self) -> None:
        self.assertAlmostEqual(self.instrument.read_register(4096, 1), 25.0)
        self.assertAlmostEqual(self.instrument.read_register(4097, 1), 325.8)

    def testWriteRegister(self) -> None:
        self.instrument.write_register(4097, 325.8, 1)
        self.instrument.write_register(4097, 20.0, 1)
        self.instrument.write_register(4097, 200.0, 1)

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:  # was a bare 'except:'; keep cleanup best-effort
                pass
        del self.instrument
class TestDummyCommunicationDTB4824_RTU(ExtendedTestCase):
    """Recorded RTU communication with a Delta DTB4824 at slave address 7."""

    def setUp(self) -> None:
        """Route minimalmodbus's serial traffic to the dummy serial port."""
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 7)

    def testReadBit(self) -> None:
        self.assertEqual(self.instrument.read_bit(0x0800), 0)  # LED AT
        self.assertEqual(self.instrument.read_bit(0x0801), 0)  # LED Out1
        self.assertEqual(self.instrument.read_bit(0x0802), 0)  # LED Out2
        self.assertEqual(self.instrument.read_bit(0x0814), 0)  # RUN/STOP

    def testWriteBit(self) -> None:
        self.instrument.write_bit(0x0810, 1)  # "Communication write in enabled".
        self.instrument.write_bit(0x0814, 0)  # STOP
        self.instrument.write_bit(0x0814, 1)  # RUN

    def testReadBits(self) -> None:
        self.assertEqual(
            self.instrument._perform_command(2, "\x08\x10\x00\x09"), "\x02\x07\x00"
        )

    def testReadRegister(self) -> None:
        # Process value (PV)
        self.assertEqual(self.instrument.read_register(0x1000), 64990)
        # Setpoint (SV)
        self.assertAlmostEqual(self.instrument.read_register(0x1001, 1), 80.0)
        # Sensor type
        self.assertEqual(self.instrument.read_register(0x1004), 14)
        # Control method
        self.assertEqual(self.instrument.read_register(0x1005), 1)
        # Heating/cooling selection
        self.assertEqual(self.instrument.read_register(0x1006), 0)
        # Output 1
        self.assertAlmostEqual(self.instrument.read_register(0x1012, 1), 0.0)
        # Output 2
        self.assertAlmostEqual(self.instrument.read_register(0x1013, 1), 0.0)
        # System alarm setting
        self.assertEqual(self.instrument.read_register(0x1023), 0)
        # LED status
        self.assertEqual(self.instrument.read_register(0x102A), 0)
        # Pushbutton status
        self.assertEqual(self.instrument.read_register(0x102B), 15)
        # Firmware version
        self.assertEqual(self.instrument.read_register(0x102F), 400)

    def testReadRegisters(self) -> None:
        # Process value (PV) and setpoint (SV)
        self.assertEqual(self.instrument.read_registers(0x1000, 2), [64990, 350])

    def testWriteRegister(self) -> None:
        # Setpoint of 80.0 degrees
        self.instrument.write_register(0x1001, 0x0320, functioncode=6)
        self.instrument.write_register(0x1001, 25, 1, functioncode=6)  # Setpoint

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:  # was a bare 'except:'; keep cleanup best-effort
                pass
        del self.instrument
class TestDummyCommunicationDTB4824_ASCII(ExtendedTestCase):
    """Simulated ASCII-mode communication, using responses recorded from a
    Delta DTB4824 temperature controller (see ASCII_RESPONSES)."""

    def setUp(self) -> None:
        # Route pyserial to the dummy serial port loaded with the ASCII responses.
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = ASCII_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument(
            "DUMMYPORTNAME", 7, minimalmodbus.MODE_ASCII
        )

    def testReadBit(self) -> None:
        self.assertEqual(self.instrument.read_bit(0x0800), 0)  # LED AT
        self.assertEqual(self.instrument.read_bit(0x0801), 1)  # LED Out1
        self.assertEqual(self.instrument.read_bit(0x0802), 0)  # LED Out2
        self.assertEqual(self.instrument.read_bit(0x0814), 1)  # RUN/STOP

    def testWriteBit(self) -> None:
        self.instrument.write_bit(0x0810, 1)  # "Communication write in enabled".
        self.instrument.write_bit(0x0814, 0)  # STOP
        self.instrument.write_bit(0x0814, 1)  # RUN

    def testReadBits(self) -> None:
        # Read a block of bits via the raw command interface (function code 2).
        self.assertEqual(
            self.instrument._perform_command(2, "\x08\x10\x00\x09"), "\x02\x17\x00"
        )

    def testReadRegister(self) -> None:
        # Process value (PV)
        self.assertEqual(self.instrument.read_register(0x1000), 64990)
        # Setpoint (SV)
        self.assertAlmostEqual(self.instrument.read_register(0x1001, 1), 80.0)
        # Sensor type
        self.assertEqual(self.instrument.read_register(0x1004), 14)
        # Control method
        self.assertEqual(self.instrument.read_register(0x1005), 1)
        # Heating/cooling selection
        self.assertEqual(self.instrument.read_register(0x1006), 0)
        # Output 1
        self.assertAlmostEqual(self.instrument.read_register(0x1012, 1), 100.0)
        # Output 2
        self.assertAlmostEqual(self.instrument.read_register(0x1013, 1), 0.0)
        # System alarm setting
        self.assertEqual(self.instrument.read_register(0x1023), 0)
        # LED status
        self.assertEqual(self.instrument.read_register(0x102A), 64)
        # Pushbutton status
        self.assertEqual(self.instrument.read_register(0x102B), 15)
        # Firmware version
        self.assertEqual(self.instrument.read_register(0x102F), 400)

    def testReadRegisters(self) -> None:
        # Process value (PV) and setpoint (SV)
        self.assertEqual(self.instrument.read_registers(0x1000, 2), [64990, 350])

    def testWriteRegister(self) -> None:
        # Setpoint of 80.0 degrees
        self.instrument.write_register(0x1001, 0x0320, functioncode=6)
        self.instrument.write_register(0x1001, 25, 1, functioncode=6)  # Setpoint

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit are not swallowed during cleanup.
                pass
        del self.instrument
class TestDummyCommunicationWithPortClosure(ExtendedTestCase):
    """Test automatic port closure after each call (mimics a WindowsXP serial port)."""

    def setUp(self) -> None:
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        # Mimic a WindowsXP serial port
        self.instrument = minimalmodbus.Instrument(
            "DUMMYPORTNAME", 1, close_port_after_each_call=True
        )

    def testReadRegisterSeveralTimes(self) -> None:
        # Each call should transparently reopen and close the port.
        self.assertEqual(self.instrument.read_register(289), 770)
        self.assertEqual(self.instrument.read_register(289), 770)
        self.assertEqual(self.instrument.read_register(289), 770)

    def testPortAlreadyClosed(self) -> None:
        self.assertEqual(self.instrument.read_register(289), 770)
        assert self.instrument.serial is not None
        # The port should have been closed automatically after the call above,
        # so closing it again must fail.
        self.assertEqual(self.instrument.serial.is_open, False)
        self.assertRaises(IOError, self.instrument.serial.close)

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit are not swallowed during cleanup.
                pass
        del self.instrument
class TestVerboseDummyCommunicationWithPortClosure(ExtendedTestCase):
    """As TestDummyCommunicationWithPortClosure, but with verbose/debug output."""

    def setUp(self) -> None:
        dummy_serial.VERBOSE = True
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 1, debug=True)
        # Mimic a WindowsXP serial port
        self.instrument.close_port_after_each_call = True

    def testReadRegister(self) -> None:
        self.assertEqual(self.instrument.read_register(289), 770)

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit are not swallowed during cleanup.
                pass
        del self.instrument
class TestDummyCommunicationBroadcast(ExtendedTestCase):
    """Test broadcast mode (slave address 0): writes are allowed, reads are not."""

    def setUp(self) -> None:
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        # Use broadcast (slave address 0)
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 0, debug=True)

    def testWriteRegister(self) -> None:
        assert self.instrument.serial is not None
        self.instrument.serial._clean_mock_data()
        start_time = time.time()
        self.instrument.write_register(24, 50)
        total_time = time.time() - start_time
        # Verify the exact bytes sent on the wire (leading 0x00 = broadcast address).
        self.assertEqual(
            self.instrument.serial._last_written_data,
            b"\x00\x10\x00\x18\x00\x01\x02\x002)\xcd",
        )
        self.assertGreater(total_time, 0.1)  # seconds for broadcast delay

    def testReadingNotAllowed(self) -> None:
        # Reading is meaningless in broadcast mode, so it must be rejected.
        self.assertRaises(ValueError, self.instrument.read_register, 289)

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit are not swallowed during cleanup.
                pass
        del self.instrument
class TestDummyCommunicationThreeInstrumentsPortClosure(ExtendedTestCase):
    """Test three instruments sharing one port, with port closure after each call."""

    def setUp(self) -> None:
        dummy_serial.VERBOSE = False
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrumentA = minimalmodbus.Instrument(
            "DUMMYPORTNAME", 1, close_port_after_each_call=True, debug=True
        )
        assert self.instrumentA.serial is not None
        self.instrumentA.serial.baudrate = 2400
        self.instrumentB = minimalmodbus.Instrument(
            "DUMMYPORTNAME", 1, close_port_after_each_call=True, debug=True
        )
        self.instrumentC = minimalmodbus.Instrument(
            "DUMMYPORTNAME", 7, close_port_after_each_call=True, debug=True
        )

    def testCommunication(self) -> None:
        # Interleave calls from all three instruments to verify the shared
        # port is reopened/closed correctly for each call.
        self.assertEqual(self.instrumentA.read_register(289), 770)
        self.assertEqual(self.instrumentB.read_register(289), 770)
        self.assertEqual(self.instrumentC.read_bit(0x0800), 0)
        self.assertEqual(self.instrumentA.read_register(289), 770)
        self.assertEqual(self.instrumentB.read_register(289), 770)
        self.assertEqual(self.instrumentC.read_bit(0x0800), 0)

    @staticmethod
    def _close_quietly(instrument: "minimalmodbus.Instrument") -> None:
        # Best-effort close of an instrument's serial port during cleanup.
        if instrument.serial is not None:
            try:
                instrument.serial.close()
            except Exception:
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit are not swallowed during cleanup.
                pass

    def tearDown(self) -> None:
        """Close all three dummy serial ports (best effort) and drop the instruments."""
        self._close_quietly(self.instrumentA)
        del self.instrumentA
        self._close_quietly(self.instrumentB)
        del self.instrumentB
        self._close_quietly(self.instrumentC)
        del self.instrumentC
class TestDummyCommunicationHandleLocalEcho(ExtendedTestCase):
    """Test communication when the serial adapter echoes back the sent bytes."""

    def setUp(self) -> None:
        dummy_serial.VERBOSE = True
        dummy_serial.RESPONSES = RTU_RESPONSES
        minimalmodbus.serial.Serial = dummy_serial.Serial  # type: ignore
        self.instrument = minimalmodbus.Instrument("DUMMYPORTNAME", 20, debug=True)
        self.instrument.handle_local_echo = True

    def testReadRegister(self) -> None:
        self.assertEqual(self.instrument.read_register(289), 770)

    def testReadRegisterWrongEcho(self) -> None:
        # The dummy port is set up to return a wrong echo for this register,
        # which must be detected and reported.
        self.assertRaises(
            minimalmodbus.LocalEchoError, self.instrument.read_register, 290
        )

    def tearDown(self) -> None:
        """Close the dummy serial port (best effort) and drop the instrument."""
        if self.instrument.serial is not None:
            try:
                self.instrument.serial.close()
            except Exception:
                # Catch Exception (not bare except) so KeyboardInterrupt and
                # SystemExit are not swallowed during cleanup.
                pass
        del self.instrument
RTU_RESPONSES: Dict[bytes, bytes] = {}
GOOD_RTU_RESPONSES: Dict[bytes, bytes] = {}
WRONG_RTU_RESPONSES: Dict[bytes, bytes] = {}
ASCII_RESPONSES: Dict[bytes, bytes] = {}
GOOD_ASCII_RESPONSES: Dict[bytes, bytes] = {}
WRONG_ASCII_RESPONSES: Dict[bytes, bytes] = {}
"""A dictionary of respones from a dummy instrument.
The key is the message (string) sent to the serial port, and the item is the response (string)
from the dummy serial port.
"""
# Note that the string 'AAAAAAA' might be easier to read if grouped,
# like 'AA' + 'AAAA' + 'A' for the initial part (address etc) + payload + CRC.
# ## READ BIT ##
# Read bit register 61 on slave 1 using function code 2. Also for testing _perform_command() #
# ----------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 2. Register address 61, 1 coil. CRC.
# Response: Slave address 1, function code 2. 1 byte, value=1. CRC.
GOOD_RTU_RESPONSES[b"\x01\x02" + b"\x00\x3d\x00\x01" + b"(\x06"] = (
b"\x01\x02" + b"\x01\x01" + b"`H"
)
# Read bit register 62 on slave 1 using function code 1 #
# ----------------------------------------------------- #
# Message: Slave address 1, function code 1. Register address 62, 1 coil. CRC.
# Response: Slave address 1, function code 1. 1 byte, value=0. CRC.
GOOD_RTU_RESPONSES[b"\x01\x01" + b"\x00\x3e\x00\x01" + b"\x9c\x06"] = (
b"\x01\x01" + b"\x01\x00" + b"Q\x88"
)
# Read bit register 63 on slave 1 using function code 2, slave gives wrong byte count #
# ----------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 2. Register address 63, 1 coil. CRC.
# Response: Slave address 1, function code 2. 2 bytes (wrong), value=1. CRC.
WRONG_RTU_RESPONSES[b"\x01\x02" + b"\x00\x3f\x00\x01" + b"\x89\xc6"] = (
b"\x01\x02" + b"\x02\x01" + b"`\xb8"
)
# Read bit register 64 on slave 1 using function code 2, slave gives no response #
# ------------------------------------------------------------------------------ #
# Message: Slave address 1, function code 2. Register address 64, 1 coil. CRC.
# Response: (empty string)
WRONG_RTU_RESPONSES[b"\x01\x02" + b"\x00\x40\x00\x01" + b"\xb8\x1e"] = b""
# ## WRITE BIT ##
# Write bit=1 register 71 on slave 1 using function code 5 #
# -------------------------------------------------------- #
# Message: Slave address 1, function code 5. Register address 71, value 1 (FF00). CRC.
# Response: Slave address 1, function code 5. Register address 71, value 1 (FF00). CRC.
GOOD_RTU_RESPONSES[b"\x01\x05" + b"\x00\x47\xff\x00" + b"</"] = (
b"\x01\x05" + b"\x00\x47\xff\x00" + b"</"
)
# Write bit=0 register 71 on slave 1 using function code 5 #
# -------------------------------------------------------- #
# Message: Slave address 1, function code 5. Register address 71, value 0 (0000). CRC.
# Response: Slave address 1, function code 5. Register address 71, value 0 (0000). CRC.
GOOD_RTU_RESPONSES[b"\x01\x05" + b"\x00\x47\x00\x00" + b"}\xDF"] = (
b"\x01\x05" + b"\x00\x47\x00\x00" + b"}\xDF"
)
# Write bit register 72 on slave 1 using function code 15 #
# ------------------------------------------------------ #
# Message: Slave address 1, function code 15. Register address 72, 1 bit, 1 byte, value 1 (0100). CRC.
# Response: Slave address 1, function code 15. Register address 72, 1 bit. CRC.
GOOD_RTU_RESPONSES[b"\x01\x0f" + b"\x00\x48\x00\x01\x01\x01" + b"\x0fY"] = (
b"\x01\x0f" + b"\x00\x48\x00\x01" + b"\x14\x1d"
)
# Write bit register 73 on slave 1 using function code 15, slave gives wrong number of registers #
# ---------------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 15. Register address 73, 1 bit, 1 byte, value 1 (0100). CRC.
# Response: Slave address 1, function code 15. Register address 73, 2 bits (wrong). CRC.
WRONG_RTU_RESPONSES[b"\x01\x0f" + b"\x00\x49\x00\x01\x01\x01" + b"2\x99"] = (
b"\x01\x0f" + b"\x00\x49\x00\x02" + b"\x05\xdc"
)
# Write bit register 74 on slave 1 using function code 5, slave gives wrong write data #
# ------------------------------------------------------------------------------------ #
# Message: Slave address 1, function code 5. Register address 74, value 1 (FF00). CRC.
# Response: Slave address 1, function code 5. Register address 74, value 0 (0000, wrong). CRC.
WRONG_RTU_RESPONSES[b"\x01\x05" + b"\x00\x4a\xff\x00" + b"\xad\xec"] = (
b"\x01\x05" + b"\x00\x47\x00\x00" + b"}\xdf"
)
# ## READ BITS ##
# Read 19 bits starting at address 19 on slave 1 using function code 1.
# Also for testing _perform_command()
# Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
# ----------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 1. Register address 19, 19 coils. CRC.
# Response: Slave address 1, function code 1. 3 bytes, values. CRC.
GOOD_RTU_RESPONSES[b"\x01\x01" + b"\x00\x13\x00\x13" + b"\x8c\x02"] = (
b"\x01\x01" + b"\x03\xCD\x6B\x05" + b"B\x82"
)
# Read 22 bits starting at address 196 on slave 1 using function code 2.
# Also for testing _perform_command()
# Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
# ----------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 2. Register address 196, 22 coils. CRC.
# Response: Slave address 1, function code 2. 3 bytes, values. CRC.
GOOD_RTU_RESPONSES[b"\x01\x02" + b"\x00\xC4\x00\x16" + b"\xB89"] = (
b"\x01\x02" + b"\x03\xAC\xDB\x35" + b'"\x88'
)
# Read 16 bits starting at address 0x800 on slave 1 using function code 2.
# Recorded on Delta DTB4824
# ----------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 2. Register address 0x800, 16 coils. CRC.
# Response: Slave address 1, function code 2. 2 bytes, values. CRC.
GOOD_RTU_RESPONSES[b"\x01\x02" + b"\x08\x00\x00\x10" + b"\x7B\xA6"] = (
b"\x01\x02" + b"\x02\x20\x0f" + b"\xE0\x7C"
)
# ## WRITE BITS ##
# Write 10 bits starting at address 19 on slave 1 using function code 15.
# Also for testing _perform_command()
# Example from MODBUS APPLICATION PROTOCOL SPECIFICATION V1.1b
# ----------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 15. Address 19, 10 coils, 2 bytes, values. CRC.
# Response: Slave address 1, function code 15. Address 19, 10 coils. CRC.
GOOD_RTU_RESPONSES[b"\x01\x0f" + b"\x00\x13\x00\x0A\x02\xCD\x01" + b"\x72\xCB"] = (
b"\x01\x0f" + b"\x00\x13\x00\x0A" + b"$\t"
)
# ## READ REGISTER ##
# Read register 289 on slave 1 using function code 3 #
# ---------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 289, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes, value=770. CRC=14709.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x01!\x00\x01" + b"\xd5\xfc"] = (
b"\x01\x03" + b"\x02\x03\x02" + b"\x39\x75"
)
# Read register 5 on slave 1 using function code 3 #
# ---------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 5, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes, value=184. CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\x05\x00\x01" + b"\x94\x0b"] = (
b"\x01\x03" + b"\x02\x00\xb8" + b"\xb86"
)
# Read register 14 on slave 1 using function code 4 #
# --------------------------------------------------#
# Message: Slave address 1, function code 4. Register address 14, 1 register. CRC.
# Response: Slave address 1, function code 4. 2 bytes, value=880. CRC.
GOOD_RTU_RESPONSES[b"\x01\x04" + b"\x00\x0e\x00\x01" + b"P\t"] = (
b"\x01\x04" + b"\x02\x03\x70" + b"\xb8$"
)
# Read register 101 on slave 1 using function code 3 #
# ---------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 101, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes, value=-5 or 65531 (depending on interpretation). CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00e\x00\x01" + b"\x94\x15"] = (
b"\x01\x03" + b"\x02\xff\xfb" + b"\xb87"
)
# Read register 201 on slave 1 using function code 3 #
# ---------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 201, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes, value=9. CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xc9\x00\x01" + b"T4"] = (
b"\x01\x03" + b"\x02\x00\x09" + b"xB"
)
# Read register 202 on slave 1 using function code 3. Too long response #
# ----------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 202, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes (wrong!), value=9. CRC
WRONG_RTU_RESPONSES[b"\x01\x03" + b"\x00\xca\x00\x01" + b"\xa44"] = (
b"\x01\x03" + b"\x02\x00\x00\x09" + b"\x84t"
)
# Read register 203 on slave 1 using function code 3. Too short response #
# ----------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 203, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes (wrong!), value=9. CRC
WRONG_RTU_RESPONSES[b"\x01\x03" + b"\x00\xcb\x00\x01" + b"\xf5\xf4"] = (
b"\x01\x03" + b"\x02\x09" + b"0\xbe"
)
# ## WRITE REGISTER ##
# Write value 50 in register 24 on slave 1 using function code 16 #
# ----------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 24, 1 register, 2 bytes, value=50. CRC.
# Response: Slave address 1, function code 16. Register address 24, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\x18\x00\x01\x02\x002" + b"$]"] = (
b"\x01\x10" + b"\x00\x18\x00\x01" + b"\x81\xce"
)
# Write value 20 in register 35 on slave 1 using function code 16 #
# ----------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 35, 1 register, 2 bytes, value=20. CRC.
# Response: Slave address 1, function code 16. Register address 35, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00#\x00\x01" + b"\x02\x00\x14" + b"\xa1\x0c"] = (
b"\x01\x10" + b"\x00#\x00\x01" + b"\xf0\x03"
)
# Write value 88 in register 45 on slave 1 using function code 6 #
# ---------------------------------------------------------------#
# Message: Slave address 1, function code 6. Register address 45, value=88. CRC.
# Response: Slave address 1, function code 6. Register address 45, value=88. CRC.
GOOD_RTU_RESPONSES[b"\x01\x06" + b"\x00\x2d\x00\x58" + b"\x189"] = (
b"\x01\x06" + b"\x00\x2d\x00\x58" + b"\x189"
)
# Write value 5 in register 101 on slave 1 using function code 16 #
# ----------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 101, 1 register, 2 bytes, value=5. CRC.
# Response: Slave address 1, function code 16. Register address 101, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00e\x00\x01\x02\x00\x05" + b"o\xa6"] = (
b"\x01\x10" + b"\x00e\x00\x01" + b"\x11\xd6"
)
# Write value 50 in register 101 on slave 1 using function code 16 #
# ----------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 101, 1 register, 2 bytes, value=50. CRC.
# Response: Slave address 1, function code 16. Register address 101, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00e\x00\x01\x02\x002" + b".p"] = (
b"\x01\x10" + b"\x00e\x00\x01" + b"\x11\xd6"
)
# Write value -5 in register 101 on slave 1 using function code 16 #
# ----------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 101, 1 register, 2 bytes, value=-5. CRC.
# Response: Slave address 1, function code 16. Register address 101, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00e\x00\x01\x02\xff\xfb" + b"\xaf\xd6"] = (
b"\x01\x10" + b"\x00e\x00\x01" + b"\x11\xd6"
)
# Write value -50 in register 101 on slave 1 using function code 16 #
# ----------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 101, 1 register, 2 bytes, value=-50. CRC.
# Response: Slave address 1, function code 16. Register address 101, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00e\x00\x01\x02\xff\xce" + b"o\xc1"] = (
b"\x01\x10" + b"\x00e\x00\x01" + b"\x11\xd6"
)
# Write value 99 in register 51 on slave 1 using function code 16, slave gives wrong CRC #
# ---------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 51, 1 register, 2 bytes, value=99. CRC.
# Response: Slave address 1, function code 16. Register address 51, 1 register. Wrong CRC.
WRONG_RTU_RESPONSES[
b"\x01\x10" + b"\x00\x33\x00\x01" + b"\x02\x00\x63" + b"\xe3\xba"
] = (b"\x01\x10" + b"\x00\x33\x00\x01" + b"AB")
# Write value 99 in register 52 on slave 1 using function code 16, slave gives wrong number of registers #
# -------------------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 52, 1 register, 2 bytes, value=99. CRC.
# Response: Slave address 1, function code 16. Register address 52, 2 registers (wrong). CRC.
WRONG_RTU_RESPONSES[b"\x01\x10" + b"\x00\x34\x00\x01" + b"\x02\x00\x63" + b"\xe2\r"] = (
b"\x01\x10" + b"\x00\x34\x00\x02" + b"\x00\x06"
)
# Write value 99 in register 53 on slave 1 using function code 16, slave gives wrong register address #
# ----------------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 53, 1 register, 2 bytes, value=99. CRC.
# Response: Slave address 1, function code 16. Register address 54 (wrong), 1 register. CRC.
WRONG_RTU_RESPONSES[
b"\x01\x10" + b"\x00\x35\x00\x01" + b"\x02\x00\x63" + b"\xe3\xdc"
] = (b"\x01\x10" + b"\x00\x36\x00\x01" + b"\xe1\xc7")
# Write value 99 in register 54 on slave 1 using function code 16, slave gives wrong slave address #
# ------------------------------------------------------------------------------------------------ #
# Message: Slave address 1, function code 16. Register address 54, 1 register, 2 bytes, value=99. CRC.
# Response: Slave address 2 (wrong), function code 16. Register address 54, 1 register. CRC.
GOOD_RTU_RESPONSES[
b"\x01\x10" + b"\x00\x36\x00\x01" + b"\x02\x00\x63" + b"\xe3\xef"
] = (b"\x02\x10" + b"\x00\x36\x00\x01" + b"\xe1\xf4")
# Write value 99 in register 55 on slave 1 using function code 16, slave gives wrong functioncode #
# ----------------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 16. Register address 55, 1 register, 2 bytes, value=99. CRC.
# Response: Slave address 1, function code 6 (wrong). Register address 55, 1 register. CRC.
WRONG_RTU_RESPONSES[b"\x01\x10" + b"\x00\x37\x00\x01" + b"\x02\x00\x63" + b"\xe2>"] = (
b"\x01\x06" + b"\x00\x37\x00\x01" + b"\xf9\xc4"
)
# Write value 99 in register 56 on slave 1 using function code 16, slave gives wrong functioncode (indicates an error) #
# -------------------------------------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 16. Register address 56, 1 register, 2 bytes, value=99. CRC.
# Response: Slave address 1, function code 144 (wrong). Register address 56, 1 register. CRC.
WRONG_RTU_RESPONSES[
b"\x01\x10" + b"\x00\x38\x00\x01" + b"\x02\x00\x63" + b"\xe2\xc1"
] = (b"\x01\x90" + b"\x00\x38\x00\x01" + b"\x81\xda")
# Write value 99 in register 55 on slave 1 using function code 6, slave gives wrong write data #
# -------------------------------------------------------------------------------------------- #
# Message: Slave address 1, function code 6. Register address 55, value=99. CRC.
# Response: Slave address 1, function code 6. Register address 55, value=98 (wrong). CRC.
WRONG_RTU_RESPONSES[b"\x01\x06" + b"\x00\x37\x00\x63" + b"x-"] = (
b"\x01\x06" + b"\x00\x37\x00\x62" + b"\xb9\xed"
)
# ## READ LONG ##
# Read long (2 registers, starting at 102) on slave 1 using function code 3 #
# --------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 102, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, value=-1 or 4294967295 (depending on interpretation). CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00f\x00\x02" + b"$\x14"] = (
b"\x01\x03" + b"\x04\xff\xff\xff\xff" + b"\xfb\xa7"
)
# Read long (2 registers, starting at 223) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_BIG
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 223, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, Value 2923517522. CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xDF\x00\x02" + b"\xF5\xF1"] = (
b"\x01\x03" + b"\x04\xAEAVR" + b"4\x92"
)
# Read long (2 registers, starting at 224) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_BIG_SWAP
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 224, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, Value 2923517522. CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xE0\x00\x02" + b"\xC5\xFD"] = (
b"\x01\x03" + b"\x04A\xAERV" + b"2\xB0"
)
# Read long (2 registers, starting at 225) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_LITTLE_SWAP
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 225, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, Value 2923517522. CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xE1\x00\x02" + b"\x94="] = (
b"\x01\x03" + b"\x04VR\xAEA" + b"\xF6:"
)
# Read long (2 registers, starting at 226) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_LITTLE
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 226, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, Value 2923517522. CRC
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xE2\x00\x02" + b"\x64\x3D"] = (
b"\x01\x03" + b"\x04RVA\xAE" + b"\xBBw"
)
# ## WRITE LONG ##
# Write long (2 registers, starting at 102) on slave 1 using function code 16, with value 5. #
# -------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 102, 2 registers, 4 bytes, value=5. CRC.
# Response: Slave address 1, function code 16. Register address 102, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00f\x00\x02\x04\x00\x00\x00\x05" + b"\xb5\xae"] = (
b"\x01\x10" + b"\x00f\x00\x02" + b"\xa1\xd7"
)
# Write long (2 registers, starting at 102) on slave 1 using function code 16, with value -5. #
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 102, 2 registers, 4 bytes, value=-5. CRC.
# Response: Slave address 1, function code 16. Register address 102, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00f\x00\x02\x04\xff\xff\xff\xfb" + b"u\xfa"] = (
b"\x01\x10" + b"\x00f\x00\x02" + b"\xa1\xd7"
)
# Write long (2 registers, starting at 102) on slave 1 using function code 16, with value 3. #
# -------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 102, 2 registers, 4 bytes, value=3. CRC.
# Response: Slave address 1, function code 16. Register address 102, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00f\x00\x02\x04\x00\x00\x00\x03" + b"5\xac"] = (
b"\x01\x10" + b"\x00f\x00\x02" + b"\xa1\xd7"
)
# Write long (2 registers, starting at 102) on slave 1 using function code 16, with value -3. #
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 102, 2 registers, 4 bytes, value=-3. CRC.
# Response: Slave address 1, function code 16. Register address 102, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00f\x00\x02\x04\xff\xff\xff\xfd" + b"\xf5\xf8"] = (
b"\x01\x10" + b"\x00f\x00\x02" + b"\xa1\xd7"
)
# Write long (2 registers, starting at 222) on slave 1 using function code 16, with value 2923517522 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_BIG
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 222, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 222, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xDE\x00\x02\x04\xAEAVR" + b"\xB1\xDE"] = (
b"\x01\x10" + b"\x00\xDE\x00\x02" + b"\x21\xF2"
)
# Write long (2 registers, starting at 222) on slave 1 using function code 16, with value 2923517522 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_LITTLE
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 222, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 222, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xDE\x00\x02\x04RVA\xAE" + b"\x3E\x3B"] = (
b"\x01\x10" + b"\x00\xDE\x00\x02" + b"\x21\xF2"
)
# Write long (2 registers, starting at 222) on slave 1 using function code 16, with value 2923517522 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_BIG_SWAP
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 222, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 222, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xDE\x00\x02\x04A\xAERV" + b"\xB7\xFC"] = (
b"\x01\x10" + b"\x00\xDE\x00\x02" + b"\x21\xF2"
)
# Write long (2 registers, starting at 222) on slave 1 using function code 16, with value 2923517522 #
# Example from https://www.simplymodbus.ca/FAQ.htm
# Byte order BYTEORDER_LITTLE_SWAP
# The GOOD_RTU_RESPONSES dict maps a raw RTU request frame (as bytes) to the
# canned reply that the dummy serial port returns for it in the tests.
# --------------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 222, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 222, 2 registers. CRC
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xDE\x00\x02\x04VR\xAEA" + b"sv"] = (
    b"\x01\x10" + b"\x00\xDE\x00\x02" + b"\x21\xF2"
)
# ## READ FLOAT ##
# Read float from address 103 (2 registers) on slave 1 using function code 3 #
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 103, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, value=1.0. CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00g\x00\x02" + b"u\xd4"] = (
    b"\x01\x03" + b"\x04\x3f\x80\x00\x00" + b"\xf7\xcf"
)
# Read float from address 103 (2 registers) on slave 1 using function code 4 #
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 4. Register address 103, 2 registers. CRC.
# Response: Slave address 1, function code 4. 4 bytes, value=3.65e30. CRC.
GOOD_RTU_RESPONSES[b"\x01\x04" + b"\x00g\x00\x02" + b"\xc0\x14"] = (
    b"\x01\x04" + b"\x04\x72\x38\x47\x25" + b"\x93\x1a"
)
# Read float from address 103 (4 registers) on slave 1 using function code 3 #
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 103, 4 registers. CRC.
# Response: Slave address 1, function code 3. 8 bytes, value=-2.0 CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00g\x00\x04" + b"\xf5\xd6"] = (
    b"\x01\x03" + b"\x08\xc0\x00\x00\x00\x00\x00\x00\x00" + b"\x99\x87"
)
# Read float from address 241 (2 registers) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page)
# BYTEORDER_BIG
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 241, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, value=-4.3959787e-11 CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xF1\x00\x02" + b"\x95\xF8"] = (
    b"\x01\x03" + b"\x04\xAEAVR" + b"4\x92"
)
# Read float from address 242 (2 registers) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page, manually reshuffled)
# BYTEORDER_BIG_SWAP
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 242, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, value=-4.3959787e-11 CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xF2\x00\x02" + b"\x65\xF8"] = (
    b"\x01\x03" + b"\x04A\xAERV" + b"2\xB0"
)
# Read float from address 243 (2 registers) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page, manually reshuffled)
# BYTEORDER_LITTLE_SWAP
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 243, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, value=-4.3959787e-11 CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xF3\x00\x02" + b"\x34\x38"] = (
    b"\x01\x03" + b"\x04VR\xAEA" + b"\xf6:"
)
# Read float from address 244 (2 registers) on slave 1 using function code 3 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page, manually reshuffled)
# BYTEORDER_LITTLE
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 244, 2 registers. CRC.
# Response: Slave address 1, function code 3. 4 bytes, value=-4.3959787e-11 CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00\xF4\x00\x02" + b"\x85\xF9"] = (
    b"\x01\x03" + b"\x04RVA\xAE" + b"\xBBw"
)
# ## WRITE FLOAT ##
# Write float 1.1 to address 103 (2 registers) on slave 1 using function code 16 #
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 103, 2 registers, 4 bytes, value=1.1 . CRC.
# Response: Slave address 1, function code 16. Register address 103, 2 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00g\x00\x02\x04?\x8c\xcc\xcd" + b"\xed\x0b"] = (
    b"\x01\x10" + b"\x00g\x00\x02" + b"\xf0\x17"
)
# Write float 1.1 to address 103 (4 registers) on slave 1 using function code 16 #
# (The original file contained this exact entry twice; the duplicate was removed.)
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 103, 4 registers, 8 bytes, value=1.1 . CRC.
# Response: Slave address 1, function code 16. Register address 103, 4 registers. CRC.
GOOD_RTU_RESPONSES[
    b"\x01\x10" + b"\x00g\x00\x04\x08?\xf1\x99\x99\x99\x99\x99\x9a" + b"u\xf7"
] = (b"\x01\x10" + b"\x00g\x00\x04" + b"p\x15")
# Write float -4.3959787e-11 to address 240 (2 registers) on slave 1 using function code 16 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page)
# BYTEORDER_BIG
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 240, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 240, 2 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xF0\x00\x02\x04\xAEAVR" + b"2J"] = (
    b"\x01\x10" + b"\x00\xF0\x00\x02" + b"A\xFB"
)
# Write float -4.3959787e-11 to address 240 (2 registers) on slave 1 using function code 16 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page, manually reshuffled)
# BYTEORDER_LITTLE
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 240, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 240, 2 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xF0\x00\x02\x04RVA\xAE" + b"\xBD\xAF"] = (
    b"\x01\x10" + b"\x00\xF0\x00\x02" + b"A\xFB"
)
# Write float -4.3959787e-11 to address 240 (2 registers) on slave 1 using function code 16 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page, manually reshuffled)
# BYTEORDER_LITTLE_SWAP
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 240, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 240, 2 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xF0\x00\x02\x04VR\xAEA" + b"\xF0\xE2"] = (
    b"\x01\x10" + b"\x00\xF0\x00\x02" + b"A\xFB"
)
# Write float -4.3959787e-11 to address 240 (2 registers) on slave 1 using function code 16 #
# Example from https://www.simplymodbus.ca/FAQ.htm (truncated float on page, manually reshuffled)
# BYTEORDER_BIG_SWAP
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 240, 2 registers, 4 bytes, value. CRC.
# Response: Slave address 1, function code 16. Register address 240, 2 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00\xF0\x00\x02\x04A\xAERV" + b"4h"] = (
    b"\x01\x10" + b"\x00\xF0\x00\x02" + b"A\xFB"
)
# ## READ STRING ##
# Read string from address 104 (1 register) on slave 1 using function code 3 #
# ---------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 104, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes, value = 'AB'. CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00h\x00\x01" + b"\x05\xd6"] = (
b"\x01\x03" + b"\x02AB" + b"\x08%"
)
# Read string from address 104 (4 registers) on slave 1 using function code 3 #
# ----------------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 104, 4 registers. CRC.
# Response: Slave address 1, function code 3. 8 bytes, value = 'ABCDEFGH'. CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00h\x00\x04" + b"\xc5\xd5"] = (
b"\x01\x03" + b"\x08ABCDEFGH" + b"\x0b\xcc"
)
# ## WRITE STRING ##
# Write string 'A' to address 104 (1 register) on slave 1 using function code 16 #
# -------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 104, 1 register, 2 bytes, value='A ' . CRC.
# Response: Slave address 1, function code 16. Register address 104, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00h\x00\x01\x02A " + b"\x9f0"] = (
b"\x01\x10" + b"\x00h\x00\x01" + b"\x80\x15"
)
# Write string 'A' to address 104 (4 registers) on slave 1 using function code 16 #
# --------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 104, 4 registers, 8 bytes, value='A ' . CRC.
# Response: Slave address 1, function code 16. Register address 104, 2 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00h\x00\x04\x08A " + b"\xa7\xae"] = (
b"\x01\x10" + b"\x00h\x00\x04" + b"@\x16"
)
# Write string 'ABCDEFGH' to address 104 (4 registers) on slave 1 using function code 16 #
# ---------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 104, 4 registers, 8 bytes, value='ABCDEFGH' . CRC.
# Response: Slave address 1, function code 16. Register address 104, 4 registers. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00h\x00\x04\x08ABCDEFGH" + b"I>"] = (
b"\x01\x10" + b"\x00h\x00\x04" + b"@\x16"
)
# ## READ REGISTERS ##
# Read from address 105 (1 register) on slave 1 using function code 3 #
# --------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 105, 1 register. CRC.
# Response: Slave address 1, function code 3. 2 bytes, value = 16. CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00i\x00\x01" + b"T\x16"] = (
b"\x01\x03" + b"\x02\x00\x10" + b"\xb9\x88"
)
# Read from address 105 (3 registers) on slave 1 using function code 3 #
# ---------------------------------------------------------------------#
# Message: Slave address 1, function code 3. Register address 105, 3 registers. CRC.
# Response: Slave address 1, function code 3. 6 bytes, value = 16, 32, 64. CRC.
GOOD_RTU_RESPONSES[b"\x01\x03" + b"\x00i\x00\x03" + b"\xd5\xd7"] = (
b"\x01\x03" + b"\x06\x00\x10\x00\x20\x00\x40" + b"\xe0\x8c"
)
# ## WRITE REGISTERS ##
# Write value [2] to address 105 (1 register) on slave 1 using function code 16 #
# ------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 105, 1 register, 2 bytes, value=2 . CRC.
# Response: Slave address 1, function code 16. Register address 105, 1 register. CRC.
GOOD_RTU_RESPONSES[b"\x01\x10" + b"\x00i\x00\x01\x02\x00\x02" + b".\xa8"] = (
    b"\x01\x10" + b"\x00i\x00\x01" + b"\xd1\xd5"
)
# Write value [2, 4, 8] to address 105 (3 registers) on slave 1 using function code 16 #
# -------------------------------------------------------------------------------------#
# Message: Slave address 1, function code 16. Register address 105, 3 registers, 6 bytes, value=2, 4, 8. CRC.
# Response: Slave address 1, function code 16. Register address 105, 3 registers. CRC.
GOOD_RTU_RESPONSES[
    b"\x01\x10" + b"\x00i\x00\x03\x06\x00\x02\x00\x04\x00\x08" + b"\x0c\xd6"
] = (b"\x01\x10" + b"\x00i\x00\x03" + b"P\x14")
# ## OTHER RESPONSES ##
# The WRONG_RTU_RESPONSES dict holds deliberately malformed replies used to
# exercise the error paths of _communicate and _perform_command.
# Retrieve an empty response (for testing the _communicate method) #
# ---------------------------------------------------------------- #
WRONG_RTU_RESPONSES[b"MessageForEmptyResponse"] = b""
# Retrieve an known response (for testing the _communicate method) #
# ---------------------------------------------------------------- #
WRONG_RTU_RESPONSES[b"TESTMESSAGE"] = b"TESTRESPONSE"
# Retrieve an known response with local echo (for testing the _communicate method) #
# ---------------------------------------------------------------- #
WRONG_RTU_RESPONSES[b"TESTMESSAGE2"] = b"TESTMESSAGE2TESTRESPONSE2"
# Retrieve a response with wrong local echo (for testing the _communicate method) #
# ---------------------------------------------------------------- #
WRONG_RTU_RESPONSES[b"TESTMESSAGE3"] = b"TESTMeSSAGE3TESTRESPONSE3"
# Retrieve an known response (for testing the _perform_command method) #
# ---------------------------------------------------------------- #
WRONG_RTU_RESPONSES[
    b"\x01\x10TESTCOMMAND\x08B"
] = b"\x01\x10TRspU<"  # Response should be 8 bytes
WRONG_RTU_RESPONSES[
    b"\x01\x4bTESTCOMMAND2\x18\xc8"
] = b"\x01\x4bTESTCOMMANDRESPONSE2K\x8c"
WRONG_RTU_RESPONSES[
    b"\x01\x01TESTCOMMAND4~"
] = b"\x02\x01TESTCOMMANDRESPONSEx]"  # Wrong slave address in response
WRONG_RTU_RESPONSES[
    b"\x01\x02TESTCOMMAND0z"
] = b"\x01\x03TESTCOMMANDRESPONSE2\x8c"  # Wrong function code in response
WRONG_RTU_RESPONSES[
    b"\x01\x03TESTCOMMAND\xcd\xb9"
] = b"\x01\x03TESTCOMMANDRESPONSEab"  # Wrong CRC in response
WRONG_RTU_RESPONSES[b"\x01\x04TESTCOMMAND8r"] = b"A"  # Too short response message
WRONG_RTU_RESPONSES[
    b"\x01\x05TESTCOMMAND\xc5\xb1"
] = b"\x01\x85TESTCOMMANDRESPONSE\xa54"  # Error indication from slave
# Handle local echo: Read register 289 on slave 20 using function code 3 #
# ---------------------------------------------------------------------- #
# Message: Slave address 20, function code 3. Register address 289, 1 register. CRC.
# Response: Echo. Slave address 20, function code 3. 2 bytes, value=770. CRC.
WRONG_RTU_RESPONSES[b"\x14\x03" + b"\x01!\x00\x01" + b"\xd79"] = (
    (b"\x14\x03" + b"\x01!\x00\x01" + b"\xd79")
    + b"\x14\x03"
    + b"\x02\x03\x02"
    + b"4\xb6"
)
# Handle local echo: Read register 290 on slave 20 using function code 3. Wrong echo #
# ---------------------------------------------------------------------------------- #
# Message: Slave address 20, function code 3. Register address 290, 1 register. CRC.
# Response: Wrong echo. Slave address 20, function code 3. 2 bytes, value=770. CRC.
WRONG_RTU_RESPONSES[b"\x14\x03" + b"\x01\x22\x00\x01" + b"\x27\x39"] = (
    (b"\x14\x03" + b"\x01\x22\x00\x02" + b"\x27\x39")
    + b"\x14\x03"
    + b"\x02\x03\x02"
    + b"4\xb6"
)
## Recorded data from OmegaCN7500 ##
####################################
# Traffic captured against a real OmegaCN7500 instrument; request frame -> reply.
# (Sorted by slave address, register address)
# Slave address 1, read_bit(2068) Response value 1.
GOOD_RTU_RESPONSES[b"\x01\x02\x08\x14\x00\x01\xfb\xae"] = b"\x01\x02\x01\x01`H"
# Slave address 1, write_bit(2068, 0)
GOOD_RTU_RESPONSES[
    b"\x01\x05\x08\x14\x00\x00\x8f\xae"
] = b"\x01\x05\x08\x14\x00\x00\x8f\xae"
# Slave address 1, write_bit(2068, 1)
GOOD_RTU_RESPONSES[b"\x01\x05\x08\x14\xff\x00\xce^"] = b"\x01\x05\x08\x14\xff\x00\xce^"
# Slave address 1, read_register(4097, 1) Response value 823.6
GOOD_RTU_RESPONSES[b"\x01\x03\x10\x01\x00\x01\xd1\n"] = b"\x01\x03\x02 ,\xa0Y"
# Slave address 1, write_register(4097, 700.0, 1)
GOOD_RTU_RESPONSES[
    b"\x01\x10\x10\x01\x00\x01\x02\x1bX\xbdJ"
] = b"\x01\x10\x10\x01\x00\x01T\xc9"
# Slave address 1, write_register(4097, 823.6, 1)
GOOD_RTU_RESPONSES[
    b"\x01\x10\x10\x01\x00\x01\x02 ,\xae]"
] = b"\x01\x10\x10\x01\x00\x01T\xc9"
# Slave address 10, read_bit(2068) Response value 1
GOOD_RTU_RESPONSES[b"\n\x02\x08\x14\x00\x01\xfa\xd5"] = b"\n\x02\x01\x01bl"
# Slave address 10, write_bit(2068, 0)
GOOD_RTU_RESPONSES[
    b"\n\x05\x08\x14\x00\x00\x8e\xd5"
] = b"\n\x05\x08\x14\x00\x00\x8e\xd5"
# Slave address 10, write_bit(2068, 1)
GOOD_RTU_RESPONSES[b"\n\x05\x08\x14\xff\x00\xcf%"] = b"\n\x05\x08\x14\xff\x00\xcf%"
# Slave address 10, read_register(4096, 1) Response value 25.0
GOOD_RTU_RESPONSES[b"\n\x03\x10\x00\x00\x01\x81\xb1"] = b"\n\x03\x02\x00\xfa\x9d\xc6"
# Slave address 10, read_register(4097, 1) Response value 325.8
GOOD_RTU_RESPONSES[b"\n\x03\x10\x01\x00\x01\xd0q"] = b"\n\x03\x02\x0c\xba\x996"
# Slave address 10, write_register(4097, 325.8, 1)
GOOD_RTU_RESPONSES[
    b"\n\x10\x10\x01\x00\x01\x02\x0c\xbaA\xc3"
] = b"\n\x10\x10\x01\x00\x01U\xb2"
# Slave address 10, write_register(4097, 20.0, 1)
GOOD_RTU_RESPONSES[
    b"\n\x10\x10\x01\x00\x01\x02\x00\xc8\xc4\xe6"
] = b"\n\x10\x10\x01\x00\x01U\xb2"
# Slave address 10, write_register(4097, 200.0, 1)
GOOD_RTU_RESPONSES[
    b"\n\x10\x10\x01\x00\x01\x02\x07\xd0\xc6\xdc"
] = b"\n\x10\x10\x01\x00\x01U\xb2"
## Recorded RTU data from Delta DTB4824 ##
##########################################
# Traffic captured against a real Delta DTB4824 controller in RTU mode.
# (Sorted by register number)
# Slave address 7, read_bit(0x0800). This is LED AT.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x02\x08\x00\x00\x01\xbb\xcc"] = b"\x07\x02\x01\x00\xa1\x00"
# Slave address 7, read_bit(0x0801). This is LED Out1.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x02\x08\x01\x00\x01\xea\x0c"] = b"\x07\x02\x01\x00\xa1\x00"
# Slave address 7, read_bit(0x0802). This is LED Out2.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x02\x08\x02\x00\x01\x1a\x0c"] = b"\x07\x02\x01\x00\xa1\x00"
# Slave address 7, write_bit(0x0810, 1) This is "Communication write in enabled".
GOOD_RTU_RESPONSES[
    b"\x07\x05\x08\x10\xff\x00\x8f\xf9"
] = b"\x07\x05\x08\x10\xff\x00\x8f\xf9"
# Slave address 7, _perform_command(2, '\x08\x10\x00\x09'). This is reading 9 bits starting at 0x0810.
# Response value '\x02\x07\x00'
GOOD_RTU_RESPONSES[b"\x07\x02\x08\x10\x00\t\xbb\xcf"] = b"\x07\x02\x02\x07\x003\x88"
# Slave address 7, read_bit(0x0814). This is RUN/STOP setting.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x02\x08\x14\x00\x01\xfb\xc8"] = b"\x07\x02\x01\x00\xa1\x00"
# Slave address 7, write_bit(0x0814, 0). This is STOP.
GOOD_RTU_RESPONSES[
    b"\x07\x05\x08\x14\x00\x00\x8f\xc8"
] = b"\x07\x05\x08\x14\x00\x00\x8f\xc8"
# Slave address 7, write_bit(0x0814, 1). This is RUN.
GOOD_RTU_RESPONSES[b"\x07\x05\x08\x14\xff\x00\xce8"] = b"\x07\x05\x08\x14\xff\x00\xce8"
# Slave address 7, read_registers(0x1000, 2). This is process value (PV) and setpoint (SV).
# Response value [64990, 350]
GOOD_RTU_RESPONSES[
    b"\x07\x03\x10\x00\x00\x02\xc0\xad"
] = b"\x07\x03\x04\xfd\xde\x01^M\xcd"
# Slave address 7, read_register(0x1000). This is process value (PV).
# Response value 64990
GOOD_RTU_RESPONSES[
    b"\x07\x03\x10\x00\x00\x01\x80\xac"
] = b"\x07\x03\x02\xfd\xde\xf0\x8c"
# Slave address 7, read_register(0x1001, 1). This is setpoint (SV).
# Response value 80.0
GOOD_RTU_RESPONSES[b"\x07\x03\x10\x01\x00\x01\xd1l"] = b"\x07\x03\x02\x03 1l"
# Slave address 7, write_register(0x1001, 25, 1, functioncode=6)
GOOD_RTU_RESPONSES[
    b"\x07\x06\x10\x01\x00\xfa\\\xef"
] = b"\x07\x06\x10\x01\x00\xfa\\\xef"
# Slave address 7, write_register(0x1001, 0x0320, functioncode=6) # Write value 800 to register 0x1001.
# This is a setpoint of 80.0 degrees (Centigrades, dependent on setting).
GOOD_RTU_RESPONSES[b"\x07\x06\x10\x01\x03 \xdd\x84"] = b"\x07\x06\x10\x01\x03 \xdd\x84"
# Slave address 7, read_register(0x1004). This is sensor type.
# Response value 14
GOOD_RTU_RESPONSES[b"\x07\x03\x10\x04\x00\x01\xc1m"] = b"\x07\x03\x02\x00\x0e\xb1\x80"
# Slave address 7, read_register(0x1005) This is control method.
# Response value 1
GOOD_RTU_RESPONSES[
    b"\x07\x03\x10\x05\x00\x01\x90\xad"
] = b"\x07\x03\x02\x00\x01\xf1\x84"
# Slave address 7, read_register(0x1006). This is heating/cooling selection.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x03\x10\x06\x00\x01`\xad"] = b"\x07\x03\x02\x00\x000D"
# Slave address 7, read_register(0x1012, 1). This is output 1.
# Response value 0.0
GOOD_RTU_RESPONSES[b"\x07\x03\x10\x12\x00\x01 \xa9"] = b"\x07\x03\x02\x00\x000D"
# Slave address 7, read_register(0x1013, 1). This is output 2.
# Response value 0.0
GOOD_RTU_RESPONSES[b"\x07\x03\x10\x13\x00\x01qi"] = b"\x07\x03\x02\x00\x000D"
# Slave address 7, read_register(0x1023). This is system alarm setting.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x03\x10#\x00\x01qf"] = b"\x07\x03\x02\x00\x000D"
# Slave address 7, read_register(0x102A). This is LED status.
# Response value 0
GOOD_RTU_RESPONSES[b"\x07\x03\x10*\x00\x01\xa1d"] = b"\x07\x03\x02\x00\x000D"
# Slave address 7, read_register(0x102B). This is pushbutton status.
# Response value 15
GOOD_RTU_RESPONSES[b"\x07\x03\x10+\x00\x01\xf0\xa4"] = b"\x07\x03\x02\x00\x0fp@"
# Slave address 7, read_register(0x102F). This is firmware version.
# Response value 400
GOOD_RTU_RESPONSES[b"\x07\x03\x10/\x00\x01\xb1e"] = b"\x07\x03\x02\x01\x901\xb8"
## Recorded ASCII data from Delta DTB4824 ##
############################################
# Same instrument as above, but recorded in Modbus ASCII mode
# (':' framed, LRC checksum, CRLF terminated).
# (Sorted by register number)
# Slave address 7, read_bit(0x0800). This is LED AT.
# Response value 0
GOOD_ASCII_RESPONSES[b":070208000001EE\r\n"] = b":07020100F6\r\n"
# Slave address 7, read_bit(0x0801). This is LED Out1.
# Response value 1
GOOD_ASCII_RESPONSES[b":070208010001ED\r\n"] = b":07020101F5\r\n"
# Slave address 7, read_bit(0x0802). This is LED Out2.
# Response value 0
GOOD_ASCII_RESPONSES[b":070208020001EC\r\n"] = b":07020100F6\r\n"
# Slave address 7, _perform_command(2, '\x08\x10\x00\x09'). This is reading 9 bits starting at 0x0810.
# Response value '\x02\x17\x00'
GOOD_ASCII_RESPONSES[b":070208100009D6\r\n"] = b":0702021700DE\r\n"
# Slave address 7, write_bit(0x0810, 1) This is "Communication write in enabled".
GOOD_ASCII_RESPONSES[b":07050810FF00DD\r\n"] = b":07050810FF00DD\r\n"
# Slave address 7, read_bit(0x0814). This is RUN/STOP setting.
# Response value 1
GOOD_ASCII_RESPONSES[b":070208140001DA\r\n"] = b":07020101F5\r\n"
# Slave address 7, write_bit(0x0814, 0). This is STOP.
GOOD_ASCII_RESPONSES[b":070508140000D8\r\n"] = b":070508140000D8\r\n"
# Slave address 7, write_bit(0x0814, 1). This is RUN.
GOOD_ASCII_RESPONSES[b":07050814FF00D9\r\n"] = b":07050814FF00D9\r\n"
# Slave address 7, read_registers(0x1000, 2). This is process value (PV) and setpoint (SV).
# Response value [64990, 350]
GOOD_ASCII_RESPONSES[b":070310000002E4\r\n"] = b":070304FDDE015EB8\r\n"
# Slave address 7, read_register(0x1000). This is process value (PV).
# Response value 64990
GOOD_ASCII_RESPONSES[b":070310000001E5\r\n"] = b":070302FDDE19\r\n"
# Slave address 7, read_register(0x1001, 1). This is setpoint (SV).
# Response value 80.0
GOOD_ASCII_RESPONSES[b":070310010001E4\r\n"] = b":0703020320D1\r\n"
# Slave address 7, write_register(0x1001, 25, 1, functioncode=6)
GOOD_ASCII_RESPONSES[b":0706100100FAE8\r\n"] = b":0706100100FAE8\r\n"
# Slave address 7, write_register(0x1001, 0x0320, functioncode=6) # Write value 800 to register 0x1001.
# This is a setpoint of 80.0 degrees (Centigrades, dependent on setting).
GOOD_ASCII_RESPONSES[b":070610010320BF\r\n"] = b":070610010320BF\r\n"
# Slave address 7, read_register(0x1004). This is sensor type.
# Response value 14
GOOD_ASCII_RESPONSES[b":070310040001E1\r\n"] = b":070302000EE6\r\n"
# Slave address 7, read_register(0x1005) This is control method.
# Response value 1
GOOD_ASCII_RESPONSES[b":070310050001E0\r\n"] = b":0703020001F3\r\n"
# Slave address 7, read_register(0x1006). This is heating/cooling selection.
# Response value 0
GOOD_ASCII_RESPONSES[b":070310060001DF\r\n"] = b":0703020000F4\r\n"
# Slave address 7, read_register(0x1012, 1). This is output 1.
# Response value 100.0
GOOD_ASCII_RESPONSES[b":070310120001D3\r\n"] = b":07030203E809\r\n"
# Slave address 7, read_register(0x1013, 1). This is output 2.
# Response value 0.0
GOOD_ASCII_RESPONSES[b":070310130001D2\r\n"] = b":0703020000F4\r\n"
# Slave address 7, read_register(0x1023). This is system alarm setting.
# Response value 0
GOOD_ASCII_RESPONSES[b":070310230001C2\r\n"] = b":0703020000F4\r\n"
# Slave address 7, read_register(0x102A). This is LED status.
# Response value 64
GOOD_ASCII_RESPONSES[b":0703102A0001BB\r\n"] = b":0703020040B4\r\n"
# Slave address 7, read_register(0x102B). This is pushbutton status.
# Response value 15
GOOD_ASCII_RESPONSES[b":0703102B0001BA\r\n"] = b":070302000FE5\r\n"
# Slave address 7, read_register(0x102F). This is firmware version.
# Response value 400
GOOD_ASCII_RESPONSES[b":0703102F0001B6\r\n"] = b":070302019063\r\n"
#######################
# Group recorded data #
#######################
# Merge the recorded traffic into the combined lookup tables used by the
# dummy serial port. GOOD_* entries are applied last so that they override
# any WRONG_* entry recorded for the same request frame.
ASCII_RESPONSES.update(WRONG_ASCII_RESPONSES)
ASCII_RESPONSES.update(GOOD_ASCII_RESPONSES)
RTU_RESPONSES.update(WRONG_RTU_RESPONSES)
RTU_RESPONSES.update(GOOD_RTU_RESPONSES)
#################
# Run the tests #
#################
if __name__ == "__main__":
    ## Run all tests ##
    unittest.main(verbosity=VERBOSITY)
    ## Run a test class ##
    # Uncomment one of these (and comment out unittest.main above) to run a
    # single test class during development:
    # suite = unittest.TestLoader().loadTestsFromTestCase(TestDummyCommunicationHandleLocalEcho)
    # suite = unittest.TestLoader().loadTestsFromTestCase(TestCalculateCrcString)
    # suite = unittest.TestLoader().loadTestsFromTestCase(TestHexdecode)
    # suite = unittest.TestLoader().loadTestsFromTestCase(TestDummyCommunicationBroadcast)
    # unittest.TextTestRunner(verbosity=2).run(suite)
    ## Run a single test ##
    # Or build a suite of individual test methods:
    # suite = unittest.TestSuite()
    # suite.addTest(TestDummyCommunication("testGenericCommand"))
    # suite.addTest(TestDummyCommunication("testWriteBits"))
    # suite.addTest(TestDummyCommunication("testReadBits"))
    # suite.addTest(TestDummyCommunication("testWriteBit"))
    # suite.addTest(TestDummyCommunication("testWriteFloat"))
    # unittest.TextTestRunner(verbosity=2).run(suite)
    ## Run individual commands ##
    # print(repr(minimalmodbus._calculate_crc_string('\x01\x05' + '\x00\x47\x00\x00')))
|
{
"content_hash": "8bdf022a24855e17aa4e6f2626aed699",
"timestamp": "",
"source": "github",
"line_count": 6228,
"max_line_length": 276,
"avg_line_length": 36.478002569043035,
"alnum_prop": 0.5637211963818034,
"repo_name": "pyhys/minimalmodbus",
"id": "aef1a07471fb8e8e6897ebd70aaca8445104bd0d",
"size": "227831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_minimalmodbus.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "3436"
},
{
"name": "Python",
"bytes": "352590"
}
],
"symlink_target": ""
}
|
from sys import stderr, stdin, stdout
from poker import Card, Hand, Pocket, Table
# Copied or simplified from initial example code
class Bot(object):
    '''
    Main poker bot.

    Holds all state that must survive between engine messages: game
    settings, match information, and both players' stacks / pocket cards.
    '''

    def __init__(self):
        '''Set up empty containers for settings and per-player state.'''
        self.settings = {}
        self.match_settings = {}
        self.game_state = {}
        self.pocket = None
        self.bots = {
            'me': {},
            'opponent': {}
        }
        # XXX: 'amountToCall' is not always sent pre-flop, so remember the
        # small blind as a fallback when accounting for a raise.
        self.sb = 0

    def run(self):
        '''
        Main loop.

        Reads engine commands from stdin until EOF and dispatches them.
        Answers to 'action' requests are written to stdout (flushed).
        '''
        while not stdin.closed:
            try:
                rawline = stdin.readline()
                # End of file check
                if len(rawline) == 0:
                    break
                line = rawline.strip()
                # Empty lines can be ignored
                if len(line) == 0:
                    continue
                parts = line.split()
                command = parts[0].lower()
                if command == 'settings':
                    self.update_settings(parts[1:])
                elif command == 'match':
                    self.update_match_info(parts[1:])
                elif command.startswith('player'):
                    self.update_game_state(parts[0], parts[1], parts[2])
                elif command == 'action':
                    stdout.write(self.make_move(parts[2]) + '\n')
                    stdout.flush()
                else:
                    stderr.write('Unknown command: %s\n' % (command))
                    stderr.flush()
            except EOFError:
                return

    def update_settings(self, options):
        '''Store one "settings <key> <value>" pair.'''
        key, value = options
        self.settings[key] = value

    def update_match_info(self, options):
        '''Store one "match <key> <value>" pair; remember the small blind.'''
        key, value = options
        self.match_settings[key] = value
        if key == 'smallBlind':
            self.sb = int(value)

    def update_game_state(self, player, info_type, info_value):
        '''
        Update per-player state from a "player..." engine line.

        player     -- bot name as reported by the engine
        info_type  -- one of: stack, post, call, raise, hand, wins, fold, check
        info_value -- event payload (an amount, or a card-list string)
        '''
        # Map the engine's bot name onto our own bookkeeping key; the
        # original duplicated this whole branch for 'me' and 'opponent'.
        side = 'me' if player == self.settings['yourBot'] else 'opponent'
        state = self.bots[side]
        if info_type == 'stack':
            state['stack'] = int(info_value)
        elif info_type in ('post', 'call'):
            # Blinds and calls both come straight out of the stack
            state['stack'] -= int(info_value)
        elif info_type == 'raise':
            # A raise costs the raise amount plus the amount to call;
            # pre-flop 'amountToCall' may be missing, fall back to the blind
            amt = (int(info_value) + int(self.match_settings['amountToCall'])
                   if 'amountToCall' in self.match_settings else self.sb)
            state['stack'] -= amt
        elif info_type == 'hand':
            # Only our own hole cards are dealt to us; opponent hands on
            # showdown are currently unused
            if side == 'me':
                state['pocket'] = Pocket(self.parse_cards(info_value))
        elif info_type == 'wins':
            # Round winnings, currently only used to keep the stack in sync
            state['stack'] += int(info_value)
        elif info_type in ('fold', 'check'):
            pass  # no state adjustment needed
        else:
            # BUG FIX: the original printed '(me)' for both players
            stderr.write('Unknown info_type (%s): %s %s\n'
                         % (side, info_type, info_value))

    def make_move(self, timeout):
        '''
        Decide an action for our turn.

        Very naive: raise on a strong average card value, call on a
        middling one, otherwise check. `timeout` is unused.
        '''
        # Get average card value
        average_card_value = 0
        for card in self.bots['me']['pocket']:
            average_card_value += card.number
        # NOTE(review): divides by 4 although a hold'em pocket normally has
        # 2 cards -- preserved from the original; confirm intended weighting.
        average_card_value /= 4
        # Check if we have something good
        if average_card_value > 8:
            return 'raise ' + str(2 * int(self.match_settings['bigBlind']))
        elif average_card_value > 4:
            return 'call ' + self.match_settings['amountToCall']
        return 'check 0'

    def parse_cards(self, cards_string):
        '''
        Parse a "[Ah,Kd]"-style card list into a list of Card objects.
        '''
        return [Card(card[1], card[0]) for card in cards_string[1:-1].split(',')]
|
{
"content_hash": "2f6ee2783cb4f5c217985487ee60361a",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 134,
"avg_line_length": 32.06214689265537,
"alnum_prop": 0.49497797356828194,
"repo_name": "nickname456/pbots",
"id": "004040a7bbb529017475e17645c54945230c81a0",
"size": "5675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49118"
}
],
"symlink_target": ""
}
|
import xml.parsers.expat
## Primitive
##
class Primitive(object):
    """Base class for OSM primitives (node / way / relation).

    Splits XML <tag> entries into name-like tags (``names``) and all other
    key/value pairs (``props``).
    """

    def __init__(self, attrs):
        # Numeric OSM id, or None when the element carries no 'id' attribute.
        if 'id' in attrs:
            self.nid = int(attrs['id'])
        else:
            self.nid = None
        self.attrs = attrs
        self.names = []   # (key, value) pairs for 'name' / 'name:*' tags
        self.props = []   # (key, value) pairs for every other tag
        return

    def __repr__(self):
        # BUG FIX: the original formatted self.tags, an attribute that is
        # never assigned anywhere, so repr() raised AttributeError.
        return ('<%s: attrs=%r, names=%r, props=%r>' %
                (self.__class__.__name__, self.attrs, self.names, self.props))

    def add_tag(self, attrs):
        """Record one <tag k=... v=...> element on this primitive."""
        assert 'k' in attrs and 'v' in attrs
        k = attrs['k']
        v = attrs['v']
        if k == 'name' or k.startswith('name:'):
            self.names.append((k, v))
        else:
            self.props.append((k, v))
        return
## Node
##
class Node(Primitive):
    """An OSM node: a Primitive carrying an optional (lat, lon) position."""

    def __init__(self, attrs):
        Primitive.__init__(self, attrs)
        # Position stays None unless both coordinates are present.
        self.pos = None
        if 'lat' in attrs and 'lon' in attrs:
            self.pos = (float(attrs['lat']), float(attrs['lon']))
        return
## Way
##
class Way(Primitive):
    """An OSM way: a Primitive plus an ordered list of referenced node ids."""

    def __init__(self, attrs):
        Primitive.__init__(self, attrs)
        self.nodes = []  # node ids in document order
        return

    def add_node(self, attrs):
        """Record the node id from an <nd ref=...> child element."""
        if 'ref' not in attrs:
            return
        self.nodes.append(int(attrs['ref']))
        return
## Relation
##
class Relation(Primitive):
    """An OSM relation: a Primitive plus an ordered list of member ids."""

    def __init__(self, attrs):
        Primitive.__init__(self, attrs)
        self.members = []  # member element ids in document order
        return

    def add_member(self, attrs):
        """Record the member id from a <member ref=...> child element."""
        if 'ref' not in attrs:
            return
        self.members.append(int(attrs['ref']))
        return
## OSMXMLParser
##
class OSMXMLParser(object):
    """SAX-style OSM XML parser built on expat callbacks.

    Subclasses override ``add_object`` to receive each completed
    node / way / relation.
    """

    def __init__(self):
        parser = xml.parsers.expat.ParserCreate()
        parser.StartElementHandler = self._start_element
        parser.EndElementHandler = self._end_element
        parser.CharacterDataHandler = self._char_data
        self._expat = parser
        self.reset()
        return

    def reset(self):
        """Clear the element stack and any half-built primitive."""
        self._stack = []
        self._obj = None
        return

    def feed(self, data):
        """Feed a chunk of XML to the underlying expat parser."""
        self._expat.Parse(data)
        return

    def add_object(self, obj):
        """Hook called once per completed primitive; subclasses must override."""
        raise NotImplementedError

    def _start_element(self, name, attrs):
        self._stack.append((name, attrs))
        if name == 'node':
            assert self._obj is None
            self._obj = Node(attrs)
        elif name == 'way':
            assert self._obj is None
            self._obj = Way(attrs)
        elif name == 'relation':
            assert self._obj is None
            self._obj = Relation(attrs)
        elif name == 'tag' and isinstance(self._obj, Primitive):
            self._obj.add_tag(attrs)
        elif name == 'nd' and isinstance(self._obj, Way):
            self._obj.add_node(attrs)
        elif name == 'member' and isinstance(self._obj, Relation):
            self._obj.add_member(attrs)
        return

    def _end_element(self, name):
        assert self._stack
        (opened, _attrs) = self._stack.pop()
        assert opened == name
        # A closing node/way/relation completes the current primitive.
        if name in ('node', 'way', 'relation'):
            assert self._obj is not None
            self.add_object(self._obj)
            self._obj = None
        return

    def _char_data(self, data):
        # Text content is ignored; OSM data lives entirely in attributes.
        return
|
{
"content_hash": "945580775c926a2ac248a5fd2646d5ee",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 65,
"avg_line_length": 24.279411764705884,
"alnum_prop": 0.5024227740763174,
"repo_name": "euske/osmtools",
"id": "c3b2c67104372469b1c4217fbc55b02ae51432b1",
"size": "3324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "osmxmlparser.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60783"
},
{
"name": "Shell",
"bytes": "1525"
}
],
"symlink_target": ""
}
|
__version__ = "1.0b"
__author__ = "Li Chuan (daniellee0219@gmail.com)"
'''
Python client SDK for Youdao Note API using OAuth 2.
'''
try:
import json
except ImportError:
import simplejson as json
import urllib2, oauth2, time
ENCODING = 'utf-8'
BASE_URL = 'http://sandbox.note.youdao.com/'
OPTIONAL_BASE_URL = 'http://note.youdao.com/'
def _fix_url(url):
    '''Rewrite production-host note URLs onto the sandbox host.'''
    already_sandboxed = url.startswith(BASE_URL)
    return url if already_sandboxed else url.replace(OPTIONAL_BASE_URL, BASE_URL)
class User:
    """A ynote account profile, as returned by the user/get API."""
    def __init__(self, json_dict=None):
        '''Populate from an API response dict; zero-initialize if absent/empty.'''
        if not json_dict:
            self.id = ""
            self.user_name = ""
            self.total_size = 0
            self.used_size = 0
            self.register_time = 0
            self.last_login_time = 0
            self.last_modify_time = 0
            self.default_notebook = ""
            return
        self.id = json_dict['id']
        self.user_name = json_dict['user']
        self.total_size = json_dict['total_size']
        self.used_size = json_dict['used_size']
        # Timestamps may arrive as strings; normalize to int.
        self.register_time = int(json_dict['register_time'])
        self.last_login_time = int(json_dict['last_login_time'])
        self.last_modify_time = int(json_dict['last_modify_time'])
        self.default_notebook = json_dict['default_notebook']
class Notebook:
    """Metadata for a single ynote notebook."""
    def __init__(self, json_dict=None):
        '''Populate from an API response dict; zero-initialize if absent/empty.'''
        if not json_dict:
            self.path = ""
            self.name = ""
            self.notes_num = 0
            self.create_time = 0
            self.modify_time = 0
            return
        self.path = json_dict['path']
        self.name = json_dict['name']
        # Counters and timestamps may arrive as strings; normalize to int.
        self.notes_num = int(json_dict['notes_num'])
        self.create_time = int(json_dict['create_time'])
        self.modify_time = int(json_dict['modify_time'])
class Note:
    """A single ynote note, including its HTML content."""
    def __init__(self, json_dict=None):
        '''Populate from an API response dict; use empty defaults if absent.'''
        if not json_dict:
            self.path = ""
            self.title = ""
            self.author = ""
            self.source = ""
            self.size = 0
            # -1 marks "never set" (distinct from the 0 used elsewhere).
            self.create_time = -1
            self.modify_time = -1
            self.content = ""
            return
        self.path = json_dict['path']
        self.title = json_dict['title']
        self.author = json_dict['author']
        self.source = json_dict['source']
        # Sizes and timestamps may arrive as strings; normalize to int.
        self.size = int(json_dict['size'])
        self.create_time = int(json_dict['create_time'])
        self.modify_time = int(json_dict['modify_time'])
        self.content = json_dict['content']
class Resource:
    """An attachment (e.g. an image) referenced from a note's content."""
    def __init__(self, json_dict):
        '''init with the data from an API response dictionary.'''
        if json_dict:
            self.url = _fix_url(json_dict['url'])
            # 'src' (an icon/thumbnail URL) is optional in the response.
            # NOTE: was json_dict.has_key('src') -- has_key() is removed in
            # Python 3; the `in` operator is equivalent and works in both.
            if 'src' in json_dict:
                self.icon = _fix_url(json_dict['src'])
            else:
                self.icon = ""
        else:
            self.url = ""
            self.icon = ""
    def to_resource_tag(self):
        '''Render the resource as an HTML <img> tag for note content.'''
        if self.icon:
            return "<img path=\"%s\" src=\"%s\" />" % (self.url,self.icon)
        else:
            return "<img src=\"%s\" />" % self.url
class YNoteError(StandardError):
    '''
    Error raised for both YNote API failures ('API_ERROR') and HTTP
    transport failures ('HTTP_ERROR').
    '''
    def __init__(self, error_type, error_code, message):
        '''Record the error category, numeric code and human-readable text.'''
        self.error_type = error_type
        self.error_code = int(error_code)
        self.error_msg = message
        StandardError.__init__(self, message)
    def __str__(self):
        '''One-line human-readable description.'''
        return "YNoteError: type=%s, code=%d, message=%s" % (
            self.error_type, self.error_code, self.error_msg)
def _parse_api_error(body):
    '''Build an API-level YNoteError from a JSON-encoded error body.'''
    payload = json.loads(body)
    return YNoteError('API_ERROR', int(payload['error']), payload['message'])
def _parse_http_error(http_error):
    '''Wrap an urllib2.HTTPError in a transport-level YNoteError.'''
    return YNoteError('HTTP_ERROR', http_error.code, http_error.reason)
def _parse_urlencoded(body):
'''parse an urlencoded string to dictionary'''
parts = body.split('&')
return dict([tuple(part.split('=')) for part in parts])
def _do_http(request):
'''initiate an http request.'''
try:
resp = urllib2.urlopen(request)
return resp.read()
except urllib2.HTTPError, e:
if e.code == 500:
raise _parse_api_error(e.read())
else:
raise _parse_http_error(e)
def _do_get(url, params, consumer, token):
    '''
    Sign and send an HTTP GET request; return the response body as a
    string or raise YNoteError.
    '''
    builder = oauth2.RequestBuilder(oauth2.HTTP_GET, url, params)
    signed = builder.build_signed_request(consumer, token)
    return _do_http(signed)
def _do_post(url, params, consumer, token):
    '''
    Sign and send an HTTP POST request; return the response body as a
    string or raise YNoteError.  Plain POSTs use an urlencoded body.
    '''
    return _do_post_urlencoded(url, params, consumer, token)
def _do_post_urlencoded(url, params, consumer, token):
    '''
    Sign and send an HTTP POST with an urlencoded body; return the
    response body as a string or raise YNoteError.
    '''
    builder = oauth2.RequestBuilder(oauth2.HTTP_POST_URLENCODED, url, params)
    return _do_http(builder.build_signed_request(consumer, token))
def _do_post_multipart(url, params, consumer, token):
    '''
    Sign and send an HTTP POST with a multipart body (used for note
    content and file uploads); return the response body as a string or
    raise YNoteError.
    '''
    builder = oauth2.RequestBuilder(oauth2.HTTP_POST_MULTIPART, url, params)
    return _do_http(builder.build_signed_request(consumer, token))
class YNoteClient:
    """OAuth-signed API client for Youdao Note.

    Typical flow: grant_request_token() -> user authorizes in a browser ->
    grant_access_token(); after that the notebook/note/resource methods can
    be called.  All methods raise YNoteError on API or HTTP failure.
    """
    def __init__(self, consumer_key, consumer_secret):
        '''init with consumer key and consumer secret.'''
        self.consumer = oauth2.Consumer(consumer_key, consumer_secret)
        self.access_token = None
        self.request_token = None
    def grant_request_token(self, callback_url):
        '''get request token (stored in self.request_token); return the authorization url.'''
        if callback_url:
            params = {'oauth_callback':callback_url}
        else:
            # 'oob' (out-of-band): the server shows the verifier to the
            # user instead of redirecting to a callback.
            params = {'oauth_callback':'oob'}
        res = _do_get(BASE_URL+'oauth/request_token', params, self.consumer, None)
        res_dict = _parse_urlencoded(res)
        self.request_token = oauth2.Token(res_dict['oauth_token'], res_dict['oauth_token_secret'])
        auth_url = BASE_URL + 'oauth/authorize?oauth_token=' + self.request_token.key
        if callback_url:
            auth_url += '&oauth_callback=' + callback_url
        return auth_url
    def grant_access_token(self, verifier):
        '''exchange the request token plus verifier for an access token (stored in self.access_token).'''
        params = {
            'oauth_token':self.request_token.key,
            'oauth_verifier':verifier
        }
        res = _do_get(BASE_URL+'oauth/access_token', params, self.consumer, self.request_token)
        res_dict = _parse_urlencoded(res)
        self.access_token = oauth2.Token(res_dict['oauth_token'], res_dict['oauth_token_secret'])
    def set_access_token(self, token_key, token_secret):
        '''set the access token directly (e.g. restored from storage).'''
        self.access_token = oauth2.Token(token_key, token_secret)
    def get_access_token(self):
        '''return the current access token as (key, secret), or ("", "") if absent.'''
        if self.access_token:
            return self.access_token.key, self.access_token.secret
        else:
            return "", ""
    def get_user(self):
        '''get user information, return as a User object.'''
        res = _do_get(BASE_URL+'yws/open/user/get.json', None, self.consumer, self.access_token)
        return User(json.loads(res))
    def get_notebooks(self):
        '''get all notebooks, return as a list of Notebook objects.'''
        res = _do_post(BASE_URL+'yws/open/notebook/all.json', None, self.consumer, self.access_token)
        return [Notebook(d) for d in json.loads(res)]
    def get_note_paths(self, book_path):
        '''get paths of all notes in a notebook, return as a list of path strings.'''
        params = {'notebook':book_path}
        res = _do_post(BASE_URL+'yws/open/notebook/list.json', params, self.consumer, self.access_token)
        return json.loads(res)
    def create_notebook(self, name, create_time=None):
        '''create a notebook with the given name; return its path.'''
        params = {'name':name}
        if create_time:
            params['create_time'] = create_time
        res = _do_post(BASE_URL+'yws/open/notebook/create.json', params, self.consumer, self.access_token)
        return json.loads(res)['path']
    def delete_notebook(self, path):
        '''delete the notebook with the given path.'''
        params = {'notebook':path}
        _do_post(BASE_URL+'yws/open/notebook/delete.json', params, self.consumer, self.access_token)
    def get_note(self, path):
        '''get the note with the given path, return as a Note object.'''
        params = {'path':path}
        res = _do_post(BASE_URL+'yws/open/note/get.json', params, self.consumer, self.access_token)
        return Note(json.loads(res))
    def create_note(self, book_path, note):
        '''create a note in a notebook from a Note object; return the new note path.'''
        params = {
            'source':note.source,
            'author':note.author,
            'title':note.title,
            'content':note.content,
            'notebook':book_path
        }
        res = _do_post_multipart(BASE_URL+'yws/open/note/create.json', params, self.consumer, self.access_token)
        return json.loads(res)['path']
    def create_note_with_attributes(self, book_path, content, **kw):
        '''create a note; optional kw: source, author, title, create_time. Return the new path.'''
        params = { 'notebook':book_path, 'content':content }
        # Forward only the optional attributes the caller supplied.
        # (Was a chain of `'k' in kw.keys()` tests -- `in kw` is the idiom.)
        for key in ('source', 'author', 'title', 'create_time'):
            if key in kw:
                params[key] = kw[key]
        res = _do_post_multipart(BASE_URL+'yws/open/note/create.json', params, self.consumer, self.access_token)
        return json.loads(res)['path']
    def update_note(self, note, modify_time=None):
        '''update the note on the server with the information in "note".'''
        params = {
            'path':note.path,
            'source':note.source,
            'author':note.author,
            'title':note.title,
            'content':note.content,
        }
        if modify_time:
            params['modify_time'] = modify_time
        _do_post_multipart(BASE_URL+'yws/open/note/update.json', params, self.consumer, self.access_token)
    def update_note_attributes(self, note_path, **kw):
        '''update some attributes (source/author/title/content/modify_time) of a note.'''
        params = {'path':note_path}
        # Forward only the attributes the caller supplied.
        for key in ('source', 'author', 'title', 'content', 'modify_time'):
            if key in kw:
                params[key] = kw[key]
        _do_post_multipart(BASE_URL+'yws/open/note/update.json', params, self.consumer, self.access_token)
    def move_note(self, note_path, book_path):
        '''move a note into the notebook at "book_path"; return the new note path.'''
        params = {
            'path':note_path,
            'notebook':book_path
        }
        res = _do_post(BASE_URL+'yws/open/note/move.json', params, self.consumer, self.access_token)
        return json.loads(res)['path']
    def delete_note(self, note_path):
        '''delete the note with the given path.'''
        params = {'path':note_path}
        _do_post(BASE_URL+'yws/open/note/delete.json', params, self.consumer, self.access_token)
    def share_note(self, note_path):
        '''publicly share the note with the given path; return the shared url.'''
        params = {'path':note_path}
        res = _do_post(BASE_URL+'yws/open/share/publish.json', params, self.consumer, self.access_token)
        return _fix_url(json.loads(res)['url'])
    def upload_resource(self, res_file):
        '''upload a file object as a resource; return a Resource object.'''
        params = {'file':res_file}
        res = _do_post_multipart(BASE_URL+'yws/open/resource/upload.json', params, self.consumer, self.access_token)
        return Resource(json.loads(res))
    def download_resource(self, resource_url):
        '''download the resource with the given url; return its bytes as a string.'''
        return _do_get(resource_url, None, self.consumer, self.access_token)
|
{
"content_hash": "f6a9b3280d965532d9656f0a9c2b7425",
"timestamp": "",
"source": "github",
"line_count": 374,
"max_line_length": 116,
"avg_line_length": 35.77807486631016,
"alnum_prop": 0.5832150063522905,
"repo_name": "daniellee219/youdaonotepy",
"id": "e89f7043e6708f0ca9041dbc24970bf3d01b9daa",
"size": "13428",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ynote/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "24702"
}
],
"symlink_target": ""
}
|
import logging
logger = logging.getLogger(__name__)
import numpy as np
import weakref
from gym import error, monitoring
from gym.utils import closer, reraise
env_closer = closer.Closer()
# Env-related abstractions
class Env(object):
    """The main OpenAI Gym class. It encapsulates an environment with
    arbitrary behind-the-scenes dynamics. An environment can be
    partially or fully observed.
    The main API methods that users of this class need to know are:
        step
        reset
        render
        close
        configure
        seed
    When implementing an environment, override the following methods
    in your subclass:
        _step
        _reset
        _render
        _close
        _configure
        _seed
    And set the following attributes:
        action_space: The Space object corresponding to valid actions
        observation_space: The Space object corresponding to valid observations
        reward_range: A tuple corresponding to the min and max possible rewards
    The methods are accessed publicly as "step", "reset", etc.. The
    non-underscored versions are wrapper methods to which we may add
    functionality over time.
    """
    def __new__(cls, *args, **kwargs):
        # We use __new__ since we want the env author to be able to
        # override __init__ without remembering to call super.
        env = super(Env, cls).__new__(cls)
        # Register in the global closer registry; close() unregisters
        # using this id.
        env._env_closer_id = env_closer.register(env)
        env._closed = False
        env._configured = False
        env._unwrapped = None
        # Will be automatically set when creating an environment via 'make'
        env.spec = None
        return env
    # Set this in SOME subclasses
    metadata = {'render.modes': []}
    reward_range = (-np.inf, np.inf)
    # Override in SOME subclasses
    def _close(self):
        pass
    def _configure(self):
        pass
    # Set these in ALL subclasses
    action_space = None
    observation_space = None
    # Override in ALL subclasses
    def _step(self, action): raise NotImplementedError
    def _reset(self): raise NotImplementedError
    def _render(self, mode='human', close=False):
        if close:
            return
        raise NotImplementedError
    def _seed(self, seed=None): return []
    @property
    def monitor(self):
        """Lazily creates a monitor instance.
        We do this lazily rather than at environment creation time
        since when the monitor closes, we need remove the existing
        monitor but also make it easy to start a new one. We could
        still just forcibly create a new monitor instance on old
        monitor close, but that seems less clean.
        """
        if not hasattr(self, '_monitor'):
            self._monitor = monitoring.Monitor(self)
        return self._monitor
    def step(self, action):
        """Run one timestep of the environment's dynamics. When end of
        episode is reached, you are responsible for calling `reset()`
        to reset this environment's state.
        Accepts an action and returns a tuple (observation, reward, done, info).
        Args:
            action (object): an action provided by the environment
        Returns:
            observation (object): agent's observation of the current environment
            reward (float) : amount of reward returned after previous action
            done (boolean): whether the episode has ended, in which case further step() calls will return undefined results
            info (dict): contains auxiliary diagnostic information (helpful for debugging, and sometimes learning)
        """
        self.monitor._before_step(action)
        observation, reward, done, info = self._step(action)
        # The monitor's return value replaces the env's own `done` flag,
        # so the monitor can force an episode to end.
        done = self.monitor._after_step(observation, reward, done, info)
        return observation, reward, done, info
    def reset(self):
        """
        Resets the state of the environment and returns an initial observation.
        Returns:
            observation (object): the initial observation of the space. (Initial reward is assumed to be 0.)
        """
        # Envs that declare 'configure.required' in their metadata must be
        # configure()d before the first reset().
        if self.metadata.get('configure.required') and not self._configured:
            raise error.Error("{} requires calling 'configure()' before 'reset()'".format(self))
        self.monitor._before_reset()
        observation = self._reset()
        self.monitor._after_reset(observation)
        return observation
    def render(self, mode='human', close=False):
        """Renders the environment.
        The set of supported modes varies per environment. (And some
        environments do not support rendering at all.) By convention,
        if mode is:
        - human: render to the current display or terminal and
          return nothing. Usually for human consumption.
        - rgb_array: Return an numpy.ndarray with shape (x, y, 3),
          representing RGB values for an x-by-y pixel image, suitable
          for turning into a video.
        - ansi: Return a string (str) or StringIO.StringIO containing a
          terminal-style text representation. The text can include newlines
          and ANSI escape sequences (e.g. for colors).
        Note:
            Make sure that your class's metadata 'render.modes' key includes
              the list of supported modes. It's recommended to call super()
              in implementations to use the functionality of this method.
        Args:
            mode (str): the mode to render with
            close (bool): close all open renderings
        Example:
        class MyEnv(Env):
            metadata = {'render.modes': ['human', 'rgb_array']}
            def render(self, mode='human'):
                if mode == 'rgb_array':
                    return np.array(...) # return RGB frame suitable for video
                elif mode is 'human':
                    ... # pop up a window and render
                else:
                    super(MyEnv, self).render(mode=mode) # just raise an exception
        """
        # close=True skips mode validation: subclasses are expected to
        # accept close regardless of supported modes.
        if close:
            return self._render(close=close)
        # This code can be useful for calling super() in a subclass.
        modes = self.metadata.get('render.modes', [])
        if len(modes) == 0:
            raise error.UnsupportedMode('{} does not support rendering (requested mode: {})'.format(self, mode))
        elif mode not in modes:
            raise error.UnsupportedMode('Unsupported rendering mode: {}. (Supported modes for {}: {})'.format(mode, self, modes))
        return self._render(mode=mode, close=close)
    def close(self):
        """Override _close in your subclass to perform any necessary cleanup.
        Environments will automatically close() themselves when
        garbage collected or when the program exits.
        """
        # _closed will be missing if this instance is still
        # initializing.
        if not hasattr(self, '_closed') or self._closed:
            return
        # Automatically close the monitor and any render window
        self.monitor.close()
        self.render(close=True)
        self._close()
        env_closer.unregister(self._env_closer_id)
        # If an error occurs before this line, it's possible to
        # end up with double close.
        self._closed = True
    def seed(self, seed=None):
        """Sets the seed for this env's random number generator(s).
        Note:
            Some environments use multiple pseudorandom number generators.
            We want to capture all such seeds used in order to ensure that
            there aren't accidental correlations between multiple generators.
        Returns:
            list<bigint>: Returns the list of seeds used in this env's random
              number generators. The first value in the list should be the
              "main" seed, or the value which a reproducer should pass to
              'seed'. Often, the main seed equals the provided 'seed', but
              this won't be true if seed=None, for example.
        """
        return self._seed(seed)
    def configure(self, *args, **kwargs):
        """Provides runtime configuration to the environment.
        This configuration should consist of data that tells your
        environment how to run (such as an address of a remote server,
        or path to your ImageNet data). It should not affect the
        semantics of the environment.
        """
        self._configured = True
        try:
            return self._configure(*args, **kwargs)
        except TypeError as e:
            # It can be confusing if you have the wrong environment
            # and try calling with unsupported arguments, since your
            # stack trace will only show core.py.
            if self.spec:
                reraise(suffix='(for {})'.format(self.spec.id))
            else:
                raise
    @property
    def unwrapped(self):
        """Completely unwrap this env.
        Notes:
            EXPERIMENTAL: may be removed in a later version of Gym
            This is a dynamic property in order to avoid refcycles.
        Returns:
            gym.Env: The base non-wrapped gym.Env instance
        """
        if self._unwrapped is not None:
            return self._unwrapped
        else:
            return self
    def __del__(self):
        # Best-effort cleanup on garbage collection; close() is a no-op
        # if it already ran.
        self.close()
    def __str__(self):
        return '<{} instance>'.format(type(self).__name__)
# Space-related abstractions
class Space(object):
    """Abstract description of an observation or action set.

    Generic agent code can sample elements, test membership, and
    round-trip batches through JSON without knowing the concrete Env.
    """
    def sample(self, seed=0):
        """Uniformly sample a random element of this space."""
        raise NotImplementedError
    def contains(self, x):
        """Return True iff x is a valid member of this space."""
        raise NotImplementedError
    def to_jsonable(self, sample_n):
        """Convert a batch of samples from this space to a JSONable data type."""
        # By default, assume identity is JSONable
        return sample_n
    def from_jsonable(self, sample_n):
        """Convert a JSONable data type to a batch of samples from this space."""
        # By default, assume identity is JSONable
        return sample_n
class Wrapper(Env):
    """Env proxy that forwards every call to a wrapped inner env."""
    def __init__(self, env):
        self.env = env
        # Mirror the inner env's public attributes so the wrapper is a
        # drop-in replacement for it.
        self.metadata = env.metadata
        self.action_space = env.action_space
        self.observation_space = env.observation_space
        self.reward_range = env.reward_range
        self._spec = env.spec
        self._unwrapped = env.unwrapped
    def _step(self, action):
        return self.env.step(action)
    def _reset(self):
        return self.env.reset()
    def _render(self, mode='human', close=False):
        return self.env.render(mode, close)
    def _close(self):
        return self.env.close()
    def _configure(self, *args, **kwargs):
        return self.env.configure(*args, **kwargs)
    def _seed(self, seed=None):
        return self.env.seed(seed)
    def __str__(self):
        return '<{}{} instance>'.format(type(self).__name__, self.env)
    @property
    def spec(self):
        # Resolve lazily: the inner env's spec may only be assigned
        # after this wrapper was constructed.
        if self._spec is None:
            self._spec = self.env.spec
        return self._spec
    @spec.setter
    def spec(self, new_spec):
        # Won't have an env attr while in the __new__ from gym.Env
        if hasattr(self, 'env'):
            self.env.spec = new_spec
        self._spec = new_spec
|
{
"content_hash": "5dad300c50db7445fe17e9075baf0e57",
"timestamp": "",
"source": "github",
"line_count": 343,
"max_line_length": 129,
"avg_line_length": 33.54810495626822,
"alnum_prop": 0.6146693317111324,
"repo_name": "d1hotpep/openai_gym",
"id": "cdf6d1fa5b8ca1a730ab18d6f8472bcf70d76634",
"size": "11507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gym/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "461"
},
{
"name": "Python",
"bytes": "475083"
},
{
"name": "Shell",
"bytes": "711"
}
],
"symlink_target": ""
}
|
from rest_framework.test import APIClient
from datetime import datetime
from django.utils import timezone
from django.db import models
from django.apps import apps
from autofixture import AutoFixture
class OperisTestClient(APIClient):
    """DRF test client that can create model fixtures (via django-autofixture)
    and build {field_name: value} dicts of generated test data."""
    # NOTE(review): class-level dict, shared by every OperisTestClient
    # instance (register_fixtures mutates it in place) -- confirm the
    # sharing across instances/tests is intentional.
    models = {}
    def register_fixtures(self,model,count):
        """Create `count` autofixture instances of `model`, record them in
        the shared `models` registry keyed by model class, and return True."""
        fixture = AutoFixture(model)
        items = fixture.create(count)
        self.models[model] = items
        return True
    def create_fixture(self,model):
        """Create and return autofixture instance(s) of `model`.

        NOTE(review): the result appears to be a sequence here --
        generate_field_data indexes [0] into it; confirm against the
        autofixture API.
        """
        fixture = AutoFixture(model)
        return fixture.create()
    def generate_test_data(self,model,fields,omit_id=True):
        """Build a {field_name: value} dict of generated data for `model`.

        Args:
            model: the Django model class the fields belong to.
            fields: iterable of field descriptors (dicts with at least
                'name' and 'type' keys).
            omit_id: skip the 'id' field when True (the default).
        """
        item = {}
        for field in fields:
            if field['name'] == 'id' and omit_id:
                continue
            value = self.generate_field_data(model,field)
            # Falsy generated values (None, 0, '') are omitted from the result.
            if value:
                item[field['name']] = value
        return item
    def generate_field_data(self,model,field):
        """Generate a single value for `field` of `model`.

        ForeignKey fields get a freshly created related instance (returned
        as its pk); other fields get a value from AutoFixture.  datetime
        values are serialized to ISO-8601 strings, rewriting a '+00:00'
        UTC offset to the 'Z' suffix.
        """
        #We create unique FK Instances, in case the field requires "Unique"
        if field['type'] == 'ForeignKey':
            model = apps.get_model(app_label=field['parent_class_app'], model_name=field['parent_class'])
            instance = self.create_fixture(model)
            value = instance[0]
        else:
            fixture = AutoFixture(model)
            fixture.return_default_values = True
            # get_field_by_name returns (field, model, direct, m2m);
            # only the field object is used below.
            field,modelItem,direct,m2m = model._meta.get_field_by_name(field['name'])
            value = fixture.get_value(field)
        if isinstance(value,datetime):
            value = str(value.isoformat())
            if value.endswith('+00:00'):
                value = value[:-6] + 'Z'
            return value
        if value == AutoFixture.IGNORE_FIELD:
            return None
        # `models` here resolves to the django.db.models module import --
        # the class-level `models` dict is not visible in method scope.
        if isinstance(value, models.Model):
            return value.pk
        return value
|
{
"content_hash": "96041cefc163b1062ab0f5b267b55fbe",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 105,
"avg_line_length": 33.78947368421053,
"alnum_prop": 0.5638629283489096,
"repo_name": "Goldcap/django-operis",
"id": "3c5fec9582ba30ac3b15f25114b88d987b1194f8",
"size": "1926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "operis/api/test/client.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "596"
},
{
"name": "Handlebars",
"bytes": "4745"
},
{
"name": "JavaScript",
"bytes": "32172"
},
{
"name": "Python",
"bytes": "60417"
},
{
"name": "Shell",
"bytes": "76"
}
],
"symlink_target": ""
}
|
"""
algorithms.segment.trained_model
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An API for a trained segmentation model to predict nodule boundaries and
descriptive statistics.
"""
import os
import numpy as np
import scipy.ndimage
from keras.models import load_model
from ...algorithms.segment.src.model import dice_coef_loss, dice_coef
from ...algorithms.segment.src.training import get_best_model_path, get_data_shape
from ...preprocess.load_ct import load_ct, MetaData
def predict(dicom_path, centroids):
    """ Predicts nodule boundaries.
    Given a path to DICOM images and a list of centroids
    (1) load the segmentation model from its serialized state
    (2) pre-process the dicom data into whatever format the segmentation
        model expects
    (3) for each pixel create an indicator 0 or 1 of if the pixel is
        cancerous
    (4) write this binary mask to disk (side effect: overwrites
        assets/lung-mask.npy next to this module), and return the path
        to the mask
    Args:
        dicom_path (str): a path to a DICOM directory
        centroids (list[dict]): A list of centroids of the form::
            {'x': int,
             'y': int,
             'z': int}
    Returns:
        dict: Dictionary containing path to serialized binary masks and
            volumes per centroid with form::
            {'binary_mask_path': str,
             'volumes': list[float]}
    """
    # Metadata is not needed for prediction, only the voxel data.
    voxel_data, _ = load_ct(dicom_path)
    model = load_model(get_best_model_path(),
                       custom_objects={'dice_coef_loss': dice_coef_loss, 'dice_coef': dice_coef})
    x, y, z, channels = get_data_shape()
    # Allocate the model input zero-filled in one step (the previous code
    # built an uninitialized np.ndarray only to immediately replace it
    # with zeros_like), then copy the scan in, cropped to the model's
    # shape; anything beyond the scan's extent stays zero-padded.
    input_data = np.zeros((1, x, y, z, channels))  # batch, x, y, z, channels
    min_x = min(x, voxel_data.shape[0])
    min_y = min(y, voxel_data.shape[1])
    min_z = min(z, voxel_data.shape[2])
    input_data[0, :min_x, :min_y, :min_z, 0] = voxel_data[:min_x, :min_y, :min_z]
    output_data = model.predict(input_data)
    segment_path = os.path.join(os.path.dirname(__file__), 'assets', "lung-mask.npy")
    np.save(segment_path, output_data[0, :, :, :, 0])
    volumes = calculate_volume(segment_path, centroids)
    return {
        'binary_mask_path': segment_path,
        'volumes': volumes
    }
def calculate_volume(segment_path, centroids, ct_path=None):
    """ Calculate, per centroid, the size of its connected component.

    Loads the serialized binary mask, labels its connected components,
    and for each centroid counts the voxels of the component the
    centroid falls in.  If ct_path is given, voxel counts are converted
    to cubic mm using the scan's voxel spacing (DICOM sizes are in mm).

    Args:
        segment_path (str): path to a serialized (.npy) mask file
        centroids (list[dict]): centroids of the form::
            {'x': int,
             'y': int,
             'z': int}
        ct_path (str): path to the folder with the series' dcm-files;
            if None the volumes are returned in voxels.

    Returns:
        list[float]: one volume per centroid (cubic mm when ct_path is
        provided, otherwise voxels).
    """
    binary_mask = np.load(segment_path)
    labelled, _ = scipy.ndimage.label(binary_mask)
    # The component id each centroid belongs to (0 = background).
    component_ids = [labelled[c['x'], c['y'], c['z']] for c in centroids]
    counts = np.bincount(labelled.ravel())
    volumes = counts[component_ids].tolist()
    if ct_path:
        meta = MetaData(load_ct(ct_path, voxel=False))
        voxel_volume = np.prod(meta.spacing)
        volumes = [v * voxel_volume for v in volumes]
    return volumes
|
{
"content_hash": "63908ca0311fd8031f1b16ff03503815",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 120,
"avg_line_length": 37.53061224489796,
"alnum_prop": 0.6299619358346927,
"repo_name": "antonow/concept-to-clinic",
"id": "43eaf75546f0cf6249eab9001dd75ce770c76c09",
"size": "3702",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "prediction/src/algorithms/segment/trained_model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "135"
},
{
"name": "HTML",
"bytes": "446"
},
{
"name": "JavaScript",
"bytes": "25989"
},
{
"name": "Python",
"bytes": "227606"
},
{
"name": "Shell",
"bytes": "2580"
},
{
"name": "Vue",
"bytes": "19612"
}
],
"symlink_target": ""
}
|
import sys
import os
import gzip
import zipfile
from optparse import make_option
from django.conf import settings
from django.core import serializers
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import connections, router, transaction, DEFAULT_DB_ALIAS
from django.db.models import get_apps
from django.utils.itercompat import product
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
class Command(BaseCommand):
    """Django management command: load serialized fixtures into a database."""
    help = 'Installs the named fixture(s) in the database.'
    args = "fixture [fixture ...]"
    option_list = BaseCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
            'fixtures into. Defaults to the "default" database.'),
    )
    def handle(self, *fixture_labels, **options):
        """Locate each named fixture in the app fixture dirs, FIXTURE_DIRS
        and absolute paths, deserialize it and save its objects, all inside
        a single transaction (unless commit=False was passed in)."""
        using = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[using]
        self.style = no_style()
        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)
        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)
        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        models = set()
        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()
        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)
        # zipfile.ZipFile wrapper whose read() returns the archive's single
        # member, so a .zip fixture can be treated like a plain file.
        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])
        # Map compression suffix -> file-like opener (None = uncompressed).
        compression_types = {
            None: file,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File
        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(path)
            else:
                # It's a models.py module
                app_module_paths.append(app.__file__)
        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]
        for fixture_label in fixture_labels:
            # Split an optional trailing compression suffix off the label
            # (e.g. 'data.json.gz' -> 'data.json' + 'gz').
            parts = fixture_label.split('.')
            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()
            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []
            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                sys.stderr.write(
                    self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
                        (fixture_name, format)))
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
                return
            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']
            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)
                label_found = False
                # Try every (database suffix, serialization format,
                # compression) combination as a candidate file name.
                for combo in product([using, None], formats, compression_formats):
                    database, format, compression_format = combo
                    file_name = '.'.join(
                        p for p in [
                            fixture_name, database, format, compression_format
                        ]
                        if p
                    )
                    if verbosity > 1:
                        print "Trying %s for %s fixture '%s'..." % \
                            (humanize(fixture_dir), file_name, fixture_name)
                    full_path = os.path.join(fixture_dir, file_name)
                    open_method = compression_types[compression_format]
                    try:
                        fixture = open_method(full_path, 'r')
                        if label_found:
                            fixture.close()
                            print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                                (fixture_name, humanize(fixture_dir)))
                            transaction.rollback(using=using)
                            transaction.leave_transaction_management(using=using)
                            return
                        else:
                            fixture_count += 1
                            objects_in_fixture = 0
                            if verbosity > 0:
                                print "Installing %s fixture '%s' from %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
                            try:
                                objects = serializers.deserialize(format, fixture, using=using)
                                for obj in objects:
                                    # Only save objects routed to this database.
                                    if router.allow_syncdb(using, obj.object.__class__):
                                        objects_in_fixture += 1
                                        models.add(obj.object.__class__)
                                        obj.save(using=using)
                                object_count += objects_in_fixture
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                raise
                            except Exception:
                                import traceback
                                fixture.close()
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(using=using)
                                if show_traceback:
                                    traceback.print_exc()
                                else:
                                    sys.stderr.write(
                                        self.style.ERROR("Problem installing fixture '%s': %s\n" %
                                             (full_path, ''.join(traceback.format_exception(sys.exc_type,
                                                 sys.exc_value, sys.exc_traceback)))))
                                return
                            fixture.close()
                            # If the fixture we loaded contains 0 objects, assume that an
                            # error was encountered during fixture loading.
                            if objects_in_fixture == 0:
                                sys.stderr.write(
                                    self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
                                        (fixture_name)))
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(using=using)
                                return
                    except Exception, e:
                        # Most commonly: this candidate path does not exist;
                        # move on to the next combination.
                        if verbosity > 1:
                            print "No %s fixture '%s' in %s." % \
                                (format, fixture_name, humanize(fixture_dir))
        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)
        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        if object_count == 0:
            if verbosity > 0:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)
        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
|
{
"content_hash": "ca14b58695c4eef9a3f2ed9eda01585c",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 118,
"avg_line_length": 44.07826086956522,
"alnum_prop": 0.49852041822844745,
"repo_name": "aprefontaine/TMScheduler",
"id": "6212d6151dcb9d2d796296728846fb710db1c974",
"size": "10138",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "django/core/management/commands/loaddata.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "589667"
},
{
"name": "Python",
"bytes": "5970832"
},
{
"name": "Shell",
"bytes": "3531"
}
],
"symlink_target": ""
}
|
import os
from utils import HoraceTestObject
from test.CSSTest.pages.testpage import CSSTestPage
from test.CSSTest.pages.testpage_missing_elements import CSSTestPageWithMissingRequiredElements
from test.CSSTest.modules.paragraph import ParagraphSectionModule
from horace.contentNode import element, content_module, content_module_list
from horace.exceptions import ElementNotFoundException
class TestPageObject(HoraceTestObject):
    """Integration tests for Horace page objects using the CSS test page.

    Covers content-module resolution, element lists, required-element
    errors, iframe handling, the declaration helpers (``element``,
    ``content_module``, ``content_module_list``) and screenshots.
    """

    # Supplied by the HoraceTestObject harness before the tests run.
    driver = None

    def test_page_modules(self):
        """Declared modules resolve, and nested content caches on the module."""
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.paragraphSection)
        self.assertEqual(len(webPage.paragraphSection.paragraphs), 2)
        self.assertIsNotNone(webPage.table)
        self.assertIsNotNone(webPage.table.rows)
        self.assertEqual(len(webPage.table.rows), 4)
        self.assertEqual(webPage.table.rows[1].data.text, 'Division 1')
        # 'paragraphs' belongs to the paragraphSection module, so it must not
        # leak into the page's own content cache.
        self.assertFalse('paragraphs' in webPage._content_instances)
        self.assertEqual(len(webPage._content_instances), 4)

    def test_page_title(self):
        """The page object exposes the document title."""
        webPage = CSSTestPage(self.driver)
        self.assertEqual(webPage.title, 'Horace Test Page')

    def test_page_with_required_elements_throws_exception(self):
        """Accessing a missing required element raises ElementNotFoundException."""
        try:
            page = CSSTestPageWithMissingRequiredElements(self.driver)
            len(page.hardbreaks)
            self.fail("Didn't raise Exception")
        # 'as' form works on Python 2.6+ and 3.x (old ', e' form is 2-only).
        except ElementNotFoundException as e:
            self.assertEqual(
                'Element not found (br)',
                e.message  # NOTE(review): .message is Python-2-only; str(e) would be portable
            )

    def test_page_elements(self):
        """Declared element lists resolve to every matching node."""
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.headingTwos)
        self.assertEqual(len(webPage.headingTwos), 9)

    def test_get_attribute(self):
        """Undeclared attributes raise a normal AttributeError."""
        webPage = CSSTestPage(self.driver)
        try:
            webPage.foo
        except AttributeError as e:
            self.assertEqual(
                "'CSSTestPage' object has no attribute 'foo'",
                e.message
            )
        else:
            # Previously missing: without this the test passed vacuously
            # when no exception was raised.
            self.fail("Didn't raise Exception")

    def test_refresh_content(self):
        """refresh_content() reloads a module without changing its contents."""
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.headingTwos)
        self.assertEqual(len(webPage.paragraphSection.paragraphs), 2)
        webPage.refresh_content('paragraphSection')
        self.assertEqual(len(webPage.paragraphSection.paragraphs), 2)

    def test_iframe_with_elements(self):
        """Elements declared on an iframe resolve inside the frame only."""
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.anIFrame)
        self.assertEqual(len(webPage.anIFrame.headingTwoForIframe), 1)
        # Top-level lookups are unaffected by the iframe.
        self.assertEqual(len(webPage.headingTwos), 9)

    def test_iframe_with_modules(self):
        """Modules declared on an iframe resolve after activate()."""
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.anIFrame)
        self.assertIsNotNone(webPage.headingTwos)
        self.assertEqual(len(webPage.anIFrame.headingTwoForIframe), 1)
        self.assertEqual(len(webPage.headingTwos), 9)
        self.assertIsNotNone(webPage.anIFrame.table)
        webPage.anIFrame.activate()
        self.assertEqual(len(webPage.anIFrame.table.rows), 2)
        self.assertEqual(len(webPage.anIFrame.rows), 2)

    def test_elements_helper(self):
        """element() builds a definition dict and requires a selector."""
        element_definition = element('foo', False)
        self.assertEqual(element_definition, {'selector': 'foo', 'required': False})
        try:
            element(None, False)
        except Exception as e:
            self.assertEqual(
                "selector required",
                e.message
            )
        else:
            # Previously missing failure path: the test passed vacuously.
            self.fail("Didn't raise Exception")

    def test_module_helper(self):
        """content_module() builds a definition dict and validates its inputs."""
        module_definition = content_module(ParagraphSectionModule, 'foo', False)
        self.assertEqual(module_definition, {'module': ParagraphSectionModule, 'selector': 'foo', 'required': False})
        try:
            content_module(None, None, False)
        except Exception as e:
            self.assertEqual(
                "selector or module required",
                e.message
            )
        else:
            # Previously missing failure path: the test passed vacuously.
            self.fail("Didn't raise Exception")

    def test_module_list_helper(self):
        """content_module_list() marks the definition as a list."""
        module_list_definition = content_module_list(ParagraphSectionModule, 'foo', False)
        self.assertEqual(module_list_definition,
                         {'module': ParagraphSectionModule, 'selector': 'foo', 'required': False, 'isList': True})
        try:
            content_module_list(None, 'foo', False)
        except Exception as e:
            self.assertEqual(
                "selector or module required",
                e.message
            )
        else:
            # Previously missing failure path: the test passed vacuously.
            self.fail("Didn't raise Exception")

    def test_take_screenshot(self):
        """take_screenshot() with no filename returns base64 PNG data."""
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.headingTwos)
        base64EncodedImage = webPage.take_screenshot()
        # Every base64-encoded PNG starts with this prefix (PNG magic bytes).
        self.assertEqual(base64EncodedImage[:10], 'iVBORw0KGg')

    def test_take_screenshot_as_filename(self):
        """take_screenshot(filename) writes the image to disk."""
        screenshotFilename = '/tmp/testScreenshot.png'
        webPage = CSSTestPage(self.driver)
        self.assertIsNotNone(webPage.headingTwos)
        self.assertFalse(os.path.exists(screenshotFilename))
        self.assertTrue(webPage.take_screenshot(screenshotFilename))
        # Was a bare `assert`; use the unittest assertion style consistently.
        self.assertTrue(os.path.exists(screenshotFilename))
        os.remove(screenshotFilename)
        self.assertFalse(os.path.exists(screenshotFilename))
|
{
"content_hash": "e2824dd708a4d049cd4ff3659a6efae4",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 118,
"avg_line_length": 40.04615384615385,
"alnum_prop": 0.6613522858240491,
"repo_name": "lawrencec/horace",
"id": "cf5f994abc86fa6a69efa56641561d2643b29ac1",
"size": "5206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_page.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46364"
}
],
"symlink_target": ""
}
|
"""Signals emited from waliki
"""
#===============================================================================
# IMPORTS
#===============================================================================
from flask.signals import Namespace
#===============================================================================
# SIGNALS
#===============================================================================
wiki_signals = Namespace()
page_saved = wiki_signals.signal('page-saved')
pre_edit = wiki_signals.signal('pre-edit')
pre_display = wiki_signals.signal('pre-display')
#===============================================================================
# MAIN
#===============================================================================
if __name__ == "__main__":
print(__doc__)
|
{
"content_hash": "287e09df8c3e4991b39dbde9c4249a2f",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 80,
"avg_line_length": 28.75,
"alnum_prop": 0.2645962732919255,
"repo_name": "mgaitan/waliki_flask",
"id": "f687360d035191fccc69538af4c89b225f0c8818",
"size": "1251",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "waliki/signals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "137204"
},
{
"name": "JavaScript",
"bytes": "281948"
},
{
"name": "Python",
"bytes": "99114"
}
],
"symlink_target": ""
}
|
from django import forms
from taggit.forms import TagWidget
from apps.gallery.models import ResponsiveImage
class ResponsiveImageForm(forms.ModelForm):
    """Model form for editing ResponsiveImage metadata in the dashboard."""

    class Meta(object):
        model = ResponsiveImage
        fields = ['name', 'description', 'photographer', 'tags']
        labels = {
            'tags': 'Tags',
        }
        # Placeholder texts are intentionally in Norwegian (site language).
        widgets = {
            'photographer': forms.TextInput(
                attrs={'placeholder': 'Eventuell(e) fotograf(er)...'}
            ),
            'tags': TagWidget(
                attrs={'placeholder': 'Eksempel: kontoret, kjelleren, åre'}
            ),
        }
|
{
"content_hash": "1e89c5f0a374cac02c1e360ca282faa4",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 98,
"avg_line_length": 29.25,
"alnum_prop": 0.5863247863247864,
"repo_name": "dotKom/onlineweb4",
"id": "2af7100393f84907914521fbbcdaa60eeb9d698b",
"size": "644",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "apps/gallery/dashboard/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "71414"
},
{
"name": "HTML",
"bytes": "463894"
},
{
"name": "JavaScript",
"bytes": "745404"
},
{
"name": "Python",
"bytes": "925584"
},
{
"name": "Shell",
"bytes": "3130"
},
{
"name": "Standard ML",
"bytes": "1088"
}
],
"symlink_target": ""
}
|
from pyfinder import ClientHub
import unittest
import requests
class TestClientHub(unittest.TestCase):
    """Integration tests for ClientHub against the live Docker Hub API.

    NOTE(review): these tests hit the network and depend on current hub
    state (image counts only grow over time), so they cannot run offline.
    """

    def setUp(self):
        """Create a fresh ClientHub for each test."""
        self.cHub = ClientHub()

    def test_get_tags(self):
        """Tag listing and tag counting agree for a known repository."""
        repo_name = "andoladockeradmin/ubuntu"
        list_tags = self.cHub.get_all_tags(repo_name)
        count_tags = self.cHub.get_num_tags(repo_name)
        self.assertEqual(len(list_tags), count_tags)

    def test_crawl_images(self):
        """crawl_images yields pages totalling exactly max_images images."""
        max_images = 50
        num_images = 0
        for list_images in self.cHub.crawl_images(page=1, page_size=10, max_images=max_images):
            num_images += len(list_images)
        self.assertEqual(max_images, num_images)

    def test_json_tag(self):
        """get_json_tag returns the JSON document for a single tag."""
        json_response = self.cHub.get_json_tag("library/nginx", tag="latest")
        self.assertEqual(json_response['name'], 'latest')

    def test_get_repo(self):
        """get_json_repo returns repo JSON, or an empty dict for unknown repos."""
        json_image = self.cHub.get_json_repo("library/java")
        self.assertEqual(json_image['name'], 'java')
        json_image = self.cHub.get_json_repo("norepo/noimage")
        self.assertDictEqual(json_image, {})

    def test_count_all_images(self):
        """The hub-wide image count never drops below a recorded snapshot."""
        count = self.cHub.count_all_images()
        # Lower bound: the total observed on 9 July 2016.
        self.assertGreater(count, 323650)

    def test_crawl_official(self):
        """Crawling the official 'library' namespace returns every image."""
        # Renamed from `json` to avoid shadowing the stdlib module name.
        library_json = self.cHub.get_dockerhub("/v2/repositories/library")
        count = library_json['count']
        list_images = self.cHub.crawl_official_images()
        self.assertEqual(count, len(list_images))
|
{
"content_hash": "26a2f72704ed772d97c6a37ebdd15ac9",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 95,
"avg_line_length": 34.5,
"alnum_prop": 0.6388888888888888,
"repo_name": "di-unipi-socc/DockerFinder",
"id": "e9f0c281831ae8cef232cbe2705f6dc4563da462",
"size": "1656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysis/pyFinder/pyfinder/tests/test_docker_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1998"
},
{
"name": "HTML",
"bytes": "6115"
},
{
"name": "JavaScript",
"bytes": "25518"
},
{
"name": "Python",
"bytes": "172892"
},
{
"name": "Shell",
"bytes": "13615"
},
{
"name": "TypeScript",
"bytes": "22724"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.