text stringlengths 0 1.05M | meta dict |
|---|---|
"""A full convolutional neural network for road segmentation.
nohup python -u -m self_driving.road_seg.convnet > self_driving/road_seg/output.txt 2>&1 &
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import tensorflow as tf
from utils import kitti
from self_driving.road_seg import fcn8_vgg
import scipy as scp
import scipy.misc
import matplotlib as mpl
import matplotlib.cm
EPOCH = 5000
N_cl = 2
UU_TRAIN_SET_SIZE = 98 - 9
UU_TEST_SET_SIZE = 9
def _compute_cross_entropy_mean(labels, softmax):
    """Mean cross-entropy between one-hot labels and softmax outputs.

    Both arguments are expected flattened to shape (num_pixels, 2); the
    [1, 1] factor applies equal class weights (a hook for re-weighting).
    """
    weighted = tf.multiply(labels * tf.log(softmax), [1, 1])
    per_pixel_xent = -tf.reduce_sum(weighted, reduction_indices=[1])
    return tf.reduce_mean(per_pixel_xent, name='xentropy_mean')
def loss(logits, labels):
    """Build the total training loss.

    Combines the softmax cross-entropy of the upsampled logits against the
    one-hot labels with the weight-decay terms accumulated in the 'losses',
    'dec_losses' and 'fc_wlosses' graph collections.

    Returns a dict with keys 'total_loss', 'xentropy' and 'weight_loss'.
    """
    with tf.name_scope('loss'):
        flat_labels = tf.to_float(tf.reshape(labels, (-1, 2)))
        flat_logits = tf.reshape(logits, (-1, 2))
        # Small epsilon keeps log(softmax) finite.
        softmax = tf.nn.softmax(flat_logits) + 1e-9
        xentropy = _compute_cross_entropy_mean(flat_labels, softmax)
        # Weight-decay contributions registered elsewhere in the graph.
        weight_loss = (tf.add_n(tf.get_collection('losses'), name='total_loss') +
                       tf.add_n(tf.get_collection('dec_losses'), name='total_loss') +
                       tf.add_n(tf.get_collection('fc_wlosses'), name='total_loss'))
        return {
            'total_loss': xentropy + weight_loss,
            'xentropy': xentropy,
            'weight_loss': weight_loss,
        }
def f1_score(logits, labels):
    """Compute F1 score, precision and recall tensors for the positive class.

    `labels` is the one-hot ground truth (reshaped to (-1, 2), channel 1
    taken as the positive class); `logits` is the per-pixel prediction map
    (flattened to a 0/1 vector).
    """
    true_labels = tf.to_float(tf.reshape(labels, (-1, 2)))[:, 1]
    pred = tf.to_float(tf.reshape(logits, [-1]))
    true_positives = tf.reduce_sum(pred * true_labels)
    false_positives = tf.reduce_sum(pred * (1 - true_labels))
    precision = true_positives / (true_positives + false_positives)
    # Bug fix: recall = TP / (actual positives). The original divided by
    # tf.reduce_sum(labels), which sums BOTH one-hot channels and therefore
    # equals the total pixel count, not the positive count used as ground
    # truth by the precision term above.
    recall = true_positives / tf.reduce_sum(true_labels)
    f1 = 2 * precision * recall / (precision + recall)
    return f1, precision, recall
def learning_rate(global_step):
    """Three-stage exponentially decaying learning-rate schedule.

    Starts at 1e-5 and chains three staircase decays (x0.1 every 20% of
    EPOCH, x0.5 every 40%, x0.8 every 60%); the resulting rate is also
    logged as a TensorBoard scalar.
    """
    rate = tf.train.exponential_decay(
        1e-5, global_step, EPOCH * 0.2, 0.1, staircase=True)
    rate = tf.train.exponential_decay(
        rate, global_step, EPOCH * 0.4, 0.5, staircase=True)
    rate = tf.train.exponential_decay(
        rate, global_step, EPOCH * 0.6, 0.8, staircase=True)
    tf.summary.scalar('learning_rate', rate)
    return rate
def color_image(image, num_classes=20):
    """Map an array of class indices to RGBA colours via the 'Set1' colormap."""
    normalizer = mpl.colors.Normalize(vmin=0., vmax=num_classes)
    colormap = mpl.cm.get_cmap('Set1')
    return colormap(normalizer(image))
def save_output(index, training_image, prediction, label):
    """Save the raw decision map and a colour-coded overlay for one image.

    The overlay highlights (presumably — confirm against the label layout):
    true positives in green, false positives in red and false negatives in
    blue, blended over the corresponding channel of the training image.
    """
    # Inverted prediction map; assumes class 0 of pred_up is the road class
    # — TODO confirm against fcn8_vgg.pred_up semantics.
    road_mask = 1 - prediction[0]
    overlay = copy.copy(training_image)
    # Save the raw decision map, colour-mapped over the two classes.
    decision_colored = color_image(prediction[0], 2)
    scp.misc.imsave('output/decision_%d.png' % index, decision_colored)
    # True positives -> blended into the green channel.
    tp_mask = road_mask * label[..., 0][0]
    overlay[..., 1] = (1 - tp_mask) * training_image[..., 1] + tp_mask * 255
    # False positives -> blended into the red channel.
    fp_mask = road_mask * label[..., 1][0]
    overlay[..., 0] = (1 - fp_mask) * training_image[..., 0] + fp_mask * 255
    # False negatives -> blended into the blue channel.
    fn_mask = (1 - road_mask) * label[..., 0][0]
    overlay[..., 2] = (1 - fn_mask) * training_image[..., 2] + fn_mask * 255
    # Save the merged visualisation.
    scp.misc.imsave('merge/decision_%d.png' % index, overlay)
def main(_):
    """Train FCN8-VGG for road segmentation on the KITTI 'uu' image set.

    Builds the graph (single-image batches with dynamic height/width),
    trains for EPOCH steps with gradient clipping, and every 5 steps runs
    the held-out test images, saving visualisations and TensorBoard
    summaries.
    """
    kitti_data = kitti.Kitti()
    # Batch size is fixed to 1; spatial dims are dynamic (None).
    x_image = tf.placeholder(tf.float32, [1, None, None, 3])
    y_ = tf.placeholder(tf.float32, [1, None, None, N_cl])
    tf.summary.image("images", x_image, max_outputs=1)
    # FCN-8s decoder on top of pretrained VGG16 weights loaded from disk.
    vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path="data/vgg16.npy")
    vgg_fcn.build(x_image, debug=True, num_classes=N_cl)
    losses = loss(vgg_fcn.upscore32, y_)
    f1, precision, recall = f1_score(vgg_fcn.pred_up, y_)
    total_loss = losses['total_loss']
    tf.summary.scalar("Loss", total_loss)
    tf.summary.scalar("F1 Score", f1)
    tf.summary.scalar("Precision", precision)
    tf.summary.scalar("Recall", recall)
    global_step = tf.Variable(0, trainable=False)
    lr = learning_rate(global_step)
    optimizer = tf.train.AdamOptimizer(lr)
    # Clip all gradients to a global norm of 1.0 before applying.
    grads_and_vars = optimizer.compute_gradients(total_loss)
    grads, tvars = zip(*grads_and_vars)
    clipped_grads, norm = tf.clip_by_global_norm(grads, 1.0)
    # NOTE(review): under Python 3 `zip` returns an iterator; this code is
    # from the Python 2 / TF1 era — confirm apply_gradients accepts it.
    grads_and_vars = zip(clipped_grads, tvars)
    train_step = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
    sess = tf.InteractiveSession()
    merged = tf.summary.merge_all()
    train_writer = tf.summary.FileWriter('train', sess.graph)
    sess.run(tf.global_variables_initializer())
    for i in range(EPOCH):
        print("step %d" % i)
        # Cycle through the training images (indices 0..UU_TRAIN_SET_SIZE-1).
        t_img, t_label = kitti_data.next_batch(i % UU_TRAIN_SET_SIZE)
        pred, _ = sess.run([vgg_fcn.pred_up, train_step],
                           feed_dict={x_image: t_img, y_: t_label})
        if i % 5 == 0:
            # Periodic evaluation on the held-out images (indices after the
            # training range); each prediction is saved as an image overlay.
            for test_index in range(UU_TEST_SET_SIZE):
                test_img, test_label = kitti_data.next_batch(test_index + UU_TRAIN_SET_SIZE)
                pred, summary = sess.run([vgg_fcn.pred_up, merged],
                                         feed_dict={x_image: test_img, y_: test_label})
                save_output(test_index + UU_TRAIN_SET_SIZE, test_img[0], pred, test_label)
            # Log one summary per evaluation round (indentation reconstructed
            # from a whitespace-stripped source: this uses the last test
            # image's summary — confirm against the original layout).
            train_writer.add_summary(summary, i)
if __name__ == '__main__':
tf.app.run(main=main) | {
"repo_name": "mengli/PcmAudioRecorder",
"path": "self_driving/road_seg/convnet.py",
"copies": "2",
"size": "5948",
"license": "apache-2.0",
"hash": -4977446518754172000,
"line_mean": 35.950310559,
"line_max": 92,
"alpha_frac": 0.6442501681,
"autogenerated": false,
"ratio": 3.1978494623655913,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48420996304655917,
"avg_score": null,
"num_lines": null
} |
"""A fully featured python package for quaternion representation, manipulation, 3D rotation and animation.
See:
https://github.com/KieranWynn/pyquaternion
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
setup(
name='pyquaternion',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.0.0',
description='A fully featured python package for quaternion representation, manipulation, 3D rotation and animation.',
long_description="A fully featured python package for quaternion representation, manipulation, 3D rotation and animation.",
# The project's main homepage.
url='https://github.com/KieranWynn/pyquaternion',
# Author details
author='KieranWynn',
author_email='KieranWynn@users.noreply.github.com',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Embedded Systems',
'Topic :: Scientific/Engineering :: Mathematics'
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
# What does your project relate to?
keywords='quaternion math development',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
py_modules=["quaternion"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=[],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={},
) | {
"repo_name": "timdelbruegger/pyquaternion",
"path": "setup.py",
"copies": "1",
"size": "4037",
"license": "mit",
"hash": -5288398573613396000,
"line_mean": 36.1037735849,
"line_max": 127,
"alpha_frac": 0.6603913797,
"autogenerated": false,
"ratio": 4.313034188034188,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5473425567734188,
"avg_score": null,
"num_lines": null
} |
"""A functional set of stubs to be used for unit testing.
Projects that use pycassa and need to run an automated unit test suite on a
system like Jenkins can use these stubs to emulate interactions with Cassandra
without spinning up a cluster locally.
"""
import operator
from uuid import UUID
from collections import MutableMapping
from pycassa import NotFoundException
from pycassa.util import OrderedDict
from pycassa.columnfamily import gm_timestamp
from pycassa.index import EQ, GT, GTE, LT, LTE
__all__ = ['ConnectionPoolStub', 'ColumnFamilyStub', 'SystemManagerStub']
class DictWithTime(MutableMapping):
    """Mapping that stores every value as a ``(value, timestamp)`` pair.

    An optional ``timestamp`` keyword argument pins all insertions to a
    fixed timestamp; otherwise :func:`gm_timestamp` supplies one per write.
    """

    def __init__(self, *args, **kwargs):
        # Fixed timestamp for all writes, or None to stamp each write live.
        self._fixed_timestamp = kwargs.pop('timestamp', None)
        self.store = dict()
        self.update(dict(*args, **kwargs))

    def __getitem__(self, key):
        # Note: returns the raw (value, timestamp) pair, not just the value.
        return self.store[key]

    def __setitem__(self, key, value, timestamp=None):
        if timestamp is None:
            timestamp = self._fixed_timestamp or gm_timestamp()
        self.store[key] = (value, timestamp)

    def __delitem__(self, key):
        del self.store[key]

    def __iter__(self):
        return iter(self.store)

    def __len__(self):
        return len(self.store)
# Map pycassa index-expression operator codes onto their Python comparison
# counterparts; used to evaluate index clauses in get_indexed_slices().
operator_dict = {
    EQ: operator.eq,
    GT: operator.gt,
    GTE: operator.ge,
    LT: operator.lt,
    LTE: operator.le,
}
class ConnectionPoolStub(object):
    """Connection pool stub.

    Notes created column families in :attr:`self.column_families`.
    """

    def __init__(self, *args, **kwargs):
        # All real connection-pool arguments are accepted and ignored.
        self.column_families = {}

    def _register_mock_cf(self, name, cf):
        # Registration with a falsy name (None/empty) is silently skipped.
        if name:
            self.column_families[name] = cf

    def dispose(self, *args, **kwargs):
        """No-op: the stub holds no real connections to release."""
        pass
class SystemManagerStub(object):
    """Functional System Manager stub object.

    Records when column families, columns, and indexes have been created. To
    see what has been recorded, look at :attr:`self.column_families`.
    """

    def __init__(self, *args, **kwargs):
        # All real system-manager arguments are accepted and ignored.
        self.column_families = {}

    def create_column_family(self, keyspace, table_name, *args, **kwargs):
        """Create a column family and record its existence."""
        self.column_families[table_name] = {
            'keyspace': keyspace,
            'columns': {},
            'indexes': {},
        }

    def alter_column(self, keyspace, table_name, column_name, column_type):
        """Alter a column, recording its name and type."""
        self.column_families[table_name]['columns'][column_name] = column_type

    def create_index(self, keyspace, table_name, column_name, column_type):
        """Create an index, recording its name and type."""
        self.column_families[table_name]['indexes'][column_name] = column_type

    def _schema(self):
        # Fold the recorded family/column/index names into one string whose
        # hash stands in for a schema version identifier.
        fingerprint = ','.join(self.column_families.keys())
        for family in self.column_families:
            for section in ('columns', 'indexes'):
                fingerprint += ','.join(self.column_families[family][section])
        return hash(fingerprint)

    def describe_schema_versions(self):
        """Describes the schema based on a hash of the stub system state."""
        return {self._schema(): ['1.1.1.1']}
class ColumnFamilyStub(object):
    """Functional ColumnFamily stub object.

    Acts very similar to a remote column family, supporting a basic version of
    the API. When instantiated, it registers itself with the supplied (stub)
    connection pool. (Module targets Python 2: iteritems/keys()[0] idioms.)
    """

    def __init__(self, pool=None, column_family=None, rows=None, **kwargs):
        rows = rows or OrderedDict()
        # Bug fix: the original rebound the loop variable (`r = DictWithTime(r)`)
        # instead of storing the wrapped row, so plain-dict rows were never
        # actually converted to DictWithTime.
        for key in list(rows):
            if not isinstance(rows[key], DictWithTime):
                rows[key] = DictWithTime(rows[key])
        self.rows = rows
        if pool is not None:
            pool._register_mock_cf(column_family, self)

    def __len__(self):
        return len(self.rows)

    def __contains__(self, obj):
        return self.rows.__contains__(obj)

    def get(self, key, columns=None, column_start=None, column_finish=None,
            column_reversed=False, column_count=100, include_timestamp=False, **kwargs):
        """Get a value from the column family stub.

        Raises NotFoundException when the key is absent or the row is empty.
        Columns are returned sorted (UUIDv1 column keys sort by their time
        component), optionally reversed, sliced to `column_count`.
        """
        my_columns = self.rows.get(key)
        if include_timestamp:
            get_value = lambda x: x
        else:
            get_value = lambda x: x[0]
        if not my_columns:
            raise NotFoundException()
        items = my_columns.items()
        # NOTE(review): items[0] is a (column, value) tuple, so the first
        # isinstance(..., UUID) branch appears unreachable; the tuple branch
        # below handles UUIDv1 column keys. Kept as-is to preserve behavior.
        if isinstance(items[0], UUID) and items[0].version == 1:
            items.sort(key=lambda uuid: uuid.time)
        elif isinstance(items[0], tuple) and any(isinstance(x, UUID) for x in items[0]):
            are_components_uuids = [isinstance(x, UUID) and x.version == 1 for x in items[0]]

            def sortuuid(tup):
                # Sort UUIDv1 components by timestamp, others by value.
                return [x.time if is_uuid else x for x, is_uuid in zip(tup, are_components_uuids)]
            items.sort(key=sortuuid)
        else:
            items.sort()
        if column_reversed:
            items.reverse()
        sliced_items = [(k, get_value(v)) for (k, v) in items
                        if self._is_column_in_range(k, columns,
                            column_start, column_finish, column_reversed)][:column_count]
        return OrderedDict(sliced_items)

    def _is_column_in_range(self, k, columns, column_start, column_finish, column_reversed):
        # An explicit column list overrides the start/finish range; the range
        # bounds swap when iterating in reverse.
        lower_bound = column_start if not column_reversed else column_finish
        upper_bound = column_finish if not column_reversed else column_start
        if columns:
            return k in columns
        return (not lower_bound or k >= lower_bound) and (not upper_bound or k <= upper_bound)

    def multiget(self, keys, columns=None, column_start=None, column_finish=None,
                 column_reversed=False, column_count=100, include_timestamp=False, **kwargs):
        """Get multiple key values from the column family stub.

        Missing keys are silently skipped (no NotFoundException).
        """
        return OrderedDict(
            (key, self.get(
                key,
                columns=columns,
                column_start=column_start,
                column_finish=column_finish,
                column_reversed=column_reversed,
                column_count=column_count,
                include_timestamp=include_timestamp,
            )) for key in keys if key in self.rows)

    def batch(self, **kwargs):
        """Returns itself (batched writes apply immediately in the stub)."""
        return self

    def send(self):
        """No-op: the stub has nothing to flush."""
        pass

    def insert(self, key, columns, timestamp=None, **kwargs):
        """Insert data to the column family stub.

        Returns the timestamp recorded for (one of) the inserted columns.
        """
        if key not in self.rows:
            self.rows[key] = DictWithTime([], timestamp=timestamp)
        for column in columns:
            self.rows[key].__setitem__(column, columns[column], timestamp)
        return self.rows[key][columns.keys()[0]][1]

    def get_indexed_slices(self, index_clause, **kwargs):
        """Grabs rows that match a pycassa index clause.

        See :meth:`pycassa.index.create_index_clause()` for creating such an
        index clause."""
        keys = []
        for key, row in self.rows.iteritems():
            for expr in index_clause.expressions:
                if (
                    expr.column_name in row and
                    operator_dict[expr.op](row[expr.column_name][0], expr.value)
                ):
                    keys.append(key)
        data = self.multiget(keys, **kwargs).items()
        return data

    def remove(self, key, columns=None):
        """Remove a key (or selected columns of it) from the stub.

        Dropping the last column of a row removes the row entirely.
        """
        if key not in self.rows:
            raise NotFoundException()
        if columns is None:
            del self.rows[key]
        else:
            for c in columns:
                if c in self.rows[key]:
                    del self.rows[key][c]
            if not self.rows[key]:
                del self.rows[key]
        return gm_timestamp()

    def get_range(self, include_timestamp=False, columns=None, **kwargs):
        """Currently just gets all values from the column family."""
        # Bug fix: include_timestamp was previously passed positionally into
        # get()'s column_start parameter; both arguments must go by keyword.
        return [(key, self.get(key, columns=columns,
                               include_timestamp=include_timestamp))
                for key in self.rows]

    def truncate(self):
        """Clears all data from the column family stub."""
        self.rows.clear()
| {
"repo_name": "pycassa/pycassa",
"path": "pycassa/contrib/stubs.py",
"copies": "1",
"size": "8186",
"license": "mit",
"hash": 8383586363816461000,
"line_mean": 30.9765625,
"line_max": 113,
"alpha_frac": 0.5944295138,
"autogenerated": false,
"ratio": 4.070611636001989,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008492797253205186,
"num_lines": 256
} |
# A function for generating a parametric fuselage external surface (outer mould
# line) model. For a description of the parameterisation, see the article:
# A. Sobester, "'Self-designing' Parametric Geometries", AIAA SciTech 2015,
# Orlando, FL.
# ==============================================================================
# AirCONICS
# Aircraft CONfiguration through Integrated Cross-disciplinary Scripting
# version 0.2
# Andras Sobester, 2015.
# Bug reports to a.sobester@soton.ac.uk or @ASobester please.
# ==============================================================================
from __future__ import division
import rhinoscriptsyntax as rs, AirCONICStools as act, primitives as prim, itertools
import Rhino
import airconics_setup
def _AirlinerFuselagePlanView(NoseLengthRatio, TailLengthRatio):
# Internal function. Defines the control
# polygons of the fuselage in side view
kN = NoseLengthRatio/0.182
tN = TailLengthRatio/0.293
PlanPort = [
[0, 0, 0],
[0*kN, -0.1, 0],
[0.332*kN, -0.395, 0],
[1.250*kN, -0.810, 0],
[2.517*kN, -1.074, 0],
[4*kN , -1.15, 0],
[4*kN , -1.15, 0],
# Parallel sided section here
[22-(22-15.55)*tN, -1.15, 0],
[22-(22-15.55)*tN, -1.15, 0],
[22-(22-16.428)*tN, -1.126, 0],
[22-(22-20.3362)*tN,-0.483,0],
[22, -0.0987,0]]
for i in range(len(PlanPort)):
PlanPort[i][0] = PlanPort[i][0]*2.541
PlanPort[i][1] = PlanPort[i][1]*2.541
PlanPort[i][2] = PlanPort[i][2]*2.541
NoseEndX = 4*kN*2.541
TailStartX = (22-(22-15.55)*tN)*2.541
return PlanPort, NoseEndX, TailStartX
def _AirlinerFuselageSideView(NoseLengthRatio, TailLengthRatio):
# Internal function. Defines the control
# polygons of the fuselage in side view
kN = NoseLengthRatio/0.182
tN = TailLengthRatio/0.293
# The upper contour control points
# of the fuselage in side view
AFSVUpper = [
[0, 0, 0],
[0, 0, 0.3],
[1.395*kN, 0, 1.547],
[4*kN, 0, 1.686],
[4*kN, 0, 1.686],
# parallel section here
[22-(22-15.55)*tN, 0, 1.686],
[22-(22-15.55)*tN, 0, 1.686],
[22-(22-19.195)*tN,0, 1.549],
[22 ,0, 0.904]]
for i in range(len(AFSVUpper)):
AFSVUpper[i][0] = AFSVUpper[i][0]*2.541
AFSVUpper[i][1] = AFSVUpper[i][1]*2.541
AFSVUpper[i][2] = AFSVUpper[i][2]*2.541
# The lower contour control points
# of the fuselage in side view
AFSVLower = [
[0, 0, 0],
[0, 0, -0.3],
[0.947*kN, 0, -0.517],
[4*kN, 0, -0.654],
[4*kN, 0, -0.654],
# Parallel sides section
[22-(22-15.55)*tN, 0, -0.654],
[22-(22-15.55)*tN, 0, -0.654],
# Tailstrike slope section
[22-(22-18.787)*tN,0, -0.256],
[22 ,0, 0.694]]
for i in range(len(AFSVLower)):
AFSVLower[i][0] = AFSVLower[i][0]*2.541
AFSVLower[i][1] = AFSVLower[i][1]*2.541
AFSVLower[i][2] = AFSVLower[i][2]*2.541
return AFSVUpper, AFSVLower
def _FuselageLongitudinalGuideCurves(NoseLengthRatio, TailLengthRatio):
    # Internal function. Defines the four longitudinal curves that outline the
    # fuselage (outer mould line): the upper and lower side-view contours plus
    # the port and starboard plan-view contours, the latter obtained by
    # projecting the plan view onto a surface lofted around the mean curve.
    FSVU, FSVL = _AirlinerFuselageSideView(NoseLengthRatio, TailLengthRatio)
    FSVUCurve = rs.AddCurve(FSVU)
    FSVLCurve = rs.AddCurve(FSVL)
    AFPVPort, NoseEndX, TailStartX = _AirlinerFuselagePlanView(NoseLengthRatio, TailLengthRatio)
    # Generate plan view
    PlanPortCurve = rs.AddCurve(AFPVPort)
    # How wide is the fuselage? (bounding box of the plan-view curve)
    (Xmin,Ymin,Zmin,Xmax,Ymax,Zmax) = act.ObjectsExtents(PlanPortCurve)
    # Generate a slightly wider projection surface, swept along the mean
    # curve of the two side-view contours.
    FSVMeanCurve = rs.MeanCurve(FSVUCurve, FSVLCurve)
    RuleLinePort = rs.AddLine((0,0,0),(0,-1.1*abs(Ymax-Ymin),0))
    FSVMCEP = rs.CurveEndPoint(FSVMeanCurve)
    AftLoftEdgePort = rs.CopyObject(RuleLinePort, FSVMCEP)
    ParallelLoftEdgePort = rs.CopyObject(FSVMeanCurve,(0,-1.1*abs(Ymax-Ymin),0))
    LSPort = rs.AddSweep2((FSVMeanCurve,ParallelLoftEdgePort ),(RuleLinePort, AftLoftEdgePort ))
    # Project the plan view onto the mean surface (projection direction +z)
    PortCurve = rs.ProjectCurveToSurface(PlanPortCurve , LSPort ,(0,0,100))
    # House-keeping: delete the temporary projection geometry
    rs.DeleteObjects([LSPort,PlanPortCurve,ParallelLoftEdgePort,RuleLinePort,AftLoftEdgePort])
    # Tidy up the mean curve. This is necessary for a smooth result and removing
    # it can render the algorithm unstable. However, FitCurve itself may sometimes
    # be slightly unstable.
    FLength = abs(Xmax-Xmin) # establish a reference length
    PortCurveSimplified = rs.FitCurve(PortCurve, distance_tolerance = FLength*0.001)
    StarboardCurveSimplified = act.MirrorObjectXZ(PortCurveSimplified)
    rs.DeleteObject(PortCurve)
    # Compute the actual end points of the longitudinal curves: the usable
    # range ends where the shortest of the four curves ends.
    (Xmin,Ymin,Zmin,Xmax1,Ymax,Zmax) = act.ObjectsExtents(StarboardCurveSimplified)
    (Xmin,Ymin,Zmin,Xmax2,Ymax,Zmax) = act.ObjectsExtents(PortCurveSimplified)
    (Xmin,Ymin,Zmin,Xmax3,Ymax,Zmax) = act.ObjectsExtents(FSVUCurve)
    (Xmin,Ymin,Zmin,Xmax4,Ymax,Zmax) = act.ObjectsExtents(FSVLCurve)
    EndX = min([Xmax1,Xmax2,Xmax3,Xmax4])
    return StarboardCurveSimplified, PortCurveSimplified, FSVUCurve, FSVLCurve, FSVMeanCurve, NoseEndX, TailStartX, EndX
def _BuildFuselageOML(NoseLengthRatio, TailLengthRatio, CylindricalMidSection, SimplificationReqd):
    # Internal function. Fits the fuselage outer mould line surface through a
    # network of cross-section curves placed along the four longitudinal guide
    # curves. If the network surface fit fails, the cross-section density is
    # reduced row by row (NetworkSrfSettings) and, as a last resort, a
    # two-rail sweep is attempted. Returns (surface, stern point).
    MaxFittingAttempts = 6
    FittingAttempts = -1
    # Each row gives the station counts for the five longitudinal segments
    # (nose tip, nose, nose-to-cylinder blend, parallel mid-section, tail);
    # later rows are progressively coarser fallbacks.
    NetworkSrfSettings = [
        [35, 20, 15, 5, 20],
        [35, 30, 15, 5, 20],
        [35, 20, 15, 2, 20],
        [30, 30, 15, 2, 20],
        [30, 20, 15, 2, 20],
        [25, 20, 15, 2, 20],
        [20, 20, 15, 2, 20],
        [15, 20, 15, 2, 20]]
    StarboardCurve, PortCurve, FSVUCurve, FSVLCurve, FSVMeanCurve, NoseEndX, TailStartX, EndX = _FuselageLongitudinalGuideCurves(NoseLengthRatio, TailLengthRatio)
    while FittingAttempts <= MaxFittingAttempts:
        FittingAttempts = FittingAttempts + 1
        # Construct array of cross section definition frames
        SX0 = 0
        Step01 = NetworkSrfSettings[FittingAttempts][0]
        SX1 = 0.04*NoseEndX
        Step12 = NetworkSrfSettings[FittingAttempts][1]
        SX2 = SX1 + 0.25*NoseEndX
        Step23 = NetworkSrfSettings[FittingAttempts][2]
        SX3 = NoseEndX
        Step34 = NetworkSrfSettings[FittingAttempts][3]
        SX4 = TailStartX
        Step45 = NetworkSrfSettings[FittingAttempts][4]
        SX5 = EndX
        print "Attempting network surface fit with network density setup ", NetworkSrfSettings[FittingAttempts][:]
        # Station x-coordinates for each segment (at least 2 per segment).
        Stations01 = act.pwfrange(SX0,SX1,max([Step01,2]))
        Stations12 = act.pwfrange(SX1,SX2,max([Step12,2]))
        Stations23 = act.pwfrange(SX2,SX3,max([Step23,2]))
        Stations34 = act.pwfrange(SX3,SX4,max([Step34,2]))
        Stations45 = act.pwfrange(SX4,SX5,max([Step45,2]))
        # Concatenate segments, dropping duplicated segment endpoints.
        StationRange = Stations01[:-1] + Stations12[:-1] + Stations23[:-1] + Stations34[:-1] + Stations45
        C = []
        FirstTime = True
        for XStation in StationRange:
            # Cross-section plane at this station, normal to the x axis.
            P = rs.PlaneFromPoints((XStation,0,0),(XStation,1,0),(XStation,0,1))
            # Intersect the section plane with the four guide curves and the
            # mean (centre) curve.
            IP1 = rs.PlaneCurveIntersection(P,StarboardCurve)
            IP2 = rs.PlaneCurveIntersection(P,FSVUCurve)
            IP3 = rs.PlaneCurveIntersection(P,PortCurve)
            IP4 = rs.PlaneCurveIntersection(P,FSVLCurve)
            IPcentre = rs.PlaneCurveIntersection(P,FSVMeanCurve)
            IPoint1 = rs.AddPoint(IP1[0][1])
            IPoint2 = rs.AddPoint(IP2[0][1])
            IPoint3 = rs.AddPoint(IP3[0][1])
            IPoint4 = rs.AddPoint(IP4[0][1])
            IPointCentre = rs.AddPoint(IPcentre[0][1])
            # Vertical extent of this section (lower minus upper contour z).
            PseudoDiameter = abs(IP4[0][1].Z-IP2[0][1].Z)
            if CylindricalMidSection and NoseEndX < XStation < TailStartX:
                # Ensure that the parallel section of the fuselage is cylindrical
                # if CylindricalMidSection is True
                print "Enforcing circularity in the central section..."
                if FirstTime:
                    # Freeze the radius at the first mid-section station.
                    PseudoRadius = PseudoDiameter/2
                    FirstTime = False
                Pc = rs.PointCoordinates(IPointCentre)
                P1 = P2 = P3 = Pc
                P1 = rs.PointAdd(P1,(0,PseudoRadius,0))
                P2 = rs.PointAdd(P2,(0,0,PseudoRadius))
                P3 = rs.PointAdd(P3,(0,-PseudoRadius,0))
                c = rs.AddCircle3Pt(P1, P2, P3)
            else:
                # Closed interpolated section through the four contour points.
                c = rs.AddInterpCurve([IPoint1,IPoint2,IPoint3,IPoint4,IPoint1],knotstyle=3)
                # Once CSec is implemented in Rhino Python, this could be replaced
            rs.DeleteObjects([IPoint1,IPoint2,IPoint3,IPoint4,IPointCentre])
            list.append(C,c)
        # Fit fuselage external surface
        CurveNet = []
        # NOTE(review): the first section (C[0], at the nose tip) is excluded
        # from the network — presumably degenerate at the tip; confirm.
        for c in C[1:]:
            list.append(CurveNet,c)
        list.append(CurveNet, FSVUCurve)
        list.append(CurveNet, PortCurve)
        list.append(CurveNet, FSVLCurve)
        list.append(CurveNet, StarboardCurve)
        FuselageOMLSurf = rs.AddNetworkSrf(CurveNet)
        rs.DeleteObjects(C)
        if not(FuselageOMLSurf==None):
            print "Network surface fit succesful on attempt ", FittingAttempts+1
            FittingAttempts = MaxFittingAttempts+1 # Force an exit from 'while'
    # If all attempts at fitting a network surface failed, we attempt a Sweep2
    if FuselageOMLSurf==None:
        print "Failed to fit network surface to the external shape of the fuselage"
        print "Attempting alternative fitting method, quality likely to be low..."
        try:
            FuselageOMLSurf = rs.AddSweep2([FSVUCurve,FSVLCurve],C[:])
        except:
            FuselageOMLSurf = False
        SimplificationReqd = True # Enforce simplification
        if not(FuselageOMLSurf):
            print "Alternative fitting method failed too. Out of ideas."
    if FuselageOMLSurf and SimplificationReqd:
        # Smooth the fitted surface via the interactive FitSrf command, with
        # a tolerance proportional to the fuselage length.
        rs.UnselectAllObjects()
        rs.SelectObject(FuselageOMLSurf)
        ToleranceStr = str(0.0005*EndX)
        print "Smoothing..."
        rs.Command("FitSrf " + ToleranceStr)
        rs.UnselectAllObjects()
    # Compute the stern point coordinates of the fuselage (midway between the
    # aft ends of the upper and lower contours, on the symmetry plane).
    Pu = rs.CurveEndPoint(FSVUCurve)
    Pl = rs.CurveEndPoint(FSVLCurve)
    SternPoint = [Pu.X, Pu.Y, 0.5*(Pu.Z+Pl.Z)]
    rs.DeleteObjects([FSVUCurve,FSVLCurve,PortCurve,StarboardCurve,FSVMeanCurve])
    return FuselageOMLSurf, SternPoint
def CockpitWindowContours(Height = 1.620, Depth = 5):
    """Create the four extruded cockpit window panels.

    Height positions the windows vertically; Depth is the extrusion length
    along the fuselage (x) axis. Returns the two port-side panels and their
    starboard mirror images.
    """
    # Forward port window: closed polyline with filleted corners.
    fwd_pts = [[0.000, 0.076, Height - 1.620 + 2.194],
               [0.000, 0.852, Height - 1.620 + 2.290],
               [0.000, 0.904, Height + 0.037],
               [0.000, 0.076, Height]]
    CWC1 = rs.AddPolyline(fwd_pts + [fwd_pts[0]])
    rs.SelectObject(CWC1)
    rs.Command("_FilletCorners 0.08 ")
    # Side port window: closed polyline with filleted corners.
    side_pts = [[0.000, 0.951, Height - 1.620 + 2.289],
                [0.000, 1.343, Height - 1.620 + 2.224],
                [0.000, 1.634, Height - 1.620 + 1.773],
                [0.000, 1.557, Height - 1.620 + 1.588],
                [0.000, 1.027, Height - 1.620 + 1.671]]
    CWC2 = rs.AddPolyline(side_pts + [side_pts[0]])
    rs.SelectObject(CWC2)
    rs.Command("_FilletCorners 0.08 ")
    # Mirror both outlines to the starboard side.
    CWC3 = act.MirrorObjectXZ(CWC1)
    CWC4 = act.MirrorObjectXZ(CWC2)
    # Extrude all four outlines along the x axis, then clean up.
    ExtPathId = rs.AddLine([0, 0, 0], [Depth, 0, 0])
    CWC1s = rs.ExtrudeCurve(CWC1, ExtPathId)
    CWC2s = rs.ExtrudeCurve(CWC2, ExtPathId)
    CWC3s = rs.ExtrudeCurve(CWC3, ExtPathId)
    CWC4s = rs.ExtrudeCurve(CWC4, ExtPathId)
    rs.DeleteObjects([CWC1, CWC2, CWC3, CWC4, ExtPathId])
    return CWC1s, CWC2s, CWC3s, CWC4s
def WindowContour(WinCenter):
    """Closed cabin-window outline centred at WinCenter = [x, z] in the y=0 plane.

    The window is 0.272 wide and 0.468 tall, built from interpolated upper
    and lower half-curves joined into one closed curve.
    """
    xc, zc = WinCenter[0], WinCenter[1]
    top = [xc, 0, zc + 0.468 / 2]
    right = [xc + 0.272 / 2, 0, zc]
    bottom = [xc, 0, zc - 0.468 / 2]
    left = [xc - 0.272 / 2, 0, zc]
    # Upper half: left -> top -> right, vertical tangents at the ends.
    upper_half = rs.AddInterpCurve([left, top, right], start_tangent=[0, 0, 2.5],
                                   end_tangent=[0, 0, -2.5])
    # Lower half: right -> bottom -> left, closing the loop.
    lower_half = rs.AddInterpCurve([right, bottom, left], start_tangent=[0, 0, -2.5],
                                   end_tangent=[0, 0, 2.5])
    return rs.JoinCurves([upper_half, lower_half], delete_input=True)
def MakeWindow(FuselageSrf, Xwc, Zwc):
    """Cut a cabin window into both sides of the fuselage at (Xwc, Zwc).

    Returns (starboard window surface, port window surface, remaining
    fuselage surface).
    """
    contour = WindowContour([Xwc, Zwc])
    # Extrusion paths through each side of the fuselage.
    path_stbd = rs.AddLine([0, 0, 0], [0, 10, 0])
    path_port = rs.AddLine([0, 0, 0], [0, -10, 0])
    # Starboard side: extrude a cutting tube and split the fuselage with it.
    tube_stbd = rs.ExtrudeCurve(contour, path_stbd)
    FuselageSrf, WinStbd = rs.SplitBrep(FuselageSrf, tube_stbd, delete_input=True)
    # Port side, same procedure.
    tube_port = rs.ExtrudeCurve(contour, path_port)
    FuselageSrf, WinPort = rs.SplitBrep(FuselageSrf, tube_port, delete_input=True)
    rs.DeleteObjects([tube_stbd, tube_port, path_stbd, path_port, contour])
    return WinStbd, WinPort, FuselageSrf
def FuselageOML(NoseLengthRatio = 0.182, TailLengthRatio = 0.293, Scaling = [55.902, 55.902, 55.902], NoseCoordinates = [0,0,0], CylindricalMidSection = False, SimplificationReqd = False):
    """Instantiate a parametric fuselage outer mould line (OML) geometry for a
    given set of design variables.

    Returns (surface, stern point) on success, or None if surface fitting
    failed upstream. The default values yield a geometry similar to the
    Boeing 787-8.
    """
    FuselageOMLSurf, SternPoint = _BuildFuselageOML(NoseLengthRatio, TailLengthRatio, CylindricalMidSection, SimplificationReqd)
    if not FuselageOMLSurf:
        # Fitting failed; nothing to scale or position.
        return
    # Per-axis scale factors relative to the baseline 55.902-unit geometry.
    ScalingF = [axis / 55.902 for axis in Scaling]
    # Overall scaling about the world origin.
    FuselageOMLSurf = act.ScaleObjectWorld000(FuselageOMLSurf, ScalingF)
    SternPoint = [coord * factor for coord, factor in zip(SternPoint, ScalingF)]
    # Positioning: translate the nose to the requested coordinates.
    MoveVec = rs.VectorCreate(NoseCoordinates, [0, 0, 0])
    FuselageOMLSurf = rs.MoveObject(FuselageOMLSurf, MoveVec)
    SternPoint = [coord + offset for coord, offset in zip(SternPoint, NoseCoordinates)]
    return FuselageOMLSurf, SternPoint
if __name__ == '__main__':
rs.EnableRedraw(False)
# The defaults will yield a fuselage geometry similar to that of the
# Boeing 787-8.
FuselageOML()
# Another example: for a fuselage shape similar to that of the Airbus A380
# comment out the line above and uncomment the line below:
# FuselageOML(NoseLengthRatio = 0.182, TailLengthRatio = 0.293,
# Scaling = [70.4, 67.36, 80.1],
# NoseCoordinates = [0,0,0],
# CylindricalMidSection = False,
# SimplificationReqd = False)
rs.EnableRedraw() | {
"repo_name": "teknologika/foil-generator",
"path": "airconicsv021/fuselage_oml.py",
"copies": "1",
"size": "15541",
"license": "apache-2.0",
"hash": -8127807802933826000,
"line_mean": 36.6641791045,
"line_max": 188,
"alpha_frac": 0.6221607361,
"autogenerated": false,
"ratio": 2.862589795542457,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3984750531642457,
"avg_score": null,
"num_lines": null
} |
# A function for generating a parametric high bypass turbofan engine nacelle model.
# Run this file directly to get an example (scroll to the end to see it).
# ==============================================================================
# AirCONICS
# Aircraft CONfiguration through Integrated Cross-disciplinary Scripting
# version 0.2
# Andras Sobester, 2015.
# Bug reports to a.sobester@soton.ac.uk or @ASobester please.
# ==============================================================================
from __future__ import division
import rhinoscriptsyntax as rs
import primitives, airconics_setup, AirCONICStools as act
import math
import wing_example_transonic_airliner as tea
import liftingsurface
def TurbofanNacelle(EngineSection, Chord, CentreLocation = [0,0,0],
                    ScarfAngle = 3, HighlightRadius = 1.45,
                    MeanNacelleLength = 5.67):
    """Build a parametric high-bypass turbofan nacelle and its pylon.

    The nacelle (intake lip, cowling, CFD fan/bypass disks, tail cone and
    spinner) is constructed around the origin, then translated to
    CentreLocation; a pylon surface is built between the nacelle and the
    supplied wing chord curve.

    Args:
        EngineSection: wing section cut at the engine station.
            NOTE(review): not referenced in this body -- appears to be kept
            for interface symmetry with the caller; confirm before removing.
        Chord: curve of the local wing chord; its endpoints anchor the pylon
            (the curve is reversed in place as a side effect).
        CentreLocation: [x, y, z] of the intake highlight centre.
            NOTE(review): mutable default list -- harmless here since it is
            only read, never mutated.
        ScarfAngle: intake scarf (tilt) angle in degrees.
        HighlightRadius: radius of the intake highlight circle.
        MeanNacelleLength: overall nacelle length used to scale all features.

    Returns:
        (TFEngine, TFPylon): lists of engine surface ids and pylon surface ids.
    """
    # The defaults yield a nacelle similar to that of an RR Trent 1000 / GEnx
    HighlightDepth = 0.12*MeanNacelleLength
    SectionNo = 100  # number of airfoil sections swept around the intake
    # Draw the nacelle with the centre of the intake highlight circle in 0,0,0
    rs.EnableRedraw(False)
    Highlight = rs.AddCircle3Pt((0,0,HighlightRadius),(0,-HighlightRadius,0),(0,0,-HighlightRadius))
    HighlightCutterCircle = rs.AddCircle3Pt((0,0,HighlightRadius*1.5),(0,-HighlightRadius*1.5,0),(0,0,-HighlightRadius*1.5))
    # Fan disk for CFD boundary conditions
    FanCircle = rs.CopyObject(Highlight, (MeanNacelleLength*0.25, 0, 0))
    FanDisk = rs.AddPlanarSrf(FanCircle)
    # Aft outflow for CFD boundary conditions
    BypassCircle = rs.CopyObject(Highlight, (MeanNacelleLength*0.85, 0, 0))
    BypassDisk = rs.AddPlanarSrf(BypassCircle)
    rs.DeleteObjects([FanCircle, BypassCircle])
    # Outflow cone
    TailConeBasePoint = [MeanNacelleLength*0.84, 0,0]
    TailConeApex = [MeanNacelleLength*1.35, 0, 0]
    TailConeRadius = HighlightRadius*0.782
    TailCone = rs.AddCone(TailConeBasePoint, TailConeApex, TailConeRadius)
    # Spinner cone
    SpinnerConeBasePoint = [MeanNacelleLength*0.26, 0,0]
    SpinnerConeApex = [MeanNacelleLength*0.08, 0, 0]
    SpinnerConeRadius = MeanNacelleLength*0.09
    Spinner = rs.AddCone(SpinnerConeBasePoint, SpinnerConeApex, SpinnerConeRadius)
    # Tilt the intake
    RotVec = rs.VectorCreate((0,0,0),(0,1,0))
    Highlight = rs.RotateObject(Highlight, (0,0,0), ScarfAngle, axis = RotVec)
    # Set up the disk for separating the intake lip later
    HighlightCutterCircle = rs.RotateObject(HighlightCutterCircle, (0,0,0), ScarfAngle, axis = RotVec)
    HighlightCutterDisk = rs.AddPlanarSrf(HighlightCutterCircle)
    rs.DeleteObject(HighlightCutterCircle)
    rs.MoveObject(HighlightCutterDisk, (HighlightDepth, 0,0))
    # Build the actual airfoil sections to define the nacelle
    HighlightPointVector = rs.DivideCurve(Highlight, SectionNo)
    Sections = []
    TailPoints = []
    Rotation = 0
    Twist = 0
    AirfoilSeligName = 'goe613'
    SmoothingPasses = 1
    for HighlightPoint in HighlightPointVector:
        # Each section's chord shrinks as the highlight point moves aft
        ChordLength = MeanNacelleLength - HighlightPoint.X
        Af = primitives.Airfoil(HighlightPoint,ChordLength, Rotation, Twist, airconics_setup.SeligPath)
        AfCurve,Chrd = primitives.Airfoil.AddAirfoilFromSeligFile(Af, AirfoilSeligName, SmoothingPasses)
        rs.DeleteObject(Chrd)
        P = rs.CurveEndPoint(AfCurve)
        list.append(TailPoints, P)
        AfCurve = act.AddTEtoOpenAirfoil(AfCurve)
        list.append(Sections, AfCurve)
        Rotation = Rotation + 360.0/SectionNo
    # Close the tail curve by repeating the first point
    list.append(TailPoints, TailPoints[0])
    # Build the actual nacelle OML surface
    EndCircle = rs.AddInterpCurve(TailPoints)
    Nacelle = rs.AddSweep2([Highlight, EndCircle], Sections, closed = True)
    # Separate the lip
    Cowling, HighlightSection = rs.SplitBrep(Nacelle, HighlightCutterDisk, True)
    # Now build the pylon between the engine and the specified chord on the wing
    CP1 = [MeanNacelleLength*0.26+CentreLocation[0],CentreLocation[1],CentreLocation[2]+HighlightRadius*0.1]
    CP2 = [MeanNacelleLength*0.4+CentreLocation[0],CentreLocation[1],HighlightRadius*1.45+CentreLocation[2]]
    CP3 = rs.CurveEndPoint(Chord)
    rs.ReverseCurve(Chord)
    CP4 = rs.CurveEndPoint(Chord)
    # Move the engine into its actual place on the wing
    rs.MoveObjects([HighlightSection, Cowling, FanDisk, BypassDisk, TailCone, Spinner], CentreLocation)
    # Pylon wireframe
    PylonTop = rs.AddInterpCurve([CP1, CP2, CP3, CP4])
    PylonAf = primitives.Airfoil(CP1,MeanNacelleLength*1.35, 90, 0, airconics_setup.SeligPath)
    PylonAfCurve,PylonChord = primitives.Airfoil.AddNACA4(PylonAf, 0, 0, 12, 3)
    LowerTE = rs.CurveEndPoint(PylonChord)
    PylonTE = rs.AddLine(LowerTE, CP4)
    # Create the actual pylon surface
    PylonLeft = rs.AddNetworkSrf([PylonTop, PylonAfCurve, PylonTE])
    # Mirror about the XZ plane (shift to y=0, mirror, shift both back)
    rs.MoveObject(PylonLeft, (0,-CentreLocation[1],0))
    PylonRight = act.MirrorObjectXZ(PylonLeft)
    rs.MoveObject(PylonLeft, (0,CentreLocation[1],0))
    rs.MoveObject(PylonRight, (0,CentreLocation[1],0))
    PylonAfCurve = act.AddTEtoOpenAirfoil(PylonAfCurve)
    PylonAfSrf = rs.AddPlanarSrf(PylonAfCurve)
    # Assigning basic surface properties
    act.AssignMaterial(Cowling, "ShinyBABlueMetal")
    act.AssignMaterial(HighlightSection, "UnpaintedMetal")
    act.AssignMaterial(TailCone, "UnpaintedMetal")
    act.AssignMaterial(FanDisk, "FanDisk")
    act.AssignMaterial(Spinner, "ShinyBlack")
    act.AssignMaterial(BypassDisk, "FanDisk")
    act.AssignMaterial(PylonLeft,"White_composite_external")
    act.AssignMaterial(PylonRight,"White_composite_external")
    # Clean-up
    rs.DeleteObject(HighlightCutterDisk)
    rs.DeleteObjects(Sections)
    rs.DeleteObject(EndCircle)
    rs.DeleteObject(Highlight)
    rs.DeleteObjects([PylonTop, PylonAfCurve, PylonChord, PylonTE])
    rs.Redraw()
    TFEngine = [Cowling, HighlightSection, TailCone, FanDisk, Spinner, BypassDisk]
    TFPylon = [PylonLeft, PylonRight, PylonAfSrf]
    return TFEngine, TFPylon
if __name__ == "__main__":
# Generate a wing first to attach the engine to
P = (0,0,0)
LooseSurf = 1
SegmentNo = 10
Wing = liftingsurface.LiftingSurface(P, tea.mySweepAngleFunctionAirliner,
tea.myDihedralFunctionAirliner,
tea.myTwistFunctionAirliner,
tea.myChordFunctionAirliner,
tea.myAirfoilFunctionAirliner,
LooseSurf, SegmentNo, TipRequired = True)
ChordFactor = 1
ScaleFactor = 50
rs.EnableRedraw(False)
WingSurf, ActualSemiSpan, LSP_area, RootChord, AR, WingTip = Wing.GenerateLiftingSurface(ChordFactor, ScaleFactor)
rs.EnableRedraw()
SpanStation = 0.3 # The engine is to be placed at 30% span
EngineDia = 2.9
NacelleLength = 1.95*EngineDia
rs.EnableRedraw(False)
EngineSection, Chord = act.CutSect(WingSurf, SpanStation)
CEP = rs.CurveEndPoint(Chord)
# Variables controlling the position of the engine with respect to the wing
EngineCtrFwdOfLE = 0.98
EngineCtrBelowLE = 0.35
Scarf_deg = 4
# Now build the engine and its pylon
EngineStbd, PylonStbd = TurbofanNacelle(EngineSection, Chord,
CentreLocation = [CEP.X-EngineCtrFwdOfLE*NacelleLength,CEP.Y,CEP.Z-EngineCtrBelowLE*NacelleLength],
ScarfAngle = Scarf_deg, HighlightRadius = EngineDia/2.0,
MeanNacelleLength = NacelleLength)
rs.DeleteObjects([EngineSection, Chord]) | {
"repo_name": "teknologika/foil-generator",
"path": "airconicsv021/engine.py",
"copies": "1",
"size": "7588",
"license": "apache-2.0",
"hash": 643333494008982100,
"line_mean": 40.4022346369,
"line_max": 124,
"alpha_frac": 0.6939905113,
"autogenerated": false,
"ratio": 3.1136643414033647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9182404683802603,
"avg_score": 0.0250500337801524,
"num_lines": 179
} |
""" A function that helps develop code for both Python 2 and 3.
Source: https://github.com/mitsuhiko/flask/blob/0.10.1/flask/_compat.py
"""
def with_metaclass(meta, *bases):
    """Create a temporary base class so *meta* applies on both Python 2 and 3.

    Returns a throwaway class whose metaclass intercepts the single level of
    subclass creation and replaces itself with ``meta(name, bases, d)``.
    Source: flask/_compat.py (0.10.1).
    """
    # This requires a bit of explanation: the basic idea is to make a
    # dummy metaclass for one level of class instantiation that replaces
    # itself with the actual metaclass. Because of internal type checks
    # we also need to make sure that we downgrade the custom metaclass
    # for one level to something closer to type (that's why __call__ and
    # __init__ comes back from type etc.).
    #
    # This has the advantage over six.with_metaclass in that it does not
    # introduce dummy classes into the final MRO.
    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__
        def __new__(cls, name, this_bases, d):
            if this_bases is None:
                # Creating the temporary placeholder class itself.
                return type.__new__(cls, name, (), d)
            # Real subclass creation: hand off to the actual metaclass.
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})
class Meta(type):
    """Metaclass that upper-cases every non-dunder class attribute name.

    An attribute declared as ``bar`` on a class built with this metaclass is
    exposed as ``BAR``; names starting with ``__`` (``__module__``,
    ``__qualname__``, ...) are kept unchanged.
    """
    def __new__(cls, name, bases, d):
        new_attr = {}
        # Use a loop variable distinct from ``name``: the original reused
        # ``name``, so the created class ended up named after the last
        # attribute processed instead of its declared class name.
        for attr_name, val in d.items():
            if attr_name.startswith('__'):
                new_attr[attr_name] = val
            else:
                new_attr[attr_name.upper()] = val
        return type.__new__(cls, name, bases, new_attr)
class Foo(with_metaclass(Meta, object)):
    # Declared as 'bar' but exposed as 'BAR': Meta upper-cases non-dunder
    # attribute names at class-creation time.
    bar = 'test'
def main():
    """Demo entry point: print Foo's metaclass-upper-cased attribute."""
    # print() call syntax is valid on both Python 2 and 3 for a single
    # argument; the original `print Foo.BAR` statement was Python-2-only,
    # at odds with this module's 2/3-compatibility purpose.
    print(Foo.BAR)


if __name__ == '__main__':
    main()
| {
"repo_name": "seanlin0800/python_snippets",
"path": "compact_metaclass.py",
"copies": "1",
"size": "1473",
"license": "unlicense",
"hash": 3720676814937814000,
"line_mean": 27.8823529412,
"line_max": 72,
"alpha_frac": 0.5926680244,
"autogenerated": false,
"ratio": 3.8661417322834644,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49588097566834644,
"avg_score": null,
"num_lines": null
} |
# A function that returns names of the jobs on the queue
def jobs_on_queue():
    """Return base command names of the current user's jobs on the queue.

    The scheduler is detected from the filesystem: HTCondor when the user
    has a /home/<user>/stash directory, otherwise a PBS-style host when the
    CCB cluster marker script exists. Returns [] when neither is found.
    """
    onq = []
    import os
    import subprocess
    # Guess the user name from the working directory path
    # (/home/<user>/... or /stash/<...>/<user>/...).
    pwd = os.getcwd().split('/')
    if len(pwd)>1 and pwd[1]=='home':
        username = pwd[2]
    elif len(pwd)>3 and pwd[1]=='stash':
        username = pwd[3]
    else:
        username = ''
    if os.path.exists('/home/%s/stash'%username):
        # HTCondor: `condor_q -long` prints one record per job, records
        # separated by blank lines, each line of the form `Key = Value`.
        condor_q = subprocess.Popen(['condor_q','-long',username], \
            stdout=subprocess.PIPE).stdout.read().split('\n\n')
        condor_q = [dict([(line[:line.find('=')-1],line[line.find('=')+2:]) for line in c.split('\n')]) for c in condor_q if c!='']
        onq = [q['Cmd'][1:-1] for q in condor_q if q['JobStatus']!='3'] # 3 is removed
        onq = [os.path.basename(line) for line in onq] # Get rid of directory name
        onq = ['-'.join(line.split('-')[:-1]) for line in onq] # Get rid of job number
    elif os.path.exists('/home/dminh/scripts/qsub_command.py'): # CCB cluster
        # PBS-style queue: take the value of each Job_Name line.
        qstat = subprocess.Popen(['qstat','-f'], \
            stdout=subprocess.PIPE).stdout.read().split('\n')
        onq = [line.strip().split()[-1] for line in qstat if line.find('Job_Name')>-1]
    return onq
| {
"repo_name": "gkumar7/AlGDock",
"path": "Pipeline/_jobs_on_queue.py",
"copies": "2",
"size": "1118",
"license": "mit",
"hash": -1114157465785252200,
"line_mean": 42,
"line_max": 127,
"alpha_frac": 0.6082289803,
"autogenerated": false,
"ratio": 3.063013698630137,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4671242678930137,
"avg_score": null,
"num_lines": null
} |
"""A function that sorts a list of numbers using the radix method."""
def radix(lst):
    """Sort a list of non-negative integers using LSD radix sort (base 10).

    Args:
        lst: list of ints to sort; the input list is not modified.
            NOTE(review): negative integers are accepted by the type check
            but are not ordered correctly by digit bucketing -- confirm
            callers only pass non-negative values.

    Returns:
        A new sorted list ([] for empty input).

    Raises:
        TypeError: if any element is not an integer.
    """
    for num in lst:
        if not isinstance(num, int):
            raise TypeError('Must use integers')
    if not lst:
        # Guard: max() below raises ValueError on an empty sequence.
        return []
    digit_place = 1
    new_lst = lst
    # One bucketing pass per digit of the largest value (max hoisted out of
    # the loop; the original recomputed it every pass and ran one extra,
    # no-op pass).
    for _ in range(len(str(max(lst)))):
        buckets = [[] for _ in range(10)]
        for num in new_lst:
            buckets[(num // digit_place) % 10].append(num)
        new_lst = [num for bucket in buckets for num in bucket]
        digit_place *= 10
    return new_lst
if __name__ == "__main__":
import timeit
import random
lst_1 = [1]
lst_2 = [x for x in range(100)][::-1]
lst_3 = [random.randint(0, 100) for x in range(100)]
best_case = timeit.timeit(
stmt="radix(lst_1)",
setup="from __main__ import radix, lst_1",
number=1000
) * 1000
worst_case = timeit.timeit(
stmt="radix(lst_2)",
setup="from __main__ import radix, lst_2",
number=1000
) * 1000
average_case = timeit.timeit(
stmt="radix(lst_3)",
setup="from __main__ import radix, lst_3",
number=1000
) * 1000
print("radix's best case scenario takes {} microseconds.\n radix's average case scenario takes {} microseconds.\n radix's worst case scenario takes {} microseconds.".format(best_case, average_case, worst_case))
| {
"repo_name": "rwisecar/data-structures",
"path": "src/radixsort.py",
"copies": "1",
"size": "1574",
"license": "mit",
"hash": -2097331424745691100,
"line_mean": 28.6981132075,
"line_max": 214,
"alpha_frac": 0.5673443456,
"autogenerated": false,
"ratio": 3.5610859728506785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4628430318450678,
"avg_score": null,
"num_lines": null
} |
"""A function that sorts a list using the quicksort method."""
def quicksort(lst):
    """Sort a list using the quicksort method.

    Returns a new sorted list; the input list is not modified.

    Raises:
        TypeError: if the argument (with two or more items) is not a list.
    """
    if len(lst) < 2:
        return lst
    elif isinstance(lst, list):
        pivot = lst[len(lst) // 2]
        smaller = [item for item in lst if item < pivot]
        # Keep *all* items equal to the pivot. The original compared with
        # ``is`` (identity) and re-inserted the pivot exactly once, which
        # silently dropped duplicates (e.g. quicksort([1, 1, 1]) -> [1]).
        equal = [item for item in lst if item == pivot]
        larger = [item for item in lst if item > pivot]
        return quicksort(smaller) + equal + quicksort(larger)
    else:
        raise TypeError("You may only sort a list.")
if __name__ == "__main__":
import timeit
import random
lst_1 = [1]
lst_2 = [x for x in range(100)][::-1]
lst_3 = [random.randint(0, 100) for x in range(100)]
best_case = timeit.timeit(
stmt="quicksort(lst_1)",
setup="from __main__ import quicksort, lst_1",
number=1000
) * 1000
worst_case = timeit.timeit(
stmt="quicksort(lst_2)",
setup="from __main__ import quicksort, lst_2",
number=1000
) * 1000
average_case = timeit.timeit(
stmt="quicksort(lst_3)",
setup="from __main__ import quicksort, lst_3",
number=1000
) * 1000
print("Quicksort's best case scenario takes {} microseconds.\n Quicksort's average case scenario takes {} microseconds.\n Quicksort's worst case scenario takes {} microseconds.".format(best_case, average_case, worst_case))
| {
"repo_name": "rwisecar/data-structures",
"path": "src/quicksort.py",
"copies": "1",
"size": "1509",
"license": "mit",
"hash": 312433318968042240,
"line_mean": 28.5882352941,
"line_max": 226,
"alpha_frac": 0.5705765408,
"autogenerated": false,
"ratio": 3.7725,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9842644649981999,
"avg_score": 0.00008637816360024187,
"num_lines": 51
} |
"""A function to begin dataset downloads in a separate thread."""
import sys
from time import time
from threading import Thread, Lock
import wx
from retriever.lib.tools import final_cleanup
class DownloadThread(Thread):
    """Daemon thread that downloads one retriever dataset script.

    Progress text destined for the GUI is captured by redirecting stdout
    and accumulated in ``self.output`` under ``self.output_lock``;
    ``self.done`` flags successful completion.

    NOTE(review): this module uses Python 2 print statements throughout.
    """
    def __init__(self, engine, script):
        Thread.__init__(self)
        self.engine = engine
        # Disconnect in the spawning thread; run() reconnects so the DB
        # connection belongs to this thread.
        self.engine.disconnect()
        self.script = script
        self.daemon = True
        self.output_lock = Lock()
        self.output = []  # lines of (HTML-ish) status text for the GUI
        self.done = False
    def run(self):
        """Thread body: connect, download, mark done; always disconnect."""
        try:
            self.engine.connect()
            self.download_script()
            self.done = True
            self.engine.disconnect()
        except:
            # Release the connection on any failure, then let it surface.
            self.engine.disconnect()
            raise
        return
    def finished(self):
        # True once download_script completed without raising.
        return self.done
    def download_script(self):
        """Run the script's download, teeing print output into self.output."""
        engine = self.engine
        script = self.script
        worker = self
        start = time()
        class download_stdout:
            # Minimal file-like object: records non-empty writes under the
            # lock so the GUI thread can read them safely.
            def write(self, s):
                if s and s != '\n':
                    worker.output_lock.acquire()
                    worker.output.append(s)
                    worker.output_lock.release()
        sys.stdout = download_stdout()
        print "Connecting to database..."
        # Connect
        try:
            engine.get_cursor()
        except Exception as e:
            print "<b><font color='red'>Error: There was an error with your database connection.<br />" + e.__str__() + "</font></b>"
            return
        # Download script
        error = False
        print "<b><font color='blue'>Downloading. . .</font></b>"
        try:
            script.download(engine)
        except Exception as e:
            error = True
            print "<b><font color='red'>Error: " + e.__str__() + "</font></b>"
        if not error:
            # Format the elapsed time as HH:MM:SS.ss for the status line.
            finish = time()
            time_diff = finish - start
            if time_diff > 3600:
                h = time_diff // 3600
                time_diff %= 3600
            else:
                h = 0
            if time_diff > 60:
                m = time_diff // 60
                time_diff %= 60
            else:
                m = 0
            s = "%.2f" % (time_diff)
            if len(s.split('.')[0]) < 2:
                s = "0" + s
            print "<b>Done!</b> <i>Elapsed time: %02d:%02d:%s</i>" % (h, m, s)
| {
"repo_name": "davharris/retriever",
"path": "lib/download.py",
"copies": "1",
"size": "2375",
"license": "mit",
"hash": 4922075260314003000,
"line_mean": 25.6853932584,
"line_max": 133,
"alpha_frac": 0.4804210526,
"autogenerated": false,
"ratio": 4.3498168498168495,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5330237902416849,
"avg_score": null,
"num_lines": null
} |
from itertools import islice, repeat
from array import array
def getPrimes(n):
    """Return a sorted list of all primes <= n, sieving odd numbers only.

    Index i of the sieve represents the odd number 2*i + 1, halving memory;
    an unsigned-byte array keeps the sieve compact.

    Args:
        n: inclusive upper bound.

    Returns:
        List of primes <= n in increasing order ([] when n < 2).
    """
    if n < 2:
        # Guard: the sieve below assumes at least index 1 exists; the
        # original raised IndexError for n < 2.
        return []
    r = n // 2
    plist = array("B", repeat(1, r + 1))
    plist[0] = 0  # index 0 is the number 1, which is not prime
    lim = int((n**0.5) / 2 + 1)
    for i in range(1, min(lim, r) + 1):
        p = 2 * i + 1
        if plist[i]:
            sp = 2 * i * (i + 1)  # index of p*p, i.e. (p*p - 1) // 2
            while sp <= r:
                plist[sp] = 0
                sp = sp + p
    # Include index r: for odd n, 2*r + 1 == n itself. The original iterated
    # range(r) and therefore dropped n whenever n was an odd prime
    # (getPrimes(3) returned [2], getPrimes(11) omitted 11). The extra
    # `2*i + 1 <= n` filter stops even n from picking up n + 1.
    primes = [2] + [2 * i + 1 for i in range(1, r + 1)
                    if plist[i] and 2 * i + 1 <= n]
    return primes
if __name__ == '__main__':
    from time import time
    #n = int(input("Enter a number : "))
    print("Testing it")
    # Time the sieve for n = 10^1 .. 10^8 and report the prime counts.
    for i in range(1,9):
        n = 10**i
        print("\ni = {}, n = {:,}".format(i,n))
        t0 = time()
        primes = getPrimes(n)
        print("Time Taken : {:.3f} secs. Number of primes : {:,}\n".format(time()-t0,len(primes)))
| {
"repo_name": "subhrm/google-code-jam-solutions",
"path": "solutions/helpers/utils/prime.py",
"copies": "1",
"size": "1024",
"license": "mit",
"hash": 1538517593782649000,
"line_mean": 23.380952381,
"line_max": 92,
"alpha_frac": 0.5283203125,
"autogenerated": false,
"ratio": 2.752688172043011,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3781008484543011,
"avg_score": null,
"num_lines": null
} |
import locale
locale.setlocale(locale.LC_NUMERIC, 'C')
import numpy as np
from numba import jit
@jit
def getPrimes(n):
    """Return a numpy array of all primes <= n (sieve of Eratosthenes).

    NOTE(review): decorated with bare @jit; whether numba compiles this in
    nopython mode (the list-based fancy indexing may force object mode)
    should be confirmed with @jit(nopython=True).
    """
    # plist[k] == k while k is still considered prime; 0 marks composite.
    plist = np.arange(n+1)
    plist[0] = 0
    plist[1] = 0
    lim = int(n**0.5) + 1
    for i in range(2, lim+1):
        if plist[i] > 0:
            p, sp = i, i*i
            if sp <= n:
                a = p
                b = n//p
                # Mark p*p, p*(p+1), ..., p*(n//p) as composite in one
                # fancy-indexed assignment.
                select = [p*j for j in range(a,b+1)]
                plist[select] = 0
    primes = plist[plist > 0]
    return primes
if __name__ == '__main__':
    from time import time
    #n = int(input("Enter a number : "))
    print("Testing it")
    # Time the sieve for n = 10^1 .. 10^8 (first call includes JIT warm-up).
    for i in range(1,9):
        n = 10**i
        print("\ni = {}, n = {:,}".format(i,n))
        t0 = time()
        primes = getPrimes(n)
        print("Time Taken : {:.3f} secs. Number of primes : {:,}\n".format(time()-t0,len(primes)))
| {
"repo_name": "subhrm/google-code-jam-solutions",
"path": "solutions/helpers/utils/prime0.py",
"copies": "1",
"size": "1048",
"license": "mit",
"hash": -8981370574648114000,
"line_mean": 23.9523809524,
"line_max": 98,
"alpha_frac": 0.5219465649,
"autogenerated": false,
"ratio": 2.9273743016759775,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8890219233190015,
"avg_score": 0.011820326677192437,
"num_lines": 42
} |
''' A function to get all hotels located in a given city '''
import sys
from googleplaces import GooglePlaces, types
# GOOGLE_API_KEY = 'Put key here'
PLACEHOLDER_PRIORITY = 100


def write_hotels(region, query_result, hotel_csv):
    """Append one CSV row per hotel in *query_result* to *hotel_csv*.

    Rows have the form ``country;city;hotel;priority``, where the priority
    is a module-wide counter that decreases by one for every hotel written.

    :param region: location string of the form "City, Country"
    :param query_result: Google Places result with a ``places`` attribute
    :param hotel_csv: writable file-like object receiving the rows
    """
    global PLACEHOLDER_PRIORITY
    city_name, country_name = [part.strip() for part in region.split(',')]
    for hotel in query_result.places:
        row = '{};{};{};{}\n'.format(country_name, city_name,
                                     hotel.name, PLACEHOLDER_PRIORITY)
        hotel_csv.write(row)
        PLACEHOLDER_PRIORITY -= 1
def get_hotels(region):
    """Query Google Places for lodging near *region* and append every page
    of results to hotels.csv.

    NOTE(review): GOOGLE_API_KEY is commented out at the top of this module,
    so this raises NameError until a key is configured.
    """
    hotel_locator = GooglePlaces(GOOGLE_API_KEY)
    # Context manager guarantees the CSV is closed even if a query raises;
    # the original leaked the file handle on any exception.
    with open('hotels.csv', 'a') as hotel_csv:
        query_result = hotel_locator.nearby_search(location=region, types=[types.TYPE_LODGING])
        write_hotels(region, query_result, hotel_csv)
        # Google Places pages its results; follow the page tokens until done.
        while query_result.has_next_page_token:
            query_result = hotel_locator.nearby_search(pagetoken=query_result.next_page_token)
            write_hotels(region, query_result, hotel_csv)
def main(region):
    ''' MAIN function '''
    # region is expected as "City, Country" (see write_hotels).
    get_hotels(region)


if __name__ == '__main__':
    # The region string is taken from the first command-line argument.
    main(sys.argv[1])
| {
"repo_name": "fahadakhan96/hotel-recommend",
"path": "src/gethotels.py",
"copies": "1",
"size": "1199",
"license": "mit",
"hash": 1367317882835370800,
"line_mean": 33.2571428571,
"line_max": 106,
"alpha_frac": 0.6738949124,
"autogenerated": false,
"ratio": 3.0431472081218276,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9190603338665335,
"avg_score": 0.00528775637129828,
"num_lines": 35
} |
""" A function to import symbols. """
def import_symbol(symbol_path):
    """Import and return the symbol named by *symbol_path*.

    Two spellings are accepted:

    * ``'module:attr'`` or ``'module:attr.subattr'`` -- e.g.
      ``import_symbol('tarfile:TarFile.open')`` returns ``TarFile.open``.
    * legacy all-dotted ``'module.attr'`` (old-school traits names) -- only
      a top-level name of the module can be imported this way, e.g.
      ``import_symbol('tarfile.TarFile')``.
    """
    if ':' not in symbol_path:
        # All-dotted path: everything up to the last dot is the module,
        # the final component is the symbol to fetch from it.
        parts = symbol_path.split('.')
        module = __import__('.'.join(parts[:-1]), {}, {}, [parts[-1]], 0)
        return getattr(module, parts[-1])

    module_name, symbol_name = symbol_path.split(':')
    module = __import__(module_name, {}, {}, [symbol_name], 0)
    # The part after ':' may itself be dotted ('TarFile.open'), so it is
    # evaluated in the module's namespace rather than getattr'd once.
    # NOTE: eval executes arbitrary expressions -- never pass untrusted paths.
    return eval(symbol_name, module.__dict__)
#### EOF ######################################################################
| {
"repo_name": "burnpanck/traits",
"path": "traits/util/import_symbol.py",
"copies": "1",
"size": "1063",
"license": "bsd-3-clause",
"hash": 848248853987877400,
"line_mean": 27.7297297297,
"line_max": 79,
"alpha_frac": 0.5682031985,
"autogenerated": false,
"ratio": 4.374485596707819,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5442688795207818,
"avg_score": null,
"num_lines": null
} |
"""A function to start the GUI application."""
import sys
import os
from golem import gui
from werkzeug import _reloader
ORIGINAL_GET_ARGS = None
def run_gui(host=None, port=5000, debug=False):
    """Create the Golem GUI Flask app and run its development server.

    Args:
        host: interface to bind; None lets Flask pick its default.
        port: TCP port for the development server.
        debug: enable Flask debug mode (which activates the reloader).
    """
    # Patch Werkzeug._reloader._get_args_for_reloading()
    # The Flask development server reloader does not work when
    # started from the Golem standalone (PyInstaller) in Linux
    # TODO
    patch_werkzeug_get_args_for_reloading_wrapper()
    app = gui.create_app()
    app.run(host=host, port=port, debug=debug)
def patch_werkzeug_get_args_for_reloading_wrapper():
    """Monkey-patch werkzeug's reloader argument builder, exactly once.

    The first call stashes the original function in ORIGINAL_GET_ARGS so
    the wrapper can delegate to it; subsequent calls are no-ops.
    """
    global ORIGINAL_GET_ARGS
    if ORIGINAL_GET_ARGS is not None:
        return  # already patched
    ORIGINAL_GET_ARGS = _reloader._get_args_for_reloading
    _reloader._get_args_for_reloading = _get_args_for_reloading_wrapper
def _get_args_for_reloading_wrapper():
    # Wrapper around werkzeug's original helper: returns the argv used to
    # re-launch the process, dropping the interpreter when the script file
    # is directly executable (PyInstaller / Nix wrapper case).
    rv = ORIGINAL_GET_ARGS()
    __main__ = sys.modules["__main__"]
    # NOTE(review): assumes rv is [python_executable, script, *args] -- the
    # layout produced by werkzeug's _get_args_for_reloading; confirm on
    # werkzeug upgrades.
    py_script = rv[1]
    if __main__.__package__ is None:
        # Executed a file, like "python app.py".
        if os.name != 'nt' and os.path.isfile(py_script) and os.access(py_script, os.X_OK):
            # The file is marked as executable. Nix adds a wrapper that
            # shouldn't be called with the Python executable.
            rv.pop(0)
    return rv
| {
"repo_name": "lucianopuccio/golem",
"path": "golem/gui/gui_start.py",
"copies": "1",
"size": "1255",
"license": "mit",
"hash": -2126464203613431800,
"line_mean": 30.375,
"line_max": 91,
"alpha_frac": 0.6629482072,
"autogenerated": false,
"ratio": 3.3466666666666667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45096148738666664,
"avg_score": null,
"num_lines": null
} |
"""A function to track a grid and display its origin on the screen
"""
# The MIT License (MIT)
#
# Copyright (c) 2016 GTRC.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import cv2
import numpy as np
import camera
import json
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
class GridLocation(object):
    """Gets the location of a grid in an image and builds display images.
    Attributes:
        space: A float describing the spacing of the grid in mm
        rows: An int describing the number of rows of interior corners on the
            grid being tracked.
        cols: An int describing the number of columns of interior corners on
            the grid being tracked.
        opencv_windows_open: A boolean, whether the openCV display windows are
            open
        image: numpy.ndarray of the undistorted image
        result_image: numpy.ndarray of the final image, which is undistorted,
            has grid corners drawn on it, and has the grid coordinates drawn on
            it.
        object_point: numpy.ndarray of the real world coordinates of the grid
            in the grid's own coordinate system.
        axis: numpy.ndarry of the axis line points to draw, relative to the
            grid origin in the grid's coordinate system.
        intrinsic: A numpy array of the camera intrinsic matrix
        distortion: A numpy array of the camera distortion parameters
    """
    def __init__(self, calibration, rows, cols, space, cam_name):
        """Initialize the GridLocation class.
        Reads in camera calibration info, sets up communications with the
        camera, and sets up the definition for an object point.
        Args:
            calibration (str): String of the file location of the camera .
                calibration data. The data should be stored as a JSON file with
                top level fields `intrinsic` which holds the intrinsic matrix
                as a list of lists and `distortion` which holds the distortion
                matrix as a list
            rows (int): The number of rows of interior corners on the grid
            cols (int): The number of columns of interior corners on the grid
            space (float): The spacing of corners on the grid
            cam_name: Identifier handed to camera.Camera -- NOTE(review):
                its semantics are defined by the camera module.
        Raises:
            ValueError: The number of rows and cols was the same
        """
        # From args:
        self.space = space
        if rows == cols:
            # An asymmetric grid is required so its orientation is unambiguous.
            raise ValueError('The grid mus be asymmetric. Rows cannot equal '
                             'Columns')
        self.rows = rows
        self.cols = cols
        self.opencv_windows_open = False
        self.image = None
        self.result_image = None
        # Grid Info: real-world corner coordinates on the z = 0 plane,
        # spaced `space` apart.
        self.object_point = np.zeros((self.cols * self.rows, 3), np.float32)
        self.object_point[:, :2] = (np.mgrid[
            0:(self.rows*self.space):self.space,
            0:(self.cols*self.space):self.space]
            .T.reshape(-1, 2))
        # Axis endpoints, three grid squares long along x, y and -z.
        self.axis = np.float32([[3*self.space, 0, 0], [0, 3*self.space, 0],
                                [0, 0, -3*self.space]]).reshape(-1, 3)
        # Calibration Data setup:
        with open(calibration, 'r') as calibration_file:
            calibration_dictionary = json.load(calibration_file)
        self.intrinsic = np.asarray(calibration_dictionary['intrinsic'])
        self.distortion = np.asarray(calibration_dictionary['distortion'])
        # Camera
        self.cam = camera.Camera(cam_name, self.intrinsic, self.distortion)
        print("done with init")
    def __del__(self):
        """Destroy this instance of the GridLocation class
        Closes any open OpenCV windows and closes the communications with the
        camera.
        """
        cv2.destroyWindow('result')
        self.cam.__del__()
    def show_images(self):
        """Displays the images.
        If the windows have not yet been created, they are created. Note, there
        is a programmed 5 ms delay to allow the images to be shown.
        """
        # OpenCV window and image setup:
        if not self.opencv_windows_open:
            cv2.namedWindow('result', cv2.WINDOW_NORMAL)
            self.opencv_windows_open = True
            cv2.waitKey(1)
        if self.result_image is not None:
            cv2.imshow('result', self.result_image)
        cv2.waitKey(5)
    def get_cam2grid(self):
        """Extract grid information from image and generate result image.
        Extract translation and rotation of grid from camera. Draw grid corners
        on result image. Draw grid pose on result image. Return camera to grid
        transformation matrix.
        Returns: 6 member list, translation matrix
        Raises:
            RuntimeError: Could not find a grid
        """
        # Get new image
        self.image = self.cam.capture_image()
        # Find chessboard corners.
        # NOTE(review): despite its name, the first return value is the
        # found/not-found flag, as the `if not` test below shows.
        re_projection_error, corners = cv2.findChessboardCorners(
            self.image, (self.rows, self.cols),
            flags=cv2.CALIB_CB_FAST_CHECK + cv2.CALIB_CB_ADAPTIVE_THRESH)
        if not re_projection_error:
            raise RuntimeError('unable to find grid')
        # Refine the detected corners to sub-pixel accuracy.
        corners2 = cv2.cornerSubPix(self.image, corners, (11, 11),
                                    (-1, -1),
                                    criteria)
        if corners2 is None:
            # Some OpenCV versions return None here; fall back to the raw
            # corner estimates.
            corners2 = corners
        # Find the rotation and translation vectors.
        # NOTE(review): this 3-way unpack matches the OpenCV 2 signature;
        # OpenCV 3 solvePnPRansac returns (retval, rvec, tvec, inliers).
        rvecs, tvecs, inliers = cv2.solvePnPRansac(self.object_point,
                                                   corners2,
                                                   self.intrinsic,
                                                   self.distortion)
        # project 3D points to image plane
        image_points, jac = cv2.projectPoints(self.axis, rvecs, tvecs,
                                              self.intrinsic,
                                              self.distortion)
        self.result_image = cv2.cvtColor(self.image,
                                         cv2.COLOR_GRAY2RGB)
        temp_image = cv2.drawChessboardCorners(self.result_image,
                                               (self.cols, self.rows),
                                               corners2,
                                               re_projection_error)
        # OpenCV 2 vs 3
        if temp_image is not None:
            self.result_image = temp_image
        self.result_image = draw_axes(self.result_image, corners2,
                                      image_points)
        return (np.concatenate((tvecs, rvecs), axis=0)).ravel().tolist()
    def __enter__(self):
        """Content manager entry point"""
        return self
    def __exit__(self, *_):
        """Content manager exit point"""
        self.__del__()
def draw_axes(image_raw, corners, image_points, label=''):
    """Draw an x/y/z axis triad (plus an optional label) on a copy of an image.

    The triad is anchored at the first entry of *corners* and its three
    arrow tips are given by *image_points*. Basic code from:
    http://docs.opencv.org/3.0-beta/doc/py_tutorials/py_calib3d/py_pose/py_pose.html

    Args:
        image_raw (numpy.ndarray): The image on which to draw the axes
        corners (numpy.ndarray): 2D image points; the first one is the
            origin of the axes to draw
        image_points (np.array): 2D image points at the ends of the three axes
        label (str): A string label to place near the coordinate frame

    Returns: numpy.ndarray Image with the axes drawn on it.
    """
    corners = np.rint(corners).astype('int')
    image_points = np.rint(image_points).astype('int')
    origin = tuple(corners[0].ravel())
    image = image_raw.copy()
    # Offset each axis letter away from its arrow tip so the text does not
    # sit on top of the arrow head.
    letters = np.array(image_points)
    letter_space = 30
    for row in range(letters.shape[0]):
        if letters[row][0][0] < origin[0]:
            letters[row][0][0] -= letter_space
        if letters[row][0][1] < origin[1]:
            letters[row][0][1] -= letter_space
        else:
            letters[row][0][1] += 1.5*letter_space
    # One arrow plus one letter per axis, in the original x, y, z colours.
    axis_specs = (((255, 0, 0), 'x'), ((0, 255, 0), 'y'), ((0, 0, 255), 'z'))
    for index, (colour, axis_name) in enumerate(axis_specs):
        drawn = cv2.arrowedLine(image, origin,
                                tuple(image_points[index].ravel()),
                                colour, 5)
        if drawn is not None:  # OpenCV 3 returns the image; OpenCV 2 None
            image = drawn
        drawn = cv2.putText(image, axis_name, tuple(letters[index].ravel()),
                            cv2.FONT_HERSHEY_SIMPLEX, 2, colour, 4)
        if drawn is not None:
            image = drawn
    # put below the axes in the middle:
    drawn = cv2.putText(image, label, origin,
                        cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
    if drawn is not None:
        image = drawn
    return image
| {
"repo_name": "mjsobrep/robot2camera-calibration",
"path": "robot2cam_calibration/track_grid.py",
"copies": "1",
"size": "10414",
"license": "mit",
"hash": 4953815504034475000,
"line_mean": 38.2981132075,
"line_max": 79,
"alpha_frac": 0.5858459766,
"autogenerated": false,
"ratio": 4.2008874546187975,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 265
} |
"""A function to unpack integer ranges of the form x-y,z."""
import re
__copyright__ = "Copyright (c) 2013- F-Secure"
def unpack_integer_range(integerrange):
    """Input an integer range spec like "200,205-207" and return a list of
    integers like [200, 205, 206, 207]

    :param integerrange: The range specification as a string
    :return: Sorted integers in a list
    """
    integers = []  # To hold the eventual result
    valid_chars = re.compile(r"^[0-9\-, ]+$")
    if re.match(valid_chars, integerrange) is None:
        assert False, "Number range %s in the feature file is invalid. Must " \
                      "contain just numbers, commas, and hyphens" % integerrange
    # Strings are immutable: the original called .replace() and discarded
    # the result, so embedded spaces were never actually removed.
    integerrange = integerrange.replace(" ", "")
    rangeparts = integerrange.split(',')  # One+ comma-separated int ranges
    for rangepart in rangeparts:
        rangemaxmin = rangepart.split('-')  # Range is defined with a hyphen
        if len(rangemaxmin) == 1:  # This was a single value
            try:
                integers.extend([int(rangemaxmin[0])])
            except ValueError:
                assert False, "Number range %s in the feature file is " \
                              "invalid. Must be integers separated with commas and " \
                              "hyphens" % integerrange
        elif len(rangemaxmin) == 2:  # It was a range of values
            try:
                rangemin = int(rangemaxmin[0])
                rangemax = int(rangemaxmin[1]) + 1  # inclusive upper bound
            except ValueError:
                assert False, "Number range %s in the feature file is " \
                              "invalid. Must be integers separated with commas and " \
                              "hyphens" % integerrange
            if rangemin >= rangemax:
                assert False, "Number range %s in the feature file is " \
                              "invalid. Range minimum is more than " \
                              "maximum" % integerrange
            integers.extend(range(rangemin, rangemax))
        else:  # Range specifier was not of the form x-y
            assert False, "Number range %s in the feature file is invalid. " \
                          "Incorrect range specifier" % \
                          integerrange
    return sorted(integers)
| {
"repo_name": "F-Secure/mittn",
"path": "mittn/httpfuzzer/number_ranges.py",
"copies": "1",
"size": "2232",
"license": "apache-2.0",
"hash": 442262853037580300,
"line_mean": 44.5510204082,
"line_max": 80,
"alpha_frac": 0.5689964158,
"autogenerated": false,
"ratio": 4.41106719367589,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007054673721340388,
"num_lines": 49
} |
"""A function to unpack integer ranges of the form x-y,z"""
"""
Copyright (c) 2014 F-Secure
See LICENSE for details
"""
import re
def unpack_integer_range(integerrange):
    """Unpack an integer range spec like "200,205-207" into a sorted list
    of integers like [200, 205, 206, 207].

    :param integerrange: The range specification as a string
    :return: Sorted integers in a list
    :raises AssertionError: If the specification is malformed
    """
    integers = []  # To hold the eventual result
    # Raw string avoids the invalid escape sequence warning for "\-".
    valid_chars = re.compile(r"^[0-9\-, ]+$")
    if re.match(valid_chars, integerrange) is None:
        # Raise explicitly instead of `assert False` so the validation
        # survives running under `python -O` (asserts are stripped).
        raise AssertionError(
            "Number range %s in the feature file is invalid. Must "
            "contain just numbers, commas, and hyphens" % integerrange)
    # Bug fix: str.replace() returns a new string; the original discarded
    # the result, so spaces were never actually removed.
    integerrange = integerrange.replace(" ", "")
    rangeparts = integerrange.split(',')  # One+ comma-separated int ranges
    for rangepart in rangeparts:
        rangemaxmin = rangepart.split('-')  # Range is defined with a hyphen
        if len(rangemaxmin) == 1:  # This was a single value
            try:
                integers.append(int(rangemaxmin[0]))
            except ValueError:
                raise AssertionError(
                    "Number range %s in the feature file is "
                    "invalid. Must be integers separated with commas and "
                    "hyphens" % integerrange)
        elif len(rangemaxmin) == 2:  # It was a range of values
            try:
                rangemin = int(rangemaxmin[0])
                rangemax = int(rangemaxmin[1]) + 1  # Upper bound is inclusive
            except ValueError:
                raise AssertionError(
                    "Number range %s in the feature file is "
                    "invalid. Must be integers separated with commas and "
                    "hyphens" % integerrange)
            if rangemin >= rangemax:
                raise AssertionError(
                    "Number range %s in the feature file is "
                    "invalid. Range minimum is more than "
                    "maximum" % integerrange)
            integers.extend(range(rangemin, rangemax))
        else:  # Range specifier was not of the form x-y
            raise AssertionError(
                "Number range %s in the feature file is invalid. "
                "Incorrect range specifier" % integerrange)
    return sorted(integers)
"repo_name": "mittn/mittn",
"path": "mittn/httpfuzzer/number_ranges.py",
"copies": "2",
"size": "2302",
"license": "apache-2.0",
"hash": 7268090408188413000,
"line_mean": 42.4528301887,
"line_max": 80,
"alpha_frac": 0.5621198957,
"autogenerated": false,
"ratio": 4.504892367906066,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6067012263606066,
"avg_score": null,
"num_lines": null
} |
## a function which gets a path as argument and returns the data
import numpy as np
import matplotlib.pyplot as plt
class datLoad:
    """Load a delimited text data file.

    The delimiter (tab, comma or semicolon) is auto-detected, leading
    comment/header lines are skipped, and each column is stored in
    ``self.data`` as a numpy array keyed by its header name.
    """
    def __init__(self,dirpath):
        # dirpath: path of the data file to parse.
        self.dirpath=dirpath
        self.data={}
        ## now read the data in
        with open (self.dirpath, 'r') as dirfile:
            dirstringlines=dirfile.readlines()
            # NOTE(review): readlines() already consumed the file, so this
            # read() always returns '' and dirstring is never used.
            dirstring=dirfile.read()
        ## delete the last line, if it is empty:
        # NOTE(review): lines from readlines() keep their '\n', so this test
        # only fires for a truly empty last element; an empty file would
        # raise IndexError here -- confirm inputs are never empty.
        if not dirstringlines[-1]:
            dirstringlines=dirstringlines[:-1]
        ## work out the delimiter string:
        delimiterStrings=['\t',',',';']
        columnNumberList=[]
        rowNumberList=[]
        # NOTE(review): this local is never read afterwards.
        delimiter=delimiterStrings[0]
        for i in range(len(delimiterStrings)):
            columnlist=[]
            allcolumnlist=[]
            for j in range(len(dirstringlines)):
                # Field count of this line for candidate delimiter i.
                columns=len(dirstringlines[j].split(delimiterStrings[i]))
                if not columns ==1:
                    columnlist.append(columns)
                allcolumnlist.append(columns)
            # How many lines this candidate actually splits.
            rowNumberList.append(len(columnlist))
            columnNumberList.append(allcolumnlist)
        # Choose the delimiter that splits the most lines.
        delimiterIndex=np.argmax(rowNumberList)
        self.delimiter= delimiterStrings[delimiterIndex]
        ## Header lines, assuming they contain fewer delimiters than the
        ## other (data) lines.
        self.CommentLines=np.argmax(columnNumberList[delimiterIndex])
        print(columnNumberList[delimiterIndex])
        ## read the data using the detected delimiter string
        ## first the comment block
        self.comment=''
        for i in range(0,self.CommentLines):
            self.comment+=dirstringlines[i]
        ## read the header line; create per-header lists keyed by name.
        self.data={}
        self.headerList=[]
        for iheader in dirstringlines[self.CommentLines].split(self.delimiter):
            if iheader.endswith('\n'):
                iheader=iheader[:-1]
            if iheader:
                self.data[iheader]=[]
                self.headerList.append(iheader)
        ## read the data rows and append values to the column lists
        for i in range(self.CommentLines+1,len(dirstringlines)):
            idataList=dirstringlines[i].split(self.delimiter)
            for j in range(len(idataList)):
                if idataList[j].endswith('\n'):
                    idataList[j]=idataList[j][:-1]
                if(idataList[j]):
                    self.data[self.headerList[j]].append(float(idataList[j]))
        # Convert each column list to a numpy array.
        for header in self.data:
            self.data[header]=np.array(self.data[header])
        print('headerList:',self.headerList)
        print('data:',self.data)
        print ('comment: ',self.comment)
def main():
    """Demo entry point: parse the bundled sample data file."""
    dirpath = 'Testdaten/testdata4.dat'
    # Bug fix: the class defined above is `datLoad`; the original called
    # the undefined name `datload`, which raised NameError at runtime.
    datloadObject = datLoad(dirpath)
if __name__ == "__main__":
    main()
| {
"repo_name": "Kylskap/PloPo",
"path": "handler/datLoad.py",
"copies": "1",
"size": "2815",
"license": "apache-2.0",
"hash": -3743816880884708400,
"line_mean": 34.6329113924,
"line_max": 105,
"alpha_frac": 0.5978685613,
"autogenerated": false,
"ratio": 3.856164383561644,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4954032944861644,
"avg_score": null,
"num_lines": null
} |
## a function which gets a path as argument and returns the data
import numpy as np
import matplotlib.pyplot as plt
class reader:
    """Load a delimited text data file.

    The delimiter (tab, comma or semicolon) is auto-detected, leading
    comment/header lines are skipped, and each column is stored in
    ``self.data`` as a numpy array keyed by its header name.
    """
    def __init__(self,dirpath):
        # dirpath: path of the data file to parse.
        self.dirpath=dirpath
        self.data={}
        ## now read the data in
        with open (self.dirpath, 'r') as dirfile:
            dirstringlines=dirfile.readlines()
            # NOTE(review): readlines() already consumed the file, so this
            # read() always returns '' and dirstring is never used.
            dirstring=dirfile.read()
        ## delete the last line, if it is empty:
        # NOTE(review): lines from readlines() keep their '\n', so this test
        # only fires for a truly empty last element; an empty file would
        # raise IndexError here -- confirm inputs are never empty.
        if not dirstringlines[-1]:
            dirstringlines=dirstringlines[:-1]
        ## work out the delimiter string:
        delimiterStrings=['\t',',',';']
        columnNumberList=[]
        rowNumberList=[]
        # NOTE(review): this local is never read afterwards.
        delimiter=delimiterStrings[0]
        for i in range(len(delimiterStrings)):
            columnlist=[]
            allcolumnlist=[]
            for j in range(len(dirstringlines)):
                # Field count of this line for candidate delimiter i.
                columns=len(dirstringlines[j].split(delimiterStrings[i]))
                if not columns ==1:
                    columnlist.append(columns)
                allcolumnlist.append(columns)
            # How many lines this candidate actually splits.
            rowNumberList.append(len(columnlist))
            columnNumberList.append(allcolumnlist)
        # Choose the delimiter that splits the most lines.
        delimiterIndex=np.argmax(rowNumberList)
        self.delimiter= delimiterStrings[delimiterIndex]
        ## Header lines, assuming they contain fewer delimiters than the
        ## other (data) lines.
        self.CommentLines=np.argmax(columnNumberList[delimiterIndex])
        print(columnNumberList[delimiterIndex])
        ## read the data using the detected delimiter string
        ## first the comment block
        self.comment=''
        for i in range(0,self.CommentLines):
            self.comment+=dirstringlines[i]
        ## read the header line; create per-header lists keyed by name.
        self.data={}
        self.headerList=[]
        for iheader in dirstringlines[self.CommentLines].split(self.delimiter):
            if iheader.endswith('\n'):
                iheader=iheader[:-1]
            if iheader:
                self.data[iheader]=[]
                self.headerList.append(iheader)
        ## read the data rows and append values to the column lists
        for i in range(self.CommentLines+1,len(dirstringlines)):
            idataList=dirstringlines[i].split(self.delimiter)
            for j in range(len(idataList)):
                if idataList[j].endswith('\n'):
                    idataList[j]=idataList[j][:-1]
                if(idataList[j]):
                    self.data[self.headerList[j]].append(float(idataList[j]))
        # Convert each column list to a numpy array.
        for header in self.data:
            self.data[header]=np.array(self.data[header])
        print('headerList:',self.headerList)
        print('data:',self.data)
        print ('comment: ',self.comment)
def main():
    """Demo entry point: parse the bundled sample data file."""
    dirpath = 'Testdaten/testdata4.dat'
    # Bug fix: the class defined above is `reader`; the original called
    # the undefined name `datload`, which raised NameError at runtime.
    datloadObject = reader(dirpath)
if __name__ == "__main__":
    main()
| {
"repo_name": "Kylskap/PloPo",
"path": "handler/reader/auto_Reader.py",
"copies": "1",
"size": "2814",
"license": "apache-2.0",
"hash": -8393292494957623000,
"line_mean": 34.6202531646,
"line_max": 105,
"alpha_frac": 0.5977256574,
"autogenerated": false,
"ratio": 3.860082304526749,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9772730683210624,
"avg_score": 0.03701545574322497,
"num_lines": 79
} |
# A funny story by HVNSweeting
#Must run with 2.7
import threading
import logging
import time
import sys
# Tag every log line with the name of the emitting thread.
logging.basicConfig(level=logging.DEBUG,
                    format='%(threadName)-10s %(message)s'
                    )
# Python 2 print statement -- matches the "Must run with 2.7" note above.
print 'you are running %s' % str(sys.version_info)
def wait_for_her(e):
    """Block on event *e* with no timeout, logging before and after.

    Returns None; wait() only unblocks once the event has been set.
    """
    logging.debug("WaitBoy is waiting...")
    arrived = e.wait()
    logging.debug('and wait until she came : %s' % arrived)
def wait_for_her_little_hour(e, t):
    """Wait on event *e* for at most *t* seconds and log the outcome.

    e.wait(t) returns True if the event was set within the timeout,
    False otherwise; both cases are logged.
    """
    logging.debug("HotBoy is waiting...")
    arrived = e.wait(t)
    logging.debug("Is she came? %s " % arrived)
    if not arrived:
        logging.debug("HotBoy back to home")
    else:
        logging.debug("HotBoy meet her, got her")
# uncomment below if you are the hot boy :))
#def wait_for_her_little_hour(e, t):
# while not e.isSet():
# logging.debug("HotBoy is waiting...")
# is_she_came = e.wait(t)
# logging.debug("Is she came? %s " % is_she_came)
# if is_she_came:
# logging.debug("HotBoy meet her, got her")
# else:
# logging.debug("HotBoy playing game on his Android")
# Demo: two waiter threads on one Event; the main thread sets it late.
logging.debug("This is a story about...")
logging.debug("A girl with no name")
she_come = threading.Event()
logging.debug("A boy can wait for her 2 hours")
# This waiter gives up after a 2-second timeout.
t2 = threading.Thread(name='HotBoy',
                      target=wait_for_her_little_hour,
                      args=(she_come,2))
logging.debug("And a boy who can wait for her until her come...")
# This waiter blocks with no timeout until the event is set.
t1 = threading.Thread(name='WaitBoy',
                      target=wait_for_her,
                      args=(she_come,))
logging.debug("*" * 20)
logging.debug("The story start...")
logging.debug("Two boys are waiting in the rain...")
t1.start()
t2.start()
# Sleep past HotBoy's 2-second timeout so his wait() returns False.
time.sleep(3)
logging.debug("oops, she had slept for 3 hours and now she woke up")
logging.debug("she came to place...")
she_come.set()
| {
"repo_name": "familug/FAMILUG",
"path": "Python/threading_com.py",
"copies": "1",
"size": "1800",
"license": "bsd-2-clause",
"hash": 8278879320172419000,
"line_mean": 27.125,
"line_max": 68,
"alpha_frac": 0.6377777778,
"autogenerated": false,
"ratio": 3.0150753768844223,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4152853154684422,
"avg_score": null,
"num_lines": null
} |
"""A Future class"""
from hawkweed.functional.primitives import reduce
from hawkweed.classes.repr import Repr
class Future(Repr):
    """A continuation-passing-style Future: wraps a function taking
    (success, error) callbacks and supports apply/chain/fork composition."""
    def __init__(self, value):
        """
        Takes a binary function (taking success and error, respectively)
        and builds a Future from it.

        Complexity: O(1)
        params:
            value: the function to encase
        returns:
            a Future
        """
        self.value = value
        # Transformations registered via apply(); folded over the resolved
        # value when fork() succeeds.
        self.transforms = []
    @staticmethod
    def of(value):
        """
        Creates a Future from a static value, immediately returning it.

        Complexity: O(1)
        params:
            value: the value to encase
        returns:
            a Future
        """
        return Future(lambda res, rej: res(value))
    @staticmethod
    def reject(value):
        """
        Creates a Future from a static value, immediately rejecting it.

        Complexity: O(1)
        params:
            value: the value to encase
        returns:
            a Future
        """
        return Future(lambda res, rej: rej(value))
    @staticmethod
    def encase(fun, args=None):
        """
        Encases an ordinary function in a Future. If the function runs
        as expected the return value will be returned to the success
        callback. If an exception occurs it will be returned to the
        error callback.

        Special behaviour:
        You need to specify args. If the function does not have any,
        add args=[]. If you do not a function that takes arguments
        will be returned.

        Complexity: O(1)
        params:
            fun: the function to encase
            args: the arguments to pass to the function (defaults to None,
                  override to an empty sequence if no arguments are needed)
        returns:
            a Future
        """
        if args is None:
            # Curried form: return a collector that captures the args first.
            return lambda *args: Future.encase(fun, args=args)
        def res(res, rej):
            """Internal encase function"""
            try:
                return res(fun(*args))
            except Exception as e:
                return rej(e)
        return Future(res)
    def __repr__(self):
        return "Future({})".format(self.value)
    def apply(self, fun):
        """
        Apply a transformation function fun to the future value.

        Complexity: Application O(1), Execution O(fun)
        params:
            fun: the function to apply
        returns:
            a Future
        """
        self.transforms.append(fun)
        return self
    def chain(self, future):
        """
        Chains a future to this one. This will intercept
        any calls to fork insofar as both Futures are chained
        before any call to the callbacks. Any error in both
        Futures will result in a call to the error callback.

        Complexity: O(1)
        params:
            future: the Future to chain
        returns:
            a Future
        """
        def chained(res, rej):
            """Internal chain function"""
            # NOTE(review): `future` is invoked as `future(x)`, so despite
            # the name/docstring it appears to expect a callable returning
            # a Future -- confirm against callers before relying on it.
            self.value(lambda x: future(x).fork(res, rej), rej)
        return Future(chained)
    def fork(self, res, err):
        """
        Registers resolvers for this Future.

        Complexity: O(1)
        params:
            res: the resolver function
            err: the error function
        returns:
            whatever the functions return
        """
        def resolver(trans):
            """Internal fork function that applies transformations"""
            # Folds the registered transforms over the resolved value.
            # NOTE(review): assumes hawkweed's reduce signature is
            # (fun, initial, sequence) -- confirm in functional.primitives.
            try:
                return res(reduce(lambda acc, x: x(acc), trans, self.transforms))
            except Exception as e:
                if err:
                    return err(e)
                raise
        return self.value(resolver, err)
| {
"repo_name": "hellerve/hawkweed",
"path": "hawkweed/classes/future.py",
"copies": "1",
"size": "3790",
"license": "mit",
"hash": 1168330177188898300,
"line_mean": 27.9312977099,
"line_max": 81,
"alpha_frac": 0.5493403694,
"autogenerated": false,
"ratio": 4.684796044499382,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5734136413899382,
"avg_score": null,
"num_lines": null
} |
"""A Future class similar to the one in PEP 3148."""
__all__ = ['CancelledError', 'TimeoutError', 'InvalidStateError',
'Future', 'wrap_future', 'isfuture']
import concurrent.futures
import logging
import sys
import traceback
from . import base_futures
from . import compat
from . import events
# Re-export the public exception/predicate names from base_futures so this
# module keeps its historical surface.
CancelledError = base_futures.CancelledError
InvalidStateError = base_futures.InvalidStateError
TimeoutError = base_futures.TimeoutError
isfuture = base_futures.isfuture
# Internal future states, shared with base_futures.
_PENDING = base_futures._PENDING
_CANCELLED = base_futures._CANCELLED
_FINISHED = base_futures._FINISHED
STACK_DEBUG = logging.DEBUG - 1  # heavy-duty debugging
class _TracebackLogger:
    """Helper to log a traceback upon destruction if not cleared.

    This solves a nasty problem with Futures and Tasks that have an
    exception set: if nobody asks for the exception, the exception is
    never logged.  This violates the Zen of Python: 'Errors should
    never pass silently.  Unless explicitly silenced.'

    However, we don't want to log the exception as soon as
    set_exception() is called: if the calling code is written
    properly, it will get the exception and handle it properly.  But
    we *do* want to log it if result() or exception() was never called
    -- otherwise developers waste a lot of time wondering why their
    buggy code fails silently.

    An earlier attempt added a __del__() method to the Future class
    itself, but this backfired because the presence of __del__()
    prevents garbage collection from breaking cycles.  A way out of
    this catch-22 is to avoid having a __del__() method on the Future
    class itself, but instead to have a reference to a helper object
    with a __del__() method that logs the traceback, where we ensure
    that the helper object doesn't participate in cycles, and only the
    Future has a reference to it.

    The helper object is added when set_exception() is called.  When
    the Future is collected, and the helper is present, the helper
    object is also collected, and its __del__() method will log the
    traceback.  When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
    object, after calling its clear() method to prevent it from
    logging.

    One downside is that we do a fair amount of work to extract the
    traceback from the exception, even when it is never logged.  It
    would seem cheaper to just store the exception object, but that
    references the traceback, which references stack frames, which may
    reference the Future, which references the _TracebackLogger, and
    then the _TracebackLogger would be included in a cycle, which is
    what we're trying to avoid!  As an optimization, we don't
    immediately format the exception; we only do the work when
    activate() is called, which call is delayed until after all the
    Future's callbacks have run.  Since usually a Future has at least
    one callback (typically set by 'yield from') and usually that
    callback extracts the callback, thereby removing the need to
    format the exception.

    PS. I don't claim credit for this solution.  I first heard of it
    in a discussion about closing files when they are collected.
    """
    # Slots keep instances small and, importantly, cycle-free.
    __slots__ = ('loop', 'source_traceback', 'exc', 'tb')
    def __init__(self, future, exc):
        # Capture the owning future's loop and creation traceback; do not
        # keep a reference to the future itself (cycle avoidance).
        self.loop = future._loop
        self.source_traceback = future._source_traceback
        self.exc = exc
        self.tb = None
    def activate(self):
        # Format the exception now and drop the reference to it so the
        # exception's frames cannot keep a reference cycle alive.
        exc = self.exc
        if exc is not None:
            self.exc = None
            self.tb = traceback.format_exception(exc.__class__, exc,
                                                 exc.__traceback__)
    def clear(self):
        # The exception was retrieved via result()/exception(): nothing to log.
        self.exc = None
        self.tb = None
    def __del__(self):
        # Still holding a formatted traceback here means the exception was
        # never consumed; report it through the loop's exception handler.
        if self.tb:
            msg = 'Future/Task exception was never retrieved\n'
            if self.source_traceback:
                src = ''.join(traceback.format_list(self.source_traceback))
                msg += 'Future/Task created at (most recent call last):\n'
                msg += '%s\n' % src.rstrip()
            msg += ''.join(self.tb).rstrip()
            self.loop.call_exception_handler({'message': msg})
class Future:
    """This class is *almost* compatible with concurrent.futures.Future.

    Differences:
    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.
    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon_threadsafe().
    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.

    (In Python 3.4 or later we may be able to unify the implementations.)
    """
    # Class variables serving as defaults for instance variables.
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None
    # This field is used for a dual purpose:
    # - Its presence is a marker to declare that a class implements
    #   the Future protocol (i.e. is intended to be duck-type compatible).
    #   The value must also be not-None, to enable a subclass to declare
    #   that it is not compatible by setting this to None.
    # - It is set by __iter__() below so that Task._step() can tell
    #   the difference between `yield from Future()` (correct) vs.
    #   `yield Future()` (incorrect).
    _asyncio_future_blocking = False
    _log_traceback = False  # Used for Python 3.4 and later
    _tb_logger = None  # Used for Python 3.3 only
    def __init__(self, *, loop=None):
        """Initialize the future.

        The optional event_loop argument allows explicitly setting the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []
        if self._loop.get_debug():
            # In debug mode, remember where the future was created.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
    # repr details are produced by the shared helper in base_futures.
    _repr_info = base_futures._future_repr_info
    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, ' '.join(self._repr_info()))
    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            if not self._log_traceback:
                # set_exception() was not called, or result() or exception()
                # has consumed the exception
                return
            exc = self._exception
            context = {
                'message': ('%s exception was never retrieved'
                            % self.__class__.__name__),
                'exception': exc,
                'future': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
    def cancel(self):
        """Cancel the future and schedule callbacks.

        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self._schedule_callbacks()
        return True
    def _schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.

        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        callbacks = self._callbacks[:]
        if not callbacks:
            return
        self._callbacks[:] = []
        for callback in callbacks:
            self._loop.call_soon(callback, self)
    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED
    # Don't implement running(); see http://bugs.python.org/issue18699
    def done(self):
        """Return True if the future is done.

        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING
    def result(self):
        """Return the result this future represents.

        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Result is not ready.')
        # The exception (if any) is being consumed: stop pending logging.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        if self._exception is not None:
            raise self._exception
        return self._result
    def exception(self):
        """Return the exception that was set on this future.

        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Exception is not set.')
        # Retrieving the exception counts as consuming it: stop logging.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        return self._exception
    def add_done_callback(self, fn):
        """Add a callback to be run when the future becomes done.

        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self)
        else:
            self._callbacks.append(fn)
    # New method not in PEP 3148.
    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.

        Returns the number of callbacks removed.
        """
        filtered_callbacks = [f for f in self._callbacks if f != fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count
    # So-called internal methods (note: no set_running_or_notify_cancel()).
    def set_result(self, result):
        """Mark the future done and set its result.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        self._result = result
        self._state = _FINISHED
        self._schedule_callbacks()
    def set_exception(self, exception):
        """Mark the future done and set an exception.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        if isinstance(exception, type):
            # Accept an exception class and instantiate it.
            exception = exception()
        if type(exception) is StopIteration:
            raise TypeError("StopIteration interacts badly with generators "
                            "and cannot be raised into a Future")
        self._exception = exception
        self._state = _FINISHED
        self._schedule_callbacks()
        if compat.PY34:
            self._log_traceback = True
        else:
            self._tb_logger = _TracebackLogger(self, exception)
            # Arrange for the logger to be activated after all callbacks
            # have had a chance to call result() or exception().
            self._loop.call_soon(self._tb_logger.activate)
    def __iter__(self):
        if not self.done():
            self._asyncio_future_blocking = True
            yield self  # This tells Task to wait for completion.
        assert self.done(), "yield from wasn't used with future"
        return self.result()  # May raise too.
    if compat.PY35:
        __await__ = __iter__  # make compatible with 'await' expression
# Needed for testing purposes: keep a reference to the pure-Python class
# even when the C-accelerated implementation replaces `Future` below.
_PyFuture = Future
def _set_result_unless_cancelled(fut, result):
"""Helper setting the result only if the future was not cancelled."""
if fut.cancelled():
return
fut.set_result(result)
def _set_concurrent_future_state(concurrent, source):
"""Copy state from a future to a concurrent.futures.Future."""
assert source.done()
if source.cancelled():
concurrent.cancel()
if not concurrent.set_running_or_notify_cancel():
return
exception = source.exception()
if exception is not None:
concurrent.set_exception(exception)
else:
result = source.result()
concurrent.set_result(result)
def _copy_future_state(source, dest):
"""Internal helper to copy state from another Future.
The other Future may be a concurrent.futures.Future.
"""
assert source.done()
if dest.cancelled():
return
assert not dest.done()
if source.cancelled():
dest.cancel()
else:
exception = source.exception()
if exception is not None:
dest.set_exception(exception)
else:
result = source.result()
dest.set_result(result)
def _chain_future(source, destination):
    """Chain two futures so that when one completes, so does the other.

    The result (or exception) of source will be copied to destination.
    If destination is cancelled, source gets cancelled too.
    Compatible with both asyncio.Future and concurrent.futures.Future.
    """
    if not isfuture(source) and not isinstance(source,
                                               concurrent.futures.Future):
        raise TypeError('A future is required for source argument')
    if not isfuture(destination) and not isinstance(destination,
                                                    concurrent.futures.Future):
        raise TypeError('A future is required for destination argument')
    # concurrent.futures.Future has no event loop, hence None.
    source_loop = source._loop if isfuture(source) else None
    dest_loop = destination._loop if isfuture(destination) else None
    def _set_state(future, other):
        # Dispatch on the flavour of `future` (asyncio vs concurrent).
        if isfuture(future):
            _copy_future_state(other, future)
        else:
            _set_concurrent_future_state(future, other)
    def _call_check_cancel(destination):
        # Propagate cancellation back to the source, hopping to the
        # source's loop when the two futures live on different loops.
        if destination.cancelled():
            if source_loop is None or source_loop is dest_loop:
                source.cancel()
            else:
                source_loop.call_soon_threadsafe(source.cancel)
    def _call_set_state(source):
        # Copy the final state over to the destination on its own loop.
        if dest_loop is None or dest_loop is source_loop:
            _set_state(destination, source)
        else:
            dest_loop.call_soon_threadsafe(_set_state, destination, source)
    destination.add_done_callback(_call_check_cancel)
    source.add_done_callback(_call_set_state)
def wrap_future(future, *, loop=None):
    """Wrap concurrent.futures.Future object in an asyncio future.

    If *future* is already an asyncio (duck-typed) future it is returned
    unchanged; otherwise a new future on *loop* (or the default loop) is
    created and chained to it.
    """
    if isfuture(future):
        return future
    assert isinstance(future, concurrent.futures.Future), \
        'concurrent.futures.Future is expected, got {!r}'.format(future)
    if loop is None:
        loop = events.get_event_loop()
    wrapper = loop.create_future()
    _chain_future(future, wrapper)
    return wrapper
try:
    # Prefer the C-accelerated implementation when it is available.
    import _asyncio
except ImportError:
    pass
else:
    # _CFuture is needed for tests.
    Future = _CFuture = _asyncio.Future
| {
"repo_name": "mindbender-studio/setup",
"path": "bin/windows/python36/Lib/asyncio/futures.py",
"copies": "4",
"size": "16028",
"license": "mit",
"hash": 1204929341273374500,
"line_mean": 35.1805869074,
"line_max": 81,
"alpha_frac": 0.628213127,
"autogenerated": false,
"ratio": 4.423958045818383,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000059321961610548605,
"num_lines": 443
} |
"""A Future class similar to the one in PEP 3148."""
__all__ = ['CancelledError', 'TimeoutError',
'InvalidStateError',
'Future', 'wrap_future', 'isfuture',
]
import concurrent.futures._base
import logging
import reprlib
import sys
import traceback
from ActualVim.lib.asyncio_inc import compat
from . import events
# States for Future.
_PENDING = 'PENDING'
_CANCELLED = 'CANCELLED'
_FINISHED = 'FINISHED'
# Re-exported exception types from concurrent.futures.
Error = concurrent.futures._base.Error
CancelledError = concurrent.futures.CancelledError
TimeoutError = concurrent.futures.TimeoutError
STACK_DEBUG = logging.DEBUG - 1  # heavy-duty debugging
# Raised e.g. when result()/set_result() is called on a pending/done future.
class InvalidStateError(Error):
    """The operation is not allowed in this state."""
class _TracebackLogger:
    """Helper to log a traceback upon destruction if not cleared.

    This solves a nasty problem with Futures and Tasks that have an
    exception set: if nobody asks for the exception, the exception is
    never logged.  This violates the Zen of Python: 'Errors should
    never pass silently.  Unless explicitly silenced.'

    However, we don't want to log the exception as soon as
    set_exception() is called: if the calling code is written
    properly, it will get the exception and handle it properly.  But
    we *do* want to log it if result() or exception() was never called
    -- otherwise developers waste a lot of time wondering why their
    buggy code fails silently.

    An earlier attempt added a __del__() method to the Future class
    itself, but this backfired because the presence of __del__()
    prevents garbage collection from breaking cycles.  A way out of
    this catch-22 is to avoid having a __del__() method on the Future
    class itself, but instead to have a reference to a helper object
    with a __del__() method that logs the traceback, where we ensure
    that the helper object doesn't participate in cycles, and only the
    Future has a reference to it.

    The helper object is added when set_exception() is called.  When
    the Future is collected, and the helper is present, the helper
    object is also collected, and its __del__() method will log the
    traceback.  When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
    object, after calling its clear() method to prevent it from
    logging.

    One downside is that we do a fair amount of work to extract the
    traceback from the exception, even when it is never logged.  It
    would seem cheaper to just store the exception object, but that
    references the traceback, which references stack frames, which may
    reference the Future, which references the _TracebackLogger, and
    then the _TracebackLogger would be included in a cycle, which is
    what we're trying to avoid!  As an optimization, we don't
    immediately format the exception; we only do the work when
    activate() is called, which call is delayed until after all the
    Future's callbacks have run.  Since usually a Future has at least
    one callback (typically set by 'yield from') and usually that
    callback extracts the callback, thereby removing the need to
    format the exception.

    PS. I don't claim credit for this solution.  I first heard of it
    in a discussion about closing files when they are collected.
    """
    # Slots keep instances small and, importantly, cycle-free.
    __slots__ = ('loop', 'source_traceback', 'exc', 'tb')
    def __init__(self, future, exc):
        # Capture the owning future's loop and creation traceback; do not
        # keep a reference to the future itself (cycle avoidance).
        self.loop = future._loop
        self.source_traceback = future._source_traceback
        self.exc = exc
        self.tb = None
    def activate(self):
        # Format the exception now and drop the reference to it so the
        # exception's frames cannot keep a reference cycle alive.
        exc = self.exc
        if exc is not None:
            self.exc = None
            self.tb = traceback.format_exception(exc.__class__, exc,
                                                 exc.__traceback__)
    def clear(self):
        # The exception was retrieved via result()/exception(): nothing to log.
        self.exc = None
        self.tb = None
    def __del__(self):
        # Still holding a formatted traceback here means the exception was
        # never consumed; report it through the loop's exception handler.
        if self.tb:
            msg = 'Future/Task exception was never retrieved\n'
            if self.source_traceback:
                src = ''.join(traceback.format_list(self.source_traceback))
                msg += 'Future/Task created at (most recent call last):\n'
                msg += '%s\n' % src.rstrip()
            msg += ''.join(self.tb).rstrip()
            self.loop.call_exception_handler({'message': msg})
def isfuture(obj):
    """Check for a Future.

    This returns True when obj is a Future instance or is advertising
    itself as duck-type compatible by setting a non-None
    _asyncio_future_blocking on its class. See the comment in Future
    for more details.
    """
    cls = obj.__class__
    if not hasattr(cls, '_asyncio_future_blocking'):
        return False
    return obj._asyncio_future_blocking is not None
class Future:
    """This class is *almost* compatible with concurrent.futures.Future.
    Differences:
    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.
    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon_threadsafe().
    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.
    (In Python 3.4 or later we may be able to unify the implementations.)
    """
    # Class variables serving as defaults for instance variables.
    # State machine: _PENDING -> _CANCELLED, or _PENDING -> _FINISHED
    # (with either _result or _exception set).
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None
    # This field is used for a dual purpose:
    # - Its presence is a marker to declare that a class implements
    #   the Future protocol (i.e. is intended to be duck-type compatible).
    #   The value must also be not-None, to enable a subclass to declare
    #   that it is not compatible by setting this to None.
    # - It is set by __iter__() below so that Task._step() can tell
    #   the difference between `yield from Future()` (correct) vs.
    #   `yield Future()` (incorrect).
    _asyncio_future_blocking = False
    _log_traceback = False  # Used for Python 3.4 and later
    _tb_logger = None  # Used for Python 3.3 only
    def __init__(self, *, loop=None):
        """Initialize the future.
        The optional event_loop argument allows explicitly setting the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []
        if self._loop.get_debug():
            # In debug mode, remember where the future was created so that
            # __del__ can report it for never-retrieved exceptions.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
    def __format_callbacks(self):
        # Helper for _repr_info(): render the callback list compactly,
        # showing only the first and last entries when there are many.
        cb = self._callbacks
        size = len(cb)
        if not size:
            cb = ''
        def format_cb(callback):
            return events._format_callback_source(callback, ())
        if size == 1:
            cb = format_cb(cb[0])
        elif size == 2:
            cb = '{}, {}'.format(format_cb(cb[0]), format_cb(cb[1]))
        elif size > 2:
            cb = '{}, <{} more>, {}'.format(format_cb(cb[0]),
                                            size-2,
                                            format_cb(cb[-1]))
        return 'cb=[%s]' % cb
    def _repr_info(self):
        # Build the list of fields shown by __repr__().
        info = [self._state.lower()]
        if self._state == _FINISHED:
            if self._exception is not None:
                info.append('exception={!r}'.format(self._exception))
            else:
                # use reprlib to limit the length of the output, especially
                # for very long strings
                result = reprlib.repr(self._result)
                info.append('result={}'.format(result))
        if self._callbacks:
            info.append(self.__format_callbacks())
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info
    def __repr__(self):
        info = self._repr_info()
        return '<%s %s>' % (self.__class__.__name__, ' '.join(info))
    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            # Report a never-retrieved exception through the loop's
            # exception handler when the future is garbage-collected.
            if not self._log_traceback:
                # set_exception() was not called, or result() or exception()
                # has consumed the exception
                return
            exc = self._exception
            context = {
                'message': ('%s exception was never retrieved'
                            % self.__class__.__name__),
                'exception': exc,
                'future': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
    def cancel(self):
        """Cancel the future and schedule callbacks.
        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self._schedule_callbacks()
        return True
    def _schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.
        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        # Work on a snapshot; the instance list is emptied before scheduling.
        callbacks = self._callbacks[:]
        if not callbacks:
            return
        self._callbacks[:] = []
        for callback in callbacks:
            self._loop.call_soon(callback, self)
    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED
    # Don't implement running(); see http://bugs.python.org/issue18699
    def done(self):
        """Return True if the future is done.
        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING
    def result(self):
        """Return the result this future represents.
        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Result is not ready.')
        # The exception (if any) counts as retrieved from here on, so
        # silence both logging mechanisms.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        if self._exception is not None:
            raise self._exception
        return self._result
    def exception(self):
        """Return the exception that was set on this future.
        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Exception is not set.')
        # Mark the exception as retrieved (see result() above).
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        return self._exception
    def add_done_callback(self, fn):
        """Add a callback to be run when the future becomes done.
        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self)
        else:
            self._callbacks.append(fn)
    # New method not in PEP 3148.
    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.
        Returns the number of callbacks removed.
        """
        filtered_callbacks = [f for f in self._callbacks if f != fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count
    # So-called internal methods (note: no set_running_or_notify_cancel()).
    def set_result(self, result):
        """Mark the future done and set its result.
        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        self._result = result
        self._state = _FINISHED
        self._schedule_callbacks()
    def set_exception(self, exception):
        """Mark the future done and set an exception.
        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        if isinstance(exception, type):
            # An exception class was passed; instantiate it.
            exception = exception()
        if type(exception) is StopIteration:
            raise TypeError("StopIteration interacts badly with generators "
                            "and cannot be raised into a Future")
        self._exception = exception
        self._state = _FINISHED
        self._schedule_callbacks()
        if compat.PY34:
            self._log_traceback = True
        else:
            self._tb_logger = _TracebackLogger(self, exception)
            # Arrange for the logger to be activated after all callbacks
            # have had a chance to call result() or exception().
            self._loop.call_soon(self._tb_logger.activate)
    def __iter__(self):
        if not self.done():
            # Signal to Task._step() that `yield from` was used
            # (see the _asyncio_future_blocking comment above).
            self._asyncio_future_blocking = True
            yield self  # This tells Task to wait for completion.
        assert self.done(), "yield from wasn't used with future"
        return self.result()  # May raise too.
    if compat.PY35:
        __await__ = __iter__  # make compatible with 'await' expression
def _set_result_unless_cancelled(fut, result):
"""Helper setting the result only if the future was not cancelled."""
if fut.cancelled():
return
fut.set_result(result)
def _set_concurrent_future_state(concurrent, source):
"""Copy state from a future to a concurrent.futures.Future."""
assert source.done()
if source.cancelled():
concurrent.cancel()
if not concurrent.set_running_or_notify_cancel():
return
exception = source.exception()
if exception is not None:
concurrent.set_exception(exception)
else:
result = source.result()
concurrent.set_result(result)
def _copy_future_state(source, dest):
"""Internal helper to copy state from another Future.
The other Future may be a concurrent.futures.Future.
"""
assert source.done()
if dest.cancelled():
return
assert not dest.done()
if source.cancelled():
dest.cancel()
else:
exception = source.exception()
if exception is not None:
dest.set_exception(exception)
else:
result = source.result()
dest.set_result(result)
def _chain_future(source, destination):
    """Chain two futures so that when one completes, so does the other.
    The result (or exception) of source will be copied to destination.
    If destination is cancelled, source gets cancelled too.
    Compatible with both asyncio.Future and concurrent.futures.Future.
    """
    if not isfuture(source) and not isinstance(source,
                                               concurrent.futures.Future):
        raise TypeError('A future is required for source argument')
    if not isfuture(destination) and not isinstance(destination,
                                                    concurrent.futures.Future):
        raise TypeError('A future is required for destination argument')
    # concurrent.futures.Future has no event loop: a None loop below means
    # state can be transferred directly, without call_soon_threadsafe().
    source_loop = source._loop if isfuture(source) else None
    dest_loop = destination._loop if isfuture(destination) else None
    def _set_state(future, other):
        # Copy completion state of `other` onto `future`, dispatching on
        # which kind of future `future` is.
        if isfuture(future):
            _copy_future_state(other, future)
        else:
            _set_concurrent_future_state(future, other)
    def _call_check_cancel(destination):
        # Runs when destination completes: propagate a cancellation back
        # to source, hopping onto source's loop if the loops differ.
        if destination.cancelled():
            if source_loop is None or source_loop is dest_loop:
                source.cancel()
            else:
                source_loop.call_soon_threadsafe(source.cancel)
    def _call_set_state(source):
        # Runs when source completes: forward its state to destination,
        # hopping onto destination's loop if the loops differ.
        if dest_loop is None or dest_loop is source_loop:
            _set_state(destination, source)
        else:
            dest_loop.call_soon_threadsafe(_set_state, destination, source)
    destination.add_done_callback(_call_check_cancel)
    source.add_done_callback(_call_set_state)
def wrap_future(future, *, loop=None):
    """Wrap concurrent.futures.Future object.

    Returns *future* unchanged when it is already asyncio-compatible;
    otherwise returns a new future created on *loop* (default: the
    current event loop) chained to it via _chain_future().
    """
    if isfuture(future):
        return future
    assert isinstance(future, concurrent.futures.Future), \
        'concurrent.futures.Future is expected, got {!r}'.format(future)
    if loop is None:
        loop = events.get_event_loop()
    new_future = loop.create_future()
    _chain_future(future, new_future)
    return new_future
| {
"repo_name": "lunixbochs/actualvim",
"path": "lib/asyncio/futures.py",
"copies": "1",
"size": "17563",
"license": "mit",
"hash": -8555216802579907000,
"line_mean": 35.6659707724,
"line_max": 79,
"alpha_frac": 0.6151568639,
"autogenerated": false,
"ratio": 4.417253521126761,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5532410385026761,
"avg_score": null,
"num_lines": null
} |
"""A Future class similar to the one in PEP 3148."""
__all__ = ['CancelledError', 'TimeoutError',
'InvalidStateError',
'Future', 'wrap_future',
]
import concurrent.futures._base
import logging
import reprlib
import sys
import traceback
from . import compat
from . import events
# States for Future.
_PENDING = 'PENDING'
_CANCELLED = 'CANCELLED'
_FINISHED = 'FINISHED'
Error = concurrent.futures._base.Error
CancelledError = concurrent.futures.CancelledError
TimeoutError = concurrent.futures.TimeoutError
STACK_DEBUG = logging.DEBUG - 1 # heavy-duty debugging
class InvalidStateError(Error):
    """The operation is not allowed in this state."""
    # Raised e.g. by result()/exception() before the future is done, and
    # by set_result()/set_exception() once it already is.
class _TracebackLogger:
"""Helper to log a traceback upon destruction if not cleared.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _TracebackLogger, and
then the _TracebackLogger would be included in a cycle, which is
what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
activate() is called, which call is delayed until after all the
Future's callbacks have run. Since usually a Future has at least
one callback (typically set by 'yield from') and usually that
callback extracts the callback, thereby removing the need to
format the exception.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ('loop', 'source_traceback', 'exc', 'tb')
def __init__(self, future, exc):
self.loop = future._loop
self.source_traceback = future._source_traceback
self.exc = exc
self.tb = None
def activate(self):
exc = self.exc
if exc is not None:
self.exc = None
self.tb = traceback.format_exception(exc.__class__, exc,
exc.__traceback__)
def clear(self):
self.exc = None
self.tb = None
def __del__(self):
if self.tb:
msg = 'Future/Task exception was never retrieved\n'
if self.source_traceback:
src = ''.join(traceback.format_list(self.source_traceback))
msg += 'Future/Task created at (most recent call last):\n'
msg += '%s\n' % src.rstrip()
msg += ''.join(self.tb).rstrip()
self.loop.call_exception_handler({'message': msg})
class Future:
    """This class is *almost* compatible with concurrent.futures.Future.
    Differences:
    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.
    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon_threadsafe().
    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.
    (In Python 3.4 or later we may be able to unify the implementations.)
    """
    # Class variables serving as defaults for instance variables.
    # State machine: _PENDING -> _CANCELLED, or _PENDING -> _FINISHED
    # (with either _result or _exception set).
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None
    _blocking = False  # proper use of future (yield vs yield from)
    _log_traceback = False  # Used for Python 3.4 and later
    _tb_logger = None  # Used for Python 3.3 only
    def __init__(self, *, loop=None):
        """Initialize the future.
        The optional event_loop argument allows to explicitly set the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []
        if self._loop.get_debug():
            # In debug mode, remember where the future was created so that
            # __del__ can report it for never-retrieved exceptions.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
    def _format_callbacks(self):
        # Helper for _repr_info(): render the callback list compactly,
        # showing only the first and last entries when there are many.
        cb = self._callbacks
        size = len(cb)
        if not size:
            cb = ''
        def format_cb(callback):
            return events._format_callback_source(callback, ())
        if size == 1:
            cb = format_cb(cb[0])
        elif size == 2:
            cb = '{}, {}'.format(format_cb(cb[0]), format_cb(cb[1]))
        elif size > 2:
            cb = '{}, <{} more>, {}'.format(format_cb(cb[0]),
                                            size-2,
                                            format_cb(cb[-1]))
        return 'cb=[%s]' % cb
    def _repr_info(self):
        # Build the list of fields shown by __repr__().
        info = [self._state.lower()]
        if self._state == _FINISHED:
            if self._exception is not None:
                info.append('exception={!r}'.format(self._exception))
            else:
                # use reprlib to limit the length of the output, especially
                # for very long strings
                result = reprlib.repr(self._result)
                info.append('result={}'.format(result))
        if self._callbacks:
            info.append(self._format_callbacks())
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info
    def __repr__(self):
        info = self._repr_info()
        return '<%s %s>' % (self.__class__.__name__, ' '.join(info))
    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            # Report a never-retrieved exception through the loop's
            # exception handler when the future is garbage-collected.
            if not self._log_traceback:
                # set_exception() was not called, or result() or exception()
                # has consumed the exception
                return
            exc = self._exception
            context = {
                'message': ('%s exception was never retrieved'
                            % self.__class__.__name__),
                'exception': exc,
                'future': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
    def cancel(self):
        """Cancel the future and schedule callbacks.
        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self._schedule_callbacks()
        return True
    def _schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.
        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        # Work on a snapshot; the instance list is emptied before scheduling.
        callbacks = self._callbacks[:]
        if not callbacks:
            return
        self._callbacks[:] = []
        for callback in callbacks:
            self._loop.call_soon(callback, self)
    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED
    # Don't implement running(); see http://bugs.python.org/issue18699
    def done(self):
        """Return True if the future is done.
        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING
    def result(self):
        """Return the result this future represents.
        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Result is not ready.')
        # The exception (if any) counts as retrieved from here on, so
        # silence both logging mechanisms.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        if self._exception is not None:
            raise self._exception
        return self._result
    def exception(self):
        """Return the exception that was set on this future.
        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Exception is not set.')
        # Mark the exception as retrieved (see result() above).
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        return self._exception
    def add_done_callback(self, fn):
        """Add a callback to be run when the future becomes done.
        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self)
        else:
            self._callbacks.append(fn)
    # New method not in PEP 3148.
    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.
        Returns the number of callbacks removed.
        """
        filtered_callbacks = [f for f in self._callbacks if f != fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count
    # So-called internal methods (note: no set_running_or_notify_cancel()).
    def _set_result_unless_cancelled(self, result):
        """Helper setting the result only if the future was not cancelled."""
        if self.cancelled():
            return
        self.set_result(result)
    def set_result(self, result):
        """Mark the future done and set its result.
        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        self._result = result
        self._state = _FINISHED
        self._schedule_callbacks()
    def set_exception(self, exception):
        """Mark the future done and set an exception.
        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        # NOTE(review): unlike later asyncio versions, this copy does not
        # reject StopIteration instances, which interact badly with
        # generator-based coroutines — confirm whether that matters here.
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        if isinstance(exception, type):
            # An exception class was passed; instantiate it.
            exception = exception()
        self._exception = exception
        self._state = _FINISHED
        self._schedule_callbacks()
        if compat.PY34:
            self._log_traceback = True
        else:
            self._tb_logger = _TracebackLogger(self, exception)
            # Arrange for the logger to be activated after all callbacks
            # have had a chance to call result() or exception().
            self._loop.call_soon(self._tb_logger.activate)
    # Truly internal methods.
    def _copy_state(self, other):
        """Internal helper to copy state from another Future.
        The other Future may be a concurrent.futures.Future.
        """
        assert other.done()
        if self.cancelled():
            return
        assert not self.done()
        if other.cancelled():
            self.cancel()
        else:
            exception = other.exception()
            if exception is not None:
                self.set_exception(exception)
            else:
                result = other.result()
                self.set_result(result)
    def __iter__(self):
        if not self.done():
            # Signal to Task._step() that `yield from` was used.
            self._blocking = True
            yield self  # This tells Task to wait for completion.
        assert self.done(), "yield from wasn't used with future"
        return self.result()  # May raise too.
    if compat.PY35:
        __await__ = __iter__  # make compatible with 'await' expression
def _set_concurrent_future_state(concurrent, source):
"""Copy state from a future to a concurrent.futures.Future."""
assert source.done()
if source.cancelled():
concurrent.cancel()
if not concurrent.set_running_or_notify_cancel():
return
exception = source.exception()
if exception is not None:
concurrent.set_exception(exception)
else:
result = source.result()
concurrent.set_result(result)
def _chain_future(source, destination):
    """Chain two futures so that when one completes, so does the other.
    The result (or exception) of source will be copied to destination.
    If destination is cancelled, source gets cancelled too.
    Compatible with both asyncio.Future and concurrent.futures.Future.
    """
    if not isinstance(source, (Future, concurrent.futures.Future)):
        raise TypeError('A future is required for source argument')
    if not isinstance(destination, (Future, concurrent.futures.Future)):
        raise TypeError('A future is required for destination argument')
    # concurrent.futures.Future has no event loop: a None loop below means
    # state can be transferred directly, without call_soon_threadsafe().
    source_loop = source._loop if isinstance(source, Future) else None
    dest_loop = destination._loop if isinstance(destination, Future) else None
    def _set_state(future, other):
        # Copy completion state of `other` onto `future`, dispatching on
        # which kind of future `future` is.
        if isinstance(future, Future):
            future._copy_state(other)
        else:
            _set_concurrent_future_state(future, other)
    def _call_check_cancel(destination):
        # Runs when destination completes: propagate a cancellation back
        # to source, hopping onto source's loop if the loops differ.
        if destination.cancelled():
            if source_loop is None or source_loop is dest_loop:
                source.cancel()
            else:
                source_loop.call_soon_threadsafe(source.cancel)
    def _call_set_state(source):
        # Runs when source completes: forward its state to destination,
        # hopping onto destination's loop if the loops differ.
        if dest_loop is None or dest_loop is source_loop:
            _set_state(destination, source)
        else:
            dest_loop.call_soon_threadsafe(_set_state, destination, source)
    destination.add_done_callback(_call_check_cancel)
    source.add_done_callback(_call_set_state)
def wrap_future(future, *, loop=None):
    """Wrap concurrent.futures.Future object.

    Returns *future* unchanged when it is already an asyncio Future;
    otherwise returns a new Future on *loop* chained to it via
    _chain_future().
    """
    if isinstance(future, Future):
        return future
    assert isinstance(future, concurrent.futures.Future), \
        'concurrent.futures.Future is expected, got {!r}'.format(future)
    new_future = Future(loop=loop)
    _chain_future(future, new_future)
    return new_future
| {
"repo_name": "gvanrossum/asyncio",
"path": "asyncio/futures.py",
"copies": "1",
"size": "16438",
"license": "apache-2.0",
"hash": 877274133838611000,
"line_mean": 35.2869757174,
"line_max": 78,
"alpha_frac": 0.6121182626,
"autogenerated": false,
"ratio": 4.440302539168017,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00003109162702484221,
"num_lines": 453
} |
"""A Future class similar to the one in PEP 3148."""
__all__ = ['CancelledError', 'TimeoutError',
'InvalidStateError',
'Future', 'wrap_future',
]
import concurrent.futures.cooperative
import concurrent.futures.multithreaded
import concurrent.futures.exceptions
import logging
from . import events
# TODO: Do we really want to depend on concurrent.futures internals?
Error = concurrent.futures.exceptions.Error
CancelledError = concurrent.futures.exceptions.CancelledError
TimeoutError = concurrent.futures.exceptions.TimeoutError
InvalidStateError = concurrent.futures.exceptions.InvalidStateError
STACK_DEBUG = logging.DEBUG - 1 # heavy-duty debugging
def loop_as_executor(loop):
    # Adapt an event loop to the callback-executor interface used by
    # Future below (clb_executor): same-thread scheduling via call_soon.
    return loop.call_soon
def loop_as_executor_threadsafe(loop):
    # Thread-safe variant of loop_as_executor: callbacks may be scheduled
    # from threads other than the loop's own.
    return loop.call_soon_threadsafe
class Future(concurrent.futures.cooperative.Future):
    """Future tied to an asyncio event loop.

    Builds on concurrent.futures.cooperative.Future, handing completion
    callbacks to the loop via loop_as_executor() (i.e. loop.call_soon).
    """
    _blocking = False  # proper use of future (yield vs yield from)
    def __init__(self, *, loop=None):
        # NOTE(review): `loop or ...` means a falsy loop object would be
        # silently replaced by the default event loop — confirm intended.
        self._loop = loop or events.get_event_loop()
        super().__init__(clb_executor=loop_as_executor(self._loop))
    def __iter__(self):
        if not self.done():
            # Signal to the Task machinery that `yield from` was used.
            self._blocking = True
            yield self  # This tells Task to wait for completion.
        assert self.done(), "yield from wasn't used with future"
        return self.result()  # May raise too.
    @classmethod
    def _new(cls, other=None, *, clb_executor=None):
        # Factory hook: derive the loop from `other` when given; the
        # clb_executor argument is ignored (set by __init__ from the loop).
        loop = other._loop if other else None
        return cls(loop=loop)
    @classmethod
    def convert(cls, future):
        """Enables compatibility with other futures by wrapping."""
        if isinstance(future, Future):
            return future
        if isinstance(future, concurrent.futures.FutureBase):
            return wrap_future(future)
        raise TypeError("{} is not compatible with {}"
                        .format(_typename(cls), _typename(type(future))))
    @classmethod
    def compatible(cls, futures):
        """Verifies that all futures belong to the same event loop."""
        # An empty iterable is trivially compatible: the StopIteration
        # from next() is swallowed below.
        try:
            it = iter(futures)
            first = next(it)._loop
            if not all(first is rest._loop for rest in it):
                raise ValueError('Futures should belong to the same event loop')
        except StopIteration:
            pass
def _typename(cls):
return cls.__module__ + '.' + cls.__name__
def wrap_future(fut, *, loop=None):
    """Wrap concurrent.futures.FutureBase object.

    Returns *fut* unchanged when it is already a Future; otherwise
    returns a new Future on *loop* (default: the current event loop)
    whose state mirrors *fut* and whose cancellation is propagated back.
    """
    if isinstance(fut, Future):
        return fut
    assert isinstance(fut, concurrent.futures.FutureBase), \
        'concurrent.futures.FutureBase is expected, got {!r}'.format(fut)
    if loop is None:
        loop = events.get_event_loop()
    new_future = Future(loop=loop)
    def _check_cancel_other(f):
        # Propagate cancellation of the wrapper back to the wrapped future.
        if f.cancelled():
            fut.cancel()
    new_future.add_done_callback(_check_cancel_other)
    # When the wrapped future completes, copy its state onto the wrapper
    # from the loop's own thread.
    fut.add_done_callback(
        lambda future: loop.call_soon_threadsafe(
            new_future.set_from, fut))
    return new_future
| {
"repo_name": "mikhtonyuk/rxpython",
"path": "asyncio/futures.py",
"copies": "1",
"size": "3013",
"license": "mit",
"hash": 2839998452173772000,
"line_mean": 30.7157894737,
"line_max": 80,
"alpha_frac": 0.6518420179,
"autogenerated": false,
"ratio": 4.049731182795699,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005574600613923058,
"num_lines": 95
} |
"""A Future class similar to the one in PEP 3148."""
__all__ = ['CancelledError', 'TimeoutError',
'InvalidStateError',
'Future', 'wrap_future',
]
import logging
import six
import sys
import traceback
try:
import reprlib # Python 3
except ImportError:
import repr as reprlib # Python 2
from . import compat
from . import events
from . import executor
# States for Future.
_PENDING = 'PENDING'
_CANCELLED = 'CANCELLED'
_FINISHED = 'FINISHED'
Error = executor.Error
CancelledError = executor.CancelledError
TimeoutError = executor.TimeoutError
STACK_DEBUG = logging.DEBUG - 1 # heavy-duty debugging
class InvalidStateError(Error):
    """The operation is not allowed in this state."""
    # Raised e.g. by result()/exception() before the future is done, and
    # by set_result()/set_exception() once it already is.
class _TracebackLogger(object):
"""Helper to log a traceback upon destruction if not cleared.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _TracebackLogger, and
then the _TracebackLogger would be included in a cycle, which is
what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
activate() is called, which call is delayed until after all the
Future's callbacks have run. Since usually a Future has at least
one callback (typically set by 'yield from') and usually that
callback extracts the callback, thereby removing the need to
format the exception.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ('loop', 'source_traceback', 'exc', 'tb')
def __init__(self, future, exc):
self.loop = future._loop
self.source_traceback = future._source_traceback
self.exc = exc
self.tb = None
def activate(self):
exc = self.exc
if exc is not None:
self.exc = None
self.tb = traceback.format_exception(exc.__class__, exc,
exc.__traceback__)
def clear(self):
self.exc = None
self.tb = None
def __del__(self):
if self.tb:
msg = 'Future/Task exception was never retrieved\n'
if self.source_traceback:
src = ''.join(traceback.format_list(self.source_traceback))
msg += 'Future/Task created at (most recent call last):\n'
msg += '%s\n' % src.rstrip()
msg += ''.join(self.tb).rstrip()
self.loop.call_exception_handler({'message': msg})
class Future(object):
    """This class is *almost* compatible with concurrent.futures.Future.

    Differences:

    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.

    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon_threadsafe().

    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.

    (In Python 3.4 or later we may be able to unify the implementations.)
    """

    # Class variables serving as defaults for instance variables.
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None

    _blocking = False  # proper use of future (yield vs yield from)

    # Used by Python 2 to raise the exception with the original traceback
    # in the exception() method in debug mode
    _exception_tb = None

    _log_traceback = False   # Used for Python 3.4 and later
    _tb_logger = None        # Used for Python 3.3 only

    def __init__(self, loop=None):
        """Initialize the future.

        The optional event_loop argument allows to explicitly set the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []
        # In debug mode, remember where the future was created so that
        # "exception never retrieved" reports can point at the origin.
        if self._loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def _format_callbacks(self):
        # Produce a compact 'cb=[...]' summary for __repr__; at most the
        # first and last callbacks are shown when there are more than two.
        cb = self._callbacks
        size = len(cb)
        if not size:
            cb = ''

        def format_cb(callback):
            return events._format_callback_source(callback, ())

        if size == 1:
            cb = format_cb(cb[0])
        elif size == 2:
            cb = '{0}, {1}'.format(format_cb(cb[0]), format_cb(cb[1]))
        elif size > 2:
            cb = '{0}, <{1} more>, {2}'.format(format_cb(cb[0]),
                                               size-2,
                                               format_cb(cb[-1]))
        return 'cb=[%s]' % cb

    def _repr_info(self):
        # Build the list of fragments joined by __repr__.
        info = [self._state.lower()]
        if self._state == _FINISHED:
            if self._exception is not None:
                info.append('exception={0!r}'.format(self._exception))
            else:
                # use reprlib to limit the length of the output, especially
                # for very long strings
                result = reprlib.repr(self._result)
                info.append('result={0}'.format(result))
        if self._callbacks:
            info.append(self._format_callbacks())
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info

    def __repr__(self):
        info = self._repr_info()
        return '<%s %s>' % (self.__class__.__name__, ' '.join(info))

    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            # Report an exception that was set but never consumed; on
            # Python < 3.4 this job is done by _TracebackLogger instead.
            if not self._log_traceback:
                # set_exception() was not called, or result() or exception()
                # has consumed the exception
                return
            exc = self._exception
            context = {
                'message': ('%s exception was never retrieved'
                            % self.__class__.__name__),
                'exception': exc,
                'future': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)

    def cancel(self):
        """Cancel the future and schedule callbacks.

        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self._schedule_callbacks()
        return True

    def _schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.

        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        callbacks = self._callbacks[:]
        if not callbacks:
            return

        self._callbacks[:] = []
        for callback in callbacks:
            self._loop.call_soon(callback, self)

    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED

    # Don't implement running(); see http://bugs.python.org/issue18699

    def done(self):
        """Return True if the future is done.

        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING

    def result(self):
        """Return the result this future represents.

        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Result is not ready.')
        # Retrieving the outcome disarms the "never retrieved" reporting.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        exc_tb = self._exception_tb
        self._exception_tb = None
        if self._exception is not None:
            if exc_tb is not None:
                # Python 2: re-raise with the original traceback.
                compat.reraise(type(self._exception), self._exception, exc_tb)
            else:
                raise self._exception
        return self._result

    def exception(self):
        """Return the exception that was set on this future.

        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Exception is not set.')
        # Retrieving the outcome disarms the "never retrieved" reporting.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        self._exception_tb = None
        return self._exception

    def add_done_callback(self, fn):
        """Add a callback to be run when the future becomes done.

        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self)
        else:
            self._callbacks.append(fn)

    # New method not in PEP 3148.

    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.

        Returns the number of callbacks removed.
        """
        filtered_callbacks = [f for f in self._callbacks if f != fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count

    # So-called internal methods (note: no set_running_or_notify_cancel()).

    def _set_result_unless_cancelled(self, result):
        """Helper setting the result only if the future was not cancelled."""
        if self.cancelled():
            return
        self.set_result(result)

    def set_result(self, result):
        """Mark the future done and set its result.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{0}: {1!r}'.format(self._state, self))
        self._result = result
        self._state = _FINISHED
        self._schedule_callbacks()

    def _get_exception_tb(self):
        # Accessor for the Python 2 traceback captured by
        # _set_exception_with_tb(); None on Python 3.
        return self._exception_tb

    def set_exception(self, exception):
        self._set_exception_with_tb(exception, None)

    def _set_exception_with_tb(self, exception, exc_tb):
        """Mark the future done and set an exception.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{0}: {1!r}'.format(self._state, self))
        if isinstance(exception, type):
            exception = exception()
        self._exception = exception
        if exc_tb is not None:
            self._exception_tb = exc_tb
            exc_tb = None
        elif not six.PY3:
            # Python 2: capture the active traceback so result() can
            # re-raise with it later.
            self._exception_tb = sys.exc_info()[2]
        self._state = _FINISHED
        self._schedule_callbacks()
        if compat.PY34:
            # Python 3.4+: __del__ (above) reports unretrieved exceptions.
            self._log_traceback = True
        else:
            # Older Pythons: delegate the reporting to a helper object that
            # does not participate in reference cycles.
            self._tb_logger = _TracebackLogger(self, exception)
            if hasattr(exception, '__traceback__'):
                # Python 3: exception contains a link to the traceback

                # Arrange for the logger to be activated after all callbacks
                # have had a chance to call result() or exception().
                self._loop.call_soon(self._tb_logger.activate)
            else:
                if self._loop.get_debug():
                    frame = sys._getframe(1)
                    tb = ['Traceback (most recent call last):\n']
                    if self._exception_tb is not None:
                        tb += traceback.format_tb(self._exception_tb)
                    else:
                        tb += traceback.format_stack(frame)
                    tb += traceback.format_exception_only(type(exception), exception)
                    self._tb_logger.tb = tb
                else:
                    self._tb_logger.tb = traceback.format_exception_only(
                        type(exception),
                        exception)
                self._tb_logger.exc = None

    # Truly internal methods.

    def _copy_state(self, other):
        """Internal helper to copy state from another Future.

        The other Future may be a concurrent.futures.Future.
        """
        assert other.done()
        if self.cancelled():
            return
        assert not self.done()
        if other.cancelled():
            self.cancel()
        else:
            exception = other.exception()
            if exception is not None:
                self.set_exception(exception)
            else:
                result = other.result()
                self.set_result(result)
# Types that wrap_future() treats as "already a future".  When the stdlib
# asyncio module is importable, its Future is accepted for interoperability.
if events.asyncio is None:
    _FUTURE_CLASSES = Future
else:
    # Accept also asyncio Future objects for interoperability
    _FUTURE_CLASSES = (Future, events.asyncio.Future)
def wrap_future(fut, loop=None):
    """Wrap concurrent.futures.Future object."""
    # Already a trollius/asyncio future: nothing to wrap.
    if isinstance(fut, _FUTURE_CLASSES):
        return fut
    assert isinstance(fut, executor.Future), \
        'concurrent.futures.Future is expected, got {0!r}'.format(fut)
    target_loop = events.get_event_loop() if loop is None else loop
    wrapper = Future(loop=target_loop)

    def _propagate_cancel(f):
        # Cancelling the wrapper cancels the wrapped concurrent future too.
        if f.cancelled():
            fut.cancel()

    def _propagate_state(source):
        # Copy the outcome back on the wrapper's own loop; futures are not
        # thread-safe and the concurrent future completes in another thread.
        target_loop.call_soon_threadsafe(wrapper._copy_state, source)

    wrapper.add_done_callback(_propagate_cancel)
    fut.add_done_callback(_propagate_state)
    return wrapper
| {
"repo_name": "haypo/trollius",
"path": "trollius/futures.py",
"copies": "1",
"size": "16289",
"license": "apache-2.0",
"hash": -2187980279730275300,
"line_mean": 35.1977777778,
"line_max": 85,
"alpha_frac": 0.5938363313,
"autogenerated": false,
"ratio": 4.431175190424375,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00016486444843534113,
"num_lines": 450
} |
"""A Future class similar to the one in PEP 3148."""
__all__ = (
'Future', 'wrap_future', 'isfuture',
)
import concurrent.futures
import contextvars
import logging
import sys
from . import base_futures
from . import events
from . import exceptions
from . import format_helpers
isfuture = base_futures.isfuture
_PENDING = base_futures._PENDING
_CANCELLED = base_futures._CANCELLED
_FINISHED = base_futures._FINISHED
STACK_DEBUG = logging.DEBUG - 1 # heavy-duty debugging
class Future:
    """This class is *almost* compatible with concurrent.futures.Future.

    Differences:

    - This class is not thread-safe.

    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.

    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon().

    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.

    (In Python 3.4 or later we may be able to unify the implementations.)
    """

    # Class variables serving as defaults for instance variables.
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None

    # This field is used for a dual purpose:
    # - Its presence is a marker to declare that a class implements
    #   the Future protocol (i.e. is intended to be duck-type compatible).
    #   The value must also be not-None, to enable a subclass to declare
    #   that it is not compatible by setting this to None.
    # - It is set by __iter__() below so that Task._step() can tell
    #   the difference between
    #   `await Future()` or`yield from Future()` (correct) vs.
    #   `yield Future()` (incorrect).
    _asyncio_future_blocking = False

    # Name-mangled so subclasses cannot accidentally re-arm traceback
    # logging; writable only through the _log_traceback property below.
    __log_traceback = False

    def __init__(self, *, loop=None):
        """Initialize the future.

        The optional event_loop argument allows explicitly setting the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []
        # In debug mode, remember where the future was created so that
        # "exception never retrieved" reports can point at the origin.
        if self._loop.get_debug():
            self._source_traceback = format_helpers.extract_stack(
                sys._getframe(1))

    _repr_info = base_futures._future_repr_info

    def __repr__(self):
        return '<{} {}>'.format(self.__class__.__name__,
                                ' '.join(self._repr_info()))

    def __del__(self):
        # Report an exception that was set but never consumed by result()
        # or exception() (relies on PEP 442 destructor semantics).
        if not self.__log_traceback:
            # set_exception() was not called, or result() or exception()
            # has consumed the exception
            return
        exc = self._exception
        context = {
            'message':
                f'{self.__class__.__name__} exception was never retrieved',
            'exception': exc,
            'future': self,
        }
        if self._source_traceback:
            context['source_traceback'] = self._source_traceback
        self._loop.call_exception_handler(context)

    @property
    def _log_traceback(self):
        return self.__log_traceback

    @_log_traceback.setter
    def _log_traceback(self, val):
        # External code (e.g. Task) may only disarm the logging, never arm
        # it; only set_exception() below turns it on.
        if bool(val):
            raise ValueError('_log_traceback can only be set to False')
        self.__log_traceback = False

    def get_loop(self):
        """Return the event loop the Future is bound to."""
        return self._loop

    def cancel(self):
        """Cancel the future and schedule callbacks.

        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        # A cancelled future should not log its (never-retrieved) exception.
        self.__log_traceback = False
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self.__schedule_callbacks()
        return True

    def __schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.

        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        callbacks = self._callbacks[:]
        if not callbacks:
            return

        self._callbacks[:] = []
        # Callbacks are stored as (fn, contextvars.Context) pairs; run each
        # one in the context captured when it was registered.
        for callback, ctx in callbacks:
            self._loop.call_soon(callback, self, context=ctx)

    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED

    # Don't implement running(); see http://bugs.python.org/issue18699

    def done(self):
        """Return True if the future is done.

        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING

    def result(self):
        """Return the result this future represents.

        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise exceptions.CancelledError
        if self._state != _FINISHED:
            raise exceptions.InvalidStateError('Result is not ready.')
        # Retrieving the outcome disarms the "never retrieved" reporting.
        self.__log_traceback = False
        if self._exception is not None:
            raise self._exception
        return self._result

    def exception(self):
        """Return the exception that was set on this future.

        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise exceptions.CancelledError
        if self._state != _FINISHED:
            raise exceptions.InvalidStateError('Exception is not set.')
        # Retrieving the outcome disarms the "never retrieved" reporting.
        self.__log_traceback = False
        return self._exception

    def add_done_callback(self, fn, *, context=None):
        """Add a callback to be run when the future becomes done.

        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self, context=context)
        else:
            # Capture the current context so the callback later runs with
            # the caller's contextvars.
            if context is None:
                context = contextvars.copy_context()
            self._callbacks.append((fn, context))

    # New method not in PEP 3148.

    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.

        Returns the number of callbacks removed.
        """
        filtered_callbacks = [(f, ctx)
                              for (f, ctx) in self._callbacks
                              if f != fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count

    # So-called internal methods (note: no set_running_or_notify_cancel()).

    def set_result(self, result):
        """Mark the future done and set its result.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise exceptions.InvalidStateError(f'{self._state}: {self!r}')
        self._result = result
        self._state = _FINISHED
        self.__schedule_callbacks()

    def set_exception(self, exception):
        """Mark the future done and set an exception.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise exceptions.InvalidStateError(f'{self._state}: {self!r}')
        if isinstance(exception, type):
            exception = exception()
        # StopIteration would be absorbed by the generator machinery that
        # drives coroutines, silently corrupting the await protocol.
        if type(exception) is StopIteration:
            raise TypeError("StopIteration interacts badly with generators "
                            "and cannot be raised into a Future")
        self._exception = exception
        self._state = _FINISHED
        self.__schedule_callbacks()
        # Arm "exception never retrieved" reporting (see __del__).
        self.__log_traceback = True

    def __await__(self):
        if not self.done():
            self._asyncio_future_blocking = True
            yield self  # This tells Task to wait for completion.
        if not self.done():
            raise RuntimeError("await wasn't used with future")
        return self.result()  # May raise too.

    __iter__ = __await__  # make compatible with 'yield from'.
# Needed for testing purposes.
# Keep a reference to the pure-Python implementation under a stable name,
# because `Future` itself may be rebound to the C version below.
_PyFuture = Future
def _get_loop(fut):
# Tries to call Future.get_loop() if it's available.
# Otherwise fallbacks to using the old '_loop' property.
try:
get_loop = fut.get_loop
except AttributeError:
pass
else:
return get_loop()
return fut._loop
def _set_result_unless_cancelled(fut, result):
"""Helper setting the result only if the future was not cancelled."""
if fut.cancelled():
return
fut.set_result(result)
def _convert_future_exc(exc):
    """Translate a concurrent.futures exception into its asyncio twin.

    Exceptions of any other (exact) type are returned unchanged.
    """
    translation = {
        concurrent.futures.CancelledError: exceptions.CancelledError,
        concurrent.futures.TimeoutError: exceptions.TimeoutError,
        concurrent.futures.InvalidStateError: exceptions.InvalidStateError,
    }
    replacement = translation.get(type(exc))
    if replacement is None:
        return exc
    return replacement(*exc.args)
def _set_concurrent_future_state(concurrent, source):
    """Copy state from a future to a concurrent.futures.Future."""
    assert source.done()
    if source.cancelled():
        concurrent.cancel()
    if not concurrent.set_running_or_notify_cancel():
        # The concurrent future was cancelled meanwhile; nothing to copy.
        return
    exc = source.exception()
    if exc is None:
        concurrent.set_result(source.result())
    else:
        concurrent.set_exception(_convert_future_exc(exc))
def _copy_future_state(source, dest):
    """Internal helper to copy state from another Future.

    The other Future may be a concurrent.futures.Future.
    """
    assert source.done()
    if dest.cancelled():
        # The destination gave up already; silently drop the state.
        return
    assert not dest.done()
    if source.cancelled():
        dest.cancel()
        return
    exc = source.exception()
    if exc is None:
        dest.set_result(source.result())
    else:
        dest.set_exception(_convert_future_exc(exc))
def _chain_future(source, destination):
    """Chain two futures so that when one completes, so does the other.

    The result (or exception) of source will be copied to destination.
    If destination is cancelled, source gets cancelled too.
    Compatible with both asyncio.Future and concurrent.futures.Future.
    """
    if not isfuture(source) and not isinstance(source,
                                               concurrent.futures.Future):
        raise TypeError('A future is required for source argument')
    if not isfuture(destination) and not isinstance(destination,
                                                    concurrent.futures.Future):
        raise TypeError('A future is required for destination argument')
    # concurrent.futures.Future objects are not bound to a loop: for those
    # the loop stays None and state is copied synchronously.
    source_loop = _get_loop(source) if isfuture(source) else None
    dest_loop = _get_loop(destination) if isfuture(destination) else None

    def _set_state(future, other):
        if isfuture(future):
            _copy_future_state(other, future)
        else:
            _set_concurrent_future_state(future, other)

    def _call_check_cancel(destination):
        # Runs when `destination` completes: propagate a cancellation back
        # to `source`, hopping to the source's loop if it is a different one
        # (asyncio futures are not thread-safe).
        if destination.cancelled():
            if source_loop is None or source_loop is dest_loop:
                source.cancel()
            else:
                source_loop.call_soon_threadsafe(source.cancel)

    def _call_set_state(source):
        # Runs when `source` completes: copy its outcome to `destination`,
        # unless the destination was cancelled and its loop already closed
        # (in which case scheduling the copy would fail anyway).
        if (destination.cancelled() and
                dest_loop is not None and dest_loop.is_closed()):
            return
        if dest_loop is None or dest_loop is source_loop:
            _set_state(destination, source)
        else:
            dest_loop.call_soon_threadsafe(_set_state, destination, source)

    destination.add_done_callback(_call_check_cancel)
    source.add_done_callback(_call_set_state)
def wrap_future(future, *, loop=None):
    """Wrap concurrent.futures.Future object."""
    # Asyncio futures pass through untouched.
    if isfuture(future):
        return future
    assert isinstance(future, concurrent.futures.Future), \
        f'concurrent.futures.Future is expected, got {future!r}'
    target_loop = loop if loop is not None else events.get_event_loop()
    wrapper = target_loop.create_future()
    # Mirror the concurrent future's outcome onto the new asyncio future
    # (and propagate cancellation back the other way).
    _chain_future(future, wrapper)
    return wrapper
# Prefer the C-accelerated Future from the _asyncio extension module when it
# is available; fall back to the pure-Python class defined above otherwise.
try:
    import _asyncio
except ImportError:
    pass
else:
    # _CFuture is needed for tests.
    Future = _CFuture = _asyncio.Future
| {
"repo_name": "batermj/algorithm-challenger",
"path": "code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/asyncio/futures.py",
"copies": "1",
"size": "13041",
"license": "apache-2.0",
"hash": 9213721555885797000,
"line_mean": 31.9318181818,
"line_max": 79,
"alpha_frac": 0.6165171383,
"autogenerated": false,
"ratio": 4.40723217303143,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.552374931133143,
"avg_score": null,
"num_lines": null
} |
"""A Future class similar to the one in PEP 3148."""
from .events import *
from .helpers import *
# States for Future.
_PENDING = 'PENDING'
_CANCELLED = 'CANCELLED'
_FINISHED = 'FINISHED'
class Error(Exception):
    """Base class for all errors raised by this futures module."""
    pass
class CancelledError(Error):
    """The future or task was cancelled."""
    pass
class TimeoutError(Error):
    """The operation exceeded its deadline."""
    pass
class InvalidStateError(Error):
    """The operation is not allowed in this state."""
class _TracebackLogger:
"""Helper to log a traceback upon destruction if not cleared.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _TracebackLogger, and
then the _TracebackLogger would be included in a cycle, which is
what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
activate() is called, which call is delayed until after all the
Future's callbacks have run. Since usually a Future has at least
one callback (typically set by 'yield from') and usually that
callback extracts the callback, thereby removing the need to
format the exception.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ('loop', 'source_traceback', 'exc', 'tb')
def __init__(self, future, exc):
self.loop = future._loop
self.source_traceback = future._source_traceback
self.exc = exc
self.tb = None
def activate(self):
exc = self.exc
if exc is not None:
self.exc = None
#self.tb = traceback.format_exception(exc.__class__, exc,
#exc.__traceback__)
# FIXME: Brython does not provide __traceback__
try:
self.tb = traceback.format_exception(exc.__class__, exc)
except:
self.tb = None
def clear(self):
self.exc = None
self.tb = None
def __del__(self):
if self.tb:
msg = 'Future/Task exception was never retrieved\n'
if self.source_traceback:
src = ''.join(traceback.format_list(self.source_traceback))
msg += 'Future/Task created at (most recent call last):\n'
msg += '%s\n' % src.rstrip()
msg += ''.join(self.tb).rstrip()
self.loop.call_exception_handler({'message': msg})
class Future:
    """This class is *almost* compatible with concurrent.futures.Future.

    Differences:

    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.

    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon_threadsafe().

    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.

    (In Python 3.4 or later we may be able to unify the implementations.)
    """

    # Class variables serving as defaults for instance variables.
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None

    _blocking = False  # proper use of future (yield vs yield from)

    _log_traceback = False   # Used for Python 3.4 and later
    _tb_logger = None        # Used for Python 3.3 only

    def __init__(self, *, loop=None):
        """Initialize the future.

        The optional event_loop argument allows to explicitly set the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        # NOTE(review): `events` is expected to be bound by the star imports
        # at module top -- verify.
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []

    def _format_callbacks(self):
        # Produce a compact 'cb=[...]' summary for __repr__; at most the
        # first and last callbacks are shown when there are more than two.
        cb = self._callbacks
        size = len(cb)
        if not size:
            cb = ''

        def format_cb(callback):
            return helpers.format_callback(callback, ())

        if size == 1:
            cb = format_cb(cb[0])
        elif size == 2:
            cb = '{}, {}'.format(format_cb(cb[0]), format_cb(cb[1]))
        elif size > 2:
            cb = '{}, <{} more>, {}'.format(format_cb(cb[0]),
                                            size-2,
                                            format_cb(cb[-1]))
        return 'cb=[%s]' % cb

    def _repr_info(self):
        # Build the list of fragments joined by __repr__.
        info = [self._state.lower()]
        if self._state == _FINISHED:
            if self._exception is not None:
                info.append('exception={!r}'.format(self._exception))
            else:
                # use reprlib to limit the length of the output, especially
                # for very long strings
                result = reprlib.repr(self._result)
                info.append('result={}'.format(result))
        if self._callbacks:
            info.append(self._format_callbacks())
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info

    def __repr__(self):
        info = self._repr_info()
        return '<%s %s>' % (self.__class__.__name__, ' '.join(info))

    def __del__(self):
        # Report an exception that was set but never consumed by result()
        # or exception().
        if not self._log_traceback:
            # set_exception() was not called, or result() or exception()
            # has consumed the exception
            return
        exc = self._exception
        context = {
            'message': ('%s exception was never retrieved'
                        % self.__class__.__name__),
            'exception': exc,
            'future': self,
        }
        if self._source_traceback:
            context['source_traceback'] = self._source_traceback
        self._loop.call_exception_handler(context)

    def cancel(self):
        """Cancel the future and schedule callbacks.

        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self._schedule_callbacks()
        return True

    def _schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.

        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        callbacks = self._callbacks[:]
        if not callbacks:
            return

        self._callbacks[:] = []
        for callback in callbacks:
            self._loop.call_soon(callback, self)

    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED

    # Don't implement running(); see http://bugs.python.org/issue18699

    def done(self):
        """Return True if the future is done.

        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING

    def result(self):
        """Return the result this future represents.

        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Result is not ready.')
        # Retrieving the outcome disarms the "never retrieved" reporting.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        if self._exception is not None:
            raise self._exception
        return self._result

    def exception(self):
        """Return the exception that was set on this future.

        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Exception is not set.')
        # Retrieving the outcome disarms the "never retrieved" reporting.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        return self._exception

    def add_done_callback(self, fn):
        """Add a callback to be run when the future becomes done.

        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self)
        else:
            self._callbacks.append(fn)

    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.

        Returns the number of callbacks removed.
        """
        filtered_callbacks = [f for f in self._callbacks if not f == fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count

    # So-called internal methods (note: no set_running_or_notify_cancel()).

    def _set_result_unless_cancelled(self, result):
        """Helper setting the result only if the future was not cancelled."""
        if self.cancelled():
            return
        self.set_result(result)

    def set_result(self, result):
        """Mark the future done and set its result.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        self._result = result
        self._state = _FINISHED
        self._schedule_callbacks()

    def set_exception(self, exception):
        """Mark the future done and set an exception.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        if isinstance(exception, type):
            exception = exception()
        self._exception = exception
        self._state = _FINISHED
        self._schedule_callbacks()
        self._log_traceback = True
        # FIXME: This is not needed if we can rely on garbage collection
        self._tb_logger = _TracebackLogger(self, exception)
        # Arrange for the logger to be activated after all callbacks
        # have had a chance to call result() or exception().
        self._loop.call_soon(self._tb_logger.activate)

    def __iter__(self):
        if not self.done():
            self._blocking = True
            yield self  # This tells Task to wait for completion.
        assert self.done(), "yield from wasn't used with future"
        return self.result()  # May raise too.
class DelayedFuture(Future):
    """
    A class representing a Future Call result.

    Schedules *func* to be called after *timeout* seconds on the future's
    event loop; the call's return value (or the exception it raises)
    becomes this future's result.
    """

    def __init__(self, func, timeout, *args, **kwargs):
        super().__init__()
        self._func = func
        self._args = args
        self._kwargs = kwargs
        # Bug fix: the asyncio-style call_later() signature is
        # (delay, callback); the original passed the arguments swapped.
        self._handle = self._loop.call_later(timeout, self.handler)

    def handler(self):
        """Run the wrapped call and transfer its outcome to the future."""
        try:
            res = self._func(*self._args, **self._kwargs)
            self.set_result(res)
        except Exception as ex:
            self.set_exception(ex)

    def cancel(self):
        """Cancel the pending call and the future itself.

        Bug fix: the original only cancelled the timer handle, which left
        the future PENDING forever and returned None instead of honoring
        the bool contract of Future.cancel().
        """
        self._handle.cancel()
        return super().cancel()
class ImmediateFuture(DelayedFuture):
    # Runs *func* on the first available loop pass.
    # NOTE(review): the delay passed here is 1, not 0; for a class named
    # "Immediate" a delay of 0 would be expected -- verify the time unit of
    # this loop's call_later before changing it.
    def __init__(self, func, *args, **kwargs):
        super().__init__(func, 1, *args, **kwargs)
| {
"repo_name": "jonathanverner/brython",
"path": "www/src/Lib/site-packages/simpleaio/futures.py",
"copies": "5",
"size": "13617",
"license": "bsd-3-clause",
"hash": -5362133132676593000,
"line_mean": 34.0953608247,
"line_max": 78,
"alpha_frac": 0.6013806272,
"autogenerated": false,
"ratio": 4.42829268292683,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.752967331012683,
"avg_score": null,
"num_lines": null
} |
# AGAGD Models Imports
import agagd_core.models as agagd_models
# DJango Imports
import django_tables2 as tables
from django.core.exceptions import ObjectDoesNotExist
from django.urls import reverse
from django.utils.safestring import mark_safe
# Base Bootstrap Column Attributes
# Hides the column below Bootstrap's "lg" breakpoint; the cells reappear as
# regular table cells on large and extra-large screens.
default_bootstrap_column_attrs = {
    "th": {"class": "d-none d-lg-table-cell d-xl-table-cell"},
    "td": {"class": "d-none d-lg-table-cell d-xl-table-cell"},
}

# Base Bootstrap Column Header Attributes
# Dark table header with scoped <th> cells; applied through each table's Meta.
default_bootstrap_header_column_attrs = {
    "class": "table",
    "thead": {"class": "thead-dark"},
    "th": {"scope": "col"},
}
# Column for the Winner of the Game
class LinkFullMembersNameColumn(tables.Column):
    """Linkified member-name column that highlights the game's winner.

    Renders the member's full name plus AGA id as a link, and styles the
    cell as ``winner`` or ``runner-up`` depending on whether this column's
    color matches the recorded game result.
    """

    def __init__(
        self,
        color="W",
        viewname=None,
        urlconf=None,
        args=None,
        kwargs=None,
        current_app=None,
        attrs=None,
        **extra,
    ):
        link_spec = dict(
            viewname=viewname,
            urlconf=urlconf,
            args=args,
            kwargs=kwargs,
            current_app=current_app,
        )
        super().__init__(attrs=attrs, linkify=link_spec, **extra)
        self.color = color

    def render(self, value, record):
        # Style the cell according to whether this side won the game.
        css_class = "winner" if record["result"] == self.color else "runner-up"
        self.attrs["td"] = {"class": css_class}
        try:
            member = agagd_models.Member.objects.get(member_id=value)
        except ObjectDoesNotExist:
            return None
        return f"{member.full_name} ({value})"
# Basic table which is used as a base for many of the game layouts.
class GamesTable(tables.Table):
    # Game date, hidden on small screens via the shared Bootstrap attrs.
    game_date = tables.Column(
        verbose_name="Date", attrs=default_bootstrap_column_attrs, orderable=False
    )
    handicap = tables.Column(attrs=default_bootstrap_column_attrs, orderable=False)
    # White player: linked full name, highlighted when White ("W") won.
    pin_player_1 = LinkFullMembersNameColumn(
        color="W",
        viewname="beta:players_profile",
        verbose_name="White",
        kwargs={"player_id": tables.A("pin_player_1")},
        orderable=False,
    )
    # Black player: linked full name, highlighted when Black ("B") won.
    pin_player_2 = LinkFullMembersNameColumn(
        color="B",
        viewname="beta:players_profile",
        verbose_name="Black",
        kwargs={"player_id": tables.A("pin_player_2")},
        orderable=False,
    )
    # Link to the tournament the game was played in.
    tournament_code = tables.LinkColumn(
        verbose_name="Tournament",
        viewname="tournament_detail",
        kwargs={"tourn_code": tables.A("tournament_code")},
        orderable=False,
    )

    class Meta:
        model = agagd_models.Game
        fields = (
            "game_date",
            "handicap",
            "pin_player_1",
            "pin_player_2",
            "tournament_code",
        )
        # Display columns in exactly the declared field order.
        sequence = fields
        attrs = default_bootstrap_header_column_attrs
        template_name = "django_tables2/bootstrap4.html"
class PlayersInformationTable(tables.Table):
    """Member summary: name, id, status, rating, and renewal due date."""

    full_name = tables.Column()
    member_id = tables.Column()
    status = tables.Column()
    rating = tables.Column()
    renewal_due = tables.Column()

    class Meta:
        # NOTE(review): dotted name rather than a "beta/..." path -- presumably
        # resolved by a custom template loader; confirm it is intentional.
        template_name = "beta.player_profile_information.html"
class PlayersOpponentTable(tables.Table):
    """Per-opponent win/loss record for a player, opponent name linkified."""

    opponent = tables.Column(
        orderable=False,
        linkify={
            "viewname": "beta:players_profile",
            "args": [tables.A("opponent.member_id")],
        },
    )
    total = tables.Column(orderable=False, verbose_name="Games")
    won = tables.Column(orderable=False, verbose_name="Won", default=0)
    lost = tables.Column(orderable=False, verbose_name="Lost")
    ratio = tables.Column(
        verbose_name="Rate", default=0, empty_values=(-1,), orderable=False
    )

    def render_ratio(self, record):
        """Render won/total as a two-decimal string.

        Guards against rows with zero recorded games, which previously
        raised ZeroDivisionError.
        """
        total = record["total"]
        if not total:
            return "0.00"
        return f"{record['won'] / total:.2f}"

    class Meta:
        attrs = default_bootstrap_header_column_attrs
        template_name = "django_tables2/bootstrap4.html"
class TournamentsTable(tables.Table):
    """Tournament listing: date, linked name, player count, and rating date."""

    tournament_date = tables.Column(
        verbose_name="Date", attrs=default_bootstrap_column_attrs, orderable=False
    )
    # Tournament name links through to the tournament detail page.
    description = tables.LinkColumn(
        "tournament_detail",
        verbose_name="Name",
        kwargs={"tourn_code": tables.A("tournament_code")},
        orderable=False,
    )
    total_players = tables.Column(
        verbose_name="# Players", attrs=default_bootstrap_column_attrs, orderable=False
    )
    # elab_date: date the tournament was rated ("elaborated").
    elab_date = tables.Column(
        verbose_name="Rated", attrs=default_bootstrap_column_attrs, orderable=False
    )

    class Meta:
        model = agagd_models.Tournament
        fields = ("tournament_date", "description", "total_players", "elab_date")
        sequence = fields
        attrs = default_bootstrap_header_column_attrs
        template_name = "django_tables2/bootstrap4.html"
class PlayersTournamentTable(tables.Table):
    """Tournaments a player participated in, with per-tournament won/lost."""

    tournament = tables.LinkColumn(
        "tournament_detail",
        kwargs={"tourn_code": tables.A("tournament.pk")},
        orderable=False,
    )
    date = tables.Column(orderable=False, default="Unknown")
    won = tables.Column(orderable=False, verbose_name="Won", default=0)
    lost = tables.Column(orderable=False, verbose_name="Lost", default=0)

    class Meta:
        fields = ("date", "tournament", "won", "lost")
        sequence = fields
        attrs = default_bootstrap_header_column_attrs
        template_name = "django_tables2/bootstrap4.html"
class Top10DanTable(tables.Table):
    """Top-10 list of dan-rated players (linked name, sigma, rating)."""

    pin_player = tables.Column(
        orderable=False,
        linkify={
            "viewname": "beta:players_profile",
            "args": [tables.A("pin_player.member_id")],
        },
    )
    sigma = tables.Column(orderable=False)
    rating = tables.Column(orderable=False)

    def render_pin_player(self, value):
        """Render the player cell, or None when no Member row exists.

        The query is an existence check only; its result was previously bound
        to an unused local.
        """
        try:
            # Raises ObjectDoesNotExist for ratings rows with no Member record.
            agagd_models.Member.objects.values("full_name", "member_id").get(
                member_id=value.member_id
            )
            return f"{value}"
        except ObjectDoesNotExist:
            return None

    class Meta:
        model = agagd_models.Players
        fields = ("pin_player", "sigma", "rating")
        sequence = fields
        attrs = default_bootstrap_header_column_attrs
        template_name = "django_tables2/bootstrap4.html"
class Top10KyuTable(tables.Table):
    """Top-10 list of kyu-rated players (linked name, sigma, rating)."""

    pin_player = tables.Column(
        orderable=False,
        linkify={
            "viewname": "beta:players_profile",
            "args": [tables.A("pin_player.member_id")],
        },
    )
    sigma = tables.Column(orderable=False)
    rating = tables.Column(orderable=False)

    def render_pin_player(self, value):
        """Render the player cell, or None when no Member row exists.

        The query is an existence check only; its result was previously bound
        to an unused local.
        """
        try:
            # Raises ObjectDoesNotExist for ratings rows with no Member record.
            agagd_models.Member.objects.values("full_name", "member_id").get(
                member_id=value.member_id
            )
            return f"{value}"
        except ObjectDoesNotExist:
            return None

    class Meta:
        model = agagd_models.Players
        fields = ("pin_player", "sigma", "rating")
        sequence = fields
        attrs = default_bootstrap_header_column_attrs
        template_name = "django_tables2/bootstrap4.html"
| {
"repo_name": "usgo/agagd",
"path": "agagd/agagd_core/tables/beta.py",
"copies": "1",
"size": "7345",
"license": "mit",
"hash": 6620626038129519000,
"line_mean": 29.6041666667,
"line_max": 87,
"alpha_frac": 0.6016337645,
"autogenerated": false,
"ratio": 3.7570332480818416,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.985649007252036,
"avg_score": 0.00043538801229625504,
"num_lines": 240
} |
#again rerun
from lxml import html,etree
import os
from matlablike import *
from unidecode import unidecode
import re
import sys
# Undo the "one word per line" transform: rebuild normal text, restoring
# protected spaces (%SPACE%) and newlines (%NEWLINE%), and isolating git
# conflict markers on protected lines so they survive the round trip.
# (Removed the unused normalline_re and found_beginning bindings, and the
# redundant ''.join() over what is already a single string.)
fp = open(sys.argv[1],'r')
# a space is required between a word (plus trailing punctuation) and the next word
needsspace_re = re.compile(r'(\w[):;"\-\.,!?}]*) +(["(]*\w)')
paragraphcommands_re = re.compile(r'^ *\\(sub)*paragraph{.*}')
commentline_re = re.compile(r'^ *%')
notweird_re = re.compile(r'^(%SPACE%)|(%\[ORIG%)|(%ORIG\]\[NEW%)|(%NEW\]%)')
text_list = []
print('opened',sys.argv[1])
#{{{ pull out just the part between the document text
for thisline in fp:
    if thisline[:7] in ('<<<<<<<', '=======', '>>>>>>>'):
        # git conflict markers: tag them and protect their line breaks
        text_list.append('%NEWLINE% %CONFLICT%'+thisline.strip()+'%NEWLINE%')
    else:
        text_list.append(thisline.rstrip())
fp.close()
text_list = [x.replace('%NEWLINE%','\n') for x in text_list]
#{{{ don't mess with the "special" lines
for j,thisline in enumerate(text_list):
    if not notweird_re.match(thisline):
        if paragraphcommands_re.match(thisline) or commentline_re.match(thisline):
            print("found special line '",thisline,"'")
            text_list[j] = thisline.replace(' ',' %SPACE% ')
#}}}
text_list = ' '.join(text_list)
text_list = needsspace_re.sub(r'\1 %SPACE% \2',text_list)
text_list = needsspace_re.sub(r'\1 %SPACE% \2',text_list)#again to catch the single letter ones
text_list = text_list.replace(' ','')  # drop the artificial single-word separators...
text_list = text_list.replace('%SPACE%',' ')  # ...then restore the real spaces
#{{{ write out the result
newfile = re.sub(r"(.*)(\..*)",r'\1_1wordcollapse\2',sys.argv[1])
fp = open(newfile,'w')
fp.write(text_list)  # text_list is already a single string at this point
fp.close()
#}}}
| {
"repo_name": "jmfranck/pyDiffTools",
"path": "pydifftools/onewordify_undo.py",
"copies": "1",
"size": "1676",
"license": "bsd-3-clause",
"hash": 654094680259464300,
"line_mean": 37.976744186,
"line_max": 97,
"alpha_frac": 0.6217183771,
"autogenerated": false,
"ratio": 2.816806722689076,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3938525099789076,
"avg_score": null,
"num_lines": null
} |
#again rerun
from lxml import html,etree
import os
from matlablike import *
from unidecode import unidecode
import re
import sys
# Split the input into one token per line ("one-wordify") so diff/merge tools
# can work at word granularity.  Real spaces become %SPACE% lines, real line
# breaks become %NEWLINE% lines, and citations/references are protected so
# they are never split apart.
fp = open(sys.argv[1],'r')
paragraphcommands_re = re.compile(r'^ *\\(sub)*paragraph{.*}')
commentline_re = re.compile(r'^ *%')
beginlatex_re = re.compile(r'^[^#]*\\begin{document}(.*)')
endlatex_re = re.compile(r'^([^#]*)\\end{document}.*')
commandstart_re = re.compile(r'(\\[a-zA-Z]+[\[{])')
word_citation_re = re.compile(r'(\[[0-9 ,]+\][,\.)]*)')
tex_citation_re = re.compile(r'(.*)(\\cite{[a-zA-Z0-9,_]+}[,\.)]*)(.*)$')
tex_ref_re = re.compile(r'(.*)(\\c*ref{[a-zA-Z0-9,_:\-]+}[,\.)]*)(.*)$')
text_list = []
# LaTeX files get special handling of the \begin{document}..\end{document} span
if sys.argv[1][-4:] == '.tex':
    latex_file = True
else:
    latex_file = False
found_beginning = False
start_line = 0
end_line = 0
print('opened',sys.argv[1])
#{{{ pull out just the part between the document text
j = 0
for thisline in fp:
    # normalize Word's non-breaking spaces and smart quotes/dashes
    thisline = thisline.replace('\xa0',' ')# because word sucks
    thisline = thisline.replace('\x93','``')# this and following are just pulled from vim
    thisline = thisline.replace('\x94',"''")
    thisline = thisline.replace('\x96',"--")
    j += 1
    if latex_file:
        if not found_beginning:
            thismatch = beginlatex_re.match(thisline)
            if thismatch:
                text_list.append(thismatch.groups()[0].rstrip())
                found_beginning = True
                start_line = j+1
                print('Found the beginning at line',start_line)
        else:
            thismatch = endlatex_re.match(thisline)
            if thismatch:
                text_list.append(thismatch.groups()[0].rstrip())
                print('Found the end')
                end_line = j
                print('Found the end at line',end_line)
            text_list.append(thisline)
    else:
        text_list.append(thisline.replace('$$','')) #no better place to check for the tex dollar sign double-up
if end_line == 0:
    end_line = len(text_list)
fp.close()
j = 0
while j < len(text_list):# first, put citations on their own line, so I can next treat them as special lines
    thismatch = tex_citation_re.match(text_list[j])
    othermatch = tex_ref_re.match(text_list[j])
    if othermatch:
        thismatch = othermatch
    if thismatch:
        # replace the line with three: text-before, the citation, text-after
        text_list.pop(j)
        text_list.insert(j,thismatch.groups()[2])# push on backwards, so it shows up in the right order
        text_list.insert(j,thismatch.groups()[1].replace(' ','\n%SPACE%\n')+'%NONEWLINE%\n')# since these are "fake" newlines, make sure they don't get broken! -- also to preserve spaces, I'm pre-processing the spacing here
        text_list.insert(j,thismatch.groups()[0].replace(' ','\n%SPACE%\n')+'%NONEWLINE%\n')
        print("found citation or reference, broke line:",text_list[j],text_list[j+1],text_list[j+2])
        print("---")
        j+=1# so that we skip the citation we just added
        end_line+=2#because we added two lines
    j+=1
for j in range(0,len(text_list)):
    thismatch = paragraphcommands_re.match(text_list[j])
    if thismatch:
        text_list[j] = text_list[j].replace('\n','%NEWLINE%\n') # these lines are protected/preserved from being chopped up, since they are invisible
        print('found paragraph line:',text_list[j])
    else:
        thismatch = tex_citation_re.match(text_list[j])
        if not thismatch:
            thismatch = tex_ref_re.match(text_list[j])
        if thismatch:
            print("found citation line:",text_list[j])
        else:
            # ordinary line: break at TeX syntax boundaries and at every space
            text_list[j] = text_list[j].replace('~','\n~\n')
            text_list[j] = commandstart_re.sub('\\1\n',text_list[j])
            text_list[j] = word_citation_re.sub('\n\\1\n',text_list[j])
            text_list[j] = text_list[j].replace('}','\n}\n')
            text_list[j] = text_list[j].replace(']{','\n]{\n')
            text_list[j] = text_list[j].replace(' ','\n%SPACE%\n')
    # len('%NONEWLINE%\n') == 12; strip that guard, otherwise mark a real break
    if text_list[j][-12:] == '%NONEWLINE%\n':
        print("trying to drop NONEWLINE going from:")
        print(text_list[j])
        text_list[j] = text_list[j][:-12]+'\n'
        print('to:\n',text_list[j])
    else:
        print("line ends in:",text_list[j][-12:])
        text_list[j] += '%NEWLINE%\n'
    text_list[j] = text_list[j].replace('\r','\n%NEWLINE%\n')
#}}}
#{{{ write out the result
outputtext = ''.join(text_list)
outputtext = outputtext.split('\n')
outputtext = [j for j in outputtext if len(j)>0]
if not latex_file: # easier to just strip the tags here
    print("this is not a latex file")
    outputtext = [j for j in outputtext if j!='%SPACE%' and j!='%NEWLINE%']
else:
    print("this is a latex file")
    # NOTE(review): start_line/end_line count input-file lines, but index
    # text_list here, and text_list only holds lines after \begin{document}
    # (shifted further by citation insertions) -- confirm the slice bounds.
    outputtex = ''.join(text_list[start_line:end_line]) #up to but not including the end document
    outputtex = outputtex.split('\n')
    outputtex = [j for j in outputtex if len(j)>0]
    outputtex = [j for j in outputtex if j[0]!='%'] #takes care of space and newline as well as tex comments
newfile = re.sub(r"(.*)(\..*)",r'\1_1word\2',sys.argv[1])
fp = open(newfile,'w')
fp.write('\n'.join(outputtext))
fp.close()
if latex_file:
    newfile = re.sub(r"(.*)(\..*)",r'\1_1wordstripped\2',sys.argv[1])
    fp = open(newfile,'w')
    fp.write('\n'.join(outputtex))
    fp.close()
#}}}
| {
"repo_name": "jmfranck/pyDiffTools",
"path": "pydifftools/onewordify.py",
"copies": "1",
"size": "5230",
"license": "bsd-3-clause",
"hash": 380223453828810700,
"line_mean": 42.2231404959,
"line_max": 223,
"alpha_frac": 0.590248566,
"autogenerated": false,
"ratio": 3.113095238095238,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4203343804095238,
"avg_score": null,
"num_lines": null
} |
#again rerun
from lxml import html,etree
import os
from pyspecdata.fornotebook import *
from pyspecdata import *
import re
# Collect all of Word's inline comment <span>s into a single
# "mso-element:comment-list" <div> appended to the document body, then write
# the modified HTML next to the input as *_htmlcomm*.
# (Removed the unused comment_dict, commentlabel_re, and newlist bindings.)
fp = open(sys.argv[1],'r')
content = fp.read()
fp.close()
doc = html.fromstring(content)
thisbody = doc.find('body')
print('I found the body',lsafen(thisbody))
commentlist = etree.Element('div',style = 'mso-element:comment-list')
for j in doc.xpath('//span[@style="mso-element:comment"]'):
    print('found span with style:\n\n',lsafen(j.attrib,wrap = 60))
    # detach the comment span from wherever it sits and re-home it in the list
    j.drop_tree()
    commentlist.append(j)
thisbody.append(commentlist)
newfile = re.sub(r"(.*)(\.htm.*)",r'\1_htmlcomm\2',sys.argv[1])
fp = open(newfile,'w')
# bug fix: encoding='unicode' makes tostring() return str rather than bytes,
# which a text-mode file rejects with TypeError under Python 3
content = html.tostring(doc, encoding='unicode')
fp.write(content)
fp.close()
| {
"repo_name": "jmfranck/pyDiffTools",
"path": "pydifftools/html_comments.py",
"copies": "1",
"size": "1117",
"license": "bsd-3-clause",
"hash": -1091251118477178800,
"line_mean": 33.90625,
"line_max": 74,
"alpha_frac": 0.6768128917,
"autogenerated": false,
"ratio": 2.806532663316583,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8913500816644991,
"avg_score": 0.013968947674318253,
"num_lines": 32
} |
#again rerun
from lxml import html,etree
import os
from pyspecdata import *
from unidecode import unidecode
import re
import sys
from .comment_functions import generate_alphabetnumber,matchingbrackets,comment_definition
manual_math_conversion = False # this hacks some stuff that pandoc does much better
fp = open(sys.argv[1],'r')
content = fp.read()
fp.close()
#comrefwithnewline_re = re.compile(r"('mso-comment-reference:[^']*)[\n ]+")
#{{{ need to remove weird linebreaks with the following, or it doesn't interpret the styles correctly
# collapse "attr:\n   value" onto one line
newcontent = re.sub(r":\n *",r':',content)
content = newcontent
# repeatedly glue back mso-comment-reference style values that Word wrapped,
# iterating to a fixed point
newcontent = re.sub(r"('mso-comment-reference:[^']*)[\n ]+",r'\1',content)
while content != newcontent:
    content = newcontent
    newcontent = re.sub(r"('mso-comment-reference:[^']*)[\n ]+",r'\1',content)
content = newcontent
# ensure every mso-comment-reference style value is terminated with ';'
newcontent = re.sub(r"('mso-comment-reference:[^'\"]*[^;])(['\"])",r'\1;\2',content)
while content != newcontent:
    content = newcontent
    newcontent = re.sub(r"('mso-comment-reference:[^'\"]*[^;])(['\"])",r'\1;\2',content)
content = newcontent
# NOTE(review): this looks like an escape dance meant to turn bare '%' into
# '\%', but step 2 also rewrites the '%' signs of the markers inserted by
# step 1, so the net effect on pre-escaped '\%' is unclear -- confirm intent.
content = content.replace(r'\%',r"%EXPLICITPAREN%")
content = content.replace(r'%',r"%EXPLICITPAREN%")
content = content.replace(r'%EXPLICITPAREN%',r"\%")
# Optional hand-rolled Unicode -> LaTeX translation; pandoc normally handles
# this better, so it is disabled by default via manual_math_conversion.
# Greek letters and operators become math via the %ENTERMATHMODE% /
# %LEAVEMATHMODE% markers that decodemathmode() later turns into $...$.
if manual_math_conversion:
    content = content.replace('Δ',r'%ENTERMATHMODE%\Delta%LEAVEMATHMODE%')
    content = content.replace('\xb0C',r'\degC ')
    content = content.replace(' \xb5M',r'\uM ')
    content = content.replace('\xb5M',r'\uM ')
    content = content.replace('α',r'%ENTERMATHMODE%\alpha%LEAVEMATHMODE%')
    content = content.replace('β',r'%ENTERMATHMODE%\beta%LEAVEMATHMODE%')
    content = content.replace('γ',r'%ENTERMATHMODE%\gamma%LEAVEMATHMODE%')
    content = content.replace('δ',r'%ENTERMATHMODE%\delta%LEAVEMATHMODE%')
    content = content.replace('ε',r'%ENTERMATHMODE%\varepsilon%LEAVEMATHMODE%')
    content = content.replace('ζ',r'%ENTERMATHMODE%\zeta%LEAVEMATHMODE%')
    content = content.replace('η',r'%ENTERMATHMODE%\eta%LEAVEMATHMODE%')
    content = content.replace('θ',r'%ENTERMATHMODE%\theta%LEAVEMATHMODE%')
    content = content.replace('ι',r'%ENTERMATHMODE%\iota%LEAVEMATHMODE%')
    content = content.replace('κ',r'%ENTERMATHMODE%\kappa%LEAVEMATHMODE%')
    content = content.replace('λ',r'%ENTERMATHMODE%\lambda%LEAVEMATHMODE%')
    content = content.replace('μ',r'%ENTERMATHMODE%\mu%LEAVEMATHMODE%')
    content = content.replace('ν',r'%ENTERMATHMODE%\nu%LEAVEMATHMODE%')
    content = content.replace('ξ',r'%ENTERMATHMODE%\xi%LEAVEMATHMODE%')
    content = content.replace('ο',r'%ENTERMATHMODE%\omicron%LEAVEMATHMODE%')
    content = content.replace('π',r'%ENTERMATHMODE%\pi%LEAVEMATHMODE%')
    content = content.replace('ρ',r'%ENTERMATHMODE%\rho%LEAVEMATHMODE%')
    content = content.replace('σ',r'%ENTERMATHMODE%\sigma%LEAVEMATHMODE%')
    content = content.replace('τ',r'%ENTERMATHMODE%\tau%LEAVEMATHMODE%')
    content = content.replace('φ',r'%ENTERMATHMODE%\varphi%LEAVEMATHMODE%')
    content = content.replace('χ',r'%ENTERMATHMODE%\chi%LEAVEMATHMODE%')
    content = content.replace('ψ',r'%ENTERMATHMODE%\psi%LEAVEMATHMODE%')
    content = content.replace('ω',r'%ENTERMATHMODE%\omega%LEAVEMATHMODE%')
    content = content.replace('′',r'%ENTERMATHMODE%\'%LEAVEMATHMODE%')
    content = content.replace('—',r"--")
    content = content.replace('’',r"'")
    content = content.replace('“',r'``')
    content = content.replace('”',r"''")
    content = content.replace('ℜ',r"%ENTERMATHMODE%\Re%LEAVEMATHMODE%")
    content = content.replace('⇒',r"%ENTERMATHMODE%\Rightarrow%LEAVEMATHMODE%")
    content = content.replace('⇐',r"%ENTERMATHMODE%\Leftarrow%LEAVEMATHMODE%")
    # NOTE(review): \Sum is not a standard LaTeX macro (likely \sum or a
    # project-local macro) -- confirm.
    content = content.replace('∑',r"%ENTERMATHMODE%\Sum%LEAVEMATHMODE%")
    content = content.replace('−',r'--')
    content = content.replace('∕',r"/")
    content = content.replace('∗',r"%ENTERMATHMODE%^*%LEAVEMATHMODE%")
    content = content.replace('∼',r'%ENTERMATHMODE%\sim%LEAVEMATHMODE%')
    content = content.replace('∝',r'%ENTERMATHMODE%\propto%LEAVEMATHMODE%')
    content = content.replace('∞',r'%ENTERMATHMODE%\infty%LEAVEMATHMODE%')
    content = content.replace('≈',r'%ENTERMATHMODE%\approx%LEAVEMATHMODE%')
    content = content.replace('≡',r'%ENTERMATHMODE%\equiv%LEAVEMATHMODE%')
    content = content.replace('≤',r'%ENTERMATHMODE%\le%LEAVEMATHMODE%')
    content = content.replace('≥',r'%ENTERMATHMODE%\ge%LEAVEMATHMODE%')
    content = content.replace('≪',r'%ENTERMATHMODE%\ll%LEAVEMATHMODE%')
    content = content.replace('≫',r'%ENTERMATHMODE%\gg%LEAVEMATHMODE%')
    content = content.replace('⋅',r'%ENTERMATHMODE%\cdot%LEAVEMATHMODE%')
    content = content.replace('𝔢',r"%ENTERMATHMODE%\mathfrak{e}%LEAVEMATHMODE%")
    content = content.replace('$$','') # math symbols doubled back on each other
#}}}
#content = re.sub(r'mso-comment-reference:([a-zA-Z_0-9]+)&([a-zA-Z_0-9]+)',r'mso-comment-reference:\1AMPERSAND\2',content)
#content = re.sub(r'mso-comment-reference:[\n ]*([a-zA-Z0-9]+)',r'narg!mso-comment-reference:\1',content)
# Parse the cleaned-up HTML and set up state for harvesting Word comments.
doc = html.fromstring(content)
commentlabel_re = re.compile(r'\[([A-Z]+)([0-9])\]')
inlineequation_re = re.compile(r'\$([^\$]*)\$')
#for j in doc.xpath('descendant::*[@style="mso-element:comment"]'):
thisbody = doc.find('body')
print('I found the body',lsafen(thisbody))
#commentlist = etree.Element('div',style = 'mso-element:comment-list')
num = 0  # number of comment-list divs seen
numcomments = 0  # number of individual comments seen
numcompara = 0  # paragraphs within the current comment
comment_dict = {}  # comment number (string) -> plain-text comment body
comment_label_re = re.compile(r'_com_([0-9]+)')
# drop Symbol-font wrapper tags so their text joins the surrounding flow
for j in doc.xpath('//*[contains(@style,"font-family:Symbol")]'):
    print('found symbol with text"',j.text,'" and dropped the tag')
    j.drop_tag()
# Walk each Word comment-list div, extract every comment's label and text
# into comment_dict, and remove the comment markup from the tree.
for j in doc.xpath('//div[@style="mso-element:comment-list"]'):
    num += 1
    for k in j.xpath('descendant-or-self::*[@style="mso-element:comment"]'):
        numcomments += 1
        numcompara = 0
        commenttext = []
        # Extract one paragraph's label (from the msocomtxt id) and text.
        # NOTE(review): closes over k; commentlabel is only bound when a
        # msocomtxt with a matching id exists -- otherwise the return raises
        # NameError.  Confirm every comment carries an id.
        def process_comment_text(thistag,numcompara,commenttext):
            for m in k.find_class('msocomtxt'):
                mymatch = comment_label_re.match(m.attrib['id'])
                if mymatch:
                    commentlabel = mymatch.groups()[0]
                    print("that means it's comment",commentlabel)
                else:
                    raise ValueError("I don't understand what the comment id "+m.attrib['id']+' means')
            numcompara += 1
            # drop the comment-anchor special characters before taking the text
            for m in thistag.xpath('descendant-or-self::span[@style="mso-special-character:comment"]'):
                m.drop_tree()
                print('dropped special character')
            commenttext.append(unidecode(thistag.text_content()))
            return commentlabel,numcompara
        found_something = False
        class_types = ['MsoCommentText','MsoNormal','indent','noindent']
        for class_type in class_types:
            for l in k.find_class(class_type):
                commentlabel,numcompara = process_comment_text(l,numcompara,commenttext)
                found_something = True
        if not found_something:
            print(("Wargning: I found no "+','.join(class_types)+" in this comment --\n%s\n -- in the future, should search by paragraph tag, instead"%html.tostring(k)))
        k.drop_tree() # drop the stuff at the end
        print('for comment %d, I find %d paragraphs'%(numcomments,numcompara))
        comment_dict[commentlabel] = '\n\n'.join(commenttext)
        print('text looks like this:',comment_dict[commentlabel])
        # and load into the dictionary
        #{{{ remove the children, set the comment text as the text, and drop the tag
        #for l in k.getchildren():
        # l.drop_tree()
        #k.text = '\n\n'.join(commenttext)
        #k.drop_tag()
        #}}}
        #print 'comment %d is:'%numcomments,html.tostring(k)
        # print 'for comment',numcomments,':'
        # print unicode(l.text_content()).encode('utf-8')
        #print 'found span with style:\n\n',lsafen(html.tostring(j),wrap = 60)
        #if j.attrib['style'] == 'mso-element:comment':
        #print 'found div with style:\n\n',lsafen(j.attrib,wrap = 60)
        # print "found p with class MsoCommentText:"
        # print unicode(k.text_content()).encode('utf-8')
        #j.drop_tree()
        #j.append("a comment found here")
        #commentlist.append(j)
print("I found %d comment lists and %d comments"%(num,numcomments))
# Map the reviewers' initials (as they appear in Word comment labels like
# "[JF3]") to the LaTeX comment-macro names used in comment_definition().
initial_translation_dict = {'JF':'john','y':'yuan','CoLA&S':'peter','SH':'songi',"PQ":"peter",'KE':"keith"}
commentlabel_re = re.compile(r'\[([A-Za-z&]+)([0-9]+)\]')
commentid_re = re.compile(r'_anchor_([0-9]+)')
numcomrefs = 0
numcomrefsrepd = 0
comment_file_text = ''  # accumulates the LaTeX comment definitions
current_comment_number = 0
for thiscommentreference in doc.find_class('MsoCommentReference'):
    thiscommentreference.drop_tag()
# Replace each comment anchor with a LaTeX macro wrapping the highlighted text.
for thiscommentreference in doc.find_class('msocomanchor'):
    comref_text = thiscommentreference.text
    if comref_text is not None:
        m = commentlabel_re.match(comref_text)
        if m:
            initials,number = m.groups()
            try:
                print("I found comment %s by %s"%(number,initial_translation_dict[initials]))
            except KeyError:
                raise ValueError("I don't know who %s is -- add to initial_translation_dict"%initials)
            thiscommentreference.text = ''
            thiscommentreference.drop_tag()
            prevcomrefsrepd = numcomrefsrepd
            # find the span(s) of highlighted text this comment refers to
            for k in doc.xpath('descendant-or-self::*[contains(@style,"mso-comment-reference:%s_%s;")]'%(initials,number)):
                print("\nThis reference has the text:",html.tostring(k))
                if k.text is None:
                    k.text = ''
                empty_tag = False
                if k.text == '': empty_tag = True
                if number not in list(comment_dict.keys()):
                    raise KeyError(repr(number)+'is not in comment_dict keys: '+repr(list(comment_dict.keys())))
                # john's "(need to do:) " comments become \ntd macros instead
                if (len(comment_dict[number])>13) and (comment_dict[number][:14] == '(need to do:) ') and (initial_translation_dict[initials]=='john'):#if it's a "need to do"
                    #k.text = r'\%s['%('ntd')+k.text_content().replace('[',' ').replace(']',' ')+']{'+comment_dict[number][14:]+'}'
                    k.text = r'\%s%s{'%('ntd',generate_alphabetnumber(current_comment_number))+k.text_content().replace('[',' ').replace(']',' ')+'}'
                    comment_file_text += comment_definition('ntd'+generate_alphabetnumber(current_comment_number),'ntd',comment_dict[number][14:])
                    current_comment_number += 1
                else:
                    k.text = r'\%s%s{'%(initial_translation_dict[initials],generate_alphabetnumber(current_comment_number))+k.text_content().replace('[',' ').replace(']',' ')+'}'
                    comment_file_text += comment_definition(initial_translation_dict[initials]+generate_alphabetnumber(current_comment_number),
                            initial_translation_dict[initials],
                            comment_dict[number])
                    current_comment_number += 1
                k.drop_tag()
                print("I convert it to this:",html.tostring(k))
                numcomrefsrepd += 1
                #if numcomrefsrepd > prevcomrefsrepd+1:
                # if not empty_tag: raise RuntimeError("Warning: For some reason this comment is referenced twice!!:\n\n"+html.tostring(thiscommentreference))
            if prevcomrefsrepd==numcomrefsrepd:
                print("Warning: I can't find the highlighted text for the comment:\n\n"+html.tostring(thiscommentreference)+"so I'm dropping it")
        else:
            raise RuntimeError("Warning, I couldn't parse this!!")
        numcomrefs += 1
    else:
        print("Warning, found a comment with no text")
print("I found %d comment references and replaced %d"%(numcomrefs,numcomrefsrepd))
# Convert <sub>/<sup> into LaTeX _{...}/^{...} (only in manual mode).
if manual_math_conversion:
    for j in doc.xpath('//sub'):
        thistext = j.text_content()
        #{{{ remove children
        for l in j.getchildren():
            l.drop_tree()
        #}}}
        if len(thistext)>0:
            if j.tail is None: j.tail = ''
            thistail = j.tail
            j.tail = ''
            j.text = '%%ENTERMATHMODE%%_{%s}%%LEAVEMATHMODE%%'%thistext + thistail
        #j.text = '\\ensuremath{_{'+inlineequation_re.sub('\1',j.text)
        #j.tail = inlineequation_re.sub('\1',j.tail)+'}}'
        j.drop_tag()
    for j in doc.xpath('//sup'):
        # NOTE(review): .encode("utf-8") yields bytes on Python 3, so the
        # '%s' interpolation below would render as b'...' -- confirm/port.
        thistext = j.text_content().encode("utf-8")
        #{{{ remove children
        for l in j.getchildren():
            l.drop_tree()
        #}}}
        if len(thistext)>0:
            if j.tail is None: j.tail = ''
            thistail = str(j.tail)
            j.tail = ''
            j.text = '%%ENTERMATHMODE%%^{%s}%%LEAVEMATHMODE%%'%thistext + thistail
        j.drop_tag()
#for j in doc.xpath('//*[contains(@class,"cmmi")]'):
# Text set in the Computer Modern math fonts (cmmi/cmr/cmsy at the known
# sizes) is wrapped in math-mode markers; a later check verifies none remain.
for mathmodefontsize in [7,8,12,81,121]:
    for mathmodefonttype in ['cmmi','cmr','cmsy']:
        for j in doc.find_class('%s-%d'%(mathmodefonttype,mathmodefontsize)):# find the math-mode stuff
            thistext = str(unidecode(j.text_content()))
            #{{{ remove children
            for l in j.getchildren():
                l.drop_tree()
            #}}}
            if len(thistext)>0:
                if j.tail is None: j.tail = ''
                thistail = unidecode(j.tail)
                j.tail = ''
                j.text = '%%ENTERMATHMODE%%%s%%LEAVEMATHMODE%%'%thistext + thistail
            #j.text = '\\ensuremath{_{'+inlineequation_re.sub('\1',j.text)
            #j.tail = inlineequation_re.sub('\1',j.tail)+'}}'
            j.drop_tag()
# Symbol-font glyphs arrive as ordinary letters; map them to LaTeX macros.
symbol_lookup = {'x':'\\xi ',
        'p':'\\pi',
        'k':'\\kappa',
        's':'\\sigma',
        'y':'\\psi',
        'h':'\\eta',
        'N':'\\Nu',
        'n':'\\nu',
        'e':'\\epsilon',
        'o':'\\omicron',
        'r':'\\rho',
        ' ':' ',
        '_':'_',
        '{':'{',
        '}':'}'}
# Word's grammar-checker spans carry no meaning here; unwrap them.
for j in doc.find_class("GramE"):
    j.drop_tag()
# Translate Symbol-font runs character by character into LaTeX math, passing
# through any embedded \commands verbatim.
for j in doc.xpath('//*[contains(@style,"font-family:Symbol")]'):
    newtext = '%ENTERMATHMODE%'
    thistail = str(j.tail)
    j.tail = ''
    thistext = str(j.text)
    k_index = 0
    while k_index < len(thistext):
        k = thistext[k_index]
        # NOTE(review): the two k = thistext[k_index] reads below can raise
        # IndexError if the text ends mid-command -- confirm inputs.
        while k_index < len(thistext) and k=='\\':
            print("found command")
            print("pass %s\n"%k)
            newtext = newtext + k
            k_index += 1
            k = thistext[k_index]
            while k_index < len(thistext) and k not in [' ','\\','{']:
                #gobble up commands
                print("pass %s\n"%k)
                newtext = newtext + k
                k_index += 1
                k = thistext[k_index]
        try:
            # bare except below also hides non-KeyError failures -- kept as-is
            newtext = newtext + symbol_lookup[k]
        except:
            raise ValueError("symbol for symbol font '%s' not found! Open the script and put it in the symbol_lookup dictionary"%k)
        k_index += 1
    newtext = newtext + '%LEAVEMATHMODE%'
    j.text = newtext + thistail
    j.drop_tag()
# Serialize the document and rewrite unescaped "$" delimiters as explicit
# math-mode markers so decodemathmode() below can rebalance them.
newfile = re.sub(r"(.*)(\.htm.*)",r'\1_texcomm\2',sys.argv[1])
fp = open(newfile,'w')
# bug fix: encoding='unicode' makes tostring() return str rather than bytes;
# the str searches below and the text-mode fp.write() otherwise fail on py3
content = html.tostring(doc, encoding='unicode')
#content = content.replace('$$','')
for mathmodefonttype in ['cmmi','cmr','cmsy']:
    # a surviving math-font class means the font-size list above was incomplete
    if content.find('class=%s-'%mathmodefonttype)>0:
        raise ValueError("error, I see a string '%s' which indicates math mode, but apparently you're not searching for the correct font size, so go add the font into the list of math mode font sizes"%content[content.find('%s-'%mathmodefonttype):content.find('%s-'%mathmodefonttype)+14])
content_list = list(content)
inmathmode = False
for j in range(0,len(content_list)):
    if content_list[j] == '$':
        if content_list[j-1] != '\\':
            if inmathmode:
                content_list[j] = '%LEAVEMATHMODE%'
                inmathmode = False
            else:
                content_list[j] = '%ENTERMATHMODE%'
                inmathmode = True  # bug fix: was the undefined name TRUE (NameError)
content = ''.join(content_list)
#content = content.replace('%ENTERMATHMODE%','$')
#content = content.replace('%LEAVEMATHMODE%','$')
def decodemathmode(arg):
    """Collapse %ENTERMATHMODE%/%LEAVEMATHMODE% markers into $...$ pairs.

    Adjacent math regions separated only by punctuation/digits are merged,
    consecutive sub/superscripts are fused, and nested marker pairs are
    flattened, so the result contains balanced single-dollar math.
    """
    for _pass in range(0, 20):
        # just take a couple more passes to be sure (idempotent after the first)
        arg = re.sub(r'([(),\.0-9]*)%LEAVEMATHMODE%([(),\.0-9]*)%ENTERMATHMODE%([(),\.0-9]*)', r'\1\2\3', arg)
        arg = re.sub(r'_{([^}]*)}_{([^}]*)}', r'_{\1\2}', arg)
        arg = re.sub(r'\^{([^}]*)}\^{([^}]*)}', r'^{\1\2}', arg)
        nextenter = arg.find('%ENTERMATHMODE%')
        # bug fix: was "> 0", which silently skipped a marker at position 0
        while nextenter != -1:
            arg = arg.replace('%ENTERMATHMODE%', '$', 1)
            nextenter = arg.find('%ENTERMATHMODE%')
            nextexit = arg.find('%LEAVEMATHMODE%')
            replaced = True
            while replaced:
                # a further ENTER before the next LEAVE means a nested region:
                # gobble up the inner marker pair.  bug fix: the original test
                # "nextenter < nextexit" also fired when no ENTER remained
                # (find() returns -1), deleting the closing marker instead of
                # emitting '$'.
                if nextenter != -1 and nextenter < nextexit:
                    arg = arg.replace('%ENTERMATHMODE%', '', 1)
                    arg = arg.replace('%LEAVEMATHMODE%', '', 1)
                    nextenter = arg.find('%ENTERMATHMODE%')
                    nextexit = arg.find('%LEAVEMATHMODE%')
                    replaced = True
                else:
                    arg = arg.replace('%LEAVEMATHMODE%', '$', 1)  # close this math environment
                    replaced = False
            nextenter = arg.find('%ENTERMATHMODE%')
            print("next enter is at", nextenter)
    return arg
# Decode the math markers and write the *_texcomm* HTML, then dump its plain
# text and the collected LaTeX comment definitions alongside it.
content = decodemathmode(content)
fp.write(content)
fp.close()
fp = open(newfile,'r')
content = fp.read()
fp.close()
textfile = re.sub(r"(.*)(\.htm.*)",r'\1.txt',newfile)
doc = html.fromstring(content)
fp = open(textfile,'w')
fp.write(unidecode(doc.text_content()))
fp.close()
textfile = re.sub(r"(.*)(\.htm.*)",r'\1_comments.tex',newfile)
fp = open(textfile,'w',encoding='utf-8')
# bug fix: write the str directly -- the original .encode('utf-8') produced
# bytes, which a text-mode file rejects with TypeError under Python 3
fp.write(decodemathmode(comment_file_text))
fp.close()
| {
"repo_name": "jmfranck/pyDiffTools",
"path": "pydifftools/html_uncomments.py",
"copies": "1",
"size": "18153",
"license": "bsd-3-clause",
"hash": -6997942207925776000,
"line_mean": 49.8487394958,
"line_max": 287,
"alpha_frac": 0.597477001,
"autogenerated": false,
"ratio": 3.278490157124797,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9280903540835631,
"avg_score": 0.01901272345783328,
"num_lines": 357
} |
#again rerun
from lxml import html,etree
import os
import re
def run(arguments):
needsspace_re = re.compile(r'(\w[):;"-\.,!}]*) +([^%}~])')
allmarkers_re = re.compile(r'(.*?)%(\[ORIG|ORIG\]\[NEW|NEW\])%(.*\n*)')# in all these, the newline at the end is just so it doesn't gobble up the newline
def parse_line(orig_log_text,new_log_text,log_in_orig,log_in_new,text_to_parse):
match = allmarkers_re.match(text_to_parse)
if match:
textone,typeofmarker,texttwo = match.groups()
if typeofmarker == 'NEW]':
print("found a matching line, current status is (",log_in_orig,',',log_in_new,")")
if log_in_new and not log_in_orig:
switchto = (True,True)# log in orig, log in new
print("in text:\n",text_to_parse,"\n--> encountered an end marker, switching to",switchto)
else:
raise ValueError("I encountered an %NEW]% marker, but I was not leaving orig along and logging only in new (False,True), but rather "+repr(log_in_orig)+','+repr(log_in_new)+":\n"+text_to_parse)
elif typeofmarker == 'ORIG][NEW':
print("found a matching line, current status is (",log_in_orig,',',log_in_new,")")
if log_in_orig and not log_in_new:
switchto = (False,True)# log in orig, log in new
print("in text:\n",text_to_parse,"\n--> encountered a middle marker, switching to",switchto)
else:
raise ValueError("I encountered an %ORIG][NEW% marker, but I was not logging in orig but not in new, but rather "+repr(log_in_orig)+','+repr(log_in_new),":\n",text_to_parse)
elif typeofmarker == '[ORIG':
print("found a matching line, current status is (",log_in_orig,',',log_in_new,")")
if log_in_new and log_in_orig:
switchto = (True,False)# log in orig, log in new
print("in text:\n",text_to_parse,"\n--> encountered an %[ORIG% marker, switching to",switchto)
else:
raise ValueError("I encountered an %[ORIG% marker, but I was not logging in both orig and new, but rather"+repr(log_in_orig)+','+repr(log_in_new)+":\n"+text_to_parse)
else:
textone = text_to_parse
texttwo = None
#}}} check to see if I have a separator
# regardless, dump the first group into the current bin
if log_in_orig:
orig_log_text += textone
if log_in_new:
new_log_text += textone
if match:
log_in_orig,log_in_new = switchto
print("yes, I am actually switching the binning")
print("so that status is (",log_in_orig,',',log_in_new,")")
# if there is a second group (if I have a separator), change which bin I'm in, and add to the end of the current line!
if texttwo is not None:
orig_log_text,new_log_text,log_in_orig,log_in_new = parse_line(orig_log_text,new_log_text,log_in_orig,log_in_new,texttwo)
return orig_log_text,new_log_text,log_in_orig,log_in_new
# Read a merge-conflicted input file and split it into two parallel text
# streams: the "orig" (HEAD) side and the "new" (branch) side, which are
# then written out as <input>.merge_head and <input>.merge_new.
fp = open(arguments[0],'r')
text_list = []  # NOTE(review): appears unused below -- verify before removing
print('opened',arguments[0])
# Both sides log simultaneously until a conflict marker narrows logging down.
log_in_orig = True
log_in_new = True
# Branch titles are captured from the first conflict markers encountered.
head_title = None
new_title = None
#{{{ pull out just the part between the document text
orig_textlist = []  # lines belonging to the HEAD side
new_textlist = []  # lines belonging to the branch side
j = 0  # line counter; only line 0 can carry the %ONEWORDDIFF format marker
for thisline in fp:
    if j == 0:
        # A leading %ONEWORDDIFF marker selects the "oneword" inline-diff
        # format, whose markers are handled by parse_line instead of the
        # standard <<<<<<< / ======= / >>>>>>> git conflict markers.
        if thisline[:12] == '%ONEWORDDIFF':
            print("found %ONEWORDDIFF marker, title is:")
            head_title = 'HEAD\n'
            new_title = thisline[14:]
            print(new_title)
            this_is_a_onewordfile = True
        else:
            this_is_a_onewordfile = False
        if this_is_a_onewordfile:
            print("I found this to be a oneword format file")
        else:
            print("I did not find this to be a oneword format file")
    if this_is_a_onewordfile:# this is only stored if it's a onewordfile
        #new processing for oneworddiff
        #{{{ check to see if I have a separator, and set switchto, to show where I switch
        orig_log_text,new_log_text,log_in_orig,log_in_new = parse_line('','',log_in_orig,log_in_new,thisline)
        if len(orig_log_text) > 0:
            orig_textlist.append(orig_log_text)
        if len(new_log_text) > 0:
            new_textlist.append(new_log_text)
    else:
        #standard processing
        if thisline[-11:] == '%FIRSTSET%\n': # if the first set, treat like it's not a comment
            if log_in_orig:
                orig_textlist.append(thisline)
            if log_in_new:
                new_textlist.append(thisline)
        else:
            if (thisline[:7] == '<<<<<<<'):
                # start of the HEAD side of a conflict region
                log_in_orig = True
                log_in_new = False
                if (head_title is None): # for the first marker, store the title
                    head_title = thisline[7:]
                elif thisline[7:] == head_title:
                    pass
                else:
                    raise ValueError("I don't understand line %d, which seems to give an inconsistent head title. It gave:\n%s\nvs expected:\n%s"%(j,thisline[7:],head_title))
            elif (thisline[:7] == '>>>>>>>'):
                # end of a conflict region: both sides resume logging
                log_in_orig = True
                log_in_new = True
                if (new_title is None): # for the first marker, store the title
                    new_title = thisline[7:]
                elif thisline[7:] == new_title:
                    pass
                else:
                    raise ValueError("I don't understand line %d, which seems to give an inconsistent new title. It gave:\n%s\nvs expected:\n%s"%(j,thisline[7:],new_title))
            elif (thisline[:7] == '======='):
                # separator between the two sides: switch to the branch side
                log_in_orig = False
                log_in_new = True
            else:
                # ordinary text line: goes to whichever side(s) are active
                if log_in_orig:
                    orig_textlist.append(thisline)
                if log_in_new:
                    new_textlist.append(thisline)
    j+=1
# NOTE(review): this_is_a_onewordfile is only bound inside the loop, so an
# empty input file would raise NameError here -- verify intended behavior.
if this_is_a_onewordfile:
    print("I found this to be a oneword format file")
else:
    print("I did not find this to be a oneword format file")
fp.close()
#{{{ write out the result
# Each side is written to its own file next to the input, with a banner
# comment recording the branch title it came from.
# NOTE(review): on Python 3.7+, re.sub with pattern "(.*)" also substitutes
# the trailing empty match, doubling the suffix ("x.merge_new.merge_new") --
# verify; plain concatenation (arguments[0] + '.merge_new') may be intended.
newfile = re.sub(r"(.*)",r'\1.merge_new',arguments[0])
fp = open(newfile,'w')
new_textlist = ['#%%%%%BRANCH TITLE (This side is saved): '+new_title] + new_textlist
fp.write(''.join(new_textlist))
fp.close()
newfile = re.sub(r"(.*)",r'\1.merge_head',arguments[0])
fp = open(newfile,'w')
orig_textlist = ['#%%%%%BRANCH TITLE: '+head_title] + orig_textlist
fp.write(''.join(orig_textlist))
fp.close()
#}}}
| {
"repo_name": "jmfranck/pyDiffTools",
"path": "pydifftools/split_conflict.py",
"copies": "1",
"size": "6952",
"license": "bsd-3-clause",
"hash": 1550601529502375200,
"line_mean": 50.8805970149,
"line_max": 213,
"alpha_frac": 0.5309263521,
"autogenerated": false,
"ratio": 3.6705385427666313,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47014648948666315,
"avg_score": null,
"num_lines": null
} |
# again, this is copied liberally from scipy nnls -- see scipy licensing
from .general_functions import redim_F_to_C, redim_C_to_F, strm, inside_sphinx
if not inside_sphinx():
from . import _nnls
from numpy import asarray_chkfinite, zeros, double, isscalar, isfortran
from numpy import array as np_array
import multiprocessing.dummy as mpd
from multiprocessing import cpu_count
import logging
logger = logging.getLogger('pyspecdata.nnls')
__all__ = ['nnls_regularized']
def nnls_regularized(A, b, l=0, maxiter=None):
    r"""
    Solve :math:`\mathrm{argmin}_x || Ax - b ||_2^2 + \lambda^2 ||x||_2^2` for ``x>=0``.

    This is a wrapper for a FORTRAN non-negative least squares solver,
    with regularization (added by stacking :math:`A` on top of an identity
    matrix times :math:`\lambda` and :math:`b` on top of a matching array
    of zeros).

    Parameters
    ----------
    A : ndarray
        Matrix ``A`` as shown above.
    b : ndarray
        Right-hand side vector.
    l : double or iterable of doubles (default 0)
        :math:`\lambda` -- if this is set to 0, the algorithm reverts to
        standard nnls (rather than stacking on top of two zero matrices
        for no reason).  When an iterable is passed, the problem is solved
        once per value, in parallel across a thread pool.
    maxiter: int, optional
        Maximum number of iterations, optional.
        Default is ``3 * A.shape[1]``.

    Returns
    -------
    x : ndarray
        Solution vector.
    rnorm : float
        The residual, ``|| Ax-b ||_2``.

    Notes
    -----
    The docstring is a raw string because it contains LaTeX escapes
    (``\lambda`` etc.); the original non-raw form produced invalid escape
    sequences.  The FORTRAN code was published in the book below.  The
    algorithm is an active set method.  It solves the KKT
    (Karush-Kuhn-Tucker) conditions for the non-negative least squares
    problem.  This was adapted from the source distributed with scipy --
    see scipy for relevant licensing.

    References
    ----------
    Lawson C., Hanson R.J., (1987) Solving Least Squares Problems, SIAM
    """
    logger.debug(strm("isfortran result", isfortran(A), isfortran(b)))
    A, b = list(map(asarray_chkfinite, (A, b)))
    if len(A.shape) != 2:
        raise ValueError("expected matrix")
    if len(b.shape) > 2:
        raise ValueError("expected vector")
    m, n = A.shape
    if m != b.shape[-1]:
        raise ValueError(strm("incompatible dimensions (rows of A", m,
                              " do not match size of data", b.shape, ")"))
    # the FORTRAN side interprets -1 as "use the default iteration limit"
    maxiter = -1 if maxiter is None else int(maxiter)
    if isscalar(l):
        if l == 0.0:
            # no regularization: call plain nnls (workspace only m long)
            w = zeros((n,), dtype=double)
            zz = zeros((m,), dtype=double)
            index = zeros((n,), dtype=int)
            x, rnorm, mode = _nnls.nnls(A, b, w, zz, index, maxiter)
        else:
            # regularized: the stacked problem has m+n rows
            w = zeros((n,), dtype=double)
            zz = zeros((m+n,), dtype=double)
            index = zeros((n,), dtype=int)
            # choose the correct subroutine based on the dimension
            if len(b.shape) == 1:
                x, rnorm, mode = _nnls.nnls_regularized(A, b, w, zz, index, maxiter, l)
            if len(b.shape) == 2:
                # 2D data: the loop over rows happens inside FORTRAN, which
                # expects (and returns) Fortran-ordered arrays
                x, rnorm, mode = _nnls.nnls_regularized_loop(A, redim_C_to_F(b), w, zz, index, maxiter, l)
                x = redim_F_to_C(x)
    else:
        # iterable of lambda values: solve each in a thread pool
        nCPU = cpu_count()
        p = mpd.Pool(nCPU)
        if len(b.shape) == 1:
            def nnls_func(l):
                # fresh workspace per call so threads don't share buffers
                w = zeros((n,), dtype=double)
                zz = zeros((m+n,), dtype=double)
                index = zeros((n,), dtype=int)
                return _nnls.nnls_regularized(A, b, w, zz, index, maxiter, l)
        if len(b.shape) == 2:
            def nnls_func(l):
                w = zeros((n,), dtype=double)
                zz = zeros((m+n,), dtype=double)
                index = zeros((n,), dtype=int)
                x, rnorm, mode = _nnls.nnls_regularized_loop(A, redim_C_to_F(b), w, zz, index, maxiter, l)
                return redim_F_to_C(x), rnorm, mode
        retval = p.map(nnls_func, l)
        x, rnorm, mode = list(map(np_array, list(zip(*retval))))
    if (isscalar(mode) and mode != 1):
        # need something for the multiple lambda
        raise RuntimeError("too many iterations")
    return x, rnorm
| {
"repo_name": "jmfranck/pyspecdata",
"path": "pyspecdata/nnls.py",
"copies": "1",
"size": "4107",
"license": "bsd-3-clause",
"hash": 5601576959021495000,
"line_mean": 35.6696428571,
"line_max": 111,
"alpha_frac": 0.5648892135,
"autogenerated": false,
"ratio": 3.5042662116040955,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45691554251040956,
"avg_score": null,
"num_lines": null
} |
"""A game state for a level loaded from a file."""
# Copyright © 2014 Mikko Ronkainen <firstname@mikkoronkainen.com>
# License: MIT, see the LICENSE file.
import sfml as sf
from pymazing import world, level_loader, color, light, camera, coordinate_grid, renderer, matrix
class GameStateLoadedLevel:
    """A game state for a level loaded from a file.

    Builds a lit world from a TGA level file and maps the F1-F8 keys to
    rendering/lighting toggles.
    """
    def __init__(self, config):
        self.world = world.World()
        self.world.ambient_light.color = color.from_int(255, 255, 255)
        self.world.ambient_light.intensity = 0.2
        diffuse_light = light.Light()
        diffuse_light.position[0] = 100
        diffuse_light.position[1] = 150
        diffuse_light.position[2] = 50
        diffuse_light.color = color.from_int(255, 255, 255)
        diffuse_light.intensity = 0.4
        specular_light = light.Light()
        specular_light.position[0] = 100
        specular_light.position[1] = 150
        # NOTE(review): 59 here vs 50 for the diffuse light may be a typo --
        # confirm the intended light placement.
        specular_light.position[2] = 59
        specular_light.color = color.from_int(255, 255, 255)
        specular_light.intensity = 0.4
        specular_light.shininess = 8.0
        self.world.diffuse_lights.append(diffuse_light)
        self.world.specular_lights.append(specular_light)
        self.camera = camera.Camera(config)
        self.camera.position[0] = 4
        self.camera.position[1] = 3
        self.camera.position[2] = 6
        blocks = level_loader.generate_blocks_from_tga(config["game"]["level_file"])
        self.meshes = level_loader.generate_partial_meshes(blocks)
        self.coordinate_grid = coordinate_grid.CoordinateGrid()
        # rendering toggles, flipped by the F1-F8 keys in update()
        self.render_wireframe = False
        self.do_backface_culling = True
        self.render_coordinate_grid = False
        self.render_meshes = True
        self.rotate_lights = False
        # per-key "has been released" state used by is_key_pressed_once()
        self.key_released = dict()
    def is_key_pressed_once(self, key_code):
        """
        Determine if a key is pressed and signal it only once - key needs to be released before this returns true again.
        """
        if sf.Keyboard.is_key_pressed(key_code):
            # Bug fix: default missing keys to True so the very first press of
            # a key registers; previously .get() returned None for keys never
            # seen before, so the initial press was silently swallowed until
            # the key had been released once.
            if self.key_released.get(key_code, True):
                self.key_released[key_code] = False
                return True
        else:
            self.key_released[key_code] = True
        return False
    def update(self, time_step, mouse_delta):
        """Advance the camera, optionally rotate lights, and handle toggles."""
        self.camera.update(time_step, mouse_delta)
        if self.rotate_lights:
            # rotate both light positions around the Y axis
            light_rotation_matrix = matrix.create_rotation_matrix_y(0.5 * time_step)
            self.world.diffuse_lights[0].position = light_rotation_matrix.dot(self.world.diffuse_lights[0].position)
            self.world.specular_lights[0].position = light_rotation_matrix.dot(self.world.specular_lights[0].position)
        if self.is_key_pressed_once(sf.Keyboard.F1):
            self.render_wireframe = not self.render_wireframe
        if self.is_key_pressed_once(sf.Keyboard.F2):
            self.do_backface_culling = not self.do_backface_culling
        if self.is_key_pressed_once(sf.Keyboard.F3):
            self.render_coordinate_grid = not self.render_coordinate_grid
        if self.is_key_pressed_once(sf.Keyboard.F4):
            self.render_meshes = not self.render_meshes
        if self.is_key_pressed_once(sf.Keyboard.F5):
            self.world.ambient_light_enabled = not self.world.ambient_light_enabled
        if self.is_key_pressed_once(sf.Keyboard.F6):
            self.world.diffuse_lights_enabled = not self.world.diffuse_lights_enabled
        if self.is_key_pressed_once(sf.Keyboard.F7):
            self.world.specular_lights_enabled = not self.world.specular_lights_enabled
        if self.is_key_pressed_once(sf.Keyboard.F8):
            self.rotate_lights = not self.rotate_lights
    def render(self, framebuffer, interpolation):
        """Draw the (optional) coordinate grid, then the level meshes.

        NOTE(review): the meshes are rendered in two slices ([:1] then [1:])
        with identical settings -- presumably to control draw order of the
        first mesh; confirm before consolidating into a single call.
        """
        if self.render_coordinate_grid:
            self.coordinate_grid.render(self.camera, framebuffer)
        if self.render_meshes:
            renderer.render_meshes(self.meshes[:1], self.world, self.camera, framebuffer, do_backface_culling=self.do_backface_culling, render_wireframe=self.render_wireframe)
            renderer.render_meshes(self.meshes[1:], self.world, self.camera, framebuffer, do_backface_culling=self.do_backface_culling, render_wireframe=self.render_wireframe)
| {
"repo_name": "mikoro/pymazing",
"path": "pymazing/game_state_loaded_level.py",
"copies": "1",
"size": "4314",
"license": "mit",
"hash": -294939087564130800,
"line_mean": 39.8737864078,
"line_max": 175,
"alpha_frac": 0.6401576629,
"autogenerated": false,
"ratio": 3.512214983713355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9647316539632246,
"avg_score": 0.00101122139622173,
"num_lines": 103
} |
"""A game state for a simple rotating cube."""
# Copyright © 2014 Mikko Ronkainen <firstname@mikkoronkainen.com>
# License: MIT, see the LICENSE file.
from pymazing import world, mesh, color, light, euler_angle, camera, renderer
class GameStateSimpleCube:
    """A game state showing a single cube with basic three-part lighting."""

    def __init__(self, config):
        """Build the world, its two light sources, the camera and the cube."""
        self.world = world.World()
        self.world.ambient_light.color = color.from_int(255, 255, 255)
        self.world.ambient_light.intensity = 0.2
        # both light sources sit at the same spot, with identical color and
        # intensity; only the specular source additionally gets a shininess
        diffuse = light.Light()
        specular = light.Light()
        for source in (diffuse, specular):
            source.position[0] = 80
            source.position[1] = 100
            source.position[2] = 120
            source.color = color.from_int(255, 255, 255)
            source.intensity = 0.6
        specular.shininess = 4.0
        self.world.diffuse_lights.append(diffuse)
        self.world.specular_lights.append(specular)
        self.camera = camera.Camera(config)
        for axis in range(3):
            self.camera.position[axis] = 3
        self.camera.euler_angle = euler_angle.EulerAngle(-40.0, 45.0, 0)
        # a single golden cube is the only geometry in this state
        self.meshes = [mesh.create_cube(color.from_int(255, 215, 0))]
        self.render_wireframe = False

    def update(self, time_step, mouse_delta):
        """Advance the state; only the camera needs updating here."""
        self.camera.update(time_step, mouse_delta)

    def render(self, framebuffer, interpolation):
        """Draw the cube as a wireframe (solid rendering kept disabled)."""
        #renderer.render_meshes_solid(self.meshes, self.world, self.camera, framebuffer)
        renderer.render_meshes_wireframe(self.meshes, self.world, self.camera, framebuffer)
| {
"repo_name": "mikoro/pymazing",
"path": "pymazing/game_state_simple_cube.py",
"copies": "1",
"size": "1832",
"license": "mit",
"hash": -6826012891778122000,
"line_mean": 36.9574468085,
"line_max": 91,
"alpha_frac": 0.6389950847,
"autogenerated": false,
"ratio": 3.3596330275229356,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44986281122229355,
"avg_score": null,
"num_lines": null
} |
"""agata URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from postsubjectivity import views
urlpatterns = [
    # public landing page plus the AJAX endpoint that backs it
    url(r'^$', views.index, name='root'),
    url(r'^sabrina/$', views.enter, name="enter"),
    url(r'^indexajax/$', views.index_ajax, name="index_ajax"),
    url(r'^sabrina/home/$', views.home, name='home'),
    # alias-scoped pages; alias_id is a numeric id passed to the view
    url(r'^sabrina/(?P<alias_id>[0-9]+)/thoughts/$', views.thoughts, name="thoughts"),
    url(r'^sabrina/(?P<alias_id>[0-9]+)/questions/$', views.questions, name="questions"),
    url(r'^sabrina/(?P<alias_id>[0-9]+)/home/$', views.contribute, name="contribute"),
    url(r'^sabrina/(?P<alias_id>[0-9]+)/heartbeats/$', views.heartbeats, name="heartbeats"),
    url(r'^sabrina/(?P<alias_id>[0-9]+)/official/$', views.official, name="official"),
    url(r'^sabrina/(?P<alias_id>[0-9]+)/incarnations/$', views.incarnations, name="incarnations"),
    # standalone writing page and the Django admin site
    url(r'^writing/$', views.writing, name="writing"),
    url(r'^admin/', admin.site.urls),
]
| {
"repo_name": "superarius/sabrina",
"path": "agata/urls.py",
"copies": "1",
"size": "1584",
"license": "mit",
"hash": 6096424229965760000,
"line_mean": 47,
"line_max": 95,
"alpha_frac": 0.6736111111,
"autogenerated": false,
"ratio": 3.0171428571428573,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41907539682428574,
"avg_score": null,
"num_lines": null
} |
# Agbase - Algorithm Path
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import agbase_config as config
from models import *
from agbase import AgBase
import requests
import json
import datetime
import urllib
import os
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
# disable warnings about unverified https connections
requests.packages.urllib3.disable_warnings()
__author__ = 'John'
class AlgorithmAPI:
    """Thin wrapper around the AgBase REST ``algorithms/`` routes.

    Methods return model objects on success and ``None``/``False`` when the
    server responds with a non-200 status.  (Python 2 code: uses print
    statements.)
    """
    def __init__(self, ab):
        # ab: an authenticated AgBase connection (provides api_call/log)
        self.ab = ab
    def create_algorithm(self, name, measurement_category):
        """POST a new algorithm tied to ``measurement_category``.

        Returns the created Algorithm, or None on a non-200 response.
        """
        result = self.ab.api_call('post', 'algorithms/', {'name': name, 'measurementCategoryId': measurement_category.id})
        if result.status_code != 200:
            return None
        json_response = result.json()
        json_algorithm = json_response[u'algorithm']
        self.ab.log(json_response[u'message'])
        a = Algorithm()
        a.init_with_json(json_algorithm)
        # debug print left in place (Python 2 print statement)
        print "a :::::::::::::::::::::::::::: ", a.to_json()
        return a
    def remove_algorithm(self, algorithm):
        """DELETE the given algorithm; return True on success."""
        result = self.ab.api_call('delete', 'algorithms/{}'.format(algorithm.id))
        json_response = result.json()
        self.ab.log(json_response[u'message'])
        if result.status_code != 200:
            return False
        return True
    def get_algorithms(self, measurement_category=None):
        """List algorithms, optionally filtered by measurement category.

        Returns a list of Algorithm objects, or None on a non-200 response.
        """
        params = {}
        if measurement_category is not None:
            params['category'] = measurement_category.id
        result = self.ab.api_call('get', 'algorithms/', None, params)
        if result.status_code != 200:
            return None
        json_response = result.json()
        json_algorithms = json_response[u'algorithms']
        algorithms = []
        for json_algorithm in json_algorithms:
            a = Algorithm()
            a.init_with_json(json_algorithm)
            print ">>>>>>>for loop a>>>>> ", a.to_json()
            algorithms.append(a)
        return algorithms
    def get_algorithm(self, name):
        """Fetch the first algorithm whose name matches ``name``.

        Returns an Algorithm, or None on a non-200 response.
        NOTE(review): an empty ``algorithms`` list in the response would
        raise IndexError on the ``[0]`` below -- verify.
        """
        result = self.ab.api_call('get', 'algorithms/', None, {"name":name})
        if result.status_code != 200:
            return None
        json_response = result.json()
        #self.ab.log("get_algorithm Dump >>> " + json.dumps(json_response))
        json_algorithm = result.json()[u'algorithms'][0]
        #self.ab.log("get_algorithm Dump >>> " + json.dumps(json_algorithm))
        a = Algorithm()
        a.init_with_json(json_algorithm)
        # NOTE(review): the trailing "| {" on the next line appears to be an
        # extraction artifact fused onto this line -- verify against the
        # upstream source before building on this file.
        return a | {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/algorithm.py",
"copies": "1",
"size": "2497",
"license": "mpl-2.0",
"hash": -6008745648964556000,
"line_mean": 25.0208333333,
"line_max": 118,
"alpha_frac": 0.649979976,
"autogenerated": false,
"ratio": 3.613603473227207,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4763583449227207,
"avg_score": null,
"num_lines": null
} |
# Agbase - Animal Path
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import agbase_config as config
from models import *
from agbase import AgBase
import requests
import json
import datetime
import os
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
# disable warnings about unverified https connections
requests.packages.urllib3.disable_warnings()
__author__ = 'John'
class AnimalAPI:
    """Thin wrapper around the AgBase REST ``animals/`` routes.

    Methods return model objects on success and ``None``/``False`` when the
    server responds with a non-200 status.  (Python 2 code: uses print
    statements.)
    """
    def __init__(self, ab):
        # ab: an authenticated AgBase connection (provides api_call/log)
        self.ab = ab
    def create_animal(self, farm, eid, vid=None, herd=None):
        """POST a new animal on ``farm``; vid/herd are optional.

        Returns the created Animal, or None on a non-200 response.
        """
        animal_details = {'farmId': farm.id, 'eid': eid}
        if vid is not None:
            animal_details['vid'] = vid
        if herd is not None:
            animal_details['herdId'] = herd.id
        result = self.ab.api_call('post', 'animals/', animal_details)
        if result.status_code != 200:
            return None
        json_response = result.json()
        json_animal = json_response[u'animal']
        #self.ab.log("Animal Dump >>> " + json.dumps(json_response))
        return Animal(json_animal[u'id'],
                      json_animal[u'eid'],
                      json_animal[u'vid'],
                      json_animal[u'herdId'],
                      json_animal[u'farmId'])
    def merge_animals(self, eidAnimal, vidAnimal):
        """Merge ``vidAnimal`` into ``eidAnimal`` server-side; True on success."""
        result = self.ab.api_call('put', 'animals/{}'.format(eidAnimal.id), {'sourceAnimalId': vidAnimal.id})
        json_response = result.json()
        #self.ab.log("Animal Dump >>> " + json.dumps(json_response))
        if result.status_code != 200:
            return False
        return True
    def set_animal_herd(self, animal, herd):
        """Move ``animal`` into ``herd`` (must be on the same farm).

        Updates ``animal.herd_id`` locally on success; returns True/False.
        """
        if herd.farm_id != animal.farm_id:
            self.ab.log("Cannot add animal to herd on different farm!")
            return False
        result = self.ab.api_call('put', 'animals/{}'.format(animal.id), {'herdId': herd.id})
        json_response = result.json()
        #self.ab.log("Animal Dump >>> " + json.dumps(json_response))
        if result.status_code != 200:
            return False
        animal.herd_id = herd.id
        return True
    def remove_animal(self, animal):
        """DELETE the given animal; return True on success."""
        result = self.ab.api_call('delete', 'animals/{}'.format(animal.id))
        json_response = result.json()
        #self.ab.log("Animal Dump >>> " + json.dumps(json_response))
        if result.status_code != 200:
            return False
        return True
    def get_animals(self, farm, herd=None, limit=None, offset=None):
        """List a farm's animals, optionally filtered/paginated.

        Returns a list of Animal objects, or None on a non-200 response.
        """
        params = {'farm': farm.id}
        if herd is not None:
            params['herd'] = herd.id
        if limit is not None:
            params['limit'] = limit
        if offset is not None:
            params['offset'] = offset
        # debug print left in place (Python 2 print statement)
        print "----------------params-------------", params
        result = self.ab.api_call('get', 'animals/',None,params)
        if result.status_code != 200:
            return None
        json_response = result.json()
        json_animals = json_response[u'animals']
        animals = []
        for json_animal in json_animals:
            animals.append(Animal(json_animal[u'id'],
                                  json_animal[u'eid'],
                                  json_animal[u'vid'],
                                  json_animal[u'herdId'],
                                  json_animal[u'farmId']))
        return animals
    def get_animal_by_eid(self, farm, eid):
        """Look up one animal on ``farm`` by electronic id.

        Returns an Animal, or None when the request fails or nothing matches.
        """
        params = {'farm': farm.id, 'eid': (eid)}
        result = self.ab.api_call('get', 'animals/', None, params)
        if result.status_code != 200:
            return None
        json_response = result.json()
        self.ab.log("get_animal_by_eid -> %s" % json_response)
        json_animals = json_response[u'animals']
        if (len(json_animals) == 0):
            return None
        json_animal = json_animals[0]
        return Animal( json_animal[u'id'],
                       json_animal[u'eid'],
                       json_animal[u'vid'],
                       json_animal[u'herdId'],
                       json_animal[u'farmId'])
    '''
    Return an animal object or 'None' if no such animal exists.
    '''
    def get_animal_by_vid(self, farm, vid):
        """Look up one animal on ``farm`` by visual id (coerced to str).

        Returns an Animal, or None when the request fails or nothing matches.
        """
        params = {'farm': farm.id, 'vid': (str(vid))}
        result = self.ab.api_call('get', 'animals/', None, params)
        if result.status_code != 200:
            return None
        json_response = result.json()
        json_animals = json_response[u'animals']
        if (json_animals == []):
            return None
        json_animal = json_animals[0]
        return Animal( json_animal[u'id'],
                       json_animal[u'eid'],
                       json_animal[u'vid'],
                       json_animal[u'herdId'],
                       json_animal[u'farmId'])
    def update_animal_vid(self, animal, vid):
        """PUT a new visual id for ``animal``; updates the local object too."""
        result = self.ab.api_call('put', 'animals/{}'.format(animal.id), {'vid': str(vid)})
        json_response = result.json()
        #self.ab.log("Animal Dump >>> " + json.dumps(json_response))
        if result.status_code != 200:
            return False
        animal.vid = vid
        return True
| {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/animal.py",
"copies": "1",
"size": "5325",
"license": "mpl-2.0",
"hash": 571933544712748900,
"line_mean": 25.625,
"line_max": 109,
"alpha_frac": 0.5391549296,
"autogenerated": false,
"ratio": 3.554739652870494,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9450840186223696,
"avg_score": 0.028610879249359816,
"num_lines": 200
} |
# Agbase API Library
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import agbase_config as config
from models import *
from agbase import AgBase
import requests
import json
import datetime
import urllib
import os
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
# disable warnings about unverified https connections
requests.packages.urllib3.disable_warnings()
__author__ = 'John'
class FarmAPI:
    """Thin wrapper around the AgBase REST ``farms/`` routes.

    Methods return model objects on success and ``None``/``False`` when the
    server responds with a non-200 status.
    """
    def __init__(self, ab):
        # ab: an authenticated AgBase connection (provides api_call/log)
        self.ab = ab
    def create_farm(self, name):
        """POST a new farm; return the created Farm, or None on failure."""
        result = self.ab.api_call('post', 'farms/', {'name': name})
        if result.status_code != 200:
            return None
        json_response = result.json()
        self.ab.log(json_response[u'message'])
        json_farm = json_response[u'farm']
        return Farm(json_farm[u'name'], json_farm[u'id'])
    def remove_farm(self, farm):
        """DELETE the given farm; return True on success."""
        result = self.ab.api_call('delete', 'farms/{}'.format(farm.id))
        self.ab.log(result.json()[u'message'])
        if result.status_code != 200:
            return False
        return True
    def get_farms(self, user=None):
        """List farms, optionally only those visible to ``user``.

        Returns a list of Farm objects, or None on a non-200 response.
        """
        params = None
        if user is not None:
            params = {'user': user.id}
        result = self.ab.api_call('get', 'farms/', None, params)
        if result.status_code != 200:
            return None
        json_response = result.json()
        json_farms = json_response[u'farms']
        farms = []
        for json_farm in json_farms:
            farms.append(Farm(json_farm[u'name'], json_farm[u'id']))
        return farms
    def get_farm(self, farmId):
        """GET one farm by id; return the Farm, or None on failure."""
        result = self.ab.api_call('get', 'farms/{}'.format(farmId))
        if result.status_code != 200:
            return None
        json_farm = result.json()[u'farm']
        return Farm(json_farm[u'name'], json_farm[u'id'])
    def get_farm_by_name(self, user, farmName):
        """Return the user's farm named ``farmName``, or None.

        Also returns None when the farm listing itself failed; previously a
        failed listing returned None from get_farms() and the loop below
        raised ``TypeError: 'NoneType' object is not iterable``.
        """
        farms = self.get_farms(user)
        if farms is None:
            # propagate the request failure instead of crashing
            return None
        for f in farms:
            if (f.name == farmName):
                return f
        return None
| {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/farm.py",
"copies": "1",
"size": "2050",
"license": "mpl-2.0",
"hash": 6644793093335567000,
"line_mean": 20.3541666667,
"line_max": 70,
"alpha_frac": 0.6346341463,
"autogenerated": false,
"ratio": 3.1490015360983103,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9082485395003634,
"avg_score": 0.040230057478935106,
"num_lines": 96
} |
# Agbase - Auth and User Path
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import agbase_config as config
from models import *
from measurement_list import *
import requests
import json
import datetime
import urllib
import os
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
# disable warnings about unverified https connections
requests.packages.urllib3.disable_warnings()
__author__ = 'mark'
class AgBase(object):
def __init__(self):
self.agbase_api_url = config.defaultServerIp
self.session = requests.Session()
self.session.headers.update({'content-type': 'application/json'})
self.session.verify = config.defaultSigning
self.authenticationTime = 0
self.expiry_time = 0
self.logging = False
def log(self, string):
if self.logging is True:
print("-- AgBase: {}".format(string))
def api_call(self, http_verb, route, data=None, query_params=None):
self.log("API call of type {} to {}".format(http_verb,route))
authenticated = True
if self.authenticationTime == 0 or datetime.datetime.now() > self.expiry_time:
if self.authenticationTime != 0:
self.log('Token Expired - reauthenticating')
authenticated = self.__auth_user(self.currentUser, self.currentPwd)
if not authenticated:
return 0
http_call = getattr(self.session, http_verb)
if data is None and query_params is None:
response = http_call(self.agbase_api_url + route)
elif data is None and query_params is not None:
response = http_call(self.agbase_api_url + route, params=query_params)
elif query_params is None and data is not None:
response = http_call(self.agbase_api_url + route, data=json.dumps(data))
else:
response = http_call(self.agbase_api_url + route, params=query_params, data=json.dumps(data))
if response.status_code is not 200:
print response.content
return response
def __auth_user(self, email, pwd):
self.log("Authenticating User")
user_details = {"email": email, "password": pwd}
post_response = self.session.post(self.agbase_api_url + "auth/", data=json.dumps(user_details))
if post_response.status_code != 200:
self.log("Authentication Failed!")
return None
json_response = post_response.json()
self.session.headers.update({'Authorization': 'Bearer ' + json_response[u'token']})
self.authenticationTime = datetime.datetime.now()
self.expiry_time = datetime.timedelta(minutes=config.token_time_out) + self.authenticationTime
json_user = json_response[u'user']
self.log("Authenticated user {}!".format(json_user[u'email']))
return User(json_user[u'firstName'], json_user[u'lastName'], json_user[u'email'], json_user[u'id'])
def connect(self, email, pwd, agbase_api):
self.currentUser = email
self.currentPwd = pwd
self.agbase_api_url = agbase_api
return self.__auth_user(email, pwd)
def set_logging_on(self, is_on):
self.logging = is_on
if is_on is True:
self.log("AgBase Logging Enabled!")
#Requires current user to have admin rights
def get_roles(self):
result = self.api_call('get', 'farm-roles/')
if result.status_code != 200:
return None
json_response = result.json()
json_roles = json_response[u'roles']
roles = []
for json_role in json_roles:
roles.append(Role(json_role[u'name'], json_role[u'id']))
return roles
#Requires current user to have admin rights
def create_user(self, first_name, last_name, email, password, role):
user_details = {'firstName': first_name, 'lastName': last_name, 'email': email, 'password': password, 'roleId': role.id}
result = self.api_call('post', 'users/', user_details)
if result.status_code != 200:
return None
json_response = result.json()
json_user = json_response[u'user']
self.log(json_response[u'message'])
return User(json_user[u'firstName'], json_user[u'lastName'], json_user[u'email'], json_user[u'id'])
#Requires current user to have admin rights
def remove_user(self, user):
result = self.api_call('delete', 'users/{}'.format(user.id))
json_response = result.json()
self.log(json_response[u'message'])
if result.status_code != 200:
return False
return True
#Requires current user to have admin rights to access all users. Any user can see other users in their farm
def get_users(self, farm=None):
params = None
if farm is not None:
params = {'farm': farm.id}
result = self.api_call('get', 'users/', None, params)
if result.status_code != 200:
return None
json_response = result.json()
json_users = json_response[u'users']
users = []
for json_user in json_users:
users.append(User(json_user[u'firstName'], json_user[u'lastName'], json_user[u'email'], json_user[u'id']))
return users
| {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/agbase.py",
"copies": "1",
"size": "5105",
"license": "mpl-2.0",
"hash": 3199347959478636500,
"line_mean": 25.5885416667,
"line_max": 124,
"alpha_frac": 0.6673849167,
"autogenerated": false,
"ratio": 3.5110041265474554,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9562814257273518,
"avg_score": 0.02311495719478777,
"num_lines": 192
} |
# Agbase - Herd Path
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import agbase_config as config
from models import *
from agbase import AgBase
import requests
import json
import datetime
import urllib
import os
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
# disable warnings about unverified https connections
requests.packages.urllib3.disable_warnings()
__author__ = 'John'
class HerdAPI:
    """Client-side wrapper for the AgBase ``herds/`` REST routes."""

    def __init__(self, ab):
        # keep a reference to the authenticated AgBase connection
        self.ab = ab

    def create_herd(self, farm, name):
        """Create a herd on ``farm``; return the new Herd, or None on failure."""
        reply = self.ab.api_call('post', 'herds/', {'name': name, 'farmId': farm.id})
        if reply.status_code != 200:
            return None
        self.ab.log(reply.json()[u'message'])
        herd_json = reply.json()[u'herd']
        return Herd(herd_json[u'name'], herd_json[u'id'], herd_json[u'farmId'])

    def remove_herd(self, herd):
        """Delete ``herd``; return True when the server accepted the request."""
        reply = self.ab.api_call('delete','herds/{}'.format(herd.id))
        self.ab.log(reply.json()[u'message'])
        return reply.status_code == 200

    def get_herds(self, farm=None):
        """List herds (optionally for one farm); return None on failure."""
        params = {'farm': farm.id} if farm is not None else None
        reply = self.ab.api_call('get', 'herds/', None, params)
        if reply.status_code != 200:
            return None
        return [Herd(entry[u'name'], entry[u'id'])
                for entry in reply.json()[u'herds']]
| {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/herd.py",
"copies": "1",
"size": "1664",
"license": "mpl-2.0",
"hash": 1249555577942793700,
"line_mean": 21.1866666667,
"line_max": 87,
"alpha_frac": 0.6472355769,
"autogenerated": false,
"ratio": 3.1695238095238096,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43167593864238096,
"avg_score": null,
"num_lines": null
} |
# Agbase - Measurement Category Path
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
from models import *
from agbase import AgBase
import requests
import json
import datetime
import urllib
import os
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
# disable warnings about unverified https connections
requests.packages.urllib3.disable_warnings()
__author__ = 'John'
class MeasurementCategoryAPI:
    """Client-side wrapper for the AgBase ``measurement-categories/`` routes."""

    def __init__(self, ab):
        # authenticated AgBase connection used for every request
        self.ab = ab

    def create_measurement_category(self, name):
        """Create a category; return it as a MeasurementCategory, or None."""
        reply = self.ab.api_call('post', 'measurement-categories/', {'name': name})
        if reply.status_code != 200:
            return None
        payload = reply.json()
        category_json = payload[u'category']
        self.ab.log(payload[u'message'])
        return MeasurementCategory(category_json[u'name'], category_json[u'id'])

    def remove_measurement_category(self, category):
        """Delete ``category``; return True when the server accepted it."""
        reply = self.ab.api_call('delete', 'measurement-categories/{}'.format(category.id))
        self.ab.log(reply.json()[u'message'])
        return reply.status_code == 200

    def get_measurement_categories(self):
        """Return every category as a list, or None on a failed request."""
        reply = self.ab.api_call('get', 'measurement-categories/')
        if reply.status_code != 200:
            return None
        return [MeasurementCategory(entry[u'name'], entry[u'id'])
                for entry in reply.json()[u'categories']]

    def get_measurement_category(self, categoryId):
        """Fetch one category by id; return it, or None on failure."""
        reply = self.ab.api_call('get', 'measurement-categories/{}'.format(categoryId))
        if reply.status_code != 200:
            return None
        category_json = reply.json()[u'category']
        return MeasurementCategory(category_json[u'name'], category_json[u'id'])
| {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/measurement_category.py",
"copies": "1",
"size": "2081",
"license": "mpl-2.0",
"hash": 868487349893014500,
"line_mean": 23.7738095238,
"line_max": 90,
"alpha_frac": 0.6881307064,
"autogenerated": false,
"ratio": 3.7094474153297683,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48975781217297687,
"avg_score": null,
"num_lines": null
} |
# Agbase - Measurement List
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
__author__ = 'Tim Molteno, Mark Butler'
class MeasurementList:
def __init__(self, algorithm, user,animal=None,farmId=None):
if(animal is not None):
self.animal = animal
else:
self.animal = None
self.farm_id = farmId
self.algorithm = algorithm
self.user = user
self.measurements = []
def add_measurement(self, time_stamp, w05, w25, w50, w75, w95, eid=None):
measurement_details = {'timeStamp': time_stamp, 'w05': w05}
if w25 is not None:
measurement_details['w25'] = w25
if w50 is not None:
measurement_details['w50'] = w50
if w75 is not None:
measurement_details['w75'] = w75
if w95 is not None:
measurement_details['w95'] = w95
if eid is not None:
measurement_details['eid'] = eid
self.measurements.append(measurement_details)
def get_measurement_count(self):
return len(self.measurements)
    def get_json(self):
        """Build the JSON-serialisable payload for a bulk measurement upload.

        When an animal was supplied, the payload is keyed by that animal and
        its farm; otherwise only the stored farm id is sent.
        """
        # Guard kept from the original (disabled): an empty measurement list
        # is currently still uploaded.
        #if len(self.measurements) <= 0:
            #return None
        if (self.animal is not None):
            # Animal-scoped payload: farm and animal ids come from the animal.
            json_list = {'farmId':self.animal.farm_id,
                         'animalId': self.animal.id,
                         'algorithmId': self.algorithm.id,
                         'userId': self.user.id,
                         'measurements': self.measurements}
        else:
            # Farm-scoped payload: no animalId, farm id given at construction.
            json_list = {'farmId':self.farm_id,
                         'algorithmId': self.algorithm.id,
                         'userId': self.user.id,
                         'measurements': self.measurements}
return json_list | {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/measurement_list.py",
"copies": "1",
"size": "1938",
"license": "mpl-2.0",
"hash": 2597913597769766400,
"line_mean": 28.8307692308,
"line_max": 77,
"alpha_frac": 0.5536635707,
"autogenerated": false,
"ratio": 3.8,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48536635707,
"avg_score": null,
"num_lines": null
} |
# Agbase - Measurement Path
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import agbase_config as config
from models import *
from measurement_list import *
from agbase import AgBase
import requests
import json
import datetime
import urllib
import os
# Opt-in via environment variable: when the unit-test suite talks to a dev
# server over unverified HTTPS, silence urllib3's InsecureRequestWarning spam.
if os.getenv('MOOGLE_RUNNING_UNIT_TESTS', '0') == '1':
    # disable warnings about unverified https connections
    requests.packages.urllib3.disable_warnings()
__author__ = 'mark'
class MeasurementAPI:
    """Client wrapper for AgBase's ``measurements`` endpoints.

    Query methods return parsed :class:`Measurement` objects (or lists of
    them) and ``None`` on any non-200 response; deletion returns
    ``True``/``False``.
    """

    def __init__(self, ab):
        # Shared AgBase connection used for all requests.
        self.ab = ab

    def upload_measurement(self, measurement_details):
        """POST one measurement dict; return the stored Measurement, or None.

        TODO kept from the original: single-model upload is not fully
        implemented.
        """
        response = self.ab.api_call('post', 'measurements/', measurement_details)
        if response.status_code != 200:
            return None
        stored = Measurement(None)
        stored.init_with_json(response.json()[u'measurement'])
        return stored

    def create_measurement(self, animal, algorithm, user, time_stamp, w05, w25, w50, w75, w95, comment):
        """Create a measurement keyed by a known *animal*."""
        payload = Measurement(None).to_json_animal(animal, algorithm, user, time_stamp, w05, w25, w50, w75, w95, comment)
        return self.upload_measurement(payload)

    def create_measurement_for_eid(self, eid, farm, algorithm, user, time_stamp, w05, w25, w50, w75, w95, comment):
        """Create a measurement keyed by an animal's EID within *farm*."""
        payload = Measurement(None).to_json_eid(eid, farm, algorithm, user, time_stamp, w05, w25, w50, w75, w95, comment)
        return self.upload_measurement(payload)

    def remove_measurement(self, measurement):
        """DELETE *measurement*; True on success, False otherwise."""
        response = self.ab.api_call('delete', 'measurements/{}'.format(measurement.id))
        # Body parsed (and discarded) before the status check, as before.
        response.json()
        return response.status_code == 200

    def _fetch_measurements(self, params):
        """GET measurements matching *params*; parsed list, or None on error."""
        response = self.ab.api_call('get', 'measurements/', None, params)
        if response.status_code != 200:
            return None
        parsed = []
        for raw in response.json()["measurements"]:
            one = Measurement(None)
            one.init_with_json(raw)
            parsed.append(one)
        return parsed

    def get_measurements_for_animal(self, animal, algorithm=None, first_date=None, last_date=None):
        """All measurements for *animal*, optionally limited to *algorithm*.

        NOTE(review): first_date/last_date are accepted but never sent to the
        server — kept as-is to preserve behaviour.
        """
        params = {'animal': animal.id, 'farmId': animal.farm_id}
        if algorithm is not None:
            params['algorithm'] = algorithm.id
        return self._fetch_measurements(params)

    def get_condition_scores_for_farm(self, farm, algorithm, first_date, last_date):
        """Condition scores for a whole farm, with animal details included.

        NOTE(review): the date arguments are currently ignored (the filters
        below stay commented out) — kept as-is.
        """
        params = {'farmId': farm.id,
                  'algorithmId': algorithm.id,
                  #'startDate': str(first_date),
                  #'endDate': str(last_date),
                  'include': "animal"}
        return self._fetch_measurements(params)

    def create_bulk_measurement_upload_list(self, algorithm, user, animal=None, farmId=None):
        """Convenience factory for a MeasurementList bound to this context."""
        return MeasurementList(algorithm, user, animal, farmId)

    def upload_measurement_list(self, measurement_list):
        """POST a whole MeasurementList; True on success, None on failure."""
        response = self.ab.api_call('post', 'measurements/', measurement_list.get_json())
        if response.status_code != 200:
            return None
        return True
| {
"repo_name": "elec-otago/python-agbase",
"path": "agbase/measurement.py",
"copies": "1",
"size": "4039",
"license": "mpl-2.0",
"hash": -2818208930763733500,
"line_mean": 28.4817518248,
"line_max": 113,
"alpha_frac": 0.6905174548,
"autogenerated": false,
"ratio": 3.577502214348981,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9647642882150584,
"avg_score": 0.024075357399679174,
"num_lines": 137
} |
# Agbase - Unittest - Tests
#
# Copyright (c) 2015. Elec Research.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
import unittest
from agbase.agbase import AgBase
from agbase.algorithm import AlgorithmAPI
from agbase.animal import AnimalAPI
from agbase.farm import FarmAPI
from agbase.herd import HerdAPI
from agbase.measurement import MeasurementAPI
from agbase.measurement_category import MeasurementCategoryAPI
import pprint
import time
from test_common import TestCommon
__author__ = 'mark'
class TestAgBase(TestCommon):
    """End-to-end tests exercising the AgBase API wrappers against a live
    server (credentials/host come from TestCommon).

    Fixes relative to the original:
      * remaining Python-2 ``print`` statements converted to ``print()`` calls,
      * ``test_algorithms`` dereferenced the created algorithm *before*
        checking it for None — check moved first,
      * ``test_measurements_bulk_upload`` passed its arguments to
        ``create_bulk_measurement_upload_list`` in the wrong order and called
        ``add_measurement`` without the required percentile arguments,
      * fragile ``super(self.__class__, ...)`` replaced by an explicit class
        reference (the old form recurses if this class is subclassed).
    """

    def setUp(self):
        """Connect to the server and build one wrapper per API area."""
        print('TestAgBase.setUp')
        super(TestAgBase, self).setUp()
        self.pp = pprint.PrettyPrinter(indent=4)
        self.agbase = AgBase()
        self.algorithm = AlgorithmAPI(self.agbase)
        self.animal = AnimalAPI(self.agbase)
        self.farm = FarmAPI(self.agbase)
        self.herd = HerdAPI(self.agbase)
        self.measurement = MeasurementAPI(self.agbase)
        self.measurement_category = MeasurementCategoryAPI(self.agbase)
        self.agbase.set_logging_on(True)
        self.user = self.agbase.connect(self.testUser, self.testPwd, self.serverIp)
        if self.user is None:
            self.fail()
        print('connected to mooogle with user: {} with id: {}'.format(self.user.email, self.user.id))

    def test_farms(self):
        """Create, list, fetch and delete a farm."""
        test_farm = self.farm.create_farm("Python Test Farm")
        if test_farm is None:
            self.fail()
        print('created farm: {} with id: {}'.format(test_farm.name, test_farm.id))
        farms = self.farm.get_farms()
        if farms is None:
            self.fail()
        for farm in farms:
            print('found farm: {} with id: {}'.format(farm.name, farm.id))
        single_query_farm = self.farm.get_farm(test_farm.id)
        if single_query_farm.id != test_farm.id:
            self.fail()
        farms = self.farm.get_farms(self.user)
        if farms is None:
            self.fail()
        for farm in farms:
            print('The current user can access farm: {}'.format(farm.name))
        deleted = self.farm.remove_farm(test_farm)
        if not deleted:
            self.fail()

    def test_roles(self):
        """List the available roles."""
        roles = self.agbase.get_roles()
        if roles is None:
            self.fail()
        for role in roles:
            print('Found role named {}'.format(role.name))

    def test_users(self):
        """Create a user with the Viewer role, list users, then delete it."""
        roles = self.agbase.get_roles()
        admin_role = None
        for role in roles:
            if role.name == "Viewer":
                admin_role = role
                break
        test_user = self.agbase.create_user("Test", "Testor", "test@test.com", "testpass", admin_role)
        if test_user is None:
            self.fail()
        print('created user: {} with id: {}'.format(test_user.email, test_user.id))
        users = self.agbase.get_users()
        if users is None:
            self.fail()
        for user in users:
            print('found user: {} {} with email: {}'.format(user.first_name, user.last_name, user.email))
        deleted = self.agbase.remove_user(test_user)
        if not deleted:
            self.fail()

    def test_measurement_categories(self):
        """Create, fetch, list and delete a measurement category."""
        test_category = self.measurement_category.create_measurement_category('Test Category')
        if test_category is None:
            self.fail()
        print('created measurement category: {} with id: {}'.format(test_category.name, test_category.id))
        single_query_category = self.measurement_category.get_measurement_category(test_category.id)
        if test_category.id != single_query_category.id:
            self.fail()
        categories = self.measurement_category.get_measurement_categories()
        if categories is None:
            self.fail()
        for category in categories:
            print('found category: {}'.format(category.name))
        deleted = self.measurement_category.remove_measurement_category(test_category)
        if not deleted:
            self.fail()

    def test_algorithms(self):
        """Create, fetch, list and delete an algorithm (plus its category)."""
        test_category = self.measurement_category.create_measurement_category('Algorithm Test Category')
        test_algorithm = self.algorithm.create_algorithm('Test Algorithm', test_category)
        # Check for None BEFORE dereferencing (the original printed
        # test_algorithm.to_json() first, which would raise instead of fail).
        if test_algorithm is None:
            self.fail()
        print("test_algorithm", test_algorithm.to_json())
        print('created algorithm {} with id: {}'.format(test_algorithm.name, test_algorithm.id))
        single_query_algorithm = self.algorithm.get_algorithm(test_algorithm.id)
        print("single_query_algorithm", single_query_algorithm.to_json())
        if test_algorithm.id != single_query_algorithm.id:
            self.fail()
        algorithms = self.algorithm.get_algorithms()
        if algorithms is None:
            self.fail()
        for algorithm in algorithms:
            print('found algorithm: {}'.format(algorithm.name))
        deleted = self.algorithm.remove_algorithm(test_algorithm)
        if not deleted:
            self.fail()
        self.measurement_category.remove_measurement_category(test_category)

    def test_animals(self):
        """Full animal lifecycle: create, herd assignment, update, delete."""
        test_farm = self.farm.create_farm('Animal Test Farm')
        test_herd = self.herd.create_herd(test_farm, 'Animal Test Herd')
        test_eid = "AN-EID-FOR_TESTING"
        test_animal = self.animal.create_animal(test_farm, test_eid)
        if test_animal is None:
            self.fail()
        print('created animal {} with id: {}'.format(test_animal.eid, test_animal.id))
        result = self.animal.set_animal_herd(test_animal, test_herd)
        if result is None:
            self.fail()
        print("============farm==========", test_farm.to_json())
        print("============herd==========", test_herd.to_json())
        animals = self.animal.get_animals(test_farm, test_herd)
        if animals is None:
            self.fail()
        for animal in animals:
            print('found animal: {}'.format(animal.eid))
        updated = self.animal.update_animal_vid(test_animal, "My Pet Cow")
        if not updated:
            self.fail()
        expected_animal = self.animal.get_animal_by_eid(test_farm, test_eid)
        if expected_animal.id != test_animal.id:
            self.fail()
        deleted = self.animal.remove_animal(test_animal)
        if not deleted:
            self.fail()
        self.herd.remove_herd(test_herd)
        self.farm.remove_farm(test_farm)

    def test_measurements(self):
        """Create and delete measurements by animal and by EID."""
        test_farm = self.farm.create_farm('Animal Test Farm')
        test_eid = "AN-EID-FOR_TESTING"
        test_animal = self.animal.create_animal(test_farm, test_eid)
        test_category = self.measurement_category.create_measurement_category('Algorithm Test Category')
        test_algorithm = self.algorithm.create_algorithm('Test Algorithm', test_category)
        measurement = self.measurement.create_measurement(test_animal, test_algorithm, self.user, time.strftime("%c"), None, None, 0.3344, None, None, None)
        if measurement is None:
            self.fail()
        print('created measurement with id {}'.format(measurement.id))
        animal_measurements = self.measurement.get_measurements_for_animal(test_animal)
        if animal_measurements[0].id != measurement.id:
            self.fail()
        deleted = self.measurement.remove_measurement(measurement)
        if not deleted:
            self.fail()
        eid_measurement = self.measurement.create_measurement_for_eid(test_animal.eid, test_farm, test_algorithm, self.user, time.strftime("%c"), None, None, 0.3344, None, None, None)
        if eid_measurement is None:
            self.fail()
        if eid_measurement.animal_id != test_animal.id:
            self.fail()
        # NOTE(review): __repr__ here apparently returns a dict (project
        # convention?) — confirm against the Measurement model.
        for keys, values in eid_measurement.__repr__().items():
            print(keys)
            print(values)
        self.measurement.remove_measurement(eid_measurement)
        self.animal.remove_animal(test_animal)
        self.farm.remove_farm(test_farm)
        self.algorithm.remove_algorithm(test_algorithm)
        self.measurement_category.remove_measurement_category(test_category)

    def test_measurements_bulk_upload(self):
        """Upload three measurements in one request and read them back."""
        test_farm = self.farm.create_farm('Animal Test Farm')
        test_eid = "AN-EID-FOR_TESTING"
        test_animal = self.animal.create_animal(test_farm, test_eid)
        test_category = self.measurement_category.create_measurement_category('Algorithm Test Category')
        test_algorithm = self.algorithm.create_algorithm('Test Algorithm', test_category)
        # Fixed: the factory signature is (algorithm, user, animal=None,
        # farmId=None); the original passed the animal as the algorithm.
        measurement_list = self.measurement.create_bulk_measurement_upload_list(
            test_algorithm, self.user, test_animal)
        # Fixed: add_measurement requires w25/w50/w75/w95 positionally; the
        # original passed only the timestamp and w05 (a TypeError).
        measurement_list.add_measurement(time.strftime("%c"), 0.3344, None, None, None, None)
        measurement_list.add_measurement(time.strftime("%c"), 0.4455, None, None, None, None)
        measurement_list.add_measurement(time.strftime("%c"), 0.5566, None, None, None, None)
        success = self.measurement.upload_measurement_list(measurement_list)
        if success is not True:
            self.fail()
        print('created bulk measurements')
        animal_measurements = self.measurement.get_measurements_for_animal(test_animal)
        if len(animal_measurements) != 3:
            self.fail()
        self.animal.remove_animal(test_animal)
        self.farm.remove_farm(test_farm)
        self.algorithm.remove_algorithm(test_algorithm)
        self.measurement_category.remove_measurement_category(test_category)
if __name__ == '__main__':
    # Run the suite with verbose output when invoked directly as a script.
    loader = unittest.TestLoader()
    unittest.TextTestRunner(verbosity=2).run(
        loader.loadTestsFromTestCase(TestAgBase))
| {
"repo_name": "elec-otago/python-agbase",
"path": "tests/test_agbase.py",
"copies": "1",
"size": "9799",
"license": "mpl-2.0",
"hash": -5272045273377145000,
"line_mean": 30.6096774194,
"line_max": 179,
"alpha_frac": 0.6308807021,
"autogenerated": false,
"ratio": 3.7258555133079847,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9818042768142272,
"avg_score": 0.007738689453142547,
"num_lines": 310
} |
"""agdss URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: path(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
import django.views
from django.urls import include, path
from django.views.generic.edit import CreateView
from django.contrib.auth.forms import UserCreationForm
from django.views.generic import RedirectView
from django.contrib import admin
from adminplus.sites import AdminSitePlus
#Set up admin site and import all admin.py files
# AdminSitePlus replaces the stock admin site so custom admin views can be
# registered; autodiscover() then imports every installed app's admin module.
admin.site = AdminSitePlus()
admin.sites.site = admin.site
admin.autodiscover()
urlpatterns = [
    # Main web client application.
    url(r'^webclient/', include('webclient.urls')),
    # Django admin (AdminSitePlus, configured above).
    url(r'^admin/', admin.site.urls),
    # Auth views (login/logout/password) mounted at the site root.
    url('^', include('django.contrib.auth.urls')),
    # Simple self-registration backed by Django's stock UserCreationForm.
    url('^register/', CreateView.as_view(
        template_name='registration/register.html',
        form_class=UserCreationForm,
        success_url= '/login'
    )),
    # NOTE(review): the auth urls are included twice (at '^' and
    # '^accounts/') — the second include is redundant but harmless.
    url(r'^accounts/', include('django.contrib.auth.urls')),
    # Bare domain redirects into the web client.
    url(r'^$', RedirectView.as_view(url='/webclient/')),
]
| {
"repo_name": "darknight-007/agdss-1",
"path": "agdss/urls.py",
"copies": "1",
"size": "1552",
"license": "apache-2.0",
"hash": 6919134785743478000,
"line_mean": 32.4888888889,
"line_max": 79,
"alpha_frac": 0.7074742268,
"autogenerated": false,
"ratio": 3.433628318584071,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4641102545384071,
"avg_score": null,
"num_lines": null
} |
age=9
name=["Minglong","minghu","jack",22,age,23,3,4,5,7,87,89,9,9,4]
print(name)
print(name[0:2]) #list slicing: take several elements at once by index
print(name[2:5]) #take "jack" and the age variable
print(name[-5:]) #take the last 5 values
name[0]="xxxxx" #replace a single element
print(name)
name.insert(1,"mingmao") #insert an element at index 1
print(name)
name.append("haha") #append a value to the end of the list
print(name)
name.remove("haha") #remove an element by value
print(name)
#Exercise requirements:
#build a list with every member of this group, insert 2 members of the
#neighbouring group into the middle, then slice out people 3 through 8,
#delete the 7th person,
#remove the 2 inserted members of the other group in one go,
#and tag the group leader's name with a "leader" note
n=["Alex",3,5,7,8,9,"jack",1,3,0,9,5,"Rain",1,3,4,7,1,"Eric",1,5,78,3423,4231,"Monica","Fiona",9,4,5,6,1,3]
n.insert(3,4)
n.insert(4,5)
print(n)
print(n[3:8])
n.remove("Monica")
print(n)
del n[3:5]
print(n)
print(n[::2]) #slice with a step of 2
#print("Alex" in n)
'''
if "Alex" in n: #断的字符串Alex是否在n列表内
print("ok")
print(n.count(9)) #统计这n列表一共有几个元素9
需求把9改成99999
'''
# Replace every occurrence of 9 with the string "99999".
if 9 in n:
    print("9在n列表里面")
    c=n.count(9)
    for i in range(c):
        weizhi=n.index(9)
        n[weizhi]="99999"
    print(n)
else:
    print("9不在n列表里面")
#n.reverse() #sort in reverse order
#print(n)
#name2=["x","e","r"]
#n.extend(name2) #merge one list into another (extend)
#print(n)
#n.pop() #pop removes the last element by default
#print(n)
#v=n.copy()
#print(v)
import copy
#v=copy.deepcopy(n) #deep copy
#print(v)
#find every 9 and change it to 999
#find every 34 and delete it
l=[1,2,3,4,5,9,[9,2,34,9],34,34,5,3,1,9]
print(l)
c=l.count(9)
c2=l.count(34)
for i in range(c):
    weizhi=l.index(9)
    l[weizhi]="999"
for i in range(c2):
    l.remove(34)
print(l)
# Same treatment for the nested list at index 6 (kept alias `le`).
le=l[6]
c=le.count(9)
c2=le.count(34)
for i in range(c):
    weizhi=le.index(9)
    le[weizhi]=999
for i in range(c2):
    le.remove(34)
print(l)
| {
"repo_name": "xiaoyongaa/ALL",
"path": "python基础2周/24课课上实例.py",
"copies": "1",
"size": "2054",
"license": "apache-2.0",
"hash": -987320530964387100,
"line_mean": 13.6111111111,
"line_max": 107,
"alpha_frac": 0.6191381496,
"autogenerated": false,
"ratio": 1.5216972034715526,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.2640835353071552,
"avg_score": null,
"num_lines": null
} |
# age counter
# by prasith 11/09/2014
import time
def main():
    """Print the intro banner, then hand control to the interactive loop."""
    print("\n\nThis programme will calculate your age.\n(Accuracy: +/- 1 Day)")
    main2()
def main2():
print ("\nEnter your birth year, month and date seperated by commas. eg:1999,5,21: ")
Y1 , M1 , D1 = input(">")
if Y1 < 1914:
print ("\nWoah, Are you a ghost!!!??\nanyway here's your age, ")
if Y1 > int((time.strftime("%Y"))) :
print ("\nGreetings! Time traveller...! \nbut sorry,no calculations for you. Next!")
main2()
if M1<0 or M1>13:
print ("\nThere are only 12 months! \nplease try again. ")
main2()
if D1<0 or D1>31:
print ("\nThe date you entered is incorrect. \nPlease try again. ")
main2()
Y2 = int((time.strftime("%Y")))
M2 = int((time.strftime("%m")))
D2 = int((time.strftime("%d")))
#calculate Years
Y = Y2 - Y1
#calculate months
if M1 > M2:
M = (13 - M1) + ( M2 - 1)
Y = Y - 1
else:
M = M2 - M1
#calculate days
if D1 > D2:
if M2 == 3:
D = (28 - D1) + (D2)
else:
D = (30 - D1) + ( D2)
if M > 0:
M = M - 1
else:
M = 11
Y = Y - 1
else:
D = D2 - D1
# The above calculations were made assuming
# a month only has 30 days(28 for February).
print "\n",Y, "Years", M, "Months", D,"Days\n"
inp = raw_input("type 'y' to Check another or press 'enter' to exit: ")
if inp == 'y':
main2()
main()
| {
"repo_name": "PrasithL/python_scripts",
"path": "age_calculator.py",
"copies": "1",
"size": "1601",
"license": "apache-2.0",
"hash": -8381620427573316000,
"line_mean": 22.8955223881,
"line_max": 92,
"alpha_frac": 0.4859462836,
"autogenerated": false,
"ratio": 3.1453831041257367,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4131329387725737,
"avg_score": null,
"num_lines": null
} |
"""agency_description to agency_request_summary
Revision ID: cf62ec87d973
Revises: 971f341c0204
Create Date: 2017-05-31 16:29:17.341283
"""
# revision identifiers, used by Alembic.
revision = "cf62ec87d973"
# NOTE(review): the module docstring says this revises 971f341c0204, but the
# effective parent is the value below — down_revision is what Alembic uses.
down_revision = "58a5abdd94ac"
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Replace the agency_description column pair with agency_request_summary.

    Adds the two new nullable columns, then drops the old ones.  No data is
    copied between old and new columns, so existing agency descriptions are
    discarded on upgrade.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "requests",
        sa.Column("agency_request_summary", sa.String(length=5000), nullable=True),
    )
    op.add_column(
        "requests",
        sa.Column("agency_request_summary_release_date", sa.DateTime(), nullable=True),
    )
    op.drop_column("requests", "agency_description_release_date")
    op.drop_column("requests", "agency_description")
    ### end Alembic commands ###
def downgrade():
    """Restore the agency_description column pair and drop the new columns.

    Mirror image of upgrade(); likewise copies no data, so summaries written
    after the upgrade are lost on downgrade.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "requests",
        sa.Column(
            "agency_description",
            sa.VARCHAR(length=5000),
            autoincrement=False,
            nullable=True,
        ),
    )
    op.add_column(
        "requests",
        sa.Column(
            "agency_description_release_date",
            postgresql.TIMESTAMP(),
            autoincrement=False,
            nullable=True,
        ),
    )
    op.drop_column("requests", "agency_request_summary_release_date")
    op.drop_column("requests", "agency_request_summary")
    ### end Alembic commands ###
| {
"repo_name": "CityOfNewYork/NYCOpenRecords",
"path": "migrations/versions/cf62ec87d973_agency_description_to_agency_request_.py",
"copies": "1",
"size": "1494",
"license": "apache-2.0",
"hash": -8977271538269467000,
"line_mean": 26.1636363636,
"line_max": 87,
"alpha_frac": 0.6258366801,
"autogenerated": false,
"ratio": 3.7256857855361596,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.485152246563616,
"avg_score": null,
"num_lines": null
} |
agenda = []    # list of [name, phone] pairs
gravou = True  # False while there are unsaved changes


def pedenome():
    """Ask for a name; '#' is replaced because it is the file separator."""
    return input("Nome: ").replace("#", "$")


def pedetelefone():
    """Ask for a phone number, with the same '#' sanitising."""
    return input("Telefone: ").replace("#", "$")


def pedearquivo():
    """Ask which file to read/write."""
    return input("Nome do arquivo: ")


def mostra(nome, telefone):
    """Print one contact."""
    print(f"Nome: {nome} Telefone: {telefone}")


def pesquisa(nome):
    """Return the index of *nome* in the agenda (case-insensitive), or None."""
    alvo = nome.lower()
    for indice, contato in enumerate(agenda):
        if contato[0].lower() == alvo:
            return indice
    return None
def novo():
    """Ask for a new contact and append it, marking unsaved changes."""
    global gravou
    contato = [pedenome(), pedetelefone()]
    agenda.append(contato)
    gravou = False


def apaga():
    """Ask for a name and delete that contact after confirmation."""
    global gravou
    posicao = pesquisa(pedenome())
    if posicao is None:
        print("Nome não encontrado.")
        return
    m = "Certeza que quer excluir? (1 - Para confirmar / 0 - para sair): "
    if faixa(m, 0, 1) == 1:
        del agenda[posicao]
        gravou = False
    else:
        print("Não foi apagado!")
def altera():
    """Look a contact up by name and, on confirmation, overwrite it."""
    global gravou
    p = pesquisa(pedenome())
    if p is None:
        print("Não encontrado")
        return
    print("Encontrado!")
    mostra(agenda[p][0], agenda[p][1])
    novo_nome = pedenome()
    novo_fone = pedetelefone()
    m = "Certeza que quer alterar? (1 - Para confirmar / 0 - para sair): "
    if faixa(m, 0, 1) == 1:
        agenda[p] = [novo_nome, novo_fone]
        gravou = False
    else:
        print("Não alterado!")


def lista():
    """Print the whole agenda, numbered from 1, between separator lines."""
    print("\nAgenda\n")
    print("-"*6)
    for n, (nome, telefone) in enumerate(agenda):
        print(n+1, end=' ')
        mostra(nome, telefone)
    print("-"*6)
def grava():
    """Save the agenda as 'name#phone' lines, then remember the file name.

    Skips the write when nothing changed since the last save.  Fix: files
    are now opened with ``with`` so they are closed even if a write fails.
    """
    global gravou
    if gravou is False:
        nomearquivo = pedearquivo()
        with open(nomearquivo, "w") as arquivo:
            for nome, telefone in agenda:
                arquivo.write(f"{nome}#{telefone}\n")
        gravou = True
        with open("ultimaagenda.txt", "w") as ultima:
            ultima.write(nomearquivo)
    else:
        print("Conteudo ja gravado")


def le():
    """Load the agenda from a 'name#phone' file, refusing to drop unsaved edits.

    Fix: the file is opened with ``with`` so it is closed on parse errors.
    """
    global agenda, gravou
    if gravou is False:
        print("Grave a agenda primeiro (Opção 5)")
    else:
        agenda = []
        nomearquivo = pedearquivo()
        with open(nomearquivo, "r") as arquivo:
            for linha in arquivo:
                nome, telfone = linha.strip().split("#")
                agenda.append([nome, telfone])
        with open("ultimaagenda.txt", "w") as ultima:
            ultima.write(nomearquivo)
def faixa(pergunta, i, f):
    """Keep asking *pergunta* until an int within [i, f] is given; return it.

    Fix: an out-of-range integer now also prints the guidance message
    (previously only non-numeric input produced any feedback, so the prompt
    silently repeated).
    """
    while True:
        try:
            valor = int(input(pergunta))
        except ValueError:
            print(f"Valor inválido, favor digitar valor entre {i} e {f}")
            continue
        if i <= valor <= f:
            return valor
        print(f"Valor inválido, favor digitar valor entre {i} e {f}")


def ordena():
    """Sort the agenda alphabetically in place and show the result."""
    global agenda
    agenda.sort()
    lista()
def menu():
    """Print the option menu plus contact count; return a validated choice (0-7)."""
    print("""
    0 - Sair
    1 - Novo
    2 - Alterar
    3 - Excluir
    4 - Lista
    5 - Grava
    6 - Lê
    7 - Ordena por Nome
    """)
    la = len(agenda)
    print(f"{la} contato(s) na agenda.")
    # faixa() re-prompts until the answer is a valid option number.
    return faixa("Escola uma opção: ", 0, 7)
# Interactive main loop: dispatch each menu option to its handler; 0 exits.
acoes = {1: novo, 2: altera, 3: apaga, 4: lista, 5: grava, 6: le, 7: ordena}
while True:
    opcao = menu()
    if opcao == 0:
        break
    acoes[opcao]()
| {
"repo_name": "laenderoliveira/exerclivropy",
"path": "cap09/exercicio-09-22.py",
"copies": "1",
"size": "3565",
"license": "mit",
"hash": 3301225766881733600,
"line_mean": 20.2874251497,
"line_max": 78,
"alpha_frac": 0.5232067511,
"autogenerated": false,
"ratio": 3,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9023206751099999,
"avg_score": 0,
"num_lines": 167
} |
agenda = []    # each entry: [name, phone, birthday, email]
gravou = True  # False while there are unsaved changes


def pedenome(op=""):
    """Ask for a name; an empty answer falls back to *op*. '#' is sanitised."""
    resposta = input("Nome: ").replace("#", "$")
    return resposta if resposta != "" else op


def pedetelefone(op=""):
    """Ask for a phone number, with the same fallback and sanitising."""
    resposta = input("Telefone: ").replace("#", "$")
    return resposta if resposta != "" else op


def pedeaniversario():
    """Ask for a birthday string, e.g. 27/03/1998."""
    return input("Aniversário Ex 27/03/1998: ")


def pedeemail():
    """Ask for an e-mail address."""
    return input("Email: ")


def pedearquivo():
    """Ask which file to read/write."""
    return input("Nome do arquivo: ")


def mostra(nome, telefone, aniversario, email):
    """Print one contact on a single line."""
    print(f"Nome: {nome} Telefone: {telefone} Niver: {aniversario} Email: {email}")
def pesquisa(nome):
    """Return the index of *nome* in the agenda (case-insensitive), or None."""
    alvo = nome.lower()
    for indice, contato in enumerate(agenda):
        if contato[0].lower() == alvo:
            return indice
    return None


def verifica_repetido(nome):
    """True when *nome* is not in the agenda yet; warn and return False otherwise."""
    for contato in agenda:
        if contato[0].lower() == nome.lower():
            print("Nome já existe!")
            return False
    return True
def novo():
    """Read a full contact and append it, unless the name already exists."""
    global gravou
    nome = pedenome()
    if verifica_repetido(nome):
        agenda.append([nome, pedetelefone(), pedeaniversario(), pedeemail()])
        gravou = False


def apaga():
    """Delete a contact by name after confirmation."""
    global gravou
    posicao = pesquisa(pedenome())
    if posicao is None:
        print("Nome não encontrado.")
        return
    m = "Certeza que quer excluir? (1 - Para confirmar / 0 - para sair): "
    if faixa(m, 0, 1) == 1:
        del agenda[posicao]
        gravou = False
    else:
        print("Não foi apagado!")
def altera():
    """Show a contact by name and, on confirmation, overwrite every field."""
    global gravou
    p = pesquisa(pedenome())
    if p is None:
        print("Não encontrado")
        return
    print("Encontrado!")
    mostra(*agenda[p])
    novos = [pedenome(), pedetelefone(), pedeaniversario(), pedeemail()]
    m = "Certeza que quer alterar? (1 - Para confirmar / 0 - para sair): "
    if faixa(m, 0, 1) == 1:
        agenda[p] = novos
        gravou = False
    else:
        print("Não alterado!")


def lista():
    """Print the numbered agenda between separator lines."""
    print("\nAgenda\n")
    print("-"*6)
    for n, (nome, telefone, aniversario, email) in enumerate(agenda):
        print(n+1, end=' ')
        mostra(nome, telefone, aniversario, email)
    print("-"*6)
def grava():
    """Save the agenda as 'name#phone#birthday#email' lines and record the file.

    Skips the write when nothing changed since the last save.  Fix: the file
    is opened with ``with`` so it is closed even if a write fails.
    """
    global gravou
    if gravou is False:
        nomearquivo = pedearquivo()
        with open(nomearquivo, "w") as arquivo:
            for nome, telefone, aniversario, email in agenda:
                arquivo.write(f"{nome}#{telefone}#{aniversario}#{email}\n")
        gravou = True
        grava_ultima_agenda(nomearquivo)
    else:
        print("Conteudo ja gravado")


def leia_arquivo(nomearquivo):
    """Replace the global agenda with the contents of *nomearquivo*.

    Fix: ``with`` guarantees the file is closed even on malformed lines.
    """
    global agenda
    agenda = []
    with open(nomearquivo, "r") as arquivo:
        for linha in arquivo:
            nome, telefone, aniversario, email = linha.strip().split("#")
            agenda.append([nome, telefone, aniversario, email])
def le():
    """Load an agenda file chosen by the user, refusing to drop unsaved edits."""
    global agenda, gravou
    if gravou is False:
        print("Grave a agenda primeiro (Opção 5)")
        return
    agenda = []
    nomearquivo = pedearquivo()
    leia_arquivo(nomearquivo)
    grava_ultima_agenda(nomearquivo)


def ultima_agenda():
    """Name of the most recently used agenda file, or None when none recorded.

    Fix: the bare ``except:`` (which hid every error, even KeyboardInterrupt)
    now only swallows file errors, and the file is closed via ``with``.
    """
    try:
        with open("ultimaagenda.txt", "r") as arq:
            return arq.readline().strip()
    except OSError:
        return None


def grava_ultima_agenda(nome):
    """Record *nome* as the most recently used agenda file."""
    with open("ultimaagenda.txt", "w") as arq:
        arq.write(nome)


def le_ultima_agenda():
    """Reload the last agenda file, when one is recorded."""
    ultima = ultima_agenda()
    if ultima is not None:
        leia_arquivo(ultima)
def faixa(pergunta, i, f):
    """Keep asking *pergunta* until an int within [i, f] is given; return it.

    Fix: an out-of-range integer now also prints the guidance message
    (previously only non-numeric input produced any feedback).
    """
    while True:
        try:
            valor = int(input(pergunta))
        except ValueError:
            print(f"Valor inválido, favor digitar valor entre {i} e {f}")
            continue
        if i <= valor <= f:
            return valor
        print(f"Valor inválido, favor digitar valor entre {i} e {f}")


def ordena():
    """Sort the agenda alphabetically in place and show the result."""
    global agenda
    agenda.sort()
    lista()
def menu():
    """Print the option menu plus contact count; return a validated choice (0-7)."""
    print("""
    0 - Sair
    1 - Novo
    2 - Alterar
    3 - Excluir
    4 - Lista
    5 - Grava
    6 - Lê
    7 - Ordena por Nome
    """)
    la = len(agenda)
    print(f"{la} contato(s) na agenda.")
    # faixa() re-prompts until the answer is a valid option number.
    return faixa("Escola uma opção: ", 0, 7)
# Restore the most recently used agenda, then run the menu loop; 0 exits.
le_ultima_agenda()
acoes = {1: novo, 2: altera, 3: apaga, 4: lista, 5: grava, 6: le, 7: ordena}
while True:
    opcao = menu()
    if opcao == 0:
        break
    acoes[opcao]()
| {
"repo_name": "laenderoliveira/exerclivropy",
"path": "cap09/exercicio-09-25.py",
"copies": "3",
"size": "4865",
"license": "mit",
"hash": -5901792432274009000,
"line_mean": 20.9592760181,
"line_max": 83,
"alpha_frac": 0.5487327426,
"autogenerated": false,
"ratio": 2.980958230958231,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5029690973558231,
"avg_score": null,
"num_lines": null
} |
agenda = []  # list of [name, phone] pairs


def pedenome():
    """Ask for a contact name."""
    return input("Nome: ")


def pedetelefone():
    """Ask for a phone number."""
    return input("Telefone: ")


def pedearquivo():
    """Ask for a file name."""
    return input("Nome do arquivo: ")


def mostra(nome, telefone):
    """Print one contact."""
    print("Nome: {} Telefone: {}".format(nome, telefone))


def pesquisa(nome):
    """Return the index of *nome* in the agenda (case-insensitive), or None."""
    alvo = nome.lower()
    for indice, contato in enumerate(agenda):
        if contato[0].lower() == alvo:
            return indice
    return None
def novo():
    """Read and append a new contact."""
    agenda.append([pedenome(), pedetelefone()])


def apaga():
    """Delete a contact looked up by name, if present."""
    posicao = pesquisa(pedenome())
    if posicao is None:
        print("Nome não encontrado.")
    else:
        del agenda[posicao]
def altera():
    """Overwrite an existing contact found by name."""
    p = pesquisa(pedenome())
    if p is None:
        print("Não encontrado")
        return
    print("Encontrado!")
    mostra(agenda[p][0], agenda[p][1])
    agenda[p] = [pedenome(), pedetelefone()]


def lista():
    """Print every contact between separator lines."""
    print("\nAgenda\n")
    print("-"*6)
    for nome, telefone in agenda:
        mostra(nome, telefone)
    print("-"*6)
def grava():
    """Write the agenda to a user-chosen file as 'name#phone' lines.

    Fix: the file is opened with ``with`` so it is closed even on error.
    """
    nomearquivo = pedearquivo()
    with open(nomearquivo, "w") as arquivo:
        for nome, telefone in agenda:
            arquivo.write("{}#{}\n".format(nome, telefone))


def le():
    """Replace the agenda with the contents of a user-chosen file.

    Fix: ``with`` guarantees the file is closed even on malformed lines.
    """
    global agenda
    agenda = []
    nomearquivo = pedearquivo()
    with open(nomearquivo, "r") as arquivo:
        for linha in arquivo:
            nome, telefone = linha.strip().split("#")
            agenda.append([nome, telefone])
def faixa(pergunta, i, f):
    """Keep asking *pergunta* until an int within [i, f] is given; return it.

    Fix: an out-of-range integer now also prints the guidance message
    (previously only non-numeric input produced any feedback).
    """
    while True:
        try:
            valor = int(input(pergunta))
        except ValueError:
            print("Valor inválido, favor digitar valor entre {} e {}".format(i, f))
            continue
        if i <= valor <= f:
            return valor
        print("Valor inválido, favor digitar valor entre {} e {}".format(i, f))
def menu():
print("""
0 - Sair
1 - Novo
2 - Alterar
3 - Excluir
4 - Lista
5 - Grava
6 - Lê
""")
print("{} contato(s) na agenda.".format(len(agenda)))
return faixa("Escola uma opção: ", 0, 6)
while True:
opcao = menu()
if opcao == 0:
break
elif opcao == 1:
novo()
elif opcao == 2:
altera()
elif opcao == 3:
apaga()
elif opcao == 4:
lista()
elif opcao == 5:
grava()
elif opcao == 6:
le()
| {
"repo_name": "laenderoliveira/exerclivropy",
"path": "cap09/exercicio-09-17.py",
"copies": "1",
"size": "2380",
"license": "mit",
"hash": 4973983456072742000,
"line_mean": 18.9495798319,
"line_max": 83,
"alpha_frac": 0.5366470093,
"autogenerated": false,
"ratio": 2.9490683229813666,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8968486218413597,
"avg_score": 0.0034458227735538662,
"num_lines": 119
} |
agenda = []  # global contact list: each entry is a [name, phone] pair


def pedenome():
    """Prompt for a name; '#' is replaced because it is the file separator."""
    return input("Nome: ").replace("#", "$")


def pedetelefone():
    """Prompt for a phone number; '#' is replaced (file field separator)."""
    return input("Telefone: ").replace("#", "$")


def pedearquivo():
    """Prompt the user for a file name."""
    return input("Nome do arquivo: ")


def mostra(nome, telefone):
    """Print a single contact entry."""
    print(f"Nome: {nome} Telefone: {telefone}")


def pesquisa(nome):
    """Return the agenda index of *nome* (case-insensitive), or None."""
    mnome = nome.lower()
    for p, e in enumerate(agenda):
        if e[0].lower() == mnome:
            return p
    return None


def novo():
    """Ask for a new contact and append it to the agenda."""
    nome = pedenome()
    telefone = pedetelefone()
    agenda.append([nome, telefone])


def apaga():
    """Delete a contact after asking for confirmation."""
    nome = pedenome()
    p = pesquisa(nome)
    if p is not None:
        m = "Certeza que quer excluir? (1 - Para confirmar / 0 - para sair): "
        valor = faixa(m, 0, 1)
        if valor == 1:
            del agenda[p]
        else:
            print("Não foi apagado!")
    else:
        print("Nome não encontrado.")


def altera():
    """Change an existing contact after asking for confirmation."""
    p = pesquisa(pedenome())
    if p is not None:
        print("Encontrado!")
        nome, telefone = agenda[p]
        mostra(nome, telefone)
        nome = pedenome()
        telefone = pedetelefone()
        m = "Certeza que quer alterar? (1 - Para confirmar / 0 - para sair): "
        valor = faixa(m, 0, 1)
        if valor == 1:
            agenda[p] = [nome, telefone]
        else:
            print("Não alterado!")
    else:
        print("Não encontrado")


def lista():
    """Print every contact, numbered from 1."""
    print("\nAgenda\n")
    print("-" * 6)
    for n, (nome, telefone) in enumerate(agenda, 1):
        print(n, end=' ')
        mostra(nome, telefone)
    print("-" * 6)


def grava():
    """Save the agenda to a file, one '#'-separated contact per line."""
    nomearquivo = pedearquivo()
    # fix: context manager guarantees the file is closed even on error
    with open(nomearquivo, "w") as arquivo:
        for nome, telefone in agenda:
            arquivo.write(f"{nome}#{telefone}\n")


def le():
    """Load the agenda from a file, replacing the current contents."""
    global agenda
    agenda = []
    nomearquivo = pedearquivo()
    with open(nomearquivo, "r") as arquivo:
        for linha in arquivo:
            nome, telefone = linha.strip().split("#")  # fix: 'telfone' typo
            agenda.append([nome, telefone])


def faixa(pergunta, i, f):
    """Keep prompting until an integer between i and f (inclusive) is typed."""
    while True:
        try:
            valor = int(input(pergunta))
        except ValueError:
            print(f"Valor inválido, favor digitar valor entre {i} e {f}")
            continue
        if i <= valor <= f:
            return valor
        # fix: out-of-range values used to re-prompt silently with no message
        print(f"Valor inválido, favor digitar valor entre {i} e {f}")


def ordena():
    """Sort contacts by name and show the sorted list."""
    # fix: 'global agenda' was unnecessary -- sort() mutates in place
    agenda.sort()
    lista()


def menu():
    """Show the menu and return the chosen option (0-7)."""
    print("""
0 - Sair
1 - Novo
2 - Alterar
3 - Excluir
4 - Lista
5 - Grava
6 - Lê
7 - Ordena por Nome
""")
    print(f"{len(agenda)} contato(s) na agenda.")
    return faixa("Escolha uma opção: ", 0, 7)  # fix: 'Escola' typo


if __name__ == "__main__":  # fix: guard the interactive loop against import
    while True:
        opcao = menu()
        if opcao == 0:
            break
        elif opcao == 1:
            novo()
        elif opcao == 2:
            altera()
        elif opcao == 3:
            apaga()
        elif opcao == 4:
            lista()
        elif opcao == 5:
            grava()
        elif opcao == 6:
            le()
        elif opcao == 7:
            ordena()
| {
"repo_name": "laenderoliveira/exerclivropy",
"path": "cap09/exercicio-09-21.py",
"copies": "1",
"size": "2962",
"license": "mit",
"hash": -5778730560117697000,
"line_mean": 19.2328767123,
"line_max": 78,
"alpha_frac": 0.5226811104,
"autogenerated": false,
"ratio": 2.9247524752475247,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39474335856475246,
"avg_score": null,
"num_lines": null
} |
# A general cross-validation
import numpy as np
class CrossValidation:
    """General cross-validation for both classification and regression.

    Assumptions:
    1) the shapes of X and Y are compatible with the input model;
    2) the model has no memory of previous trainings, i.e. every fit()
       depends only on the data passed to that call.
    """

    def __init__(self, X, Y):
        self.X, self.Y = X, Y

    def cv_regression(self, model, folds=3, shuffle=True):
        """Run cross-validation for regression.

        The model must provide fit(X, Y) and predict(X).
        Returns the out-of-fold predictions aligned with Y.
        """
        n = self.Y.shape[0]
        self.Y_pre = np.zeros(n)
        fold_len = int(n / folds)
        idx_all = np.arange(n)
        if shuffle:
            np.random.shuffle(idx_all)
        for i in range(folds):
            # The last fold absorbs the remainder when n % folds != 0.
            if i < folds - 1:
                _idx = idx_all[i*fold_len : (i+1)*fold_len]
            else:
                _idx = idx_all[i*fold_len : n]
            Xtest = self.X[_idx, :]
            Xtrain = np.delete(self.X, _idx, 0)
            Ytrain = np.delete(self.Y, _idx)
            model.fit(Xtrain, Ytrain)
            self.Y_pre[_idx] = model.predict(Xtest)
        return self.Y_pre

    def cv_classification(self, model, folds=3, shuffle=True):
        """Run cross-validation for classification, stratified by class.

        The model must provide fit, predict and predict_proba.
        For leave-one-out, set folds=-1 or folds=len(Y).
        Returns (predicted labels, predicted class probabilities).
        """
        if folds == -1:
            folds = self.Y.shape[0]
        cc = np.unique(self.Y)
        self.Ystate = np.zeros(self.Y.shape[0])
        self.Yscore = np.zeros((self.Y.shape[0], len(cc)))
        # Split the indices of each class separately so every fold keeps
        # roughly the original class proportions.
        idx_all = []
        fd_lens = []
        for i in range(len(cc)):
            _idx = np.where(self.Y == cc[i])[0]
            if shuffle:
                np.random.shuffle(_idx)
            idx_all.append(_idx)
            fd_lens.append(int(len(_idx) / folds))
        for i in range(folds):
            idx_use = np.array([], "int")
            # Note: folds was already rewritten above, so the original
            # 'folds == -1' branch could never trigger here.
            if folds == self.Y.shape[0]:
                # Leave-one-out: each fold is a single sample.
                idx_use = [i]
            else:
                for j in range(len(cc)):
                    if i < folds - 1:
                        _idx = idx_all[j][i*fd_lens[j]: (i+1)*fd_lens[j]]
                    else:
                        _idx = idx_all[j][i*fd_lens[j]:]
                    idx_use = np.append(idx_use, _idx)
            Xtest = self.X[idx_use, :]
            Xtrain = np.delete(self.X, idx_use, 0)
            Ytrain = np.delete(self.Y, idx_use)
            # Fix: the model used to be fitted twice on identical data;
            # a single fit serves both predict and predict_proba.
            model.fit(Xtrain, Ytrain)
            self.Ystate[idx_use] = model.predict(Xtest)
            self.Yscore[idx_use, :] = model.predict_proba(Xtest)
        return self.Ystate, self.Yscore
| {
"repo_name": "huangyh09/hilearn",
"path": "hilearn/models/cross_validation.py",
"copies": "1",
"size": "3018",
"license": "apache-2.0",
"hash": 8727750021901192000,
"line_mean": 35.8170731707,
"line_max": 75,
"alpha_frac": 0.5049701789,
"autogenerated": false,
"ratio": 3.597139451728248,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9538636139498378,
"avg_score": 0.012694698225973878,
"num_lines": 82
} |
"""A generally useful event scheduler class.
Each instance of this class manages its own queue.
No multi-threading is implied; you are supposed to hack that
yourself, or use a single instance per application.
Each instance is parametrized with two functions, one that is
supposed to return the current time, one that is supposed to
implement a delay. You can implement real-time scheduling by
substituting time and sleep from built-in module time, or you can
implement simulated time by writing your own functions. This can
also be used to integrate scheduling with STDWIN events; the delay
function is allowed to modify the queue. Time can be expressed as
integers or floating point numbers, as long as it is consistent.
Events are specified by tuples (time, priority, action, argument).
As in UNIX, lower priority numbers mean higher priority; in this
way the queue can be maintained as a priority queue. Execution of the
event means calling the action function, passing it the argument
sequence in "argument" (remember that in Python, multiple function
arguments can be packed in a sequence).
The action function may be an instance method so it
has another way to reference private data (besides global variables).
"""
# XXX The timefunc and delayfunc should have been defined as methods
# XXX so you can define new kinds of schedulers using subclassing
# XXX instead of having to define a module or class just to hold
# XXX the global state of your particular time and delay functions.
import heapq
# TODO: grumpy modified version
#from collections import namedtuple
__all__ = ["scheduler"]
# TODO: Use namedtuple
# Event = namedtuple('Event', 'time, priority, action, argument')
class Event(object):
    """Scheduler event ordered by (time, priority); lower values run first.

    The callable payload (action/argument) never takes part in comparisons.
    """
    __slots__ = ['time', 'priority', 'action', 'argument']

    def __init__(self, time, priority, action, argument):
        self.time = time
        self.priority = priority
        self.action = action
        self.argument = argument

    def get_fields(self):
        """Return the event data as a (time, priority, action, argument) tuple."""
        return (self.time, self.priority, self.action, self.argument)

    def __eq__(s, o): return (s.time, s.priority) == (o.time, o.priority)
    # BUG FIX: __ne__ was missing.  Under Python 2 (Grumpy's target) "!="
    # would fall back to identity comparison and disagree with __eq__.
    def __ne__(s, o): return (s.time, s.priority) != (o.time, o.priority)
    def __lt__(s, o): return (s.time, s.priority) < (o.time, o.priority)
    def __le__(s, o): return (s.time, s.priority) <= (o.time, o.priority)
    def __gt__(s, o): return (s.time, s.priority) > (o.time, o.priority)
    def __ge__(s, o): return (s.time, s.priority) >= (o.time, o.priority)
class scheduler(object):
    """Single-threaded event scheduler driven by caller-supplied time and
    delay functions (Grumpy-compatible variant of the stdlib sched module)."""

    def __init__(self, timefunc, delayfunc):
        """Initialize a new instance, passing the time and delay
        functions"""
        # Heap of Event objects, ordered by Event's (time, priority) comparisons.
        self._queue = []
        self.timefunc = timefunc
        self.delayfunc = delayfunc

    def enterabs(self, time, priority, action, argument):
        """Enter a new event in the queue at an absolute time.
        Returns an ID for the event which can be used to remove it,
        if necessary.
        """
        event = Event(time, priority, action, argument)
        heapq.heappush(self._queue, event)
        return event # The ID

    def enter(self, delay, priority, action, argument):
        """A variant that specifies the time as a relative time.
        This is actually the more commonly used interface.
        """
        time = self.timefunc() + delay
        return self.enterabs(time, priority, action, argument)

    def cancel(self, event):
        """Remove an event from the queue.
        This must be presented the ID as returned by enter().
        If the event is not in the queue, this raises ValueError.
        """
        # list.remove breaks the heap invariant, so rebuild it afterwards.
        self._queue.remove(event)
        heapq.heapify(self._queue)

    def empty(self):
        """Check whether the queue is empty."""
        return not self._queue

    def run(self):
        """Execute events until the queue is empty.
        When there is a positive delay until the first event, the
        delay function is called and the event is left in the queue;
        otherwise, the event is removed from the queue and executed
        (its action function is called, passing it the argument). If
        the delay function returns prematurely, it is simply
        restarted.
        It is legal for both the delay function and the action
        function to modify the queue or to raise an exception;
        exceptions are not caught but the scheduler's state remains
        well-defined so run() may be called again.
        A questionable hack is added to allow other threads to run:
        just after an event is executed, a delay of 0 is executed, to
        avoid monopolizing the CPU when other threads are also
        runnable.
        """
        # localize variable access to minimize overhead
        # and to improve thread safety
        q = self._queue
        delayfunc = self.delayfunc
        timefunc = self.timefunc
        pop = heapq.heappop
        while q:
            # TODO: modified part of grumpy version.
            # Peek at the head without popping; only pop once it is due.
            checked_event = q[0]
            time, priority, action, argument = checked_event.get_fields()
            now = timefunc()
            if now < time:
                delayfunc(time - now)
            else:
                event = pop(q)
                # Verify that the event was not removed or altered
                # by another thread after we last looked at q[0].
                if event is checked_event:
                    action(*argument)
                    delayfunc(0)   # Let other threads run
                else:
                    # Someone changed the queue meanwhile: put the popped
                    # event back and re-evaluate on the next iteration.
                    heapq.heappush(q, event)

    @property
    def queue(self):
        """An ordered list of upcoming events.
        Events are named tuples with fields for:
            time, priority, action, arguments
        NOTE(review): under Python 3 semantics this returns a map iterator,
        not a list -- confirm callers expect that.
        """
        # Use heapq to sort the queue rather than using 'sorted(self._queue)'.
        # With heapq, two events scheduled at the same time will show in
        # the actual order they would be retrieved.
        events = self._queue[:]
        return map(heapq.heappop, [events]*len(events))
| {
"repo_name": "corona10/grumpy",
"path": "grumpy-runtime-src/third_party/stdlib/sched.py",
"copies": "5",
"size": "5964",
"license": "apache-2.0",
"hash": 2948077052532133000,
"line_mean": 41,
"line_max": 78,
"alpha_frac": 0.651240778,
"autogenerated": false,
"ratio": 4.31236442516269,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0013280119618147787,
"num_lines": 142
} |
"""A generally useful event scheduler class.
Each instance of this class manages its own queue.
No multi-threading is implied; you are supposed to hack that
yourself, or use a single instance per application.
Each instance is parametrized with two functions, one that is
supposed to return the current time, one that is supposed to
implement a delay. You can implement real-time scheduling by
substituting time and sleep from built-in module time, or you can
implement simulated time by writing your own functions. This can
also be used to integrate scheduling with STDWIN events; the delay
function is allowed to modify the queue. Time can be expressed as
integers or floating point numbers, as long as it is consistent.
Events are specified by tuples (time, priority, action, argument, kwargs).
As in UNIX, lower priority numbers mean higher priority; in this
way the queue can be maintained as a priority queue. Execution of the
event means calling the action function, passing it the argument
sequence in "argument" (remember that in Python, multiple function
arguments can be packed in a sequence) and keyword parameters in "kwargs".
The action function may be an instance method so it
has another way to reference private data (besides global variables).
"""
# XXX The timefunc and delayfunc should have been defined as methods
# XXX so you can define new kinds of schedulers using subclassing
# XXX instead of having to define a module or class just to hold
# XXX the global state of your particular time and delay functions.
import time
import heapq
from collections import namedtuple
try:
import threading
except ImportError:
import dummy_threading as threading
try:
from time import monotonic as _time
except ImportError:
from time import time as _time
__all__ = ["scheduler"]
class Event(namedtuple('Event', 'time, priority, action, argument, kwargs')):
    """A scheduled event.

    Ordering is defined purely by (time, priority): earlier times run
    first and lower priority numbers win ties.  The callable payload
    (action/argument/kwargs) never takes part in comparisons.
    """
    def _order_key(self):
        return (self.time, self.priority)
    def __eq__(s, o): return s._order_key() == (o.time, o.priority)
    def __ne__(s, o): return s._order_key() != (o.time, o.priority)
    def __lt__(s, o): return s._order_key() < (o.time, o.priority)
    def __le__(s, o): return s._order_key() <= (o.time, o.priority)
    def __gt__(s, o): return s._order_key() > (o.time, o.priority)
    def __ge__(s, o): return s._order_key() >= (o.time, o.priority)
# Unique default marker: distinguishes "kwargs not supplied" from an
# explicit empty dict passed by the caller.
_sentinel = object()
class scheduler:
    """Event scheduler usable from multiple threads.

    All queue mutations happen under one reentrant lock; the event heap
    itself is ordered by Event's (time, priority) comparisons.
    """

    def __init__(self, timefunc=_time, delayfunc=time.sleep):
        """Initialize a new instance, passing the time and delay
        functions"""
        self._queue = []
        # Reentrant so callbacks scheduling new events do not deadlock.
        self._lock = threading.RLock()
        self.timefunc = timefunc
        self.delayfunc = delayfunc

    def enterabs(self, time, priority, action, argument=(), kwargs=_sentinel):
        """Enter a new event in the queue at an absolute time.
        Returns an ID for the event which can be used to remove it,
        if necessary.
        """
        if kwargs is _sentinel:
            # Fresh dict per event; a shared mutable default would leak
            # state between events.
            kwargs = {}
        with self._lock:
            event = Event(time, priority, action, argument, kwargs)
            heapq.heappush(self._queue, event)
            return event # The ID

    def enter(self, delay, priority, action, argument=(), kwargs=_sentinel):
        """A variant that specifies the time as a relative time.
        This is actually the more commonly used interface.
        """
        with self._lock:
            time = self.timefunc() + delay
            return self.enterabs(time, priority, action, argument, kwargs)

    def cancel(self, event):
        """Remove an event from the queue.
        This must be presented the ID as returned by enter().
        If the event is not in the queue, this raises ValueError.
        """
        with self._lock:
            # list.remove breaks the heap invariant; rebuild it afterwards.
            self._queue.remove(event)
            heapq.heapify(self._queue)

    def empty(self):
        """Check whether the queue is empty."""
        with self._lock:
            return not self._queue

    def run(self, blocking=True):
        """Execute events until the queue is empty.
        If blocking is False executes the scheduled events due to
        expire soonest (if any) and then return the deadline of the
        next scheduled call in the scheduler.
        When there is a positive delay until the first event, the
        delay function is called and the event is left in the queue;
        otherwise, the event is removed from the queue and executed
        (its action function is called, passing it the argument). If
        the delay function returns prematurely, it is simply
        restarted.
        It is legal for both the delay function and the action
        function to modify the queue or to raise an exception;
        exceptions are not caught but the scheduler's state remains
        well-defined so run() may be called again.
        A questionable hack is added to allow other threads to run:
        just after an event is executed, a delay of 0 is executed, to
        avoid monopolizing the CPU when other threads are also
        runnable.
        """
        # localize variable access to minimize overhead
        # and to improve thread safety
        lock = self._lock
        q = self._queue
        delayfunc = self.delayfunc
        timefunc = self.timefunc
        pop = heapq.heappop
        while True:
            # Inspect (and possibly pop) the head under the lock; the
            # actual delay/action runs outside it so other threads can
            # schedule concurrently.
            with lock:
                if not q:
                    break
                time, priority, action, argument, kwargs = q[0]
                now = timefunc()
                if time > now:
                    delay = True
                else:
                    delay = False
                    pop(q)
            if delay:
                if not blocking:
                    # Report when the next event is due instead of sleeping.
                    return time - now
                delayfunc(time - now)
            else:
                action(*argument, **kwargs)
                delayfunc(0) # Let other threads run

    @property
    def queue(self):
        """An ordered list of upcoming events.
        Events are named tuples with fields for:
            time, priority, action, arguments, kwargs
        NOTE(review): this returns a map iterator under Python 3, not a
        list -- confirm callers expect that.
        """
        # Use heapq to sort the queue rather than using 'sorted(self._queue)'.
        # With heapq, two events scheduled at the same time will show in
        # the actual order they would be retrieved.
        with self._lock:
            events = self._queue[:]
            return map(heapq.heappop, [events]*len(events))
| {
"repo_name": "r-lyeh/scriptorium",
"path": "python/micropython/tests/bytecode/pylib-tests/sched.py",
"copies": "22",
"size": "6393",
"license": "unlicense",
"hash": -8084954636860253000,
"line_mean": 37.0535714286,
"line_max": 78,
"alpha_frac": 0.6389801345,
"autogenerated": false,
"ratio": 4.40592694693315,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""A generally useful event scheduler class.
Each instance of this class manages its own queue.
No multi-threading is implied; you are supposed to hack that
yourself, or use a single instance per application.
Each instance is parametrized with two functions, one that is
supposed to return the current time, one that is supposed to
implement a delay. You can implement real-time scheduling by
substituting time and sleep from built-in module time, or you can
implement simulated time by writing your own functions. This can
also be used to integrate scheduling with STDWIN events; the delay
function is allowed to modify the queue. Time can be expressed as
integers or floating point numbers, as long as it is consistent.
Events are specified by tuples (time, priority, action, argument).
As in UNIX, lower priority numbers mean higher priority; in this
way the queue can be maintained as a priority queue. Execution of the
event means calling the action function, passing it the argument.
Remember that in Python, multiple function arguments can be packed
in a tuple. The action function may be an instance method so it
has another way to reference private data (besides global variables).
Parameterless functions or methods cannot be used, however.
"""
# XXX The timefunc and delayfunc should have been defined as methods
# XXX so you can define new kinds of schedulers using subclassing
# XXX instead of having to define a module or class just to hold
# XXX the global state of your particular time and delay functions.
import heapq
__all__ = ["scheduler"]
class scheduler:
    """Single-threaded event scheduler driven by caller-supplied time and
    delay functions (legacy stdlib-style implementation)."""

    def __init__(self, timefunc, delayfunc):
        """Initialize a new instance, passing the time and delay
        functions"""
        # Heap of (time, priority, action, argument) tuples.
        self.queue = []
        self.timefunc = timefunc
        self.delayfunc = delayfunc

    def enterabs(self, time, priority, action, argument):
        """Enter a new event in the queue at an absolute time.
        Returns an ID for the event which can be used to remove it,
        if necessary.
        """
        event = time, priority, action, argument
        heapq.heappush(self.queue, event)
        return event # The ID

    def enter(self, delay, priority, action, argument):
        """A variant that specifies the time as a relative time.
        This is actually the more commonly used interface.
        """
        time = self.timefunc() + delay
        return self.enterabs(time, priority, action, argument)

    def cancel(self, event):
        """Remove an event from the queue.
        This must be presented the ID as returned by enter().
        If the event is not in the queue, this raises ValueError.
        """
        # fix: docstring claimed RuntimeError, but list.remove raises ValueError
        self.queue.remove(event)
        heapq.heapify(self.queue)

    def empty(self):
        """Check whether the queue is empty."""
        return not self.queue

    def run(self):
        """Execute events until the queue is empty.
        When there is a positive delay until the first event, the
        delay function is called and the event is left in the queue;
        otherwise, the event is removed from the queue and executed
        (its action function is called, passing it the argument). If
        the delay function returns prematurely, it is simply
        restarted.
        It is legal for both the delay function and the action
        function to modify the queue or to raise an exception;
        exceptions are not caught but the scheduler's state remains
        well-defined so run() may be called again.
        A questionable hack is added to allow other threads to run:
        just after an event is executed, a delay of 0 is executed, to
        avoid monopolizing the CPU when other threads are also
        runnable.
        """
        # localize variable access to minimize overhead
        # and to improve thread safety
        q = self.queue
        delayfunc = self.delayfunc
        timefunc = self.timefunc
        pop = heapq.heappop
        while q:
            time, priority, action, argument = checked_event = q[0]
            now = timefunc()
            if now < time:
                delayfunc(time - now)
            else:
                event = pop(q)
                # Verify that the event was not removed or altered
                # by another thread after we last looked at q[0].
                if event is checked_event:
                    action(*argument)  # fix: dead 'void =' binding removed
                    delayfunc(0)  # Let other threads run
                else:
                    # BUG FIX: was heapq.heappush(event) -- missing the
                    # queue argument, a guaranteed TypeError on this path.
                    heapq.heappush(q, event)
| {
"repo_name": "ericlink/adms-server",
"path": "playframework-dist/1.1-src/python/Lib/sched.py",
"copies": "2",
"size": "4652",
"license": "mit",
"hash": -4927871475221731000,
"line_mean": 37.7606837607,
"line_max": 70,
"alpha_frac": 0.6493981083,
"autogenerated": false,
"ratio": 4.633466135458168,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007505437612964494,
"num_lines": 117
} |
"""A general module with tools for use with the saltfp package"""
import math
import numpy as np
import scipy.ndimage as nd
from saltfit import interfit
from salterror import SaltError, SaltIOError
from FPRing import FPRing, ringfit
def fpfunc(z, r, t, coef=None):
    """A functional form fitting the Fabry Perot parameterization.  The
    FP parameterization is given by:

    $\\lambda = \\frac{A+Bz+Cz^2+Dz^3+Et}{(1+(r/F)^2)^{0.5}}$

    (docstring fixed to match the implementation below)

    Parameters
    ----------
    z: float or ndarray
        z position of the etalon
    r: float or ndarray
        r position in the image
    t: float or ndarray
        time (linear drift term)
    coef: list or ndarray
        The six FP coefficients, given as [A B C D E F]

    Returns
    -------
    w: float or ndarray
        wavelength at position r

    Raises
    ------
    Exception if coef is missing/incomplete or F is zero.
    """
    # fix: coef=None used to die with a TypeError in len(); raise the
    # intended, descriptive error instead.
    if coef is None or len(coef) != 6:
        raise Exception('Not enough FP Coefficients')
    if coef[5] == 0:
        raise Exception('F must not be zero')
    # On-axis wavelength: cubic in z plus a linear time drift.
    w = coef[0] + coef[1]*z + coef[2]*z**2 + coef[3]*z**3 + coef[4]*t
    # Radial falloff of the effective gap.
    w = w / (1 + (r / coef[5])**2)**0.5
    return w
def findrings(data, thresh=5, niter=5, minsize=10, axc=None, ayc=None):
    """findrings makes a rough calculation for the parameters of the rings
    based on single line cuts through the data.  It returns a list of
    FPRing objects.

    Parameters
    ----------
    data: 2-D ndarray image
    thresh, niter: currently unused; kept for interface compatibility
    minsize: minimum number of pixels for a peak
    axc, ayc: optional fixed center; estimated from the image when None

    Raises
    ------
    SaltError when no rings are detected.
    """
    ring_list = []
    # First guess: the middle of the image.
    if axc is None:
        xc = int(0.5 * len(data[0]))
    else:
        xc = axc
    if ayc is None:
        yc = int(0.5 * len(data))
    else:
        yc = ayc
    # One horizontal and one vertical cut through the estimated center.
    xdata = data[yc, :]
    ydata = data[:, xc]
    # Find the peaks along each cut and check they are consistent.
    ypeak_list = findpeaks(ydata, 0.4, minsize)
    xpeak_list = findpeaks(xdata, 0.4, minsize)
    if abs(len(ypeak_list) - len(xpeak_list)) > 1:
        msg = "Non-symmetrically rings in the image"
        #raise SaltError(msg)
    nrings = int(max(len(ypeak_list) / 2, len(xpeak_list) / 2))
    # Throw an error if no rings are detected.
    if nrings < 1:
        msg = "No rings detected in image"
        raise SaltError(msg)
    # Each pair of consecutive peaks is the two crossings of one ring.
    for i in range(0, nrings, 2):
        # Determine the y-center from the two vertical crossings.
        try:
            y1, y2 = ypeak_list[i]
            yarr = np.arange(y1, y2)
            ypa = y1 + ydata[y1:y2].argmax()
            ysiga = (abs(np.sum((yarr - ypa)**2 * ydata[y1:y2]) / ydata[y1:y2].sum()))**0.5
            y1, y2 = ypeak_list[i + 1]
            yarr = np.arange(y1, y2)
            ypb = y1 + ydata[y1:y2].argmax()
            ysigb = (abs(np.sum((yarr - ypb)**2 * ydata[y1:y2]) / ydata[y1:y2].sum()))**0.5
            if ayc is None:
                yc = 0.5 * (ypa + ypb)
            else:
                yc = ayc
            ymax = max(ydata[ypa], ydata[ypb])
            yrad = 0.5 * abs(ypb - ypa)
            ysig = 0.5 * (ysiga + ysigb)
        except Exception:
            # fix: was Python 2 'except Exception, e' with unused e
            yc = yc
            yrad = 0
            ysig = 0
            ymax = ydata.max()
        # Determine the x-center from the two horizontal crossings.
        try:
            x1, x2 = xpeak_list[i]
            xarr = np.arange(x1, x2)
            xpa = x1 + xdata[x1:x2].argmax()
            xsiga = (abs(np.sum((xarr - xpa)**2 * xdata[x1:x2]) / xdata[x1:x2].sum()))**0.5
            x1, x2 = xpeak_list[i + 1]
            xpb = x1 + xdata[x1:x2].argmax()
            xarr = np.arange(x1, x2)
            xsigb = (abs(np.sum((xarr - xpb)**2 * xdata[x1:x2]) / xdata[x1:x2].sum()))**0.5
            if axc is None:
                xc = 0.5 * (xpa + xpb)
            else:
                xc = axc
            xmax = max(xdata[xpa], xdata[xpb])
            xsig = 0.5 * (xsiga + xsigb)
            xrad = 0.5 * abs(xpa - xpb)
        except Exception:  # fix: was a bare 'except:'
            xc = xc
            xrad = 0
            xsig = 0
            xmax = xdata.max()
        prad_err = max(1.0, 0.5 * abs(yrad - xrad))
        ring_list.append(FPRing(xc, yc, max(yrad, xrad), max(xmax, ymax),
                                max(xsig, ysig), prad_err=prad_err))
    return ring_list
def findcenter(data, ring, method, niter=5, conv=0.05):
    """Refine an FPRing's center (and radius) with the requested method.

    method: one of 'fit', 'max', 'center' or 'moment' (case-insensitive).
    niter: maximum refinement iterations for the 'max' method.
    conv: convergence threshold on the center shift, in pixels.

    Returns the updated ring; raises SaltError for an unknown method.
    """
    method = method.upper()
    if method == 'FIT':
        ring = ringfit(data, fpring=ring)
    elif method == 'MAX':
        # Repeat the flux-maximizing search until the center stops moving.
        count = 0
        shift = conv + 1
        while count < niter and shift > conv:
            xc, yc = maxflux_center(data, ring.xc, ring.yc, ring.prad, 10, maxiter=20)
            shift = ((ring.xc - xc)**2 + (ring.yc - yc)**2)**0.5
            count += 1
        rad, rad_err = findradius(data, xc, yc, ring.prad, 10)
        ring.xc = xc
        ring.yc = yc
        ring.prad = rad
        ring.prad_err = rad_err
    elif method == 'CENTER':
        xc, yc, rad, rad_err = centerring(data, ring.xc, ring.yc,
                                          radmax=ring.prad, radstep=ring.sigma, nbins=8)
        shift = ((ring.xc - xc)**2 + (ring.yc - yc)**2)**0.5
        ring.xc = xc
        ring.yc = yc
        ring.prad = rad
        ring.prad_err = rad_err
    elif method == 'MOMENT':
        pass
    else:
        raise SaltError('%s is not a valid method' % method)
    return ring
def maxflux_center(data, axc=None, ayc=None, radmax=450, radstep=5, maxiter=100):
    """Hill-climb the ring center so the flux in the annulus of radius
    radmax (half-width radstep) is maximized.

    Returns the best (xc, yc) found within maxiter sweeps.
    """
    ylen, xlen = data.shape
    if axc is None:
        axc = 0.5 * xlen
    if ayc is None:
        ayc = 0.5 * ylen
    best_flux = 0
    sweeps = 0
    improved = True
    bxc = axc
    byc = ayc
    # Scan the 3x3 neighborhood; the center is allowed to update mid-scan,
    # exactly as the original search did.
    while improved and sweeps < maxiter:
        sweeps += 1
        improved = False
        for i in (-1, 0, 1):
            cand_x = bxc + i
            for j in (-1, 0, 1):
                cand_y = byc + j
                flux = calcflux(data, cand_x, cand_y, radmax, radstep)
                if flux > best_flux:
                    bxc = cand_x
                    byc = cand_y
                    best_flux = flux
                    improved = True
    return bxc, byc
def findradius(data, axc=None, ayc=None, radmax=450, radstep=5, maxiter=100, rstep=0.25):
    """Find the ring radius by hill-climbing the annulus flux.

    Starting from radmax, the radius is nudged by +/- rstep while the
    flux in the annulus of half-width radstep keeps increasing.

    Returns (radius, radius error estimate).
    """
    ylen, xlen = data.shape
    if axc is None:
        axc = 0.5 * xlen
    if ayc is None:
        ayc = 0.5 * ylen
    mflux = calcflux(data, axc, ayc, radmax, radstep)
    niter = 0
    found = True
    brad = radmax
    # Crude SNR-style error estimate: rad / sqrt(flux).
    brad_err = max(brad * (mflux / mflux**2)**0.5, 1.0)
    # fix: the loop bound was hard-coded to 100, ignoring the maxiter
    # parameter entirely.
    while found and niter < maxiter:
        niter += 1
        found = False
        for i in [-rstep, rstep]:
            rad = brad + i
            flux = calcflux(data, axc, ayc, rad, radstep)
            if mflux < flux:
                brad = rad
                brad_err = (rad * (flux / flux**2)**0.5)
                mflux = flux
                found = True
    return brad, brad_err
def calcflux(data, axc, ayc, radmax, radstep):
    """Sum the flux in the annulus radmax-radstep < r < radmax+radstep
    centered on (axc, ayc)."""
    yy, xx = np.indices(data.shape)
    dist = ((xx - axc)**2 + (yy - ayc)**2)**0.5
    annulus = (dist > radmax - radstep) & (dist < radmax + radstep)
    return data[annulus].sum()
def centerring(data, axc, ayc, radmax=450, radstep=50, nbins=8):
    """Calculate the center of the ring by determining the radius of the
    line in several bins

    data: 2-D image array
    axc, ayc: current center estimate in pixels
    radmax: expected ring radius; only pixels with |r - radmax| < radstep
        are used
    nbins: number of azimuthal bins

    Returns (xc, yc, mean radius, radius std).
    """
    # NOTE(review): rad/rad_err are assigned but never used below.
    rad=radmax
    rad_err=1.0
    #set up the radius and theta
    y,x=np.indices(data.shape)
    r=((x-axc)**2+(y-ayc)**2)**0.5
    # arctan gives (-pi/2, pi/2); the two shifts below map theta to
    # (0, 2*pi).  NOTE(review): pixels with x == axc divide by zero here
    # -- presumably masked out downstream; confirm.
    theta=np.arctan((y-ayc)/(x-axc))
    theta[(x-axc<0)]+=math.pi
    theta += 0.5*math.pi
    #calculate the centroid in each bin
    nsteps=2*math.pi/nbins
    rad_arr=np.zeros(nbins)
    theta_arr=np.zeros(nbins)
    for i in range(nbins):
        t1=i*nsteps
        t2=t1+nsteps
        # Pixels in this azimuthal wedge and near the expected radius.
        mask=(theta>t1)*(theta<t2)*(abs(r-radmax)<radstep)
        theta_arr[i]=0.5*(t1+t2)
        try:
            rad_arr[i]=fitradius(r[mask], data[mask])
        except:
            # NOTE(review): bare except leaves rad_arr[i] at 0 when the
            # fit fails, which drags the mean radius down -- confirm intended.
            pass
        #r[mask][j]
    # Convert per-bin radii back to x/y offsets and average them.
    x_arr=rad_arr*np.cos(theta_arr-0.5*math.pi)
    y_arr=rad_arr*np.sin(theta_arr-0.5*math.pi)
    return axc+x_arr.mean(), ayc+y_arr.mean(), rad_arr.mean(), rad_arr.std()
def fitradius(radius, data):
    """Fit a line to the data

    Fits a 3rd-order polynomial to (radius, data) with interfit and
    returns the radius sample at which the fitted curve peaks.
    """
    it=interfit(radius, data, function='polynomial', order=3)
    it.interfit()
    # Evaluate the fit at the input radii and take the argmax.
    d=it(radius)
    return radius[d.argmax()]
def findpeaks(data, fpeak=0.8, minsize=10):
    """Locate peak intervals in a 1-D profile.

    The profile is median-filtered, thresholded at fpeak times its
    maximum, and each connected region of at least minsize pixels is
    returned as a (start, end) index pair.
    """
    # Median filtering suppresses outliers before thresholding.
    smoothed = nd.filters.median_filter(data, size=minsize)
    bright = smoothed > fpeak * smoothed.max()
    # Label each connected above-threshold region.
    labels, count = nd.label(bright)
    peaks = []
    for region in range(1, count + 1):
        idx = np.where(labels == region)[0]
        if len(idx) >= minsize:
            peaks.append((idx.min(), idx.max()))
    return peaks
| {
"repo_name": "saltastro/pysalt",
"path": "saltfp/fptools.py",
"copies": "2",
"size": "8336",
"license": "bsd-3-clause",
"hash": 6850688405180854000,
"line_mean": 27.7448275862,
"line_max": 109,
"alpha_frac": 0.5832533589,
"autogenerated": false,
"ratio": 2.8248051507963403,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9135427226313146,
"avg_score": 0.054526256676638525,
"num_lines": 290
} |
""" A general notion of data grouped by key.
This module generally mimics the pandas API.
"""
from .functions_std.base import placeholder_like, struct
from .functions_std.error import *
from .column import build_dataframe, AbstractColumn
from .types import StructField, StructType
__all__ = ['KeyedGroup', 'groupby']
class KeyedGroup(object):
    """ A dataset, grouped by some key.

    Instances are produced by groupby(); users should not have to build
    them directly.
    """

    def __init__(self, ref, key_col, value_col):
        # All three share the same referring dataframe.
        self._ref = ref                  # The referring dataframe.
        self._key_col = key_col          # Key column, rooted at the same ref.
        self._value_col = value_col      # Value column, rooted at the same ref.

    def agg(self, aggobj, name=None):
        """ Performs an aggregation on the given dataframe.

        aggobj: an object that describes some aggregation.  It can be one of:
          - a string naming a known aggregation function or UDAF,
            such as 'min', 'sum', 'count', ...
          - a python function that accepts a column or dataframe and returns
            an observable (some restrictions apply, but a lot of things are
            tolerated)
          - a non-empty list or tuple of accepted aggregation objects; the
            fields are labeled '_XXX' with XXX the position (starting from 1)
          - a non-empty list of (field name, agg object) pairs
          - a dictionary {field name: agg obj}; the resulting structure is
            returned sorted in field name order
          - an OrderedDict object, whose order is preserved
        """
        return _agg_ks(self, aggobj, name)

    def transform(self, fun, name=None):
        create_error("not implemented")

    def to_df(self, name=None):
        create_error("not implemented")

    def __repr__(self):
        return "KeyedGroup(ref={}, key={}, value={})".format(
            self._ref, self._key_col.type, self._value_col.type)
def groupby(obj, key):
    """ Group a column (or a dataframe seen as a column) by *key*,
    returning a KeyedGroup.  Unsupported inputs are reported through
    create_error.
    """
    if isinstance(obj, AbstractColumn):
        # We group over the whole dataframe or a column of Karps data.
        return _groupby_ks(obj.reference, key.as_column(), obj.as_column())
    msg = "first argument not understood: {} {}".format(type(obj), obj)
    create_error(msg)
def _groupby_ks(obj, key_obj, value_obj):
    """ Implementation of the grouping logic for karps objects.

    Validates that the key and value are columns rooted at the same
    dataframe *obj* before building the KeyedGroup.
    """
    if not isinstance(key_obj, AbstractColumn):
        create_error("Key can only be column for now, got type {}".format(type(key_obj)))
    key_col = key_obj.as_column()
    value_col = value_obj.as_column()
    # Both columns must refer to the dataframe being grouped.
    if key_col.reference is not obj:
        msg = "Key reference: {} is distinct from group reference: {}".format(
            key_col.reference, obj)
        create_error(msg)
    if value_col.reference is not obj:
        msg = "Value reference {} is distinct from group reference: {}".format(
            value_col.reference, obj)
        create_error(msg)
    return KeyedGroup(obj, key_col, value_col)
def _agg_ks(kg, aggobj, name):
  """ Implementation of the aggregation logic for karps keyed groups.

  kg: the KeyedGroup being aggregated.
  aggobj: the aggregation description (see KeyedGroup.agg for accepted forms).
  name: optional extra path element for the resulting dataframe.
  Returns a dataframe with the (renamed) key column and one column per
  aggregation result.
  """
  # The implementation works as follows:
  # - build a dataframe that packs the data: {key:keyDT, value:valueDT}
  # - run structured transform on it, the results is also {key: keyDT, value:aggDT}
  # - rename and if necessary unpack the key and values in a separate transform.
  ph = placeholder_like(kg._value_col)
  # For now, some operations like filtering are only possible with columns.
  ph_col = ph.as_column()
  out = _process_aggobj(aggobj, ph_col)
  # The fields of the return type.
  df_pre = struct([('key', kg._key_col), ('value', kg._value_col)]).as_dataframe(name_hint="agg_pre")
  # The data structure returned by the aggregation:
  # a single aggregation keeps its bare type; several get packed in a struct.
  if len(out) == 1:
    (_, o) = out[0]
    dt_value = o.type
  else:
    dt_value = StructType([StructField(o.type, fname) for (fname, o) in out])
  dt = StructType([StructField(kg._key_col.type, "key"), StructField(dt_value, "value")])
  df = build_dataframe(
    op_name="org.spark.FunctionalShuffle",
    type_p=dt,
    parents=[df_pre, ph] + [obs for (_, obs) in out],
    name_hint="shuffle",
    path_extra=name)
  # Give the proper names to the output columns:
  # reuse the key column's original field name when it has one.
  key_name = kg._key_col._field_name if kg._key_col._field_name else 'key'
  key_col = (key_name, df['key'])
  if len(out) == 1:
    (fname, _) = out[0]
    value_cols = [(fname, df['value'])]
  else:
    value_cols = [(fname, df['value'][fname]) for (fname, _) in out]
  df_post = struct([key_col] + value_cols).as_dataframe(name_hint="agg_post")
  return df_post
def _process_aggobj(aggobj, pholder):
  """Entry point for normalizing an aggregation description.

  A bare callable is wrapped into a single (name, fun) pair before being
  handed to _process_aggobj0; anything else is passed through directly.
  """
  if not callable(aggobj):
    return _process_aggobj0(aggobj, pholder)
  # No need to be too smart for now about the return name:
  return _process_aggobj0([(aggobj.__name__, aggobj)], pholder)
def _process_aggobj0(aggobj, pholder):
  """Normalizes an aggregation description against a placeholder column.

  Accepts a dict (flattened and sorted by key), a list/tuple of
  (name, agg object) pairs, or a callable applied to the placeholder.
  NOTE(review): the string form ('min', 'sum', ...) advertised in
  KeyedGroup.agg's docstring is not handled here and would hit
  create_error — confirm whether strings are resolved elsewhere.
  """
  # Returns a list of (name, observable)
  if isinstance(aggobj, dict):
    # Flatten the dictionary and sort by name:
    l = sorted(aggobj.items(), key=lambda x:x[0])
    return _process_aggobj0(l, pholder)
  if isinstance(aggobj, (list, tuple)):
    aggobj = list(aggobj)
    res = []
    # NOTE(review): list elements that are not 2-tuples are silently
    # dropped, and the recursive call may itself return a list, producing
    # nested (name, [...]) pairs — verify callers expect that shape.
    for obj in aggobj:
      if isinstance(obj, tuple):
        assert len(obj) == 2, obj
        (key, obj1) = obj
        assert isinstance(key, str), (type(key), key)
        res.append((key, _process_aggobj0(obj1, pholder)))
    return res
  if hasattr(aggobj, '__call__'):
    # It is a function.
    return aggobj(pholder)
  create_error("Aggregation object not understood: {}: {}".format(type(aggobj), aggobj))
| {
"repo_name": "tjhunter/karps",
"path": "python/karps/groups.py",
"copies": "1",
"size": "5383",
"license": "apache-2.0",
"hash": -6758677642348388000,
"line_mean": 38.8740740741,
"line_max": 108,
"alpha_frac": 0.6695151403,
"autogenerated": false,
"ratio": 3.477390180878553,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46469053211785527,
"avg_score": null,
"num_lines": null
} |
"""A general-purpose javascript compiler."""
from __future__ import unicode_literals
import os.path, subprocess
from django.conf import settings
from django.utils import six
import optimizations
class JavascriptError(Exception):
    """Raised when javascript compilation fails.

    The short human-readable message is the exception's argument; the full
    compiler output is kept separately in ``detail_message``.
    """
    def __init__(self, message, detail_message):
        """Stores *message* on the exception and keeps *detail_message*."""
        self.detail_message = detail_message
        super(JavascriptError, self).__init__(message)
class JavascriptCompiler(object):
    """A compiler of javascript code."""
    def __init__(self, cache_name="optimizations.javascriptcompiler"):
        """Initializes the JavascriptCompiler."""
        # NOTE(review): cache_name is accepted but never used — presumably a
        # leftover from a caching layer; confirm before removing it.
        # Path to the bundled YUI Compressor jar shipped with this package.
        self._compressor_path = os.path.join(os.path.abspath(os.path.dirname(optimizations.__file__)), "resources", "yuicompressor.jar")
    def compile(self, source, force_compile=None):
        """Compiles the given javascript source code.

        source: the javascript, as text or utf-8 bytes.
        force_compile: when None, compilation is skipped iff settings.DEBUG
        is true; pass True/False to override.
        Returns the compressed javascript as bytes, or the (encoded) input
        unchanged when compilation is skipped.
        Raises JavascriptError when the compressor exits non-zero.
        """
        if force_compile is None:
            force_compile = not settings.DEBUG
        # Convert to string.
        if isinstance(source, six.string_types):
            source = source.encode("utf-8")
        # Don't compile in debug mode.
        if not force_compile:
            return source
        # Compile the source.
        # Runs the YUI Compressor jar, feeding the source over stdin.
        process = subprocess.Popen(
            ("java", "-jar", self._compressor_path, "--type", "js", "--charset", "utf-8", "-v"),
            stdin = subprocess.PIPE,
            stdout = subprocess.PIPE,
            stderr = subprocess.PIPE,
        )
        stdoutdata, stderrdata = process.communicate(source)
        # Check it all worked.
        if process.returncode != 0:
            raise JavascriptError("Error while compiling javascript.", stderrdata)
        return stdoutdata
# Shared module-level instance used by callers that don't need custom config.
default_javascript_compiler = JavascriptCompiler()
| {
"repo_name": "etianen/django-optimizations",
"path": "src/optimizations/javascriptcompiler.py",
"copies": "1",
"size": "1814",
"license": "bsd-3-clause",
"hash": 6789613758103089000,
"line_mean": 31.9818181818,
"line_max": 136,
"alpha_frac": 0.6422271224,
"autogenerated": false,
"ratio": 4.569269521410579,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5711496643810579,
"avg_score": null,
"num_lines": null
} |
# A general purpose MFC CCtrlView view that uses Scintilla.
from . import control
from . import IDLEenvironment # IDLE emulation.
from pywin.mfc import docview
from pywin.mfc import dialog
from . import scintillacon
import win32con
import win32ui
import afxres
import string
import array
import sys
import types
import __main__ # for attribute lookup
from . import bindings
from . import keycodes
import struct
import re
import os
# Dialog-template resource ordinal for the custom print dialog.
PRINTDLGORD = 1538
# Control id in that dialog — per the name, the magnification edit box
# (TODO confirm against the dialog resource).
IDC_PRINT_MAG_EDIT = 1010
# "Format range" message used for printing (WM_USER-relative, see FormatRange).
EM_FORMATRANGE = win32con.WM_USER+57
# Characters treated as part of a word when scanning identifiers.
wordbreaks = "._" + string.ascii_uppercase + string.ascii_lowercase + string.digits
# Matches "import X" lines so OnCmdFileLocate can open the named module.
patImport=re.compile('import (?P<name>.*)')
_event_commands = [
# File menu
"win32ui.ID_FILE_LOCATE", "win32ui.ID_FILE_CHECK", "afxres.ID_FILE_CLOSE",
"afxres.ID_FILE_NEW", "afxres.ID_FILE_OPEN", "afxres.ID_FILE_SAVE",
"afxres.ID_FILE_SAVE_AS", "win32ui.ID_FILE_SAVE_ALL",
# Edit menu
"afxres.ID_EDIT_UNDO", "afxres.ID_EDIT_REDO", "afxres.ID_EDIT_CUT",
"afxres.ID_EDIT_COPY", "afxres.ID_EDIT_PASTE", "afxres.ID_EDIT_SELECT_ALL",
"afxres.ID_EDIT_FIND", "afxres.ID_EDIT_REPEAT", "afxres.ID_EDIT_REPLACE",
# View menu
"win32ui.ID_VIEW_WHITESPACE", "win32ui.ID_VIEW_FIXED_FONT",
"win32ui.ID_VIEW_BROWSE", "win32ui.ID_VIEW_INTERACTIVE",
# Window menu
"afxres.ID_WINDOW_ARRANGE", "afxres.ID_WINDOW_CASCADE",
"afxres.ID_WINDOW_NEW", "afxres.ID_WINDOW_SPLIT",
"afxres.ID_WINDOW_TILE_HORZ", "afxres.ID_WINDOW_TILE_VERT",
# Others
"afxres.ID_APP_EXIT", "afxres.ID_APP_ABOUT",
]
_extra_event_commands = [
("EditDelete", afxres.ID_EDIT_CLEAR),
("LocateModule", win32ui.ID_FILE_LOCATE),
("GotoLine", win32ui.ID_EDIT_GOTO_LINE),
("DbgBreakpointToggle", win32ui.IDC_DBG_ADD),
("DbgGo", win32ui.IDC_DBG_GO),
("DbgStepOver", win32ui.IDC_DBG_STEPOVER),
("DbgStep", win32ui.IDC_DBG_STEP),
("DbgStepOut", win32ui.IDC_DBG_STEPOUT),
("DbgBreakpointClearAll", win32ui.IDC_DBG_CLEAR),
("DbgClose", win32ui.IDC_DBG_CLOSE),
]
event_commands = []
def _CreateEvents():
    """Populate event_commands with (event_name, command_id) pairs.

    Each dotted constant name in _event_commands ("win32ui.ID_FILE_LOCATE")
    is resolved to its integer command id, and an event name is derived
    from everything after the first underscore ("FileLocate").
    """
    # Resolve "module.CONSTANT" through an explicit module table instead of
    # eval() — same result, no dynamic code evaluation.
    modules = {"win32ui": win32ui, "afxres": afxres}
    for name in _event_commands:
        mod_name, attr_name = name.split(".", 1)
        val = getattr(modules[mod_name], attr_name)
        # Everything after the first "_" becomes the CamelCased event name.
        name_parts = name.split("_")[1:]
        name_parts = [p.capitalize() for p in name_parts]
        event = ''.join(name_parts)
        event_commands.append((event, val))
    for ev_name, cmd_id in _extra_event_commands:
        event_commands.append((ev_name, cmd_id))
_CreateEvents()
del _event_commands; del _extra_event_commands
command_reflectors = [
(win32ui.ID_EDIT_UNDO, win32con.WM_UNDO),
(win32ui.ID_EDIT_REDO, scintillacon.SCI_REDO),
(win32ui.ID_EDIT_CUT, win32con.WM_CUT),
(win32ui.ID_EDIT_COPY, win32con.WM_COPY),
(win32ui.ID_EDIT_PASTE, win32con.WM_PASTE),
(win32ui.ID_EDIT_CLEAR, win32con.WM_CLEAR),
(win32ui.ID_EDIT_SELECT_ALL, scintillacon.SCI_SELECTALL),
]
def DoBraceMatch(control):
    """Highlight the brace adjacent to the caret together with its partner,
    or flag it as unmatched."""
    BRACES = "[](){}"
    caret = control.SCIGetCurrentPos()
    # Prefer a brace just before the caret, then one at the caret.
    pos = -1
    if caret and control.SCIGetCharAt(caret - 1) in BRACES:
        pos = caret - 1
    elif control.SCIGetCharAt(caret) in BRACES:
        pos = caret
    partner = control.SCIBraceMatch(pos, 0) if pos != -1 else -1
    if pos != -1 and partner == -1:
        control.SCIBraceBadHighlight(pos)
    else:
        # Sets both positions, or clears the highlight when both are -1.
        control.SCIBraceHighlight(pos, partner)
def _get_class_attributes(ob):
# Recurse into base classes looking for attributes
items = []
try:
items = items + dir(ob)
for i in ob.__bases__:
for item in _get_class_attributes(i):
if item not in items:
items.append(item)
except AttributeError:
pass
return items
# Supposed to look like an MFC CEditView, but
# also supports IDLE extensions and other source code generic features.
class CScintillaView(docview.CtrlView, control.CScintillaColorEditInterface):
    def __init__(self, doc):
        """Creates the Scintilla control as a child view of *doc* and wires
        up bindings and the IDLE emulation layer."""
        docview.CtrlView.__init__(self, doc, "Scintilla", win32con.WS_CHILD | win32con.WS_VSCROLL | win32con.WS_HSCROLL | win32con.WS_CLIPCHILDREN | win32con.WS_VISIBLE)
        self._tabWidth = 8 # Mirror of what we send to Scintilla - never change this directly
        self.bAutoCompleteAttributes = 1 # drives the popup in KeyDotEvent/_AutoComplete
        self.bShowCallTips = 1
        self.bMatchBraces = 0 # Editor option will default this to true later!
        self.bindings = bindings.BindingsManager(self)
        self.idle = IDLEenvironment.IDLEEditorWindow(self)
        self.idle.IDLEExtension("AutoExpand")
        # SendScintilla is called so frequently it is worth optimizing.
        self.SendScintilla = self._obj_.SendMessage
    def OnDestroy(self, msg):
        # NOTE(review): this definition is shadowed by a second OnDestroy
        # defined later in this class — only the later one ever runs.
        # Drop the cached bound method to break the ref cycle with the control.
        self.SendScintilla = None
        return docview.CtrlView.OnDestroy(self, msg)
def _MakeColorizer(self):
ext = os.path.splitext(self.GetDocument().GetPathName())[1]
from . import formatter
return formatter.BuiltinPythonSourceFormatter(self, ext)
# def SendScintilla(self, msg, w=0, l=0):
# return self._obj_.SendMessage(msg, w, l)
def SCISetTabWidth(self, width):
# I need to remember the tab-width for the AutoIndent extension. This may go.
self._tabWidth = width
control.CScintillaEditInterface.SCISetTabWidth(self, width)
def GetTabWidth(self):
return self._tabWidth
def HookHandlers(self):
# Create events for all the menu names.
for name, val in event_commands:
# handler = lambda id, code, tosend=val, parent=parent: parent.OnCommand(tosend, 0) and 0
self.bindings.bind(name, None, cid=val)
# Hook commands that do nothing other than send Scintilla messages.
for command, reflection in command_reflectors:
handler = lambda id, code, ss=self.SendScintilla, tosend=reflection: ss(tosend) and 0
self.HookCommand(handler, command)
self.HookCommand(self.OnCmdViewWS, win32ui.ID_VIEW_WHITESPACE)
self.HookCommandUpdate(self.OnUpdateViewWS, win32ui.ID_VIEW_WHITESPACE)
self.HookCommand(self.OnCmdViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
self.HookCommandUpdate(self.OnUpdateViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
self.HookCommand(self.OnCmdViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
self.HookCommandUpdate(self.OnUpdateViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
self.HookCommand(self.OnCmdViewEOL, win32ui.ID_VIEW_EOL)
self.HookCommandUpdate(self.OnUpdateViewEOL, win32ui.ID_VIEW_EOL)
self.HookCommand(self.OnCmdViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
self.HookCommandUpdate(self.OnUpdateViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
self.HookCommand(self.OnCmdFileLocate, win32ui.ID_FILE_LOCATE)
self.HookCommand(self.OnCmdEditFind, win32ui.ID_EDIT_FIND)
self.HookCommand(self.OnCmdEditRepeat, win32ui.ID_EDIT_REPEAT)
self.HookCommand(self.OnCmdEditReplace, win32ui.ID_EDIT_REPLACE)
self.HookCommand(self.OnCmdGotoLine, win32ui.ID_EDIT_GOTO_LINE)
self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT)
self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT_DIRECT)
self.HookCommand(self.OnFilePrintPreview,
win32ui.ID_FILE_PRINT_PREVIEW)
# Key bindings.
self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN)
# Hook wheeley mouse events
# self.HookMessage(self.OnMouseWheel, win32con.WM_MOUSEWHEEL)
self.HookFormatter()
def OnInitialUpdate(self):
doc = self.GetDocument()
# Enable Unicode
self.SendScintilla(scintillacon.SCI_SETCODEPAGE, scintillacon.SC_CP_UTF8, 0)
self.SendScintilla(scintillacon.SCI_SETKEYSUNICODE, 1, 0)
# Create margins
self.SendScintilla(scintillacon.SCI_SETMARGINTYPEN, 1, scintillacon.SC_MARGIN_SYMBOL);
self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 1, 0xF);
self.SendScintilla(scintillacon.SCI_SETMARGINTYPEN, 2, scintillacon.SC_MARGIN_SYMBOL);
self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 2, scintillacon.SC_MASK_FOLDERS);
self.SendScintilla(scintillacon.SCI_SETMARGINSENSITIVEN, 2, 1);
self.GetDocument().HookViewNotifications(self) # is there an MFC way to grab this?
self.HookHandlers()
# Load the configuration information.
self.OnWinIniChange(None)
self.SetSel()
self.GetDocument().FinalizeViewCreation(self) # is there an MFC way to grab this?
def _GetSubConfigNames(self):
return None # By default we use only sections without sub-sections.
def OnWinIniChange(self, section = None):
self.bindings.prepare_configure()
try:
self.DoConfigChange()
finally:
self.bindings.complete_configure()
def DoConfigChange(self):
# Bit of a hack I dont kow what to do about - these should be "editor options"
from pywin.framework.editor import GetEditorOption
self.bAutoCompleteAttributes = GetEditorOption("Autocomplete Attributes", 1)
self.bShowCallTips = GetEditorOption("Show Call Tips", 1)
# Update the key map and extension data.
configManager.configure(self, self._GetSubConfigNames())
if configManager.last_error:
win32ui.MessageBox(configManager.last_error, "Configuration Error")
self.bMatchBraces = GetEditorOption("Match Braces", 1)
self.ApplyFormattingStyles(1)
def OnDestroy(self, msg):
self.bindings.close()
self.bindings = None
self.idle.close()
self.idle = None
control.CScintillaColorEditInterface.close(self)
return docview.CtrlView.OnDestroy(self, msg)
def OnMouseWheel(self, msg):
zDelta = msg[2] >> 16
vpos = self.GetScrollPos(win32con.SB_VERT)
vpos = vpos - zDelta/40 # 3 lines per notch
self.SetScrollPos(win32con.SB_VERT, vpos)
self.SendScintilla(win32con.WM_VSCROLL,
(vpos<<16) | win32con.SB_THUMBPOSITION,
0)
def OnBraceMatch(self, std, extra):
if not self.bMatchBraces: return
DoBraceMatch(self)
def OnNeedShown(self, std, extra):
notify = self.SCIUnpackNotifyMessage(extra)
# OnNeedShown is called before an edit operation when
# text is folded (as it is possible the text insertion will happen
# in a folded region.) As this happens _before_ the insert,
# we ignore the length (if we are at EOF, pos + length may
# actually be beyond the end of buffer)
self.EnsureCharsVisible(notify.position)
def EnsureCharsVisible(self, start, end = None):
if end is None: end = start
lineStart = self.LineFromChar(min(start, end))
lineEnd = self.LineFromChar(max(start, end))
while lineStart <= lineEnd:
self.SCIEnsureVisible(lineStart)
lineStart = lineStart + 1
# Helper to add an event to a menu.
def AppendMenu(self, menu, text="", event=None, flags = None, checked=0):
if event is None:
assert flags is not None, "No event or custom flags!"
cmdid = 0
else:
cmdid = self.bindings.get_command_id(event)
if cmdid is None:
# No event of that name - no point displaying it.
print('View.AppendMenu(): Unknown event "%s" specified for menu text "%s" - ignored' % (event, text))
return
keyname = configManager.get_key_binding( event, self._GetSubConfigNames() )
if keyname is not None:
text = text + "\t" + keyname
if flags is None: flags = win32con.MF_STRING|win32con.MF_ENABLED
if checked: flags = flags | win32con.MF_CHECKED
menu.AppendMenu(flags, cmdid, text)
def OnKeyDown(self, msg):
return self.bindings.fire_key_event( msg )
def GotoEndOfFileEvent(self, event):
self.SetSel(-1)
def KeyDotEvent(self, event):
## Don't trigger autocomplete if any text is selected
s,e = self.GetSel()
if s!=e:
return 1
self.SCIAddText(".")
if self.bAutoCompleteAttributes:
self._AutoComplete()
# View Whitespace/EOL/Indentation UI.
def OnCmdViewWS(self, cmd, code): # Handle the menu command
viewWS = self.SCIGetViewWS()
self.SCISetViewWS(not viewWS)
def OnUpdateViewWS(self, cmdui): # Update the tick on the UI.
cmdui.SetCheck(self.SCIGetViewWS())
cmdui.Enable()
def OnCmdViewIndentationGuides(self, cmd, code): # Handle the menu command
viewIG = self.SCIGetIndentationGuides()
self.SCISetIndentationGuides(not viewIG)
def OnUpdateViewIndentationGuides(self, cmdui): # Update the tick on the UI.
cmdui.SetCheck(self.SCIGetIndentationGuides())
cmdui.Enable()
def OnCmdViewRightEdge(self, cmd, code): # Handle the menu command
if self.SCIGetEdgeMode() == scintillacon.EDGE_NONE:
mode = scintillacon.EDGE_BACKGROUND
else:
mode = scintillacon.EDGE_NONE
self.SCISetEdgeMode(mode)
def OnUpdateViewRightEdge(self, cmdui): # Update the tick on the UI.
cmdui.SetCheck(self.SCIGetEdgeMode() != scintillacon.EDGE_NONE)
cmdui.Enable()
def OnCmdViewEOL(self, cmd, code): # Handle the menu command
viewEOL = self.SCIGetViewEOL()
self.SCISetViewEOL(not viewEOL)
def OnUpdateViewEOL(self, cmdui): # Update the tick on the UI.
cmdui.SetCheck(self.SCIGetViewEOL())
cmdui.Enable()
def OnCmdViewFixedFont(self, cmd, code): # Handle the menu command
self._GetColorizer().bUseFixed = not self._GetColorizer().bUseFixed
self.ApplyFormattingStyles(0)
# Ensure the selection is visible!
self.ScrollCaret()
def OnUpdateViewFixedFont(self, cmdui): # Update the tick on the UI.
c = self._GetColorizer()
if c is not None: cmdui.SetCheck(c.bUseFixed)
cmdui.Enable(c is not None)
def OnCmdEditFind(self, cmd, code):
from . import find
find.ShowFindDialog()
def OnCmdEditRepeat(self, cmd, code):
from . import find
find.FindNext()
def OnCmdEditReplace(self, cmd, code):
from . import find
find.ShowReplaceDialog()
def OnCmdFileLocate(self, cmd, id):
line = self.GetLine().strip()
import pywin.framework.scriptutils
m = patImport.match(line)
if m:
# Module name on this line - locate that!
modName = m.group('name')
fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
if fileName is None:
win32ui.SetStatusText("Can't locate module %s" % modName)
return 1 # Let the default get it.
else:
win32ui.GetApp().OpenDocumentFile(fileName)
else:
# Just to a "normal" locate - let the default handler get it.
return 1
return 0
    def OnCmdGotoLine(self, cmd, id):
        """Prompt for a line number and move the caret there.

        Always returns 0 (command handled); a bad or cancelled entry is
        silently ignored.
        """
        try:
            # The user types a 1-based line; Scintilla lines are 0-based.
            lineNo = int(input("Enter Line Number"))-1
        except (ValueError, KeyboardInterrupt):
            return 0
        self.SCIEnsureVisible(lineNo)
        self.SCIGotoLine(lineNo)
        return 0
def SaveTextFile(self, filename, encoding=None):
doc = self.GetDocument()
doc._SaveTextToFile(self, filename, encoding=encoding)
doc.SetModifiedFlag(0)
return 1
def _AutoComplete(self):
def list2dict(l):
ret={}
for i in l:
ret[i] = None
return ret
self.SCIAutoCCancel() # Cancel old auto-complete lists.
# First try and get an object without evaluating calls
ob = self._GetObjectAtPos(bAllowCalls = 0)
# If that failed, try and process call or indexing to get the object.
if ob is None:
ob = self._GetObjectAtPos(bAllowCalls = 1)
items_dict = {}
if ob is not None:
try: # Catch unexpected errors when fetching attribute names from the object
# extra attributes of win32ui objects
if hasattr(ob, "_obj_"):
try:
items_dict.update(list2dict(dir(ob._obj_)))
except AttributeError:
pass # object has no __dict__
# normal attributes
try:
items_dict.update(list2dict(dir(ob)))
except AttributeError:
pass # object has no __dict__
if hasattr(ob, "__class__"):
items_dict.update(list2dict(_get_class_attributes(ob.__class__)))
# The object may be a COM object with typelib support - lets see if we can get its props.
# (contributed by Stefan Migowsky)
try:
# Get the automation attributes
items_dict.update(ob.__class__._prop_map_get_)
# See if there is an write only property
# could be optimized
items_dict.update(ob.__class__._prop_map_put_)
# append to the already evaluated list
except AttributeError:
pass
# The object might be a pure COM dynamic dispatch with typelib support - lets see if we can get its props.
if hasattr(ob, "_oleobj_"):
try:
for iTI in range(0,ob._oleobj_.GetTypeInfoCount()):
typeInfo = ob._oleobj_.GetTypeInfo(iTI)
self._UpdateWithITypeInfo (items_dict, typeInfo)
except:
pass
except:
win32ui.SetStatusText("Error attempting to get object attributes - %s" % (repr(sys.exc_info()[0]),))
# ensure all keys are strings.
items = [str(k) for k in items_dict.keys()]
# All names that start with "_" go!
items = [k for k in items if not k.startswith('_')]
if not items:
# Heuristics a-la AutoExpand
# The idea is to find other usages of the current binding
# and assume, that it refers to the same object (or at least,
# to an object of the same type)
# Contributed by Vadim Chugunov [vadimch@yahoo.com]
left, right = self._GetWordSplit()
if left=="": # Ignore standalone dots
return None
# We limit our search to the current class, if that
# information is available
minline, maxline, curclass = self._GetClassInfoFromBrowser()
endpos = self.LineIndex(maxline)
text = self.GetTextRange(self.LineIndex(minline),endpos)
try:
l = re.findall(r"\b"+left+"\.\w+",text)
except re.error:
# parens etc may make an invalid RE, but this code wouldnt
# benefit even if the RE did work :-)
l = []
prefix = len(left)+1
unique = {}
for li in l:
unique[li[prefix:]] = 1
# Assuming traditional usage of self...
if curclass and left=="self":
self._UpdateWithClassMethods(unique,curclass)
items = [word for word in unique.keys() if word[:2]!='__' or word[-2:]!='__']
# Ignore the word currently to the right of the dot - probably a red-herring.
try:
items.remove(right[1:])
except ValueError:
pass
if items:
items.sort()
self.SCIAutoCSetAutoHide(0)
self.SCIAutoCShow(items)
def _UpdateWithITypeInfo (self, items_dict, typeInfo):
import pythoncom
typeInfos = [typeInfo]
# suppress IDispatch and IUnknown methods
inspectedIIDs = {pythoncom.IID_IDispatch:None}
while len(typeInfos)>0:
typeInfo = typeInfos.pop()
typeAttr = typeInfo.GetTypeAttr()
if typeAttr.iid not in inspectedIIDs:
inspectedIIDs[typeAttr.iid] = None
for iFun in range(0,typeAttr.cFuncs):
funDesc = typeInfo.GetFuncDesc(iFun)
funName = typeInfo.GetNames(funDesc.memid)[0]
if funName not in items_dict:
items_dict[funName] = None
# Inspect the type info of all implemented types
# E.g. IShellDispatch5 implements IShellDispatch4 which implements IShellDispatch3 ...
for iImplType in range(0,typeAttr.cImplTypes):
iRefType = typeInfo.GetRefTypeOfImplType(iImplType)
refTypeInfo = typeInfo.GetRefTypeInfo(iRefType)
typeInfos.append(refTypeInfo)
# TODO: This is kinda slow. Probably need some kind of cache
# here that is flushed upon file save
# Or maybe we don't need the superclass methods at all ?
def _UpdateWithClassMethods(self,dict,classinfo):
if not hasattr(classinfo,"methods"):
# No 'methods' - probably not what we think it is.
return
dict.update(classinfo.methods)
for super in classinfo.super:
if hasattr(super,"methods"):
self._UpdateWithClassMethods(dict,super)
# Find which class definition caret is currently in and return
# indexes of the the first and the last lines of that class definition
# Data is obtained from module browser (if enabled)
def _GetClassInfoFromBrowser(self,pos=-1):
minline = 0
maxline = self.GetLineCount()-1
doc = self.GetParentFrame().GetActiveDocument()
browser = None
try:
if doc is not None:
browser = doc.GetAllViews()[1]
except IndexError:
pass
if browser is None:
return (minline,maxline,None) # Current window has no browser
if not browser.list: return (minline,maxline,None) # Not initialized
path = self.GetDocument().GetPathName()
if not path: return (minline,maxline,None) # No current path
import pywin.framework.scriptutils
curmodule, path = pywin.framework.scriptutils.GetPackageModuleName(path)
try:
clbrdata = browser.list.root.clbrdata
except AttributeError:
return (minline,maxline,None) # No class data for this module.
curline = self.LineFromChar(pos)
curclass = None
# Find out which class we are in
for item in clbrdata.values():
if item.module==curmodule:
item_lineno = item.lineno - 1 # Scintilla counts lines from 0, whereas pyclbr - from 1
if minline < item_lineno <= curline:
minline = item_lineno
curclass = item
if curline < item_lineno < maxline:
maxline = item_lineno
return (minline,maxline,curclass)
def _GetObjectAtPos(self, pos = -1, bAllowCalls = 0):
left, right = self._GetWordSplit(pos, bAllowCalls)
if left: # It is an attribute lookup
# How is this for a hack!
namespace = sys.modules.copy()
namespace.update(__main__.__dict__)
# Get the debugger's context.
try:
from pywin.framework import interact
if interact.edit is not None and interact.edit.currentView is not None:
globs, locs = interact.edit.currentView.GetContext()[:2]
if globs: namespace.update(globs)
if locs: namespace.update(locs)
except ImportError:
pass
try:
return eval(left, namespace)
except:
pass
return None
    def _GetWordSplit(self, pos = -1, bAllowCalls = 0):
        """Return (before, after): the word fragments to the left and right
        of *pos* (default: the character before the caret).

        When bAllowCalls is true, "()[]" count as word characters so
        expressions like "a(b).c" stay in one piece.
        """
        if pos==-1: pos = self.GetSel()[0]-1 # Character before current one
        limit = self.GetTextLength()
        before = []
        after = []
        index = pos-1
        wordbreaks_use = wordbreaks
        if bAllowCalls: wordbreaks_use = wordbreaks_use + "()[]"
        # Scan backwards from pos-1, collecting word characters.
        while index>=0:
            char = self.SCIGetCharAt(index)
            if char not in wordbreaks_use: break
            before.insert(0, char)
            index = index-1
        # Scan forwards from pos, collecting word characters.
        index = pos
        while index<=limit:
            char = self.SCIGetCharAt(index)
            if char not in wordbreaks_use: break
            after.append(char)
            index=index+1
        return ''.join(before), ''.join(after)
def OnPrepareDC (self, dc, pInfo):
# print "OnPrepareDC for page", pInfo.GetCurPage(), "of", pInfo.GetFromPage(), "to", pInfo.GetToPage(), ", starts=", self.starts
if dc.IsPrinting():
# Check if we are beyond the end.
# (only do this when actually printing, else messes up print preview!)
if not pInfo.GetPreview() and self.starts is not None:
prevPage = pInfo.GetCurPage() - 1
if prevPage > 0 and self.starts[prevPage] >= self.GetTextLength():
# All finished.
pInfo.SetContinuePrinting(0)
return
dc.SetMapMode(win32con.MM_TEXT);
def OnPreparePrinting(self, pInfo):
flags = win32ui.PD_USEDEVMODECOPIES | \
win32ui.PD_ALLPAGES | \
win32ui.PD_NOSELECTION # Dont support printing just a selection.
# NOTE: Custom print dialogs are stopping the user's values from coming back :-(
# self.prtDlg = PrintDialog(pInfo, PRINTDLGORD, flags)
# pInfo.SetPrintDialog(self.prtDlg)
pInfo.SetMinPage(1)
# max page remains undefined for now.
pInfo.SetFromPage(1)
pInfo.SetToPage(1)
ret = self.DoPreparePrinting(pInfo)
return ret
def OnBeginPrinting(self, dc, pInfo):
self.starts = None
return self._obj_.OnBeginPrinting(dc, pInfo)
    def CalculatePageRanges(self, dc, pInfo):
        """Compute self.starts, mapping page number -> starting character
        position, and set the print dialog's max page."""
        # Calculate page ranges and max page
        self.starts = {0:0}
        metrics = dc.GetTextMetrics()
        left, top, right, bottom = pInfo.GetDraw()
        # Leave space at the top for the header.
        rc = (left, top + int((9*metrics['tmHeight'])/2), right, bottom)
        pageStart = 0
        maxPage = 0
        textLen = self.GetTextLength()
        # Let Scintilla lay out (without drawing) page after page until the
        # whole document has been consumed.
        while pageStart < textLen:
            pageStart = self.FormatRange(dc, pageStart, textLen, rc, 0)
            maxPage = maxPage + 1
            self.starts[maxPage] = pageStart
        # And a sentinel for one page past the end
        self.starts[maxPage+1] = textLen
        # When actually printing, maxPage doesnt have any effect at this late state.
        # but is needed to make the Print Preview work correctly.
        pInfo.SetMaxPage(maxPage)
def OnFilePrintPreview(self, *arg):
self._obj_.OnFilePrintPreview()
def OnFilePrint(self, *arg):
self._obj_.OnFilePrint()
    def FormatRange(self, dc, pageStart, lengthDoc, rc, draw):
        """Send EM_FORMATRANGE to Scintilla, laying out (and, when *draw* is
        true, rendering) the character range [pageStart, lengthDoc) into the
        rectangle *rc*. Returns the character position where the next page
        starts.

        The packed buffer mirrors the Win32 FORMATRANGE struct:

        typedef struct _formatrange {
            HDC hdc;
            HDC hdcTarget;
            RECT rc;
            RECT rcPage;
            CHARRANGE chrg;} FORMATRANGE;
        """
        # Two pointers, eight RECT ints (rc and rcPage share the same rect
        # here), then the two CHARRANGE longs.
        fmt='PPIIIIIIIIll'
        hdcRender = dc.GetHandleOutput()
        hdcFormat = dc.GetHandleAttrib()
        fr = struct.pack(fmt, hdcRender, hdcFormat, rc[0], rc[1], rc[2], rc[3], rc[0], rc[1], rc[2], rc[3], pageStart, lengthDoc)
        nextPageStart = self.SendScintilla(EM_FORMATRANGE, draw, fr)
        return nextPageStart
def OnPrint(self, dc, pInfo):
metrics = dc.GetTextMetrics()
# print "dev", w, h, l, metrics['tmAscent'], metrics['tmDescent']
if self.starts is None:
self.CalculatePageRanges(dc, pInfo)
pageNum = pInfo.GetCurPage() - 1
# Setup the header of the page - docname on left, pagenum on right.
doc = self.GetDocument()
cxChar = metrics['tmAveCharWidth']
cyChar = metrics['tmHeight']
left, top, right, bottom = pInfo.GetDraw()
dc.TextOut(0, 2*cyChar, doc.GetTitle())
pagenum_str = win32ui.LoadString(afxres.AFX_IDS_PRINTPAGENUM) % (pageNum+1,)
dc.SetTextAlign(win32con.TA_RIGHT)
dc.TextOut(right, 2*cyChar, pagenum_str)
dc.SetTextAlign(win32con.TA_LEFT)
top = top + int((7*cyChar)/2)
dc.MoveTo(left, top)
dc.LineTo(right, top)
top = top + cyChar
rc = (left, top, right, bottom)
nextPageStart = self.FormatRange(dc, self.starts[pageNum], self.starts[pageNum+1], rc, 1)
def LoadConfiguration():
    """Load the keyboard/editor configuration into the global configManager.

    Reads the "Editor/Keyboard Config" profile value (default "default").
    On failure the error is reported and the "default" configuration is
    tried; if that also fails, configManager is left as None.
    """
    global configManager
    # Bit of a hack I dont kow what to do about - these should be "editor options"
    from .config import ConfigManager
    # (dead "rc =" alias removed — the value was never used under that name)
    configName = win32ui.GetProfileVal("Editor", "Keyboard Config", "default")
    configManager = ConfigManager(configName)
    if configManager.last_error:
        bTryDefault = 0
        msg = "Error loading configuration '%s'\n\n%s" % (configName, configManager.last_error)
        if configName != "default":
            msg = msg + "\n\nThe default configuration will be loaded."
            bTryDefault = 1
        win32ui.MessageBox(msg)
        if bTryDefault:
            configManager = ConfigManager("default")
            if configManager.last_error:
                win32ui.MessageBox("Error loading configuration 'default'\n\n%s" % (configManager.last_error))
                configManager = None
LoadConfiguration()
| {
"repo_name": "huguesv/PTVS",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pythonwin/pywin/scintilla/view.py",
"copies": "7",
"size": "25662",
"license": "apache-2.0",
"hash": -6712388173229187000,
"line_mean": 34.5429362881,
"line_max": 163,
"alpha_frac": 0.7186111761,
"autogenerated": false,
"ratio": 2.9615695326024234,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7180180708702423,
"avg_score": null,
"num_lines": null
} |
# A general purpose MFC CCtrlView view that uses Scintilla.
import control
import IDLEenvironment # IDLE emulation.
from pywin.mfc import docview
from pywin.mfc import dialog
import scintillacon
import win32con
import win32ui
import afxres
import string
import array
import sys
import types
import __main__ # for attribute lookup
import bindings
import keycodes
import struct
import re
import os
PRINTDLGORD = 1538
IDC_PRINT_MAG_EDIT = 1010
EM_FORMATRANGE = win32con.WM_USER+57
wordbreaks = "._" + string.ascii_uppercase + string.ascii_lowercase + string.digits
patImport=re.compile('import (?P<name>.*)')
_event_commands = [
# File menu
"win32ui.ID_FILE_LOCATE", "win32ui.ID_FILE_CHECK", "afxres.ID_FILE_CLOSE",
"afxres.ID_FILE_NEW", "afxres.ID_FILE_OPEN", "afxres.ID_FILE_SAVE",
"afxres.ID_FILE_SAVE_AS", "win32ui.ID_FILE_SAVE_ALL",
# Edit menu
"afxres.ID_EDIT_UNDO", "afxres.ID_EDIT_REDO", "afxres.ID_EDIT_CUT",
"afxres.ID_EDIT_COPY", "afxres.ID_EDIT_PASTE", "afxres.ID_EDIT_SELECT_ALL",
"afxres.ID_EDIT_FIND", "afxres.ID_EDIT_REPEAT", "afxres.ID_EDIT_REPLACE",
# View menu
"win32ui.ID_VIEW_WHITESPACE", "win32ui.ID_VIEW_FIXED_FONT",
"win32ui.ID_VIEW_BROWSE", "win32ui.ID_VIEW_INTERACTIVE",
# Window menu
"afxres.ID_WINDOW_ARRANGE", "afxres.ID_WINDOW_CASCADE",
"afxres.ID_WINDOW_NEW", "afxres.ID_WINDOW_SPLIT",
"afxres.ID_WINDOW_TILE_HORZ", "afxres.ID_WINDOW_TILE_VERT",
# Others
"afxres.ID_APP_EXIT", "afxres.ID_APP_ABOUT",
]
_extra_event_commands = [
("EditDelete", afxres.ID_EDIT_CLEAR),
("LocateModule", win32ui.ID_FILE_LOCATE),
("GotoLine", win32ui.ID_EDIT_GOTO_LINE),
("DbgBreakpointToggle", win32ui.IDC_DBG_ADD),
("DbgGo", win32ui.IDC_DBG_GO),
("DbgStepOver", win32ui.IDC_DBG_STEPOVER),
("DbgStep", win32ui.IDC_DBG_STEP),
("DbgStepOut", win32ui.IDC_DBG_STEPOUT),
("DbgBreakpointClearAll", win32ui.IDC_DBG_CLEAR),
("DbgClose", win32ui.IDC_DBG_CLOSE),
]
event_commands = []
def _CreateEvents():
    """Populate event_commands with (event_name, command_id) pairs.

    Each dotted constant name in _event_commands ("win32ui.ID_FILE_LOCATE")
    is resolved to its integer command id, and an event name is derived
    from everything after the first underscore ("FileLocate").
    """
    # Resolve "module.CONSTANT" through an explicit module table instead of
    # eval() — same result, no dynamic code evaluation.
    modules = {"win32ui": win32ui, "afxres": afxres}
    for name in _event_commands:
        mod_name, attr_name = name.split(".", 1)
        val = getattr(modules[mod_name], attr_name)
        # Everything after the first "_" becomes the CamelCased event name.
        name_parts = name.split("_")[1:]
        name_parts = [p.capitalize() for p in name_parts]
        event = ''.join(name_parts)
        event_commands.append((event, val))
    for ev_name, cmd_id in _extra_event_commands:
        event_commands.append((ev_name, cmd_id))
_CreateEvents()
del _event_commands; del _extra_event_commands
command_reflectors = [
(win32ui.ID_EDIT_UNDO, win32con.WM_UNDO),
(win32ui.ID_EDIT_REDO, scintillacon.SCI_REDO),
(win32ui.ID_EDIT_CUT, win32con.WM_CUT),
(win32ui.ID_EDIT_COPY, win32con.WM_COPY),
(win32ui.ID_EDIT_PASTE, win32con.WM_PASTE),
(win32ui.ID_EDIT_CLEAR, win32con.WM_CLEAR),
(win32ui.ID_EDIT_SELECT_ALL, scintillacon.SCI_SELECTALL),
]
def DoBraceMatch(control):
	"""Highlight the brace pair around the caret, or flag an unmatched brace."""
	caret = control.SCIGetCurrentPos()
	before = control.SCIGetCharAt(caret - 1) if caret else ' '
	at_caret = control.SCIGetCharAt(caret)
	# The character just before the caret wins over the one under it.
	if before in "[](){}":
		brace_pos = caret - 1
	elif at_caret in "[](){}":
		brace_pos = caret
	else:
		brace_pos = -1
	opposite = control.SCIBraceMatch(brace_pos, 0) if brace_pos != -1 else -1
	if brace_pos != -1 and opposite == -1:
		control.SCIBraceBadHighlight(brace_pos)
	else:
		# Highlights the matched pair, or clears both when brace_pos is -1.
		control.SCIBraceHighlight(brace_pos, opposite)
def _get_class_attributes(ob):
	"""Collect attribute names of *ob* and, recursively, of its base classes.

	Order is dir(ob) first, then previously-unseen names from each base.
	Objects without a __bases__ attribute contribute only their own dir().
	"""
	attrs = []
	try:
		attrs.extend(dir(ob))
		for base in ob.__bases__:
			attrs.extend(a for a in _get_class_attributes(base) if a not in attrs)
	except AttributeError:
		pass
	return attrs
# Supposed to look like an MFC CEditView, but
# also supports IDLE extensions and other source code generic features.
class CScintillaView(docview.CtrlView, control.CScintillaColorEditInterface):
	"""An MFC CCtrlView hosting a Scintilla editor control.

	Looks like an MFC CEditView, but also supports IDLE extensions,
	attribute auto-completion, brace matching, printing and configurable
	key bindings.
	"""
	def __init__(self, doc):
		docview.CtrlView.__init__(self, doc, "Scintilla", win32con.WS_CHILD | win32con.WS_VSCROLL | win32con.WS_HSCROLL | win32con.WS_CLIPCHILDREN | win32con.WS_VISIBLE)
		self._tabWidth = 8 # Mirror of what we send to Scintilla - never change this directly
		self.bAutoCompleteAttributes = 1
		self.bShowCallTips = 1
		self.bMatchBraces = 0 # Editor option will default this to true later!
		self.bindings = bindings.BindingsManager(self)
		self.idle = IDLEenvironment.IDLEEditorWindow(self)
		self.idle.IDLEExtension("AutoExpand")
		# SendScintilla is called so frequently it is worth optimizing.
		self.SendScintilla = self._obj_.SendMessage
	# NOTE(review): this OnDestroy is shadowed by the second OnDestroy
	# definition later in this class; only the later one is ever bound.
	def OnDestroy(self, msg):
		self.SendScintilla = None
		return docview.CtrlView.OnDestroy(self, msg)
	def _MakeColorizer(self):
		# Choose the source formatter based on the document's file extension.
		ext = os.path.splitext(self.GetDocument().GetPathName())[1]
		import formatter
		return formatter.BuiltinPythonSourceFormatter(self, ext)
	# def SendScintilla(self, msg, w=0, l=0):
	# 	return self._obj_.SendMessage(msg, w, l)
	def SCISetTabWidth(self, width):
		# I need to remember the tab-width for the AutoIndent extension. This may go.
		self._tabWidth = width
		control.CScintillaEditInterface.SCISetTabWidth(self, width)
	def GetTabWidth(self):
		# Return the locally mirrored tab width.
		return self._tabWidth
	def HookHandlers(self):
		"""Hook menu commands, command-UI update handlers and key messages."""
		# Create events for all the menu names.
		for name, val in event_commands:
			# handler = lambda id, code, tosend=val, parent=parent: parent.OnCommand(tosend, 0) and 0
			self.bindings.bind(name, None, cid=val)
		# Hook commands that do nothing other than send Scintilla messages.
		for command, reflection in command_reflectors:
			handler = lambda id, code, ss=self.SendScintilla, tosend=reflection: ss(tosend) and 0
			self.HookCommand(handler, command)
		self.HookCommand(self.OnCmdViewWS, win32ui.ID_VIEW_WHITESPACE)
		self.HookCommandUpdate(self.OnUpdateViewWS, win32ui.ID_VIEW_WHITESPACE)
		self.HookCommand(self.OnCmdViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
		self.HookCommandUpdate(self.OnUpdateViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
		self.HookCommand(self.OnCmdViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
		self.HookCommandUpdate(self.OnUpdateViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
		self.HookCommand(self.OnCmdViewEOL, win32ui.ID_VIEW_EOL)
		self.HookCommandUpdate(self.OnUpdateViewEOL, win32ui.ID_VIEW_EOL)
		self.HookCommand(self.OnCmdViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
		self.HookCommandUpdate(self.OnUpdateViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
		self.HookCommand(self.OnCmdFileLocate, win32ui.ID_FILE_LOCATE)
		self.HookCommand(self.OnCmdEditFind, win32ui.ID_EDIT_FIND)
		self.HookCommand(self.OnCmdEditRepeat, win32ui.ID_EDIT_REPEAT)
		self.HookCommand(self.OnCmdEditReplace, win32ui.ID_EDIT_REPLACE)
		self.HookCommand(self.OnCmdGotoLine, win32ui.ID_EDIT_GOTO_LINE)
		self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT)
		self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT_DIRECT)
		self.HookCommand(self.OnFilePrintPreview,
			win32ui.ID_FILE_PRINT_PREVIEW)
		# Key bindings.
		self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
		self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN)
		# Hook wheeley mouse events
		# self.HookMessage(self.OnMouseWheel, win32con.WM_MOUSEWHEEL)
		self.HookFormatter()
	def OnInitialUpdate(self):
		"""First update: set code page, create margins, hook handlers, load config."""
		# NOTE(review): `doc` is assigned but never used below.
		doc = self.GetDocument()
		# Enable Unicode
		self.SendScintilla(scintillacon.SCI_SETCODEPAGE, scintillacon.SC_CP_UTF8, 0)
		self.SendScintilla(scintillacon.SCI_SETKEYSUNICODE, 1, 0)
		# Create margins
		self.SendScintilla(scintillacon.SCI_SETMARGINTYPEN, 1, scintillacon.SC_MARGIN_SYMBOL);
		self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 1, 0xF);
		self.SendScintilla(scintillacon.SCI_SETMARGINTYPEN, 2, scintillacon.SC_MARGIN_SYMBOL);
		self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 2, scintillacon.SC_MASK_FOLDERS);
		self.SendScintilla(scintillacon.SCI_SETMARGINSENSITIVEN, 2, 1);
		self.GetDocument().HookViewNotifications(self) # is there an MFC way to grab this?
		self.HookHandlers()
		# Load the configuration information.
		self.OnWinIniChange(None)
		self.SetSel()
		self.GetDocument().FinalizeViewCreation(self) # is there an MFC way to grab this?
	def _GetSubConfigNames(self):
		return None # By default we use only sections without sub-sections.
	def OnWinIniChange(self, section = None):
		# Re-read configuration, bracketed by prepare/complete so the key
		# bindings are rebuilt consistently even if DoConfigChange raises.
		self.bindings.prepare_configure()
		try:
			self.DoConfigChange()
		finally:
			self.bindings.complete_configure()
	def DoConfigChange(self):
		# Bit of a hack I dont kow what to do about - these should be "editor options"
		from pywin.framework.editor import GetEditorOption
		self.bAutoCompleteAttributes = GetEditorOption("Autocomplete Attributes", 1)
		self.bShowCallTips = GetEditorOption("Show Call Tips", 1)
		# Update the key map and extension data.
		configManager.configure(self, self._GetSubConfigNames())
		if configManager.last_error:
			win32ui.MessageBox(configManager.last_error, "Configuration Error")
		self.bMatchBraces = GetEditorOption("Match Braces", 1)
		self.ApplyFormattingStyles(1)
	def OnDestroy(self, msg):
		# Drop binding/IDLE references so they can be garbage collected,
		# then let the base classes clean up.
		self.bindings.close()
		self.bindings = None
		self.idle.close()
		self.idle = None
		control.CScintillaColorEditInterface.close(self)
		return docview.CtrlView.OnDestroy(self, msg)
	def OnMouseWheel(self, msg):
		# Scroll the view vertically in response to the wheel delta.
		zDelta = msg[2] >> 16
		vpos = self.GetScrollPos(win32con.SB_VERT)
		vpos = vpos - zDelta/40 # 3 lines per notch
		self.SetScrollPos(win32con.SB_VERT, vpos)
		self.SendScintilla(win32con.WM_VSCROLL,
						   (vpos<<16) | win32con.SB_THUMBPOSITION,
						   0)
	def OnBraceMatch(self, std, extra):
		# Delegate to the module-level helper when the option is enabled.
		if not self.bMatchBraces: return
		DoBraceMatch(self)
	def OnNeedShown(self, std, extra):
		notify = self.SCIUnpackNotifyMessage(extra)
		# OnNeedShown is called before an edit operation when
		# text is folded (as it is possible the text insertion will happen
		# in a folded region.) As this happens _before_ the insert,
		# we ignore the length (if we are at EOF, pos + length may
		# actually be beyond the end of buffer)
		self.EnsureCharsVisible(notify.position)
	def EnsureCharsVisible(self, start, end = None):
		# Unfold/ensure every line between the two character positions is visible.
		if end is None: end = start
		lineStart = self.LineFromChar(min(start, end))
		lineEnd = self.LineFromChar(max(start, end))
		while lineStart <= lineEnd:
			self.SCIEnsureVisible(lineStart)
			lineStart = lineStart + 1
	# Helper to add an event to a menu.
	def AppendMenu(self, menu, text="", event=None, flags = None, checked=0):
		if event is None:
			assert flags is not None, "No event or custom flags!"
			cmdid = 0
		else:
			cmdid = self.bindings.get_command_id(event)
			if cmdid is None:
				# No event of that name - no point displaying it.
				print 'View.AppendMenu(): Unknown event "%s" specified for menu text "%s" - ignored' % (event, text)
				return
			# Show the bound key (if any) after the menu text.
			keyname = configManager.get_key_binding( event, self._GetSubConfigNames() )
			if keyname is not None:
				text = text + "\t" + keyname
		if flags is None: flags = win32con.MF_STRING|win32con.MF_ENABLED
		if checked: flags = flags | win32con.MF_CHECKED
		menu.AppendMenu(flags, cmdid, text)
	def OnKeyDown(self, msg):
		# Let the bindings manager dispatch the keystroke.
		return self.bindings.fire_key_event( msg )
	def GotoEndOfFileEvent(self, event):
		self.SetSel(-1)
	def KeyDotEvent(self, event):
		## Don't trigger autocomplete if any text is selected
		s,e = self.GetSel()
		if s!=e:
			return 1
		self.SCIAddText(".")
		if self.bAutoCompleteAttributes:
			self._AutoComplete()
	# View Whitespace/EOL/Indentation UI.
	def OnCmdViewWS(self, cmd, code): # Handle the menu command
		viewWS = self.SCIGetViewWS()
		self.SCISetViewWS(not viewWS)
	def OnUpdateViewWS(self, cmdui): # Update the tick on the UI.
		cmdui.SetCheck(self.SCIGetViewWS())
		cmdui.Enable()
	def OnCmdViewIndentationGuides(self, cmd, code): # Handle the menu command
		viewIG = self.SCIGetIndentationGuides()
		self.SCISetIndentationGuides(not viewIG)
	def OnUpdateViewIndentationGuides(self, cmdui): # Update the tick on the UI.
		cmdui.SetCheck(self.SCIGetIndentationGuides())
		cmdui.Enable()
	def OnCmdViewRightEdge(self, cmd, code): # Handle the menu command
		if self.SCIGetEdgeMode() == scintillacon.EDGE_NONE:
			mode = scintillacon.EDGE_BACKGROUND
		else:
			mode = scintillacon.EDGE_NONE
		self.SCISetEdgeMode(mode)
	def OnUpdateViewRightEdge(self, cmdui): # Update the tick on the UI.
		cmdui.SetCheck(self.SCIGetEdgeMode() != scintillacon.EDGE_NONE)
		cmdui.Enable()
	def OnCmdViewEOL(self, cmd, code): # Handle the menu command
		viewEOL = self.SCIGetViewEOL()
		self.SCISetViewEOL(not viewEOL)
	def OnUpdateViewEOL(self, cmdui): # Update the tick on the UI.
		cmdui.SetCheck(self.SCIGetViewEOL())
		cmdui.Enable()
	def OnCmdViewFixedFont(self, cmd, code): # Handle the menu command
		self._GetColorizer().bUseFixed = not self._GetColorizer().bUseFixed
		self.ApplyFormattingStyles(0)
		# Ensure the selection is visible!
		self.ScrollCaret()
	def OnUpdateViewFixedFont(self, cmdui): # Update the tick on the UI.
		c = self._GetColorizer()
		if c is not None: cmdui.SetCheck(c.bUseFixed)
		cmdui.Enable(c is not None)
	def OnCmdEditFind(self, cmd, code):
		import find
		find.ShowFindDialog()
	def OnCmdEditRepeat(self, cmd, code):
		import find
		find.FindNext()
	def OnCmdEditReplace(self, cmd, code):
		import find
		find.ShowReplaceDialog()
	def OnCmdFileLocate(self, cmd, id):
		# If the current line is an "import X" statement, open module X.
		line = self.GetLine().strip()
		import pywin.framework.scriptutils
		m = patImport.match(line)
		if m:
			# Module name on this line - locate that!
			modName = m.group('name')
			fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
			if fileName is None:
				win32ui.SetStatusText("Can't locate module %s" % modName)
				return 1 # Let the default get it.
			else:
				win32ui.GetApp().OpenDocumentFile(fileName)
		else:
			# Just do a "normal" locate - let the default handler get it.
			return 1
		return 0
	def OnCmdGotoLine(self, cmd, id):
		# Prompt for a 1-based line number and jump to it (0-based internally).
		try:
			lineNo = int(raw_input("Enter Line Number"))-1
		except (ValueError, KeyboardInterrupt):
			return 0
		self.SCIEnsureVisible(lineNo)
		self.SCIGotoLine(lineNo)
		return 0
	def SaveTextFile(self, filename):
		"""Save the document's text to *filename*; returns 1 on completion."""
		doc = self.GetDocument()
		# Open in binary mode as scintilla itself ensures the
		# line endings are already appropriate, and our doc save
		# method handles encoding, BOMs, etc.
		f = open(filename, 'wb')
		try:
			doc._SaveTextToFile(self, f)
		finally:
			f.close()
		doc.SetModifiedFlag(0)
		return 1
	def _AutoComplete(self):
		"""Pop up an attribute-completion list for the expression left of the caret."""
		def list2dict(l):
			# Use dict keys as a cheap set of names.
			ret={}
			for i in l:
				ret[i] = None
			return ret
		self.SCIAutoCCancel() # Cancel old auto-complete lists.
		# First try and get an object without evaluating calls
		ob = self._GetObjectAtPos(bAllowCalls = 0)
		# If that failed, try and process call or indexing to get the object.
		if ob is None:
			ob = self._GetObjectAtPos(bAllowCalls = 1)
		items_dict = {}
		if ob is not None:
			try: # Catch unexpected errors when fetching attribute names from the object
				try:
					items_dict.update(list2dict(dir(ob)))
				except AttributeError:
					pass # object has no __dict__
				if hasattr(ob, "__class__"):
					items_dict.update(list2dict(_get_class_attributes(ob.__class__)))
				# The object may be a COM object with typelib support - lets see if we can get its props.
				# (contributed by Stefan Migowsky)
				try:
					# Get the automation attributes
					items_dict.update(ob.__class__._prop_map_get_)
					# See if there is an write only property
					# could be optimized
					items_dict.update(ob.__class__._prop_map_put_)
					# append to the already evaluated list
				except AttributeError:
					pass
				# The object might be a pure COM dynamic dispatch with typelib support - lets see if we can get its props.
				if hasattr(ob, "_oleobj_"):
					try:
						for iTI in xrange(0,ob._oleobj_.GetTypeInfoCount()):
							typeInfo = ob._oleobj_.GetTypeInfo(iTI)
							typeAttr = typeInfo.GetTypeAttr()
							for iFun in xrange(0,typeAttr.cFuncs):
								funDesc = typeInfo.GetFuncDesc(iFun)
								funName = typeInfo.GetNames(funDesc.memid)[0]
								if funName not in items_dict:
									items_dict[funName] = None
					except:
						pass
			except:
				win32ui.SetStatusText("Error attempting to get object attributes - %s" % (repr(sys.exc_info()[0]),))
		# ensure all keys are strings.
		items = [str(k) for k in items_dict.iterkeys()]
		# All names that start with "_" go!
		items = [k for k in items if not k.startswith('_')]
		if not items:
			# Heuristics a-la AutoExpand
			# The idea is to find other usages of the current binding
			# and assume, that it refers to the same object (or at least,
			# to an object of the same type)
			# Contributed by Vadim Chugunov [vadimch@yahoo.com]
			left, right = self._GetWordSplit()
			if left=="": # Ignore standalone dots
				return None
			# We limit our search to the current class, if that
			# information is available
			minline, maxline, curclass = self._GetClassInfoFromBrowser()
			endpos = self.LineIndex(maxline)
			text = self.GetTextRange(self.LineIndex(minline),endpos)
			try:
				l = re.findall(r"\b"+left+"\.\w+",text)
			except re.error:
				# parens etc may make an invalid RE, but this code wouldnt
				# benefit even if the RE did work :-)
				l = []
			prefix = len(left)+1
			unique = {}
			for li in l:
				unique[li[prefix:]] = 1
			# Assuming traditional usage of self...
			if curclass and left=="self":
				self._UpdateWithClassMethods(unique,curclass)
			items = [word for word in unique.iterkeys() if word[:2]!='__' or word[-2:]!='__']
			# Ignore the word currently to the right of the dot - probably a red-herring.
			try:
				items.remove(right[1:])
			except ValueError:
				pass
		if items:
			items.sort()
			self.SCIAutoCSetAutoHide(0)
			self.SCIAutoCShow(items)
	# TODO: This is kinda slow. Probably need some kind of cache
	# here that is flushed upon file save
	# Or maybe we don't need the superclass methods at all ?
	def _UpdateWithClassMethods(self,dict,classinfo):
		# Merge the browser's method names from classinfo and its bases into `dict`.
		if not hasattr(classinfo,"methods"):
			# No 'methods' - probably not what we think it is.
			return
		dict.update(classinfo.methods)
		for super in classinfo.super:
			if hasattr(super,"methods"):
				self._UpdateWithClassMethods(dict,super)
	# Find which class definition caret is currently in and return
	# indexes of the first and the last lines of that class definition
	# Data is obtained from module browser (if enabled)
	def _GetClassInfoFromBrowser(self,pos=-1):
		minline = 0
		maxline = self.GetLineCount()-1
		doc = self.GetParentFrame().GetActiveDocument()
		browser = None
		try:
			if doc is not None:
				browser = doc.GetAllViews()[1]
		except IndexError:
			pass
		if browser is None:
			return (minline,maxline,None) # Current window has no browser
		if not browser.list: return (minline,maxline,None) # Not initialized
		path = self.GetDocument().GetPathName()
		if not path: return (minline,maxline,None) # No current path
		import pywin.framework.scriptutils
		curmodule, path = pywin.framework.scriptutils.GetPackageModuleName(path)
		try:
			clbrdata = browser.list.root.clbrdata
		except AttributeError:
			return (minline,maxline,None) # No class data for this module.
		curline = self.LineFromChar(pos)
		curclass = None
		# Find out which class we are in
		for item in clbrdata.itervalues():
			if item.module==curmodule:
				item_lineno = item.lineno - 1 # Scintilla counts lines from 0, whereas pyclbr - from 1
				if minline < item_lineno <= curline:
					minline = item_lineno
					curclass = item
				if curline < item_lineno < maxline:
					maxline = item_lineno
		return (minline,maxline,curclass)
	def _GetObjectAtPos(self, pos = -1, bAllowCalls = 0):
		# Evaluate the dotted expression to the left of `pos` (if any) in a
		# namespace built from sys.modules, __main__ and the debugger context.
		left, right = self._GetWordSplit(pos, bAllowCalls)
		if left: # It is an attribute lookup
			# How is this for a hack!
			namespace = sys.modules.copy()
			namespace.update(__main__.__dict__)
			# Get the debugger's context.
			try:
				from pywin.framework import interact
				if interact.edit is not None and interact.edit.currentView is not None:
					globs, locs = interact.edit.currentView.GetContext()[:2]
					if globs: namespace.update(globs)
					if locs: namespace.update(locs)
			except ImportError:
				pass
			try:
				return eval(left, namespace)
			except:
				pass
		return None
	def _GetWordSplit(self, pos = -1, bAllowCalls = 0):
		"""Return the (before, after) word fragments around *pos*, per `wordbreaks`."""
		if pos==-1: pos = self.GetSel()[0]-1 # Character before current one
		limit = self.GetTextLength()
		before = []
		after = []
		index = pos-1
		wordbreaks_use = wordbreaks
		if bAllowCalls: wordbreaks_use = wordbreaks_use + "()[]"
		while index>=0:
			char = self.SCIGetCharAt(index)
			if char not in wordbreaks_use: break
			before.insert(0, char)
			index = index-1
		index = pos
		while index<=limit:
			char = self.SCIGetCharAt(index)
			if char not in wordbreaks_use: break
			after.append(char)
			index=index+1
		return ''.join(before), ''.join(after)
	def OnPrepareDC (self, dc, pInfo):
		# print "OnPrepareDC for page", pInfo.GetCurPage(), "of", pInfo.GetFromPage(), "to", pInfo.GetToPage(), ", starts=", self.starts
		if dc.IsPrinting():
			# Check if we are beyond the end.
			# (only do this when actually printing, else messes up print preview!)
			if not pInfo.GetPreview() and self.starts is not None:
				prevPage = pInfo.GetCurPage() - 1
				if prevPage > 0 and self.starts[prevPage] >= self.GetTextLength():
					# All finished.
					pInfo.SetContinuePrinting(0)
					return
			dc.SetMapMode(win32con.MM_TEXT);
	def OnPreparePrinting(self, pInfo):
		"""Set up the page range defaults and run the standard print dialog."""
		flags = win32ui.PD_USEDEVMODECOPIES | \
				win32ui.PD_ALLPAGES | \
				win32ui.PD_NOSELECTION # Dont support printing just a selection.
		# NOTE: Custom print dialogs are stopping the user's values from coming back :-(
		# self.prtDlg = PrintDialog(pInfo, PRINTDLGORD, flags)
		# pInfo.SetPrintDialog(self.prtDlg)
		pInfo.SetMinPage(1)
		# max page remains undefined for now.
		pInfo.SetFromPage(1)
		pInfo.SetToPage(1)
		ret = self.DoPreparePrinting(pInfo)
		return ret
	def OnBeginPrinting(self, dc, pInfo):
		# Reset cached page-start offsets; rebuilt by CalculatePageRanges.
		self.starts = None
		return self._obj_.OnBeginPrinting(dc, pInfo)
	def CalculatePageRanges(self, dc, pInfo):
		# Calculate page ranges and max page
		self.starts = {0:0}
		metrics = dc.GetTextMetrics()
		left, top, right, bottom = pInfo.GetDraw()
		# Leave space at the top for the header.
		rc = (left, top + (9*metrics['tmHeight'])/2, right, bottom)
		pageStart = 0
		maxPage = 0
		textLen = self.GetTextLength()
		while pageStart < textLen:
			pageStart = self.FormatRange(dc, pageStart, textLen, rc, 0)
			maxPage = maxPage + 1
			self.starts[maxPage] = pageStart
		# And a sentinal for one page past the end
		self.starts[maxPage+1] = textLen
		# When actually printing, maxPage doesnt have any effect at this late state.
		# but is needed to make the Print Preview work correctly.
		pInfo.SetMaxPage(maxPage)
	def OnFilePrintPreview(self, *arg):
		self._obj_.OnFilePrintPreview()
	def OnFilePrint(self, *arg):
		self._obj_.OnFilePrint()
	def FormatRange(self, dc, pageStart, lengthDoc, rc, draw):
		"""Pack a FORMATRANGE and send EM_FORMATRANGE to the control.

		Returns the index of the first character of the next page (used by
		CalculatePageRanges as the next pageStart).

		typedef struct _formatrange {
			HDC hdc;
			HDC hdcTarget;
			RECT rc;
			RECT rcPage;
			CHARRANGE chrg;} FORMATRANGE;
		"""
		fmt='PPIIIIIIIIll'
		hdcRender = dc.GetHandleOutput()
		hdcFormat = dc.GetHandleAttrib()
		fr = struct.pack(fmt, hdcRender, hdcFormat, rc[0], rc[1], rc[2], rc[3], rc[0], rc[1], rc[2], rc[3], pageStart, lengthDoc)
		nextPageStart = self.SendScintilla(EM_FORMATRANGE, draw, fr)
		return nextPageStart
	def OnPrint(self, dc, pInfo):
		metrics = dc.GetTextMetrics()
		# print "dev", w, h, l, metrics['tmAscent'], metrics['tmDescent']
		if self.starts is None:
			self.CalculatePageRanges(dc, pInfo)
		pageNum = pInfo.GetCurPage() - 1
		# Setup the header of the page - docname on left, pagenum on right.
		doc = self.GetDocument()
		cxChar = metrics['tmAveCharWidth']
		cyChar = metrics['tmHeight']
		left, top, right, bottom = pInfo.GetDraw()
		dc.TextOut(0, 2*cyChar, doc.GetTitle())
		pagenum_str = win32ui.LoadString(afxres.AFX_IDS_PRINTPAGENUM) % (pageNum+1,)
		dc.SetTextAlign(win32con.TA_RIGHT)
		dc.TextOut(right, 2*cyChar, pagenum_str)
		dc.SetTextAlign(win32con.TA_LEFT)
		top = top + (7*cyChar)/2
		dc.MoveTo(left, top)
		dc.LineTo(right, top)
		top = top + cyChar
		rc = (left, top, right, bottom)
		nextPageStart = self.FormatRange(dc, self.starts[pageNum], self.starts[pageNum+1], rc, 1)
def LoadConfiguration():
	"""(Re)create the global `configManager` from the profile's keyboard config.

	Falls back to the "default" configuration if the named one fails to
	load, and leaves `configManager` as None if even the default fails.
	"""
	global configManager
	# Bit of a hack I dont kow what to do about?
	from config import ConfigManager
	# Removed the unused `rc` alias that was bound alongside configName.
	configName = win32ui.GetProfileVal("Editor", "Keyboard Config", "default")
	configManager = ConfigManager(configName)
	if configManager.last_error:
		bTryDefault = 0
		msg = "Error loading configuration '%s'\n\n%s" % (configName, configManager.last_error)
		if configName != "default":
			msg = msg + "\n\nThe default configuration will be loaded."
			bTryDefault = 1
		win32ui.MessageBox(msg)
		if bTryDefault:
			configManager = ConfigManager("default")
			if configManager.last_error:
				win32ui.MessageBox("Error loading configuration 'default'\n\n%s" % (configManager.last_error))
				configManager = None
LoadConfiguration()
| {
"repo_name": "IronLanguages/ironpython2",
"path": "Src/StdLib/Lib/site-packages/pythonwin/pywin/scintilla/view.py",
"copies": "2",
"size": "24844",
"license": "apache-2.0",
"hash": -8092190392549420000,
"line_mean": 34.4407988588,
"line_max": 163,
"alpha_frac": 0.7183625825,
"autogenerated": false,
"ratio": 2.9562113279390765,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9475745823432957,
"avg_score": 0.03976561740122378,
"num_lines": 701
} |
# A general purpose MFC CCtrlView view that uses Scintilla.
import control
import IDLEenvironment # IDLE emulation.
from pywin.mfc import docview
from pywin.mfc import dialog
from scintillacon import *
import win32con
import win32ui
import afxres
import string
import array
import sys
import types
import __main__ # for attribute lookup
import bindings
import keycodes
import struct
import re
import os
from pywin import is_platform_unicode
# Resource ids for the (currently unused) custom print dialog.
PRINTDLGORD = 1538
IDC_PRINT_MAG_EDIT = 1010
# Richedit-style "format range" message, used for printing via Scintilla.
EM_FORMATRANGE = win32con.WM_USER+57
# Characters treated as part of a "word" when splitting identifiers.
wordbreaks = "._" + string.uppercase + string.lowercase + string.digits
# Regexp used by OnCmdFileLocate to pull a module name out of an "import X" line.
patImport=re.compile('import (?P<name>.*)')
# Dotted names of standard command IDs.  _CreateEvents() resolves each to
# its value and derives an event name from the parts after the first "_"
# (e.g. "afxres.ID_FILE_NEW" -> ("FileNew", afxres.ID_FILE_NEW)).
_event_commands = [
	# File menu
	"win32ui.ID_FILE_LOCATE", "win32ui.ID_FILE_CHECK", "afxres.ID_FILE_CLOSE",
	"afxres.ID_FILE_NEW", "afxres.ID_FILE_OPEN", "afxres.ID_FILE_SAVE",
	"afxres.ID_FILE_SAVE_AS", "win32ui.ID_FILE_SAVE_ALL",
	# Edit menu
	"afxres.ID_EDIT_UNDO", "afxres.ID_EDIT_REDO", "afxres.ID_EDIT_CUT",
	"afxres.ID_EDIT_COPY", "afxres.ID_EDIT_PASTE", "afxres.ID_EDIT_SELECT_ALL",
	"afxres.ID_EDIT_FIND", "afxres.ID_EDIT_REPEAT", "afxres.ID_EDIT_REPLACE",
	# View menu
	"win32ui.ID_VIEW_WHITESPACE", "win32ui.ID_VIEW_FIXED_FONT",
	"win32ui.ID_VIEW_BROWSE", "win32ui.ID_VIEW_INTERACTIVE",
	# Window menu
	"afxres.ID_WINDOW_ARRANGE", "afxres.ID_WINDOW_CASCADE",
	"afxres.ID_WINDOW_NEW", "afxres.ID_WINDOW_SPLIT",
	"afxres.ID_WINDOW_TILE_HORZ", "afxres.ID_WINDOW_TILE_VERT",
	# Others
	"afxres.ID_APP_EXIT", "afxres.ID_APP_ABOUT",
	]
# Extra (event-name, command-id) pairs appended verbatim to event_commands.
_extra_event_commands = [
	("EditDelete", afxres.ID_EDIT_CLEAR),
	("LocateModule", win32ui.ID_FILE_LOCATE),
	("GotoLine", win32ui.ID_EDIT_GOTO_LINE),
	("DbgBreakpointToggle", win32ui.IDC_DBG_ADD),
	("DbgGo", win32ui.IDC_DBG_GO),
	("DbgStepOver", win32ui.IDC_DBG_STEPOVER),
	("DbgStep", win32ui.IDC_DBG_STEP),
	("DbgStepOut", win32ui.IDC_DBG_STEPOUT),
	("DbgBreakpointClearAll", win32ui.IDC_DBG_CLEAR),
	("DbgClose", win32ui.IDC_DBG_CLOSE),
	]
# (event_name, command_id) pairs, built once at import time by _CreateEvents().
event_commands = []
def _CreateEvents():
	# Build event_commands from the dotted names above, then append the
	# explicit extras.  "win32ui.ID_FILE_LOCATE" -> ("FileLocate", <id>).
	for name in _event_commands:
		val = eval(name)
		name_parts = string.split(name, "_")[1:]
		name_parts = map(string.capitalize, name_parts)
		event =string.join(name_parts,'')
		event_commands.append((event, val))
	for name, id in _extra_event_commands:
		event_commands.append((name, id))
_CreateEvents()
del _event_commands; del _extra_event_commands
# Commands implemented purely by reflecting a message straight to the
# Scintilla control - hooked in CScintillaView.HookHandlers.
command_reflectors = [
	(win32ui.ID_EDIT_UNDO, win32con.WM_UNDO),
	(win32ui.ID_EDIT_REDO, SCI_REDO),
	(win32ui.ID_EDIT_CUT, win32con.WM_CUT),
	(win32ui.ID_EDIT_COPY, win32con.WM_COPY),
	(win32ui.ID_EDIT_PASTE, win32con.WM_PASTE),
	(win32ui.ID_EDIT_CLEAR, win32con.WM_CLEAR),
	(win32ui.ID_EDIT_SELECT_ALL, SCI_SELECTALL),
]
def DoBraceMatch(control):
	# Highlight the brace pair around the caret, or mark an unmatched brace.
	curPos = control.SCIGetCurrentPos()
	charBefore = ' '
	if curPos: charBefore = control.SCIGetCharAt(curPos-1)
	charAt = control.SCIGetCharAt(curPos)
	braceAtPos = braceOpposite = -1
	# The character just before the caret takes precedence over the one at it.
	if charBefore in "[](){}": braceAtPos = curPos-1
	if braceAtPos==-1:
		if charAt in "[](){}": braceAtPos = curPos
	if braceAtPos != -1:
		braceOpposite = control.SCIBraceMatch(braceAtPos, 0)
	if braceAtPos != -1 and braceOpposite==-1:
		control.SCIBraceBadHighlight(braceAtPos)
	else:
		# either clear them both or set them both.
		control.SCIBraceHighlight(braceAtPos, braceOpposite)
def _get_class_attributes(ob):
	# Recurse into base classes looking for attributes
	# Returns dir(ob) followed by previously-unseen names from each base;
	# objects without __bases__ contribute only their own dir() listing.
	items = []
	try:
		items = items + dir(ob)
		for i in ob.__bases__:
			for item in _get_class_attributes(i):
				if item not in items:
					items.append(item)
	except AttributeError:
		pass
	return items
# Supposed to look like an MFC CEditView, but
# also supports IDLE extensions and other source code generic features.
class CScintillaView(docview.CtrlView, control.CScintillaColorEditInterface):
def __init__(self, doc):
docview.CtrlView.__init__(self, doc, "Scintilla", win32con.WS_CHILD | win32con.WS_VSCROLL | win32con.WS_HSCROLL | win32con.WS_CLIPCHILDREN | win32con.WS_VISIBLE)
self._tabWidth = 8 # Mirror of what we send to Scintilla - never change this directly
self.bAutoCompleteAttributes = 1
self.bShowCallTips = 1
self.bMatchBraces = 0 # Editor option will default this to true later!
self.bindings = bindings.BindingsManager(self)
self.idle = IDLEenvironment.IDLEEditorWindow(self)
self.idle.IDLEExtension("AutoExpand")
# SendScintilla is called so frequently it is worth optimizing.
self.SendScintilla = self._obj_.SendMessage
def OnDestroy(self, msg):
self.SendScintilla = None
return docview.CtrlView.OnDestroy(self, msg)
def _MakeColorizer(self):
ext = os.path.splitext(self.GetDocument().GetPathName())[1]
import formatter
return formatter.BuiltinPythonSourceFormatter(self, ext)
# def SendScintilla(self, msg, w=0, l=0):
# return self._obj_.SendMessage(msg, w, l)
def SCISetTabWidth(self, width):
# I need to remember the tab-width for the AutoIndent extension. This may go.
self._tabWidth = width
control.CScintillaEditInterface.SCISetTabWidth(self, width)
def GetTabWidth(self):
return self._tabWidth
def HookHandlers(self):
# Create events for all the menu names.
for name, val in event_commands:
# handler = lambda id, code, tosend=val, parent=parent: parent.OnCommand(tosend, 0) and 0
self.bindings.bind(name, None, cid=val)
# Hook commands that do nothing other than send Scintilla messages.
for command, reflection in command_reflectors:
handler = lambda id, code, ss=self.SendScintilla, tosend=reflection: ss(tosend) and 0
self.HookCommand(handler, command)
self.HookCommand(self.OnCmdViewWS, win32ui.ID_VIEW_WHITESPACE)
self.HookCommandUpdate(self.OnUpdateViewWS, win32ui.ID_VIEW_WHITESPACE)
self.HookCommand(self.OnCmdViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
self.HookCommandUpdate(self.OnUpdateViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
self.HookCommand(self.OnCmdViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
self.HookCommandUpdate(self.OnUpdateViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
self.HookCommand(self.OnCmdViewEOL, win32ui.ID_VIEW_EOL)
self.HookCommandUpdate(self.OnUpdateViewEOL, win32ui.ID_VIEW_EOL)
self.HookCommand(self.OnCmdViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
self.HookCommandUpdate(self.OnUpdateViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
self.HookCommand(self.OnCmdFileLocate, win32ui.ID_FILE_LOCATE)
self.HookCommand(self.OnCmdEditFind, win32ui.ID_EDIT_FIND)
self.HookCommand(self.OnCmdEditRepeat, win32ui.ID_EDIT_REPEAT)
self.HookCommand(self.OnCmdEditReplace, win32ui.ID_EDIT_REPLACE)
self.HookCommand(self.OnCmdGotoLine, win32ui.ID_EDIT_GOTO_LINE)
self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT)
self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT_DIRECT)
self.HookCommand(self.OnFilePrintPreview,
win32ui.ID_FILE_PRINT_PREVIEW)
# Key bindings.
self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN)
# Hook wheeley mouse events
# self.HookMessage(self.OnMouseWheel, win32con.WM_MOUSEWHEEL)
self.HookFormatter()
def OnInitialUpdate(self):
doc = self.GetDocument()
# Enable Unicode if we can
if is_platform_unicode:
self.SendScintilla(SCI_SETCODEPAGE, SC_CP_UTF8, 0)
# Create margins
self.SendScintilla(SCI_SETMARGINTYPEN, 1, SC_MARGIN_SYMBOL);
self.SendScintilla(SCI_SETMARGINMASKN, 1, 0xF);
self.SendScintilla(SCI_SETMARGINTYPEN, 2, SC_MARGIN_SYMBOL);
self.SendScintilla(SCI_SETMARGINMASKN, 2, SC_MASK_FOLDERS);
self.SendScintilla(SCI_SETMARGINSENSITIVEN, 2, 1);
self.GetDocument().HookViewNotifications(self) # is there an MFC way to grab this?
self.HookHandlers()
# Load the configuration information.
self.OnWinIniChange(None)
self.SetSel()
self.GetDocument().FinalizeViewCreation(self) # is there an MFC way to grab this?
def _GetSubConfigNames(self):
return None # By default we use only sections without sub-sections.
def OnWinIniChange(self, section = None):
self.bindings.prepare_configure()
try:
self.DoConfigChange()
finally:
self.bindings.complete_configure()
def DoConfigChange(self):
# Bit of a hack I dont kow what to do about - these should be "editor options"
from pywin.framework.editor import GetEditorOption
self.bAutoCompleteAttributes = GetEditorOption("Autocomplete Attributes", 1)
self.bShowCallTips = GetEditorOption("Show Call Tips", 1)
# Update the key map and extension data.
configManager.configure(self, self._GetSubConfigNames())
if configManager.last_error:
win32ui.MessageBox(configManager.last_error, "Configuration Error")
self.bMatchBraces = GetEditorOption("Match Braces", 1)
self.ApplyFormattingStyles(1)
def OnDestroy(self, msg):
self.bindings.close()
self.bindings = None
self.idle.close()
self.idle = None
control.CScintillaColorEditInterface.close(self)
return docview.CtrlView.OnDestroy(self, msg)
def OnMouseWheel(self, msg):
zDelta = msg[2] >> 16
vpos = self.GetScrollPos(win32con.SB_VERT)
vpos = vpos - zDelta/40 # 3 lines per notch
self.SetScrollPos(win32con.SB_VERT, vpos)
self.SendScintilla(win32con.WM_VSCROLL,
(vpos<<16) | win32con.SB_THUMBPOSITION,
0)
def OnBraceMatch(self, std, extra):
if not self.bMatchBraces: return
DoBraceMatch(self)
def OnNeedShown(self, std, extra):
notify = self.SCIUnpackNotifyMessage(extra)
# OnNeedShown is called before an edit operation when
# text is folded (as it is possible the text insertion will happen
# in a folded region.) As this happens _before_ the insert,
# we ignore the length (if we are at EOF, pos + length may
# actually be beyond the end of buffer)
self.EnsureCharsVisible(notify.position)
def EnsureCharsVisible(self, start, end = None):
if end is None: end = start
lineStart = self.LineFromChar(min(start, end))
lineEnd = self.LineFromChar(max(start, end))
while lineStart <= lineEnd:
self.SCIEnsureVisible(lineStart)
lineStart = lineStart + 1
# Helper to add an event to a menu.
def AppendMenu(self, menu, text="", event=None, flags = None, checked=0):
if event is None:
assert flags is not None, "No event or custom flags!"
cmdid = 0
else:
cmdid = self.bindings.get_command_id(event)
if cmdid is None:
# No event of that name - no point displaying it.
print 'View.AppendMenu(): Unknown event "%s" specified for menu text "%s" - ignored' % (event, text)
return
keyname = configManager.get_key_binding( event, self._GetSubConfigNames() )
if keyname is not None:
text = text + "\t" + keyname
if flags is None: flags = win32con.MF_STRING|win32con.MF_ENABLED
if checked: flags = flags | win32con.MF_CHECKED
menu.AppendMenu(flags, cmdid, text)
def OnKeyDown(self, msg):
return self.bindings.fire_key_event( msg )
def GotoEndOfFileEvent(self, event):
self.SetSel(-1)
def KeyDotEvent(self, event):
## Don't trigger autocomplete if any text is selected
s,e = self.GetSel()
if s!=e:
return 1
self.SCIAddText(".")
if self.bAutoCompleteAttributes:
self._AutoComplete()
# View Whitespace/EOL/Indentation UI.
def OnCmdViewWS(self, cmd, code): # Handle the menu command
viewWS = self.SCIGetViewWS()
self.SCISetViewWS(not viewWS)
def OnUpdateViewWS(self, cmdui): # Update the tick on the UI.
    # Tick reflects the current view-whitespace state; item always enabled.
    cmdui.SetCheck(self.SCIGetViewWS())
    cmdui.Enable()
def OnCmdViewIndentationGuides(self, cmd, code):
    """Menu handler: toggle display of indentation guides."""
    self.SCISetIndentationGuides(not self.SCIGetIndentationGuides())
def OnUpdateViewIndentationGuides(self, cmdui): # Update the tick on the UI.
    # Tick reflects the current indentation-guide state; item always enabled.
    cmdui.SetCheck(self.SCIGetIndentationGuides())
    cmdui.Enable()
def OnCmdViewRightEdge(self, cmd, code): # Handle the menu command
    """Toggle the long-line marker between off and background shading."""
    if self.SCIGetEdgeMode() == EDGE_NONE:
        mode = EDGE_BACKGROUND
    else:
        mode = EDGE_NONE
    self.SCISetEdgeMode(mode)
def OnUpdateViewRightEdge(self, cmdui): # Update the tick on the UI.
    # Checked whenever any edge mode (line or background) is active.
    cmdui.SetCheck(self.SCIGetEdgeMode() != EDGE_NONE)
    cmdui.Enable()
def OnCmdViewEOL(self, cmd, code):
    """Menu handler: toggle display of end-of-line markers."""
    self.SCISetViewEOL(not self.SCIGetViewEOL())
def OnUpdateViewEOL(self, cmdui): # Update the tick on the UI.
    # Tick reflects the current view-EOL state; item always enabled.
    cmdui.SetCheck(self.SCIGetViewEOL())
    cmdui.Enable()
def OnCmdViewFixedFont(self, cmd, code): # Handle the menu command
    """Toggle between the fixed-pitch and proportional formatter font,
    then reapply the formatting styles."""
    self._GetColorizer().bUseFixed = not self._GetColorizer().bUseFixed
    self.ApplyFormattingStyles(0)
    # Ensure the selection is visible!
    self.ScrollCaret()
def OnUpdateViewFixedFont(self, cmdui): # Update the tick on the UI.
    # No colorizer means there is no formatter font to toggle - disable.
    c = self._GetColorizer()
    if c is not None: cmdui.SetCheck(c.bUseFixed)
    cmdui.Enable(c is not None)
def OnCmdEditFind(self, cmd, code):
    # Show the modeless Find dialog (import deferred until first use).
    import find
    find.ShowFindDialog()
def OnCmdEditRepeat(self, cmd, code):
    # Repeat the last search without showing the dialog.
    import find
    find.FindNext()
def OnCmdEditReplace(self, cmd, code):
    # Show the modeless Replace dialog (import deferred until first use).
    import find
    find.ShowReplaceDialog()
def OnCmdFileLocate(self, cmd, id):
    """If the current line is an import statement, open the imported module
    in the editor.

    Returns 1 to let the default "locate" handler run, 0 when fully handled.
    """
    line=string.strip(self.GetLine())
    import pywin.framework.scriptutils
    m = patImport.match(line)
    if m:
        # Module name on this line - locate that!
        modName = m.group('name')
        fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
        if fileName is None:
            win32ui.SetStatusText("Can't locate module %s" % modName)
            return 1 # Let the default get it.
        else:
            win32ui.GetApp().OpenDocumentFile(fileName)
    else:
        # Just to a "normal" locate - let the default handler get it.
        return 1
    return 0
def OnCmdGotoLine(self, cmd, id):
    """Prompt for a 1-based line number and move the caret there."""
    try:
        # Prompt comes from the interactive window; convert to 0-based line.
        lineNo = string.atoi(raw_input("Enter Line Number"))-1
    except (ValueError, KeyboardInterrupt):
        return 0
    self.SCIEnsureVisible(lineNo)
    self.SCIGotoLine(lineNo)
    return 0
def SaveTextFile(self, filename):
    """Write the view's complete text to *filename* in binary mode and
    clear the document's modified flag.  Returns 1 on success.

    Fix: the file handle is now closed even if write() raises (the
    original leaked the handle - and on Windows kept the file locked -
    on a failed write, e.g. disk full).
    """
    doc = self.GetDocument()
    s = self.GetTextRange()
    if is_platform_unicode:
        # Scintilla hands back utf-8; the file is written in the ANSI code page.
        s = unicode(s,"utf-8").encode("mbcs")
    f = open(filename, 'wb')
    try:
        f.write(s)
    finally:
        f.close()
    doc.SetModifiedFlag(0)
    return 1
def _AutoComplete(self):
    """Pop up Scintilla's auto-completion list for the attribute lookup
    left of the caret.

    Candidate names come from (in order): dir() of the evaluated object,
    its class attributes, COM typelib property maps, and COM dynamic
    dispatch type info.  If evaluation yields nothing, fall back to a
    textual scan of the current class body for other "name.attr" usages.
    """
    def list2dict(l):
        # Dict keys used as a cheap set (this code predates set()).
        ret={}
        for i in l:
            ret[i] = None
        return ret
    self.SCIAutoCCancel() # Cancel old auto-complete lists.
    # First try and get an object without evaluating calls
    ob = self._GetObjectAtPos(bAllowCalls = 0)
    # If that failed, try and process call or indexing to get the object.
    if ob is None:
        ob = self._GetObjectAtPos(bAllowCalls = 1)
    items_dict = {}
    if ob is not None:
        try: # Catch unexpected errors when fetching attribute names from the object
            try:
                items_dict.update(list2dict(dir(ob)))
            except AttributeError:
                pass # object has no __dict__
            if hasattr(ob, "__class__"):
                items_dict.update(list2dict(_get_class_attributes(ob.__class__)))
            # The object may be a COM object with typelib support - lets see if we can get its props.
            # (contributed by Stefan Migowsky)
            try:
                # Get the automation attributes
                items_dict.update(ob.__class__._prop_map_get_)
                # See if there is an write only property
                # could be optimized
                items_dict.update(ob.__class__._prop_map_put_)
                # append to the already evaluated list
            except AttributeError:
                pass
            # The object might be a pure COM dynamic dispatch with typelib support - lets see if we can get its props.
            if hasattr(ob, "_oleobj_"):
                try:
                    for iTI in xrange(0,ob._oleobj_.GetTypeInfoCount()):
                        typeInfo = ob._oleobj_.GetTypeInfo(iTI)
                        typeAttr = typeInfo.GetTypeAttr()
                        for iFun in xrange(0,typeAttr.cFuncs):
                            funDesc = typeInfo.GetFuncDesc(iFun)
                            funName = typeInfo.GetNames(funDesc.memid)[0]
                            if not items_dict.has_key(funName):
                                items_dict[funName] = None
                except:
                    # Deliberate best-effort: any COM failure just means
                    # fewer completion candidates.
                    pass
        except:
            win32ui.SetStatusText("Error attempting to get object attributes - %s" % (`sys.exc_info()[0]`,))
    # ensure all keys are strings.
    items = map(str, items_dict.keys())
    # All names that start with "_" go!
    items = filter(lambda word: word[0]!='_', items)
    if not items:
        # Heuristics a-la AutoExpand
        # The idea is to find other usages of the current binding
        # and assume, that it refers to the same object (or at least,
        # to an object of the same type)
        # Contributed by Vadim Chugunov [vadimch@yahoo.com]
        left, right = self._GetWordSplit()
        if left=="": # Ignore standalone dots
            return None
        # We limit our search to the current class, if that
        # information is available
        minline, maxline, curclass = self._GetClassInfoFromBrowser()
        endpos = self.LineIndex(maxline)
        text = self.GetTextRange(self.LineIndex(minline),endpos)
        try:
            list = re.findall(r"\b"+left+"\.\w+",text)
        except re.error:
            # parens etc may make an invalid RE, but this code wouldnt
            # benefit even if the RE did work :-)
            list = []
        prefix = len(left)+1
        unique = {}
        for li in list:
            unique[li[prefix:]] = 1
        # Assuming traditional usage of self...
        if curclass and left=="self":
            self._UpdateWithClassMethods(unique,curclass)
        items = filter(lambda word: word[:2]!='__' or word[-2:]!='__', unique.keys())
        # Ignore the word currently to the right of the dot - probably a red-herring.
        try:
            items.remove(right[1:])
        except ValueError:
            pass
    if items:
        items.sort()
        self.SCIAutoCSetAutoHide(0)
        self.SCIAutoCShow(items)
# TODO: This is kinda slow. Probably need some kind of cache
# here that is flushed upon file save
# Or maybe we don't need the superclass methods at all ?
def _UpdateWithClassMethods(self,dict,classinfo):
    """Merge the pyclbr method table of *classinfo*, and recursively of
    every base class that has one, into *dict*."""
    if not hasattr(classinfo,"methods"):
        # Not a pyclbr class entry - nothing to merge.
        return
    dict.update(classinfo.methods)
    for base in classinfo.super:
        if hasattr(base,"methods"):
            self._UpdateWithClassMethods(dict,base)
# Find which class definition caret is currently in and return
# indexes of the first and the last lines of that class definition
# Data is obtained from module browser (if enabled)
def _GetClassInfoFromBrowser(self,pos=-1):
    """Return (minline, maxline, classinfo) for the class containing *pos*,
    using the module-browser (pyclbr) data.  Falls back to the whole
    document (with classinfo None) when no browser data is available."""
    minline = 0
    maxline = self.GetLineCount()-1
    doc = self.GetParentFrame().GetActiveDocument()
    browser = None
    try:
        if doc is not None:
            # By convention the browser is the document's second view.
            browser = doc.GetAllViews()[1]
    except IndexError:
        pass
    if browser is None:
        return (minline,maxline,None) # Current window has no browser
    if not browser.list: return (minline,maxline,None) # Not initialized
    path = self.GetDocument().GetPathName()
    if not path: return (minline,maxline,None) # No current path
    import pywin.framework.scriptutils
    curmodule, path = pywin.framework.scriptutils.GetPackageModuleName(path)
    try:
        clbrdata = browser.list.root.clbrdata
    except AttributeError:
        return (minline,maxline,None) # No class data for this module.
    curline = self.LineFromChar(pos)
    curclass = None
    # Find out which class we are in
    for item in clbrdata.values():
        if item.module==curmodule:
            item_lineno = item.lineno - 1 # Scintilla counts lines from 0, whereas pyclbr - from 1
            # Tightest class start at-or-above the caret wins...
            if minline < item_lineno <= curline:
                minline = item_lineno
                curclass = item
            # ...and the nearest class start below the caret bounds it.
            if curline < item_lineno < maxline:
                maxline = item_lineno
    return (minline,maxline,curclass)
def _GetObjectAtPos(self, pos = -1, bAllowCalls = 0):
    """Evaluate the dotted expression to the left of *pos* and return the
    resulting object, or None.

    NOTE(review): this eval()s editor text against sys.modules, __main__
    and the debugger context - with bAllowCalls it can execute arbitrary
    code.  Long-standing deliberate Pythonwin behaviour, flagged here only.
    """
    left, right = self._GetWordSplit(pos, bAllowCalls)
    if left: # It is an attribute lookup
        # How is this for a hack!
        namespace = sys.modules.copy()
        namespace.update(__main__.__dict__)
        # Get the debugger's context.
        try:
            from pywin.framework import interact
            if interact.edit is not None and interact.edit.currentView is not None:
                globs, locs = interact.edit.currentView.GetContext()[:2]
                if globs: namespace.update(globs)
                if locs: namespace.update(locs)
        except ImportError:
            pass
        try:
            return eval(left, namespace)
        except:
            # Anything can go wrong evaluating user text - treat as "no object".
            pass
    return None
def _GetWordSplit(self, pos = -1, bAllowCalls = 0):
    """Return (before, after): the word text immediately before and after
    *pos*.  With bAllowCalls, parens/brackets also count as word
    characters, so "a(b)[c]" style expressions are kept whole.

    NOTE: despite the name, the module-level 'wordbreaks' holds the
    characters that *form* words - the scan stops on anything else.
    """
    if pos==-1: pos = self.GetSel()[0]-1 # Character before current one
    limit = self.GetTextLength()
    before = []
    after = []
    index = pos-1
    wordbreaks_use = wordbreaks
    if bAllowCalls: wordbreaks_use = wordbreaks_use + "()[]"
    # Scan backwards from pos, prepending to keep document order.
    while index>=0:
        char = self.SCIGetCharAt(index)
        if char not in wordbreaks_use: break
        before.insert(0, char)
        index = index-1
    index = pos
    # Scan forwards from pos.
    while index<=limit:
        char = self.SCIGetCharAt(index)
        if char not in wordbreaks_use: break
        after.append(char)
        index=index+1
    return string.join(before,''), string.join(after,'')
def OnPrepareDC (self, dc, pInfo):
    """MFC print hook: set the mapping mode and stop the print loop once
    the last real page has been emitted."""
    # print "OnPrepareDC for page", pInfo.GetCurPage(), "of", pInfo.GetFromPage(), "to", pInfo.GetToPage(), ", starts=", self.starts
    if dc.IsPrinting():
        # Check if we are beyond the end.
        # (only do this when actually printing, else messes up print preview!)
        if not pInfo.GetPreview() and self.starts is not None:
            prevPage = pInfo.GetCurPage() - 1
            if prevPage > 0 and self.starts[prevPage] >= self.GetTextLength():
                # All finished.
                pInfo.SetContinuePrinting(0)
                return
        dc.SetMapMode(win32con.MM_TEXT);
def OnPreparePrinting(self, pInfo):
    """Set default page range and run the framework's print setup.
    Returns the result of DoPreparePrinting (0 aborts printing)."""
    # NOTE: 'flags' is currently unused - it belonged to the custom print
    # dialog code commented out below.
    flags = win32ui.PD_USEDEVMODECOPIES | \
            win32ui.PD_ALLPAGES | \
            win32ui.PD_NOSELECTION # Dont support printing just a selection.
    # NOTE: Custom print dialogs are stopping the user's values from coming back :-(
    # self.prtDlg = PrintDialog(pInfo, PRINTDLGORD, flags)
    # pInfo.SetPrintDialog(self.prtDlg)
    pInfo.SetMinPage(1)
    # max page remains undefined for now.
    pInfo.SetFromPage(1)
    pInfo.SetToPage(1)
    ret = self.DoPreparePrinting(pInfo)
    return ret
def OnBeginPrinting(self, dc, pInfo):
    # Invalidate the page-start cache; it is rebuilt lazily in OnPrint.
    self.starts = None
    return self._obj_.OnBeginPrinting(dc, pInfo)
def CalculatePageRanges(self, dc, pInfo):
    """Paginate the document: fill self.starts[page] with the character
    offset starting each page and tell the framework the max page."""
    # Calculate page ranges and max page
    self.starts = {0:0}
    metrics = dc.GetTextMetrics()
    left, top, right, bottom = pInfo.GetDraw()
    # Leave space at the top for the header.
    # (9*tmHeight)/2 = 4.5 text lines (Python 2 integer division).
    rc = (left, top + (9*metrics['tmHeight'])/2, right, bottom)
    pageStart = 0
    maxPage = 0
    textLen = self.GetTextLength()
    while pageStart < textLen:
        # draw=0: measure only; returns the char position starting the next page.
        pageStart = self.FormatRange(dc, pageStart, textLen, rc, 0)
        maxPage = maxPage + 1
        self.starts[maxPage] = pageStart
    # And a sentinel for one page past the end
    self.starts[maxPage+1] = textLen
    # When actually printing, maxPage doesnt have any effect at this late state.
    # but is needed to make the Print Preview work correctly.
    pInfo.SetMaxPage(maxPage)
def OnFilePrintPreview(self, *arg):
    # Delegate straight to the underlying MFC view object.
    self._obj_.OnFilePrintPreview()
def OnFilePrint(self, *arg):
    # Delegate straight to the underlying MFC view object.
    self._obj_.OnFilePrint()
def FormatRange(self, dc, pageStart, lengthDoc, rc, draw):
    """Send EM_FORMATRANGE to Scintilla to lay out (and, when *draw* is
    true, render) the text from pageStart up to lengthDoc into rect *rc*.
    Returns the character position that starts the next page.

    typedef struct _formatrange {
    HDC hdc;
    HDC hdcTarget;
    RECT rc;
    RECT rcPage;
    CHARRANGE chrg;} FORMATRANGE;
    """
    # Pack a FORMATRANGE by hand: render DC, target DC, rc twice
    # (rc == rcPage here), then the character range.
    fmt='PPIIIIIIIIll'
    hdcRender = dc.GetHandleOutput()
    hdcFormat = dc.GetHandleAttrib()
    fr = struct.pack(fmt, hdcRender, hdcFormat, rc[0], rc[1], rc[2], rc[3], rc[0], rc[1], rc[2], rc[3], pageStart, lengthDoc)
    # array gives a mutable buffer whose raw address can be passed as LPARAM.
    frBuff = array.array('c', fr)
    addressFrBuff = frBuff.buffer_info()[0]
    nextPageStart = self.SendScintilla(EM_FORMATRANGE, draw, addressFrBuff)
    return nextPageStart
def OnPrint(self, dc, pInfo):
    """Render one page: header (title left, page number right), a rule,
    then the page's body text via FormatRange."""
    metrics = dc.GetTextMetrics()
    # print "dev", w, h, l, metrics['tmAscent'], metrics['tmDescent']
    if self.starts is None:
        self.CalculatePageRanges(dc, pInfo)
    pageNum = pInfo.GetCurPage() - 1  # 0-based index into self.starts
    # Setup the header of the page - docname on left, pagenum on right.
    doc = self.GetDocument()
    cxChar = metrics['tmAveCharWidth']  # NOTE(review): currently unused
    cyChar = metrics['tmHeight']
    left, top, right, bottom = pInfo.GetDraw()
    dc.TextOut(0, 2*cyChar, doc.GetTitle())
    pagenum_str = win32ui.LoadString(afxres.AFX_IDS_PRINTPAGENUM) % (pageNum+1,)
    dc.SetTextAlign(win32con.TA_RIGHT)
    dc.TextOut(right, 2*cyChar, pagenum_str)
    dc.SetTextAlign(win32con.TA_LEFT)
    # Draw the horizontal rule under the header (Python 2 integer division).
    top = top + (7*cyChar)/2
    dc.MoveTo(left, top)
    dc.LineTo(right, top)
    top = top + cyChar
    rc = (left, top, right, bottom)
    # draw=1: actually render this page's slice of the document.
    nextPageStart = self.FormatRange(dc, self.starts[pageNum], self.starts[pageNum+1], rc, 1)
def LoadConfiguration():
    """(Re)load the keyboard/editor configuration named in the registry
    into the module-global configManager, falling back to 'default' (and
    then to None) on errors."""
    global configManager
    # Bit of a hack I dont know what to do about?
    from config import ConfigManager
    # NOTE(review): 'rc' is an unused alias of configName - kept as-is.
    configName = rc = win32ui.GetProfileVal("Editor", "Keyboard Config", "default")
    configManager = ConfigManager(configName)
    if configManager.last_error:
        bTryDefault = 0
        msg = "Error loading configuration '%s'\n\n%s" % (configName, configManager.last_error)
        if configName != "default":
            msg = msg + "\n\nThe default configuration will be loaded."
            bTryDefault = 1
        win32ui.MessageBox(msg)
        if bTryDefault:
            configManager = ConfigManager("default")
            if configManager.last_error:
                win32ui.MessageBox("Error loading configuration 'default'\n\n%s" % (configManager.last_error))
                configManager = None

# Load the configuration once at import time.
LoadConfiguration()
| {
"repo_name": "Southpaw-TACTIC/Team",
"path": "src/python/Lib/site-packages/pythonwin/pywin/scintilla/view.py",
"copies": "1",
"size": "25363",
"license": "epl-1.0",
"hash": 8705341516641559000,
"line_mean": 34.1296296296,
"line_max": 163,
"alpha_frac": 0.6966052912,
"autogenerated": false,
"ratio": 3.011517454286393,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42081227454863934,
"avg_score": null,
"num_lines": null
} |
"""A general purpose stripifier, based on NvTriStrip (http://developer.nvidia.com/)
Credit for porting NvTriStrip to Python goes to the RuneBlade Foundation
library:
http://techgame.net/projects/Runeblade/browser/trunk/RBRapier/RBRapier/Tools/Geometry/Analysis/TriangleStripifier.py?rev=760
The algorithm of this stripifier is an improved version of the RuneBlade
Foundation / NVidia stripifier; it makes no assumptions about the
underlying geometry whatsoever and is intended to produce valid
output in all circumstances.
"""
# ***** BEGIN LICENSE BLOCK *****
#
# Copyright (c) 2007-2012, Python File Format Interface
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Python File Format Interface
# project nor the names of its contributors may be used to endorse
# or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# ***** END LICENSE BLOCK *****
import itertools
import random # choice
from pyffi.utils.trianglemesh import Face, Mesh
class TriangleStrip(object):
    """A heavily specialized oriented strip of faces.

    Heavily adapted from NvTriStrip and RuneBlade. Originals can be found at
    http://developer.nvidia.com/view.asp?IO=nvtristrip_library
    and
    http://techgame.net/projects/Runeblade/browser/trunk/RBRapier/RBRapier/Tools/Geometry/Analysis/TriangleStripifier.py?rev=760
    """

    def __init__(self, stripped_faces=None,
                 faces=None, vertices=None, reversed_=False):
        """Initialise the triangle strip."""
        # Fresh lists per instance (no shared mutable defaults).
        self.faces = faces if faces is not None else []
        # vertices[i:i+3] are the corners of faces[i].
        self.vertices = vertices if vertices is not None else []
        # True when self.vertices describes the strip with flipped winding;
        # get_strip() compensates.
        self.reversed_ = reversed_
        # set of indices of stripped faces
        self.stripped_faces = (stripped_faces
                               if stripped_faces is not None else set())

    def __repr__(self):
        return ("TriangleStrip(stripped_faces=%s, faces=%s, vertices=%s, reversed_=%s)"
                % (repr(self.stripped_faces), repr(self.faces),
                   repr(self.vertices), repr(self.reversed_)))

    def get_unstripped_adjacent_face(self, face, vi):
        """Get adjacent face which is not yet stripped."""
        for otherface in face.get_adjacent_faces(vi):
            if otherface.index not in self.stripped_faces:
                return otherface

    def traverse_faces(self, start_vertex, start_face, forward):
        """Builds a strip traveral of faces starting from the
        start_face and the edge opposite start_vertex. Returns number
        of faces added.
        """
        count = 0
        pv0 = start_vertex
        pv1 = start_face.get_next_vertex(pv0)
        pv2 = start_face.get_next_vertex(pv1)
        next_face = self.get_unstripped_adjacent_face(start_face, pv0)
        while next_face:
            self.stripped_faces.add(next_face.index)
            count += 1
            # The pair of vertices shared with the next face alternates
            # each step, hence the odd/even split below.
            if count & 1:
                if forward:
                    pv0 = pv1
                    pv1 = next_face.get_next_vertex(pv0)
                    self.vertices.append(pv1)
                    self.faces.append(next_face)
                else:
                    pv0 = pv2
                    pv2 = next_face.get_next_vertex(pv1)
                    self.vertices.insert(0, pv2)
                    self.faces.insert(0, next_face)
                    # Each prepend flips the strip's apparent winding.
                    self.reversed_ = not self.reversed_
            else:
                if forward:
                    pv0 = pv2
                    pv2 = next_face.get_next_vertex(pv1)
                    self.vertices.append(pv2)
                    self.faces.append(next_face)
                else:
                    pv0 = pv1
                    pv1 = next_face.get_next_vertex(pv0)
                    self.vertices.insert(0, pv1)
                    self.faces.insert(0, next_face)
                    self.reversed_ = not self.reversed_
            next_face = self.get_unstripped_adjacent_face(next_face, pv0)
        return count

    def build(self, start_vertex, start_face):
        """Builds the face strip forwards, then backwards. Returns
        index of start_face.

        Check case of single triangle
        -----------------------------

        >>> m = Mesh()
        >>> face = m.add_face(0, 1, 2)
        >>> m.lock()
        >>> t = TriangleStrip()
        >>> t.build(0, face)
        0
        >>> t
        TriangleStrip(stripped_faces={0}, faces=[Face(0, 1, 2)], vertices=[0, 1, 2], reversed_=False)
        >>> t.get_strip()
        [0, 1, 2]
        >>> t = TriangleStrip()
        >>> t.build(1, face)
        0
        >>> t
        TriangleStrip(stripped_faces={0}, faces=[Face(0, 1, 2)], vertices=[1, 2, 0], reversed_=False)
        >>> t.get_strip()
        [1, 2, 0]
        >>> t = TriangleStrip()
        >>> t.build(2, face)
        0
        >>> t
        TriangleStrip(stripped_faces={0}, faces=[Face(0, 1, 2)], vertices=[2, 0, 1], reversed_=False)
        >>> t.get_strip()
        [2, 0, 1]

        Check case of two triangles, with special strip winding fix
        -----------------------------------------------------------

        >>> m = Mesh()
        >>> face0 = m.add_face(0, 1, 2)
        >>> face1 = m.add_face(2, 1, 3)
        >>> m.lock()
        >>> t = TriangleStrip()
        >>> t.build(0, face0)
        0
        >>> t
        TriangleStrip(stripped_faces={0, 1}, faces=[Face(0, 1, 2), Face(1, 3, 2)], vertices=[0, 1, 2, 3], reversed_=False)
        >>> t.get_strip()
        [0, 1, 2, 3]
        >>> t = TriangleStrip()
        >>> t.build(1, face0)
        1
        >>> t
        TriangleStrip(stripped_faces={0, 1}, faces=[Face(1, 3, 2), Face(0, 1, 2)], vertices=[3, 1, 2, 0], reversed_=True)
        >>> t.get_strip()
        [3, 2, 1, 0]
        >>> t = TriangleStrip()
        >>> t.build(2, face1)
        1
        >>> t
        TriangleStrip(stripped_faces={0, 1}, faces=[Face(0, 1, 2), Face(1, 3, 2)], vertices=[0, 2, 1, 3], reversed_=True)
        >>> t.get_strip()
        [0, 1, 2, 3]
        >>> t = TriangleStrip()
        >>> t.build(3, face1)
        0
        >>> t
        TriangleStrip(stripped_faces={0, 1}, faces=[Face(1, 3, 2), Face(0, 1, 2)], vertices=[3, 2, 1, 0], reversed_=False)
        >>> t.get_strip()
        [3, 2, 1, 0]

        Check that extra vertex is appended to fix winding
        --------------------------------------------------

        >>> m = Mesh()
        >>> face0 = m.add_face(1, 3, 2)
        >>> face1 = m.add_face(2, 3, 4)
        >>> face2 = m.add_face(4, 3, 5)
        >>> face3 = m.add_face(4, 5, 6)
        >>> m.lock()
        >>> t = TriangleStrip()
        >>> t.build(2, face1)
        1
        >>> t
        TriangleStrip(stripped_faces={0, 1, 2, 3}, faces=[Face(1, 3, 2), Face(2, 3, 4), Face(3, 5, 4), Face(4, 5, 6)], vertices=[1, 2, 3, 4, 5, 6], reversed_=True)
        >>> t.get_strip()
        [1, 1, 2, 3, 4, 5, 6]

        Check that strip is reversed to fix winding
        -------------------------------------------

        >>> m = Mesh()
        >>> face0 = m.add_face(1, 3, 2)
        >>> face1 = m.add_face(2, 3, 4)
        >>> face2 = m.add_face(4, 3, 5)
        >>> m.lock()
        >>> t = TriangleStrip()
        >>> t.build(2, face1)
        1
        >>> t
        TriangleStrip(stripped_faces={0, 1, 2}, faces=[Face(1, 3, 2), Face(2, 3, 4), Face(3, 5, 4)], vertices=[1, 2, 3, 4, 5], reversed_=True)
        >>> t.get_strip()
        [5, 4, 3, 2, 1]

        More complicated mesh
        ---------------------

        >>> m = Mesh()
        >>> face0 = m.add_face(0, 1, 2)
        >>> face1 = m.add_face(2, 1, 7)
        >>> face2 = m.add_face(2, 7, 4)
        >>> face3 = m.add_face(5, 3, 2)
        >>> face4 = m.add_face(2, 1, 9)
        >>> face5 = m.add_face(4, 7, 10)
        >>> face6 = m.add_face(4, 10, 11)
        >>> face7 = m.add_face(11, 10, 12)
        >>> face8 = m.add_face(1, 0, 13)
        >>> m.lock()
        >>> t = TriangleStrip()
        >>> t.build(7, face1)
        4
        >>> t.faces[4] == face1 # check result from build
        True
        >>> t.stripped_faces
        {0, 1, 2, 5, 6, 7, 8}
        >>> t.faces
        [Face(10, 12, 11), Face(4, 10, 11), Face(4, 7, 10), Face(2, 7, 4), Face(1, 7, 2), Face(0, 1, 2), Face(0, 13, 1)]
        >>> t.vertices
        [12, 11, 10, 4, 7, 2, 1, 0, 13]
        >>> t.reversed_
        False
        >>> t.get_strip()
        [12, 11, 10, 4, 7, 2, 1, 0, 13]

        Mesh which has more than a single strip
        ---------------------------------------

        >>> m = Mesh()
        >>> tmp = m.add_face(2, 1, 7) # in strip
        >>> start_face = m.add_face(0, 1, 2) # in strip
        >>> tmp = m.add_face(2, 7, 4) # in strip
        >>> tmp = m.add_face(4, 7, 11) # in strip
        >>> tmp = m.add_face(5, 3, 2)
        >>> tmp = m.add_face(1, 0, 8) # in strip
        >>> tmp = m.add_face(0, 8, 9) # bad orientation!
        >>> tmp = m.add_face(8, 0, 10) # in strip
        >>> m.lock()
        >>> t = TriangleStrip()
        >>> t.build(0, start_face)
        2
        >>> t.vertices
        [10, 8, 0, 1, 2, 7, 4, 11]
        >>> t.get_strip()
        [10, 8, 0, 1, 2, 7, 4, 11]
        """
        # Reset any previous build but keep stripped_faces (it may be
        # shared with an Experiment).
        del self.faces[:]
        del self.vertices[:]
        self.reversed_ = False
        v0 = start_vertex
        v1 = start_face.get_next_vertex(v0)
        v2 = start_face.get_next_vertex(v1)
        self.stripped_faces.add(start_face.index)
        self.faces.append(start_face)
        self.vertices.append(v0)
        self.vertices.append(v1)
        self.vertices.append(v2)
        # Walk forwards, then backwards; the backward count is exactly
        # the number of faces prepended, i.e. start_face's final index.
        self.traverse_faces(v0, start_face, True)
        return self.traverse_faces(v2, start_face, False)

    def get_strip(self):
        """Get strip in forward winding."""
        strip = []
        if self.reversed_:
            if len(self.vertices) & 1:
                # Odd length: plain reversal restores forward winding.
                strip = list(reversed(self.vertices))
            elif len(self.vertices) == 4:
                # Length 4: swapping the middle pair is cheaper than
                # duplicating a vertex.
                strip = list(self.vertices[i] for i in (0, 2, 1, 3))
            else:
                # Even length: repeat the first vertex to flip winding.
                strip = list(self.vertices)
                strip.insert(0, strip[0])
        else:
            strip = list(self.vertices)
        return strip
class Experiment(object):
    """A stripification experiment, essentially consisting of a set of
    adjacent strips.
    """

    def __init__(self, start_vertex, start_face):
        # Shared with every TriangleStrip this experiment builds, so the
        # strips of one experiment never overlap.
        self.stripped_faces = set()
        self.start_vertex = start_vertex
        self.start_face = start_face
        self.strips = []

    def build(self):
        """Build strips, starting from start_vertex and start_face.

        >>> m = Mesh()
        >>> tmp = m.add_face(2, 1, 7)
        >>> s1_face = m.add_face(0, 1, 2)
        >>> tmp = m.add_face(2, 7, 4) # in strip
        >>> tmp = m.add_face(4, 7, 11) # in strip
        >>> tmp = m.add_face(5, 3, 2)
        >>> tmp = m.add_face(1, 0, 8) # in strip
        >>> tmp = m.add_face(0, 8, 9) # bad orientation!
        >>> tmp = m.add_face(8, 0, 10) # in strip
        >>> tmp = m.add_face(10, 11, 8) # in strip
        >>> # parallel strip
        >>> s2_face = m.add_face(0, 2, 21) # in strip
        >>> tmp = m.add_face(21, 2, 22) # in strip
        >>> tmp = m.add_face(2, 4, 22) # in strip
        >>> tmp = m.add_face(21, 24, 0) # in strip
        >>> tmp = m.add_face(9, 0, 24) # in strip
        >>> # parallel strip, further down
        >>> s3_face = m.add_face(8, 11, 31) # in strip
        >>> tmp = m.add_face(8, 31, 32) # in strip
        >>> tmp = m.add_face(31, 11, 33) # in strip
        >>> m.lock()
        >>> # build experiment
        >>> exp = Experiment(0, s1_face)
        >>> exp.build()
        >>> len(exp.strips)
        2
        >>> exp.strips[0].get_strip()
        [11, 4, 7, 2, 1, 0, 8, 10, 11]
        >>> exp.strips[1].get_strip()
        [4, 22, 2, 21, 0, 24, 9]
        >>> # note: with current algorithm [32, 8, 31, 11, 33] is not found
        """
        # build initial strip
        strip = TriangleStrip(stripped_faces=self.stripped_faces)
        strip.build(self.start_vertex, self.start_face)
        self.strips.append(strip)
        # build adjacent strips, picking seed faces near the middle of
        # the initial strip when it is long enough
        num_faces = len(strip.faces)
        if num_faces >= 4:
            face_index = num_faces >> 1 # quick / 2
            self.build_adjacent(strip, face_index)
            self.build_adjacent(strip, face_index + 1)
        elif num_faces == 3:
            # Try an end first; only fall back to the other end if it fails.
            if not self.build_adjacent(strip, 0):
                self.build_adjacent(strip, 2)
            self.build_adjacent(strip, 1)
        elif num_faces == 2:
            self.build_adjacent(strip, 0)
            self.build_adjacent(strip, 1)
        elif num_faces == 1:
            self.build_adjacent(strip, 0)

    def build_adjacent(self, strip, face_index):
        """Build strips adjacent to given strip, and add them to the
        experiment. This is a helper function used by build.
        """
        opposite_vertex = strip.vertices[face_index + 1]
        face = strip.faces[face_index]
        other_face = strip.get_unstripped_adjacent_face(face, opposite_vertex)
        if other_face:
            # Pick the start vertex so the new strip's winding matches the
            # parallel source strip; parity of face_index flips it.
            winding = strip.reversed_
            if face_index & 1:
                winding = not winding
            other_strip = TriangleStrip(stripped_faces=self.stripped_faces)
            if winding:
                other_vertex = strip.vertices[face_index]
                face_index = other_strip.build(other_vertex, other_face)
            else:
                other_vertex = strip.vertices[face_index + 2]
                face_index = other_strip.build(other_vertex, other_face)
            self.strips.append(other_strip)
            # Recurse towards the longer remaining side of the new strip.
            if face_index > (len(other_strip.faces) >> 1): # quick / 2
                self.build_adjacent(other_strip, face_index - 1)
            elif face_index < len(other_strip.faces) - 1:
                self.build_adjacent(other_strip, face_index + 1)
            return True
        return False
class ExperimentSelector(object):
    """Keeps track of the best experiment seen so far, scored by the
    average number of faces per strip."""

    def __init__(self):
        # -1 guarantees that the first update() always wins.
        self.best_score = -1.0
        self.best_experiment = None

    def update(self, experiment):
        """Updates best experiment with given experiment, if given
        experiment beats current experiment.
        """
        total_faces = 0.0
        for strip in experiment.strips:
            total_faces += len(strip.faces)
        score = total_faces / len(experiment.strips)
        if score > self.best_score:
            self.best_score = score
            self.best_experiment = experiment

    def clear(self):
        """Remove best experiment, to start a fresh sequence of
        experiments.
        """
        self.best_score = -1.0
        self.best_experiment = None
class TriangleStripifier(object):
    """Implementation of a triangle stripifier.

    Heavily adapted from NvTriStrip.
    Original can be found at http://developer.nvidia.com/view.asp?IO=nvtristrip_library.
    """

    def __init__(self, mesh):
        # Number of seed faces tried per round of experiments.
        self.num_samples = 10
        self.mesh = mesh

    @staticmethod
    def sample(population, k):
        """Return a k length list of unique elements chosen from the
        population sequence. Used for random sampling without
        replacement. Deterministic version of random.sample (being
        deterministic means that it is easier to test).

        >>> TriangleStripifier.sample(range(10), 1)
        [0]
        >>> TriangleStripifier.sample(range(10), 2)
        [0, 9]
        >>> TriangleStripifier.sample(range(10), 3)
        [0, 4, 9]
        >>> TriangleStripifier.sample(range(10), 4)
        [0, 3, 6, 9]
        >>> TriangleStripifier.sample(range(10), 5)
        [0, 2, 4, 6, 9]
        >>> TriangleStripifier.sample(range(10), 6)
        [0, 1, 3, 5, 7, 9]
        >>> TriangleStripifier.sample(range(10), 7)
        [0, 1, 3, 4, 6, 7, 9]
        >>> TriangleStripifier.sample(range(10), 8)
        [0, 1, 2, 3, 5, 6, 7, 9]
        >>> TriangleStripifier.sample(range(10), 9)
        [0, 1, 2, 3, 4, 5, 6, 7, 9]
        >>> TriangleStripifier.sample(range(10), 10)
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        """
        if k == 1:
            # corner case (the general formula would divide by zero)
            return [population[0]]
        else:
            # all other cases: k indices spread evenly from first to last
            return [
                population[int((i * (float(len(population)) - 1)) / (k - 1))]
                for i in range(k)]

    def find_all_strips(self):
        """Find all strips.

        Empty mesh
        ----------

        >>> m = Mesh()
        >>> m.lock()
        >>> ts = TriangleStripifier(m)
        >>> ts.find_all_strips()
        []

        Full mesh
        ---------

        >>> m = Mesh()
        >>> tmp = m.add_face(2, 1, 7)
        >>> tmp = m.add_face(0, 1, 2)
        >>> tmp = m.add_face(2, 7, 4) # in strip
        >>> tmp = m.add_face(4, 7, 11) # in strip
        >>> tmp = m.add_face(5, 3, 2)
        >>> tmp = m.add_face(1, 0, 8) # in strip
        >>> tmp = m.add_face(0, 8, 9) # bad orientation!
        >>> tmp = m.add_face(8, 0, 10) # in strip
        >>> tmp = m.add_face(10, 11, 8) # in strip
        >>> # parallel strip
        >>> tmp = m.add_face(0, 2, 21) # in strip
        >>> tmp = m.add_face(21, 2, 22) # in strip
        >>> tmp = m.add_face(2, 4, 22) # in strip
        >>> tmp = m.add_face(21, 24, 0) # in strip
        >>> tmp = m.add_face(9, 0, 24) # in strip
        >>> # parallel strip, further down
        >>> tmp = m.add_face(8, 11, 31) # in strip
        >>> tmp = m.add_face(8, 31, 32) # in strip
        >>> tmp = m.add_face(31, 11, 33) # in strip
        >>> m.lock()
        >>> ts = TriangleStripifier(m)
        >>> sorted(ts.find_all_strips())
        [[3, 2, 5], [4, 22, 2, 21, 0, 24, 9], [9, 0, 8], [11, 4, 7, 2, 1, 0, 8, 10, 11], [32, 8, 31, 11, 33]]
        """
        all_strips = []
        selector = ExperimentSelector()
        unstripped_faces = set(range(len(self.mesh.faces)))
        while True:
            experiments = []
            # note: using deterministic self.sample
            # instead of existing random.sample in python
            # because deterministic version is easier to test
            for sample in self.sample(list(unstripped_faces),
                                      min(self.num_samples,
                                          len(unstripped_faces))):
                exp_face = self.mesh.faces[sample]
                # One experiment per vertex of each sampled seed face.
                for exp_vertex in exp_face.verts:
                    experiments.append(
                        Experiment(start_vertex=exp_vertex,
                                   start_face=exp_face))
            if not experiments:
                # done!
                return all_strips
            # note: use while loop so we only need to keep at most two
            # built experiments at the same time in memory
            while experiments:
                experiment = experiments.pop()
                experiment.build()
                selector.update(experiment)
            # Commit the winning experiment: its faces are now stripped.
            unstripped_faces -= selector.best_experiment.stripped_faces
            # remove stripped faces from mesh
            for strip in selector.best_experiment.strips:
                for face in strip.faces:
                    self.mesh.discard_face(face)
            # calculate actual strips for experiment
            all_strips.extend(
                (strip.get_strip()
                 for strip in selector.best_experiment.strips))
            selector.clear()
if __name__=='__main__':
    # Run the embedded doctests when executed as a script.
    import doctest
    doctest.testmod()
| {
"repo_name": "griest024/PokyrimTools",
"path": "pyffi-develop/pyffi/utils/trianglestripifier.py",
"copies": "1",
"size": "20714",
"license": "mit",
"hash": -4843858487775067000,
"line_mean": 36.799270073,
"line_max": 163,
"alpha_frac": 0.524138264,
"autogenerated": false,
"ratio": 3.5542210020590255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45783592660590255,
"avg_score": null,
"num_lines": null
} |
# A generator for a PE with the maximum number of sections under Windows XP
# (XP loads at most 96 sections; an empty debug data directory entry is
# necessary for the file to load -- see the DataDirectory block below).
# NOTE(review): Python 2 script (xrange, locals()-based %-formatting); the
# emitted NASM text must stay byte-exact, so only comments are added here.
SEC_NUMB = 96 # XP
SECTIONS_VSTART = 0x01400 # XP, 96 sections
f = open("max_secXP.asm", "wt")
# Fixed prologue: DOS header, PE signature, file header, optional header and
# a minimal data directory (16-entry form so the debug entry exists, empty).
f.write("""; PE file with a maximum of sections
%include '..\..\consts.asm'
FILEALIGN equ 200h
SECTIONALIGN equ 1000h
org IMAGEBASE
istruc IMAGE_DOS_HEADER
at IMAGE_DOS_HEADER.e_magic, db 'MZ'
at IMAGE_DOS_HEADER.e_lfanew, dd nt_header - IMAGEBASE
iend
nt_header:
istruc IMAGE_NT_HEADERS
at IMAGE_NT_HEADERS.Signature, db 'PE',0,0
iend
istruc IMAGE_FILE_HEADER
at IMAGE_FILE_HEADER.Machine, dw IMAGE_FILE_MACHINE_I386
at IMAGE_FILE_HEADER.NumberOfSections, dw NUMBEROFSECTIONS
at IMAGE_FILE_HEADER.SizeOfOptionalHeader, dw SIZEOFOPTIONALHEADER
at IMAGE_FILE_HEADER.Characteristics, dw CHARACTERISTICS
iend
OptionalHeader:
istruc IMAGE_OPTIONAL_HEADER32
at IMAGE_OPTIONAL_HEADER32.Magic, dw IMAGE_NT_OPTIONAL_HDR32_MAGIC
at IMAGE_OPTIONAL_HEADER32.AddressOfEntryPoint, dd EntryPoint - IMAGEBASE
at IMAGE_OPTIONAL_HEADER32.ImageBase, dd IMAGEBASE
at IMAGE_OPTIONAL_HEADER32.SectionAlignment, dd SECTIONALIGN
at IMAGE_OPTIONAL_HEADER32.FileAlignment, dd FILEALIGN
at IMAGE_OPTIONAL_HEADER32.MajorSubsystemVersion, dw 4
at IMAGE_OPTIONAL_HEADER32.SizeOfImage, dd SIZEOFIMAGE
at IMAGE_OPTIONAL_HEADER32.SizeOfHeaders, dd SIZEOFHEADERS ; can be 0 in some circumstances
at IMAGE_OPTIONAL_HEADER32.Subsystem, dw IMAGE_SUBSYSTEM_WINDOWS_GUI
at IMAGE_OPTIONAL_HEADER32.NumberOfRvaAndSizes, dd NUMBEROFRVAANDSIZES
iend
DataDirectory:
istruc IMAGE_DATA_DIRECTORY_16 ; need empty debug directory under XP
at IMAGE_DATA_DIRECTORY_16.ExportsVA, dd Exports_Directory - IMAGEBASE
at IMAGE_DATA_DIRECTORY_16.ImportsVA, dd IMPORT_DESCRIPTOR - IMAGEBASE
iend
NUMBEROFRVAANDSIZES equ ($ - DataDirectory) / IMAGE_DATA_DIRECTORY_size
SIZEOFOPTIONALHEADER equ $ - OptionalHeader
SectionHeader:
""")
# One IMAGE_SECTION_HEADER per section; file offsets (PointerToRawData)
# advance by 0x200 per section starting at SECTIONS_VSTART.
for i in xrange(SEC_NUMB):
    f.write("""istruc IMAGE_SECTION_HEADER
at IMAGE_SECTION_HEADER.Name, db ".sec%(counter)02X",0
at IMAGE_SECTION_HEADER.VirtualSize, dd SECTION%(counter)iVSIZE
at IMAGE_SECTION_HEADER.VirtualAddress, dd Section%(counter)iStart - IMAGEBASE
at IMAGE_SECTION_HEADER.SizeOfRawData, dd SECTION%(counter)iSIZE
at IMAGE_SECTION_HEADER.PointerToRawData, dd %(pstart)08xh
at IMAGE_SECTION_HEADER.Characteristics, dd IMAGE_SCN_MEM_EXECUTE ; necessary under Win7 (with DEP?)
iend
""" % {"counter":i, "pstart":(i * 0x200 + SECTIONS_VSTART)})
f.write(
"""
NUMBEROFSECTIONS equ ($ - SectionHeader) / IMAGE_SECTION_HEADER_size
align 400h, db 0
bits 32
EntryPoint equ 061000h + IMAGEBASE ; XP
""")
# Section 0 holds the actual payload: MessageBox then ExitProcess.
f.write("""
SECTION .0 align=200h valign=1000h
Section0PStart equ 0%(SECTIONS_VSTART)08Xh
SIZEOFHEADERS equ $ - IMAGEBASE
Section0Start:
push MB_ICONINFORMATION ; UINT uType
push tada ; LPCTSTR lpCaption
push helloworld ; LPCTSTR lpText
push 0 ; HWND hWnd
call MessageBoxA
push 0 ; UINT uExitCode
call ExitProcess
tada db "Tada!", 0
helloworld db "Hello World!", 0
;%%IMPORT user32.dll!MessageBoxA
;%%IMPORT kernel32.dll!ExitProcess
;%%IMPORTS
SECTION0VSIZE equ $ - Section0Start
end_:
align 200h, db 0
SECTION0SIZE equ $ - Section0Start
""" % {"counter":i + 1, "RVA": (i + 3)* 0x1000})
f.write("""
SIZEOFIMAGE equ %(sizeofimage)08Xh
;Ange Albertini, BSD Licence, 2011
""" % {"sizeofimage": SECTIONS_VSTART + SEC_NUMB * 0x1000})
f.close()
| {
"repo_name": "angea/corkami",
"path": "wip/MakePE/examples/PE/max_secXP.py",
"copies": "1",
"size": "4035",
"license": "bsd-2-clause",
"hash": 5088760992289762000,
"line_mean": 30.8048780488,
"line_max": 119,
"alpha_frac": 0.6837670384,
"autogenerated": false,
"ratio": 3.246178600160901,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4429945638560901,
"avg_score": null,
"num_lines": null
} |
# A generator for a PE with the maximum number of sections.
# 199 sections confirmed working.
# NOTE(review): Python 2 script (xrange, locals()-based %-formatting); the
# emitted NASM text must stay byte-exact, so only comments are added here.
SEC_NUMB = 199
SECTIONS_VSTART = 0x02000
f = open("max_sec.asm", "wt")
# Fixed prologue: DOS header, PE signature, file header, optional header
# and a two-entry data directory (exports + imports).
f.write("""; PE file with a maximum of sections
%include '..\..\consts.asm'
FILEALIGN equ 200h
SECTIONALIGN equ 1000h
org IMAGEBASE
istruc IMAGE_DOS_HEADER
at IMAGE_DOS_HEADER.e_magic, db 'MZ'
at IMAGE_DOS_HEADER.e_lfanew, dd nt_header - IMAGEBASE
iend
nt_header:
istruc IMAGE_NT_HEADERS
at IMAGE_NT_HEADERS.Signature, db 'PE',0,0
iend
istruc IMAGE_FILE_HEADER
at IMAGE_FILE_HEADER.Machine, dw IMAGE_FILE_MACHINE_I386
at IMAGE_FILE_HEADER.NumberOfSections, dw NUMBEROFSECTIONS
at IMAGE_FILE_HEADER.SizeOfOptionalHeader, dw SIZEOFOPTIONALHEADER
at IMAGE_FILE_HEADER.Characteristics, dw CHARACTERISTICS
iend
OptionalHeader:
istruc IMAGE_OPTIONAL_HEADER32
at IMAGE_OPTIONAL_HEADER32.Magic, dw IMAGE_NT_OPTIONAL_HDR32_MAGIC
at IMAGE_OPTIONAL_HEADER32.AddressOfEntryPoint, dd EntryPoint - IMAGEBASE
at IMAGE_OPTIONAL_HEADER32.ImageBase, dd IMAGEBASE
at IMAGE_OPTIONAL_HEADER32.SectionAlignment, dd SECTIONALIGN
at IMAGE_OPTIONAL_HEADER32.FileAlignment, dd FILEALIGN
at IMAGE_OPTIONAL_HEADER32.MajorSubsystemVersion, dw 4
at IMAGE_OPTIONAL_HEADER32.SizeOfImage, dd SIZEOFIMAGE
at IMAGE_OPTIONAL_HEADER32.SizeOfHeaders, dd SIZEOFHEADERS ; can be 0 in some circumstances
at IMAGE_OPTIONAL_HEADER32.Subsystem, dw IMAGE_SUBSYSTEM_WINDOWS_GUI
at IMAGE_OPTIONAL_HEADER32.NumberOfRvaAndSizes, dd NUMBEROFRVAANDSIZES
iend
DataDirectory:
istruc IMAGE_DATA_DIRECTORY2
at ExportsVA, dd Exports_Directory - IMAGEBASE
at ImportsVA, dd IMPORT_DESCRIPTOR - IMAGEBASE
iend
NUMBEROFRVAANDSIZES equ ($ - DataDirectory) / IMAGE_DATA_DIRECTORY_size
SIZEOFOPTIONALHEADER equ $ - OptionalHeader
SectionHeader:
""")
# One IMAGE_SECTION_HEADER per section; file offsets (PointerToRawData)
# advance by 0x200 per section starting at SECTIONS_VSTART.
for i in xrange(SEC_NUMB):
    f.write("""istruc IMAGE_SECTION_HEADER
at IMAGE_SECTION_HEADER.Name, db ".sec%(counter)02X",0
at IMAGE_SECTION_HEADER.VirtualSize, dd SECTION%(counter)iVSIZE
at IMAGE_SECTION_HEADER.VirtualAddress, dd Section%(counter)iStart - IMAGEBASE
at IMAGE_SECTION_HEADER.SizeOfRawData, dd SECTION%(counter)iSIZE
at IMAGE_SECTION_HEADER.PointerToRawData, dd %(pstart)08xh
at IMAGE_SECTION_HEADER.Characteristics, dd IMAGE_SCN_MEM_EXECUTE ; necessary under Win7 (with DEP?)
iend
""" % {"counter":i, "pstart":(i * 0x200 + SECTIONS_VSTART)})
f.write(
"""
NUMBEROFSECTIONS equ ($ - SectionHeader) / IMAGE_SECTION_HEADER_size
align 400h, db 0
bits 32
EntryPoint equ 0c8000h + IMAGEBASE
""")
# Section 0 holds the actual payload: MessageBox then ExitProcess.
f.write("""
SECTION .0 align=200h valign=1000h
Section0PStart equ 0%(SECTIONS_VSTART)08Xh
SIZEOFHEADERS equ $ - IMAGEBASE
Section0Start:
push MB_ICONINFORMATION ; UINT uType
push tada ; LPCTSTR lpCaption
push helloworld ; LPCTSTR lpText
push 0 ; HWND hWnd
call MessageBoxA
push 0 ; UINT uExitCode
call ExitProcess
tada db "Tada!", 0
helloworld db "Hello World!", 0
;%%IMPORT user32.dll!MessageBoxA
;%%IMPORT kernel32.dll!ExitProcess
;%%IMPORTS
SECTION0VSIZE equ $ - Section0Start
end_:
align 200h, db 0
SECTION0SIZE equ $ - Section0Start
""" % locals())
# Remaining sections are 0x200-byte stubs, one page (0x1000) apart in memory.
for i in xrange(SEC_NUMB - 1):
    f.write("""Section%(counter)iStart equ %(RVA)i + IMAGEBASE
SECTION%(counter)iSIZE equ 0200h
SECTION%(counter)iVSIZE equ 1000h
jmp $ - 01000h
db 0h
align 200h, db 0
""" % {"counter":i + 1, "RVA": (i + 3)* 0x1000})
f.write("""
SIZEOFIMAGE equ %(sizeofimage)08Xh
;Ange Albertini, BSD Licence, 2011
""" % {"sizeofimage": SECTIONS_VSTART + SEC_NUMB * 0x1000})
f.close()
| {
"repo_name": "angea/corkami",
"path": "wip/MakePE/examples/PE/max_sec.py",
"copies": "1",
"size": "3885",
"license": "bsd-2-clause",
"hash": -1712395775167908400,
"line_mean": 29.3306451613,
"line_max": 108,
"alpha_frac": 0.6810810811,
"autogenerated": false,
"ratio": 3.245614035087719,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44266951161877194,
"avg_score": null,
"num_lines": null
} |
'''A generator to create all possible indexes from a pattern.
'''
import re
from functools import partial
import types
from dmrg_helpers.core.dmrg_exceptions import DMRGException
class SiteFilter(object):
    """Parses a site-index expression and evaluates it at given sites.

    An expression combines up to four pieces -- an integer coefficient, the
    mute index 'i', a sign, and an integer constant -- e.g. '2*i+1' keeps the
    odd sites, while a bare '1' selects only the site labeled 1.

    Attributes
    ----------
    pattern: a regex with the pattern accepted filters should have.
        Basically, filters can have a mute index, 'i', which may be multiplied
        by an integer and can have a positive or negative constant added.

    Example
    -------
    >>> from dmrg_helpers.extract.generate_indexes import SiteFilter
    >>> site_filter = SiteFilter('2*i+1')
    >>> print [site_filter.a, site_filter.i, site_filter.pm, site_filter.b]
    ['2', 'i', '+', '1']
    >>> site_filter = SiteFilter('i-1')
    >>> print [site_filter.a, site_filter.i, site_filter.pm, site_filter.b]
    [None, 'i', '-', '1']
    """
    pattern = r"([0-9]+)?\*?([a-z])?([\+|\-])?([0-9]+)?"

    def __init__(self, string):
        """Parse `string` into coefficient, mute index, sign, and constant.

        Parameters
        ----------
        string: a string.
            The filter you want to apply to the sites of the chain.

        Raises
        ------
        DMRGException: if `string` does not match the pattern, or the parsed
            pieces do not form a well-formed index expression.
        """
        super(SiteFilter, self).__init__()
        match = re.search(SiteFilter.pattern, string)
        if not match:
            raise DMRGException('Not match in site filter')
        # Unpack the four capture groups in one go; missing pieces are None.
        self.a, self.i, self.pm, self.b = match.groups()
        if not self.is_index_ok():
            raise DMRGException('Bad expression for site indexes')

    def is_index_ok(self):
        """Return True when the parsed pieces form a well-formed expression.

        A trailing constant requires a sign, and a sign requires a mute index.
        """
        dangling_constant = self.b is not None and self.pm is None
        dangling_sign = self.pm is not None and self.i is None
        return not (dangling_constant or dangling_sign)

    def is_constant(self):
        """Return True when the expression contains no mute index at all."""
        return self.i is None

    def build_index(self, index):
        """Evaluate the expression with the mute index bound to `index`.

        Parameters
        ----------
        index : an int.
            The value of the mute index.

        Returns
        -------
        result : an int.
            The value of the expression at the mute index.
        """
        if self.is_constant():
            # A pure constant ignores the mute index entirely.
            return int(self.a)
        scaled = index if self.a is None else index * int(self.a)
        if self.pm == '+':
            return scaled + int(self.b)
        if self.pm == '-':
            return scaled - int(self.b)
        return scaled
def sites_are_ok(sites, number_of_sites):
    '''Checks whether a list of sites can index an estimator.
    The conditions for this is that it fits in the chain, i.e. the largest
    site is smaller than the length of the chain, and that the indexes in the
    list appear ordered in strictly increasing order.
    Parameters
    ----------
    sites: a list of ints.
        The indexes for each of the single-site operators in the estimator.
    number_of_sites: an int.
        The length of the chain in the main DMRG code.
    Returns
    -------
    a bool with the result. An empty `sites` list is rejected (False).
    '''
    # Guard: the previous version indexed sites[-1] unconditionally and
    # raised IndexError on an empty list.
    if not sites:
        return False
    # Pairwise zip comparison is equivalent to the old xrange index loop
    # and runs unchanged on both Python 2 and Python 3.
    are_sorted = all(a < b for a, b in zip(sites, sites[1:]))
    return sites[-1] < number_of_sites and are_sorted
def generate_indexes(site_expressions, number_of_sites):
    '''Yield every list of site indexes obtainable by evaluating the
    `site_expressions` along the chain.
    Parameters
    ----------
    site_expressions: a list of strings (a single string is also accepted).
        The expressions that specify the values for each single-site operator
        index in an estimator. E.g. `1`, `2*i+1`.
    number_of_sites: an int.
        The length of the chain in the main DMRG code.
    Example
    -------
    >>> from dmrg_helpers.extract.generate_indexes import generate_indexes
    >>> [x for x in generate_indexes('2*i+1', 10)]
    [[1], [3], [5], [7], [9]]
    >>> [x for x in generate_indexes('1', 10)]
    [[1]]
    >>> [x for x in generate_indexes(['2*i+1', '2*i+2'], 10)]
    [[1, 2], [3, 4], [5, 6], [7, 8]]
    '''
    if isinstance(site_expressions, types.StringTypes):
        site_expressions = [site_expressions]
    site_filters = [SiteFilter(expression) for expression in site_expressions]
    if all(site_filter.is_constant() for site_filter in site_filters):
        # Constant-only expressions yield exactly one index list; the mute
        # index value is irrelevant, so evaluate once at i = 0.
        yield [site_filter.build_index(index=0)
               for site_filter in site_filters]
    else:
        for i in xrange(number_of_sites):
            candidate = [site_filter.build_index(index=i)
                         for site_filter in site_filters]
            if sites_are_ok(candidate, number_of_sites):
                yield candidate
| {
"repo_name": "iglpdc/dmrg_helpers",
"path": "dmrg_helpers/extract/generate_indexes.py",
"copies": "1",
"size": "5301",
"license": "mit",
"hash": -1930521831463769600,
"line_mean": 32.7643312102,
"line_max": 79,
"alpha_frac": 0.5817770232,
"autogenerated": false,
"ratio": 3.9354120267260577,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9991531334790731,
"avg_score": 0.005131543027065306,
"num_lines": 157
} |
# A generator version of words lookup
# Still early for use in a concurrent setup
from collections import defaultdict
import re
class Words:
    """Anagram lookup table built from a word-list file.

    Words are bucketed by the sorted multiset of their upper-cased letters,
    so every anagram of a key lands in the same bucket.  The lookup methods
    are generators, allowing lazy/incremental consumption of results.
    """
    def __init__(self, wordfile='words'):
        """Build the table by loading *wordfile* (one word per line)."""
        self.wordfile = wordfile
        # sorted upper-cased letters of a word -> set of matching words
        self.word_dict = defaultdict(set)
        self.load_words()
    def load_words(self):
        """Populate word_dict from the word file.

        Each line's leading run of word characters is taken as the word; the
        bucket key is that word's letters, upper-cased and sorted.
        """
        # 'with' guarantees the handle is closed even if parsing raises;
        # the previous open()/close() pair leaked it on error.
        with open(self.wordfile) as f:
            for line in f:
                line = line.strip()
                match = re.match(r'(\w+)', line)
                if match:
                    wv = match.group(1).upper()
                    wk = ''.join(sorted(list(wv)))
                    self.word_dict[wk].add(wv)
    def select(self, lst, n):
        """Yield every n-element subsequence of lst, order preserved.

        The sentinel [[]] is yielded when no selection is possible (n <= 0 or
        fewer than n elements); callers filter that sentinel out.
        """
        if n <= 0 or len(lst) == 0 or len(lst) < n:
            yield [[]]
        elif n == 1:
            for e in [[x] for x in lst]:
                yield e
        else:
            # Either take lst[0] and pick n-1 from the rest, or skip lst[0]
            # and pick n from the rest.
            sub_g1 = self.select(lst[1:], n - 1)
            sub_g2 = self.select(lst[1:], n)
            for x in sub_g1:
                if x != [[]]:
                    yield [lst[0]] + x
                else:
                    yield [lst[0]]
            for y in sub_g2:
                if y != [[]]:
                    yield y
    def sorted_key_select(self, word, n):
        """Yield the bucket key of every n-letter selection of *word*."""
        lst = [x.upper() for x in sorted(list(word))]
        gen = self.select(lst, n)
        for l in gen:
            e = ''.join(l)
            yield e
    def lookup_word(self, word, n):
        """Yield the anagram buckets matching n-letter selections of *word*."""
        gen = self.sorted_key_select(word, n)
        for w in gen:
            if w in self.word_dict:
                yield self.word_dict[w]
    def collect(self, word, n):
        """Return a generator of growing result sets for n-letter lookups.

        Each yielded set is the union of all matches seen so far; a value is
        only yielded when the union actually grew.
        """
        gen = self.lookup_word(word, n)
        accm = set()
        def generator():
            for ws in gen:
                nonlocal accm  # Py3 ;-)
                accm1 = accm.union(ws)
                if accm1.difference(accm) != set():
                    yield accm1
                accm = accm1
                accm1 = None
        return generator()
| {
"repo_name": "souravdatta/words",
"path": "gwords.py",
"copies": "1",
"size": "1970",
"license": "mit",
"hash": -8068219511769591000,
"line_mean": 27.9705882353,
"line_max": 53,
"alpha_frac": 0.4406091371,
"autogenerated": false,
"ratio": 3.8030888030888033,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9654162598079932,
"avg_score": 0.017907068421774305,
"num_lines": 68
} |
"""A generic API for contactology"""
from __future__ import print_function
import six
from six.moves.urllib.parse import urlencode
from pprint import pformat
import json
from twisted.web.client import getPage
from twisted.internet import defer
from twisted.python import log
__version__ = "3.0"
class APIError(Exception):
    """Raised when the contactology service reports an error result."""

    def __init__(self, code, message):
        # Keep the raw pieces available so callers can inspect them.
        self.code = code
        self.message = message
        formatted = "API Error: %s (%s)" % (message, code)
        super(APIError, self).__init__(formatted)
class Contactology(object):
    """Twisted-based proxy for the contactology REST API.

    Any unknown attribute access becomes an API call: ``proxy.Foo_Bar(x=1)``
    POSTs ``method=Foo_Bar`` (plus the API key) to the service and returns a
    Deferred that fires with the decoded JSON response.
    """
    host = "api.emailcampaigns.net"
    path = "/2/REST/"
    # When True, every request and response is logged via twisted.python.log.
    _logio = False
    def __init__(self, key, useHTTPS=True):
        # key: API key appended to every call; useHTTPS picks the URL scheme.
        self.key = key
        self.useHTTPS = useHTTPS
    def _log_query(self, method, r):
        # Log the outgoing call; return the args unchanged so the call can
        # be chained inline.
        log.msg("SENT: %s: %s" % (method, pformat(r)))
        return r
    def __getattr__(self, name):
        # Turn any undefined attribute into a callable API method proxy.
        def call_wrapper(**args):
            return self._call(name, **args)
        return call_wrapper
    @defer.inlineCallbacks
    def _call(self, method, **kw):
        """Issue one API call; returns a Deferred with the decoded response.

        Raises APIError when the service answers with a result of 'error'.
        """
        if self._logio:
            self._log_query(method, kw)
        # serialize non-strings using json
        for k, v in list(kw.items()):
            if six.PY2 and isinstance(v, six.text_type):
                v = v.encode('utf-8')
            if not isinstance(v, str):
                v = json.dumps(v)
            kw[k] = v
        # add our preset arguments
        kw.update({'key': self.key, 'method': method})
        # construct request data; sorted for a deterministic wire format
        postdata = urlencode(sorted(kw.items())).encode('utf-8')
        schema = self.useHTTPS and 'https' or 'http'
        url = '%s://%s%s' % (schema, self.host, self.path)
        url = url.encode('utf-8')
        headers = {b"Content-type": b"application/x-www-form-urlencoded",
                   b"User-Agent": b"Twisted Wrapper %s" % str(__version__).encode('utf-8')}
        # getPage returns a Deferred; inlineCallbacks resumes here with the
        # raw response bytes once the HTTP request completes.
        resp = yield getPage(url, method=b'POST', headers=headers, postdata=postdata)
        # de-serialize response
        resp = json.loads(resp.decode('utf-8'))
        if self._logio:
            log.msg("RECEIVED: %s" % pformat(resp))
        # check for errors
        if isinstance(resp, dict):
            if resp.get('result', None) == 'error':
                raise APIError(resp['code'], resp['message'])
        yield defer.returnValue(resp)
if __name__ == '__main__':
    # Manual smoke test: exercises a few calls against the live service and
    # then stops the reactor.  Requires a valid API key and network access.
    from twisted.internet import reactor
    from pprint import pprint
    proxy = Contactology('Your API key here')
    @defer.inlineCallbacks
    def test():
        try:
            resp = yield proxy.List_Get_Active_Lists()
            print(resp)
            resp = yield proxy.List_Get_Active_Lists(optionalParameters={'offset': 1})
            print(resp)
            resp = yield proxy.List_Get_Info(listId=1)
            print(resp)
        finally:
            # Always stop the reactor, even when one of the calls fails.
            reactor.stop()
    reactor.callWhenRunning(test)
    reactor.run()
| {
"repo_name": "jinty/van.contactology",
"path": "van/contactology/__init__.py",
"copies": "1",
"size": "2964",
"license": "bsd-3-clause",
"hash": -2132753996216031200,
"line_mean": 31.9333333333,
"line_max": 91,
"alpha_frac": 0.5782726046,
"autogenerated": false,
"ratio": 3.775796178343949,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4854068782943949,
"avg_score": null,
"num_lines": null
} |
"""A generic class to build line-oriented command interpreters.
Interpreters constructed with this class obey the following conventions:
1. End of file on input is processed as the command 'EOF'.
2. A command is parsed out of each line by collecting the prefix composed
of characters in the identchars member.
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
is passed a single argument consisting of the remainder of the line.
4. Typing an empty line repeats the last command. (Actually, it calls the
method `emptyline', which may be overridden in a subclass.)
5. There is a predefined `help' method. Given an argument `topic', it
calls the command `help_topic'. With no arguments, it lists all topics
with defined help_ functions, broken into up to three topics; documented
commands, miscellaneous help topics, and undocumented commands.
6. The command '?' is a synonym for `help'. The command '!' is a synonym
for `shell', if a do_shell method exists.
7. If completion is enabled, completing commands will be done automatically,
and completing of commands args is done by calling complete_foo() with
arguments text, line, begidx, endidx. text is string we are matching
against, all returned matches must begin with it. line is the current
input line (lstripped), begidx and endidx are the beginning and end
indexes of the text being matched, which could be used to provide
different completion depending upon which position the argument is in.
The `default' method may be overridden to intercept commands for which there
is no do_ method.
The `completedefault' method may be overridden to intercept completions for
commands that have no complete_ method.
The data member `self.ruler' sets the character used to draw separator lines
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
If the value of `self.intro' is nonempty when the cmdloop method is called,
it is printed out on interpreter startup. This value may be overridden
via an optional argument to the cmdloop() method.
The data members `self.doc_header', `self.misc_header', and
`self.undoc_header' set the headers used for the help function's
listings of documented functions, miscellaneous topics, and undocumented
functions respectively.
"""
import string, sys
__all__ = ["Cmd"]
# Default interactive prompt, and the characters allowed in a command name.
PROMPT = '(Cmd) '
IDENTCHARS = string.ascii_letters + string.digits + '_'
class Cmd:
    """A simple framework for writing line-oriented command interpreters.
    These are often useful for test harnesses, administrative tools, and
    prototypes that will later be wrapped in a more sophisticated interface.
    A Cmd instance or subclass instance is a line-oriented interpreter
    framework. There is no good reason to instantiate Cmd itself; rather,
    it's useful as a superclass of an interpreter class you define yourself
    in order to inherit Cmd's methods and encapsulate action methods.
    """
    # Class-level defaults; subclasses typically override prompt/intro and
    # the help-listing headers below.
    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
    misc_header = "Miscellaneous help topics:"
    undoc_header = "Undocumented commands:"
    nohelp = "*** No help on %s"
    use_rawinput = 1
    def __init__(self, completekey='tab', stdin=None, stdout=None):
        """Instantiate a line-oriented interpreter framework.
        The optional argument 'completekey' is the readline name of a
        completion key; it defaults to the Tab key. If completekey is
        not None and the readline module is available, command completion
        is done automatically. The optional arguments stdin and stdout
        specify alternate input and output file objects; if not specified,
        sys.stdin and sys.stdout are used.
        """
        if stdin is not None:
            self.stdin = stdin
        else:
            self.stdin = sys.stdin
        if stdout is not None:
            self.stdout = stdout
        else:
            self.stdout = sys.stdout
        self.cmdqueue = []
        self.completekey = completekey
    def cmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.
        """
        self.preloop()
        if self.use_rawinput and self.completekey:
            # Install our completer, remembering the previous one so it can
            # be restored in the finally block below.
            try:
                import readline
                self.old_completer = readline.get_completer()
                readline.set_completer(self.complete)
                readline.parse_and_bind(self.completekey+": complete")
            except ImportError:
                pass
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                # Queued commands (self.cmdqueue) take precedence over
                # interactive input.
                if self.cmdqueue:
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            line = input(self.prompt)
                        except EOFError:
                            # End of input is dispatched as the 'EOF' command.
                            line = 'EOF'
                    else:
                        self.stdout.write(self.prompt)
                        self.stdout.flush()
                        line = self.stdin.readline()
                        if not len(line):
                            line = 'EOF'
                        else:
                            line = line.rstrip('\r\n')
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            if self.use_rawinput and self.completekey:
                # Restore whatever completer was active before cmdloop ran.
                try:
                    import readline
                    readline.set_completer(self.old_completer)
                except ImportError:
                    pass
    def precmd(self, line):
        """Hook method executed just before the command line is
        interpreted, but after the input prompt is generated and issued.
        """
        return line
    def postcmd(self, stop, line):
        """Hook method executed just after a command dispatch is finished."""
        return stop
    def preloop(self):
        """Hook method executed once when the cmdloop() method is called."""
        pass
    def postloop(self):
        """Hook method executed once when the cmdloop() method is about to
        return.
        """
        pass
    def parseline(self, line):
        """Parse the line into a command name and a string containing
        the arguments. Returns a tuple containing (command, args, line).
        'command' and 'args' may be None if the line couldn't be parsed.
        """
        line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            # '?' is shorthand for 'help'
            line = 'help ' + line[1:]
        elif line[0] == '!':
            # '!' is shorthand for 'shell', only if a do_shell method exists
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        i, n = 0, len(line)
        # The command name is the longest prefix of identifier characters.
        while i < n and line[i] in self.identchars: i = i+1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line
    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.
        This may be overridden, but should not normally need to be;
        see the precmd() and postcmd() methods for useful execution hooks.
        The return value is a flag indicating whether interpretation of
        commands by the interpreter should stop.
        """
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        if cmd == '':
            return self.default(line)
        else:
            # Dispatch to do_<cmd>, falling back to default() if undefined.
            try:
                func = getattr(self, 'do_' + cmd)
            except AttributeError:
                return self.default(line)
            return func(arg)
    def emptyline(self):
        """Called when an empty line is entered in response to the prompt.
        If this method is not overridden, it repeats the last nonempty
        command entered.
        """
        if self.lastcmd:
            return self.onecmd(self.lastcmd)
    def default(self, line):
        """Called on an input line when the command prefix is not recognized.
        If this method is not overridden, it prints an error message and
        returns.
        """
        self.stdout.write('*** Unknown syntax: %s\n'%line)
    def completedefault(self, *ignored):
        """Method called to complete an input line when no command-specific
        complete_*() method is available.
        By default, it returns an empty list.
        """
        return []
    def completenames(self, text, *ignored):
        """Return the command names (do_* methods) starting with *text*."""
        dotext = 'do_'+text
        return [a[3:] for a in self.get_names() if a.startswith(dotext)]
    def complete(self, text, state):
        """Return the next possible completion for 'text'.
        If a command has not been entered, then complete against command list.
        Otherwise try to call complete_<command> to get list of completions.
        """
        if state == 0:
            # First call for this text: compute and cache the match list.
            import readline
            origline = readline.get_line_buffer()
            line = origline.lstrip()
            stripped = len(origline) - len(line)
            begidx = readline.get_begidx() - stripped
            endidx = readline.get_endidx() - stripped
            if begidx>0:
                # Completing an argument: delegate to complete_<cmd> if any.
                cmd, args, foo = self.parseline(line)
                if cmd == '':
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, 'complete_' + cmd)
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                # Completing the command name itself.
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            return self.completion_matches[state]
        except IndexError:
            return None
    def get_names(self):
        # This method used to pull in base class attributes
        # at a time dir() didn't do it yet.
        return dir(self.__class__)
    def complete_help(self, *args):
        """Complete 'help' arguments with command names and help topics."""
        commands = set(self.completenames(*args))
        topics = set(a[5:] for a in self.get_names()
                     if a.startswith('help_' + args[0]))
        return list(commands | topics)
    def do_help(self, arg):
        'List available commands with "help" or detailed help with "help cmd".'
        if arg:
            # XXX check arg syntax
            # Prefer an explicit help_<arg> method, then fall back to the
            # do_<arg> docstring, then to the nohelp message.
            try:
                func = getattr(self, 'help_' + arg)
            except AttributeError:
                try:
                    doc=getattr(self, 'do_' + arg).__doc__
                    if doc:
                        self.stdout.write("%s\n"%str(doc))
                        return
                except AttributeError:
                    pass
                self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
                return
            func()
        else:
            # No argument: list commands, grouped by whether they are
            # documented (docstring or help_* method) or not.
            names = self.get_names()
            cmds_doc = []
            cmds_undoc = []
            help = {}
            for name in names:
                if name[:5] == 'help_':
                    help[name[5:]]=1
            names.sort()
            # There can be duplicates if routines overridden
            prevname = ''
            for name in names:
                if name[:3] == 'do_':
                    if name == prevname:
                        continue
                    prevname = name
                    cmd=name[3:]
                    if cmd in help:
                        cmds_doc.append(cmd)
                        del help[cmd]
                    elif getattr(self, name).__doc__:
                        cmds_doc.append(cmd)
                    else:
                        cmds_undoc.append(cmd)
            self.stdout.write("%s\n"%str(self.doc_leader))
            self.print_topics(self.doc_header, cmds_doc, 15,80)
            self.print_topics(self.misc_header, list(help.keys()),15,80)
            self.print_topics(self.undoc_header, cmds_undoc, 15,80)
    def print_topics(self, header, cmds, cmdlen, maxcol):
        """Write one help section: a header, optional ruler, and columns."""
        if cmds:
            self.stdout.write("%s\n"%str(header))
            if self.ruler:
                self.stdout.write("%s\n"%str(self.ruler * len(header)))
            self.columnize(cmds, maxcol-1)
            self.stdout.write("\n")
    def columnize(self, list, displaywidth=80):
        """Display a list of strings as a compact set of columns.
        Each column is only as wide as necessary.
        Columns are separated by two spaces (one was not legible enough).
        """
        if not list:
            self.stdout.write("<empty>\n")
            return
        nonstrings = [i for i in range(len(list))
                      if not isinstance(list[i], str)]
        if nonstrings:
            raise TypeError("list[i] not a string for i in %s"
                            % ", ".join(map(str, nonstrings)))
        size = len(list)
        if size == 1:
            self.stdout.write('%s\n'%str(list[0]))
            return
        # Try every row count from 1 upwards
        for nrows in range(1, len(list)):
            ncols = (size+nrows-1) // nrows
            colwidths = []
            totwidth = -2
            for col in range(ncols):
                colwidth = 0
                for row in range(nrows):
                    # Entries are laid out column-major: item i sits at
                    # (row, col) with i = row + nrows*col.
                    i = row + nrows*col
                    if i >= size:
                        break
                    x = list[i]
                    colwidth = max(colwidth, len(x))
                colwidths.append(colwidth)
                totwidth += colwidth + 2
                if totwidth > displaywidth:
                    break
            if totwidth <= displaywidth:
                break
        else:
            # Nothing fit: fall back to a single column, one item per row.
            nrows = len(list)
            ncols = 1
            colwidths = [0]
        for row in range(nrows):
            texts = []
            for col in range(ncols):
                i = row + nrows*col
                if i >= size:
                    x = ""
                else:
                    x = list[i]
                texts.append(x)
            # Drop trailing empty cells so rows don't end in padding.
            while texts and not texts[-1]:
                del texts[-1]
            for col in range(len(texts)):
                texts[col] = texts[col].ljust(colwidths[col])
            self.stdout.write("%s\n"%str("  ".join(texts)))
| {
"repo_name": "glwu/python-for-android",
"path": "python3-alpha/python3-src/Lib/cmd.py",
"copies": "46",
"size": "14803",
"license": "apache-2.0",
"hash": 3673645380942845000,
"line_mean": 36.1002506266,
"line_max": 79,
"alpha_frac": 0.5525231372,
"autogenerated": false,
"ratio": 4.499392097264438,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0013030080756008633,
"num_lines": 399
} |
"""A generic class to build line-oriented command interpreters.
Interpreters constructed with this class obey the following conventions:
1. End of file on input is processed as the command 'EOF'.
2. A command is parsed out of each line by collecting the prefix composed
of characters in the identchars member.
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
is passed a single argument consisting of the remainder of the line.
4. Typing an empty line repeats the last command. (Actually, it calls the
method `emptyline', which may be overridden in a subclass.)
5. There is a predefined `help' method. Given an argument `topic', it
calls the command `help_topic'. With no arguments, it lists all topics
with defined help_ functions, broken into up to three topics; documented
commands, miscellaneous help topics, and undocumented commands.
6. The command '?' is a synonym for `help'. The command '!' is a synonym
for `shell', if a do_shell method exists.
7. If completion is enabled, completing commands will be done automatically,
and completing of commands args is done by calling complete_foo() with
arguments text, line, begidx, endidx. text is string we are matching
against, all returned matches must begin with it. line is the current
input line (lstripped), begidx and endidx are the beginning and end
indexes of the text being matched, which could be used to provide
different completion depending upon which position the argument is in.
The `default' method may be overridden to intercept commands for which there
is no do_ method.
The `completedefault' method may be overridden to intercept completions for
commands that have no complete_ method.
The data member `self.ruler' sets the character used to draw separator lines
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
If the value of `self.intro' is nonempty when the cmdloop method is called,
it is printed out on interpreter startup. This value may be overridden
via an optional argument to the cmdloop() method.
The data members `self.doc_header', `self.misc_header', and
`self.undoc_header' set the headers used for the help function's
listings of documented functions, miscellaneous topics, and undocumented
functions respectively.
"""
import string, sys
__all__ = ["Cmd"]
PROMPT = '(Cmd) '
IDENTCHARS = string.ascii_letters + string.digits + '_'
class Cmd:
    """A simple framework for writing line-oriented command interpreters.

    These are often useful for test harnesses, administrative tools, and
    prototypes that will later be wrapped in a more sophisticated interface.

    A Cmd instance or subclass instance is a line-oriented interpreter
    framework.  There is no good reason to instantiate Cmd itself; rather,
    it's useful as a superclass of an interpreter class you define yourself
    in order to inherit Cmd's methods and encapsulate action methods.
    """
    # Class-level defaults; subclasses typically override some of these.
    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
    misc_header = "Miscellaneous help topics:"
    undoc_header = "Undocumented commands:"
    nohelp = "*** No help on %s"
    use_rawinput = 1

    def __init__(self, completekey='tab', stdin=None, stdout=None):
        """Instantiate a line-oriented interpreter framework.

        The optional argument 'completekey' is the readline name of a
        completion key; it defaults to the Tab key. If completekey is
        not None and the readline module is available, command completion
        is done automatically. The optional arguments stdin and stdout
        specify alternate input and output file objects; if not specified,
        sys.stdin and sys.stdout are used.
        """
        if stdin is not None:
            self.stdin = stdin
        else:
            self.stdin = sys.stdin
        if stdout is not None:
            self.stdout = stdout
        else:
            self.stdout = sys.stdout
        # Lines queued here are consumed before prompting the user again.
        self.cmdqueue = []
        self.completekey = completekey

    def cmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.
        """
        self.preloop()
        # Install our completer, remembering the old one so it can be
        # restored in the finally block below.
        if self.use_rawinput and self.completekey:
            try:
                import readline
                self.old_completer = readline.get_completer()
                readline.set_completer(self.complete)
                readline.parse_and_bind(self.completekey+": complete")
            except ImportError:
                pass
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                if self.cmdqueue:
                    # Queued commands take precedence over interactive input.
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            line = input(self.prompt)
                        except EOFError:
                            # End of input is surfaced as the 'EOF' command.
                            line = 'EOF'
                    else:
                        self.stdout.write(self.prompt)
                        self.stdout.flush()
                        line = self.stdin.readline()
                        if not len(line):
                            line = 'EOF'
                        else:
                            line = line.rstrip('\r\n')
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            if self.use_rawinput and self.completekey:
                try:
                    import readline
                    readline.set_completer(self.old_completer)
                except ImportError:
                    pass

    def precmd(self, line):
        """Hook method executed just before the command line is
        interpreted, but after the input prompt is generated and issued.
        """
        return line

    def postcmd(self, stop, line):
        """Hook method executed just after a command dispatch is finished."""
        return stop

    def preloop(self):
        """Hook method executed once when the cmdloop() method is called."""
        pass

    def postloop(self):
        """Hook method executed once when the cmdloop() method is about to
        return.
        """
        pass

    def parseline(self, line):
        """Parse the line into a command name and a string containing
        the arguments.  Returns a tuple containing (command, args, line).
        'command' and 'args' may be None if the line couldn't be parsed.
        """
        line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            # '?' is shorthand for the help command.
            line = 'help ' + line[1:]
        elif line[0] == '!':
            # '!' is shorthand for 'shell', but only if do_shell exists.
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        # The command name is the longest prefix of identifier characters.
        i, n = 0, len(line)
        while i < n and line[i] in self.identchars: i = i+1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line

    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.

        This may be overridden, but should not normally need to be;
        see the precmd() and postcmd() methods for useful execution hooks.
        The return value is a flag indicating whether interpretation of
        commands by the interpreter should stop.
        """
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        # 'EOF' is never repeated by an empty line.
        if line == 'EOF':
            self.lastcmd = ''
        if cmd == '':
            return self.default(line)
        else:
            try:
                func = getattr(self, 'do_' + cmd)
            except AttributeError:
                return self.default(line)
            return func(arg)

    def emptyline(self):
        """Called when an empty line is entered in response to the prompt.

        If this method is not overridden, it repeats the last nonempty
        command entered.
        """
        if self.lastcmd:
            return self.onecmd(self.lastcmd)

    def default(self, line):
        """Called on an input line when the command prefix is not recognized.

        If this method is not overridden, it prints an error message and
        returns.
        """
        self.stdout.write('*** Unknown syntax: %s\n'%line)

    def completedefault(self, *ignored):
        """Method called to complete an input line when no command-specific
        complete_*() method is available.

        By default, it returns an empty list.
        """
        return []

    def completenames(self, text, *ignored):
        """Return command names (without the do_ prefix) starting with text."""
        dotext = 'do_'+text
        return [a[3:] for a in self.get_names() if a.startswith(dotext)]

    def complete(self, text, state):
        """Return the next possible completion for 'text'.

        If a command has not been entered, then complete against command list.
        Otherwise try to call complete_<command> to get list of completions.
        """
        if state == 0:
            # First call for this text: compute and cache the match list.
            import readline
            origline = readline.get_line_buffer()
            line = origline.lstrip()
            stripped = len(origline) - len(line)
            begidx = readline.get_begidx() - stripped
            endidx = readline.get_endidx() - stripped
            if begidx>0:
                # Completing an argument: dispatch to complete_<cmd> if any.
                cmd, args, foo = self.parseline(line)
                if cmd == '':
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, 'complete_' + cmd)
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                # Completing the command word itself.
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            return self.completion_matches[state]
        except IndexError:
            return None

    def get_names(self):
        # This method used to pull in base class attributes
        # at a time dir() didn't do it yet.
        return dir(self.__class__)

    def complete_help(self, *args):
        """Complete the argument to 'help' with command names and help topics."""
        commands = set(self.completenames(*args))
        topics = set(a[5:] for a in self.get_names()
                     if a.startswith('help_' + args[0]))
        return list(commands | topics)

    def do_help(self, arg):
        'List available commands with "help" or detailed help with "help cmd".'
        if arg:
            # XXX check arg syntax
            try:
                func = getattr(self, 'help_' + arg)
            except AttributeError:
                # No help_<arg>; fall back to the do_<arg> docstring.
                try:
                    doc=getattr(self, 'do_' + arg).__doc__
                    if doc:
                        self.stdout.write("%s\n"%str(doc))
                        return
                except AttributeError:
                    pass
                self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
                return
            func()
        else:
            # Bucket everything into documented / undocumented commands
            # and free-standing help topics.
            names = self.get_names()
            cmds_doc = []
            cmds_undoc = []
            # NOTE: 'help' shadows the builtin here; kept for compatibility.
            help = {}
            for name in names:
                if name[:5] == 'help_':
                    help[name[5:]]=1
            names.sort()
            # There can be duplicates if routines overridden
            prevname = ''
            for name in names:
                if name[:3] == 'do_':
                    if name == prevname:
                        continue
                    prevname = name
                    cmd=name[3:]
                    if cmd in help:
                        cmds_doc.append(cmd)
                        del help[cmd]
                    elif getattr(self, name).__doc__:
                        cmds_doc.append(cmd)
                    else:
                        cmds_undoc.append(cmd)
            self.stdout.write("%s\n"%str(self.doc_leader))
            self.print_topics(self.doc_header,   cmds_doc,   15,80)
            self.print_topics(self.misc_header,  list(help.keys()),15,80)
            self.print_topics(self.undoc_header, cmds_undoc, 15,80)

    def print_topics(self, header, cmds, cmdlen, maxcol):
        """Write one help section: a header, an optional ruler, and columns."""
        if cmds:
            self.stdout.write("%s\n"%str(header))
            if self.ruler:
                self.stdout.write("%s\n"%str(self.ruler * len(header)))
            self.columnize(cmds, maxcol-1)
            self.stdout.write("\n")

    def columnize(self, list, displaywidth=80):
        """Display a list of strings as a compact set of columns.

        Each column is only as wide as necessary.
        Columns are separated by two spaces (one was not legible enough).
        """
        # NOTE: the parameter name 'list' shadows the builtin; kept for
        # backward compatibility with overriding subclasses.
        if not list:
            self.stdout.write("<empty>\n")
            return
        nonstrings = [i for i in range(len(list))
                      if not isinstance(list[i], str)]
        if nonstrings:
            raise TypeError("list[i] not a string for i in %s"
                            % ", ".join(map(str, nonstrings)))
        size = len(list)
        if size == 1:
            self.stdout.write('%s\n'%str(list[0]))
            return
        # Try every row count from 1 upwards
        for nrows in range(1, len(list)):
            ncols = (size+nrows-1) // nrows
            colwidths = []
            totwidth = -2
            for col in range(ncols):
                colwidth = 0
                for row in range(nrows):
                    i = row + nrows*col
                    if i >= size:
                        break
                    x = list[i]
                    colwidth = max(colwidth, len(x))
                colwidths.append(colwidth)
                totwidth += colwidth + 2
                if totwidth > displaywidth:
                    break
            if totwidth <= displaywidth:
                break
        else:
            # Nothing fits: fall back to a single column.
            nrows = len(list)
            ncols = 1
            colwidths = [0]
        for row in range(nrows):
            texts = []
            for col in range(ncols):
                i = row + nrows*col
                if i >= size:
                    x = ""
                else:
                    x = list[i]
                texts.append(x)
            # Drop trailing empty cells so short rows end cleanly.
            while texts and not texts[-1]:
                del texts[-1]
            for col in range(len(texts)):
                texts[col] = texts[col].ljust(colwidths[col])
            self.stdout.write("%s\n"%str("  ".join(texts)))
| {
"repo_name": "nattee/cafe-grader-web",
"path": "lib/assets/Lib/cmd.py",
"copies": "11",
"size": "15261",
"license": "mit",
"hash": 630701896771360600,
"line_mean": 36.0573566085,
"line_max": 79,
"alpha_frac": 0.5372518184,
"autogenerated": false,
"ratio": 4.5953026196928635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001339392946567504,
"num_lines": 401
} |
"""A generic custom property.
There are three parts to this file:
- the Flexidate class, which acts as a non-trivial example
- the CustomProperty class, which implements the generic functionality
- classes FlexidateProperty and Actor, and main(), showing sample usage
(Arguably the CustomProperty class should come before the Flexidate
class, and arguably the tests should be in a separate file and use the
unittest framework.)
"""
import datetime
from google.appengine.ext import testbed
from ndb.model import *
class Flexidate(object):
  """A 'fuzzy date': a start date plus a fuzz measured in days.

  Exposes the start date and the fuzz as properties, and the (inclusive)
  end date via the end() accessor.
  """

  def __new__(cls, date, fuzz=1):
    # Validate before allocating; datetime.datetime is explicitly
    # rejected even though it subclasses datetime.date.  # !!!
    assert isinstance(date, datetime.date), repr(date)
    assert not isinstance(date, datetime.datetime), repr(date)
    assert isinstance(fuzz, (int, long)), repr(fuzz)
    assert fuzz >= 1, repr(fuzz)
    inst = super(Flexidate, cls).__new__(cls)
    inst.__date = date
    inst.__fuzz = fuzz
    return inst

  def __repr__(self):
    d = self.__date
    if self.__fuzz != 1:
      return 'Flexidate<%d-%d-%d+%d>' % (d.year, d.month, d.day, self.__fuzz)
    return 'Flexidate<%d-%d-%d>' % (d.year, d.month, d.day)

  @property
  def start(self):
    return self.__date

  @property
  def fuzz(self):
    return self.__fuzz

  # Just to make things interesting, end is not an attribute but an
  # accessor function -- you must call x.end().
  def end(self):
    return self.__date + datetime.timedelta(days=self.__fuzz - 1)

  def __eq__(self, other):
    if not isinstance(other, Flexidate):
      return NotImplemented
    return (self.__date, self.__fuzz) == (other.__date, other.__fuzz)

  def __ne__(self, other):
    outcome = self.__eq__(other)
    # Propagate NotImplemented unchanged; invert a real boolean answer.
    return outcome if not isinstance(outcome, bool) else not outcome
class CustomProperty(StructuredProperty):
  """Custom property.

  Values stored for this property are either instances of a
  user-facing class (e.g. Flexidate), or instances of a synthetic
  model class which is constructed on the fly from the 'attributes'
  argument to the constructor.  Conversion between instances of the
  user-facing class and instances of the model class happen lazily:
  when the program requests a value using _get_value(), the value is
  converted to a user-facing class instance if necessary, while when
  the property is to be serialized, it is converted to a model class
  instance if necessary.  If the 'repeated' flag is set, all items in
  the list value are converted in one direction or back at once.
  """

  MISSING = object()  # Singleton to indicate "no value at all"

  def __init__(self,
               construct,  # lambda model_object: user_object
               attributes,  # [attrname, ...] or {attrname: propname, ...}
               name=None,
               repeated=False,
               indexed=True,
               # etc.
               ):
    """Constructor.

    Args:
      construct: A function that takes a 'model class' instance and
        returns an instance of the desired user class.
      attributes: A list, set or tuple of attribute/property names, or
        a dict mapping attribute names to property names, Property
        instances, or (Property instance, function) tuples.  Each
        attribute name corresponds to an attribute of the user class;
        it will be serialized using the corresponding property name.
        If a function is given, it is used to extract the attribute
        from the user object; otherwise the attribute name is used to
        get the attribute from the user object.
      name, repeated, indexed, etc.: As for all properties.
    """
    assert construct is not None
    assert attributes is not None
    self._construct = construct
    # Normalize a plain sequence of names into a name -> name mapping.
    if isinstance(attributes, dict):
      pass
    elif isinstance(attributes, (list, tuple, set, frozenset)):
      attributes = dict(zip(attributes, attributes))
    else:
      assert False, type(attributes)
    # _attrmap maps attribute name -> (Property, extractor-or-None);
    # classdict collects the Property objects for the synthetic model class.
    self._attrmap = {}
    classdict = {}
    for key, value in attributes.iteritems():
      # The value can be a string, a property, or a (property, function) pair.
      if isinstance(value, basestring):
        # Bare name: store under a GenericProperty, read via getattr.
        prop = GenericProperty()
        self._attrmap[key] = (prop, None)
        classdict[key] = prop
      elif isinstance(value, Property):
        self._attrmap[key] = (value, None)
        classdict[key] = value
      elif isinstance(value, tuple):
        assert len(value) == 2, repr(value)
        prop, func = value
        assert isinstance(prop, Property), repr(prop)
        assert callable(func), repr(func)
        self._attrmap[key] = value
        classdict[key] = prop
      else:
        assert False, repr(value)
    # Build the synthetic model class used for serialization.
    modelclass = MetaModel('<synthetic modelclass>', (Model,), classdict)
    super(CustomProperty, self).__init__(modelclass,
                                         name,
                                         repeated=repeated,
                                         indexed=indexed,
                                         # etc.
                                         )

  def _to_base_type(self, value):
    """Convert a user-class instance into a synthetic-model instance."""
    # NOTE(review): this assert guarantees the following 'if' is always
    # true, making the check dead code -- presumably one of the two was
    # meant to be removed; confirm intended contract.
    assert not isinstance(value, self._modelclass), repr(value)
    if not isinstance(value, self._modelclass):
      newvalue = self._modelclass()
      for attrname, (prop, func) in self._attrmap.iteritems():
        if func is None:
          attrval = getattr(value, attrname, self.MISSING)
        else:
          attrval = func(value)
        # Attributes absent from the user object are simply not stored.
        if attrval is not self.MISSING:
          setattr(newvalue, attrname, attrval)
      value = newvalue
    return value

  def _from_base_type(self, value):
    """Convert a synthetic-model instance back into a user-class instance."""
    # NOTE(review): as in _to_base_type, the assert makes the 'if' below
    # unconditionally true -- confirm intended contract.
    assert isinstance(value, self._modelclass), repr(value)
    if isinstance(value, self._modelclass):
      value = self._construct(value)
    return value

  # TODO: Not sure what _validate() should do here, since we don't
  # have a type to check for, only a 'constructor' function.
class FlexidateProperty(CustomProperty):
  """CustomProperty specialization whose user-facing value is a Flexidate."""

  def __init__(self, name=None, repeated=False, indexed=True):
    # 'start' and the derived 'end' are stored as DateProperty values;
    # 'fuzz' falls back to a GenericProperty looked up by attribute name.
    attr_spec = {
        'start': DateProperty(),
        'fuzz': 'fuzz',
        'end': (DateProperty(), lambda fd: fd.end()),
    }
    return super(FlexidateProperty, self).__init__(
        construct=lambda ent: Flexidate(ent.start, ent.fuzz),
        attributes=attr_spec,
        name=name,
        repeated=repeated,
        indexed=indexed,
        )

  def __repr__(self):
    fields = (self._name, self._repeated, self._indexed)
    return 'FlexidateProperty(%r, %r, %r)' % fields

  def _validate(self, value):
    # Only Flexidate instances may be assigned to this property.
    if isinstance(value, Flexidate):
      return
    raise TypeError('expected Flexidate, got %r' % (value,))
class Actor(Model):
  # Sample entity exercising FlexidateProperty in both single and
  # repeated form.
  name = StringProperty()
  born = FlexidateProperty()
  events = FlexidateProperty(repeated=True)
def main():
  """Demo: exercise FlexidateProperty against the datastore/memcache stubs."""
  tb = testbed.Testbed()
  tb.activate()
  tb.init_datastore_v3_stub()
  tb.init_memcache_stub()
  # The property objects themselves (including sub-properties) are printable.
  print Actor.name
  print Actor.born
  print Actor.born.start
  print Actor.born.fuzz
  print Actor.born.end
  a = Actor(name='John Doe')
  a.born = Flexidate(datetime.date(1956, 1, 1), 366)
  print 'a =', a
  # Round-trip through protobuf serialization and check equality.
  pb = a._to_pb()
  b = Actor._from_pb(pb)
  b.key = None
  print 'b =', b
  assert a == b, (a, b)
  a.put()
  b.name = 'Joan Doe'
  b.born = Flexidate(datetime.date(1956, 1, 1), 31)
  b.events = [a.born, b.born]
  b.put()
  print b
  # Query by sub-property, by whole value, by inequality, and on the
  # repeated property.
  q = Actor.query(Actor.born.start == datetime.date(1956, 1, 1))
  print 'q =', q
  for i, res in enumerate(q):
    print '%2d: %s' % (i, res)
  q = Actor.query(Actor.born == Flexidate(datetime.date(1956, 1, 1), 366))
  print 'q =', q
  for i, res in enumerate(q):
    print '%2d: %s' % (i, res)
  q = Actor.query(Actor.born.fuzz >= 31)
  print 'q =', q
  for i, res in enumerate(q):
    print '%2d: %s' % (i, res)
  q = Actor.query(Actor.events.fuzz == 366)
  print 'q =', q
  for i, res in enumerate(q):
    print '%2d: %s' % (i, res)
  tb.deactivate()


if __name__ == '__main__':
  main()
| {
"repo_name": "bslatkin/8-bits",
"path": "appengine-ndb/custom.py",
"copies": "1",
"size": "8011",
"license": "apache-2.0",
"hash": 791197267654449400,
"line_mean": 31.5650406504,
"line_max": 78,
"alpha_frac": 0.6188990139,
"autogenerated": false,
"ratio": 3.7734338200659443,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9823528000237618,
"avg_score": 0.013760966745665446,
"num_lines": 246
} |
"""A generic daemon class. Subclass and override the run() method.
Based on http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/
"""
import atexit
import os
from signal import SIGTERM
import sys
import time
class Daemon(object):
    """A generic UNIX daemon.

    Subclass and override run(); call start()/stop()/restart() to manage
    the daemon.  A pidfile records the daemon's process id.
    """

    def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null',
                 stderr='/dev/null'):
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.pidfile = pidfile

    def daemonize(self):
        """UNIX double-fork magic."""
        try:
            pid = os.fork()
            if pid > 0:
                # First parent; exit.
                sys.exit(0)
        except OSError as e:
            sys.stderr.write('Could not fork! %d (%s)\n' %
                             (e.errno, e.strerror))
            sys.exit(1)
        # Disconnect from parent environment.
        os.chdir('/')
        os.setsid()
        os.umask(0o022)
        # Fork again so the daemon can never reacquire a controlling tty.
        try:
            pid = os.fork()
            if pid > 0:
                # Second parent; exit.
                sys.exit(0)
        except OSError as e:
            sys.stderr.write('Could not fork (2nd)! %d (%s)\n' %
                             (e.errno, e.strerror))
            sys.exit(1)
        # Redirect file descriptors.
        # BUG FIX: the original used the Python 2 builtin file(), which
        # does not exist on Python 3 (the rest of this file already uses
        # Python 3 syntax), and requested unbuffered ('0') text I/O,
        # which Python 3 rejects; stderr is therefore opened in binary
        # mode for unbuffered writes.  Only the fileno()s matter to dup2.
        sys.stdout.flush()
        sys.stderr.flush()
        si = open(self.stdin, 'r')
        so = open(self.stdout, 'a+')
        se = open(self.stderr, 'ab+', 0)
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())
        # Write the pidfile and arrange for its removal on clean exit.
        atexit.register(self.delpid)
        pid = str(os.getpid())
        with open(self.pidfile, 'w+') as fp:
            fp.write('%s\n' % pid)

    def delpid(self):
        """Remove the pidfile (registered as an atexit handler)."""
        os.remove(self.pidfile)

    def start(self, *args, **kw):
        """Start the daemon, refusing if a pidfile already exists."""
        pid = None
        if os.path.exists(self.pidfile):
            with open(self.pidfile, 'r') as fp:
                pid = int(fp.read().strip())
        if pid:
            msg = 'pidfile (%s) exists. Daemon already running?\n'
            sys.stderr.write(msg % self.pidfile)
            sys.exit(1)
        self.daemonize()
        self.run(*args, **kw)

    def stop(self):
        """Stop the daemon recorded in the pidfile via SIGTERM."""
        pid = None
        if os.path.exists(self.pidfile):
            with open(self.pidfile, 'r') as fp:
                pid = int(fp.read().strip())
        if not pid:
            msg = 'pidfile (%s) does not exist. Daemon not running?\n'
            sys.stderr.write(msg % self.pidfile)
            return
        try:
            # Keep signalling until the process disappears.
            while True:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError as e:
            e = str(e)
            if e.find('No such process') > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print(e)
                sys.exit(1)

    def restart(self, *args, **kw):
        """Restart the daemon."""
        self.stop()
        self.start(*args, **kw)

    def run(self, *args, **kw):
        """Override this method."""
| {
"repo_name": "RepositPower/pystatsd",
"path": "pystatsd/daemon.py",
"copies": "2",
"size": "3233",
"license": "bsd-2-clause",
"hash": -8496516436152733000,
"line_mean": 27.3596491228,
"line_max": 84,
"alpha_frac": 0.4856170739,
"autogenerated": false,
"ratio": 3.8170011806375443,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00010319917440660474,
"num_lines": 114
} |
""" A generic Emacs-style kill ring, as well as a Qt-specific version.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# System library imports
from IPython.external.qt import QtCore, QtGui
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class KillRing(object):
    """ A generic Emacs-style kill ring.

    Killed text is appended to a ring; yank() retrieves the most recent
    kill and rotate() walks back through older ones, returning None once
    the ring is exhausted.
    """

    def __init__(self):
        self.clear()

    def clear(self):
        """ Clears the kill ring.
        """
        self._ring = []
        self._index = -1

    def kill(self, text):
        """ Adds some killed text to the ring.
        """
        self._ring.append(text)

    def yank(self):
        """ Yank back the most recently killed text.

        Returns:
        --------
        A text string or None.
        """
        # Position just past the end so the first rotate() lands on the
        # newest entry.
        self._index = len(self._ring)
        return self.rotate()

    def rotate(self):
        """ Rotate the kill ring, then yank back the new top.

        Returns:
        --------
        A text string or None.
        """
        self._index -= 1
        return self._ring[self._index] if self._index >= 0 else None
class QtKillRing(QtCore.QObject):
    """ A kill ring attached to Q[Plain]TextEdit.
    """

    #--------------------------------------------------------------------------
    # QtKillRing interface
    #--------------------------------------------------------------------------

    def __init__(self, text_edit):
        """ Create a kill ring attached to the specified Qt text edit.
        """
        assert isinstance(text_edit, (QtGui.QTextEdit, QtGui.QPlainTextEdit))
        super(QtKillRing, self).__init__()

        self._ring = KillRing()
        # Text inserted by the most recent yank/rotate; reset whenever the
        # cursor moves for any other reason (see _cursor_position_changed).
        self._prev_yank = None
        # Set before our own insertions so the cursor-moved signal they
        # trigger does not clear _prev_yank.
        self._skip_cursor = False
        self._text_edit = text_edit

        text_edit.cursorPositionChanged.connect(self._cursor_position_changed)

    def clear(self):
        """ Clears the kill ring.
        """
        self._ring.clear()
        self._prev_yank = None

    def kill(self, text):
        """ Adds some killed text to the ring.
        """
        self._ring.kill(text)

    def kill_cursor(self, cursor):
        """ Kills the text selected by the give cursor.
        """
        text = cursor.selectedText()
        if text:
            cursor.removeSelectedText()
            self.kill(text)

    def yank(self):
        """ Yank back the most recently killed text.
        """
        text = self._ring.yank()
        if text:
            self._skip_cursor = True
            cursor = self._text_edit.textCursor()
            cursor.insertText(text)
            self._prev_yank = text

    def rotate(self):
        """ Rotate the kill ring, then yank back the new top.
        """
        # Only meaningful immediately after a yank/rotate: the previous
        # yank is selected (by moving left over its length) and replaced.
        if self._prev_yank:
            text = self._ring.rotate()
            if text:
                self._skip_cursor = True
                cursor = self._text_edit.textCursor()
                cursor.movePosition(QtGui.QTextCursor.Left,
                                    QtGui.QTextCursor.KeepAnchor,
                                    n=len(self._prev_yank))
                cursor.insertText(text)
                self._prev_yank = text

    #--------------------------------------------------------------------------
    # Protected interface
    #--------------------------------------------------------------------------

    #------ Signal handlers ----------------------------------------------------

    def _cursor_position_changed(self):
        # A cursor move not caused by our own insertions ends the
        # yank/rotate sequence.
        if self._skip_cursor:
            self._skip_cursor = False
        else:
            self._prev_yank = None
| {
"repo_name": "sodafree/backend",
"path": "build/ipython/build/lib.linux-i686-2.7/IPython/frontend/qt/console/kill_ring.py",
"copies": "5",
"size": "3809",
"license": "bsd-3-clause",
"hash": 9159628415168418000,
"line_mean": 28.7578125,
"line_max": 80,
"alpha_frac": 0.4305592019,
"autogenerated": false,
"ratio": 4.9467532467532465,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0017864122289513629,
"num_lines": 128
} |
""" A generic game scene. """
import pygame.locals
from .update_events import UpdateEvents
from .resources.scenes import SceneChange
class Scene(object):
    """ A generic game scene.

    A scene should be immutable once created.
    """

    def setup(self):
        pass

    def teardown(self):
        pass

    def _check_for_quit(self, ev, scene_events):
        # Quit on a window-close event or on the Escape key.
        wants_quit = ev.type == pygame.locals.QUIT or (
            ev.type == pygame.locals.KEYDOWN
            and ev.key == pygame.locals.K_ESCAPE)
        if not wants_quit:
            return False
        scene_events.add_event(SceneChange(None))
        return True

    def update(self, gamestate, pygame_events):
        """ Update the gamestate based on recent pygame events.

        :param gamestate:
            The current gamestate.
        :param pygame_events:
            A list of recent pygame events.
        :returns:
            The new scene and a list of scene events,
            (new_scene, scene_events).
        """
        events = UpdateEvents()
        for pygame_event in pygame_events:
            # A False result from handle_event stops further processing.
            if not self.handle_event(pygame_event, gamestate, events):
                break
        return events

    def handle_event(self, ev, gamestate, scene_events):
        return not self._check_for_quit(ev, scene_events)

    def render(self, gamestate, surface):
        """ Render the gamestate to the surface.

        :param gamestate:
            The current gamestate.
        :param surface:
            The surface to render to.
        :returns:
            None.
        """
        pass
| {
"repo_name": "hodgestar/banjo",
"path": "ngn/scene.py",
"copies": "1",
"size": "1606",
"license": "mit",
"hash": 8157457618776009000,
"line_mean": 23.7076923077,
"line_max": 66,
"alpha_frac": 0.5722291407,
"autogenerated": false,
"ratio": 4.34054054054054,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 65
} |
# A Generic job class, that is extended by a specific job class that needs to be executed on the cluster
import sys
import os.path
import os
import subprocess
import datetime
import time
from itertools import product
from collections import defaultdict
# Per-run log directory, timestamped so successive runs don't collide.
LOGS_DIR = './logs_' + datetime.datetime.now().isoformat()

MEASURE_PERFORMANCE = False  # wrap each command in 'perf stat' when True
RUN_LOCAL = False            # run commands synchronously instead of submitting
USE_QSUB = False
USE_SBATCH = True

SBATCH_QUEUE = 'serial_requeue'
# SBATCH_QUEUE = 'general'
#SBATCH_QUEUE = 'holyseasgpu'
# SBATCH_ACCOUNT = None
SBATCH_ACCOUNT = 'lichtman_lab'
#SBATCH_EXCLUDED = None
# SBATCH_EXCLUDED = 'regal01,regal02,regal03,regal04,regal10,regal09,regal08,regal11,regal13,regal18,hsph05,hsph06,regal14'
SBATCH_EXCLUDED = 'holygiribet06,atlast3a01,atlast3a02,atlast3b01,atlast3b02'

# Make sure the logs directory exists
# NOTE(review): the second clause tests dirname(LOGS_DIR) ('.'), not
# LOGS_DIR itself -- presumably 'not os.path.isdir(LOGS_DIR)' was meant;
# harmless in practice since the first clause triggers creation.
if not os.path.exists(LOGS_DIR) or not os.path.isdir(os.path.dirname(LOGS_DIR)):
    os.makedirs(LOGS_DIR)

# Multicore settings
# #MAX_CORES = 16
MAX_CPUS_PER_NODE = 8
MAX_MEMORY_MB = 128000
MIN_TIME = 600
MAX_JOBS_TO_SUBMIT = 100
TIME_FACTOR = 4
MAX_SUBMISSION_ATTEMPTS = 3
class Job(object):
all_jobs = []
submitted_job_blocks = {}
def __init__(self):
self.name = self.__class__.__name__ + str(len(Job.all_jobs)) + '_' + datetime.datetime.now().isoformat()
self.jobid = None
self.output = []
self.already_done = False
##self.processors = 1
self.time = 60
self.memory = 1000
self.threads = 1
self.is_java_job = False
self.callback = None
Job.all_jobs.append(self)
def get_threads_num(self):
if self.is_java_job:
return self.threads + 1
return self.threads
def get_done(self):
if self.already_done:
return True
all_outputs = self.output if isinstance(self.output, (list, tuple)) else [self.output]
if all([os.path.exists(f) for f in all_outputs]):
# If the job's output is a directory, and the job is currently running or executing, then it is not done
if self.jobid is not None and any([os.path.isdir(f) for f in all_outputs]):
s_jobid = str(self.jobid)
sacct_output = subprocess.check_output(['sacct', '-n', '-o', 'JobID,JobName%100,State%20'])
for job_line in sacct_output.split('\n'):
job_split = job_line.split()
if len(job_split) == 0:
continue
job_id = job_split[0]
job_status = ' '.join(job_split[2:])
if s_jobid == job_id and job_status == 'COMPLETED':
self.already_done = True
# If there's a callback to the job, execute it (will only be called once due to the already_done flag
if self.callback is not None:
func, args = self.callback
func(*args)
return True
else:
self.already_done = True
# If there's a callback to the job, execute it (will only be called once due to the already_done flag
if self.callback is not None:
func, args = self.callback
func(*args)
return True
return False
def set_callback(self, func, args):
self.callback = (func, args)
def dependendencies_done(self):
for d in self.dependencies:
if not d.get_done():
return False
return True
def run(self):
# Make sure output directories exist
out = self.output
if isinstance(out, basestring):
out = [out]
for f in out:
if not os.path.isdir(os.path.dirname(f)):
os.mkdir(os.path.dirname(f))
if self.get_done():
return 0
print("RUN", self.name)
print(" ".join(self.command()))
work_queue = SBATCH_QUEUE
# if self.get_threads_num() > 1:
# work_queue = "general"
account_str = ""
if SBATCH_ACCOUNT is not None:
account_str = "--account={}".format(SBATCH_ACCOUNT)
if RUN_LOCAL:
log_out = open(LOGS_DIR + "/out." + self.name, 'w')
log_err = open(LOGS_DIR + "/error." + self.name, 'w')
try:
subprocess.check_call(' '.join(self.command()), stdout=log_out, stderr=log_err, shell=True)
finally:
log_out.close()
log_err.close()
elif USE_SBATCH:
command_list = ["sbatch",
"-J", self.name, # Job name
"-p", work_queue, # Work queue (partition) = general / unrestricted / interactive / serial_requeue
"--no-requeue",
"--ntasks", str(1), # Number of processes
"--cpus-per-task", str(self.get_threads_num()), # Number of threads
"-t", str(self.time), # Time in munites 1440 = 24 hours
"--mem-per-cpu", str(self.memory), # Max memory in MB (strict - attempts to allocate more memory will fail)
account_str,
"--open-mode=append", # Append to log files
"-o", LOGS_DIR + "/out." + self.name, # Standard out file
"-e", LOGS_DIR + "/error." + self.name] # Error out file
if SBATCH_EXCLUDED is not None:
command_list.append("--exclude")
command_list.append(SBATCH_EXCLUDED)
if len(self.dependencies) > 0:
command_list = command_list + self.dependency_strings()
process = subprocess.Popen(command_list,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if MEASURE_PERFORMANCE:
sbatch_out, sbatch_err = process.communicate("#!/bin/bash\nperf stat -o " + LOGS_DIR + "/perf.{0} {1}".format(self.name, " ".join(self.command())))
else:
sbatch_out, sbatch_err = process.communicate("#!/bin/bash\n{0}".format(" ".join(self.command())))
if len(sbatch_err) == 0:
self.jobid = sbatch_out.split()[3]
return 1
def dependency_strings(self):
dependency_string = ":".join(d.jobid for d in self.dependencies if not d.get_done())
if len(dependency_string) > 0:
return ["-d", "afterok:" + dependency_string]
return []
@classmethod
def run_all(cls):
for j in cls.all_jobs:
j.run()
@classmethod
def multicore_run_list(cls, jobs):
all_jobs_optimizer = JobBlockOrganizer()
for j in jobs:
# Make sure output directories exist
out = j.output
if isinstance(out, basestring):
out = [out]
for f in out:
if not os.path.isdir(os.path.dirname(f)):
os.mkdir(os.path.dirname(f))
if j.get_done():
continue
all_jobs_optimizer.add_job(j)
submitted_job_blocks = all_jobs_optimizer.run_all()
return submitted_job_blocks
@classmethod
def multicore_run_all(cls):
multicore_run_list(cls.all_jobs)
@classmethod
def multicore_keep_running(cls, run_partial=False):
all_jobs_complete = False
cancelled_jobs = {}
cancelled_requeue_iters = 5
#submitted_job_blocks = {}
partial_done = False
while not all_jobs_complete and not partial_done:
# Find running job blocks
sacct_output = subprocess.check_output(['sacct', '-n', '-o', 'JobID,JobName%100,State%20'])
pending_running_complete_job_blocks = {}
pending = 0
running = 0
complete = 0
failed = 0
cancelled = 0
timeout = 0
other_status = 0
non_matching = 0
timed_out_jobs = set()
for job_line in sacct_output.split('\n'):
job_split = job_line.split()
if len(job_split) == 0:
continue
job_id = job_split[0]
job_name = job_split[1]
job_status = ' '.join(job_split[2:])
if job_name in cls.submitted_job_blocks:
if job_status in ['PENDING', 'RUNNING', 'COMPLETED']:
if job_name in pending_running_complete_job_blocks:
print('Found duplicate job: ' + job_name)
dup_job_id, dup_job_status = pending_running_complete_job_blocks[job_name]
print((job_id, job_status, dup_job_id, dup_job_status))
job_to_kill = None
if job_status == 'PENDING':
job_to_kill = job_id
elif dup_job_status == 'PENDING':
job_to_kill = dup_job_id
pending_running_complete_job_blocks[job_name] = (job_id, job_status)
if job_to_kill is not None:
print('Canceling job ' + job_to_kill)
try:
scancel_output = subprocess.check_output(['scancel', '{0}'.format(job_to_kill)])
print(scancel_output)
except:
print("Error canceling job:", sys.exc_info()[0])
else:
pending_running_complete_job_blocks[job_name] = (job_id, job_status)
if job_status == 'PENDING':
pending += 1
elif job_status == 'RUNNING':
running += 1
elif job_status == 'COMPLETED':
# Make sure that the callback of the jobs (if there is one) is called
job_block_list = cls.submitted_job_blocks[job_name]
for job in job_block_list:
job.get_done()
complete += 1
elif job_status in ['FAILED', 'NODE_FAIL']:
failed += 1
elif job_status in ['CANCELLED', 'CANCELLED+'] or job_status.startswith('CANCELLED'):
cancelled += 1
# This job could requeued after preemption
# Wait cancelled_requeue_iters before requeueing
cancelled_iters = 0
if job_id in cancelled_jobs:
cancelled_iters = cancelled_jobs[job_id]
if cancelled_iters < cancelled_requeue_iters:
pending_running_complete_job_blocks[job_name] = (job_id, job_status)
cancelled_jobs[job_id] = cancelled_iters + 1
elif job_status in ['TIMEOUT']:
timeout += 1
# in case of a timeout, add all jobs to the timed_out_jobs set
job_block_list = cls.submitted_job_blocks[job_name]
timed_out_jobs.update(job_block_list)
else:
print("Unexpected status: {0}".format(job_status))
other_status += 1
elif job_name not in ['batch', 'true', 'prolog']:
non_matching += 1
# Find running jobs
pending_running_complete_jobs = {}
for job_block_name in pending_running_complete_job_blocks:
job_id, job_status = pending_running_complete_job_blocks[job_block_name]
job_block_list = cls.submitted_job_blocks[job_block_name]
for job in job_block_list:
pending_running_complete_jobs[job.name] = (job_id, job_status)
# Make a list of runnable jobs
run_count = 0
block_count = 0
runnable_jobs = []
for j in cls.all_jobs:
if j.name not in pending_running_complete_jobs and not j.get_done() and j.dependendencies_done():
# if the job is now available to run, and was previously timed out, then increase its time
if j in timed_out_jobs:
j.time *= 2
print("Extending the time of job: {0} to {1} minutes".format(j.name, j.time))
runnable_jobs.append(j)
run_count += 1
new_job_blocks = Job.multicore_run_list(runnable_jobs)
block_count += len(new_job_blocks)
cls.submitted_job_blocks.update(new_job_blocks)
print 'Found {0} pending, {1} running, {2} complete, {3} failed, {4} cancelled, {5} timeout, {6} unknown status and {7} non-matching job blocks.'.format(
pending, running, complete, failed, cancelled, timeout, other_status, non_matching)
print "Queued {0} job{1} in {2} block{3}.".format(
run_count, '' if run_count == 1 else 's',
block_count, '' if block_count == 1 else 's')
if pending > 0 or running > 0 or run_count > 0:
time.sleep(60)
else:
all_jobs_complete = True
# If need to run a partial (just a single execution), then exit the loop
if run_partial:
partial_done = True
    @classmethod
    def keep_running(cls):
        """Poll slurm via ``sacct`` once a minute, (re)queueing runnable jobs
        until every job has completed.

        A job is (re)queued when it is absent from sacct's
        pending/running/completed set and all of its dependencies are done.
        The loop terminates only when nothing is pending, running, or was
        newly queued in the current iteration.
        """
        all_jobs_complete = False
        while not all_jobs_complete:
            all_job_names = {}
            # Generate dictionary of jobs (set of the names we own).
            for j in cls.all_jobs:
                all_job_names[j.name] = True
            # Find running jobs: one sacct row per job/job-step.
            sacct_output = subprocess.check_output(['sacct', '-n', '-o', 'JobID,JobName%100,State,NodeList'])
            pending_running_complete_jobs = {}
            # Per-status counters, reported after each poll.
            pending = 0
            running = 0
            complete = 0
            failed = 0
            cancelled = 0
            timeout = 0
            other_status = 0
            non_matching = 0
            for job_line in sacct_output.split('\n'):
                job_split = job_line.split()
                if len(job_split) == 0:
                    continue
                # Columns requested above: JobID, JobName, State, NodeList.
                job_id = job_split[0]
                job_name = job_split[1]
                job_status = job_split[2]
                node = job_split[3]
                if job_name in all_job_names:
                    if job_status in ['PENDING', 'RUNNING', 'COMPLETED']:
                        if job_name in pending_running_complete_jobs:
                            print 'Found duplicate job: ' + job_name
                        else:
                            pending_running_complete_jobs[job_name] = True
                        if job_status == 'PENDING':
                            pending += 1
                        elif job_status == 'RUNNING':
                            running += 1
                        elif job_status == 'COMPLETED':
                            complete += 1
                    elif job_status in ['FAILED', 'NODE_FAIL']:
                        failed += 1
                    elif job_status in ['CANCELLED', 'CANCELLED+']:
                        cancelled += 1
                    elif job_status in ['TIMEOUT']:
                        timeout += 1
                    else:
                        print "Unexpected status: {0}".format(job_status)
                        other_status += 1
                elif job_name not in ['batch', 'true', 'prolog']:
                    # Rows named 'batch'/'true'/'prolog' are slurm job steps;
                    # anything else is an unrelated job on the cluster.
                    non_matching += 1
            # Queue every job that is unaccounted for above and whose
            # dependencies have all finished.
            run_count = 0
            for j in cls.all_jobs:
                if j.name not in pending_running_complete_jobs and j.dependendencies_done():
                    run_count += j.run()
            print 'Found {0} pending, {1} running, {2} complete, {3} failed, {4} cancelled, {5} timeout, {6} unknown status and {7} non-matching jobs.'.format(
                pending, running, complete, failed, cancelled, timeout, other_status, non_matching)
            print "Queued {0} job{1}.".format(run_count, '' if run_count == 1 else 's')
            if pending > 0 or running > 0 or run_count > 0:
                # Work is still outstanding; poll again in a minute.
                time.sleep(60)
            else:
                all_jobs_complete = True
class JobBlock(object):
    """Represents a block of jobs that is executed on the cluster as a
    single sbatch/qsub allocation.

    All jobs in a block share the same per-task thread count and run
    concurrently inside one node allocation; the block's memory requirement
    is the sum of its jobs' memory, and its wall time is the maximum of its
    jobs' times.
    """
    # Number of blocks created so far (used to assign unique block numbers).
    block_count = 0
    # Every JobBlock instance ever created.
    all_job_blocks = []
    # Maps a Job instance to the JobBlock that contains it.
    jobs_to_job_block = {}

    def __init__(self, threads):
        # True until the block is submitted to the scheduler.
        self.pending = True
        self.job_block_list = []
        # Threads per task; identical for every job in this block.
        self.required_threads = threads
        self.required_memory = 0
        self.required_full_time = 0
        self.jobs_count = 0
        JobBlock.block_count += 1
        self.block_num = JobBlock.block_count
        JobBlock.all_job_blocks.append(self)
        # Blocks that must be submitted before this one.
        self.job_block_dependencies = set()

    def can_add_job(self, job):
        """Return True if *job* can join this block without violating any
        scheduling constraint; False otherwise."""
        # If the job block was already submitted, return false
        if not self.pending:
            # print "Cannot add: not pending:\n{0} to block {1}".format(job.command, self.block_num)
            return False
        # If the job has a different number of threads, return false
        if self.required_threads != job.get_threads_num():
            # print "Cannot add: not threads:\n{0} to block {1}".format(job.command, self.block_num)
            return False
        # If the job requires more threads than available, return false
        if self.required_threads * (self.jobs_count + 1) > MAX_CPUS_PER_NODE:
            # print "Cannot add: no cpus left:\n{0} to block {1}".format(job.command, self.block_num)
            return False
        # If the job requires more memory than available, return false
        if self.required_memory + job.memory > MAX_MEMORY_MB:
            # print "Cannot add: no memory left:\n{0} to block {1}".format(job.command, self.block_num)
            return False
        # If the job has a dependency job inside this same block, return false
        # (jobs in a block run concurrently, so a dependency cannot be a peer).
        for d in job.dependencies:
            if d in self.job_block_list:
                # print "Cannot add: job is conflicted with another job:\n{0} to block {1}".format(job.command, self.block_num)
                return False
        # Check the JobBlock dependencies by copying the current dependencies,
        # adding the new ones, and computing the transitive closure
        # We then need to make sure we are not in the closure's output (otherwise, there is a cycle)
        # (optimization, we first check that the given job's dependencies are not already there)
        # for d in job.dependencies:
        #     if d in JobBlock.jobs_to_job_block.keys():
        #         other_job_block = JobBlock.jobs_to_job_block[d]
        #         if (other_job_block == self) or (other_job_block in self.job_block_dependencies):
        #             # print "Cannot add: job is conflicted with another dependency"
        #             return False
        # NOTE(review): only direct self-dependencies are rejected below; a
        # dependency cycle running through several blocks would not be
        # detected here (the transitive-closure check above is commented out).
        for d in job.dependencies:
            if d in JobBlock.jobs_to_job_block.keys():
                other_job_block = JobBlock.jobs_to_job_block[d]
                if other_job_block == self:
                    # print "Cannot add: job is conflicted with another dependency:\n{0} to block {1}".format(job.command, self.block_num)
                    return False
        return True

    def add_job(self, job):
        """Add *job* to the block, updating the block's aggregate memory,
        wall time, and inter-block dependencies."""
        self.job_block_list.append(job)
        self.jobs_count += 1
        self.required_memory += job.memory
        # The block runs for as long as its longest job.
        self.required_full_time = max(self.required_full_time, job.time)
        JobBlock.jobs_to_job_block[job] = self
        # Update the JobBlock dependencies: this block depends on every
        # block that contains one of the job's dependencies.
        for d in job.dependencies:
            if d in JobBlock.jobs_to_job_block.keys():
                other_job_block = JobBlock.jobs_to_job_block[d]
                self.job_block_dependencies.add(other_job_block)

    def is_pending(self):
        """True while the block has not yet been submitted."""
        return self.pending

    def submit_block(self):
        """Submit this block (and, recursively, every block it depends on)
        to the scheduler via sbatch or qsub.

        Returns a dict mapping each submitted block name to its job list;
        empty if the block is empty or was already submitted.
        """
        submitted_job_blocks = {}
        # If there are no jobs, do nothing
        if self.jobs_count == 0:
            return submitted_job_blocks
        # If the job block was already submitted, do nothing
        if not self.pending:
            return submitted_job_blocks
        self.pending = False
        # recursively submit all the dependency blocks first
        for other_job_block in self.job_block_dependencies:
            if other_job_block.is_pending():
                submitted_job_blocks.update(other_job_block.submit_block())
        block_name = 'JobBlock{0}.'.format(self.block_num) + self.job_block_list[0].name
        print "RUNNING JOB BLOCK: " + block_name
        print "{0} jobs(tasks), {1} threads per task, {2}MB memory, {3}m time.".format(
            self.jobs_count, self.required_threads, self.required_memory, self.required_full_time)
        full_command = "#!/bin/bash\n"
        dependency_set = set()
        # Find all dependencies (scheduler job ids) for all jobs in the block
        for j in self.job_block_list:
            for d in j.dependencies:
                if not d.get_done() and d.jobid is not None:
                    if USE_SBATCH or USE_QSUB:
                        dependency_set.add(d.jobid)
                    # else:
                    #     dependency_set.add(d.name)
        work_queue = SBATCH_QUEUE
        # if self.required_threads > 1:
        #     work_queue = "general"
        account_str = ""
        if SBATCH_ACCOUNT is not None:
            account_str = "--account={}".format(SBATCH_ACCOUNT)
        if USE_SBATCH:
            command_list = ["sbatch",
                "-J", block_name, # Job name
                "-p", work_queue, # Work queue (partition) = general / unrestricted / interactive / serial_requeue
                "--no-requeue",
                "--ntasks", str(self.jobs_count), # Number of processes
                "--cpus-per-task", str(self.required_threads), # Number of threads
                "-t", str(self.required_full_time), # Time in minutes 1440 = 24 hours
                "--mem", str(self.required_memory), # Max memory in MB (strict - attempts to allocate more memory will fail)
                account_str,
                "--open-mode=append", # Append to log files
                "-o", LOGS_DIR + "/out." + block_name, # Standard out file
                "-e", LOGS_DIR + "/error." + block_name] # Error out file
            if SBATCH_EXCLUDED is not None:
                command_list.append("--exclude")
                command_list.append(SBATCH_EXCLUDED)
        elif USE_QSUB:
            command_list = ["qsub"]#,
                # "-N", block_name, # Job name
                # "-A", 'hvd113', # XSEDE Allocation
                # "-q", QSUB_WORK_QUEUE, # Work queue (partition) = general / unrestricted / interactive / serial_requeue
                # "-l", 'nodes=1:ppn={0},walltime={1}:00'.format(str(required_cores), required_full_time), # Number of processors
                # #"-l", 'walltime={0}:00'.format(self.time), # Time in munites 1440 = 24 hours
                # #"-l", '-mppmem={0}'.format(self.memory), # Max memory per cpu in MB (strict - attempts to allocate more memory will fail)
                # "-e", LOGS_DIR + "/outerror." + block_name('_')[0], # Error out file
                # "-j", "eo"] # Join standard out file to error file
            # Better to use file input rather than command line inputs (according to XSEDE helpdesk)
            # Request MAX_CORES so that memory requirement is also met
            full_command += (
                "#PBS -N {0}\n".format(block_name) +
                "#PBS -A hvd113\n" +
                "#PBS -q {0}\n".format(QSUB_WORK_QUEUE) +
                "#PBS -l nodes=1:ppn={0}:native,walltime={1}:00\n".format(str(MAX_CORES), self.required_full_time) +
                "#PBS -e " + LOGS_DIR + "/outerror.{0}\n".format(block_name.split('_')[0]) +
                "#PBS -j eo\n")
        if len(dependency_set) > 0:
            if USE_SBATCH:
                dependency_string = ":".join(d for d in dependency_set)
                if len(dependency_string) > 0:
                    print "depends on jobs:" + dependency_string
                    command_list += ["-d", "afterok:" + dependency_string]
            elif USE_QSUB:
                dependency_string = ":".join(d for d in dependency_set)
                if len(dependency_string) > 0:
                    print "depends on jobs:" + dependency_string
                    full_command += "#PBS -W depend=afterok:" + dependency_string + "\n"
            else:
                command_list += " && ".join("done(%s)" % d for d in dependency_set)
        if USE_SBATCH:
            full_command += "date\n"
        elif USE_QSUB:
            full_command += "cd $PBS_O_WORKDIR\ndate\n"
        # Generate job block commands: all jobs backgrounded, then 'wait'
        # so the allocation ends when the slowest job finishes.
        block_commands = ''
        for j in self.job_block_list:
            block_commands += '{0} &\n'.format(' '.join(j.command()))
            print j.name
        full_command += '{0}wait\ndate\n'.format(block_commands)
        # # Test job ids
        # for job_block in job_block_list:
        #     for j in job_block:
        #         j.jobid = str(cls.block_count - 1)
        print "command_list: {0}".format(command_list)
        print "full_command: {0}".format(full_command)
        submission_attempts = 0
        job_submitted = False
        while not job_submitted and submission_attempts < MAX_SUBMISSION_ATTEMPTS:
            submission_attempts += 1
            # Submit job: the batch script is piped to sbatch/qsub via stdin
            process = subprocess.Popen(command_list, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            submit_out, submit_err = process.communicate(full_command)
            # print submit_out
            # print submit_err
            # Process output: empty stderr is treated as success
            if len(submit_err) == 0:
                if USE_SBATCH:
                    # sbatch prints "Submitted batch job <id>"; take token 3.
                    new_jobid = submit_out.split()[3]
                    print 'jobid={0}'.format(new_jobid)
                    for j in self.job_block_list:
                        j.jobid = new_jobid
                job_submitted = True
            elif process.returncode != 0:
                print("Error while submitting job: {}\n{}".format(command_list, full_command))
                print("Error ({}) is: {}".format(process.returncode, submit_err))
                if submission_attempts == MAX_SUBMISSION_ATTEMPTS:
                    print("Reached maximal submissions attempts, aborting driver")
                    sys.exit(1)
                else:
                    print("Trying again...")
                    continue
        submitted_job_blocks[block_name] = self.job_block_list
        return submitted_job_blocks
# @classmethod
# def run_all_job_blocks(cls):
# print "Running all job blocks ({0} blocks)".format(len(cls.all_job_blocks))
# for job_block in cls.all_job_blocks:
# job_block.submit_block()
class JobBlockOrganizer(object):
    """Organizes jobs into JobBlocks while ensuring all dependencies are met.

    Jobs are grouped by their per-task thread requirement; each group keeps
    a list of JobBlocks, and a new job is placed into the first existing
    block that can accept it (see JobBlock.can_add_job).
    """

    def __init__(self):
        # Maps a threads count to the list of JobBlocks holding jobs that
        # need that many threads per task.
        self.job_blocks_per_thread_lists = {}

    def add_job(self, job):
        """Add *job* to the first compatible JobBlock, creating a new block
        for it if none can accept it."""
        threads = job.get_threads_num()
        block_list = self.job_blocks_per_thread_lists.setdefault(threads, [])
        # Iterate over the blocks for this thread count and find one that
        # can accommodate the job.
        for job_block in block_list:
            if job_block.can_add_job(job):
                job_block.add_job(job)
                return
        # No suitable block was found: create a new block and add the job.
        new_job_block = JobBlock(threads)
        block_list.append(new_job_block)
        new_job_block.add_job(job)

    def run_all(self):
        """Submit every pending JobBlock.

        Returns a dict mapping each submitted block name to its job list.
        """
        submitted_job_blocks = {}
        for block_list in self.job_blocks_per_thread_lists.values():
            for job_block in block_list:
                submitted_job_blocks.update(job_block.submit_block())
        return submitted_job_blocks
| {
"repo_name": "Rhoana/rh_aligner",
"path": "scripts/job.py",
"copies": "1",
"size": "29164",
"license": "mit",
"hash": -5752663834891736000,
"line_mean": 42.0147492625,
"line_max": 165,
"alpha_frac": 0.5188245782,
"autogenerated": false,
"ratio": 4.109921082299887,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0018936474361420073,
"num_lines": 678
} |
"""A generic, multi-protocol asynchronous server
Usage :
- create a server on a specific host and port : server = Server(host,port)
- call the loop() function, passing it the server and the class used to
manage the protocol (a subclass of ClientHandler) : loop(server,ProtocolClass)
An example of protocol class is provided, LengthSepBody : the client sends
the message length, the line feed character and the message body
"""
import cStringIO
import socket
import select
# the dictionary holding one client handler for each connected client
# key = client socket, value = instance of (a subclass of) ClientHandler
client_handlers = {}
# =======================================================================
# The server class. Creating an instance starts a server on the specified
# host and port
# =======================================================================
class Server:
    """A non-blocking TCP listening socket bound to the given host and port.

    Instantiating the class immediately binds and starts listening; the
    accept/dispatch cycle itself is driven externally by loop().
    """

    def __init__(self, host='localhost', port=80):
        self.host = host
        self.port = port
        # Non-blocking listening socket; readiness is polled via select().
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.setblocking(0)
        self.socket.bind((host, port))
        self.socket.listen(5)
# =====================================================================
# Generic client handler. An instance of this class is created for each
# request sent by a client to the server
# =====================================================================
class ClientHandler:
    """Generic per-connection handler, instantiated for each client the
    server accepts.

    Subclasses override request_complete() and make_response() to implement
    an actual protocol; the select() loop drives handle_read(),
    handle_write() and handle_error().
    """
    # Maximum number of bytes read from a file-object response per write pass.
    blocksize = 2048

    def __init__(self, server, client_socket, client_address):
        self.server = server
        self.client_address = client_address
        self.client_socket = client_socket
        self.client_socket.setblocking(0)
        self.host = socket.getfqdn(client_address[0])
        self.incoming = ''  # receives incoming data
        # Set to True once a complete response is ready to be sent.
        self.writable = False
        self.close_when_done = True

    def handle_error(self):
        """Called when select() reports an exceptional condition."""
        self.close()

    def handle_read(self):
        """Read the data received and process it once the request is complete."""
        try:
            buff = self.client_socket.recv(1024)
            if not buff:  # the connection is closed
                self.close()
                # Bug fix: stop here -- the handler has just been closed and
                # removed from client_handlers; continuing to process it
                # could mark a dead handler as writable.
                return
            # buffer the data in self.incoming
            self.incoming += buff
            self.process_incoming()
        except socket.error:
            self.close()

    def process_incoming(self):
        """Test if request is complete ; if so, build the response
        and set self.writable to True"""
        if not self.request_complete():
            return
        self.response = self.make_response()
        self.writable = True

    def request_complete(self):
        """Return True if the request is complete, False otherwise
        Override this method in subclasses"""
        return True

    def make_response(self):
        """Return the list of strings or file objects whose content will
        be sent to the client
        Override this method in subclasses"""
        return ["xxx"]

    def handle_write(self):
        """Send (a part of) the response on the socket
        Finish the request if the whole response has been sent
        self.response is a list of strings or file objects
        """
        # get next piece of data from self.response
        buff = ''
        while self.response and not buff:
            if isinstance(self.response[0], str):
                buff = self.response.pop(0)
            else:
                # File-like entry: read a chunk; drop it once exhausted.
                buff = self.response[0].read(self.blocksize)
                if not buff:
                    self.response.pop(0)
        if buff:
            try:
                self.client_socket.sendall(buff)
            except socket.error:
                self.close()
                # Bug fix: the handler is already closed and unregistered; a
                # second close() below would raise KeyError in client_handlers.
                return
        if self.response:
            return
        # nothing left in self.response
        if self.close_when_done:
            self.close()  # close socket
        else:
            # reset for next request
            self.writable = False
            self.incoming = ''

    def close(self):
        """Unregister this handler and close the client socket."""
        del client_handlers[self.client_socket]
        self.client_socket.close()
# ==============================================================
# A protocol with message length + line feed (\n) + message body
# This implementation just echoes the message body
# ==============================================================
class LengthSepBody(ClientHandler):
    """Protocol: the client sends the body length, a line feed, then the
    body itself. This implementation simply echoes the body back."""

    def request_complete(self):
        """The request is complete once the separator has arrived and the
        body holds exactly the number of bytes announced in the header."""
        parts = self.incoming.split('\n', 1)
        if len(parts) == 1:
            return False
        if len(parts[1]) != int(parts[0]):
            return False
        self.msg_body = parts[1]
        return True

    def make_response(self):
        """Override this method to actually process the data"""
        return [self.msg_body]
# ============================================================================
# Main loop, calling the select() function on the sockets to see if new
# clients are trying to connect, if some clients have sent data and if those
# for which the response is complete are ready to receive it
# For each event, call the appropriate method of the server or of the instance
# of ClientHandler managing the dialog with the client : handle_read() or
# handle_write()
# ============================================================================
def loop(server, handler, timeout=30):
    """Drive the server: poll all sockets with select() and dispatch events.

    Each round: sockets in the exceptional set get handle_error(); readable
    sockets get either a new handler instance (for the server socket, which
    signals a connection request) or handle_read(); writable sockets whose
    response is ready get handle_write().
    """
    while True:
        client_sockets = client_handlers.keys()
        # Sockets that have a complete response waiting to be sent; we must
        # test whether they can actually accept data.
        want_write = [s for s in client_handlers if client_handlers[s].writable]
        # The heart of the program: block until some socket is ready.
        readable, sendable, errored = select.select(
            client_sockets + [server.socket], want_write, client_sockets, timeout)
        for sock in errored:
            client_handlers[sock].handle_error()
        for sock in readable:
            if sock is server.socket:
                # A readable server socket means a new connection request.
                try:
                    client_socket, client_address = server.socket.accept()
                    client_handlers[client_socket] = handler(
                        server, client_socket, client_address)
                except socket.error:
                    pass
            else:
                # The client connected on this socket has sent something.
                client_handlers[sock].handle_read()
        # Drop sockets whose handlers were deleted during the read phase.
        sendable = set(sendable) & set(client_handlers.keys())
        for sock in sendable:
            client_handlers[sock].handle_write()
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/511453_generic_multi_protocol_asynchronous/recipe-511453.py",
"copies": "1",
"size": "6643",
"license": "mit",
"hash": -5898893554671109000,
"line_mean": 38.0764705882,
"line_max": 78,
"alpha_frac": 0.5645039892,
"autogenerated": false,
"ratio": 4.711347517730497,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010176914009350838,
"num_lines": 170
} |
"""A generic resource for publishing objects via BRPC.
Requires BRPC
API Stability: semi-stable
"""
from __future__ import nested_scopes
# Module version extracted from the CVS revision keyword string.
__version__ = "$Revision: 1.32 $"[11:-2]
# System Imports
import brpc
import urlparse
from cStringIO import StringIO
from gzip import GzipFile
# When True, connection/query pipelining events are printed for debugging.
pipeline_debug = False
# Version string advertised in the HTTP 'server' header and User-Agent.
version = "1.0"
from BTL.platform import app_name
from BTL.reactor_magic import reactor
from BTL.exceptions import str_exc
from BTL.protocol import SmartReconnectingClientFactory
from BTL.brpclib import ServerProxy
import twisted.web
# Fail fast if the installed twisted.web predates the APIs used below.
if twisted.web.__version__ < '0.6.0':
    raise ImportError("BTL.twisted_brpc requires twisted.web 0.6.0 or greater,"
                      " from Twisted 2.4.0.\nYou appear to have twisted.web "
                      "version %s installed at:\n%s" % (twisted.web.__version__,
                                                        twisted.web.__file__))
from twisted.web import resource, server
from twisted.internet import protocol
from twisted.python import log, reflect, failure
from twisted.web import http
from twisted.internet import defer
# Useful so people don't need to import brpc directly
Fault = brpc.Fault
class NoSuchFunction(Fault):
    """Fault raised when a request names a function that is not published."""
    pass
class Handler:
    """Holds the state of one in-progress BRPC request.

    Because the server is event driven rather than threaded, a long-running
    command cannot keep its state on a call stack. Subclasses override
    run() and deliver the eventual outcome through self.result, a Deferred.

    For example, to authenticate against twisted.cred, run an LDAP query,
    and feed its result into a database query -- all from a single BRPC
    command -- a Handler instance carries the state across those steps.
    """

    def __init__(self, resource, *args):
        # The BRPC resource this handler is serving a request for.
        self.resource = resource
        self.result = defer.Deferred()
        self.run(*args)

    def run(self, *args):
        """Start processing the request; subclasses must override.

        The default implementation is the event-driven equivalent of
        'raise UnimplementedError': it fails self.result with a
        NotImplementedError wrapped in a Failure.
        """
        try:
            raise NotImplementedError("Implement run() in subclasses")
        except:
            self.result.errback(failure.Failure())
def parse_accept_encoding(header):
    """Parse an HTTP Accept-Encoding header value into a list of encoding
    names ordered from most- to least-preferred.

    Entries without a q parameter get an implicit quality of '1'; entries
    with an explicit quality of '0' are dropped. Qualities are compared as
    strings, which orders well-formed values like '1', '0.9', '0.5'.
    """
    ranked = []
    for entry in header.split(','):
        entry = entry.strip()
        if ';' not in entry:
            # Bare encoding name: implicit top quality.
            ranked.append(('1', entry))
            continue
        encoding, param = entry.split(';')
        encoding = encoding.strip()
        _, quality = param.strip().split('=')
        quality = quality.strip()
        if quality != '0':
            ranked.append((quality, encoding))
    ranked.sort()
    ranked.reverse()
    return [encoding for _, encoding in ranked]
class BRPC(resource.Resource):
    """A resource that implements BRPC.
    You probably want to connect this to '/RPC2'.
    Methods published can return BRPC serializable results, Faults,
    Binary, Boolean, DateTime, Deferreds, or Handler instances.
    By default methods beginning with 'brpc_' are published.
    Sub-handlers for prefixed methods (e.g., system.listMethods)
    can be added with putSubHandler. By default, prefixes are
    separated with a '.'. Override self.separator to change this.
    """
    # Error codes for Twisted, if they conflict with yours then
    # modify them at runtime.
    NOT_FOUND = 8001
    FAILURE = 8002
    # Leaf resource: twisted.web will not traverse child paths.
    isLeaf = 1
    separator = '.'

    def __init__(self):
        resource.Resource.__init__(self)
        # Maps prefix -> sub-handler for dotted method names (e.g. 'system').
        self.subHandlers = {}

    def putSubHandler(self, prefix, handler):
        """Register *handler* to serve methods published under *prefix*."""
        self.subHandlers[prefix] = handler

    def getSubHandler(self, prefix):
        """Return the sub-handler registered for *prefix*, or None."""
        return self.subHandlers.get(prefix, None)

    def getSubHandlerPrefixes(self):
        """Return the registered sub-handler prefixes."""
        return self.subHandlers.keys()

    def _err(self, *a, **kw):
        # Indirection over twisted's log.err so subclasses can redirect logging.
        log.err(*a, **kw)

    def render(self, request):
        """Decode the BRPC request body, dispatch to the named function, and
        arrange for the (possibly deferred) result to be written back."""
        request.setHeader('server', "%s/%s" % (app_name, version))
        request.content.seek(0, 0)
        args, functionPath = brpc.loads(request.content.read())
        args, kwargs = args
        # Remembered for error messages produced in _cbRender.
        request.functionPath = functionPath
        try:
            function = self._getFunction(functionPath)
        except Fault, f:
            # Unknown/invalid function: render the fault as the response.
            self._cbRender(f, request)
        else:
            request.setHeader("content-type", "application/octet-stream")
            # The function may return a value, a Deferred, or raise;
            # maybeDeferred normalizes all three cases.
            defer.maybeDeferred(function, *args, **kwargs).addErrback(
                self._ebRender
            ).addCallback(
                self._cbRender, request
            )
        return server.NOT_DONE_YET

    def _cbRender(self, result, request):
        """Serialize *result* and write it to *request*, gzip-compressing
        when the client advertises support for it."""
        if isinstance(result, Handler):
            result = result.result
        if not isinstance(result, Fault):
            # Non-fault results are wrapped in a 1-tuple for brpc.dumps.
            result = (result,)
        try:
            s = brpc.dumps(result, methodresponse=1)
        except Exception, e:
            # The function's output could not be serialized; report a Fault.
            f = Fault(self.FAILURE,
                      "function:%s can't serialize output: %s" %
                      (request.functionPath, str_exc(e)))
            self._err(f)
            s = brpc.dumps(f, methodresponse=1)
        encoding = request.getHeader("accept-encoding")
        if encoding:
            encodings = parse_accept_encoding(encoding)
            if 'gzip' in encodings or '*' in encodings:
                sio = StringIO()
                g = GzipFile(fileobj=sio, mode='wb', compresslevel=9)
                g.write(s)
                g.close()
                s = sio.getvalue()
                request.setHeader("Content-Encoding", "gzip")
        request.setHeader("content-length", str(len(s)))
        request.write(s)
        request.finish()

    def _ebRender(self, failure):
        """Convert an errback Failure into a Fault suitable for the client."""
        self._err(failure)
        if isinstance(failure.value, Fault):
            return failure.value
        return Fault(self.FAILURE, "An unhandled exception occurred: %s" %
                     failure.getErrorMessage())

    def _getFunction(self, functionPath):
        """Given a string, return a function, or raise NoSuchFunction.
        This returned function will be called, and should return the result
        of the call, a Deferred, or a Fault instance.
        Override in subclasses if you want your own policy. The default
        policy is that given functionPath 'foo', return the method at
        self.brpc_foo, i.e. getattr(self, "brpc_" + functionPath).
        If functionPath contains self.separator, the sub-handler for
        the initial prefix is used to search for the remaining path.
        """
        if functionPath.find(self.separator) != -1:
            prefix, functionPath = functionPath.split(self.separator, 1)
            handler = self.getSubHandler(prefix)
            if handler is None: raise NoSuchFunction(self.NOT_FOUND, "no such subHandler %s" % prefix)
            return handler._getFunction(functionPath)
        f = getattr(self, "brpc_%s" % functionPath, None)
        if not f:
            raise NoSuchFunction(self.NOT_FOUND, "function %s not found" % functionPath)
        elif not callable(f):
            raise NoSuchFunction(self.NOT_FOUND, "function %s not callable" % functionPath)
        else:
            return f

    def _listFunctions(self):
        """Return a list of the names of all brpc methods."""
        return reflect.prefixedMethodNames(self.__class__, 'brpc_')
class BRPCIntrospection(BRPC):
    """BRPC introspection support (the system.* methods).

    methodHelp returns a method's 'help' attribute if present, otherwise
    its docstring, otherwise the empty string. methodSignature expects the
    method to carry a 'signature' attribute: a list of lists of the form
    [rtype, type1, type2, ...] using BRPC type names, not Python types.
    """

    def __init__(self, parent):
        """Wrap *parent*, the BRPC server to add Introspection support to."""
        BRPC.__init__(self)
        self._brpc_parent = parent

    def brpc_listMethods(self):
        """Return a list of the method names implemented by this server."""
        functions = []
        pending = [(self._brpc_parent, '')]
        # Walk the handler and all of its sub-handlers breadth-first,
        # accumulating prefixed method names.
        while pending:
            handler, prefix = pending.pop(0)
            for name in handler._listFunctions():
                functions.append(prefix + name)
            for name in handler.getSubHandlerPrefixes():
                pending.append((handler.getSubHandler(name),
                                prefix + name + handler.separator))
        return functions

    brpc_listMethods.signature = [['array']]

    def brpc_methodHelp(self, method):
        """Return a documentation string describing the use of the given method.
        """
        method = self._brpc_parent._getFunction(method)
        return (getattr(method, 'help', None)
                or getattr(method, '__doc__', None) or '')

    brpc_methodHelp.signature = [['string', 'string']]

    def brpc_methodSignature(self, method):
        """Return the method's list of type signatures, or '' when no
        signature information is available."""
        method = self._brpc_parent._getFunction(method)
        return getattr(method, 'signature', None) or ''

    brpc_methodSignature.signature = [['array', 'string'],
                                      ['string', 'string']]
def addIntrospection(brpc):
    """Attach a 'system' sub-handler providing introspection to *brpc*.

    @param brpc: The brpc server to add Introspection support to.
    """
    brpc.putSubHandler('system', BRPCIntrospection(brpc))
class Query(object):
    """One outstanding BRPC call: the serialized payload plus the Deferred
    that will receive its result."""

    def __init__(self, path, host, method, user=None, password=None, *args):
        self.path = path
        self.host = host
        self.method = method
        self.user = user
        self.password = password
        # Serialize the call once, up front; the payload is what gets
        # POSTed, possibly again after a reconnect.
        self.payload = brpc.dumps(args, method)
        self.deferred = defer.Deferred()
        # Flipped to True when the response arrives gzip-encoded.
        self.decode = False
class QueryProtocol(http.HTTPClient):
    # All current queries are pipelined over the connection at
    # once. When the connection is made, or as queries are made
    # while a connection exists, queries are all sent to the
    # server. Pipelining limits can be controlled by the caller.
    # When a query completes (see parseResponse), if there are no
    # more queries then an idle timeout gets sets.
    # The QueryFactory reopens the connection if another query occurs.
    #
    # twisted_brpc does currently provide a mechanism for
    # per-query timeouts. This could be added with another
    # timeout_call mechanism that calls loseConnection and pops the
    # current query with an errback.
    timeout = 300 # idle timeout.

    def log(self, msg, *a):
        # Debug tracing. NOTE(review): self.peer is only assigned when
        # pipeline_debug is True in connectionMade, so this is debug-only.
        print "%s: %s: %r" % (self.peer, msg, a)

    def connectionMade(self):
        """Initialize per-connection state and notify the factory so it can
        flush any queued queries down this connection."""
        http.HTTPClient.connectionMade(self)
        # Queries sent but not yet answered, oldest first.
        self.current_queries = []
        self.timeout_call = None
        if pipeline_debug:
            p = self.transport.getPeer()
            p = "%s:%d" % (p.host, p.port)
            self.peer = (id(self.transport), p)
        self.factory.connectionMade(self)

    def _cancelTimeout(self):
        # Cancel the pending idle-timeout call, if one is armed.
        if self.timeout_call and self.timeout_call.active():
            self.timeout_call.cancel()
        self.timeout_call = None

    def connectionLost(self, reason):
        """Return any in-flight queries to the factory for resending."""
        http.HTTPClient.connectionLost(self, reason)
        if pipeline_debug: self.log('connectionLost', reason.getErrorMessage())
        self._cancelTimeout()
        if self.current_queries:
            # queries failed, put them back
            if pipeline_debug: self.log('putting back', [q.method for q in self.current_queries])
            self.factory.prependQueries(self.current_queries)
        self.factory.connectionLost(self)

    def sendCommand(self, command, path):
        # Force HTTP/1.1 so the connection stays open for pipelining
        # (the base class would send HTTP/1.0).
        self.transport.write('%s %s HTTP/1.1\r\n' % (command, path))

    def setLineMode(self, rest):
        # twisted is stupid.
        # Reset firstLine so the next pipelined response's status line is
        # parsed as a status line again.
        self.firstLine = 1
        return http.HTTPClient.setLineMode(self, rest)

    def sendQuery(self):
        """Pop the next query from the factory and write it on the wire."""
        self._cancelTimeout()
        query = self.factory.popQuery()
        if pipeline_debug: self.log('sending', query.method)
        self.current_queries.append(query)
        self.sendCommand('POST', query.path)
        self.sendHeader('User-Agent', 'BTL/BRPC 1.0')
        self.sendHeader('Host', query.host)
        self.sendHeader('Accept-encoding', 'gzip')
        self.sendHeader('Connection', 'Keep-Alive')
        self.sendHeader('Content-type', 'application/octet-stream')
        self.sendHeader('Content-length', str(len(query.payload)))
        #if query.user:
        #    auth = '%s:%s' % (query.user, query.password)
        #    auth = auth.encode('base64').strip()
        #    self.sendHeader('Authorization', 'Basic %s' % (auth,))
        self.endHeaders()
        self.transport.write(query.payload)

    def parseResponse(self, contents):
        """Match the response to the oldest in-flight query, fire its
        Deferred, and arm the idle timeout when the pipeline drains."""
        query = self.current_queries.pop(0)
        if pipeline_debug: self.log('responded', query.method)
        if not self.current_queries:
            assert not self.factory.anyQueries()
            assert not self.timeout_call
            self.timeout_call = reactor.callLater(self.timeout,
                                                  self.transport.loseConnection)
        try:
            response = brpc.loads(contents)
        except Exception, e:
            query.deferred.errback(failure.Failure())
            del query.deferred
        else:
            query.deferred.callback(response[0][0])
            del query.deferred

    def badStatus(self, status, message):
        """Fail the oldest in-flight query and drop the connection."""
        query = self.current_queries.pop(0)
        if pipeline_debug: self.log('failed', query.method)
        try:
            # Raise so the errback carries a proper Failure with traceback.
            raise ValueError(status, message)
        except:
            query.deferred.errback(failure.Failure())
            del query.deferred
        self.transport.loseConnection()

    def handleStatus(self, version, status, message):
        # Any non-200 status aborts the oldest pending query.
        if status != '200':
            self.badStatus(status, message)

    def handleHeader(self, key, val):
        # Only interested in whether the response body is gzip-compressed.
        if not self.current_queries[0].decode:
            if key.lower() == 'content-encoding' and val.lower() == 'gzip':
                self.current_queries[0].decode = True

    def handleResponse(self, contents):
        """Decompress the body if needed, then hand it to parseResponse."""
        if self.current_queries[0].decode:
            # NOTE(review): seek(-1) looks suspicious -- s.seek(0) would be
            # the conventional rewind before handing the buffer to GzipFile;
            # verify against the cStringIO semantics this code relies on.
            s = StringIO()
            s.write(contents)
            s.seek(-1)
            g = GzipFile(fileobj=s, mode='rb')
            contents = g.read()
            g.close()
        self.parseResponse(contents)
class QueryFactory(object):
def __init__(self):
self.queries = []
self.instance = None
def connectionMade(self, instance):
self.instance = instance
if pipeline_debug: print 'connection made %s' % str(instance.peer)
while self.anyQueries():
self.instance.sendQuery()
def connectionLost(self, instance):
assert self.instance == instance
if pipeline_debug: print 'connection lost %s' % str(instance.peer)
self.instance = None
def prependQueries(self, queries):
self.queries = queries + self.queries
def popQuery(self):
return self.queries.pop(0)
def anyQueries(self):
return bool(self.queries)
def addQuery(self, query):
self.queries.append(query)
if pipeline_debug: print 'addQuery: %s %s' % (self.instance, self.queries)
if self.instance:
self.instance.sendQuery()
def disconnect(self):
if not self.instance:
return
if not hasattr(self.instance, 'transport'):
return
self.instance.transport.loseConnection()
class PersistantSingletonFactory(QueryFactory, SmartReconnectingClientFactory):
    """A QueryFactory that keeps reconnecting while queries remain queued.
    NOTE(review): 'Persistant' is a historical misspelling kept for API
    compatibility."""
    def clientConnectionFailed(self, connector, reason):
        if pipeline_debug: print 'clientConnectionFailed %s' % str(connector)
        # Delegate to the reconnecting base class, which schedules a retry.
        return SmartReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
    def clientConnectionLost(self, connector, unused_reason):
        self.started = False
        # Stop the reconnect machinery once there is nothing left to send.
        if not self.anyQueries():
            self.continueTrying = False
        return SmartReconnectingClientFactory.clientConnectionLost(self, connector, unused_reason)
class SingletonFactory(QueryFactory, protocol.ClientFactory):
    """A QueryFactory that does not reconnect: a failed connection attempt
    immediately fails every queued query with the connection error."""
    def clientConnectionFailed(self, connector, reason):
        if pipeline_debug: print 'clientConnectionFailed %s' % str(connector)
        # Take a copy before clearing: errbacks may enqueue new queries.
        queries = list(self.queries)
        del self.queries[:]
        for query in queries:
            query.deferred.errback(reason)
        self.started = False
class Proxy:
"""A Proxy for making remote BRPC calls.
Pass the URL of the remote BRPC server to the constructor.
Use proxy.callRemote('foobar', *args) to call remote method
'foobar' with *args.
"""
    def __init__(self, url, user=None, password=None, retry_forever = True):
        """
        @type url: C{str}
        @param url: The URL to which to post method calls. Calls will be made
        over SSL if the scheme is HTTPS. If netloc contains username or
        password information, these will be used to authenticate, as long as
        the C{user} and C{password} arguments are not specified.
        @type user: C{str} or None
        @param user: The username with which to authenticate with the server
        when making calls. If specified, overrides any username information
        embedded in C{url}. If not specified, a value may be taken from C{url}
        if present.
        @type password: C{str} or None
        @param password: The password with which to authenticate with the
        server when making calls. If specified, overrides any password
        information embedded in C{url}. If not specified, a value may be taken
        from C{url} if present.
        """
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        netlocParts = netloc.split('@')
        if len(netlocParts) == 2:
            # Credentials embedded in the URL: user[:password]@host[:port]
            userpass = netlocParts.pop(0).split(':')
            self.user = userpass.pop(0)
            try:
                self.password = userpass.pop(0)
            except:
                # No ':password' part present.
                self.password = None
        else:
            self.user = self.password = None
        hostport = netlocParts[0].split(':')
        self.host = hostport.pop(0)
        try:
            self.port = int(hostport.pop(0))
        except:
            # No explicit port; the scheme default is chosen at connect time.
            self.port = None
        self.path = path
        if self.path in ['', None]:
            self.path = '/'
        self.secure = (scheme == 'https')
        # Explicit arguments override any URL-embedded credentials.
        if user is not None:
            self.user = user
        if password is not None:
            self.password = password
        # The persistent factory keeps reconnecting while queries are queued;
        # the plain factory fails all queued queries on connection failure.
        if not retry_forever:
            _Factory = SingletonFactory
        else:
            _Factory = PersistantSingletonFactory
        self.factory = _Factory()
        self.factory.started = False
        self.factory.protocol = QueryProtocol
def callRemote(self, method, *args, **kwargs):
if pipeline_debug: print 'callRemote to %s : %s' % (self.host, method)
args = (args, kwargs)
query = Query(self.path, self.host, method, self.user,
self.password, *args)
self.factory.addQuery(query)
if pipeline_debug: print 'factory started: %s' % self.factory.started
if not self.factory.started:
self.factory.started = True
def connect(host):
if self.secure:
if pipeline_debug: print 'connecting to %s' % str((host, self.port or 443))
from twisted.internet import ssl
reactor.connectSSL(host, self.port or 443,
self.factory, ssl.ClientContextFactory(),
timeout=60)
else:
if pipeline_debug: print 'connecting to %s' % str((host, self.port or 80))
reactor.connectTCP(host, self.port or 80, self.factory,
timeout=60)
df = reactor.resolve(self.host)
df.addCallback(connect)
df.addErrback(query.deferred.errback)
return query.deferred
class AsyncServerProxy(object):
    """Attribute-style front end to Proxy: ``proxy.method(*a, **kw)``
    performs an asynchronous remote call and returns a Deferred."""
    def __init__(self, base_url, username=None, password=None, debug=False,
                 retry_forever = True):
        self.base_url = base_url
        self.username = username
        self.password = password
        self.proxy = Proxy(self.base_url, self.username, self.password, retry_forever)
        self.debug = debug
    def __getattr__(self, attr):
        # Any unknown attribute becomes a remote method call.
        return self._make_call(attr)
    def _make_call(self, methodname):
        return lambda *a, **kw : self._method(methodname, *a, **kw)
    def _method(self, methodname, *a, **kw):
        # Push current credentials down to the proxy before each call,
        # in case they have changed
        self.proxy.user = self.username
        self.proxy.password = self.password
        if self.debug:
            print ('callRemote:', self.__class__.__name__,
                   self.base_url, methodname, a, kw)
        df = self.proxy.callRemote(methodname, *a, **kw)
        return df
class EitherServerProxy(object):
    # Call modes.
    SYNC = 0
    ASYNC = 1
    SYNC_DEFERRED = 2 # BE CAREFUL to call getResult() on the returned Deferred!
    """Server Proxy that supports both asynchronous and synchronous calls."""
    # NOTE(review): 'async' is used as an identifier below; that is valid
    # only on Python 2 / < 3.7 (it became a reserved word in 3.7).
    def __init__(self, base_url, username = None, password = None, debug = False,
                 async = ASYNC, retry_forever = True ):
        """
        The EitherServerProxy can make either synchronous or asynchronous calls.
        The default is specified by the async parameter to __init__, but each
        individual call can override the default behavior by passing 'async' as
        a boolean keyword argument to any method call. The async keyword
        argument can also be set to None. However, passing async as
        None means simply 'use default behavior'. When calling with async=SYNC,
        you should not be in the same thread as the reactor or you risk
        blocking the reactor.
        @param async: determines whether the default is asynchronous or blocking calls."""
        assert async in [SYNC, ASYNC, SYNC_DEFERRED]
        self.async = async
        self.async_proxy = AsyncServerProxy( base_url, username, password, debug,
                                             retry_forever = retry_forever )
        # HERE HACK. retry_forever is not supported by ServerProxy.
        self.sync_proxy = ServerProxy( base_url )
    def __getattr__(self, attr):
        return self._make_call(attr)
    def _make_call(self, methodname):
        return lambda *a, **kw : self._method(methodname, *a, **kw)
    def _method(self, methodname, *a, **kw ):
        # A per-call 'async' keyword overrides the instance default;
        # None means 'use the default'.
        async = kw.pop('async', self.async)
        if async is None:
            async = self.async
        if async == ASYNC:
            df = self.async_proxy._method(methodname, *a, **kw)
        elif async == SYNC_DEFERRED:
            # Run the blocking call and wrap the result in a Deferred.
            df = defer.execute(getattr(self.sync_proxy, methodname), *a, **kw)
        else:
            # Fully synchronous: returns the result directly, not a Deferred.
            return self.sync_proxy.__getattr__(methodname)(*a, **kw)
        return df
# Module-level aliases of the call-mode constants, for convenience.
SYNC = EitherServerProxy.SYNC
ASYNC = EitherServerProxy.ASYNC
SYNC_DEFERRED = EitherServerProxy.SYNC_DEFERRED
# Public API of this module.
__all__ = ["BRPC", "Handler", "NoSuchFunction", "Fault", "Proxy", "AsyncServerProxy", "EitherServerProxy"]
| {
"repo_name": "rabimba/p2pScrapper",
"path": "BitTorrent-5.2.2/BTL/twisted_brpc.py",
"copies": "3",
"size": "23697",
"license": "mit",
"hash": 8288518969786411000,
"line_mean": 35.1234756098,
"line_max": 106,
"alpha_frac": 0.608136051,
"autogenerated": false,
"ratio": 4.209058614564832,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6317194665564831,
"avg_score": null,
"num_lines": null
} |
"""A generic rest serving layer for NDB models."""
import logging
import sys
from google.appengine.api import namespace_manager
from google.appengine.ext import db
import flask
import flask.views
from appengine import user
def command(func):
  """Mark *func* as a dispatchable command so CommandView will accept it."""
  func.is_command = True
  return func
class ClassView(flask.views.MethodView):
  """Implements create, retrieve, update and removed endpoints for models."""
  def __init__(self, classname, cls, create_callback):
    super(ClassView, self).__init__()
    self._classname = classname  # name used when broadcasting events
    self._cls = cls  # the ndb model class being served
    self._create_callback = create_callback  # factory for new objects; None disables creation
  def get(self, object_id):
    """List objects or just return a single object."""
    default_user_authentication()
    if object_id is None:
      # Return json list of objects.
      object_list = self._cls.query().iter()
      if object_list is None:
        object_list = []
      object_list = [obj.to_dict() for obj in object_list]
      return flask.jsonify(objects=object_list)
    else:
      # Return json repr of given object
      obj = self._cls.get_by_id(object_id)
      if not obj:
        flask.abort(404)
      return flask.jsonify(**obj.to_dict())
  def post(self, object_id):
    """Using json body to create or update a object."""
    default_user_authentication()
    body = flask.request.get_json()
    if body is None:
      flask.abort(400, 'JSON body and mime type required.')
    logging.info("Creating (or updating) object - %s", body)
    obj = self._cls.get_by_id(object_id)
    # No such object and creation disabled -> forbidden.
    if not obj and self._create_callback is None:
      flask.abort(403)
    elif not obj:
      obj = self._create_callback(object_id, body)
    # Update the object; abort with 400 on unknown field
    try:
      obj.populate(**body)
    except AttributeError:
      logging.error('Exception populating object', exc_info=sys.exc_info())
      flask.abort(400)
    obj.sync()
    # Put the object - BadValueError if there are uninitalised required fields
    try:
      obj.put()
    except db.BadValueError:
      logging.error('Exception saving object', exc_info=sys.exc_info())
      flask.abort(400)
    values = obj.to_dict()
    return flask.jsonify(**values)
  def delete(self, object_id):
    """Delete an object."""
    default_user_authentication()
    obj = self._cls.get_by_id(object_id)
    if not obj:
      flask.abort(404)
    obj.key.delete()
    # Broadcast the deletion so listeners can update their state.
    user.send_event(cls=self._classname, id=object_id, event='delete')
    return ('', 204)
class CommandView(flask.views.MethodView):
  """Exposes a /command POST endpoint that dispatches to @command methods."""
  def __init__(self, classname, cls):
    super(CommandView, self).__init__()
    self._classname = classname
    self._cls = cls
  def post(self, object_id):
    """Execute a named command against the object with *object_id*."""
    default_user_authentication()
    body = flask.request.get_json()
    if body is None:
      flask.abort(400, 'JSON body and mime type required.')
    logging.info(body)
    target = self._cls.get_by_id(object_id)
    if not target:
      flask.abort(404)
    # Remaining body entries become keyword arguments for the command.
    command_name = body.pop('command', None)
    handler = getattr(target, command_name, None)
    if handler is None or not getattr(handler, 'is_command', False):
      logging.error('Command %s does not exist or is not a command',
                    command_name)
      flask.abort(400)
    result = handler(**body)
    target.put()
    return flask.jsonify(result=result)
class HistoryView(flask.views.MethodView):
  """Exposes a /history POST endpoint returning an object's history."""
  def __init__(self, classname, cls):
    super(HistoryView, self).__init__()
    self._classname = classname
    self._cls = cls
  def post(self, object_id):
    """Fetch history between start_time and end_time for an object."""
    default_user_authentication()
    body = flask.request.get_json()
    if body is None:
      flask.abort(400, 'JSON body and mime type required.')
    window_start = body.pop('start_time', None)
    window_end = body.pop('end_time', None)
    if window_start is None or window_end is None:
      flask.abort(400, 'start_time and end_time expected.')
    target = self._cls.get_by_id(object_id)
    if not target:
      flask.abort(404)
    # Materialise the (possibly lazy) history iterable for jsonify.
    history = list(target.get_history(start=window_start, end=window_end))
    return flask.jsonify(result=history)
def default_user_authentication():
  """Ensure user is authenticated, and switch to
  appropriate building namespace."""
  if not user.get_user_object():
    return flask.abort(401)
  # Pick a building for this request: an explicit 'building-id' header
  # (which must belong to the user) or the first of the user's buildings.
  person = user.get_person()
  buildings = sorted(person.buildings)
  assert len(buildings) > 0
  requested = flask.request.headers.get('building-id')
  if requested is not None:
    if requested not in buildings:
      flask.abort(401)
    building_id = requested
  else:
    building_id = buildings[0]
  namespace_manager.set_namespace(building_id)
def register_class(blueprint, cls, create_callback):
  """Register a ndb model for rest endpoints."""
  # CRUD endpoints: list at '/', single object at '/<object_id>'.
  crud_view = ClassView.as_view('%s_crud' % cls.__name__,
                                blueprint.name, cls, create_callback)
  blueprint.add_url_rule('/', defaults={'object_id': None},
                         view_func=crud_view, methods=['GET',])
  blueprint.add_url_rule('/<object_id>', view_func=crud_view,
                         methods=['GET', 'POST', 'DELETE'])
  # Command-dispatch endpoint.
  blueprint.add_url_rule('/<object_id>/command', methods=['POST'],
                         view_func=CommandView.as_view(
                             '%s_command' % cls.__name__, blueprint.name, cls))
  # History-query endpoint.
  blueprint.add_url_rule('/<object_id>/history', methods=['POST'],
                         view_func=HistoryView.as_view(
                             '%s_history' % cls.__name__, blueprint.name, cls))
| {
"repo_name": "tomwilkie/awesomation",
"path": "src/appengine/rest.py",
"copies": "1",
"size": "5973",
"license": "mit",
"hash": 825404743761626400,
"line_mean": 27.8550724638,
"line_max": 78,
"alpha_frac": 0.6358613762,
"autogenerated": false,
"ratio": 3.775600505689001,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9867054455083493,
"avg_score": 0.008881485361101551,
"num_lines": 207
} |
# A generic test template for the Python solutions to the linked list problems.
#
# The testee function is recommended to be defined inside the "solution" module.
#
import sys
import LinkedList
import solution
FP_RATIO_PREC = 2
# The number of digits of precision in the values representing
# the passed/failed ratio to the total number of tests.
def TestMain(sol, log=sys.stdout, doNotLogPassed=True) -> bool:
    """
    @param sol: the function to be tested.
    @param log: a stream or a file to log the tester output to.
    @param doNotLogPassed: if True, all successful tests will not be logged.
    @return: True if all tests in the TESTS array were successful, False otherwise.
    All tester functions should follow the signature
    of the TestMain function.
    """
    def TestPredefined(sol, log, doNotLogPassed=True) -> bool:
        class ArgsExpectedPairCollection:
            """Lazily generates (args, expected) pairs: two random numbers
            encoded as linked lists of digits, plus the list for their sum."""
            def __init__(self, count=60, minVal=0, maxVal=99):
                self.count = count
                self.minVal = minVal
                self.maxVal = maxVal
            def __len__(self):
                return self.count
            def __iter__(self):
                import random
                for _ in range(self.count):
                    leftVal = random.randint(self.minVal, self.maxVal)
                    rightVal = random.randint(self.minVal, self.maxVal)
                    resultVal = leftVal + rightVal
                    leftHead = LinkedList.ConvertArrayToLinkedList(map(int, str(leftVal)))
                    rightHead = LinkedList.ConvertArrayToLinkedList(map(int, str(rightVal)))
                    resultHead = LinkedList.ConvertArrayToLinkedList(map(int, str(resultVal)))
                    yield ((leftHead, rightHead, ), resultHead, )
        ARGS_EXPECTED_PAIRS = ArgsExpectedPairCollection()
        areAllPassed = True
        failedCount = 0
        passedCount = 0
        for testId, argsExpectedPair in enumerate(ARGS_EXPECTED_PAIRS):
            args, expected = argsExpectedPair
            # Copy the inputs so the solution may mutate them freely.
            actual = sol(*map(LinkedList.Copy, args))
            isPassed = LinkedList.Equals(expected, actual)
            if not(isPassed and doNotLogPassed):
                print('Test #{}'.format(testId), file=log)
                print('Args: ({})'.format(', '.join(map(str, args))), file=log)
                print('Expected: {}'.format(expected), file=log)
                print('Actual: {}'.format(actual), file=log)
                # BUG FIX: report isPassed (the LinkedList.Equals verdict).
                # Previously this recomputed 'expected == actual', which
                # compares head nodes rather than list contents and could
                # log FAILED for tests that actually passed.
                print('{}'.format('OK' if isPassed else 'FAILED'), file=log)
                print(      file=log)
            if not isPassed:
                failedCount += 1
                areAllPassed = False
            else:
                passedCount += 1
        passedRatio = passedCount / len(ARGS_EXPECTED_PAIRS)
        failedRatio = failedCount / len(ARGS_EXPECTED_PAIRS)
        print('Passed: {}, %{:.{prec}f}'.format(passedCount, 100.0 * passedRatio, prec=FP_RATIO_PREC), file=log)
        print('Failed: {}, %{:.{prec}f}'.format(failedCount, 100.0 * failedRatio, prec=FP_RATIO_PREC), file=log)
        return areAllPassed
    # Please add all tester functions to the TESTS tuple.
    TESTS = (TestPredefined, )
    areAllPassed = True
    for Test in TESTS:
        if not Test(sol, log, doNotLogPassed):
            areAllPassed = False
    return areAllPassed
if __name__ == '__main__':
    # Run the predefined tests against the AddTwo solution.
    TestMain(solution.AddTwo)
| {
"repo_name": "lilsweetcaligula/Algorithms",
"path": "data_structures/linked_list/problems/add_two/py/tests.py",
"copies": "1",
"size": "3686",
"license": "mit",
"hash": 5478549063349894000,
"line_mean": 38.6344086022,
"line_max": 112,
"alpha_frac": 0.5567010309,
"autogenerated": false,
"ratio": 4.222222222222222,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5278923253122222,
"avg_score": null,
"num_lines": null
} |
'''A generic tree class.'''
from util.dot import escape
from util.mixin import Keyed
def Tree(node_type):
    '''A parameterized tree class which asserts that its nodes carry values
    that are instances of a particular type.'''
    class TreeClass(Keyed):
        # Immutable n-ary tree node; equality and hashing come from the
        # Keyed mixin via __key__.
        def __init__(self, value, subtrees=None):
            if not isinstance(value, node_type):
                raise TypeError('tree value is not an instance of %s' % node_type.__name__)
            if subtrees is None: subtrees = []
            for t in subtrees:
                assert isinstance(t, self.__class__)
            self._value = value
            # Stored as a tuple so the node is effectively immutable.
            self._subtrees = tuple(subtrees)
        @property
        def value(self):
            '''Return the value stored at this node.'''
            return self._value
        @property
        def subtrees(self):
            '''Return the subtrees under this node.'''
            return self._subtrees
        def __str__(self):
            # Parenthesized prefix form: value followed by '(children)'.
            if self.subtrees:
                children = '(' + ''.join(str(s) for s in self.subtrees) + ')'
            else:
                children = ''
            return str(self.value) + children
        def __repr__(self):
            return '%s(%r, %r)' % (self.__class__.__name__, self.value, self.subtrees)
        def __key__(self):
            # Identity used by Keyed for equality/hashing.
            return (self._value, self._subtrees)
        def __eq__(self, y):
            return (
                isinstance(y, TreeClass) and
                super(TreeClass, self).__eq__(y))
        def _dot_lines(self):
            # One DOT statement per node, one per edge, then recurse.
            return ['q%s [label="%s"]' % (id(self), escape(str(self.value)))] + \
                ['q%s -> q%s' % (id(self), id(c)) for c in self.subtrees] + \
                sum([c._dot_lines() for c in self.subtrees], [])
        def dot_str(self):
            # Render the whole tree as a Graphviz 'digraph' document.
            return '''\
digraph {
	%s
}''' % ';\n\t'.join(['graph [ordering="out"]'] + self._dot_lines())
        def iter_leaves(self):
            # Depth-first, left-to-right generator over leaf values.
            if not self.subtrees:
                yield self.value
            else:
                for tree in self.subtrees:
                    for leaf in tree.iter_leaves():
                        yield leaf
        def all_leaves(self, func=(lambda x: x)):
            # True iff func(leaf) is truthy for every leaf value.
            for leaf in self.iter_leaves():
                if not func(leaf):
                    return False
            return True
    return TreeClass
| {
"repo_name": "bdusell/pycfg",
"path": "src/util/tree.py",
"copies": "1",
"size": "2353",
"license": "mit",
"hash": -5039356408781752000,
"line_mean": 30.7972972973,
"line_max": 91,
"alpha_frac": 0.4946876328,
"autogenerated": false,
"ratio": 4.186832740213523,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011460247661275368,
"num_lines": 74
} |
""" Agent acquiring data from web or file data sources and streaming packets. """
__author__ = 'Michael Meisinger'
from gevent.event import Event
from pyon.public import BadRequest, EventPublisher, log, NotFound, OT, RT, get_safe
from pyon.util.async import spawn
from ion.agent.streaming_agent import StreamingAgent, AgentPlugin
from ion.data.packet.packet_builder import DataPacketBuilder
from interface.objects import DataPacket
class DataAgent(StreamingAgent):
    """Streaming agent that periodically samples data via a plugin and
    publishes each sample batch as a DataPacket."""
    agent_type = "data_agent"
    agent_plugin = None         # optional plugin supplying the samples
    sampling_gl = None          # greenlet running _sample_data_loop
    sampling_gl_quit = None     # Event used to signal the greenlet to stop
    sampling_interval = 5       # seconds between samples (overridable via config)
    def on_connect(self, connect_args=None):
        # Lifecycle hooks are delegated to the plugin when it defines them.
        if self.agent_plugin and hasattr(self.agent_plugin, 'on_connect'):
            self.agent_plugin.on_connect(connect_args)
    def on_start_streaming(self, streaming_args=None):
        # Spawn the sampling loop with the configured interval.
        self.sampling_gl_quit = Event()
        self.sampling_interval = self.agent_config.get("sampling_interval", 5)
        self.sampling_gl = spawn(self._sample_data_loop, self.sampling_interval)
        if self.agent_plugin and hasattr(self.agent_plugin, 'on_start_streaming'):
            self.agent_plugin.on_start_streaming(streaming_args)
    def on_stop_streaming(self):
        if self.agent_plugin and hasattr(self.agent_plugin, 'on_stop_streaming'):
            self.agent_plugin.on_stop_streaming()
        # Signal the loop to exit, wait up to 3s, then force-kill it.
        self.sampling_gl_quit.set()
        self.sampling_gl.join(timeout=3)
        self.sampling_gl.kill()
        self.sampling_gl = None
        self.sampling_gl_quit = None
    def on_acquire_data(self, streaming_args=None):
        if self.agent_plugin and hasattr(self.agent_plugin, 'on_acquire_data'):
            self.agent_plugin.on_acquire_data(streaming_args)
    def on_disconnect(self):
        if self.agent_plugin and hasattr(self.agent_plugin, 'on_disconnect'):
            self.agent_plugin.on_disconnect()
    def on_get_status(self, agent_status):
        # The plugin may augment/replace the status; otherwise pass through.
        if self.agent_plugin and hasattr(self.agent_plugin, 'on_get_status'):
            return self.agent_plugin.on_get_status(agent_status)
        return agent_status
    def _sample_data_loop(self, sample_interval):
        # wait() doubles as the inter-sample sleep; a set() wakes it early
        # and terminates the loop.
        while not self.sampling_gl_quit.wait(timeout=sample_interval):
            try:
                if self.agent_plugin:
                    sample = self.agent_plugin.acquire_samples()
                    if sample:
                        #log.info("Sample %s", sample)
                        packet = DataPacketBuilder.build_packet_from_samples(sample,
                                resource_id=self.resource_id, stream_name=self.stream_name)
                        self.stream_pub.publish(packet)
            except Exception as ex:
                # Log and keep sampling; one bad iteration must not kill the loop.
                log.exception("Error in sampling greenlet")
class DataAgentPlugin(AgentPlugin):
    """Plugin hook for DataAgent; subclasses override acquire_samples()."""
    def acquire_samples(self, max_samples=0):
        """Return new samples for publishing; this default yields nothing."""
        return None
| {
"repo_name": "scionrep/scioncc",
"path": "src/ion/agent/data_agent.py",
"copies": "1",
"size": "2856",
"license": "bsd-2-clause",
"hash": 5203583757772816000,
"line_mean": 38.6666666667,
"line_max": 95,
"alpha_frac": 0.6523109244,
"autogenerated": false,
"ratio": 3.8594594594594596,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9999882638720776,
"avg_score": 0.0023775490277367787,
"num_lines": 72
} |
"""Agent Base.
Basic definitions, such as the Agent and RoboticAgent base classes.
Author: Lucas David -- <ld492@drexel.edu>
License: MIT (c) 2016
"""
import logging
import time
import numpy as np
from enum import Enum
from naoqi import ALProxy
from mazerunner.components.base import NAOAdapter
from .. import components, utils
logger = logging.getLogger('mazerunner')
class Agent(object):
    """Base class for agents driven by a perceive/act cycle."""

    def __init__(self):
        # Number of completed perceive/act cycles.
        self.cycle_ = 0

    def update(self):
        """Run one full cycle: perceive the world, act, count the cycle."""
        self.perceive()
        self.act()
        self.cycle_ += 1

    def perceive(self):
        """Gather sensor input. Subclasses must override."""
        raise NotImplementedError

    def act(self):
        """Choose and perform an action. Subclasses must override."""
        raise NotImplementedError
class RoboticAgent(Agent):
    """Robot Agent Base.
    :param identity: [str, int], default=''.
        Integer or string that identifies the robot that will be controlled.
        E.g.: 0, '', 'jogger' or 'kyle'.
    :param interface: tuple (str, int).
        Tuple indicating the IP and port of the robot that will be controlled.
        E.g.: ('127.0.0.1', 5000), ('localhost', 6223).
    :param link:
        A link to V-REP, usually created by the `Environment` and shared
        throughout all the components. This can be overridden during the
        `start` procedure.
    :param random_state: RandomState-like, default=None.
        A random state used to control randomness in which the RoboticAgent
        acts. If None is passed, a new one is build with the current timestamp
        as seed.
    """
    STRIDE = 1.0
    SPEED = .7
    # NOTE(review): this call shape matches the legacy PyPI `enum` package
    # (each positional argument a member name), not the stdlib `enum.Enum`
    # functional API -- confirm which package `from enum import Enum` binds.
    BEHAVIORS = Enum('disabled', 'idle', 'moving', 'thinking', 'stuck', 'dead')
    def __init__(self, identity='', interface=('127.0.0.1', 5000), link=None,
                 random_state=None):
        if len(interface) != 2:
            raise ValueError('Invalid interface: %s' % str(interface))
        super(RoboticAgent, self).__init__()
        self.identity = identity
        self.interface = interface
        self.random_state = random_state or np.random.RandomState()
        # Proxies to the NAO motion and posture services at `interface`.
        self.motion = ALProxy("ALMotion", *interface)
        self.posture = ALProxy("ALRobotPosture", *interface)
        self.adapter = NAOAdapter(link, 'NAO') if link else None
        self.sensors = dict()
        self.joint_manager_ = None
        self.perception_ = None
        self.behavior_ = self.BEHAVIORS.disabled
    def start(self, link=None):
        # Wake the robot, start the joint-sync routine and wire up sensors.
        if link is not None:
            self.adapter = NAOAdapter(link, 'NAO')
        link = self.adapter.link
        self.motion.wakeUp()
        self.posture.goToPosture('Stand', self.SPEED)
        self.joint_manager_ = utils.JointManager(link=link,
                                                 motion=self.motion,
                                                 identity=self.identity,
                                                 robot_adapter=self.adapter)
        self.joint_manager_.start()
        self.sensors = {
            'vision': [
                components.Camera(link, component='NAO_vision1'),
                components.Camera(link, component='NAO_vision2'),
            ],
            'proximity': [
                # front
                components.ProximitySensor(link, component='Proximity_sensor1'),
                # back
                components.ProximitySensor(link, component='Proximity_sensor4'),
                # right
                components.ProximitySensor(link, component='Proximity_sensor3'),
                # left
                components.ProximitySensor(link, component='Proximity_sensor2')
            ],
            'position': [
                components.Tag(link, component='tag1'),
                components.Tag(link, component='tag2'),
                components.Tag(link, component='tag3')
            ],
            'orientation': [
                components.Compass(link, component=self.adapter)
            ]
        }
        # Sleep for one second to guarantee sensors are ready.
        time.sleep(1)
        self.behavior_ = self.BEHAVIORS.idle
    def perceive(self):
        # Poll every registered sensor once.
        for tag, sensors in self.sensors.items():
            for s in sensors:
                s.read()
        return self
    def act(self):
        """Find a method with the same name of its current state and execute
        it.
        """
        return getattr(self, str(self.behavior_))()
    def stuck(self):
        """Reset agent to the starting point."""
        logger.info('agent is stuck. Restarting...')
        self.dispose().start(self.adapter.link)
    def dead(self):
        """Doesn't do anything."""
        logger.warning('attempt to update a dead agent')
    def dispose(self):
        # Stop movement, shut down the sync routine and release resources.
        self.motion.stopMove()
        self.posture.goToPosture('Stand', self.SPEED)
        if self.joint_manager_:
            # Stop sync routine.
            self.joint_manager_.dispose().join()
        if self.adapter:
            # Clear adapter.
            self.adapter.dispose()
        self.behavior_ = self.BEHAVIORS.dead
        self.cycle_ = 0
        return self
| {
"repo_name": "lucasdavid/mazerunner",
"path": "mazerunner/agents/base.py",
"copies": "1",
"size": "5016",
"license": "mit",
"hash": 3949280038649996000,
"line_mean": 28.5058823529,
"line_max": 80,
"alpha_frac": 0.5703748006,
"autogenerated": false,
"ratio": 4.176519567027477,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00021786492374727668,
"num_lines": 170
} |
''' AgentClass.py: Class for a basic RL Agent '''
# Python imports.
from collections import defaultdict
class Agent(object):
    ''' Abstract base class for a basic RL agent. '''

    def __init__(self, name, actions, gamma=0.99):
        self.name = name
        self.actions = list(actions)  # tolerate numpy arrays (e.g. from Atari)
        self.gamma = gamma
        self.episode_number = 0
        self._forget_priors()

    def _forget_priors(self):
        # Clear the remembered previous state/action pair.
        self.prev_state = None
        self.prev_action = None

    def get_parameters(self):
        '''
        Returns:
            (dict) key=param_name (str) --> val=param_val (object).
        '''
        return {}

    def act(self, state, reward):
        '''
        Args:
            state (State): see StateClass.py
            reward (float): the reward associated with arriving in state @state.
        Returns:
            (str): action.
        '''
        pass

    def policy(self, state):
        # A policy query is just acting with a zero reward signal.
        return self.act(state, 0)

    def reset(self):
        '''
        Summary:
            Resets the agent back to its tabula rasa config.
        '''
        self._forget_priors()
        self.step_number = 0

    def end_of_episode(self):
        '''
        Summary:
            Resets the agents prior pointers.
        '''
        self._forget_priors()
        self.episode_number += 1

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def __str__(self):
        return str(self.name)
| {
"repo_name": "david-abel/simple_rl",
"path": "simple_rl/agents/AgentClass.py",
"copies": "1",
"size": "1526",
"license": "apache-2.0",
"hash": -1949566223329106200,
"line_mean": 23.2222222222,
"line_max": 96,
"alpha_frac": 0.5347313237,
"autogenerated": false,
"ratio": 4.080213903743315,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001279099735435305,
"num_lines": 63
} |
# Scenario agent (map handling)
from os import system
class cEscenario:
    """Scenario (map) agent.

    Loads an ASCII map from a text file and answers movement queries for an
    entity located at (row, column).  Direction codes used by the methods:
    'A' = up, 'V' = down, '<' = left, anything else ('>') = right.
    A space character marks a walkable cell.
    """
    def __init__(self):
        self.__aFila = 0        # number of rows in the loaded map
        self.__aColumna = 0     # number of columns in the loaded map
        self.__aEscenario = []  # the grid: a list of lists of characters
    def sFilCol(self):
        # Accessor: return (rows, columns).
        return self.__aFila, self.__aColumna
    def sElemento(self, pFila, pColumna):
        # Accessor: return the character at (pFila, pColumna).
        return self.__aEscenario[pFila][pColumna]
    def mFilCol(self, d, f, c):
        # Mutator: write character d at row f, column c.
        self.__aEscenario[f][c] = d
    def cargarEscenario(self, pMapa):
        # Load the map from file pMapa: one grid row per text line.
        f = open(pMapa, 'r')
        fila = 0
        columna = 0
        while True:
            linea = f.readline()
            aux = len(linea)
            if not linea:
                break
            lista = []
            for i in range(aux):
                lista.append(linea[i])
            self.__aEscenario.append(lista)
            # Track the longest line seen to derive the column count.
            if columna < aux:
                columna = aux
            fila = fila + 1
        f.close()
        self.__aFila = fila
        # -1 presumably discounts the trailing newline kept in each row
        # (readline() retains it) -- TODO confirm for newline-less last lines.
        self.__aColumna = columna - 1
    def imprimirEscenario(self):
        # Render the grid to the console after clearing the screen
        # ('cls' assumes a Windows terminal).
        escena = ''
        for i in range(self.__aFila):
            aux = ''
            for j in range(self.__aColumna):
                aux = aux + self.__aEscenario[i][j]
            escena = escena + '\n' + aux
        system('cls')
        print(escena)
    def puedeAvanzar(self, d, f, c):
        # Can the entity at (f, c) facing d step one cell forward?
        avanzar = False
        if d == 'A':
            avanzar = (' ' == self.__aEscenario[f - 1][c])
        elif d == 'V':
            avanzar = (' ' == self.__aEscenario[f + 1][c])
        elif d == '<':
            avanzar = (' ' == self.__aEscenario[f][c - 1])
        else:
            avanzar = (' ' == self.__aEscenario[f][c + 1])
        return avanzar
    def puedeRetroceder(self, d, f, c):
        # Can the entity at (f, c) facing d step one cell backward?
        retroceder = False
        if d == 'A':
            retroceder = (' ' == self.__aEscenario[f + 1][c])
        elif d == 'V':
            retroceder = (' ' == self.__aEscenario[f - 1][c])
        elif d == '<':
            retroceder = (' ' == self.__aEscenario[f][c + 1])
        else:
            retroceder = (' ' == self.__aEscenario[f][c - 1])
        return retroceder
    def puedeVoltear(self, d, f, c):
        # Can the entity turn?  True when a cell perpendicular to its
        # current facing is free.
        voltear = False
        if d == 'A' or d == 'V':
            voltear = (' ' == self.__aEscenario[f][c - 1])
            if not voltear:
                voltear = (' ' == self.__aEscenario[f][c + 1])
        else:
            voltear = (' ' == self.__aEscenario[f - 1][c])
            if not voltear:
                voltear = (' ' == self.__aEscenario[f + 1][c])
        return voltear
    def coordenadas(self, f, c):
        # Return the four neighbouring characters: up, down, left, right.
        ar = self.__aEscenario[f - 1][c]
        ab = self.__aEscenario[f + 1][c]
        iz = self.__aEscenario[f][c - 1]
        de = self.__aEscenario[f][c + 1]
        return ar, ab, iz, de
    def obstaculos(self, fil, col, filObjetivo, colObjetivo, direc):
        # True when the target cell lies in the facing direction `direc`
        # on the same row/column, with no obstacle characters in between.
        # The obstacle character is sampled from the map's top-left corner.
        muro = self.__aEscenario[0][0]
        if (fil == filObjetivo):  # target is in the same row
            if (direc=='<' and colObjetivo < col):
                if [ a for a in self.__aEscenario[fil][colObjetivo:col] if a == muro] == []:
                    return True
            if (direc=='>' and colObjetivo > col):
                if [ a for a in self.__aEscenario[fil][col:colObjetivo] if a == muro] == []:
                    return True
        if (col == colObjetivo):
            if (direc=='A' and filObjetivo < fil):
                if [ self.__aEscenario[a][col] for a in range(filObjetivo,fil) if self.__aEscenario[a][col] == muro] == []:
                    return True
            if (direc=='V' and filObjetivo > fil):
                if [ self.__aEscenario[a][col] for a in range(fil,filObjetivo) if self.__aEscenario[a][col] == muro] == []:
                    return True
return False | {
"repo_name": "ivansoriasolis/AppTanque",
"path": "src/aEscenario.py",
"copies": "1",
"size": "3748",
"license": "epl-1.0",
"hash": -3605092334596145000,
"line_mean": 33.0818181818,
"line_max": 123,
"alpha_frac": 0.4653148346,
"autogenerated": false,
"ratio": 3.206159110350727,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9133088532134574,
"avg_score": 0.007677082563230571,
"num_lines": 110
} |
# agent_email.py written by Duncan Murray 12/10/2014
import os
import imaplib
import smtplib
root_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." + os.sep + ".." + os.sep + "..")
#import aikif.cls_log as mod_log
import aikif.config as mod_cfg # comment out if you dont need cached password
def TEST():
    """Interactive smoke test: connect to a Gmail account and download
    up to 100 messages into the local AIKIF data folder."""
    email_credentials = input('file containing email credentials - eg ac.txt or press enter to get prompted : ',)
    if email_credentials:
        # Credentials file: username on line 1, password on line 2.
        with open(mod_cfg.fldrs['localPath'] + email_credentials, 'r') as f:
            username = f.readline().strip('\n')
            password = f.readline().strip('\n')
    else:
        username = input('gmail username : ',)
        password = input('gmail password : ')
    save_folder = os.sep.join([mod_cfg.fldrs['pers_data'], 'email', 'gmail'])
    account = GmailAccount(username, password, save_folder)
    account.connect()
    print('Total Emails = ', account.get_inbox_count())
    search_str = "(SUBJECT Flight)"
    search_str = "ALL"  # overrides the subject filter above
    account.get_all_emails_containing(100, search_str)
    # tok account.send('djmurray@gmail.com', subject='test from AIKIF ', msg='this is a test')
class EmailAccount(object):
    """
    base class for email account - server details based when sub-classed
    """
    def __init__(self, credentials, save_folder, send_server_name, rec_server_name):
        # credentials is a [username, password] pair.
        self.username = credentials[0]
        self.password = credentials[1]
        self.save_folder = save_folder          # where downloaded .eml files go
        self.send_server_name = send_server_name  # 'host:port' string for SMTP
        self.rec_server_name = rec_server_name    # [host, port] pair for IMAP-over-SSL
        self.status = 'NONE'
        # NOTE(review): the constructor opens live connections to both
        # servers (SMTP with STARTTLS, IMAP over SSL) before login.
        self.server_snd = smtplib.SMTP(self.send_server_name)
        self.server_snd.starttls()
        self.server_rec = imaplib.IMAP4_SSL(self.rec_server_name[0], self.rec_server_name[1])
    def __str__(self):
        # NOTE(review): exposes the clear-text password in the output.
        res = ' Account ---\n'
        res += 'username = ' + self.username + '\n'
        res += 'password = ' + self.password + '\n'
        res += 'send_server = ' + self.send_server_name + '\n'
        res += 'rec_server = ' + self.rec_server_name[0] + ':' + str(self.rec_server_name[1]) + '\n'
        return res
    def connect(self):
        # Authenticate on both the sending and receiving servers.
        self.server_snd.login(self.username,self.password)
        self.server_rec.login(self.username,self.password)
        self.status = 'CONNECTED'
        print(self.status)
    def disconnect(self):
        # Close both connections cleanly.
        self.server_snd.quit()
        self.server_rec.close()
        self.server_rec.logout()
        self.status = 'DISCONNECTED'
        print(self.status)
    def send(self, toaddr, subject='', msg=''):
        # Send an HTML-typed message from this account's address.
        fromaddr = self.username
        headers = ["From: " + fromaddr,
                   "Subject: " + subject,
                   "To: " + toaddr,
                   "MIME-Version: 1.0",
                   "Content-Type: text/html"]
        headers = "\r\n".join(headers)
        self.server_snd.sendmail(fromaddr, toaddr, headers + "\r\n\r\n" + msg)
    def get_inbox_count(self):
        # select() returns [count] for the mailbox; parse it as an int.
        return int(self.server_rec.select('Inbox')[1][0])
    def get_all_emails_containing(self, max_samples, search_criteria="ALL"):
        """
        Downloads all (up to max_emails) messages to EML format in local AIKIF drive
        Works fine on both accounts though search fails if there are more than 10k bytes
        search string contains things as follows:
        '(FROM user@domain.com)'
        '(OR (TO "tech163@fusionswift.com") (FROM "tech163@fusionswift.com"))'
        'ALL' -> returns everything
        """
        count_emails = 0
        response, data = self.server_rec.search(None, search_criteria)
        print('Email response', response)
        for num in data[0].split():
            # Fetch the full RFC822 message and save it as a numbered .eml file.
            response, data = self.server_rec.fetch(num, '(RFC822)')
            count_emails += 1
            if count_emails > max_emails:
                break
            print('Saving message # ', count_emails)
            with open(self.save_folder + os.sep + self.username + '_' + str(count_emails).zfill(5) + '.eml', 'wb') as f:
                f.write(data[0][1])
class GmailAccount(EmailAccount):
    """EmailAccount preconfigured with Gmail's SMTP/IMAP endpoints."""
    def __init__(self, username, password, save_folder):
        gmail_smtp = 'smtp.gmail.com:587'
        gmail_imap = ['imap.gmail.com', 993]
        EmailAccount.__init__(self, [username, password], save_folder,
                              gmail_smtp, gmail_imap)
    def __str__(self):
        return '--- Gmail' + str(EmailAccount.__str__(self))
class Message(object):
    """Placeholder for a parsed email message type; not yet implemented."""
    pass
if __name__ == '__main__':
    # TEST() is presumably defined earlier in this module (not visible in
    # this chunk) and exercises the account classes end-to-end — TODO confirm.
    TEST()
| {
"repo_name": "acutesoftware/rawdata",
"path": "scripts/gather/email_gmail.py",
"copies": "1",
"size": "4561",
"license": "mit",
"hash": -668787732551795200,
"line_mean": 36.7024793388,
"line_max": 122,
"alpha_frac": 0.5801359351,
"autogenerated": false,
"ratio": 3.571652310101801,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4651788245201801,
"avg_score": null,
"num_lines": null
} |
""" agent errors """
import cgi
from paste.urlparser import PkgResourcesParser
from pylons.middleware import error_document_template
from webhelpers.html.builder import literal
from agent.lib.base import BaseController
class ErrorController(BaseController):
    """Generates error documents as and when they are required.

    The ErrorDocuments middleware forwards to ErrorController when error
    related status codes are returned from the application.

    This behaviour can be altered by changing the parameters to the
    ErrorDocuments middleware in your config/middleware.py file.
    """

    def document(self):
        """Render the error document"""
        req = self._py_object.request
        original = req.environ.get('pylons.original_response')
        # Prefer the original response body; fall back to an escaped
        # 'message' query parameter.
        body = literal(original.body) or cgi.escape(req.GET.get('message', ''))
        values = dict(
            prefix=req.environ.get('SCRIPT_NAME', ''),
            code=cgi.escape(req.GET.get('code', str(original.status_int))),
            message=body,
        )
        return error_document_template % values

    def img(self, ident):
        """Serve Pylons' stock images"""
        return self._serve_file('media/img/%s' % ident)

    def style(self, ident):
        """Serve Pylons' stock stylesheets"""
        return self._serve_file('media/style/%s' % ident)

    def _serve_file(self, path):
        """Call Paste's FileApp (a WSGI application) to serve the file
        at the specified path
        """
        req = self._py_object.request
        req.environ['PATH_INFO'] = '/%s' % path
        return PkgResourcesParser('pylons', 'pylons')(req.environ, self.start_response)
| {
"repo_name": "cronuspaas/cronusagent",
"path": "agent/agent/controllers/error.py",
"copies": "1",
"size": "1701",
"license": "apache-2.0",
"hash": -4778675462399555000,
"line_mean": 37.6590909091,
"line_max": 91,
"alpha_frac": 0.6631393298,
"autogenerated": false,
"ratio": 4.231343283582089,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0013841923327692492,
"num_lines": 44
} |
"""Agent manager to handle plugin to agent RPC and periodic tasks."""
# coding=utf-8
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import sys
import uuid
from oslo_config import cfg
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from oslo_service import periodic_task
from oslo_utils import importutils
from neutron.agent import rpc as agent_rpc
from neutron.common import topics
from neutron import context as ncontext
from neutron.plugins.ml2.drivers.l2pop import rpc as l2pop_rpc
from neutron_lbaas.services.loadbalancer import constants as lb_const
from neutron_lib import constants as plugin_const
from neutron_lib import exceptions as q_exception
from f5_openstack_agent.lbaasv2.drivers.bigip import constants_v2
from f5_openstack_agent.lbaasv2.drivers.bigip import plugin_rpc
LOG = logging.getLogger(__name__)
# XXX OPTS is used in (at least) agent.py  Maybe move/rename to agent.py
OPTS = [
    cfg.IntOpt(
        'periodic_interval',
        default=10,
        help='Seconds between periodic task runs'
    ),
    cfg.BoolOpt(
        'start_agent_admin_state_up',
        default=True,
        help='Should the agent force its admin_state_up to True on boot'
    ),
    cfg.StrOpt(  # XXX should we use this with internal classes?
        'f5_bigip_lbaas_device_driver',  # XXX maybe remove "device" and "f5"?
        default=('f5_openstack_agent.lbaasv2.drivers.bigip.icontrol_driver.'
                 'iControlDriver'),
        help=('The driver used to provision BigIPs')
    ),
    cfg.BoolOpt(
        'l2_population',
        default=False,
        help=('Use L2 Populate service for fdb entries on the BIG-IP')
    ),
    cfg.BoolOpt(
        'f5_global_routed_mode',
        default=True,
        help=('Disable all L2 and L3 integration in favor of global routing')
    ),
    cfg.BoolOpt(
        'use_namespaces',
        default=True,
        help=('Allow overlapping IP addresses for tenants')
    ),
    cfg.BoolOpt(
        'f5_snat_mode',
        default=True,
        help=('use SNATs, not direct routed mode')
    ),
    cfg.IntOpt(
        'f5_snat_addresses_per_subnet',
        default=1,
        # BUG FIX: the help text was copy-pasted from a VTEP/VLAN option and
        # did not describe this setting at all.
        help=('Number of SNAT addresses to allocate on each subnet')
    ),
    cfg.StrOpt(
        'agent_id',
        default=None,
        help=('static agent ID to use with Neutron')
    ),
    cfg.StrOpt(
        'static_agent_configuration_data',
        default=None,
        help=('static name:value entries to add to the agent configurations')
    ),
    cfg.IntOpt(
        'service_resync_interval',
        default=300,
        help=('Number of seconds between service refresh checks')
    ),
    cfg.StrOpt(
        'environment_prefix',
        default='Project',
        help=('The object name prefix for this environment')
    ),
    cfg.BoolOpt(
        'environment_specific_plugin',
        default=True,
        help=('Use environment specific plugin topic')
    ),
    cfg.IntOpt(
        'environment_group_number',
        default=1,
        help=('Agent group number for the environment')
    ),
    cfg.DictOpt(
        'capacity_policy',
        default={},
        help=('Metrics to measure capacity and their limits')
    ),
    cfg.IntOpt(
        'f5_pending_services_timeout',
        default=60,
        help=(
            'Amount of time to wait for a pending service to become active')
    ),
    cfg.IntOpt(
        'f5_errored_services_timeout',
        default=60,
        help=(
            'Amount of time to wait for a errored service to become active')
    )
]

# Default spacing (seconds) for the periodic tasks below.  __init__ rebinds
# this global from conf.periodic_interval, though the decorators may have
# already captured the original value at class-definition time.
PERIODIC_TASK_INTERVAL = 10
class LogicalServiceCache(object):
    """Manage a cache of known services, keyed by loadbalancer id."""

    class Service(object):  # XXX maybe promote/use this class elsewhere?
        """Inner class used to hold values for weakref lookups."""

        def __init__(self, port_id, loadbalancer_id, tenant_id, agent_host):
            self.port_id = port_id
            self.loadbalancer_id = loadbalancer_id
            self.tenant_id = tenant_id
            self.agent_host = agent_host

        def __eq__(self, other):
            # Value equality over all fields; guard against comparison with
            # arbitrary objects that may not have a __dict__.
            return (isinstance(other, type(self)) and
                    self.__dict__ == other.__dict__)

        def __hash__(self):
            return hash(
                (self.port_id,
                 self.loadbalancer_id,
                 self.tenant_id,
                 self.agent_host)
            )

    def __init__(self):
        """Initialize Service cache object."""
        LOG.debug("Initializing LogicalServiceCache")
        # Maps loadbalancer_id -> Service record.
        self.services = {}

    @property
    def size(self):
        """Return the number of services cached."""
        return len(self.services)

    def put(self, service, agent_host):
        """Add a service to the cache (or refresh an existing entry)."""
        port_id = service['loadbalancer'].get('vip_port_id', None)
        loadbalancer_id = service['loadbalancer']['id']
        tenant_id = service['loadbalancer']['tenant_id']
        if loadbalancer_id not in self.services:
            s = self.Service(port_id, loadbalancer_id, tenant_id, agent_host)
            self.services[loadbalancer_id] = s
        else:
            s = self.services[loadbalancer_id]
            s.tenant_id = tenant_id
            s.port_id = port_id
            s.agent_host = agent_host

    def remove(self, service):
        """Remove a service (dict or Service record) from the cache."""
        if not isinstance(service, self.Service):
            loadbalancer_id = service['loadbalancer']['id']
        else:
            loadbalancer_id = service.loadbalancer_id
        if loadbalancer_id in self.services:
            del self.services[loadbalancer_id]

    def remove_by_loadbalancer_id(self, loadbalancer_id):
        """Remove service by providing the loadbalancer id."""
        if loadbalancer_id in self.services:
            del self.services[loadbalancer_id]

    def get_by_loadbalancer_id(self, loadbalancer_id):
        """Retrieve service by providing the loadbalancer id."""
        return self.services.get(loadbalancer_id, None)

    def get_loadbalancer_ids(self):
        """Return a list of cached loadbalancer ids."""
        return self.services.keys()

    def get_tenant_ids(self):
        """Return the distinct tenant ids in the service cache."""
        tenant_ids = {}
        # BUG FIX: iterate the cached Service records (dict values); the
        # original iterated the dict itself, yielding lb-id strings that
        # have no .tenant_id attribute.
        for service in self.services.values():
            tenant_ids[service.tenant_id] = 1
        return tenant_ids.keys()

    def get_agent_hosts(self):
        """Return the distinct agent hosts stored in the service cache."""
        agent_hosts = {}
        # BUG FIX: same as get_tenant_ids — iterate values(), not keys.
        for service in self.services.values():
            agent_hosts[service.agent_host] = 1
        return agent_hosts.keys()
class LbaasAgentManager(periodic_task.PeriodicTasks): # b --> B
"""Periodic task that is an endpoint for plugin to agent RPC."""
RPC_API_VERSION = '1.0'
target = oslo_messaging.Target(version='1.0')
def __init__(self, conf):
    """Initialize LbaasAgentManager.

    Wires up configuration, the local service cache, the device driver and
    all RPC channels, then starts periodic state reporting to Neutron.
    """
    super(LbaasAgentManager, self).__init__(conf)
    LOG.debug("Initializing LbaasAgentManager")
    LOG.debug("runtime environment: %s" % sys.version)

    self.conf = conf
    self.context = ncontext.get_admin_context_without_session()
    self.serializer = None

    # NOTE(review): the periodic_task decorators captured
    # PERIODIC_TASK_INTERVAL when the class body was evaluated; rebinding
    # the global here may not change the spacing of the already-decorated
    # tasks — confirm intent.
    global PERIODIC_TASK_INTERVAL
    PERIODIC_TASK_INTERVAL = self.conf.periodic_interval

    # Create the cache of provisioned services
    self.cache = LogicalServiceCache()
    self.last_resync = datetime.datetime.now()
    self.needs_resync = False
    self.plugin_rpc = None
    self.tunnel_rpc = None
    self.l2_pop_rpc = None
    self.state_rpc = None
    # Maps lb_id -> datetime first seen pending; used to expire stuck
    # services after f5_pending_services_timeout.
    self.pending_services = {}

    self.service_resync_interval = conf.service_resync_interval
    LOG.debug('setting service resync intervl to %d seconds' %
              self.service_resync_interval)

    # Load the driver.
    self._load_driver(conf)

    # Set the agent ID
    if self.conf.agent_id:
        self.agent_host = self.conf.agent_id
        LOG.debug('setting agent host to %s' % self.agent_host)
    else:
        # If not set statically, add the driver agent env hash
        agent_hash = str(
            uuid.uuid5(uuid.NAMESPACE_DNS,
                       self.conf.environment_prefix +
                       '.' + self.lbdriver.hostnames[0])
        )
        self.agent_host = conf.host + ":" + agent_hash
        LOG.debug('setting agent host to %s' % self.agent_host)

    # Initialize agent configurations
    agent_configurations = (
        {'environment_prefix': self.conf.environment_prefix,
         'environment_group_number': self.conf.environment_group_number,
         'global_routed_mode': self.conf.f5_global_routed_mode}
    )
    if self.conf.static_agent_configuration_data:
        # Comma-separated name:value pairs merged into the configurations.
        entries = str(self.conf.static_agent_configuration_data).split(',')
        for entry in entries:
            nv = entry.strip().split(':')
            if len(nv) > 1:
                agent_configurations[nv[0]] = nv[1]

    # Initialize agent-state to a default values
    self.admin_state_up = self.conf.start_agent_admin_state_up
    self.agent_state = {
        'binary': constants_v2.AGENT_BINARY_NAME,
        'host': self.agent_host,
        'topic': constants_v2.TOPIC_LOADBALANCER_AGENT_V2,
        'agent_type': lb_const.AGENT_TYPE_LOADBALANCERV2,
        'l2_population': self.conf.l2_population,
        'start_flag': True,
        'configurations': agent_configurations
    }

    # Setup RPC for communications to and from controller
    self._setup_rpc()

    # Set driver context for RPC.
    self.lbdriver.set_context(self.context)
    # Allow the driver to make callbacks to the LBaaS driver plugin
    self.lbdriver.set_plugin_rpc(self.plugin_rpc)
    # Allow the driver to update tunnel endpoints
    self.lbdriver.set_tunnel_rpc(self.tunnel_rpc)
    # Allow the driver to update forwarding records in the SDN
    self.lbdriver.set_l2pop_rpc(self.l2_pop_rpc)
    # Allow the driver to force and agent state report to the controller
    self.lbdriver.set_agent_report_state(self._report_state)

    # Set the flag to resync tunnels/services
    self.needs_resync = True
    # Mark this agent admin_state_up per startup policy
    if(self.admin_state_up):
        self.plugin_rpc.set_agent_admin_state(self.admin_state_up)

    # Start state reporting of agent to Neutron
    report_interval = self.conf.AGENT.report_interval
    if report_interval:
        heartbeat = loopingcall.FixedIntervalLoopingCall(
            self._report_state)
        heartbeat.start(interval=report_interval)
def _load_driver(self, conf):
    """Instantiate the configured LBaaS device driver, or abort the agent.

    Raises SystemExit when the configured driver cannot be imported.
    """
    self.lbdriver = None
    LOG.debug('loading LBaaS driver %s' %
              conf.f5_bigip_lbaas_device_driver)
    try:
        self.lbdriver = importutils.import_object(
            conf.f5_bigip_lbaas_device_driver, self.conf)
    except ImportError as ie:
        msg = ('Error importing loadbalancer device driver: %s error %s'
               % (conf.f5_bigip_lbaas_device_driver, repr(ie)))
        LOG.error(msg)
        raise SystemExit(msg)
def _setup_rpc(self):
    """Create all RPC channels between this agent and the controller."""

    #
    # Setting up outbound (callbacks) communications from agent
    #

    # setup the topic to send oslo messages RPC calls
    # from this agent to the controller
    topic = constants_v2.TOPIC_PROCESS_ON_HOST_V2
    if self.conf.environment_specific_plugin:
        # Environment-scoped topic keeps multiple agent groups isolated.
        topic = topic + '_' + self.conf.environment_prefix
        LOG.debug('agent in %s environment will send callbacks to %s'
                  % (self.conf.environment_prefix, topic))

    # create our class we will use to send callbacks to the controller
    # for processing by the driver plugin
    self.plugin_rpc = plugin_rpc.LBaaSv2PluginRPC(
        topic,
        self.context,
        self.conf.environment_prefix,
        self.conf.environment_group_number,
        self.agent_host
    )

    #
    # Setting up outbound communications with the neutron agent extension
    #
    self.state_rpc = agent_rpc.PluginReportStateAPI(topic)

    #
    # Setting up all inbound notifications and outbound callbacks
    # for standard neutron agent services:
    #
    #     tunnel_sync - used to advertise the driver VTEP endpoints
    #        and optionally learn about other VTEP endpoints
    #
    #     update - used to get updates to agent state triggered by
    #       the controller, like setting admin_state_up for the agent
    #
    #     l2_population - used to get updates on neutron SDN topology
    #       changes
    #
    #  We only establish notification if we care about L2/L3 updates
    #

    if not self.conf.f5_global_routed_mode:
        # notifications when tunnel endpoints get added
        self.tunnel_rpc = agent_rpc.PluginApi(topics.PLUGIN)

        # define which controller notifications the agent consumes
        consumers = [[constants_v2.TUNNEL, topics.UPDATE]]

        # if we are dynamically changing tunnel peers,
        # register to receive and send notifications via RPC
        if self.conf.l2_population:
            # communications of notifications from the
            # driver to neutron for SDN topology changes
            self.l2_pop_rpc = l2pop_rpc.L2populationAgentNotifyAPI()

            # notification of SDN topology updates from the
            # controller by adding to the general consumer list
            consumers.append(
                [topics.L2POPULATION, topics.UPDATE, self.agent_host]
            )

        # kick off the whole RPC process by creating
        # a connection to the message bus
        self.endpoints = [self]
        self.connection = agent_rpc.create_consumers(
            self.endpoints,
            topics.AGENT,
            consumers
        )
def _report_state(self, force_resync=False):
    """Report agent health and configuration to the Neutron controller.

    When *force_resync* is True, the local service cache and driver cache
    are flushed and a resync is scheduled before reporting.
    """
    try:
        if force_resync:
            self.needs_resync = True
            self.cache.services = {}
            self.lbdriver.flush_cache()
        # use the admin_state_up to notify the controller if all backend
        # devices are functioning properly. If not, automatically set the
        # admin_state_up for this agent to False
        if self.lbdriver:
            if not self.lbdriver.backend_integrity():
                self.needs_resync = True
                self.cache.services = {}
                self.lbdriver.flush_cache()
                self.plugin_rpc.set_agent_admin_state(False)
                self.admin_state_up = False
            else:
                # if we are transitioning from down to up,
                # change the controller state for this agent
                if not self.admin_state_up:
                    self.plugin_rpc.set_agent_admin_state(True)
                    self.admin_state_up = True

        if self.lbdriver:
            self.agent_state['configurations'].update(
                self.lbdriver.get_agent_configurations()
            )

            # add the capacity score, used by the scheduler
            # for horizontal scaling of an environment, from
            # the driver
            if self.conf.capacity_policy:
                env_score = (
                    self.lbdriver.generate_capacity_score(
                        self.conf.capacity_policy
                    )
                )
                # BUG FIX: this key was previously misspelled
                # 'environment_capaciy_score', so the scored and unscored
                # branches wrote two different configuration keys.
                self.agent_state['configurations'][
                    'environment_capacity_score'] = env_score
            else:
                self.agent_state['configurations'][
                    'environment_capacity_score'] = 0

        LOG.debug("reporting state of agent as: %s" % self.agent_state)
        self.state_rpc.report_state(self.context, self.agent_state)
        # start_flag is only meaningful on the first report after startup.
        self.agent_state.pop('start_flag', None)
    except Exception as e:
        # NOTE(review): e.message is Python-2-only.
        LOG.exception(("Failed to report state: " + str(e.message)))
# callback from oslo messaging letting us know we are properly
# connected to the message bus so we can register for inbound
# messages to this agent
def initialize_service_hook(self, started_by):
    """Subscribe this agent to its per-host message topic.

    Called back by oslo.messaging once the connection to the message bus
    is established, so inbound messages can be consumed.
    """
    node_topic = "{}_{}.{}".format(
        constants_v2.TOPIC_LOADBALANCER_AGENT_V2,
        self.conf.environment_prefix,
        self.agent_host)
    LOG.debug("Creating topic for consuming messages: %s" % node_topic)
    started_by.conn.create_consumer(
        node_topic, [started_by.manager], fanout=False)
@periodic_task.periodic_task(spacing=PERIODIC_TASK_INTERVAL)
def connect_driver(self, context):
    """Trigger driver connect attempts to all devices."""
    driver = self.lbdriver
    if driver:
        driver.connect()
@periodic_task.periodic_task(spacing=PERIODIC_TASK_INTERVAL)
def recover_errored_devices(self, context):
    """Periodically ask the driver to reconnect to errored devices."""
    driver = self.lbdriver
    if not driver:
        return
    LOG.debug("running periodic task to retry errored devices")
    driver.recover_errored_devices()
@periodic_task.periodic_task(
    spacing=constants_v2.UPDATE_OPERATING_STATUS_INTERVAL)
def scrub_dead_agents_in_env_and_group(self, context):
    """Periodically trigger a dead-agent scrub on the controller."""
    LOG.debug("running periodic scrub_dead_agents_in_env_and_group")
    if self.plugin_rpc:
        self.plugin_rpc.scrub_dead_agents(
            self.conf.environment_prefix,
            self.conf.environment_group_number)
@periodic_task.periodic_task(
    spacing=constants_v2.UPDATE_OPERATING_STATUS_INTERVAL)
def update_operating_status(self, context):
    """Update pool member operational status from devices to controller."""
    if not self.plugin_rpc:
        # RPC not wired up yet (still starting) — nothing to report.
        return
    active_loadbalancers = \
        self.plugin_rpc.get_active_loadbalancers(host=self.agent_host)
    for loadbalancer in active_loadbalancers:
        # Only touch loadbalancers bound to this agent host.
        if self.agent_host == loadbalancer['agent_host']:
            try:
                lb_id = loadbalancer['lb_id']
                LOG.debug(
                    'getting operating status for loadbalancer %s.', lb_id)
                svc = self.plugin_rpc.get_service_by_loadbalancer_id(
                    lb_id)
                self.lbdriver.update_operating_status(svc)

            except Exception as e:
                # NOTE(review): e.message is Python-2-only.
                LOG.exception('Error updating status %s.', e.message)
# setup a period task to decide if it is time empty the local service
# cache and resync service definitions form the controller
@periodic_task.periodic_task(spacing=PERIODIC_TASK_INTERVAL)
def periodic_resync(self, context):
    """Determine if it is time to resync services from controller."""
    now = datetime.datetime.now()

    # check if a resync has not been requested by the driver
    if not self.needs_resync:
        # check if we hit the resync interval
        if (now - self.last_resync).seconds > self.service_resync_interval:
            # Interval expired: drop all cached state so everything is
            # re-learned from the controller on this pass.
            self.needs_resync = True
            LOG.debug(
                'forcing resync of services on resync timer (%d seconds).'
                % self.service_resync_interval)
            self.cache.services = {}
            self.last_resync = now
            self.lbdriver.flush_cache()
            LOG.debug("periodic_sync: service_resync_interval expired: %s"
                      % str(self.needs_resync))
    # resync if we need to
    if self.needs_resync:
        LOG.debug("resync required at: %s" % now)
        # Clear first; the steps below re-set it if more work remains.
        self.needs_resync = False
        # advertise devices as VTEPs if required
        if self.tunnel_sync():
            self.needs_resync = True
        # synchronize LBaaS objects from controller
        if self.sync_state():
            self.needs_resync = True
        # clean any objects orphaned on devices and persist configs
        if self.clean_orphaned_objects_and_save_device_config():
            self.needs_resync = True
def tunnel_sync(self):
    """Call into driver to advertise device tunnel endpoints.

    Returns the driver's result; a truthy value makes periodic_resync
    schedule another pass — TODO confirm semantics against the driver.
    """
    LOG.debug("manager:tunnel_sync: calling driver tunnel_sync")
    return self.lbdriver.tunnel_sync()
@log_helpers.log_method_call
def sync_state(self):
    """Synchronize device configuration from controller state.

    Returns True when another resync pass should be scheduled.
    """
    resync = False

    # Don't touch the device while the backend is unhealthy.
    if hasattr(self, 'lbdriver'):
        if not self.lbdriver.backend_integrity():
            return resync

    # _all_vs_known_services() returns (all cached lb ids, ids owned by
    # this agent host).  BUG FIX: the original unpacked the pair in the
    # opposite order here (known_services, owned_services), so the
    # "currently known ... before sync" log line printed ALL cached ids,
    # and the second call below used yet another ordering.
    all_services, known_services = self._all_vs_known_services()
    try:
        # Get loadbalancers from the environment which are bound to
        # this agent.
        active_loadbalancers, active_loadbalancer_ids = \
            self._get_remote_loadbalancers('get_active_loadbalancers',
                                           host=self.agent_host)
        all_loadbalancers, all_loadbalancer_ids = \
            self._get_remote_loadbalancers('get_all_loadbalancers',
                                           host=self.agent_host)
        LOG.debug("plugin produced the list of active loadbalancer ids: %s"
                  % list(active_loadbalancer_ids))
        LOG.debug("currently known loadbalancer ids before sync are: %s"
                  % list(known_services))

        # Validate each service we own, i.e. loadbalancers to which this
        # agent is bound, that does not exist in our service cache.
        self._validate_services(all_loadbalancer_ids)

        resync = self._refresh_pending_services()

        # Refresh the cache view now that pending services were re-driven.
        all_services, known_services = self._all_vs_known_services()
        LOG.debug("currently known loadbalancer ids after sync: %s"
                  % list(known_services))

    except Exception as e:
        # NOTE(review): e.message is Python-2-only.
        LOG.exception("Unable to sync state: %s" % e.message)
        resync = True

    return resync
def _all_vs_known_services(self):
    """Return (all cached lb ids, lb ids bound to this agent host)."""
    everything, owned_here = set(), set()
    for lb_id, svc in self.cache.services.iteritems():
        everything.add(lb_id)
        if svc.agent_host == self.agent_host:
            owned_here.add(lb_id)
    return everything, owned_here
def _refresh_pending_services(self):
    """Re-drive services with pending tasks and time out stuck ones.

    Returns True when services remain pending, so the caller schedules
    another resync pass.
    """
    now = datetime.datetime.now()
    resync = False

    # This produces a list of loadbalancers with pending tasks to
    # be performed.
    pending_loadbalancers, pending_lb_ids = \
        self._get_remote_loadbalancers('get_pending_loadbalancers',
                                       host=self.agent_host)
    LOG.debug(
        "plugin produced the list of pending loadbalancer ids: %s"
        % list(pending_lb_ids))

    for lb_id in list(pending_lb_ids):
        lb_pending = self.refresh_service(lb_id)
        if lb_pending:
            # Remember when we first saw this service pending so it can be
            # expired after f5_pending_services_timeout seconds.
            if lb_id not in self.pending_services:
                self.pending_services[lb_id] = now

            time_added = self.pending_services[lb_id]
            has_expired = bool((now - time_added).seconds >
                               self.conf.f5_pending_services_timeout)

            if has_expired:
                lb_pending = False
                self.service_timeout(lb_id)

        if not lb_pending:
            # Completed (or timed out) — drop it from the pending map.
            try:
                del self.pending_services[lb_id]
            except KeyError as e:
                # NOTE(review): e.message is Python-2-only.
                LOG.error("LB not found in pending services: {0}".format(
                    e.message))

    # If there are services in the pending cache resync
    if self.pending_services:
        resync = True

    return resync
def _get_remote_loadbalancers(self, plugin_rpc_attr, host=None):
    """Fetch loadbalancers via the named plugin-RPC call.

    Returns a (tuple of loadbalancer dicts, set of their lb ids) pair.
    """
    rpc_call = getattr(self.plugin_rpc, plugin_rpc_attr)
    loadbalancers = rpc_call(host=host)
    id_set = {lb['lb_id'] for lb in loadbalancers}
    return tuple(loadbalancers), id_set
def _validate_services(self, lb_ids):
    """Validate every loadbalancer id not already in the local cache."""
    uncached = (i for i in lb_ids
                if not self.cache.get_by_loadbalancer_id(i))
    for lb_id in uncached:
        self.validate_service(lb_id)
@log_helpers.log_method_call
def validate_service(self, lb_id):
    """Ensure a loadbalancer known to Neutron actually exists on the device.

    Fetches the full service definition, caches it, and re-syncs it to the
    device when it is missing there or has an ERROR provisioning status.
    """
    try:
        service = self.plugin_rpc.get_service_by_loadbalancer_id(
            lb_id
        )
        self.cache.put(service, self.agent_host)
        if not self.lbdriver.service_exists(service) or \
                self.has_provisioning_status_of_error(service):
            LOG.info("active loadbalancer '{}' is not on BIG-IP"
                     " or has error state...syncing".format(lb_id))
            self.lbdriver.sync(service)
        else:
            LOG.debug("Found service definition for '{}', state is ACTIVE"
                      " move on.".format(lb_id))
    except q_exception.NeutronException as exc:
        LOG.error("NeutronException: %s" % exc.msg)
    except Exception as exc:
        # NOTE(review): exc.message is Python-2-only.
        LOG.exception("Service validation error: %s" % exc.message)
@staticmethod
def has_provisioning_status_of_error(service):
    """Determine if a service is in an ERROR/DEGRADED status.

    This staticmethod will go through a service object and determine if it
    has an ERROR status anywhere within the object.  As a side effect, the
    loadbalancer's provisioning_status is forced to ERROR when any nested
    object is in ERROR.
    """
    # Expected top-level keys of a service dict and their container types.
    expected_tree = dict(loadbalancer=dict, members=list, pools=list,
                         listeners=list, healthmonitors=list,
                         l7policies=list, l7policy_rules=list)
    error_status = False  # assume we're in the clear unless otherwise...
    loadbalancer = service.get('loadbalancer', dict())

    def handle_error(error_status, obj):
        # Returns True when obj (or a previous check) is in ERROR status.
        provisioning_status = obj.get('provisioning_status')
        if provisioning_status == plugin_const.ERROR:
            obj_id = obj.get('id', 'unknown')
            # NOTE(review): 'item' here is the enclosing loop variable,
            # which the inner list loop below rebinds — the logged "type"
            # may be the element itself rather than the tree key.
            LOG.warning("Service object has object of type(id) {}({})"
                        " that is in '{}' status.".format(
                            item, obj_id, plugin_const.ERROR))
            error_status = True
        return error_status

    for item in expected_tree:
        obj = service.get(item, expected_tree[item]())
        if expected_tree[item] == dict and isinstance(service[item], dict):
            # NOTE(review): service[item] raises KeyError when the key is
            # absent, despite the .get() default above — confirm intent.
            error_status = handle_error(error_status, obj)
        elif expected_tree[item] == list and \
                isinstance(obj, list):
            for item in obj:
                if len(item) == 1:
                    # {'networks': [{'id': {<network_obj>}}]}
                    # Unwrap single-key wrapper dicts (Python-2 keys()[0]).
                    item = item[item.keys()[0]]
                error_status = handle_error(error_status, item)
    if error_status:
        # Propagate: mark the whole loadbalancer as ERROR.
        loadbalancer['provisioning_status'] = plugin_const.ERROR
    return error_status
@log_helpers.log_method_call
def refresh_service(self, lb_id):
    """Re-apply one service definition to the device.

    Returns the current needs_resync flag (set when the driver reports
    pending work or an error occurred).
    """
    try:
        svc = self.plugin_rpc.get_service_by_loadbalancer_id(lb_id)
        self.cache.put(svc, self.agent_host)
        if self.lbdriver.sync(svc):
            self.needs_resync = True
    except q_exception.NeutronException as exc:
        LOG.error("NeutronException: %s" % exc.msg)
    except Exception as e:
        LOG.error("Exception: %s" % e.message)
        self.needs_resync = True
    return self.needs_resync
@log_helpers.log_method_call
def service_timeout(self, lb_id):
    """Mark a long-pending service as timed out via the driver."""
    try:
        svc = self.plugin_rpc.get_service_by_loadbalancer_id(lb_id)
        self.cache.put(svc, self.agent_host)
        self.lbdriver.update_service_status(svc, timed_out=True)
    except q_exception.NeutronException as exc:
        LOG.error("NeutronException: %s" % exc.msg)
    except Exception as e:
        LOG.error("Exception: %s" % e.message)
@log_helpers.log_method_call
def clean_orphaned_objects_and_save_device_config(self):
    """Global-agent housekeeping: purge orphans and back up device config.

    Returns True when another resync pass should be scheduled (no global
    agent, this agent is not the global agent, or an error occurred);
    False after a successful cleanup by this agent.
    """
    cleaned = False

    try:
        #
        # Global cluster refresh tasks
        #

        # Only one agent per environment/group performs the cluster-wide
        # cleanup; ask the controller which one that is.
        global_agent = self.plugin_rpc.get_clusterwide_agent(
            self.conf.environment_prefix,
            self.conf.environment_group_number
        )

        if 'host' not in global_agent:
            LOG.debug('No global agent available to sync config')
            return True

        if global_agent['host'] == self.agent_host:
            LOG.debug('this agent is the global config agent')
            # We're the global agent perform global cluster tasks
            # There are two independent types of service objects
            # the LBaaS implements: 1) loadbalancers + 2) pools
            # We will first try to find any orphaned pools
            # and remove them.

            # Ask BIG-IP for all deployed loadbalancers (virtual addresses)
            lbs = self.lbdriver.get_all_deployed_loadbalancers(
                purge_orphaned_folders=True)
            if lbs:
                self.purge_orphaned_loadbalancers(lbs)

            # Ask the BIG-IP for all deployed listeners to make
            # sure we are not orphaning listeners which have
            # valid loadbalancers in a OK state
            listeners = self.lbdriver.get_all_deployed_listeners()
            if listeners:
                self.purge_orphaned_listeners(listeners)

            policies = self.lbdriver.get_all_deployed_l7_policys()
            if policies:
                self.purge_orphaned_l7_policys(policies)

            # Ask the BIG-IP for all deployed pools not associated
            # to a virtual server
            pools = self.lbdriver.get_all_deployed_pools()
            if pools:
                self.purge_orphaned_pools(pools)
                self.purge_orphaned_nodes(pools)

            # Ask the BIG-IP for all deployed monitors not associated
            # to a pool
            monitors = self.lbdriver.get_all_deployed_health_monitors()
            if monitors:
                self.purge_orphaned_health_monitors(monitors)
        else:
            LOG.debug('the global agent is %s' % (global_agent['host']))
            return True
        # serialize config and save to disk
        self.lbdriver.backup_configuration()
    except Exception as e:
        # NOTE(review): e.message is Python-2-only.
        LOG.error("Unable to sync state: %s" % e.message)
        cleaned = True

    return cleaned
@log_helpers.log_method_call
def purge_orphaned_loadbalancers(self, lbs):
    """Gets 'unknown' loadbalancers from Neutron and purges them

    Provisioning status of 'unknown' on loadbalancers means that the object
    does not exist in Neutron.  These should be deleted to consolidate
    hanging objects.
    """
    lbs_status = self.plugin_rpc.validate_loadbalancers_state(
        list(lbs.keys()))
    LOG.debug('validate_loadbalancers_state returned: %s'
              % lbs_status)
    lbs_removed = False
    for lbid in lbs_status:
        # If the status is Unknown, it no longer exists
        # in Neutron and thus should be removed from the BIG-IP
        if lbs_status[lbid] in ['Unknown']:
            LOG.debug('removing orphaned loadbalancer %s'
                      % lbid)
            # This will remove pools, virtual servers and
            # virtual addresses
            self.lbdriver.purge_orphaned_loadbalancer(
                tenant_id=lbs[lbid]['tenant_id'],
                loadbalancer_id=lbid,
                hostnames=lbs[lbid]['hostnames'])
            lbs_removed = True
    if lbs_removed:
        # If we have removed load balancers, then scrub
        # for tenant folders we can delete because they
        # no longer contain loadbalancers.
        self.lbdriver.get_all_deployed_loadbalancers(
            purge_orphaned_folders=True)
@log_helpers.log_method_call
def purge_orphaned_listeners(self, listeners):
    """Deletes the hanging listeners from the deleted loadbalancers"""
    listener_status = self.plugin_rpc.validate_listeners_state(
        list(listeners.keys()))
    # BUG FIX: this log line previously claimed 'validated_pools_state'
    # although the call above validates listeners.
    LOG.debug('validate_listeners_state returned: %s'
              % listener_status)
    for listenerid in listener_status:
        # If the listener status is Unknown, it no longer exists
        # in Neutron and thus should be removed from BIG-IP
        if listener_status[listenerid] in ['Unknown']:
            LOG.debug('removing orphaned listener %s'
                      % listenerid)
            self.lbdriver.purge_orphaned_listener(
                tenant_id=listeners[listenerid]['tenant_id'],
                listener_id=listenerid,
                hostnames=listeners[listenerid]['hostnames'])
@log_helpers.log_method_call
def purge_orphaned_l7_policys(self, policies):
    """Deletes hanging l7_policies from the deleted listeners"""
    # Collect the policy names still referenced by a deployed listener;
    # anything else in *policies* is an orphan.
    policies_used = set()
    listeners = self.lbdriver.get_all_deployed_listeners()
    for li_id in listeners:
        policy = listeners[li_id]['l7_policy']
        if policy:
            # Stored as a slash-separated path; segment [2] is taken as the
            # policy name — TODO confirm format against the driver.
            policy = policy.split('/')[2]
            policies_used.add(policy)
    # Ask Neutron for the status of all deployed l7_policys
    for policy_key in policies:
        policy = policies.get(policy_key)
        if policy_key not in policies_used:
            LOG.debug('removing orphaned policy {}'.format(policy_key))
            # NOTE(review): keyword is singular 'hostname' here while the
            # sibling purge_* calls pass 'hostnames' — confirm the driver
            # signature.
            self.lbdriver.purge_orphaned_l7_policy(
                tenant_id=policy['tenant_id'],
                l7_policy_id=policy_key,
                hostname=policy['hostnames'])
@log_helpers.log_method_call
def purge_orphaned_nodes(self, pools):
    """Purge device nodes not referenced by any deployed pool member.

    Builds a tenant_id -> member-list map from *pools* and hands it to
    the driver to reconcile node objects.  (The previous docstring
    wrongly described this as deleting pools.)
    """
    pools_members = self.plugin_rpc.get_pools_members(
        list(pools.keys()))
    tenant_members = dict()
    for pool_id, pool in pools.iteritems():
        tenant_id = pool['tenant_id']
        members = pools_members.get(pool_id, list())
        # Accumulate members per tenant across all of the tenant's pools.
        if tenant_id not in tenant_members:
            tenant_members[tenant_id] = members
        else:
            tenant_members[tenant_id].extend(members)
    self.lbdriver.purge_orphaned_nodes(tenant_members)
@log_helpers.log_method_call
def purge_orphaned_pools(self, pools):
    """Remove pools that Neutron no longer knows about."""
    # Ask Neutron for the status of all deployed pools
    pools_status = self.plugin_rpc.validate_pools_state(
        list(pools.keys()))
    LOG.debug('validated_pools_state returned: %s'
              % pools_status)
    # 'Unknown' status means the pool no longer exists in Neutron and
    # should therefore be removed from the BIG-IP.
    orphaned = [pid for pid, state in pools_status.items()
                if state in ['Unknown']]
    for poolid in orphaned:
        LOG.debug('removing orphaned pool %s' % poolid)
        pool = pools[poolid]
        self.lbdriver.purge_orphaned_pool(
            tenant_id=pool['tenant_id'],
            pool_id=poolid,
            hostnames=pool['hostnames'])
@log_helpers.log_method_call
def purge_orphaned_health_monitors(self, monitors):
    """Deletes hanging Health Monitors from the deleted Pools"""
    # Build the set of monitor ids still referenced by a deployed pool;
    # anything else in *monitors* is an orphan.
    monitors_used = set()
    pools = self.lbdriver.get_all_deployed_pools()
    LOG.debug("pools found: {}".format(pools))
    for pool_id in pools:
        # 'None' (string) default keeps unmonitored pools out of the way.
        monitorid = pools.get(pool_id).get('monitors', 'None')
        monitors_used.add(monitorid)
    LOG.debug('health monitors in use: {}'.format(monitors_used))
    for monitorid in monitors:
        if monitorid not in monitors_used:
            LOG.debug("purging healthmonitor {} as it is not "
                      "in ({})".format(monitorid, monitors_used))
            self.lbdriver.purge_orphaned_health_monitor(
                tenant_id=monitors[monitorid]['tenant_id'],
                monitor_id=monitorid,
                hostnames=monitors[monitorid]['hostnames'])
######################################################################
#
# handlers for all in bound requests and notifications from controller
#
######################################################################
@log_helpers.log_method_call
def create_loadbalancer(self, context, loadbalancer, service):
    """Handle RPC cast from plugin to create_loadbalancer."""
    try:
        still_pending = self.lbdriver.create_loadbalancer(
            loadbalancer, service)
        self.cache.put(service, self.agent_host)
        if still_pending:
            # Driver reports outstanding work — schedule a resync.
            self.needs_resync = True
    except q_exception.NeutronException as exc:
        LOG.error("q_exception.NeutronException: %s" % exc.msg)
    except Exception as exc:
        LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_loadbalancer(self, context, old_loadbalancer,
loadbalancer, service):
"""Handle RPC cast from plugin to update_loadbalancer."""
try:
service_pending = self.lbdriver.update_loadbalancer(
old_loadbalancer,
loadbalancer, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("q_exception.NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_loadbalancer(self, context, loadbalancer, service):
"""Handle RPC cast from plugin to delete_loadbalancer."""
try:
service_pending = \
self.lbdriver.delete_loadbalancer(loadbalancer, service)
self.cache.remove_by_loadbalancer_id(loadbalancer['id'])
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("q_exception.NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_loadbalancer_stats(self, context, loadbalancer, service):
"""Handle RPC cast from plugin to get stats."""
try:
self.lbdriver.get_stats(service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("q_exception.NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def create_listener(self, context, listener, service):
"""Handle RPC cast from plugin to create_listener."""
try:
service_pending = \
self.lbdriver.create_listener(listener, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("q_exception.NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_listener(self, context, old_listener, listener, service):
"""Handle RPC cast from plugin to update_listener."""
try:
service_pending = \
self.lbdriver.update_listener(old_listener, listener, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("q_exception.NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_listener(self, context, listener, service):
"""Handle RPC cast from plugin to delete_listener."""
try:
service_pending = \
self.lbdriver.delete_listener(listener, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("delete_listener: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("delete_listener: Exception: %s" % exc.message)
@log_helpers.log_method_call
def create_pool(self, context, pool, service):
"""Handle RPC cast from plugin to create_pool."""
try:
service_pending = self.lbdriver.create_pool(pool, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_pool(self, context, old_pool, pool, service):
"""Handle RPC cast from plugin to update_pool."""
try:
service_pending = \
self.lbdriver.update_pool(old_pool, pool, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_pool(self, context, pool, service):
"""Handle RPC cast from plugin to delete_pool."""
try:
service_pending = self.lbdriver.delete_pool(pool, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("delete_pool: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("delete_pool: Exception: %s" % exc.message)
@log_helpers.log_method_call
def create_member(self, context, member, service):
"""Handle RPC cast from plugin to create_member."""
try:
service_pending = \
self.lbdriver.create_member(member, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("create_member: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("create_member: Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_member(self, context, old_member, member, service):
"""Handle RPC cast from plugin to update_member."""
try:
service_pending = \
self.lbdriver.update_member(old_member, member, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("update_member: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("update_member: Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_member(self, context, member, service):
"""Handle RPC cast from plugin to delete_member."""
try:
service_pending = self.lbdriver.delete_member(member, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("delete_member: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("delete_member: Exception: %s" % exc.message)
@log_helpers.log_method_call
def create_health_monitor(self, context, health_monitor, service):
"""Handle RPC cast from plugin to create_pool_health_monitor."""
try:
service_pending = \
self.lbdriver.create_health_monitor(health_monitor, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("create_pool_health_monitor: NeutronException: %s"
% exc.msg)
except Exception as exc:
LOG.error("create_pool_health_monitor: Exception: %s"
% exc.message)
@log_helpers.log_method_call
def update_health_monitor(self, context, old_health_monitor,
health_monitor, service):
"""Handle RPC cast from plugin to update_health_monitor."""
try:
service_pending = \
self.lbdriver.update_health_monitor(old_health_monitor,
health_monitor,
service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("update_health_monitor: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("update_health_monitor: Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_health_monitor(self, context, health_monitor, service):
"""Handle RPC cast from plugin to delete_health_monitor."""
try:
service_pending = \
self.lbdriver.delete_health_monitor(health_monitor, service)
self.cache.put(service, self.agent_host)
if service_pending:
self.needs_resync = True
except q_exception.NeutronException as exc:
LOG.error("delete_health_monitor: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("delete_health_monitor: Exception: %s" % exc.message)
@log_helpers.log_method_call
def agent_updated(self, context, payload):
"""Handle the agent_updated notification event."""
if payload['admin_state_up'] != self.admin_state_up:
LOG.info("agent administration status updated %s!", payload)
self.admin_state_up = payload['admin_state_up']
# the agent transitioned to down to up and the
# driver reports healthy, trash the cache
# and force an update to update agent scheduler
if self.lbdriver.backend_integrity() and self.admin_state_up:
self._report_state(True)
else:
self._report_state(False)
@log_helpers.log_method_call
def tunnel_update(self, context, **kwargs):
"""Handle RPC cast from core to update tunnel definitions."""
try:
LOG.debug('received tunnel_update: %s' % kwargs)
self.lbdriver.tunnel_update(**kwargs)
except q_exception.NeutronException as exc:
LOG.error("tunnel_update: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("tunnel_update: Exception: %s" % exc.message)
@log_helpers.log_method_call
def add_fdb_entries(self, context, fdb_entries, host=None):
"""Handle RPC cast from core to update tunnel definitions."""
try:
LOG.debug('received add_fdb_entries: %s host: %s'
% (fdb_entries, host))
self.lbdriver.fdb_add(fdb_entries)
except q_exception.NeutronException as exc:
LOG.error("fdb_add: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("fdb_add: Exception: %s" % exc.message)
@log_helpers.log_method_call
def remove_fdb_entries(self, context, fdb_entries, host=None):
"""Handle RPC cast from core to update tunnel definitions."""
try:
LOG.debug('received remove_fdb_entries: %s host: %s'
% (fdb_entries, host))
self.lbdriver.fdb_remove(fdb_entries)
except q_exception.NeutronException as exc:
LOG.error("remove_fdb_entries: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("remove_fdb_entries: Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_fdb_entries(self, context, fdb_entries, host=None):
"""Handle RPC cast from core to update tunnel definitions."""
try:
LOG.debug('received update_fdb_entries: %s host: %s'
% (fdb_entries, host))
# self.lbdriver.fdb_update(fdb_entries)
LOG.warning("update_fdb_entries: the LBaaSv2 Agent does not "
"handle an update of the IP address of a neutron "
"port. This port is generally tied to a member. If "
"the IP address of a member was changed, be sure to "
"also recreate the member in neutron-lbaas with the "
"new address.")
except q_exception.NeutronException as exc:
LOG.error("update_fdb_entries: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("update_fdb_entries: Exception: %s" % exc.message)
@log_helpers.log_method_call
def create_l7policy(self, context, l7policy, service):
"""Handle RPC cast from plugin to create_l7policy."""
try:
self.lbdriver.create_l7policy(l7policy, service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_l7policy(self, context, old_l7policy, l7policy, service):
"""Handle RPC cast from plugin to update_l7policy."""
try:
self.lbdriver.update_l7policy(old_l7policy, l7policy, service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_l7policy(self, context, l7policy, service):
"""Handle RPC cast from plugin to delete_l7policy."""
try:
self.lbdriver.delete_l7policy(l7policy, service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("delete_l7policy: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("delete_l7policy: Exception: %s" % exc.message)
@log_helpers.log_method_call
def create_l7rule(self, context, l7rule, service):
"""Handle RPC cast from plugin to create_l7rule."""
try:
self.lbdriver.create_l7rule(l7rule, service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def update_l7rule(self, context, old_l7rule, l7rule, service):
"""Handle RPC cast from plugin to update_l7rule."""
try:
self.lbdriver.update_l7rule(old_l7rule, l7rule, service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("Exception: %s" % exc.message)
@log_helpers.log_method_call
def delete_l7rule(self, context, l7rule, service):
"""Handle RPC cast from plugin to delete_l7rule."""
try:
self.lbdriver.delete_l7rule(l7rule, service)
self.cache.put(service, self.agent_host)
except q_exception.NeutronException as exc:
LOG.error("delete_l7rule: NeutronException: %s" % exc.msg)
except Exception as exc:
LOG.error("delete_l7rule: Exception: %s" % exc.message)
| {
"repo_name": "richbrowne/f5-openstack-agent",
"path": "f5_openstack_agent/lbaasv2/drivers/bigip/agent_manager.py",
"copies": "1",
"size": "54618",
"license": "apache-2.0",
"hash": 2397604299253300000,
"line_mean": 40.4086429113,
"line_max": 79,
"alpha_frac": 0.5847888974,
"autogenerated": false,
"ratio": 4.250097268695043,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 1319
} |
"""Agent module."""
import time
import uuid
import logging
import argparse
import importlib
from slamon_agent import timeutil
from slamon_agent.executor import Executor
from slamon_agent.handlers import TaskHandler
from slamon_agent.communication import Communicator, TemporaryError
class Agent(object):
    """An instance of the agent application: polls the AFM for tasks and
    dispatches them to a bounded pool of executors."""

    def __init__(self, afm_url, default_wait=60, max_tasks=2, name='Python Agent 1.0', agent_uuid=None):
        """
        Initialize Agent object.

        :param afm_url: The URL for the AFM service.
        :param default_wait: Amount of time to wait after each run iteration.
        :param max_tasks: Maximum tasks this agent can execute during one run iteration.
        :param name: The name of this Agent instance.
        :param agent_uuid: UUID for this Agent instance; a fresh uuid1 is
            generated when omitted.
        """
        self.afm = Communicator(afm_url)
        self.max_tasks = max_tasks
        self.default_wait = default_wait
        self.name = name
        # NOTE(review): uuid1 embeds host MAC + timestamp; presumably chosen
        # so restarts on one host are traceable -- confirm before changing.
        self.uuid = agent_uuid if agent_uuid else str(uuid.uuid1())
        self._run = True
        logging.basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message).120s")

    def exit(self):
        """
        Signal agent to exit.

        After issuing exit, agent will not make further task requests,
        but will wait until all currently processed tasks finish.
        """
        self._run = False

    def run(self):
        """The "main function" of the agent, looping the claim & execute tasks flow."""
        # Lower bound for the sleep between polls; loop-invariant, so bind
        # it once instead of on every iteration as before.
        min_wait_time = 1
        with Executor(self.max_tasks) as executor:
            while self._run:
                wait_time = self.default_wait
                # request for tasks
                try:
                    task_response = self.afm.request_tasks(
                        agent_id=self.uuid,
                        agent_name=self.name,
                        agent_time=timeutil.format(timeutil.now()),
                        agent_capabilities=TaskHandler.list_all(),
                        max_tasks=executor.available_executors()
                    )
                    if 'tasks' in task_response:
                        for task_data in task_response['tasks']:
                            # Pass the bound method directly; the previous
                            # lambda was a redundant pass-through wrapper.
                            executor.submit_task(task_data, self.afm.post_result)
                    if 'return_time' in task_response:
                        # Honor the AFM-suggested next-poll time, but never
                        # spin faster than min_wait_time.
                        return_time = timeutil.parse(task_response['return_time'])
                        wait_time = max(min_wait_time, (return_time - timeutil.now()).total_seconds())
                except TemporaryError as e:
                    logging.getLogger("Agent").error("An error occurred while claiming tasks: %s", e)
                time.sleep(wait_time)
def _import_module(module_name, package=None):
"""Recursively load modules to search for task handlers."""
m = importlib.import_module(module_name, package=package)
if hasattr(m, '__all__'):
for sub_module_name in m.__all__:
_import_module('.' + sub_module_name, module_name)
def main():
    """Entry point for the agent script."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--url', action='store', required=True,
                        help='Coordinator URL')
    parser.add_argument('-l', '--load', action='append',
                        help='Load handlers from specified module or package e.g. slamon_agent.handlers')
    parser.add_argument('-w', '--default-wait', type=int, default=60,
                        help='Seconds to wait before reconnection after connection failure.')
    parser.add_argument('-x', '--num-executors', type=int, default=2,
                        help='Number of concurrent task executors.')
    parser.add_argument('--verbose', '-v', action='count', default=0,
                        help='Increase logging verbosity. Default logging level is WARNING.')
    opts = parser.parse_args()

    # Import the requested modules so their task handlers self-register.
    for handler_module in (opts.load or []):
        _import_module(handler_module)

    # Each -v lowers the threshold by one level, never below 0.
    logging.basicConfig(level=max(logging.WARNING - (opts.verbose * 10), 0))

    Agent(
        opts.url,
        default_wait=opts.default_wait,
        max_tasks=opts.num_executors
    ).run()
    return 0
# Allow running the agent directly as a script.
if __name__ == "__main__":
    main()
| {
"repo_name": "SLAMon/slamon-python-agent",
"path": "slamon_agent/agent.py",
"copies": "1",
"size": "4443",
"license": "apache-2.0",
"hash": 4722172975109789000,
"line_mean": 37.6347826087,
"line_max": 111,
"alpha_frac": 0.5827143822,
"autogenerated": false,
"ratio": 4.280346820809249,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5363061203009248,
"avg_score": null,
"num_lines": null
} |
"""Agent"""
import torch
from torch.nn.utils.rnn import pack_padded_sequence
from net import Decoder, Encoder
from action import Action, AverageMeter
from dataset import Dataset
class Agent:
    """Image-captioning training agent.

    Wires the dataset, CNN encoder and RNN decoder together and runs the
    train / eval loops.
    """

    def __init__(self):
        super(Agent, self).__init__()
        #self.config = config
        self.action = Action()
        self.dataset = Dataset()
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.grad_clip = 5  # gradient clipping threshold
        embed_dim = 512  # dimension of word embeddings
        decoder_dim = 512  # dimension of decoder RNN
        dropout = 0.5
        #batch_size = 32
        self.n_epochs = 20
        self.encoder = Encoder()
        self.decoder = Decoder(embed_dim=embed_dim,
                               decoder_dim=decoder_dim,
                               vocab_size=self.dataset.get_word_map_len(),
                               dropout=dropout)

    def fit(self):
        """Train the encoder/decoder pair on the configured dataset."""
        self.run(self.encoder, self.decoder)

    def run(self, encoder, decoder):
        """Build optimizers/loss and alternate train and eval epochs."""
        device = self.device
        encoder_lr = 1e-4  # learning rate for encoder if fine-tuning
        decoder_lr = 4e-4  # learning rate for decoder
        encoder_optimizer = self.action.get_optimizer(encoder, encoder_lr)
        decoder_optimizer = self.action.get_optimizer(decoder, decoder_lr)
        encoder = encoder.to(device)
        decoder = decoder.to(device)
        loss_fn = self.action.get_loss_fn().to(device)
        train_loader, val_loader = self.dataset.get_dataloader()
        for epoch in range(self.n_epochs):
            self.train_epoch(train_loader, encoder, decoder, loss_fn,
                             encoder_optimizer, decoder_optimizer, epoch)
            self.eval_epoch(val_loader, encoder, decoder, loss_fn, epoch)

    def train_epoch(self, train_loader, encoder, decoder, loss_fn,
                    encoder_optimizer, decoder_optimizer, epoch):
        """
        Performs one epoch's training.

        :param train_loader: DataLoader for training data
        :param encoder: encoder model
        :param decoder: decoder model
        :param loss_fn: loss layer
        :param encoder_optimizer: optimizer to update encoder's weights (if fine-tuning)
        :param decoder_optimizer: optimizer to update decoder's weights
        :param epoch: current epoch index (for logging only)
        """
        device = self.device
        grad_clip = self.grad_clip
        encoder.train()
        decoder.train()
        losses = AverageMeter()
        for idx, (imgs, caps, caplens) in enumerate(train_loader):
            # Move to GPU, if available
            imgs = imgs.to(device)
            caps = caps.to(device)
            caplens = caplens.to(device)
            # Forward prop.
            imgs = encoder(imgs)
            scores, caps_sorted, decode_lengths, alphas, sort_ind = \
                decoder(imgs, caps, caplens)
            # Since we decoded starting with <start>, the targets are
            # all words after <start>, up to <end>
            targets = caps_sorted[:, 1:]
            # Remove timesteps that we didn't decode at, or are pads;
            # pack_padded_sequence is an easy trick to do this.  Index [0]
            # of the returned PackedSequence is its flattened .data tensor.
            scores = pack_padded_sequence(scores, decode_lengths, batch_first=True)
            targets = pack_padded_sequence(targets, decode_lengths, batch_first=True)
            scores = scores[0]
            targets = targets[0]
            # Calculate loss
            loss = loss_fn(scores, targets)
            if alphas is not None:
                # Doubly stochastic attention regularization ("Show, Attend
                # and Tell"): penalize each pixel's attention weights summed
                # over time deviating from 1.  BUGFIX: was
                # (1 - alphas.sum(dim=1) ** 2).mean(), which is unbounded
                # below and rewards inflating the attention sums.
                loss += ((1. - alphas.sum(dim=1)) ** 2).mean()
            # Back prop.
            decoder_optimizer.zero_grad()
            if encoder_optimizer is not None:
                encoder_optimizer.zero_grad()
            loss.backward()
            # Clip gradients
            if self.grad_clip is not None:
                self.action.clip_gradient(decoder_optimizer, grad_clip)
                if encoder_optimizer is not None:
                    self.action.clip_gradient(encoder_optimizer, grad_clip)
            # Update weights
            decoder_optimizer.step()
            if encoder_optimizer is not None:
                encoder_optimizer.step()
            losses.update(loss.item(), sum(decode_lengths))
            # BUGFIX: was `if idx % 100:`, which logged every batch EXCEPT
            # multiples of 100; `== 0` matches eval_epoch's behavior.
            if idx % 100 == 0:
                print(">>Epoch(Train): [{0}][{1}/{2}]\tLoss {loss.avg:.4f}".format(
                    epoch, idx, len(train_loader), loss=losses))

    def eval_epoch(self, val_loader, encoder, decoder, loss_fn, epoch):
        """
        Performs one epoch's validation.

        :param val_loader: DataLoader for validation data.
        :param encoder: encoder model
        :param decoder: decoder model
        :param loss_fn: loss layer
        :param epoch: current epoch index (for logging only)
        """
        device = self.device
        decoder.eval()
        encoder.eval()
        losses = AverageMeter()
        with torch.no_grad():
            for idx, (imgs, caps, caplens, allcaps) in enumerate(val_loader):
                # Move to device, if available
                imgs = imgs.to(device)
                caps = caps.to(device)
                caplens = caplens.to(device)
                # Forward prop.
                imgs = encoder(imgs)
                scores, caps_sorted, decode_lengths, alphas, sort_ind = \
                    decoder(imgs, caps, caplens)
                # Since we decoded starting with <start>, the targets are all
                # words after <start>, up to <end>
                targets = caps_sorted[:, 1:]
                scores = pack_padded_sequence(scores, decode_lengths, batch_first=True)
                targets = pack_padded_sequence(targets, decode_lengths, batch_first=True)
                scores = scores[0]
                targets = targets[0]
                # Calculate loss
                loss = loss_fn(scores, targets)
                if alphas is not None:
                    # Same doubly stochastic regularization (and BUGFIX) as
                    # in train_epoch, so reported losses are comparable.
                    loss += ((1. - alphas.sum(dim=1)) ** 2).mean()
                losses.update(loss.item(), sum(decode_lengths))
                if idx % 100 == 0:
                    # BUGFIX: the original format string contained a literal
                    # line continuation that injected a run of spaces into
                    # the log line; emit it cleanly instead.
                    print(">>Epoch(Eval): [{epoch}][{idx}/{iters}]\t"
                          "Loss {loss.avg:.4f}".format(
                              epoch=epoch, idx=idx, iters=len(val_loader),
                              loss=losses))

    def caption_image_beam_search(self):
        """
        captions images with beam search.

        :param encoder: encoder model
        :param decoder: decoder model
        :param image_path: path to image
        :param word_map: word map
        :param beam_size: number of sequences to consider at each decode-step
        :return: caption, weights for visualization
        """
        # Load image
        # Get caption by beam search
        pass
| {
"repo_name": "MegaShow/college-programming",
"path": "Homework/Principles of Artificial Neural Networks/Week 15 Image Caption/src/agent.py",
"copies": "1",
"size": "6788",
"license": "mit",
"hash": -699218031971559300,
"line_mean": 33.9896907216,
"line_max": 89,
"alpha_frac": 0.5517088981,
"autogenerated": false,
"ratio": 4.329081632653061,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.006944772892318254,
"num_lines": 194
} |
"""Agent plays with keyboard input
"""
__author__ = "Liyan Chen, Chang Gao"
__copyright__ = "Copyright (c) 2017 Malmactor"
__license__ = "MIT"
# Mapping from keyboard character to the simulation action it triggers.
key_act_map = {
    "a": "left_nojump",
    "d": "right_nojump",
    "w": "nolr_jump",
    "q": "left_jump",
    "e": "right_jump"
}


def keyboard_agent(simulation, keypoller, render, config=None):
    """
    Play the game from keyboard input until "m" is pressed.

    :param simulation: Simulation instance, advanced one frame per poll
    :param keypoller: Callable returning the pressed key, or a falsy value
        when no key is down
    :param render: Render instance
    :param config: Global configuration; the "empty_action" key is required.
        (Despite the None default, passing no config raises TypeError --
        the docstring previously documented a nonexistent ``empty_act``
        parameter instead.)
    :return: None
    """
    empty_action = config["empty_action"]
    key = None
    idle_polls = 0
    while key != "m":
        key = keypoller()
        if key in key_act_map:
            simulation.advance_frame(key_act_map[key])
            idle_polls = 0
        elif idle_polls <= 5:
            # Keep feeding the "empty" action for a few polls so short gaps
            # between key presses are not treated as a released key.
            simulation.advance_frame(empty_action)
            idle_polls += 1
        else:
            simulation.advance_frame("no_key")
        render.render(simulation.get_renderable())
| {
"repo_name": "Malmactor/malrio",
"path": "src/Agent/keyboard_agent.py",
"copies": "1",
"size": "1163",
"license": "mit",
"hash": 2720742267763220000,
"line_mean": 24.2826086957,
"line_max": 66,
"alpha_frac": 0.5993121238,
"autogenerated": false,
"ratio": 3.567484662576687,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4666796786376687,
"avg_score": null,
"num_lines": null
} |
# Agent.py
from Tools import *
from agTools import *
from random import *
import graphicDisplayGlobalVarAndFunctions as gvf
import commonVar as common
import numpy.random as npr
import numpy
import pandas as pd
import os
def mySort(ag):
    """Return a new list of agents sorted by their ``number`` attribute.

    The previous implementation sorted ``(number, agent)`` tuples, which
    falls back to comparing the agent objects themselves when two numbers
    tie -- a TypeError on Python 3 for non-orderable agents.  Sorting with
    a key avoids that and is stable for ties.
    """
    return sorted(ag, key=lambda a: a.number)
def applyRationallyTheRateOfChange(base, rate):
    """Scale *base* by *rate* symmetrically.

    A positive rate multiplies by (1 + rate); a negative one divides by
    (1 + |rate|), so applying +r and then -r returns the original base.
    """
    if rate < 0:
        return base / (1 + abs(rate))
    return base * (1 + rate)
class Agent(SuperAgent):
    def __init__(self, number, myWorldState,
                 xPos=0, yPos=0, agType=""):
        """Create an agent node, register it in the shared graph and set
        all bookkeeping values for the simulation.

        number: unique agent id; myWorldState: shared world-state object;
        xPos/yPos: plot coordinates; agType: 'workers' or 'entrepreneurs'.
        """
        # print xPos,yPos
        # the graph: lazily create the global graph on first agent
        if gvf.getGraph() == 0:
            gvf.createGraph()
        common.g.add_node(self)
        # the environment
        self.agOperatingSets = []
        self.number = number
        self.agType = agType
        self.numOfWorkers = 0  # never use it directly to make calculations
        self.profit = 0
        self.plannedProduction = 0
        self.soldProduction = 0
        self.revenue = 0
        self.consumption = 0
        self.consumptionQuantity=0
        self.employed = False
        self.extraCostsResidualDuration = 0
        self.profitStrategyReverseAfterN=0
        self.priceSwitchIfProfitFalls=""
        if agType == 'workers': #useful in initial creation
            common.orderedListOfNodes.append(self)
            # use to keep the order
            # in output (ex. adjacency matrix)
            # colors at http://www.w3schools.com/html/html_colornames.asp
            gvf.colors[self] = "OrangeRed"
            self.employed = False
            self.workTroubles = 0
            self.unspentConsumptionCapability = 0
            self.jump = 0
        if agType == 'entrepreneurs': #useful in initial creation
            common.orderedListOfNodes.append(self)
            # use to keep the order
            # in output (ex. adjacency matrix)
            # colors at http://www.w3schools.com/html/html_colornames.asp
            gvf.colors[self] = "LawnGreen"
            self.employed = True
            self.plannedProduction = -100  # not used in plots if -100
            self.hasTroubles = 0
            self.unspentConsumptionCapability = 0
            self.jump = 0
        self.myWorldState = myWorldState
        self.agType = agType
        # the agents
        if common.verbose:
            print("agent of type", self.agType,
                  "#", self.number, "has been created at", xPos, ",", yPos)
        gvf.pos[self] = (xPos, yPos)
        if common.nodeNumbersInGraph:
            common.g_labels[self] = str(number)
        # to be used to clone (if any)
        self.xPos = xPos
        self.yPos = yPos
        # price memory (sentinel extremes until a real price is recorded)
        self.buyPrice = -1000
        self.sellPrice = 1000
        self.sellPriceDefined=False
        # consumption planning for the current cycle
        # if the planning has been made, the variable contains
        # the number of the cycle
        self.consumptionPlanningInCycleNumber = -1
        # status to be used in actOnMarketPlace acting as a buyer
        # 0 means never used
        # 1 if previous action was a successful buy attempt
        # -1 if previous action was an unsuccessful buy attempt
        self.statusB = 0
        # status to be used in actOnMarketPlace acting as a seller
        # 0 means never used
        # 1 if previous action was a successful sell attempt
        # -1 if previous action was an unsuccessful sell attempt
        self.statusS = 0
# talk
def talk(self):
print(self.agType, self.number)
    # reset values, redefining the method of agTools.py in $$slapp$$
    def setNewCycleValues(self):
        """Reset the shared per-cycle accumulators (done once per cycle by
        agent #1) and this agent's per-cycle trouble counters."""
        # the if is to save time, given that the order is arriving to
        # all the agents (in principle, to reset local variables)
        if not common.agent1existing:
            print("At least one of the agents has to have number==1")
            print("Missing that agent, all the agents are resetting common values")
        if self.number == 1 or not common.agent1existing:
            # introduced with V6
            # V6 reset block starts here
            # this part is specific of the first hayekian cycle
            # where it replaces the lack of a previous value in
            # quantity
            # here, if possible, we use the price at t-2
            if common.startHayekianMarket > 1:
                if common.cycle == common.startHayekianMarket:
                    if len(common.ts_df.price.values) == 1:
                        previuosPrice = common.ts_df.price.values[-1]  # t=2
                    if len(common.ts_df.price.values) > 1:
                        previuosPrice = common.ts_df.price.values[-2]  # t>2
                    # the code above can act only if t>1
                    if common.cycle > 1:  # if == 1 do nothing
                        # makeProductionPlan acts
                        # establishing directly
                        # self.plannedProduction and the total
                        # common.totalPlannedProduction
                        common.totalConsumptionInQuantityInPrevious_TimeStep = \
                            common.totalPlannedConsumptionInValueInA_TimeStep \
                            / previuosPrice
                # not in case common.cycle == common.startHayekianMarket == 1
                elif common.cycle > common.startHayekianMarket:
                    common.totalConsumptionInQuantityInPrevious2_TimeStep= \
                        common.totalConsumptionInQuantityInPrevious1_TimeStep # init. in common
                    common.totalConsumptionInQuantityInPrevious1_TimeStep = \
                        common.totalConsumptionInQuantityInA_TimeStep
                    if common.cycle==common.startHayekianMarket+1:
                        common.totalConsumptionInQuantityInPrevious_TimeStep = \
                            common.totalConsumptionInQuantityInPrevious1_TimeStep
                    if common.cycle > common.startHayekianMarket+1:
                        # weighted mix of the last two observed quantities
                        common.totalConsumptionInQuantityInPrevious_TimeStep = \
                            common.Q*common.totalConsumptionInQuantityInPrevious1_TimeStep +\
                            (1-common.Q)*common.totalConsumptionInQuantityInPrevious2_TimeStep
            # !!!! here we can use also delayed values, look at !!!! in
            # notesOnHayekianTransformation.md
            common.totalConsumptionInQuantityInA_TimeStep = 0
            # list of all the transaction prices in a cycle of the
            # hayekian market
            common.hayekianMarketTransactionPriceList_inACycle=[]
            # v6 reset block ends here
            common.totalProductionInA_TimeStep = 0
            common.totalPlannedConsumptionInValueInA_TimeStep = 0
            common.totalProfit = 0
            common.totalPlannedProduction = 0
            # ratio sellers/buyers
            common.ratioSellersBuyersAlreadySet=False
        # troubles related individual variables (reset for every agent)
        if self.agType == "entrepreneurs":
            self.hasTroubles = 0
        if self.agType == "workers":
            self.workTroubles = 0
# hireIfProfit
def hireIfProfit(self):
# workers do not hire
if self.agType == "workers":
return
if self.profit <= common.hiringThreshold:
return
tmpList = []
for ag in self.agentList:
if ag != self:
if ag.agType == "workers" and not ag.employed:
tmpList.append(ag)
if len(tmpList) > 0:
hired = tmpList[randint(0, len(tmpList) - 1)]
hired.employed = True
gvf.colors[hired] = "Aqua"
gvf.createEdge(self, hired) # self, here, is the hiring firm
# count edges (workers) of the firm, after hiring (the values is
# recorded, but not used directly)
self.numOfWorkers = gvf.nx.degree(common.g, nbunch=self)
# nbunch : iterable container, optional (default=all nodes)
# A container of nodes. The container will be iterated through once.
print("entrepreneur", self.number, "has",
self.numOfWorkers, "edge/s after hiring")
    def hireFireWithProduction(self):
        """Adjust the firm's workforce to match its production plan:
        hire when understaffed, fire when overstaffed (entrepreneurs only)."""
        # workers do not hire/fire
        if self.agType == "workers":
            return
        # to decide to hire/fire we need to know the number of employees
        # the value is calculated on the fly, to be sure of accounting for
        # modifications coming from outside
        # (nbunch : iterable container, optional (default=all nodes)
        # A container of nodes. The container will be iterated through once.)
        laborForce0 = gvf.nx.degree(common.g, nbunch=self) + \
            1  # +1 to account for the entrepreneur herself
        # required labor force
        laborForceRequired = int(
            self.plannedProduction / common.laborProductivity)
        #
        # countUnemployed=0
        # for ag in self.agentList:
        #    if not ag.employed: countUnemployed+=1
        # print "I'm entrepreneur %d laborForce %d and required %d unemployed are %d" %\
        #(self.number, laborForce0, laborForceRequired, countUnemployed)
        # no action
        if laborForce0 == laborForceRequired:
            return
        # hire
        if laborForce0 < laborForceRequired:
            n = laborForceRequired - laborForce0
            tmpList = []
            for ag in self.agentList:
                if ag != self:
                    if ag.agType == "workers" and not ag.employed:
                        tmpList.append(ag)
            if len(tmpList) > 0:
                # hire at most n workers, limited by availability
                k = min(n, len(tmpList))
                shuffle(tmpList)
                for i in range(k):
                    hired = tmpList[i]
                    hired.employed = True
                    gvf.colors[hired] = "Aqua"
                    gvf.createEdge(self, hired)
                    # self, here, is the hiring firm
            # count edges (workers) of the firm, after hiring (the value is
            # recorded, but not used directly)
            self.numOfWorkers = gvf.nx.degree(common.g, nbunch=self)
            # nbunch : iterable container, optional (default=all nodes)
            # A container of nodes. The container will be iterated through
            # once.
            print(
                "entrepreneur",
                self.number,
                "is applying prod. plan and has",
                self.numOfWorkers,
                "edge/s after hiring")
        # fire
        if laborForce0 > laborForceRequired:
            n = laborForce0 - laborForceRequired
            # the list of the employees of the firm
            #entrepreneurWorkers = gvf.nx.neighbors(common.g, self) with nx 2.0
            entrepreneurWorkers = list(common.g.neighbors(self))
            # print "entrepreneur", self.number, "could fire",
            # entrepreneurWorkers
            # the list returned by nx is unstable as order
            entrepreneurWorkers = mySort(entrepreneurWorkers)
            if len(entrepreneurWorkers) > 0:  # has to be, but ...
                shuffle(entrepreneurWorkers)
                for i in range(n):
                    fired = entrepreneurWorkers[i]
                    gvf.colors[fired] = "OrangeRed"
                    fired.employed = False
                    # common.g_edge_labels.pop((self,fired)) no labels in edges
                    common.g.remove_edge(self, fired)
            # count edges (workers) after firing (recorded, but not used
            # directly)
            self.numOfWorkers = gvf.nx.degree(common.g, nbunch=self)
            # nbunch : iterable container, optional (default=all nodes)
            # A container of nodes. The container will be iterated through
            # once.
            print(
                "entrepreneur",
                self.number,
                "is applying prod. plan and has",
                self.numOfWorkers,
                "edge/s after firing")
# fireIfProfit
def fireIfProfit(self):
# workers do not fire
if self.agType == "workers":
return
if self.profit >= common.firingThreshold:
return
# the list of the employees of the firm
#entrepreneurWorkers = gvf.nx.neighbors(common.g, self) with nx 2.0
entrepreneurWorkers = list(common.g.neighbors(self))
# print "entrepreneur", self.number, "could fire", entrepreneurWorkers
# the list returnes by nx is unstable as order
entrepreneurWorkers = mySort(entrepreneurWorkers)
if len(entrepreneurWorkers) > 0:
fired = entrepreneurWorkers[randint(
0, len(entrepreneurWorkers) - 1)]
gvf.colors[fired] = "OrangeRed"
fired.employed = False
# common.g_edge_labels.pop((self,fired)) no label in edges
common.g.remove_edge(self, fired)
# count edges (workers) after firing (recorded, but not used
# directly)
self.numOfWorkers = gvf.nx.degree(common.g, nbunch=self)
# nbunch : iterable container, optional (default=all nodes)
# A container of nodes. The container will be iterated through
# once.
print("entrepreneur", self.number, "has",
self.numOfWorkers, "edge/s after firing")
# produce
def produce(self):
# this is an entrepreneur action
if self.agType == "workers":
return
# to produce we need to know the number of employees
# the value is calcutated on the fly, to be sure of accounting for
# modifications coming from outside
# (nbunch : iterable container, optional (default=all nodes)
# A container of nodes. The container will be iterated through once.)
laborForce = gvf.nx.degree(common.g, nbunch=self) + \
1 # +1 to account for the entrepreneur herself
print("I'm entrepreneur", self.number, "my laborforce is", laborForce)
# productivity is set to 1 in the benginning from common space
self.production = common.laborProductivity * \
laborForce
# totalProductionInA_TimeStep
common.totalProductionInA_TimeStep += self.production
# having a copy, that is update after each agent's action
common.totalProductionInPrevious_TimeStep = common.totalProductionInA_TimeStep
# produce
def produceV5(self):
# this is an entrepreneur action
if self.agType == "workers":
return
# to produce we need to know the number of employees
# the value is calcutated on the fly, to be sure of accounting for
# modifications coming from outside
# (nbunch : iterable container, optional (default=all nodes)
# A container of nodes. The container will be iterated through once.)
laborForce = gvf.nx.degree(common.g, nbunch=self) + \
1 # +1 to account for the entrepreneur herself
print("I'm entrepreneur", self.number, "my laborforce is", laborForce)
# productivity is set to 1 in the benginning from common space
self.production = common.laborProductivity * \
laborForce
# print "I'm entrepreneur",self.number,"production before correction is",\
# self.production
# correction for work troubles, if any
# self.hasTroubles is 0 if no troubles
self.production *= (1. - self.hasTroubles)
# print "I'm entrepreneur",self.number,"production after correction is",\
# self.production
# totalProductionInA_TimeStep
common.totalProductionInA_TimeStep += self.production
# having a copy, that is update after each agent's action
common.totalProductionInPrevious_TimeStep = common.totalProductionInA_TimeStep
# makeProductionPlan
def makeProductionPlan(self):
# this is an entrepreneur action
if self.agType == "workers":
return
if common.projectVersion >= "3" and common.cycle == 1:
nEntrepreneurs = 0
for ag in self.agentList:
if ag.agType == "entrepreneurs":
nEntrepreneurs += 1
# print nEntrepreneurs
nWorkersPlus_nEntrepreneurs = len(self.agentList)
# print nWorkersPlus_nEntrepreneurs
common.nu = (
common.rho * nWorkersPlus_nEntrepreneurs) / nEntrepreneurs
# print common.rho, common.nu
if (common.projectVersion >= "3" and common.cycle == 1) or \
common.projectVersion < "3":
self.plannedProduction = npr.poisson(
common.nu, 1)[0] # 1 is the number
# of element of the returned matrix (vector)
# print self.plannedProduction
common.totalPlannedProduction += self.plannedProduction
# print "entrepreneur", self.number, "plan", self.plannedProduction,\
# "total", common.totalPlannedProduction
# adaptProductionPlan
def adaptProductionPlan(self):
if common.cycle > 1:
nEntrepreneurs = 0
for ag in self.agentList:
if ag.agType == "entrepreneurs":
nEntrepreneurs += 1
# previous period price
#print ("++++++++++++++++++++++", common.ts_df.price.values[-1])
#print ("&&&&&&&&&&&&&&&&&&&&&&",len(common.ts_df.price.values))
if len(common.ts_df.price.values) == 1:
previuosPrice = common.ts_df.price.values[-1] # t=2
if len(common.ts_df.price.values) > 1:
previuosPrice = common.ts_df.price.values[-2] # t>2
# NB adapt acts from t>1
self.plannedProduction = (common.totalDemandInPrevious_TimeStep /
previuosPrice) \
/ nEntrepreneurs
#self.plannedProduction += common.mg.myGauss(0,self.plannedProduction/10)
shock = uniform(
-common.randomComponentOfPlannedProduction,
common.randomComponentOfPlannedProduction)
if shock >= 0:
self.plannedProduction *= (1. + shock)
if shock < 0:
shock *= -1.
self.plannedProduction /= (1. + shock)
# print self.number, self.plannedProduction
common.totalPlannedProduction += self.plannedProduction
# print "entrepreneur", self.number, "plan", self.plannedProduction,\
# "total", common.totalPlannedProduction
# adaptProductionPlanV6
def adaptProductionPlanV6(self):
# pre hayekian period
if common.cycle > 1 and common.cycle < common.startHayekianMarket:
# count of the entrepreneur number
nEntrepreneurs = 0
for ag in self.agentList:
if ag.agType == "entrepreneurs":
nEntrepreneurs += 1
# with the scheme of prices until V.5c_fd
if len(common.ts_df.price.values) == 1:
previuosPrice = common.ts_df.price.values[-1] # t=2
if len(common.ts_df.price.values) > 1:
previuosPrice = common.ts_df.price.values[-2] # t>2
# NB adapt acts from t>1
self.plannedProduction = (common.totalDemandInPrevious_TimeStep /
previuosPrice) \
/ nEntrepreneurs
shock = uniform(
-common.randomComponentOfPlannedProduction,
common.randomComponentOfPlannedProduction)
if shock >= 0:
self.plannedProduction *= (1. + shock)
if shock < 0:
shock *= -1.
self.plannedProduction /= (1. + shock)
# print self.number, self.plannedProduction
common.totalPlannedProduction += self.plannedProduction
# print "entrepreneur", self.number, "plan", self.plannedProduction,\
# "total", common.totalPlannedProduction
# hayekian period
if common.cycle >1 and common.cycle >= common.startHayekianMarket:
#the case common.cycle==1, with common.startHayekianMarket==1, is
#absorbed by makeProductionPlan
nEntrepreneurs = 0
for ag in self.agentList:
if ag.agType == "entrepreneurs":
nEntrepreneurs += 1
self.plannedProduction = \
common.totalConsumptionInQuantityInPrevious_TimeStep \
/ nEntrepreneurs
shock = uniform(
-common.randomComponentOfPlannedProduction,
common.randomComponentOfPlannedProduction)
if shock >= 0:
self.plannedProduction *= (1. + shock)
if shock < 0:
shock *= -1.
self.plannedProduction /= (1. + shock)
# print self.number, self.plannedProduction
common.totalPlannedProduction += self.plannedProduction
# print "entrepreneur", self.number, "plan", self.plannedProduction,\
# "total", common.totalPlannedProduction
# to record sold production and revenue in hayekian phase
self.soldProduction=0
self.revenue=0
    # set initial sell and buy prices in hayekian market
    def setInitialPricesHM(self):
        """Warm up the hayekian market and set individual starting prices.

        Three steps, all driven by common.cycle:
        1) once per cycle, compute the sellers/buyers ratio;
        2) once ever, at the first hayekian cycle, set the common warm-up
           price (from the price time series, or computed on the spot when
           the hayekian market starts at t==1);
        3) at the first hayekian cycle, derive each agent's individual
           sell and buy prices by shocking the common price.
        """
        # 1 -----------------------------------------
        # sellers/buyers ratio, computed once per cycle; the flag is reset
        # in setNewCycleValues at the beginning of each cycle
        if common.cycle >= common.startHayekianMarket:
            if not common.ratioSellersBuyersAlreadySet:
                nEntrepreneurs = 0
                for ag in self.agentList:
                    if ag.agType == "entrepreneurs":
                        nEntrepreneurs += 1
                nSellers=nEntrepreneurs
                # every agent (workers included) acts as a buyer
                nBuyers=len(self.agentList)
                common.ratioSellersBuyersAlreadySet=True
                common.ratioSellersBuyers=nSellers/nBuyers
                print("\nRatio sellers/buyers =",common.ratioSellersBuyers,"\n")
        # 2 -----------------------------------------
        # setting the basic (common) price, uniquely, before the first
        # hayekian cycle
        if common.cycle == common.startHayekianMarket and \
                not common.priceWarmingDone:
            # placeholder values, overwritten below
            common.sellPrice=1000
            common.buyPrice=-1000
            if common.startHayekianMarket>1:
                if len(common.ts_df.price.values) == 1:
                    common.buyPrice = common.sellPrice = \
                        common.ts_df.price.values[-1] # the last price
                    # NB the code above can act only if t>1
                if len(common.ts_df.price.values) > 1:
                    common.buyPrice = common.sellPrice = \
                        common.ts_df.price.values[-2] # the second last price
                    # NB the code above can act only if t>2
                # NB NB we set the sellPrice also for workers but we do not
                # use it; when a worker becomes an entrepreneur she copies
                # the sell price of the firm she is coming from
            else: # case t==1 being common.startHayekianMarket==1
                # look at the equilibrium price that would have been created
                # at t==1 in the non-hayekian execution: when actOnMaketPlace
                # is activated we already have
                # common.totalPlannedConsumptionInValueInA_TimeStep and
                # common.totalProductionInA_TimeStep, so we can calculate
                common.buyPrice = common.sellPrice = \
                    common.totalPlannedConsumptionInValueInA_TimeStep \
                    / common.totalProductionInA_TimeStep
                # outside WorldState setMarketPriceV3 method, to avoid here
                # random shocks
                # NB NB we set the sellPrice also for workers but we do not
                # use it; when a worker becomes an entrepreneur she copies
                # the sell price of the firm she is coming from
            # startingHayekianCommonPrice
            print("\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
            print("starting hayekian common price",common.buyPrice)
            print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
            common.priceWarmingDone = True
        # 3 -----------------------------------------
        # individual starting prices, shocked around the common price
        if common.cycle == common.startHayekianMarket:
            # starting sell price
            self.sellPrice = \
                applyRationallyTheRateOfChange(common.sellPrice,\
                uniform(-common.initShift*common.initShock, \
                (1-common.initShift)*common.initShock))
            if self.agType=="entrepreneurs":
                print("entrepreneur", self.number, "has initial sell price",\
                    self.sellPrice)
            self.sellPriceDefined=True
            # starting individual buy price
            self.buyPrice = \
                applyRationallyTheRateOfChange(common.buyPrice,\
                uniform((common.initShift-1)*common.initShock, \
                common.initShift*common.initShock))
# modify a specific sell price with a jump on the side of the up
# corrections, in full hayekian market
# NB we are at the end of each cycle
def nextSellPriceJumpFHM(self):
if self.agType != "entrepreneurs": return
if common.hParadigm=="quasi": return
if common.pJump != -1 and npr.uniform(0,1)<=common.pJump:
if self.jump == 0:
self.jump=common.jump
self.sellPrice *= 1 + self.jump
print("entrepreur # ", self.number, \
"raises the sell price with a jump")
else:
self.sellPrice /= 1 + self.jump
self.jump=0
print("entrepreur # ", self.number, \
"reduces the sell price with a jump back")
# modify sell prices in quasi hayekian market
# NB we are at the end of each cycle
def nextSellPricesQHM(self):
if self.agType != "entrepreneurs": return
if common.hParadigm=="full": return
# hayekian period, "quasi" hayekian paradigm
# i) considering relative unsold quantity
if common.hParadigm=="quasi" and common.quasiHchoice=="unsold":
if common.cycle >= common.startHayekianMarket:
oldP=self.sellPrice
if common.cycle >1 and \
common.entrepreneursMindIfPlannedProductionFalls and \
common.ts_df.iloc[-1, 3] / common.totalPlannedProduction - 1 >= \
common.thresholdToDecreaseThePriceIfTotalPlannedPFalls:
# indexing Python style, pos. -1 is the last one
self.sellPrice = applyRationallyTheRateOfChange(self.sellPrice,\
uniform(common.decreasingRateRange, 0))
print(("end of t = %d entrepreneur %d initial production"+\
" %.2f sold %.3f \nold price %.3f new price %.3f as "+\
" total plannedProduction falls") %\
(common.cycle,self.number,self.production,\
self.soldProduction,oldP,self.sellPrice))
else:
if self.soldProduction/self.production <= common.soldThreshold1:
self.sellPrice = applyRationallyTheRateOfChange(self.sellPrice,\
uniform(common.decreasingRateRange, 0))
if self.production/self.production>=common.soldThreshold2:
self.sellPrice = applyRationallyTheRateOfChange(self.sellPrice,\
uniform(0, common.increasingRateRange))
print(("end of t = %d entrepreneur %d initial production"+\
" %.2f sold %.3f \nold price %.3f new price %.3f") %\
(common.cycle,self.number,self.production,\
self.soldProduction,oldP,self.sellPrice))
return
# ii) considering randomUp
if common.hParadigm=="quasi" and common.quasiHchoice=="randomUp":
if common.pJump != -1 and npr.uniform(0,1)<=common.pJump:
if self.jump == 0:
self.jump=common.jump
self.sellPrice *= 1 + self.jump
print("entrepreur # ", self.number, \
"raises the sell price with a jump")
else:
self.sellPrice /= 1 + self.jump
self.jump=0
print("entrepreur # ", self.number, \
"reduces the sell price with a jump back")
return
# iii) consideirng profit falls to act on price
if common.hParadigm=="quasi" and common.quasiHchoice=="profit":
if common.cycle >= common.startHayekianMarket:
if self.profitStrategyReverseAfterN==0:
if common.priceSwitchIfProfitFalls=="raise":
if npr.uniform(0,1)<=0.6:
self.priceSwitchIfProfitFalls="raise"
else:
self.priceSwitchIfProfitFalls="lower"
if common.priceSwitchIfProfitFalls=="lower":
if npr.uniform(0,1)<=0.4:
self.priceSwitchIfProfitFalls="raise"
else:
self.priceSwitchIfProfitFalls="lower"
if common.pJump != -1 and self.profit <0 and \
npr.uniform(0,1)<=common.pJump:
if self.priceSwitchIfProfitFalls=="raise":
self.sellPrice *= 1 + common.jump
print("entrepreur # ", self.number, \
"with profit<0, is raising the sell price")
self.profitStrategyReverseAfterN=\
common.profitStrategyReverseAfterN
# 0 means: acting again always possible
# a value > the number of cycles means:
# acting again never possible
if self.priceSwitchIfProfitFalls=="lower":
self.sellPrice /= 1 + common.jump
print("entrepreur # ", self.number, \
"with profit<0, is lowering the sell price")
self.profitStrategyReverseAfterN=\
common.profitStrategyReverseAfterN
else:
self.profitStrategyReverseAfterN-=1
if self.profitStrategyReverseAfterN==0:
if self.priceSwitchIfProfitFalls=="raise":
self.sellPrice /= 1 + common.jump
print("entrepreur # ", self.number, \
"lowering back the sell price")
if self.priceSwitchIfProfitFalls=="lower":
self.sellPrice *= 1 + common.jump
print("entrepreur # ", self.number, \
"raising back the sell price")
return
# here in error
print("Using the 'quasi' option in hayekian market:\n",\
"the",common.quasiHchoice, "value is not one of the\n",
"valid option (unsold, randomUp, profit)")
os.sys.exit(1)
    # all acting as consumers on the market place
    def actOnMarketPlace(self):
        """One buyer substep in the hayekian market place.

        Every agent acts as a consumer: it picks a random seller, tries a
        deal at the seller's price, and both sides adapt their reservation
        prices afterwards. Also handles the once-per-cycle preparation of
        the seller list, the optional substep reporting, and the CSV log
        of the first hayekian cycle. Active only from
        common.startHayekianMarket on.
        """
        if common.cycle < common.startHayekianMarket: return
        # in each sub step, we show residual consumption and production; the
        # code operates on different agents, but consistently (in each call,
        # the elaboration jumps from an instance of agent to another one)
        if common.checkResConsUnsoldProd:
            if common.withinASubstep:
                common.internalSubStepAgentCounter+=1
                # the substep is over when every agent has acted
                if common.internalSubStepAgentCounter==len(self.agentList):
                    common.withinASubstep=False
            else: # not withinASubstep: this agent opens a new substep
                common.withinASubstep=True
                common.internalSubStepAgentCounter=1
                if common.currentCycle != common.cycle:
                    common.currentCycle = common.cycle
                    common.subStepCounter=0
                    common.readySellerList=False
                    print()
                common.subStepCounter+=1
                # report residual consumption capability (in value) and
                # residual unsold production (in quantity)
                residualConsumptionCapabilityInValue=0
                residualUnsoldProduction=0
                for anAgent in self.agentList:
                    residualConsumptionCapabilityInValue += anAgent.consumption
                    if anAgent.agType=="entrepreneurs":
                        residualUnsoldProduction+= \
                            anAgent.production - anAgent.soldProduction
                print(\
 "subc. %2d.%3d starts with cons. capab. (v) %.1f and uns. p. (q) %.1f"\
                % (common.cycle, common.subStepCounter, residualConsumptionCapabilityInValue,\
                  residualUnsoldProduction))
        # the CSV writer must have been created in mActions.py
        try: common.wr.writerow
        except:
            print("The file firstStepOutputInHayekianMarket.csv was not"+\
                  " created in mActions.py")
            os.sys.exit(1)
        # first call in each cycle, preparing action (only once per cycle)
        if not common.readySellerList:
            common.readySellerList=True
            # we check that the planning of the consumption has been
            # made for the current cycle
            if self.consumptionPlanningInCycleNumber != common.cycle:
                print('Attempt of using actOnMarketPlace method before'+\
                      ' consumption planning')
                os.sys.exit(1) # to stop the execution, in the calling module
                # we have multiple except, with 'SystemExit' case
            # create a temporary list of sellers, starting each step (cycle)
            common.sellerList=[]
            for anAg in self.agentList:
                if anAg.getType() == "entrepreneurs":
                    if not anAg.sellPriceDefined:
                        print("Inconsistent situation, an active selles"\
                              +" has no sell price defined.")
                        os.sys.exit(1)
                    else: common.sellerList.append(anAg)
        # acting (NB self.consumption comes from planConsumptionInValueV6)
        # if buying action is possible
        if self.consumption > 0:
            if common.sellerList != []:
                # choose a seller at random
                mySeller=common.sellerList[randint(0,len(common.sellerList)-1)]
                sellerQ=mySeller.production - mySeller.soldProduction
                if sellerQ>0:
                    # try a deal: the statuses record success (1)/failure (-1)
                    if self.buyPrice < mySeller.sellPrice:
                        self.statusB=mySeller.statusS=-1
                    if self.buyPrice >= mySeller.sellPrice:
                        self.statusB=mySeller.statusS= 1
                    # NOTE(review): the exchange below executes also when the
                    # price comparison failed (statusB == -1) — confirm this
                    # is intended
                    # NB production can be < plannedProduction due to lack of workers
                    # consumption in value cannot exceed self.maxConsumptionInAStep
                    buyerQ=min(self.consumption/mySeller.sellPrice, sellerQ,\
                        self.maxConsumptionInAStep/mySeller.sellPrice)
                    mySeller.soldProduction+=buyerQ
                    mySeller.revenue+=buyerQ*mySeller.sellPrice
                    self.consumption-=buyerQ*mySeller.sellPrice
                    self.unspentConsumptionCapability=self.consumption
                    # saving the price of the transaction
                    common.hayekianMarketTransactionPriceList_inACycle.\
                        append(mySeller.sellPrice)
                    common.totalConsumptionInQuantityInA_TimeStep += buyerQ
                # output - seller has no goods to sell (logged only in the
                # first hayekian cycle)
                elif common.cycle==common.startHayekianMarket:
                    common.wr.writerow\
                     (["nogoods", "buy", numpy.nan, self.consumption, self.number,\
                       "sell", numpy.nan,mySeller.number])
                # output - deal vs. nodeal (first hayekian cycle only)
                if common.cycle==common.startHayekianMarket:
                    if mySeller.statusS==1:
                        common.wr.writerow\
                         (["deal", "buy", self.buyPrice, self.consumption, self.number,\
                           "sell", mySeller.sellPrice,mySeller.number])
                    if mySeller.statusS==-1 and mySeller.sellPriceDefined:
                        common.wr.writerow\
                         (["nodeal", "buy", self.buyPrice, self.consumption, self.number,\
                           "sell", mySeller.sellPrice,mySeller.number])
                # correct running prices
                # if the status is != 0 the agent has already been acting
                if self.statusB == 1: # buyer case (statusB 1, successful buy attempt,
                    # acting mostly to decrease the reservation price)
                    self.buyPrice = applyRationallyTheRateOfChange(self.buyPrice,\
                                            uniform(-(1-common.runningShiftB)* \
                                            common.runningShockB, \
                                            common.runningShiftB* \
                                            common.runningShockB))
                if self.statusB == -1: # buyer case (statusB -1, unsuccessful buy attempt,
                    # acting mostly to increase the reservation price)
                    self.buyPrice = applyRationallyTheRateOfChange(self.buyPrice,\
                                            uniform(-common.runningShiftB* \
                                            common.runningShockB, \
                                            (1-common.runningShiftB)* \
                                            common.runningShockB))
                # NOTE(review): 'and' binds tighter than 'or', so the next two
                # conditions read as (statusS==±1 and hParadigm=="full") or
                # (quasi and first hayekian cycle) — confirm that is intended
                if mySeller.statusS == 1 and common.hParadigm=="full" or \
                   (common.hParadigm=="quasi" and \
                    common.cycle==common.startHayekianMarket):
                    # seller case (statusS 1, successful sell attempt,
                    # acting mostly to increase the reservation price)
                    mySeller.sellPrice = applyRationallyTheRateOfChange(mySeller.sellPrice,\
                                            common.ratioSellersBuyers*\
                                            uniform(-common.runningShiftS* \
                                            common.runningShockS,
                                            (1-common.runningShiftS)* \
                                            common.runningShockS))
                if mySeller.statusS == -1 and common.hParadigm=="full" or \
                   (common.hParadigm=="quasi" and \
                    common.cycle==common.startHayekianMarket):
                    # seller case (statusS -1, unsuccess. s. attempt,
                    # acting mostly to decrease the reservation price)
                    mySeller.sellPrice = applyRationallyTheRateOfChange(mySeller.sellPrice,\
                                            common.ratioSellersBuyers*\
                                            uniform(-(1-common.runningShiftS)* \
                                            common.runningShockS, \
                                            common.runningShiftS* \
                                            common.runningShockS))
                # cleaning the situation (redundant)
                self.statusB=mySeller.statusS=0
            # output - no sellers available (first hayekian cycle only)
            elif common.cycle==common.startHayekianMarket:
                common.wr.writerow\
                 (["nosellers", "buy", self.buyPrice, self.consumption, self.number,\
                   "sell", numpy.nan,numpy.nan])
        # output - no residual consumption capability (first hayekian cycle)
        elif common.cycle==common.startHayekianMarket:
            common.wr.writerow\
             (["noconsumption", "buy", numpy.nan, self.consumption, self.number,\
               "sell", numpy.nan,numpy.nan])
        # output close, at the beginning of the following cycle
        if common.cycle==common.startHayekianMarket+1 and not common.closed:
            common.csvf.close()
            common.closed=True
# calculateProfit V0
def evaluateProfitV0(self):
# this is an entrepreneur action
if self.agType == "workers":
return
# the number of producing workers is obtained indirectly via
# production/laborProductivity
# print self.production/common.laborProductivity
self.profit = (self.production / common.laborProductivity) * \
(common.revenuesOfSalesForEachWorker -
common.wage) + common.mg.myGauss(0, 0.05)
# calculateProfit
def evaluateProfit(self):
# this is an entrepreneur action
if self.agType == "workers":
return
# backward compatibily to version 1
try:
XC = common.newEntrantExtraCosts
except BaseException:
XC = 0
try:
k = self.extraCostsResidualDuration
except BaseException:
k = 0
if k == 0:
XC = 0
if k > 0:
self.extraCostsResidualDuration -= 1
# the number of pruducing workers is obtained indirectly via
# production/laborProductivity
# print self.production/common.laborProductivity
self.costs = common.wage * \
(self.production / common.laborProductivity) + XC
# the entrepreur sells her production, which is contributing - via
# totalActualProductionInA_TimeStep, to price formation
self.profit = common.price * self.production - self.costs
common.totalProfit += self.profit
# calculateProfit
def evaluateProfitV5(self):
# this is an entrepreneur action
if self.agType == "workers":
return
# backward compatibily to version 1
try:
XC = common.newEntrantExtraCosts
except BaseException:
XC = 0
try:
k = self.extraCostsResidualDuration
except BaseException:
k = 0
if k == 0:
XC = 0
if k > 0:
self.extraCostsResidualDuration -= 1
# the number of pruducing workers is obtained indirectly via
# production/laborProductivity
# print self.production/common.laborProductivity
# how many workers, not via productvity due to possible troubles
# in production
laborForce = gvf.nx.degree(common.g, nbunch=self) + \
1 # +1 to account for the entrepreneur herself
# the followin if/else structure is for control reasons because if
# not common.wageCutForWorkTroubles we do not take in account
# self.workTroubles also if != 0; if = 0 is non relevant in any case
if common.wageCutForWorkTroubles:
self.costs = (common.wage - self.hasTroubles) \
* (laborForce - 1) \
+ common.wage * 1 + \
XC
# above, common.wage * 1 is for the entrepreur herself
else:
self.costs = common.wage * laborForce + \
XC
# print "I'm entrepreur", self.number, "costs are",self.costs
# penalty Value
pv = 0
if self.hasTroubles > 0:
pv = common.penaltyValue
# the entrepreur sells her production, which is contributing - via
# totalActualProductionInA_TimeStep, to price formation
self.profit = common.price * (1. - pv) * self.production - self.costs
print("I'm entrepreur", self.number, "my price is ",
common.price * (1. - pv))
# individual data collection
# creating the dataframe
try:
common.firm_df
except BaseException:
common.firm_df = pd.DataFrame(
columns=[
'production',
'profit'])
print("\nCreation of fhe dataframe of the firms (individual data)\n")
firm_df2 = pd.DataFrame([[self.production, self.profit]],
columns=['production', 'profit'])
common.firm_df = common.firm_df.append(firm_df2, ignore_index=True)
common.totalProfit += self.profit
# calculateProfit
def evaluateProfitV6(self):
# this is an entrepreneur action
if self.agType == "workers":
return
# backward compatibily to version 1
try:
XC = common.newEntrantExtraCosts
except BaseException:
XC = 0
try:
k = self.extraCostsResidualDuration
except BaseException:
k = 0
if k == 0:
XC = 0
if k > 0:
self.extraCostsResidualDuration -= 1
# the number of pruducing workers is obtained indirectly via
# production/laborProductivity
# print self.production/common.laborProductivity
# how many workers, not via productvity due to possible troubles
# in production
laborForce = gvf.nx.degree(common.g, nbunch=self) + \
1 # +1 to account for the entrepreneur herself
# the followin if/else structure is for control reasons because if
# not common.wageCutForWorkTroubles we do not take in account
# self.workTroubles also if != 0; if = 0 is non relevant in any case
if common.wageCutForWorkTroubles:
self.costs = (common.wage - self.hasTroubles) \
* (laborForce - 1) \
+ common.wage * 1 + \
XC
# above, common.wage * 1 is for the entrepreur herself
else:
self.costs = common.wage * laborForce + \
XC
# print "I'm entrepreur", self.number, "costs are",self.costs
# penalty Value
pv = 0
if self.hasTroubles > 0:
pv = common.penaltyValue
# V6 - before hayekian phase
if common.cycle < common.startHayekianMarket:
# the entrepreur sells her production, which is contributing - via
# totalActualProductionInA_TimeStep, to price formation
self.profit = common.price * (1. - pv) * self.production - self.costs
print("I'm entrepreur", self.number, "my price is ",
common.price * (1. - pv))
# V6 - into the hayekian phase
else:
self.profit = self.revenue - self.costs
print("I'm entrepreur", self.number, "my individual price is ",
self.sellPrice)
# individual data collection
# creating the dataframe
try:
common.firm_df
except BaseException:
common.firm_df = pd.DataFrame(
columns=[
'production',
'profit'])
print("\nCreation of fhe dataframe of the firms (individual data)\n")
firm_df2 = pd.DataFrame([[self.production, self.profit]],
columns=['production', 'profit'])
common.firm_df = common.firm_df.append(firm_df2, ignore_index=True)
common.totalProfit += self.profit
# consumptions
def planConsumptionInValue(self):
self.consumption = 0
#case (1)
# Y1=profit(t-1)+wage NB no negative consumption if profit(t-1) < 0
# this is an entrepreneur action
if self.agType == "entrepreneurs":
self.consumption = common.a1 + \
common.b1 * (self.profit + common.wage) + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
if self.consumption < 0:
self.consumption = 0
# profit, in V2, is at time -1 due to the sequence in schedule2.xls
#case (2)
# Y2=wage
if self.agType == "workers" and self.employed:
self.consumption = common.a2 + \
common.b2 * common.wage + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
#case (3)
# Y3=socialWelfareCompensation
if self.agType == "workers" and not self.employed:
self.consumption = common.a3 + \
common.b3 * common.socialWelfareCompensation + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
# update totalPlannedConsumptionInValueInA_TimeStep
common.totalPlannedConsumptionInValueInA_TimeStep += self.consumption
# print "C sum", common.totalPlannedConsumptionInValueInA_TimeStep
# consumptions
def planConsumptionInValueV5(self):
self.consumption = 0
#case (1)
# Y1=profit(t-1)+wage NB no negative consumption if profit(t-1) < 0
# this is an entrepreneur action
if self.agType == "entrepreneurs":
self.consumption = common.a1 + \
common.b1 * (self.profit + common.wage) + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
if self.consumption < 0:
self.consumption = 0
# profit, in V2, is at time -1 due to the sequence in schedule2.xls
#case (2)
# Y2=wage
if self.agType == "workers" and self.employed:
# the followin if/else structure is for control reasons because if
# not common.wageCutForWorkTroubles we do not take in account
# self.workTroubles also if != 0; if = 0 is non relevant in any
# case
if common.wageCutForWorkTroubles:
self.consumption = common.a2 + \
common.b2 * common.wage * (1. - self.workTroubles) + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
# print "worker", self.number, "wage x",(1.-self.workTroubles)
else:
self.consumption = common.a2 + \
common.b2 * common.wage + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
#case (3)
# Y3=socialWelfareCompensation
if self.agType == "workers" and not self.employed:
self.consumption = common.a3 + \
common.b3 * common.socialWelfareCompensation + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
# update totalPlannedConsumptionInValueInA_TimeStep
common.totalPlannedConsumptionInValueInA_TimeStep += self.consumption
# print "C sum", common.totalPlannedConsumptionInValueInA_TimeStep
self.consumptionPlanningInCycleNumber=common.cycle
# consumptions
def planConsumptionInValueV6(self):
self.consumption = 0
#case (1)
# Y1=profit(t-1)+wage NB no negative consumption if profit(t-1) < 0
# this is an entrepreneur action
if self.agType == "entrepreneurs":
self.consumption = common.a1 + \
common.b1 * (self.profit + common.wage) + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
if self.consumption < 0:
self.consumption = 0
# profit, in V2, is at time -1 due to the sequence in schedule2.xls
#case (2)
# Y2=wage
if self.agType == "workers" and self.employed:
# the followin if/else structure is for control reasons because if
# not common.wageCutForWorkTroubles we do not take in account
# self.workTroubles also if != 0; if = 0 is non relevant in any
# case
if common.wageCutForWorkTroubles:
self.consumption = common.a2 + \
common.b2 * common.wage * (1. - self.workTroubles) + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
# print "worker", self.number, "wage x",(1.-self.workTroubles)
else:
self.consumption = common.a2 + \
common.b2 * common.wage + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
#case (3)
# Y3=socialWelfareCompensation
if self.agType == "workers" and not self.employed:
self.consumption = common.a3 + \
common.b3 * common.socialWelfareCompensation + \
common.mg.myGauss(0, common.consumptionRandomComponentSD)
# reuse unspent consumption capability
#if self.number==1:
# print("reuse unspent consumption capability", \
# self.unspentConsumptionCapability)
self.consumption += common.reUseUnspentConsumptionCapability * \
self.unspentConsumptionCapability
if self.consumption < 0:
#print('*************************************',self.employed, \
# self.consumption)
self.consumption=0
# max cons. in each step of a cycles of the hayekian phase
self.maxConsumptionInAStep=self.consumption*common.consumptionQuota
# update totalPlannedConsumptionInValueInA_TimeStep
if common.cycle < common.startHayekianMarket or \
(common.cycle == common.startHayekianMarket and \
common.startHayekianMarket == 1):
# the 'or' condition is necessary In the hayekian perspective,
# when the start is a cyce 1; the value of
# common.totalPlannedConsumptionInValueInA_TimeStep is necessary
# in the warming phase: look at the 'else' within
# the second block in setInitialPricesHM
common.totalPlannedConsumptionInValueInA_TimeStep += self.consumption
# print "C sum", common.totalPlannedConsumptionInValueInA_TimeStep
self.consumptionPlanningInCycleNumber=common.cycle
# to entrepreneur
def toEntrepreneur(self):
if self.agType != "workers" or not self.employed:
return
#myEntrepreneur = gvf.nx.neighbors(common.g, self)[0] with nx 2.0
myEntrepreneur = list(common.g.neighbors(self))[0]
myEntrepreneurProfit = myEntrepreneur.profit
if myEntrepreneurProfit >= common.thresholdToEntrepreneur:
print("I'm worker %2.0f and myEntrepreneurProfit is %4.2f" %
(self.number, myEntrepreneurProfit))
common.g.remove_edge(myEntrepreneur, self)
# originally, it was a worker
if self.xPos > 0:
gvf.pos[self] = (self.xPos - 15, self.yPos)
# originally, it was an entrepreneur
else:
gvf.pos[self] = (self.xPos, self.yPos)
# colors at http://www.w3schools.com/html/html_colornames.asp
gvf.colors[self] = "LawnGreen"
self.agType = "entrepreneurs"
self.employed = True
self.extraCostsResidualDuration = common.extraCostsDuration
# to entrepreneurV3
def toEntrepreneurV3(self):
if self.agType != "workers" or not self.employed:
return
# print float(common.absoluteBarrierToBecomeEntrepreneur)/ \
# len(self.agentList)
if random() <= float(common.absoluteBarrierToBecomeEntrepreneur) / \
len(self.agentList):
#myEntrepreneur = gvf.nx.neighbors(common.g, self)[0] with nx 2.0
myEntrepreneur = list(common.g.neighbors(self))[0]
myEntrepreneurProfit = myEntrepreneur.profit
myEntrepreneurCosts = myEntrepreneur.costs
if myEntrepreneurProfit / myEntrepreneurCosts >= \
common.thresholdToEntrepreneur:
print(
"Worker %2.0f is now an entrepreneur (previous firm relative profit %4.2f)" %
(self.number, myEntrepreneurProfit / myEntrepreneurCosts))
common.g.remove_edge(myEntrepreneur, self)
# originally, it was a worker
if self.xPos > 0:
gvf.pos[self] = (self.xPos - 15, self.yPos)
# originally, it was an entrepreneur
else:
gvf.pos[self] = (self.xPos, self.yPos)
# colors at http://www.w3schools.com/html/html_colornames.asp
gvf.colors[self] = "LawnGreen"
self.agType = "entrepreneurs"
self.employed = True
self.extraCostsResidualDuration = common.extraCostsDuration
# to entrepreneurV6
    def toEntrepreneurV6(self):
        """Promotion attempt, V6: like V3 (stochastic, relative-profit
        based) but, in the hayekian-market phase, the new entrepreneur
        also inherits the price strategy of the previous firm.

        NOTE(review): profit/costs raises ZeroDivisionError if the
        employer's costs are 0 — confirm this cannot happen by this
        point of the schedule.
        """
        if self.agType != "workers" or not self.employed:
            return
        # print float(common.absoluteBarrierToBecomeEntrepreneur)/ \
        # len(self.agentList)
        # stochastic barrier: promotion is even attempted only with
        # probability absoluteBarrierToBecomeEntrepreneur / #agents
        if random() <= float(common.absoluteBarrierToBecomeEntrepreneur) / \
                len(self.agentList):
            #myEntrepreneur = gvf.nx.neighbors(common.g, self)[0] with nx 2.0
            # the employer is the worker's only neighbor in the graph
            myEntrepreneur = list(common.g.neighbors(self))[0]
            myEntrepreneurProfit = myEntrepreneur.profit
            myEntrepreneurCosts = myEntrepreneur.costs
            if myEntrepreneurProfit / myEntrepreneurCosts >= \
                    common.thresholdToEntrepreneur:
                print(
                    "Worker %2.0f is now an entrepreneur (previous firm relative profit %4.2f)" %
                    (self.number, myEntrepreneurProfit / myEntrepreneurCosts))
                common.g.remove_edge(myEntrepreneur, self)
                # originally, it was a worker
                if self.xPos > 0:
                    gvf.pos[self] = (self.xPos - 15, self.yPos)
                # originally, it was an entrepreneur
                else:
                    gvf.pos[self] = (self.xPos, self.yPos)
                # colors at http://www.w3schools.com/html/html_colornames.asp
                gvf.colors[self] = "LawnGreen"
                self.agType = "entrepreneurs"
                self.employed = True
                self.extraCostsResidualDuration = common.extraCostsDuration
                # in the hayekian phase the newborn firm must start with
                # a defined price: copy it from the previous firm or abort
                if common.cycle >=common.startHayekianMarket:
                    if myEntrepreneur.sellPriceDefined:
                        self.sellPrice=myEntrepreneur.sellPrice
                        self.jump=myEntrepreneur.jump
                        print("with the same sell price of the the previous firm",\
                              self.sellPrice)
                        self.profitStrategyReverseAfterN=\
                            myEntrepreneur.profitStrategyReverseAfterN
                        self.priceSwitchIfProfitFalls=\
                            myEntrepreneur.priceSwitchIfProfitFalls
                    else:
                        # a missing price is treated as a fatal model
                        # inconsistency: stop the whole simulation
                        print("New entrepreneur cannot copy the price of previous firm")
                        os.sys.exit(1)
# to workers
def toWorker(self):
if self.agType != "entrepreneurs":
return
if self.profit <= common.thresholdToWorker:
print("I'm entrepreneur %2.0f and my profit is %4.2f" %
(self.number, self.profit))
# the list of the employees of the firm, IF ANY
#entrepreneurWorkers = gvf.nx.neighbors(common.g, self) with nx 2.0
entrepreneurWorkers = list(common.g.neighbors(self))
print("entrepreneur", self.number, "has",
len(entrepreneurWorkers),
"workers to be fired")
if len(entrepreneurWorkers) > 0:
for aWorker in entrepreneurWorkers:
gvf.colors[aWorker] = "OrangeRed"
aWorker.employed = False
common.g.remove_edge(self, aWorker)
self.numOfWorkers = 0
# originally, it was an entrepreneur
if self.xPos < 0:
gvf.pos[self] = (self.xPos + 15, self.yPos)
# originally, it was a worker
else:
gvf.pos[self] = (self.xPos, self.yPos)
# colors at http://www.w3schools.com/html/html_colornames.asp
gvf.colors[self] = "OrangeRed"
self.agType = "workers"
self.employed = False
# to workersV3
def toWorkerV3(self):
if self.agType != "entrepreneurs":
return
# check for newborn firms
try:
self.costs
except BaseException:
return
if self.profit / self.costs <= common.thresholdToWorker:
print("I'm entrepreneur %2.0f and my relative profit is %4.2f" %
(self.number, self.profit / self.costs))
# the list of the employees of the firm, IF ANY
#entrepreneurWorkers = gvf.nx.neighbors(common.g, self) with nx 2.0
entrepreneurWorkers = list(common.g.neighbors(self))
print("entrepreneur", self.number, "has",
len(entrepreneurWorkers),
"workers to be fired")
if len(entrepreneurWorkers) > 0:
for aWorker in entrepreneurWorkers:
gvf.colors[aWorker] = "OrangeRed"
aWorker.employed = False
common.g.remove_edge(self, aWorker)
self.numOfWorkers = 0
# originally, it was an entrepreneur
if self.xPos < 0:
gvf.pos[self] = (self.xPos + 15, self.yPos)
# originally, it was a worker
else:
gvf.pos[self] = (self.xPos, self.yPos)
# colors at http://www.w3schools.com/html/html_colornames.asp
gvf.colors[self] = "OrangeRed"
self.agType = "workers"
self.employed = False
# work troubles
    def workTroubles(self):
        """Apply a work-trouble production shock to this entrepreneur
        and, if wage cuts are enabled, propagate it to her workers.

        NOTE(review): the assignment ``aWorker.workTroubles = psiShock``
        below shadows this very method on worker instances with a float;
        the consumption code relies on that float being set — confirm
        workers always receive it before it is read.
        """
        # NB this method acts with the probability set in the schedule.txt
        # file
        if self.agType != "entrepreneurs":
            return
        # production shock due to work troubles, drawn uniformly in
        # [psi/2, psi]
        psiShock = uniform(common.productionCorrectionPsi / 2,
                           common.productionCorrectionPsi)
        self.hasTroubles = psiShock
        print("Entrepreneur", self.number, "is suffering a reduction of "
              "production of", psiShock * 100, "%, due to work troubles")
        if common.wageCutForWorkTroubles:
            # the list of the employees of the firm
            #entrepreneurWorkers = gvf.nx.neighbors(common.g, self) with nx 2.0
            entrepreneurWorkers = list(common.g.neighbors(self))
            for aWorker in entrepreneurWorkers:
                # avoiding the entrepreneur herself, as we are refering to her
                # network of workers
                aWorker.workTroubles = psiShock
                # print "Worker ", aWorker.number, "is suffering a reduction of "\
                # "wage of", psiShock*100, "%, due to work troubles"
# get graph
    def getGraph(self):
        """Return the model-wide NetworkX graph shared via commonVar."""
        return common.g
# get type
    def getType(self):
        """Return this agent's type string (e.g. 'workers')."""
        return self.agType
| {
"repo_name": "terna/SLAPP3",
"path": "6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/oligopoly/Agent.py",
"copies": "1",
"size": "64523",
"license": "cc0-1.0",
"hash": 3846383145802185000,
"line_mean": 40.0190718373,
"line_max": 97,
"alpha_frac": 0.5622026254,
"autogenerated": false,
"ratio": 4.053970846946469,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5116173472346469,
"avg_score": null,
"num_lines": null
} |
# Agent.py
from Tools import *
from agTools import *
from turtle import *
# register the custom turtle shapes: gif images for the pupils (one per
# color/gender combination, expected in the 'school/' folder) and a
# polygon for the desks
register_shape("school/pupilBlackF.gif")
register_shape("school/pupilGreenF.gif")
register_shape("school/pupilRedF.gif")
register_shape("school/pupilYellowF.gif")
register_shape("school/pupilSpecialF.gif")
register_shape("school/pupilBlackM.gif")
register_shape("school/pupilGreenM.gif")
register_shape("school/pupilRedM.gif")
register_shape("school/pupilYellowM.gif")
register_shape("school/pupilSpecialM.gif")
register_shape("desk", ((5, -15), (15, -15), (15, 15), (5, 15)))
class Agent(Turtle, SuperAgent):
    """A pupil (or desk) agent of the SLAPP 'school' model.

    Combines the turtle-graphics representation (position, shape,
    on-screen writing) with SLAPP's SuperAgent behavior.  Most public
    methods are actions triggered by the external schedule; an action
    may contribute a value in [0, 1] to the running mean of this tick's
    attention (see _addAttention).

    Refactoring note: the attention running-mean update and the
    "I'm <type> agent <n>" announcement, originally copy-pasted in
    every action, are factored into the private helpers _addAttention
    and _say; behavior (prints, turtle calls, RNG call order) is
    unchanged.
    """

    def __init__(self, number, myWorldState,
                 xPos, yPos, lX=-20, rX=19, bY=-20, tY=19, agType=""):
        Turtle.__init__(self)
        # nb lX (left lim. on X axis), rX (right lim. on X axis),
        # bY (bottom lim. on Y axis), tY (top lim. on y axis),
        # are defined in ModelSwarm.py
        # the environment
        self.agOperatingSets = []
        self.number = number
        self.lX = lX
        self.rX = rX
        self.bY = bY
        self.tY = tY
        self.myWorldState = myWorldState
        self.agType = agType
        # the agent; xPos0/yPos0 remember the assigned seat so the agent
        # can come back to it (sitDownWell, standUp, ...)
        self.xPos = xPos
        self.xPos0 = xPos
        self.yPos = yPos
        self.yPos0 = yPos
        self.deltaAtt = 0
        print("agent", self.agType, "#", self.number,
              "has been created at", self.xPos, ",", self.yPos)
        # turtle behavior
        self.speed(0)
        self.penup()
        self.color("black")  # default color
        self.setx(self.xPos * 1)  # *1 is a memo to change scale if needed
        self.sety(self.yPos * 1)
        # attention accounting
        self.attention = 0
        self.tickAttention = 0
        self.nMethodsAttention = 0
        self.lastExecutedMethod = ''

    # ----- private helpers (extracted from the repeated action code) -----

    def _say(self, message):
        # standard announcement prefix + message, on one output line
        print("I'm %s agent %d: " % (self.agType, self.number), end=' ')
        print(message)

    def _addAttention(self, add):
        # fold one action's attention value into the running mean of the
        # current tick: mean_k = (mean_{k-1} * (k-1) + add) / k
        self.nMethodsAttention += 1
        self.tickAttention = (self.tickAttention * (self.nMethodsAttention - 1)
                              + add) \
            / self.nMethodsAttention

    def _shift(self, spread, offset):
        # random displacement of both coordinates (relative move)
        self.xPos += random.random() * spread + offset
        self.setx(self.xPos)
        self.yPos += random.random() * spread + offset
        self.sety(self.yPos)

    def _nearSeat(self, spread, offset):
        # random position around the assigned seat (absolute move)
        self.xPos = self.xPos0 + random.random() * spread + offset
        self.setx(self.xPos)
        self.yPos = self.yPos0 + random.random() * spread + offset
        self.sety(self.yPos)

    def _goNear(self, x, y):
        # random position around a fixed classroom spot (absolute move)
        self.xPos = x + random.random() * 10 - 5
        self.setx(self.xPos)
        self.yPos = y + random.random() * 10 - 5
        self.sety(self.yPos)

    def setColorAndGender(self, color, gender):
        """Set the pen color and the gender, choosing the turtle shape
        accordingly (gif per color/gender; 'desk' polygon for brown)."""
        self.color(color)
        self.gender = gender
        shapeStems = {"black": "Black", "green": "Green", "red": "Red",
                      "yellow": "Yellow", "violet": "Special"}
        if gender in ("F", "M") and color in shapeStems:
            self.shape("school/pupil%s%s.gif" % (shapeStems[color], gender))
        # NB: as in the original check layout, the desk shape is assigned
        # only when gender is "M"
        if gender == "M" and color == "brown":
            self.shape("desk")

    # reset values (overrides the one in agTools.py)
    def setNewCycleValues(self):
        self.deltaAtt = 0
        # self.attention=0
        # self.lastExecutedMethod=''

    # attention (total value until previous step)
    def getAttention(self):
        return self.attention

    # attention (with the last step); also resets the per-tick accumulators
    def getAttentionAndCleanStep(self):
        self.attention += self.tickAttention
        self.tickAttention = 0
        self.nMethodsAttention = 0
        return self.attention

    # ",**d" in the parameter lists of the methods is a place holder
    # in case we use, calling the method, a dictionary as last param.

    # ----- active methods -----

    def askWell(self, **d):
        self.lastExecutedMethod = self.askWell
        print()
        self._say("I'm asking well!")
        self._addAttention(1.0)
        self.write("Asking well!!!", font=("Arial", 14))  # turtle action

    def fidget(self, **d):
        self.lastExecutedMethod = self.fidget
        self._say("I'm fidgeting")
        self._addAttention(random.random() * 0.3 + 0.7)

    def shake(self, **d):
        self.lastExecutedMethod = self.shake
        self._say("I'm shaking")
        if self.agType == "greenPupil":
            add = random.random() * 0.3 + 0.6
        else:
            add = random.random() * 0.4 + 0.4
        self._addAttention(add)
        self._shift(20, -10)
        self.write("Shaking", font=("Arial", 16))  # turtle action

    def twist(self, **d):
        self.lastExecutedMethod = self.twist
        self._say("I'm twisting")
        self._addAttention(random.random() * 0.3)
        self._shift(30, -10)
        self.write("Twisting", font=("Arial", 16))  # turtle action

    def payAttention(self, **d):
        self.lastExecutedMethod = self.payAttention
        self._say("I'm paying attention")
        self._addAttention(1.0)

    def attractTeacherAttentionWell(self, **d):
        self.lastExecutedMethod = self.attractTeacherAttentionWell
        self._say("I'm attracting Teacher attention well")
        # not relevant for attention
        self.write("Excuse me", font=("Arial", 12))  # turtle action

    def attractTeacherAttentionNotWell(self, **d):
        self.lastExecutedMethod = self.attractTeacherAttentionNotWell
        self._say("I'm attracting Teacher attention not well")
        # not relevant for attention
        self.write("TEACHER!!", font=("Arial", 14))  # turtle action

    def attElsewhere(self, **d):
        self.lastExecutedMethod = self.attElsewhere
        self._say("and I'm distracted")
        self._addAttention(random.random() * 0.2)
        self.write("!?!?", font=("Arial", 14))  # turtle action

    def sitDownNotWell(self, **d):
        self.lastExecutedMethod = self.sitDownNotWell
        self._say("I'm sitting down not well")
        # not relevant for attention
        self._nearSeat(20, -10)

    def sitDownWell(self, **d):
        self.lastExecutedMethod = self.sitDownWell
        self._say("I'm sitting down well")
        # not relevant for attention; back to the exact seat
        self.xPos = self.xPos0
        self.setx(self.xPos)
        self.yPos = self.yPos0
        self.sety(self.yPos)

    def standUp(self, **d):
        self.lastExecutedMethod = self.standUp
        self._say("I'm standing up")
        # not relevant for attention; drawn 15 above the seat, while the
        # logical yPos stays at the seat
        self.xPos = self.xPos0
        self.setx(self.xPos)
        self.yPos = self.yPos0
        self.sety(self.yPos + 15)

    def turnBack(self, **d):
        self.lastExecutedMethod = self.turnBack
        self._say("I'm turning back")
        # not relevant for attention
        self._nearSeat(30, -10)

    def moveToTeacherDesk(self, **d):
        self.lastExecutedMethod = self.moveToTeacherDesk
        self._say("going to teacher desk")
        # not relevant for attention
        self._goNear(200, -75)

    def moveToBlackboard(self, **d):
        self.lastExecutedMethod = self.moveToBlackboard
        self._say("going to blackboard")
        # not relevant for attention
        self._goNear(40, -125)

    def doDesk(self, **d):
        self.lastExecutedMethod = self.doDesk
        self._say("I'm doing desk")
        # not relevant for attention
        self.write("Do desk", font=("Arial", 14))  # turtle action

    def doWork(self, **d):
        self.lastExecutedMethod = self.doWork
        self._say("I'm doing work")
        # not relevant for attention
        self.write("Work", font=("Arial", 14))  # turtle action

    def metaCognition(self, **d):
        self.lastExecutedMethod = self.metaCognition
        self._say("I'm doing metacognition!")
        self._addAttention(1.0)
        self.write("meta", font=("Arial", 14))  # turtle action

    def talk(self, **d):
        self.lastExecutedMethod = self.talk
        self._say("I'm talking")
        self._addAttention(0.0)
        self.write("blah blah", font=("Arial", 14))  # turtle action

    def talkTeacherWell(self, **d):
        self.lastExecutedMethod = self.talkTeacherWell
        self._say("I'm talking to the teacher well")
        self._addAttention(1.0)
        self.write("ok", font=("Arial", 14))  # turtle action

    def talkTeacherBad(self, **d):
        self.lastExecutedMethod = self.talkTeacherBad
        self._say("I'm talking to the teacher bad")
        self._addAttention(1.0)
        self.write("gulp", font=("Arial", 14))  # turtle action

    def talkTeacherNotWell(self, **d):
        self.lastExecutedMethod = self.talkTeacherNotWell
        self._say("I'm talking to the teacher not well")
        self._addAttention(0.0)
        self.write("not ok", font=("Arial", 14))  # turtle action

    def talkTeacherWrong(self, **d):
        self.lastExecutedMethod = self.talkTeacherWrong
        self._say("I'm talking to the teacher wrong")
        self._addAttention(1.0)
        self.write("bleah", font=("Arial", 14))  # turtle action

    def checkTeacherTalkClose(self, **d):
        self.lastExecutedMethod = self.checkTeacherTalkClose
        self._say("I'm checking teacher and talking closely")
        if self.agType == "greenPupil":
            add = random.random() * 0.2 + 0.4
        else:
            add = random.random() * 0.2 + 0.3
        self._addAttention(add)
        self.write("spy, pss pss", font=("Arial", 12))  # turtle action

    def talkClose(self, **d):
        self.lastExecutedMethod = self.talkClose
        self._say("I'm talking closely")
        if self.agType == "greenPupil":
            add = random.random() * 0.3 + 0.6
        else:
            add = random.random() * 0.4 + 0.4
        self._addAttention(add)
        self.write("pss pss", font=("Arial", 12))  # turtle action

    def answerWell(self, **d):
        self.lastExecutedMethod = self.answerWell
        self._say("answering well")
        self._addAttention(1.0)
        self.write("answ ok", font=("Arial", 14))  # turtle answ

    def answerBad(self, **d):
        self.lastExecutedMethod = self.answerBad
        self._say("answering bad")
        self._addAttention(1.0)
        self.write("answ gulp", font=("Arial", 14))  # turtle answ

    def answerNotWell(self, **d):
        self.lastExecutedMethod = self.answerNotWell
        self._say("answering not well")
        self._addAttention(0.0)
        self.write("answ not ok", font=("Arial", 14))  # turtle answ

    def answerWrong(self, **d):
        self.lastExecutedMethod = self.answerWrong
        self._say("answering wrong")
        self._addAttention(1.0)
        self.write("answ bleah", font=("Arial", 14))  # turtle answ

    def answerChorus(self, **d):
        self.lastExecutedMethod = self.answerChorus
        self._say("I'm answering chorus")
        self._addAttention(0.0)
        self.write("Chorus", font=("Arial", 14))  # turtle action

    def askRepeat(self, **d):
        self.lastExecutedMethod = self.askRepeat
        self._say("I'm asking repeat")
        # not relevant for attention
        self.write("Repeat", font=("Arial", 14))  # turtle action

    def tidy(self, **d):
        self.lastExecutedMethod = self.tidy
        self._say("I'm tidying")
        self._addAttention(random.random() * 0.2 + 0.7)
        self.write("Tidy", font=("Arial", 12))  # turtle action

    def untidy(self, **d):
        self.lastExecutedMethod = self.untidy
        self._say("I'm untidying")
        self._addAttention(random.random() * 0.2 + 0.7)
        self.write("Untidy", font=("Arial", 12))  # turtle action

    def untidyTidy(self, **d):
        self.lastExecutedMethod = self.untidyTidy
        self._say("I'm untidying tidying")
        self._addAttention(random.random() * 0.2 + 0.6)
        self.write("un-tidy", font=("Arial", 12))  # turtle action

    def borrow(self, **d):
        self.lastExecutedMethod = self.borrow
        self._say("I'm borrowing")
        self._addAttention(random.random() * 0.2 + 0.7)
        self.write("Borrow", font=("Arial", 12))  # turtle action

    def wellness(self, **d):
        self.lastExecutedMethod = self.wellness
        self._say("Wellness")
        self._addAttention(random.random() * 0.2 + 0.7)
        self._shift(20, -10)
        self.write("wellness", font=("Arial", 14))  # turtle action

    def bePraised(self, **d):
        self.lastExecutedMethod = self.bePraised
        self._say("I'm being praised")
        # not relevant for attention
        self.write("Good", font=("Arial", 14))  # turtle action

    def beQuiteBored(self, **d):
        self.lastExecutedMethod = self.beQuiteBored
        self._say("I'm quite bored")
        self._addAttention(random.random() * 0.2 + 0.6)
        self.write("uff", font=("Arial", 12))  # turtle action

    def beScolded(self, **d):
        self.lastExecutedMethod = self.beScolded
        self._say("I'm being scolded")
        # not relevant for attention
        self.write("ohi ohi", font=("Arial", 14))  # turtle action

    def beVeryBored(self, **d):
        self.lastExecutedMethod = self.beVeryBored
        self._say("I'm very bored")
        self._addAttention(random.random() * 0.2 + 0.3)
        self.write("ronf", font=("Arial", 12))  # turtle action

    def mumble(self, **d):
        self.lastExecutedMethod = self.mumble
        self._say("I'm mumbling")
        # not relevant for attention
        self.write("mumble", font=("Arial", 14))  # turtle action

    def growl(self, **d):
        self.lastExecutedMethod = self.growl
        self._say("I'm growling")
        if self.agType == "greenPupil":
            add = random.random() * 0.2 + 0.5
        else:
            add = random.random() * 0.2 + 0.4
        self._addAttention(add)
        self.write("Growl", font=("Arial", 14))  # turtle action

    def tease(self, **d):
        self.lastExecutedMethod = self.tease
        self._say("I'm teasing")
        # not relevant for attention
        self.write("ihihih", font=("Arial", 14))  # turtle action

    def helpClassmate(self, **d):
        self.lastExecutedMethod = self.helpClassmate
        self._say("I'm helping you")
        # not relevant for attention
        self.write("SOS", font=("Arial", 14))  # turtle action

    def checkTeacher(self, **d):
        self.lastExecutedMethod = self.checkTeacher
        self._say("I'm checking teacher")
        # not relevant for attention
        self.write("spy teacher", font=("Arial", 14))  # turtle action

    def checkClassmateWork(self, **d):
        self.lastExecutedMethod = self.checkClassmateWork
        self._say("I'm checking classmate work")
        # not relevant for attention
        self.write("spy work", font=("Arial", 14))  # turtle action

    def checkWork(self, **d):
        self.lastExecutedMethod = self.checkWork
        self._say("I'm checking work")
        # not relevant for attention
        self.write("check work", font=("Arial", 14))  # turtle action

    def checkFastWork(self, **d):
        self.lastExecutedMethod = self.checkFastWork
        self._say("I'm checking fast work")
        # not relevant for attention
        self.write("check fast work", font=("Arial", 14))  # turtle action

    def checkLearning(self, **d):
        self.lastExecutedMethod = self.checkLearning
        self._say("I'm checking learning")
        # not relevant for attention
        self.write("Do I Know?", font=("Arial", 14))  # turtle action

    # ----- conditional (maybe mirroring) actions -----

    def _mirrorIf(self, fragment, container, reaction):
        # run `reaction` if any OTHER agent in `container` last executed
        # a method whose name contains `fragment`
        count = 0
        for a in self.agentList:
            # str(...).find(...) == -1 means 'not found'
            found = str(a.getLastExecutedMethod()).find(fragment)
            if found > -1 and container in a.containers \
                    and a.number != self.number:
                count += 1
        if count > 0:
            reaction()

    def shakeIf_greenPupil(self, **d):
        self.lastExecutedMethod = self.shakeIf_greenPupil
        self._mirrorIf('shake', 'greenPupil', self.shake)

    def chorusIf_all(self, **d):
        self.lastExecutedMethod = self.chorusIf_all
        self._mirrorIf('answerWell', 'all', self.answerChorus)
| {
"repo_name": "terna/SLAPP3",
"path": "6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/school/Agent.py",
"copies": "1",
"size": "27391",
"license": "cc0-1.0",
"hash": 7063755813656908000,
"line_mean": 35.6680053548,
"line_max": 79,
"alpha_frac": 0.5407250557,
"autogenerated": false,
"ratio": 3.619796484736355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4660521540436355,
"avg_score": null,
"num_lines": null
} |
# Agent.py
from Tools import *
from agTools import *
import commonVar as common
class Agent(SuperAgent):
    """Minimal agent of the SLAPP 'debug' model: it wanders randomly
    inside a rectangular world and reports clock values and position."""

    def __init__(self, number, myWorldState,
                 xPos, yPos, lX=0, rX=0, bY=0, tY=0, agType=""):
        # the zero defaults are placeholders, useful only when the world
        # dimensions are omitted because space is not used
        # the environment
        self.agOperatingSets = []
        self.number = number
        self.lX = lX
        self.rX = rX
        self.bY = bY
        self.tY = tY
        self.myWorldState = myWorldState
        self.agType = agType
        # the agent
        self.xPos = xPos
        self.yPos = yPos
        print("agent", self.agType, "#", self.number,
              "has been created at", self.xPos, ",", self.yPos)

    # a trailing **d in the signatures absorbs an optional dictionary
    # passed as last parameter by the scheduler

    def checkClock(self, **d):
        """Report the current model clock (common.cycle)."""
        print("I'm %s agent # %d: " % (self.agType, self.number), end=' ')
        print("clock is at ", common.cycle)

    def checkSuperClock(self, **d):
        """Report the current super clock (common.cycles)."""
        print("I'm %s agent # %d: " % (self.agType, self.number), end=' ')
        print("clock is at ", common.cycles)

    def randomMovement(self, **k):
        """With the world's general moving probability, displace each
        coordinate by -jump, 0 or +jump, clamped to the world limits."""
        if random.random() > self.myWorldState.getGeneralMovingProb():
            return
        print("agent %s # %d moving" % (self.agType, self.number))
        self.jump = k["jump"]
        self.xPos += randomMove(self.jump)
        self.yPos += randomMove(self.jump)
        # keep the agent inside [lX, rX] x [bY, tY]
        self.xPos = min(max(self.xPos, self.lX), self.rX)
        self.yPos = min(max(self.yPos, self.bY), self.tY)

    def reportPosition(self, **d):
        """Print the agent's current coordinates."""
        print(self.agType, "agent # ", self.number, " is at X = ",
              self.xPos, " Y = ", self.yPos)
# returns -jump, 0, or +jump with equal probability
# (a unit step of -1, 0 or 1, scaled by the jump length)
def randomMove(jump):
    return random.randint(-1, 1) * jump
| {
"repo_name": "terna/SLAPP3",
"path": "6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/debug/Agent.py",
"copies": "1",
"size": "2358",
"license": "cc0-1.0",
"hash": -4903057965197125000,
"line_mean": 33.1739130435,
"line_max": 74,
"alpha_frac": 0.5453774385,
"autogenerated": false,
"ratio": 3.4423357664233576,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44877132049233576,
"avg_score": null,
"num_lines": null
} |
# Agent.py
from Tools import *
from agTools import *
import graphicDisplayGlobalVarAndFunctions as gvf
import commonVar as common
class Agent(SuperAgent):
    def __init__(self, number, myWorldState,
                 xPos=0, yPos=0, lX=0, rX=0, bY=0, tY=0, agType="",
                 sector=0):
        """Create a 'recipes' or 'factories' agent of the production
        network model; a factory is also added as a node of the global
        graph, with its sector, color, position and label."""
        # 0 definitions to be replaced (useful only if the
        # dimensions are omitted and we do not use space)
        # the environment
        self.agOperatingSets = []
        self.number = number
        if agType == 'recipes':
            # a recipe is a sequence of production steps, filled later
            # by setRecipeContent
            self.content = []
            self.canMove = False
            # NB: 'maxLenght' is the (misspelled) name used in commonVar
            self.maxLength = common.maxLenght
            self.maxSector = common.maxSector
            self.factoryInWhichRecipeIs = None
        if agType == 'factories':
            self.sector = sector
            self.recipeWaitingList = []
            common.orderedListOfNodes.append(self)
            # use to keep the order
            # in output (ex. adjacency matrix)
        self.myWorldState = myWorldState
        self.agType = agType
        # the graph: create it lazily at the first agent creation
        if gvf.getGraph() == 0:
            gvf.createGraph()
        # the agent
        if self.agType == "factories":
            if common.verbose:
                print("agent of type", self.agType,
                      "#", self.number, "has been created at", xPos, ",", yPos,
                      ' in production sector ', self.sector)
            common.g.add_node(self)
            common.g.nodes[self]['sector'] = sector
            gvf.colors[self] = "LightGray"
            # colors at http://www.w3schools.com/html/html_colornames.asp
            gvf.pos[self] = (xPos, yPos)
            common.g_labels[self] = str(number)
            # to be used to clone (if any)
            self.xPos = xPos
            self.yPos = yPos
            self.sector = sector
        if self.agType == "recipes":
            if common.verbose:
                print("agent of type", self.agType, "#", self.number,
                      "has been created")
# get graph
def getGraph(self):
return common.g
# ",**d" in the parameter lists of the methods is a place holder
# in case we use, calling the method, a dictionary as last par
# fill the content
def setRecipeContent(self):
if self.agType != "recipes":
return
if self.content != []:
return
self.canMove = True
length = random.randint(1, self.maxLength)
for i in range(length + 1):
self.content.append(random.randint(1, self.maxSector))
self.factoryInWhichRecipeIs = None
if common.verbose:
print("recipe %d now contains the sequence: "
% (self.number), self.content)
# search for factories
def searchForSector(self):
if self.agType != "recipes":
return
if not self.canMove:
return
step = self.content[0] # first step to be done
res = gvf.findNodesFromSector(step)
if common.verbose:
if res == []:
print("recipe %d cannot find a factory for the step of type %d"
% (self.number, step))
else:
if common.verbose:
print("recipe %d found %d factory/ies for the step of type %d"
% (self.number, len(res), step))
# for debug only!!!!!!!!!!!!!
# if res!=[]:
# for aNode in res: print "searchForSector:", aNode.number, aNode.sector
# else: print "searchForSector: no node for sector", step
if res != []:
random.shuffle(res)
if common.verbose:
print(
"recipe %d moving to factory %d" %
(self.number, res[0].number))
# future development: here res[0] simply contain a randomly chosen unit
# (if in res we have more than a unique possibility)
# it is possible to put here intelligence (as the effect of the)
# action of the recipe, searching for data such as production costs of
# the factories, their waiting lists, their quality standards, their
# owners, etc.
# create an edge from self.factoryInWhichRecipeIs to res[0]
# or upgrading the weight of the link
if self.factoryInWhichRecipeIs is not None:
gvf.createEdge(
self.factoryInWhichRecipeIs, res[0])
self.factoryInWhichRecipeIs = res[0]
# self here is the calling recipe
res[0].addToRecipeWaitingList(self)
# check if next step can be produced
def checkIfNextStepCanBeAccomplished(self, aRecipe):
step = aRecipe.content[0] # next step to be done
res = gvf.findNodesFromSector(step)
if res != []:
return True
else:
return False
# waiting list in factories
def addToRecipeWaitingList(self, recipe):
if self.agType != "factories":
return
recipe.canMove = False
self.recipeWaitingList.append(recipe)
if common.verbose:
print("factory %d waiting list contains %d recipe/s" %
(self.number, len(self.recipeWaitingList)))
# update factory label
# the try below is not subject to debug
try:
pseudoL = common.g[self][self]['pseudoLabel']
except BaseException:
pseudoL = ""
gvf.common.g_labels[self] = str(self.number) + " (" +\
str(len(self.recipeWaitingList)) + ") "\
+ "\n" + pseudoL
# produce
def produce(self):
if self.agType != "factories":
return
if self.recipeWaitingList == []:
return
currentRecipe = self.recipeWaitingList[0]
# remove the current recipe (if next sector exists or if the present
# one is the last step)
if len(currentRecipe.content) > 1:
if self.checkIfNextStepCanBeAccomplished(currentRecipe):
self.recipeWaitingList.remove(currentRecipe)
# if next step cannot be accomplished, the recipe is locked
# here
if len(currentRecipe.content) == 1:
self.recipeWaitingList.remove(currentRecipe)
if common.verbose:
print("factory %d producing (recipe %d)" %
(self.number, currentRecipe.number))
currentRecipe.content.pop(0)
currentRecipe.canMove = True
if currentRecipe.content == []:
currentRecipe.canMove = False
if common.verbose:
print("recipe %d completed in factory %d"
% (currentRecipe.number, self.number))
# addAFactory
def addAFactory(self):
if self.agType != "factories":
return
# create a new factory cloning an existing one
# choose randomly a factory (also a cloned one)
toBeCloned = self
# print toBeCloned.number
# creating
common.clonedN += 1
anAgent = Agent(toBeCloned.number * 100 + common.clonedN,
self.myWorldState,
toBeCloned.xPos + modPosition(),
toBeCloned.yPos + modPosition(),
agType=toBeCloned.agType,
sector=toBeCloned.sector)
self.agentList.append(anAgent)
# updating the agentList of all the agents
for anAg in self.agentList:
# in this way, also the new agent
anAg.setAgentList(self.agentList)
# has its agentList (updated)
# udating the agentList in the ModelSwarm instance
common.modelAddress.agentList = self.agentList
if common.verbose:
print("Factory", self.number, "has created factory #",
anAgent.number, "in sector", anAgent.sector)
# remove itself
def removeItself(self):
if self.agType != "factories":
return
toBeRemoved = self
if common.verbose:
print("Factory #", toBeRemoved.number,
"removed itself from sector", toBeRemoved.sector)
self.agentList.remove(toBeRemoved)
# updating the agentList of all the agents
for anAg in self.agentList:
# in this way, also the new agent
anAg.setAgentList(self.agentList)
# has its agentList (updated)
# udating the agentList in the ModelSwarm instance
common.modelAddress.agentList = self.agentList
# print "removeItself verification of surviving agents"
# for i in range(len(self.agentList)):
# if self.agentList[i].agType=="factories":
# print self.agentList[i].number,
common.orderedListOfNodes.remove(toBeRemoved)
# print "\nremoveItself node removed in graph", toBeRemoved, \
# toBeRemoved.number
edges_toBeDropped = []
for edge in common.g.edges():
if edge[0] == toBeRemoved or edge[1] == toBeRemoved:
edges_toBeDropped.append(edge)
if edges_toBeDropped != []:
for edge in edges_toBeDropped:
# print "removeItself edge removed in graph", edge
if edge in common.g_edge_labels:
common.g_edge_labels.pop(edge)
# print "removeItself previous nodes in graph", common.g.nodes()
common.g_labels.pop(toBeRemoved)
# remove factoryInWhichRecipeIs from all the recipes, also
# that having just left this factory and waiting for
# searchForSector order
if self.agentList != []:
for anAg in self.agentList:
if anAg.agType == "recipes" and \
anAg.factoryInWhichRecipeIs == self:
anAg.factoryInWhichRecipeIs = None
# recipes in the waiting list
# print "removeItself recipes in the factory before cleaning"
# if self.recipeWaitingList != []:
# for aR in self.recipeWaitingList:
# print aR.number, aR.factoryInWhichRecipeIs,
# aR.content
# else: print "None"
if self.recipeWaitingList != []:
for aRecipe in self.recipeWaitingList:
aRecipe.content = []
aRecipe.canMove = False
# aRecipe.factoryInWhichRecipeIs=None # done above
"""
print "removeItself recipes in the factory after cleaning"
if self.recipeWaitingList != []:
for aR in self.recipeWaitingList:
print aR.number, aR.factoryInWhichRecipeIs, aR.content
else: print "None"
"""
common.g.remove_node(toBeRemoved)
# print "removeItself residual nodes in graph", common.g.nodes()
def modPosition():
    """Return a random position offset of magnitude 6-8, sign at random."""
    goNegative = random.randint(0, 1) == 0
    return random.randint(-8, -6) if goNegative else random.randint(6, 8)
| {
"repo_name": "terna/SLAPP3",
"path": "6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/production/Agent.py",
"copies": "1",
"size": "11126",
"license": "cc0-1.0",
"hash": 8709830619486130000,
"line_mean": 34.3206349206,
"line_max": 87,
"alpha_frac": 0.5596800288,
"autogenerated": false,
"ratio": 4.259571209800919,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5319251238600919,
"avg_score": null,
"num_lines": null
} |
# Agent.py
from Tools import *
from agTools import *
class Agent(SuperAgent):
    """A toy 2-D agent that wanders inside a bounded rectangle.

    Inherits from SuperAgent (project class defined elsewhere); ``random``,
    ``randomMove`` and ``common`` come from the star-imports at the top of
    the file.
    """

    def __init__(self, number, myWorldState,
                 xPos, yPos, lX=0, rX=0, bY=0, tY=0, agType=""):
        # the zero defaults for the bounds are placeholders, useful only
        # when the dimensions are omitted and space is not used
        self.agOperatingSets = []
        self.number = number
        # rectangular world limits: left/right X, bottom/top Y
        self.lX, self.rX = lX, rX
        self.bY, self.tY = bY, tY
        if myWorldState != 0:
            self.myWorldState = myWorldState
        self.agType = agType
        # the agent itself: current position
        self.xPos = xPos
        self.yPos = yPos
        print("agent", self.agType, "#", self.number,
              "has been created at", self.xPos, ",", self.yPos)

    # NOTE: "**d" in the method signatures is a placeholder, in case the
    # caller passes a dictionary as the last parameter

    def eat(self, **d):
        """Report that there is nothing to eat."""
        print("I'm %s agent # %d: " % (self.agType, self.number), end=' ')
        print("nothing to eat here!")

    def dance(self, **d):
        """Print a taste-specific reaction to dancing."""
        print("I'm %s agent # %d: " % (self.agType, self.number), end=' ')
        replies = {"tasteA": "I'm an A, nice to dance here!",
                   "tasteB": "I'm a B, not so nice to dance here!",
                   "tasteC": "I'm a C, why to dance here?"}
        print(replies.get(self.agType, "it's not time to dance!"))

    def randomMovement(self, **k):
        """Take one random step, scaled by k["jump"] (default 1).

        The move happens only with the world's general moving probability;
        the position is clipped at the rectangle borders (no wrap-around).
        """
        if random.random() > self.myWorldState.getGeneralMovingProb():
            return
        print("agent %s # %d moving" % (self.agType, self.number))
        self.jump = k.get("jump", 1)
        self.xPos += randomMove(self.jump)
        self.yPos += randomMove(self.jump)
        # clip to the world rectangle
        self.xPos = min(max(self.xPos, self.lX), self.rX)
        self.yPos = min(max(self.yPos, self.bY), self.tY)

    def reportPosition(self, **d):
        """Print the agent's current coordinates."""
        print(self.agType, "agent # ", self.number, " is at X = ",
              self.xPos, " Y = ", self.yPos)

    def reportPos(self, **d):
        """Return the current (x, y) position."""
        return (self.xPos, self.yPos)

    # common is available via the Tools star-import (SLAPP >= 1.35)
    def addTask(self):
        """Schedule an "all dance" task for the next cycle."""
        newTask = "all dance"
        print("agent", self.number, "adding a task for cycle",
              common.cycle + 1)
        common.addTasks.setdefault(common.cycle + 1, []).append(newTask)

    def elimTask(self):
        """Schedule removal of the "tasteC eat" task two cycles ahead."""
        killTask = "tasteC eat"
        print("agent", self.number, "eliminating a task for cycle",
              common.cycle + 2)
        common.elimTasks.setdefault(common.cycle + 2, []).append(killTask)
# returns -jump, 0 or +jump with equal probability
def randomMove(jump):
    """Return -jump, 0 or +jump, each with probability 1/3."""
    direction = random.randint(-1, 1)
    return direction * jump
| {
"repo_name": "terna/SLAPP3",
"path": "6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/basic2D/Agent.py",
"copies": "1",
"size": "3576",
"license": "cc0-1.0",
"hash": -1957749164653530400,
"line_mean": 31.5090909091,
"line_max": 74,
"alpha_frac": 0.5352348993,
"autogenerated": false,
"ratio": 3.471844660194175,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4507079559494175,
"avg_score": null,
"num_lines": null
} |
# Agent.py
from Tools import *
from agTools import *
class Agent(SuperAgent):
    """A toy 2-D agent wandering inside a bounded rectangle.

    Inherits from SuperAgent (project class defined elsewhere); ``random``,
    ``randomMove`` and ``common`` come from the star-imports at the top of
    the file. The default world is the square [-20, 19] x [-20, 19].
    """

    def __init__(self, number, myWorldState,
                 xPos, yPos, lX=-20, rX=19, bY=-20, tY=19, agType=""):
        # the environment
        self.agOperatingSets = []
        self.number = number
        # rectangular world limits: left/right X, bottom/top Y
        self.lX, self.rX = lX, rX
        self.bY, self.tY = bY, tY
        if myWorldState != 0:
            self.myWorldState = myWorldState
        self.agType = agType
        # the agent itself: current position
        self.xPos = xPos
        self.yPos = yPos
        print("agent", self.agType, "#", self.number,
              "has been created at", self.xPos, ",", self.yPos)

    # NOTE: "**d" in the method signatures is a placeholder, in case the
    # caller passes a dictionary as the last parameter

    def eat(self, **d):
        """Report that there is nothing to eat."""
        print("I'm %s agent # %d: " % (self.agType, self.number), end=' ')
        print("nothing to eat here!")

    def dance(self, **d):
        """Print a taste-specific reaction to dancing."""
        print("I'm %s agent # %d: " % (self.agType, self.number), end=' ')
        replies = {"tasteA": "I'm an A, nice to dance here!",
                   "tasteB": "I'm a B, not so nice to dance here!",
                   "tasteC": "I'm a C, why to dance here?"}
        print(replies.get(self.agType, "it's not time to dance!"))

    def randomMovement(self, **k):
        """Take one random step, scaled by k["jump"] (default 1).

        The move happens only with the world's general moving probability;
        the position is clipped at the rectangle borders (no wrap-around).
        """
        if random.random() > self.myWorldState.getGeneralMovingProb():
            return
        print("agent %s # %d moving" % (self.agType, self.number))
        self.jump = k.get("jump", 1)
        self.xPos += randomMove(self.jump)
        self.yPos += randomMove(self.jump)
        # clip to the world rectangle
        self.xPos = min(max(self.xPos, self.lX), self.rX)
        self.yPos = min(max(self.yPos, self.bY), self.tY)

    def reportPosition(self, **d):
        """Print the agent's current coordinates."""
        print(self.agType, "agent # ", self.number, " is at X = ",
              self.xPos, " Y = ", self.yPos)

    # common is available via the Tools star-import (SLAPP >= 1.35)
    def addTask(self):
        """Schedule an "all dance" task for the next cycle."""
        newTask = "all dance"
        print("agent", self.number, "adding a task for cycle",
              common.cycle + 1)
        common.addTasks.setdefault(common.cycle + 1, []).append(newTask)

    def elimTask(self):
        """Schedule removal of the "tasteC eat" task two cycles ahead."""
        killTask = "tasteC eat"
        print("agent", self.number, "eliminating a task for cycle",
              common.cycle + 2)
        common.elimTasks.setdefault(common.cycle + 2, []).append(killTask)
# returns -jump, 0 or +jump with equal probability
def randomMove(jump):
    """Return -jump, 0 or +jump, each with probability 1/3."""
    direction = random.randint(-1, 1)
    return direction * jump
| {
"repo_name": "terna/SLAPP3",
"path": "6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/basic2classesPathsToTypes/Agent.py",
"copies": "1",
"size": "3376",
"license": "cc0-1.0",
"hash": -1119020441145169000,
"line_mean": 31.7766990291,
"line_max": 74,
"alpha_frac": 0.5337677725,
"autogenerated": false,
"ratio": 3.4378818737270875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9470319682136118,
"avg_score": 0.0002659928181939087,
"num_lines": 103
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.