text stringlengths 0 1.05M | meta dict |
|---|---|
"""An example term implementation. This implements terms used for dask"""
from __future__ import absolute_import, division, print_function
from itertools import count
from pinyon.core import Context
def istask(x):
    """Return True if ``x`` is a runnable task.

    A task is a non-empty tuple whose first element is callable.

    Fix: the original returned the short-circuited operand (e.g. the empty
    tuple itself for ``istask(())``) instead of a bool; the result is now
    always a plain ``True``/``False``, which is backward compatible in any
    boolean context.

    Example
    -------
    >>> inc = lambda x: x + 1
    >>> istask((inc, 1))
    True
    >>> istask(1)
    False
    """
    return bool(isinstance(x, tuple) and x and callable(x[0]))
def head(task):
    """Return the top-level node of a task.

    For a task this is its callable; for a list it is the ``list`` type
    itself; any other object is returned unchanged.
    """
    if istask(task):
        return task[0]
    if isinstance(task, list):
        return list
    return task
def args(task):
    """Return the arguments of the current task.

    Tasks yield their tail, lists yield themselves, and anything else has
    no arguments (empty tuple).
    """
    if istask(task):
        return task[1:]
    if isinstance(task, list):
        return task
    return ()
def subs(expr, sub_dict):
    """Perform direct matching substitution.

    Leaves are replaced via ``sub_dict`` lookups; compound expressions are
    rebuilt with recursively substituted arguments.
    """
    if expr in sub_dict:
        return sub_dict[expr]
    arguments = args(expr)
    if not arguments:
        return expr
    replaced = (subs(a, sub_dict) for a in arguments)
    return rebuild(head(expr), replaced)
def rebuild(func, args):
    """Reassemble a task tuple from a callable and an iterable of arguments."""
    parts = [func]
    parts.extend(args)
    return tuple(parts)
# Bundle the term-protocol callbacks (head/args/subs/rebuild) into a pinyon
# Context describing s-expression style tasks.
sexpr_context = Context(head, args, subs, rebuild)
# Other fun things for a term implementation:
def run(task):
    """Execute a task, recursively evaluating its arguments first."""
    if not istask(task):
        return task
    func = task[0]
    evaluated = (run(arg) for arg in task[1:])
    return func(*evaluated)
def funcify(args, task):
    """Compile a task into a callable function.

    ``args`` are the symbols that become the generated lambda's parameters;
    any non-literal objects embedded in ``task`` are registered in ``lookup``
    under fresh ``_gensym_N`` names, which serve as the eval namespace.

    NOTE(review): this evaluates generated source with ``eval``; only safe
    when ``args`` and ``task`` come from trusted callers.
    """
    lookup = {}
    names = ("_gensym_%d" % i for i in count(1))
    arg_string = ", ".join(str(i) for i in args)
    code_string = _compile(args, task, lookup, names)
    code = "lambda {0}: {1}".format(arg_string, code_string)
    return eval(code, lookup)
# Helpers
def _code_print(args, t, lookup, names):
"""Print t as code"""
if t in args:
return str(t)
elif isinstance(t, (int, float, str, bool)):
return str(t)
else:
name = next(names)
lookup[name] = t
return name
def _compile(func_args, task, lookup, names):
    """Print a task as source text. Modifies ``lookup`` in place.

    Tasks are rendered as ``f(a, b, ...)`` calls with the callable and each
    argument recursively printed; non-task values fall through to
    ``_code_print``.
    """
    if istask(task):
        func = _code_print(func_args, head(task), lookup, names)
        new_args = (_compile(func_args, i, lookup, names) for i in args(task))
        return "{0}({1})".format(func, ", ".join(new_args))
    else:
        return _code_print(func_args, task, lookup, names)
| {
"repo_name": "jcrist/pinyon",
"path": "pinyon/term/sexpr.py",
"copies": "1",
"size": "2472",
"license": "bsd-3-clause",
"hash": -611100087342270800,
"line_mean": 21.2702702703,
"line_max": 78,
"alpha_frac": 0.5934466019,
"autogenerated": false,
"ratio": 3.4915254237288136,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45849720256288135,
"avg_score": null,
"num_lines": null
} |
""" An example that defines two apps, one with a single hbox and
one with hboxes in vboxes in hboxes. For performance testing
"""
import time
import flexx
from flexx import ui
class MyApp1(ui.App):
    """Minimal app: one VBox holding three fixed-size (flex=0) buttons."""
    def init(self):
        with ui.VBox() as self.l1:
            ui.Button(text='Box A', flex=0)
            ui.Button(text='Box B', flex=0)
            ui.Button(text='Box C is a bit longer', flex=0)
class MyApp2(ui.App):
    """Layout stress test: an HBox holding two identical VBox columns, each
    with four HBox rows of buttons using varying flex factors."""
    def init(self):
        with ui.HBox():
            with ui.VBox():
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=0)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=0)
                with ui.HBox(flex=0):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=1)
                    ui.Button(text='Box C is a bit longer', flex=1)
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=2)
                with ui.HBox(flex=2):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=2)
                    ui.Button(text='Box C is a bit longer', flex=3)
            with ui.VBox():
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=0)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=0)
                with ui.HBox(flex=0):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=1)
                    ui.Button(text='Box C is a bit longer', flex=1)
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=2)
                with ui.HBox(flex=2):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=2)
                    ui.Button(text='Box C is a bit longer', flex=3)
class MyApp3(ui.App):
    """Two FormLayout columns side by side.

    NOTE(review): both ``with`` blocks bind ``self.form`` (and self.b1..b3),
    so the second form's references overwrite the first's -- presumably
    intentional in a throwaway demo, but verify.
    """
    def init(self):
        with ui.HBox(spacing=20):
            with ui.FormLayout() as self.form:
                # todo: can this be written with one line per row?
                # e.g. self.b1 = ui.Button(label='Name', text='Hola')
                ui.Label(text='Name:')
                self.b1 = ui.Button(text='Hola')
                ui.Label(text='Age:')
                self.b2 = ui.Button(text='Hello world')
                ui.Label(text='Favorite color:')
                self.b3 = ui.Button(text='Foo bar')
                #ui.Widget(flex=1)
            with ui.FormLayout() as self.form:
                # e.g. self.b1 = ui.Button(label='Name', text='Hola')
                ui.Widget(flex=1)  # Add a flexer
                ui.Widget()
                ui.Label(text='Pet name:')
                self.b1 = ui.Button(text='Hola')
                ui.Label(text='Pet Age:')
                self.b2 = ui.Button(text='Hello world')
                ui.Label(text='Pet\'s Favorite color:')
                self.b3 = ui.Button(text='Foo bar')
                ui.Widget(flex=2)
class MyApp4(ui.App):
    """PinboardLayout demo: integer pos is absolute pixels, fractional pos
    is a percentage of the parent."""
    def init(self):
        with ui.PinboardLayout():
            self.b1 = ui.Button(text='Stuck at (20, 20)', pos=(20, 30))
            self.b2 = ui.Button(text='Dynamic at (20%, 20%)', pos=(0.2, 0.2))
            self.b3 = ui.Button(text='Dynamic at (50%, 70%)', pos=(0.5, 0.7))
class MyApp5(ui.App):
    """Splitter demo: a horizontal splitter whose second pane is a vertical
    splitter of buttons."""
    def init(self):
        with ui.HSplitter() as self.l1:
            ui.Button(text='Right A')
            with ui.VSplitter() as self.l2:
                ui.Button(text='Right B')
                ui.Button(text='Right C')
                ui.Button(text='Right D')
class MyApp6(ui.App):
    """PlotLayout demo with two tool panels added beside the plot."""
    def init(self):
        layout = ui.PlotLayout()
        layout.add_tools('Edit plot',
                         ui.Button(text='do this'),
                         ui.Button(text='do that'))
        layout.add_tools('Plot info',
                         ui.ProgressBar(value='0.3'),
                         ui.Label(text='The plot aint pretty'))
# Launch the first demo app in a browser runtime and enter the event loop.
# The commented export calls were used to generate static demo pages.
app = MyApp1(runtime='browser')
ui.run()
#MyApp1.export('/home/almar/dev/pylib/flexx/_website/_static/boxdemo_table1.html')
#MyApp2.export('/home/almar/dev/pylib/flexx/_website/_static/boxdemo_table2.html')
| {
"repo_name": "hungle90/flexx",
"path": "examples/ui-tests/box_performance.py",
"copies": "21",
"size": "4634",
"license": "bsd-2-clause",
"hash": -9032786051202666000,
"line_mean": 36.674796748,
"line_max": 82,
"alpha_frac": 0.4721622788,
"autogenerated": false,
"ratio": 3.5701078582434516,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01119091801115709,
"num_lines": 123
} |
""" An example that defines two apps, one with a single hbox and
one with hboxes in vboxes in hboxes. For performance testing
"""
import time
import zoof
from zoof import ui
class MyApp1(ui.App):
    """Minimal app: one HBox holding three fixed-size (flex=0) buttons."""
    def init(self):
        with ui.HBox(self):
            ui.Button(text='Box A', flex=0)
            ui.Button(text='Box B', flex=0)
            ui.Button(text='Box C is a bit longer', flex=0)
class MyApp2(ui.App):
    """Layout stress test: an HBox holding two identical VBox columns, each
    with four HBox rows of buttons using varying flex factors."""
    def init(self):
        with ui.HBox(self):
            with ui.VBox():
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=0)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=0)
                with ui.HBox(flex=0):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=1)
                    ui.Button(text='Box C is a bit longer', flex=1)
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=2)
                with ui.HBox(flex=2):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=2)
                    ui.Button(text='Box C is a bit longer', flex=3)
            with ui.VBox():
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=0)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=0)
                with ui.HBox(flex=0):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=1)
                    ui.Button(text='Box C is a bit longer', flex=1)
                with ui.HBox(flex=1):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=0)
                    ui.Button(text='Box C is a bit longer', flex=2)
                with ui.HBox(flex=2):
                    ui.Button(text='Box A', flex=1)
                    ui.Button(text='Box B', flex=2)
                    ui.Button(text='Box C is a bit longer', flex=3)
# Launch the first demo app and enter the event loop.  The commented export
# calls were used to generate static demo pages.
app = MyApp1()
ui.run()
#MyApp1.export('/home/almar/projects/pylib/zoof/_website/_static/boxdemo_table1.html')
#MyApp2.export('/home/almar/projects/pylib/zoof/_website/_static/boxdemo_table2.html')
| {
"repo_name": "almarklein/zoof",
"path": "examples/ui/box_performance.py",
"copies": "1",
"size": "2492",
"license": "bsd-2-clause",
"hash": 6770527543598483000,
"line_mean": 35.115942029,
"line_max": 86,
"alpha_frac": 0.4739165329,
"autogenerated": false,
"ratio": 3.5804597701149423,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4554376303014942,
"avg_score": null,
"num_lines": null
} |
# An example to get a tile source, save a thumbnail, and iterate through the
# tiles at a specific magnification, reporting the average color of each tile.
import argparse
import numpy
import large_image
# Explicitly set the caching method before we request any data; the 'python'
# backend keeps tile caching in-process (no external cache service needed).
large_image.config.setConfig('cache_backend', 'python')
def average_color(imagePath, magnification=None):
    """
    Print the average color for a tiled image file.

    :param imagePath: path of the file to analyze.
    :param magnification: optional magnification to use for the analysis.
    :return: the area-weighted mean color as a numpy array, one value per
        channel in the tiles' channel order.
    """
    source = large_image.getTileSource(imagePath)
    # get a thumbnail no larger than 1024x1024 pixels
    thumbnail, mimeType = source.getThumbnail(
        width=1024, height=1024, encoding='JPEG')
    print('Made a thumbnail of type %s taking %d bytes' % (
        mimeType, len(thumbnail)))
    # We could save it, if we want to.
    # open('/tmp/thumbnail.jpg', 'wb').write(thumbnail)
    tileMeans = []
    tileWeights = []
    # iterate through the tiles at a particular magnification:
    for tile in source.tileIterator(
            format=large_image.tilesource.TILE_FORMAT_NUMPY,
            scale={'magnification': magnification},
            resample=True):
        # The tile image data is in tile['tile'] and is a numpy
        # multi-dimensional array
        mean = numpy.mean(tile['tile'], axis=(0, 1))
        tileMeans.append(mean)
        # Weight each tile by its pixel count so smaller edge tiles do not
        # skew the overall average.
        tileWeights.append(tile['width'] * tile['height'])
        print('x: %d y: %d w: %d h: %d mag: %g color: %g %g %g' % (
            tile['x'], tile['y'], tile['width'], tile['height'],
            tile['magnification'], mean[0], mean[1], mean[2]))
    mean = numpy.average(tileMeans, axis=0, weights=tileWeights)
    print('Average color: %g %g %g' % (mean[0], mean[1], mean[2]))
    return mean
if __name__ == '__main__':
    # Command-line entry point: image path plus an optional magnification.
    parser = argparse.ArgumentParser(
        description='Compute the mean color of a tiled image')
    parser.add_argument('path', metavar='image-path', type=str,
                        help='Path of the tiled image to examine')
    parser.add_argument('-m', '--magnification', dest='magnification',
                        type=float,
                        help='Magnification to use to examine the image')
    args = parser.parse_args()
    average_color(args.path, args.magnification)
| {
"repo_name": "DigitalSlideArchive/large_image",
"path": "examples/average_color.py",
"copies": "1",
"size": "2357",
"license": "apache-2.0",
"hash": 6705809868174848000,
"line_mean": 39.6379310345,
"line_max": 78,
"alpha_frac": 0.6359779381,
"autogenerated": false,
"ratio": 3.8513071895424837,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4987285127642484,
"avg_score": null,
"num_lines": null
} |
"""An example to run minitaur gym environment with randomized terrain.
"""
import math
import numpy as np
import tensorflow as tf
#from google3.pyglib import app
#from google3.pyglib import flags
from pybullet_envs.minitaur.envs import minitaur_randomize_terrain_gym_env
# Fix: the original referenced ``flags`` from the commented-out google3
# imports above, which raises NameError at import time.  Use the flags
# module bundled with TensorFlow 1.x (absl-style) instead.
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_enum("example_name", "reset", ["sine", "reset"],
                     "The name of the example: sine or reset.")
def ResetTerrainExample():
    """An example showing resetting random terrain env.

    Fix: the loops used Python 2's ``xrange``, which raises NameError on
    Python 3; ``range`` behaves identically here.
    """
    num_reset = 10
    steps = 100
    env = minitaur_randomize_terrain_gym_env.MinitaurRandomizeTerrainGymEnv(
        render=True, leg_model_enabled=False, motor_velocity_limit=np.inf, pd_control_enabled=True)
    # Hold all eight motors at pi/2 (the reset pose) for each episode.
    action = [math.pi / 2] * 8
    for _ in range(num_reset):
        env.reset()
        for _ in range(steps):
            _, _, done, _ = env.step(action)
            if done:
                break
def SinePolicyExample():
    """An example of minitaur walking with a sine gait.

    Fix: ``xrange`` -> ``range`` for Python 3 compatibility.
    """
    env = minitaur_randomize_terrain_gym_env.MinitaurRandomizeTerrainGymEnv(
        render=True, motor_velocity_limit=np.inf, pd_control_enabled=True, on_rack=False)
    sum_reward = 0
    steps = 200
    amplitude_1_bound = 0.5
    amplitude_2_bound = 0.5
    speed = 40
    for step_counter in range(steps):
        # Matches the environment's internal timestep.
        time_step = 0.01
        t = step_counter * time_step
        amplitude1 = amplitude_1_bound
        amplitude2 = amplitude_2_bound
        # Steer left for the first 10 s, right for the next 10 s, then straight.
        steering_amplitude = 0
        if t < 10:
            steering_amplitude = 0.5
        elif t < 20:
            steering_amplitude = -0.5
        else:
            steering_amplitude = 0
        # Applying asymmetrical sine gaits to different legs can steer the minitaur.
        a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude)
        a2 = math.sin(t * speed + math.pi) * (amplitude1 - steering_amplitude)
        a3 = math.sin(t * speed) * amplitude2
        a4 = math.sin(t * speed + math.pi) * amplitude2
        action = [a1, a2, a2, a1, a3, a4, a4, a3]
        _, reward, _, _ = env.step(action)
        sum_reward += reward
def main(unused_argv):
    """Run the example selected by the --example_name flag."""
    if FLAGS.example_name == "sine":
        SinePolicyExample()
    elif FLAGS.example_name == "reset":
        ResetTerrainExample()
if __name__ == "__main__":
    tf.logging.set_verbosity(tf.logging.INFO)
    # Fix: ``app`` came from the commented-out google3 import and was
    # undefined; tf.app provides the same absl-style entry point.
    tf.app.run()
| {
"repo_name": "MadManRises/Madgine",
"path": "shared/bullet3-2.89/examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_randomize_terrain_gym_env_example.py",
"copies": "2",
"size": "2206",
"license": "mit",
"hash": -7039910983589351000,
"line_mean": 27.6493506494,
"line_max": 97,
"alpha_frac": 0.6640979148,
"autogenerated": false,
"ratio": 3.098314606741573,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47624125215415736,
"avg_score": null,
"num_lines": null
} |
"""An example to run of the minitaur gym environment with sine gaits.
"""
import csv
import math
import os
import inspect
# Make the repository root importable when this file is run directly
# (three directory levels up from this example).
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(os.path.dirname(currentdir)))
print("parentdir=", parentdir)
os.sys.path.insert(0, parentdir)
import argparse
import numpy as np
import tensorflow as tf
from pybullet_envs.minitaur.envs import minitaur_gym_env
import time
#FLAGS = flags.FLAGS
#flags.DEFINE_enum(
# "example_name", "sine", ["sine", "reset", "stand", "overheat"],
# "The name of the example: sine, reset, stand, or overheat.")
#flags.DEFINE_string("output_filename", None, "The name of the output CSV file."
# "Each line in the CSV file will contain the action, the "
# "motor position, speed and torques.")
#flags.DEFINE_string("log_path", None, "The directory to write the log file.")
def WriteToCSV(filename, actions_and_observations):
    """Write simulation data to file.

    Save actions and observed angles, angular velocities and torques for data
    analysis.

    Args:
      filename: The file to write. Can be locally or on CNS.
      actions_and_observations: the interested simulation quantities to save.
    """
    # NOTE(review): opening in "wb" and handing the handle to csv.writer is
    # the Python 2 convention; on Python 3 csv expects a text-mode file --
    # confirm the intended interpreter.
    with tf.gfile.Open(filename, "wb") as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=",")
        for row in actions_and_observations:
            csv_writer.writerow(row)
def ResetPoseExample(log_path=None):
    """An example that the minitaur stands still using the reset pose.

    Args:
      log_path: The directory that the log files are written to. If log_path is
        None, no logs will be written.
    """
    steps = 10000
    environment = minitaur_gym_env.MinitaurGymEnv(
        urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION,
        render=True,
        leg_model_enabled=False,
        motor_velocity_limit=np.inf,
        pd_control_enabled=True,
        accurate_motor_model_enabled=True,
        motor_overheat_protection=True,
        hard_reset=False,
        log_path=log_path)
    # Hold all eight motors at pi/2 (the reset pose) for the whole episode.
    action = [math.pi / 2] * 8
    for _ in range(steps):
        _, _, done, _ = environment.step(action)
        # Slow the loop down to roughly real time for rendering.
        time.sleep(1. / 100.)
        if done:
            break
def MotorOverheatExample(log_path=None, output_filename=None):
    """An example of minitaur motor overheat protection is triggered.

    The minitaur is leaning forward and the motors are getting above threshold
    torques. The overheat protection will be triggered in ~1 sec.

    Args:
      log_path: The directory that the log files are written to. If log_path is
        None, no logs will be written.
      output_filename: Optional CSV path for the recorded actions and
        observations; nothing is written when None.  (Fix: the original read
        ``FLAGS.output_filename``, but the flag definitions at the top of the
        file are commented out, so FLAGS is undefined and this raised
        NameError.)
    """
    environment = minitaur_gym_env.MinitaurGymEnv(
        urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION,
        render=True,
        leg_model_enabled=False,
        motor_velocity_limit=np.inf,
        motor_overheat_protection=True,
        accurate_motor_model_enabled=True,
        motor_kp=1.20,
        motor_kd=0.00,
        on_rack=False,
        log_path=log_path)
    # Lean forward: alternate motors get opposite 0.5-rad offsets per side.
    action = [2.0] * 8
    for i in range(8):
        action[i] = 2.0 - 0.5 * (-1 if i % 2 == 0 else 1) * (-1 if i < 4 else 1)
    steps = 500
    actions_and_observations = []
    for step_counter in range(steps):
        # Matches the internal timestep.
        time_step = 0.01
        t = step_counter * time_step
        current_row = [t]
        current_row.extend(action)
        observation, _, _, _ = environment.step(action)
        current_row.extend(observation.tolist())
        actions_and_observations.append(current_row)
        time.sleep(1. / 100.)
    if output_filename is not None:
        WriteToCSV(output_filename, actions_and_observations)
def SineStandExample(log_path=None, output_filename=None):
    """An example of minitaur standing and squatting on the floor.

    To validate the accurate motor model we command the robot and sit and stand up
    periodically in both simulation and experiment. We compare the measured motor
    trajectories, torques and gains. The results are at:
    https://colab.corp.google.com/v2/notebook#fileId=0BxTIAnWh1hb_ZnkyYWtNQ1RYdkU&scrollTo=ZGFMl84kKqRx

    Args:
      log_path: The directory that the log files are written to. If log_path is
        None, no logs will be written.
      output_filename: Optional CSV path for the recorded actions and
        observations; nothing is written when None.  (Fix: the original read
        ``FLAGS.output_filename``, but the flag definitions at the top of the
        file are commented out, so FLAGS is undefined and this raised
        NameError.)
    """
    environment = minitaur_gym_env.MinitaurGymEnv(
        urdf_version=minitaur_gym_env.RAINBOW_DASH_V0_URDF_VERSION,
        render=True,
        leg_model_enabled=False,
        motor_velocity_limit=np.inf,
        motor_overheat_protection=True,
        accurate_motor_model_enabled=True,
        motor_kp=1.20,
        motor_kd=0.02,
        on_rack=False,
        log_path=log_path)
    steps = 1000
    amplitude = 0.5
    speed = 3
    actions_and_observations = []
    for step_counter in range(steps):
        # Matches the internal timestep.
        time_step = 0.01
        t = step_counter * time_step
        current_row = [t]
        # All eight motors follow the same sine wave around the standing pose.
        action = [math.sin(speed * t) * amplitude + math.pi / 2] * 8
        current_row.extend(action)
        observation, _, _, _ = environment.step(action)
        current_row.extend(observation.tolist())
        actions_and_observations.append(current_row)
        time.sleep(1. / 100.)
    if output_filename is not None:
        WriteToCSV(output_filename, actions_and_observations)
def SinePolicyExample(log_path=None):
    """An example of minitaur walking with a sine gait.

    Args:
      log_path: The directory that the log files are written to. If log_path is
        None, no logs will be written.
    """
    environment = minitaur_gym_env.MinitaurGymEnv(
        urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION,
        render=True,
        motor_velocity_limit=np.inf,
        pd_control_enabled=True,
        hard_reset=False,
        on_rack=False,
        log_path=log_path)
    sum_reward = 0
    steps = 20000
    amplitude_1_bound = 0.5
    amplitude_2_bound = 0.5
    speed = 40
    for step_counter in range(steps):
        # Matches the environment's internal timestep.
        time_step = 0.01
        t = step_counter * time_step
        amplitude1 = amplitude_1_bound
        amplitude2 = amplitude_2_bound
        # Steer left for the first 10 s, right for the next 10 s, then straight.
        steering_amplitude = 0
        if t < 10:
            steering_amplitude = 0.5
        elif t < 20:
            steering_amplitude = -0.5
        else:
            steering_amplitude = 0
        # Applying asymmetrical sine gaits to different legs can steer the minitaur.
        a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude)
        a2 = math.sin(t * speed + math.pi) * (amplitude1 - steering_amplitude)
        a3 = math.sin(t * speed) * amplitude2
        a4 = math.sin(t * speed + math.pi) * amplitude2
        action = [a1, a2, a2, a1, a3, a4, a4, a3]
        _, reward, done, _ = environment.step(action)
        time.sleep(1. / 100.)
        sum_reward += reward
        # On episode end, log the return and start a fresh episode.
        if done:
            tf.logging.info("Return is {}".format(sum_reward))
            environment.reset()
def main():
    """Parse the --env flag and dispatch to the selected example."""
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--env',
                        help='environment ID (0==sine, 1==stand, 2=reset, 3=overheat)',
                        type=int,
                        default=0)
    args = parser.parse_args()
    print("--env=" + str(args.env))
    # Dispatch table instead of an if-chain; unknown IDs run nothing,
    # matching the original behavior.
    examples = {
        0: SinePolicyExample,
        1: SineStandExample,
        2: ResetPoseExample,
        3: MotorOverheatExample,
    }
    example = examples.get(args.env)
    if example is not None:
        example()
# Script entry point.
if __name__ == '__main__':
    main()
| {
"repo_name": "MTASZTAKI/ApertusVR",
"path": "plugins/physics/bulletPhysics/3rdParty/bullet3/examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_gym_env_example.py",
"copies": "2",
"size": "7020",
"license": "mit",
"hash": -4702521174686219000,
"line_mean": 29.6550218341,
"line_max": 103,
"alpha_frac": 0.6663817664,
"autogenerated": false,
"ratio": 3.245492371705964,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9861794889012214,
"avg_score": 0.010015849818749888,
"num_lines": 229
} |
"""An example to run the minitaur environment of alternating legs.
"""
import time
import os, inspect
# Make the package root importable when this file is run directly
# (two directory levels up from this example).
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)
import os
import numpy as np
import tensorflow as tf
from pybullet_envs.minitaur.envs import minitaur_alternating_legs_env
from pybullet_envs.minitaur.envs import minitaur_gym_env
from pybullet_envs.minitaur.envs.env_randomizers import minitaur_alternating_legs_env_randomizer as randomizer_lib
#FLAGS = flags.FLAGS
#flags.DEFINE_string("log_path", None, "The directory to write the log file.")
def hand_tuned_agent(observation, timestamp):
    """A hand tuned controller structure with vizier optimized parameters.

    Args:
      observation: The observation of the environment: roll, pitch, and the
        rates of change of roll and pitch.
      timestamp: The simulated time since the simulation reset.

    Returns:
      Delta desired motor angles to be added to the reference motion of
      alternating legs for balance.
    """
    roll, pitch, roll_rate, pitch_rate = (observation[0], observation[1],
                                          observation[2], observation[3])
    # Hand-tuned linear feedback balance controller: when the base tilts,
    # the swing legs move toward the falling direction and extend so they
    # touch the ground earlier.
    roll_gain, pitch_gain = 1.0, 1.0
    roll_rate_gain, pitch_rate_gain = 0.1, 0.1
    r = roll_gain * roll + roll_rate_gain * roll_rate
    p = pitch_gain * pitch + pitch_rate_gain * pitch_rate
    # Leg corrections for the two alternating swing phases.
    phase_a = [0, -p, -p, 0, 0, -p - r, p + r, 0]
    phase_b = [-p, 0, 0, -p, p - r, 0, 0, -p + r]
    if (timestamp // minitaur_alternating_legs_env.STEP_PERIOD) % 2:
        return phase_b
    return phase_a
def hand_tuned_balance_example(log_path=None):
    """An example that the minitaur balances while alternating its legs.

    Args:
      log_path: The directory that the log files are written to. If log_path is
        None, no logs will be written.
    """
    steps = 1000
    episodes = 5
    randomizer = randomizer_lib.MinitaurAlternatingLegsEnvRandomizer()
    environment = minitaur_alternating_legs_env.MinitaurAlternatingLegsEnv(
        urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION,
        render=True,
        num_steps_to_log=steps,
        pd_latency=0.002,
        control_latency=0.02,
        remove_default_joint_damping=True,
        on_rack=False,
        env_randomizer=randomizer,
        log_path=log_path)
    # Fixed seed so the randomized episodes are reproducible.
    np.random.seed(100)
    avg_reward = 0
    for i in range(episodes):
        sum_reward = 0
        observation = environment.reset()
        for _ in range(steps):
            # Sleep to prevent serial buffer overflow on microcontroller.
            time.sleep(0.002)
            action = hand_tuned_agent(observation, environment.minitaur.GetTimeSinceReset())
            observation, reward, done, _ = environment.step(action)
            sum_reward += reward
            if done:
                break
        tf.logging.info("reward {}: {}".format(i, sum_reward))
        avg_reward += sum_reward
    tf.logging.info("avg_reward: {}\n\n\n".format(avg_reward / episodes))
def main(unused_argv):
    """Entry point: run the example, logging to the current directory."""
    hand_tuned_balance_example(log_path=os.getcwd())
if __name__ == "__main__":
    tf.logging.set_verbosity(tf.logging.INFO)
    tf.app.run()
| {
"repo_name": "MadManRises/Madgine",
"path": "shared/bullet3-2.89/examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_alternating_legs_env_example.py",
"copies": "2",
"size": "3761",
"license": "mit",
"hash": 7913803194884294000,
"line_mean": 32.8828828829,
"line_max": 114,
"alpha_frac": 0.7125764424,
"autogenerated": false,
"ratio": 3.33717834960071,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.504975479200071,
"avg_score": null,
"num_lines": null
} |
"""An example to run the minitaur environment of standing with four legs.
"""
import numpy as np
import tensorflow as tf
from pybullet_envs.minitaur.envs import minitaur_four_leg_stand_env
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string("log_path", None, "The directory to write the log file.")
NUM_LEGS = 4
# Proportional feedback gains for roll and pitch balance control.
kroll = 3.0
kpitch = 3.0
def feed_forward_only_control_example(log_path=None):
    """An example of hand-tuned controller for minitaur standing with four legs.

    Args:
      log_path: The directory that the log files are written to. If log_path is
        None, no logs will be written.
    """
    steps = 1000
    episodes = 1
    environment = minitaur_four_leg_stand_env.MinitaurFourLegStandEnv(
        on_rack=False,
        log_path=log_path,
        urdf_version=minitaur_four_leg_stand_env.RAINBOW_DASH_V0_URDF_VERSION,
        remove_default_joint_damping=True,
        hard_reset=True,
        motor_kp=1.0,
        motor_kd=0.015,
        control_latency=0.015,
        pd_latency=0.003,
        control_time_step=0.006,
        action_repeat=6,
        env_randomizer=None,
        render=True)
    # Fixed seed so runs are reproducible.
    np.random.seed(100)
    avg_reward = 0
    for i in range(episodes):
        sum_reward = 0
        observation = environment.reset()
        for _ in range(steps):
            action = [0] * 4
            # Proportional feedback on observation[0]/observation[1] --
            # presumably roll and pitch of the base; TODO confirm units.
            uroll = kroll * observation[0]
            upitch = kpitch * observation[1]
            # Mix the roll/pitch corrections with per-leg signs.
            action[0] = upitch - uroll
            action[1] = -upitch - uroll
            action[2] = upitch + uroll
            action[3] = -upitch + uroll
            observation, reward, done, _ = environment.step(action)
            sum_reward += reward
            if done:
                break
        tf.logging.info("reward {}: {}".format(i, sum_reward))
        avg_reward += sum_reward
    tf.logging.info("avg_reward: {}\n\n\n".format(avg_reward / episodes))
def main(unused_argv):
    """Entry point: run the standing example with the configured log path."""
    feed_forward_only_control_example(log_path=FLAGS.log_path)
if __name__ == "__main__":
    tf.logging.set_verbosity(tf.logging.INFO)
    tf.app.run()
| {
"repo_name": "MTASZTAKI/ApertusVR",
"path": "plugins/physics/bulletPhysics/3rdParty/bullet3/examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_four_leg_stand_env_example.py",
"copies": "2",
"size": "1917",
"license": "mit",
"hash": -9022565774189264000,
"line_mean": 27.1911764706,
"line_max": 80,
"alpha_frac": 0.6546687533,
"autogenerated": false,
"ratio": 3.106969205834684,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4761637959134684,
"avg_score": null,
"num_lines": null
} |
# An example use of the /delta API call. Maintains a local cache of
# the App Folder's contents. Use the 'update' sub-command to update
# the local cache. Use the 'find' sub-command to search the local
# cache.
#
# Example usage:
#
# 1. Link to your Dropbox account
# > python search_cache.py link
#
# 2. Go to Dropbox and make changes to the contents.
#
# 3. Update the local cache to match what's on Dropbox.
# > python search_cache.py update
#
# 4. Search the local cache.
# > python search_cache.py find 'txt'
#
# Repeat steps 2-4 any number of times.
import dropbox
import sys, os, json
# Fill these in from your Dropbox developer app console before running.
APP_KEY = ''
APP_SECRET = ''
ACCESS_TYPE = 'app_folder'
# Local JSON file where the access token and cached tree are persisted.
STATE_FILE = 'search_cache.json'
def main():
    """Command-line entry point: dispatch to the link/update/find/reset
    sub-commands.  (Python 2 script: uses reload()/setdefaultencoding.)"""
    # Lets us print unicode characters through sys.stdout/stderr
    reload(sys).setdefaultencoding('utf8')
    if APP_KEY == '' or APP_SECRET == '':
        sys.stderr.write("ERROR: Set your APP_KEY and APP_SECRET at the top of %r.\n" % __file__)
        sys.exit(1)
    prog_name = sys.argv[0]
    args = sys.argv[1:]
    # No arguments: print usage and exit successfully.
    if len(args) == 0:
        sys.stderr.write("Usage:\n")
        sys.stderr.write(" %s link Link to a user's account.\n" % prog_name)
        sys.stderr.write(" %s update Update cache to the latest on Dropbox.\n" % prog_name)
        sys.stderr.write(" %s update <num> Update cache, limit to <num> pages of /delta.\n" % prog_name)
        sys.stderr.write(" %s find <term> Search the cache for <term> (case-sensitive).\n" % prog_name)
        sys.stderr.write(" %s find Display entire cache.\n" % prog_name)
        sys.stderr.write(" %s reset Delete the cache.\n" % prog_name)
        sys.exit(0)
    command = args[0]
    if command == 'link':
        command_link(args)
    elif command == 'update':
        command_update(args)
    elif command == 'find':
        command_find(args)
    elif command == 'reset':
        command_reset(args)
    else:
        sys.stderr.write("ERROR: Unknown command: %r\n" % command)
        sys.stderr.write("Run with no arguments for help.\n")
        sys.exit(1)
def command_link(args):
    """Run the OAuth flow and persist the access token in the state file."""
    if len(args) != 1:
        sys.stderr.write("ERROR: \"link\" doesn't take any arguments.\n")
        sys.exit(1)
    sess = dropbox.session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
    request_token = sess.obtain_request_token()
    # Make the user log in and authorize this token
    url = sess.build_authorize_url(request_token)
    sys.stdout.write("1. Go to: %s\n" % url)
    sys.stdout.write("2. Authorize this app.\n")
    sys.stdout.write("After you're done, press ENTER.\n")
    raw_input()
    # This will fail if the user didn't visit the above URL and hit 'Allow'
    access_token = sess.obtain_access_token(request_token)
    sys.stdout.write("Link successful.\n")
    # Start with an empty tree; the 'update' sub-command populates it.
    save_state({
        'access_token': (access_token.key, access_token.secret),
        'tree': {}
    })
def command_update(args):
    """Pull /delta pages from Dropbox and apply them to the cached tree.

    An optional numeric argument limits how many /delta pages are fetched.
    """
    if len(args) == 1:
        page_limit = None
    elif len(args) == 2:
        page_limit = int(args[1])
    else:
        sys.stderr.write("ERROR: \"update\" takes either zero or one argument.\n")
        sys.exit(1)
    # Load state
    state = load_state()
    access_token = state['access_token']
    cursor = state.get('cursor')
    tree = state['tree']
    # Connect to Dropbox
    sess = dropbox.session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
    sess.set_token(*access_token)
    c = dropbox.client.DropboxClient(sess)
    page = 0
    changed = False
    while (page_limit is None) or (page < page_limit):
        # Get /delta results from Dropbox
        result = c.delta(cursor)
        page += 1
        # A 'reset' entry means the server invalidated our cache: start over.
        if result['reset'] == True:
            sys.stdout.write('reset\n')
            changed = True
            tree = {}
        cursor = result['cursor']
        # Apply the entries one by one to our cached tree.
        for delta_entry in result['entries']:
            changed = True
            apply_delta(tree, delta_entry)
        cursor = result['cursor']
        if not result['has_more']: break
    if not changed:
        sys.stdout.write('No updates.\n')
    else:
        # Save state
        state['cursor'] = cursor
        state['tree'] = tree
        save_state(state)
def command_find(args):
    """Search the cached tree for a case-sensitive term and print matches.

    With no term, every cached entry matches (empty-string containment).
    """
    if len(args) == 1:
        term = ''
    elif len(args) == 2:
        term = args[1]
    else:
        sys.stderr.write("ERROR: \"find\" takes either zero or one arguments.\n")
        sys.exit(1)
    state = load_state()
    results = []
    search_tree(results, state['tree'], term)
    for r in results:
        sys.stdout.write("%s\n" % (r,))
    sys.stdout.write("[Matches: %d]\n" % (len(results),))
def command_reset(args):
    """Clear the cached tree and delta cursor, keeping the access token."""
    if len(args) != 1:
        sys.stderr.write("ERROR: \"reset\" takes no arguments.\n")
        sys.exit(1)
    # Delete cursor, empty tree.
    state = load_state()
    if 'cursor' in state:
        del state['cursor']
    state['tree'] = {}
    save_state(state)
# We track the folder state as a tree of Node objects.
class Node(object):
    """One entry in the cached folder tree.

    Fix: ``dict.iteritems()`` is Python 2 only and raises AttributeError on
    Python 3; ``dict.items()`` behaves identically here on both versions.
    """
    def __init__(self, path, content):
        # The "original" path (i.e. not the lower-case path)
        self.path = path
        # For files, content is a pair (size, modified)
        # For folders, content is a dict of children Nodes, keyed by lower-case file names.
        self.content = content

    def is_folder(self):
        """True when this node represents a folder (dict of children)."""
        return isinstance(self.content, dict)

    def to_json(self):
        """Serialize to a (path, content) pair of JSON-friendly values."""
        return (self.path, Node.to_json_content(self.content))

    @staticmethod
    def from_json(jnode):
        """Inverse of to_json(): rebuild a Node from a (path, content) pair."""
        path, jcontent = jnode
        return Node(path, Node.from_json_content(jcontent))

    @staticmethod
    def to_json_content(content):
        if isinstance(content, dict):
            # .items() works on both Python 2 and 3 (was .iteritems()).
            return dict([(name_lc, node.to_json()) for name_lc, node in content.items()])
        else:
            return content

    @staticmethod
    def from_json_content(jcontent):
        if isinstance(jcontent, dict):
            return dict([(name_lc, Node.from_json(jnode)) for name_lc, jnode in jcontent.items()])
        else:
            return jcontent
def apply_delta(root, e):
    """Apply one delta entry ``e = (path, metadata)`` to the cached tree.

    ``metadata`` is a dict for an add/update and None for a deletion.
    ``root`` is the top-level dict of children and is mutated in place.
    """
    path, metadata = e
    branch, leaf = split_path(path)
    if metadata is not None:
        sys.stdout.write('+ %s\n' % path)
        # Traverse down the tree until we find the parent folder of the entry
        # we want to add.  Create any missing folders along the way.
        children = root
        for part in branch:
            node = get_or_create_child(children, part)
            # If there's no folder here, make an empty one.
            if not node.is_folder():
                node.content = {}
            children = node.content
        # Create the file/folder.
        node = get_or_create_child(children, leaf)
        node.path = metadata['path']  # Save the un-lower-cased path.
        if metadata['is_dir']:
            # Only create an empty folder if there isn't one there already.
            if not node.is_folder():
                node.content = {}
        else:
            node.content = metadata['size'], metadata['modified']
    else:
        sys.stdout.write('- %s\n' % path)
        # Traverse down the tree until we find the parent of the entry we
        # want to delete.
        children = root
        for part in branch:
            node = children.get(part)
            # If one of the parent folders is missing, then we're done.
            if node is None or not node.is_folder(): break
            children = node.content
        else:
            # for/else: this branch runs only when the loop did NOT break,
            # i.e. every parent folder on the path exists.
            # If we made it all the way, delete the file/folder (if it exists).
            if leaf in children:
                del children[leaf]
def get_or_create_child(children, name):
    """Return children[name], inserting a fresh placeholder Node if absent."""
    existing = children.get(name)
    if existing is not None:
        return existing
    placeholder = Node(None, None)
    children[name] = placeholder
    return placeholder
def split_path(path):
    """Split an absolute path into (parent parts, leaf name).

    '/a/b/c' -> (['a', 'b'], 'c'); '/x' -> ([], 'x').

    Raises ValueError for relative paths or the bare root '/'.  The original
    used ``assert`` for this, but asserts are stripped under ``python -O``,
    so the validation is made explicit.
    """
    if not path.startswith('/'):
        raise ValueError('path must be absolute: %r' % (path,))
    if path == '/':
        raise ValueError('cannot split the root path')
    parts = path[1:].split('/')
    return parts[0:-1], parts[-1]
# Recursively search 'tree' for files that contain the string in 'term'.
# Print out any matches.
def search_tree(results, tree, term):
    """Append to 'results' one line per node whose path contains 'term'.

    Folders contribute just their path; files also show (size, modified).
    Recurses into every folder regardless of whether it matched.
    """
    # items() instead of the Python 2-only iteritems() keeps this portable.
    for name_lc, node in tree.items():
        path = node.path
        if (path is not None) and term in path:
            if node.is_folder():
                results.append('%s' % (path,))
            else:
                size, modified = node.content
                results.append('%s (%s, %s)' % (path, size, modified))
        # Recurse on children.
        if node.is_folder():
            search_tree(results, node.content, term)
def load_state():
    """Load cursor/tree state from STATE_FILE, rebuilding the Node tree.

    Exits with an error message if the state file does not exist yet.
    """
    if not os.path.exists(STATE_FILE):
        sys.stderr.write("ERROR: Couldn't find state file %r. Run the \"link\" subcommand first.\n" % (STATE_FILE))
        sys.exit(1)
    # 'with' guarantees the file is closed even if json.load() raises;
    # the original leaked the handle on a parse error.
    with open(STATE_FILE, 'r') as f:
        state = json.load(f)
    state['tree'] = Node.from_json_content(state['tree'])
    return state
def save_state(state):
    """Write 'state' to STATE_FILE as JSON.

    Serializes the Node tree into a shallow copy so the caller's in-memory
    'state' dict is left untouched (the original replaced state['tree'] in
    place, silently converting the caller's Node tree to JSON form).
    """
    serializable = dict(state)
    serializable['tree'] = Node.to_json_content(state['tree'])
    # 'with' closes the file even if json.dump() raises.
    with open(STATE_FILE, 'w') as f:
        json.dump(serializable, f, indent=4)
# Standard script entry point: dispatch to main() only when run directly.
if __name__ == '__main__':
    main()
| {
"repo_name": "calvdee/dropbox_python_sdk",
"path": "example/search_cache.py",
"copies": "1",
"size": "9122",
"license": "mit",
"hash": 3756008433887201300,
"line_mean": 31.8129496403,
"line_max": 116,
"alpha_frac": 0.5879193159,
"autogenerated": false,
"ratio": 3.5998421468034727,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46877614627034725,
"avg_score": null,
"num_lines": null
} |
# an example user tag, using RiotTag
from riot_tag import RiotTag
class P(RiotTag):
    """Example Riot tag demonstrating class-level state behaviour."""

    debug = 1

    # never do mutables on class level. this is just to check if transpiler
    # creates the same behaviour - and it does, a second tag instance gets
    # the same lv object:
    lv = [{'name': 'n0'}]

    # immutable on class level. does a second instance start at 1?
    # answer: yes, perfect:
    counter = 1

    template = ''' <div><h1>Riot Transcrypt Tag Instance {label}</h1>
    <div>INNER</div></div> '''

    def count_up(self):
        """Bump the shared counter and append a matching entry to lv."""
        self.counter = self.counter + 1
        self.pp('counter:', self.counter, 'len lv:', len(self.lv), 'adding one lv')
        # str() is required here: under CPython 'n' + int raises TypeError.
        # The original relied on Transcrypt's JS '+' coercing the number,
        # which masked the bug when the same source runs under CPython.
        self.lv.append({'name': 'n' + str(self.counter)})
        return self.counter
# try some inheritance...
class Sample2(P):
    """Riot tag inheriting P; mutates shared class state on every update."""

    # ... and change the state at every update, just for fun:
    template = P.template.replace('INNER', '''
    <div>
      <h5 each="{lv}">name: {name} - counter: {count_up()}</h5>
    </div>
    ''')

    # no scoped styles currently
    style = '''sample2 h5 {color: green}'''

    def __init__(self, tag, opts):
        # 'opts' carries tag options from the markup; 'label' is expected
        # to be set there (no default — missing label would fail here).
        self.label = opts.label.capitalize()  # this rocks so much.
        # alternative to super:
        RiotTag.__init__(self, tag, opts)
        # uncomment next line and chrome will stop:
        # debugger
        self.pp('tag init', 'adding 2 lv')
        # mutating the lv object (shared at class level across instances):
        self.lv.extend([{'name': 'n1'}, {'name': 'n2'}])

    def update(self):
        self.pp('update handler in the custom tag, calling super')
        RiotTag.update(self)
| {
"repo_name": "QQuick/Transcrypt",
"path": "transcrypt/demos/riot_demo/riot_demo.py",
"copies": "1",
"size": "1646",
"license": "apache-2.0",
"hash": -5501191482737263000,
"line_mean": 30.92,
"line_max": 84,
"alpha_frac": 0.567436209,
"autogenerated": false,
"ratio": 3.6017505470459517,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46691867560459516,
"avg_score": null,
"num_lines": null
} |
"""An example using as many different features of rueckenwind as possible
"""
__author__ = """Florian Ludwig"""
__email__ = 'f.ludwig@greyrook.com'
__version__ = '0.0.2'
import tornado.web
import perm
import rw.testing
import rw.http
import rw.httpbase
import rw.scope
from rw import gen
from . import model
root = rw.http.Module('rwlogin')
@root.get('/')
def login_page():
    """Serve the login form.

    Returns the rendered template so the framework can send it as the
    response — the original called render_template() and discarded the
    result, matching handlers elsewhere in this project that return it.
    """
    return root.render_template('login.html')
plugin = rw.plugin.Plugin(__name__)
class UserService(object):
    """Cookie-backed user session service (lookup, login, registration)."""

    @gen.coroutine
    @rw.scope.inject
    def current(self, handler):
        """Resolve the current user from the 'rwuser' secure cookie.

        Returns a perm.Anonymous() object when no valid user is found.
        """
        rwuser = handler.get_secure_cookie('rwuser')
        if rwuser:
            rwuser = yield model.User.by_id(rwuser)
        if rwuser is None:
            rwuser = perm.Anonymous()
        raise gen.Return(rwuser)

    @gen.coroutine
    @rw.scope.inject
    def set_current(self, user, handler, app):
        """Store the user's id in the 'rwuser' secure cookie."""
        cfg = app.settings.get('rwuser', {})
        # Session length is configured in hours; the cookie API wants days.
        expires = cfg.get('session_time_h', 24)
        handler.set_secure_cookie('rwuser', str(user['_id']), expires_days=expires/24.)

    @gen.coroutine
    @rw.scope.inject
    def login(self, handler):
        """Check email/password from the request; set the session on success.

        Returns the user on success, a falsy value otherwise.
        """
        email = handler.get_argument('email')
        password = handler.get_argument('password')
        user = yield model.User.find_one({'email': email})
        if user and user.check_password(password):
            # set_current is a coroutine — it must be yielded.  The original
            # dropped the returned future, so the cookie write could race the
            # response and any error inside it was silently lost.
            yield self.set_current(user)
        raise gen.Return(user)

    @gen.coroutine
    @rw.scope.inject
    def register(self, handler):
        """Create a new user from the request's email/password arguments."""
        email = handler.get_argument('email')
        password = handler.get_argument('password')
        user = model.User()
        user.email = email
        user.set_password(password)
        yield user.insert()
        raise gen.Return(user)
class PermissionDenied(tornado.web.HTTPError):
    """HTTP 403 raised when 'permission' is denied on 'subject'."""

    def __init__(self, permission, subject):
        # Keep the denied permission/subject available for logging and
        # debugging — the original accepted both arguments and threw them away.
        self.permission = permission
        self.subject = subject
        super(PermissionDenied, self).__init__(403)
@gen.coroutine
@rw.scope.inject
def pre_request_handler(handler, scope, settings, services):
    """Attach the current user to the scope and handler before each request.

    When the 'preload_user' setting is true the user lookup is awaited here;
    otherwise the un-awaited coroutine result is stored as-is and resolved
    by whoever needs it.
    """
    preload = settings.get('rwuser', {}).get('preload_user', False)
    user = services['user'].current()
    if preload:
        user = yield user
    scope['user'] = handler['user'] = user
@plugin.init
def init(scope, settings):
    """Plugin startup: wire the 403 exception, the user service, and the
    per-request hook into the framework."""
    perm.PERMISSION_DENIED_EXCEPTION = PermissionDenied
    scope.subscope('services')['user'] = UserService()
    rw.httpbase.PRE_REQUEST.add(pre_request_handler)
| {
"repo_name": "FlorianLudwig/rwuser",
"path": "rwuser/__init__.py",
"copies": "1",
"size": "2360",
"license": "apache-2.0",
"hash": -4040383193965596700,
"line_mean": 25.2222222222,
"line_max": 87,
"alpha_frac": 0.6406779661,
"autogenerated": false,
"ratio": 3.5866261398176293,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9726672792786316,
"avg_score": 0.00012626262626262626,
"num_lines": 90
} |
"""An example using as many different features of rueckenwind as possible
"""
import time
import tornado.web
import tornado.ioloop
import rw.testing
import rw.http
import rw.httpbase
from rw import gen
# every rueckenwind projects needs at least one rw.http.Module
root = rw.http.Module('test.example')


@root.init
def init(template_env):
    # init is executed during startup
    #
    # Here we add a global variable to the template environment
    # every module has a separate jinja2 template environment
    template_env.globals['static_value'] = 42
    # assert template_env is root.template_env
@root.get('/')
def index(handler):
    # Simplest possible GET handler: finish() sends the response body.
    handler.finish('Hello World')
@root.get('/hello_handler')
def hello_return(handler):
    # the http decorators (get etc.) of rw.http.Module provide
    # dependency injection via rw.scope.
    # This way the current handler (rw.httpbase.RequestHandler) is injected
    # and can be used to respond to the current http request via
    # handler.finish()
    handler.finish('Hello Handler!')
@root.get('/lazy')
@gen.coroutine
def lazy(handler):
    # handlers can be gen.coroutines so async operations can be yielded;
    # this timeout fires immediately (deadline = now) just to demonstrate.
    yield gen.Task(tornado.ioloop.IOLoop.current().add_timeout, time.time())
    handler.finish('Hello lazy rw.http')
@root.post('/')
def root_submit(handler):
    # POST on the same path as index() — routing dispatches on method.
    handler.finish('root POST')
# TODO support: @root.get('/user', defaults={'name': 'me'})
@root.get('/user/<name>')
def user_page(handler, name):
    # Path segments in <angle brackets> are injected as arguments.
    handler.finish('Hello ' + name)
@root.get('/otherplace')
def other(handler):
    handler.finish('other')
@root.put('/put')
def put(handler):
    handler.finish('put')
@root.delete('/delete')
def delete(handler):
    handler.finish('delete')
@root.options('/options')
def options(handler):
    handler.finish('options')
@root.get('/foo')
def some_page():
    # Returning the rendered template sends it as the response body.
    return root.render_template('index.html')
class MainHandler(tornado.web.RequestHandler):
    """Plain Tornado handler, mounted below to show interop with rw modules."""

    def get(self):
        self.write("Tornado GET")

    def post(self):
        self.write("Tornado POST")
# A nested module; 'resources' points its template lookup at test.example.
sub = rw.http.Module(name='submodule', resources='test.example')


@sub.init
def sub_init(template_env):
    template_env.globals['static_value'] = 42
    # assert template_env is root.template_env
@sub.get('/')
def sub_index():
    # Served at '/sub/' once the module is mounted below.
    return sub.render_template('sub.html')
# Mount the plain Tornado handler and the nested module under prefixes.
root.mount('/tornado', MainHandler)
root.mount('/sub', sub)
| {
"repo_name": "FlorianLudwig/rueckenwind",
"path": "test/example/__init__.py",
"copies": "1",
"size": "2388",
"license": "apache-2.0",
"hash": -3313034623288294400,
"line_mean": 20.9082568807,
"line_max": 76,
"alpha_frac": 0.6930485762,
"autogenerated": false,
"ratio": 3.52212389380531,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.471517247000531,
"avg_score": null,
"num_lines": null
} |
"""An example using WAV audio."""
import io
import math
import wave
import binobj
from binobj import fields
class WAVFileHeader(binobj.Struct):
    """RIFF container header: magic, total payload size, and 'WAVE' tag."""

    riff_header = fields.Bytes(const=b"RIFF")
    size = fields.UInt32(endian="little")
    file_format = fields.Bytes(const=b"WAVE")

    # Format and data chunks follow
class WAVFormatChunk(binobj.Struct):
    """The 'fmt ' chunk describing PCM layout (channels, rate, sample size)."""

    chunk_id = fields.Bytes(const=b"fmt ")
    # size is const=16: the fixed length of a PCM-format chunk body.
    size = fields.UInt32(const=16, endian="little")
    audio_format = fields.UInt16(endian="little")
    n_channels = fields.UInt16(endian="little")
    sample_rate = fields.UInt32(endian="little")
    byte_rate = fields.UInt32(endian="little")
    block_alignment = fields.UInt16(endian="little")
    bits_per_sample = fields.UInt16(endian="little")

    @byte_rate.computes
    def _byte_rate(self, all_fields):
        """Derive byte_rate = sample_rate * channels * bytes per sample."""
        return (
            all_fields["sample_rate"]
            * all_fields["n_channels"]
            * all_fields["bits_per_sample"]
            // 8
        )

    @block_alignment.computes
    def _block_alignment(self, all_fields):
        """Derive block_alignment = bytes per multi-channel sample frame."""
        return all_fields["n_channels"] * all_fields["bits_per_sample"] // 8
class WAVDataChunk(binobj.Struct):
    """Header of the 'data' chunk; 'size' counts the PCM bytes that follow."""

    chunk_id = fields.Bytes(const=b"data")
    size = fields.UInt32(endian="little")

    # WAV PCM data bytes follow.
def test_wav__basic_read(tmpdir):
    """Create 16-bit mono audio sampled at 8kHz and hope the header data we read
    back matches.
    """
    file_path = str(tmpdir.join("test.wav"))
    wav = wave.open(file_path, "wb")
    wav.setnchannels(1)
    wav.setframerate(8000)
    wav.setsampwidth(2)

    # Write 4 seconds of audio, each second with a different tone. One frame is
    # 16 bits, 8000 frames per second -> 16000 bytes per second. Total: 64000
    this_frame = io.BytesIO()
    for herz in (440, 540, 640, 740):
        for frame_i in range(8000):
            # Sine wave offset into the unsigned 16-bit range [0, 32768].
            theta = (frame_i / 8000) * (2 * math.pi) * herz
            sample = int(16384 * math.sin(theta)) + 16384
            this_frame.write(sample.to_bytes(2, "little", signed=False))
        wav.writeframes(this_frame.getvalue())
        # Reuse the buffer for the next tone.
        this_frame.seek(0)
        this_frame.truncate()
    wav.close()

    # Audio file has been written to test.wav. Now we need to read it back and
    # verify that we get sane values in the header. We're only checking the
    # header!
    with open(file_path, "rb") as fd:
        file_header = WAVFileHeader.from_stream(fd)
        assert file_header.riff_header == b"RIFF"
        assert file_header.file_format == b"WAVE"

        format_chunk = WAVFormatChunk.from_stream(fd)
        assert format_chunk.size == 16, "Audio file isn't in PCM format."
        assert format_chunk.audio_format == 1, "Audio data is compressed?"
        assert format_chunk.n_channels == 1
        assert format_chunk.sample_rate == 8000
        assert format_chunk.byte_rate == 16000
        assert format_chunk.block_alignment == 2
        assert format_chunk.bits_per_sample == 16

        data_chunk_header = WAVDataChunk.from_stream(fd)
        assert data_chunk_header.size == 64000
def test_wav__basic_write(tmpdir):
    """Write a 16-bit mono audio file sampled at 24kHz and try to read it back."""
    file_path = str(tmpdir.join("test.wav"))

    format_chunk = WAVFormatChunk(
        audio_format=1, n_channels=1, sample_rate=24000, bits_per_sample=16
    )
    # 24000 samples/s, 2 bytes/sample, 4s of audio = 192000B
    data_chunk = WAVDataChunk(size=192000)
    audio_data = b"\xaa\x55" * 96000
    # RIFF size counts everything after the size field itself; the +4 covers
    # the 'WAVE' tag that precedes the chunks.
    header = WAVFileHeader(
        size=len(format_chunk) + len(data_chunk) + len(audio_data) + 4
    )

    with open(file_path, "wb") as fd:
        header.to_stream(fd)
        format_chunk.to_stream(fd)
        data_chunk.to_stream(fd)
        fd.write(audio_data)

    # Validate with the stdlib reader: it should agree with our structs.
    wav = wave.open(file_path, "rb")
    assert wav.getframerate() == 24000
    assert wav.getnchannels() == 1
    assert wav.getnframes() == 96000
    assert wav.getsampwidth() == 2
    wav.close()
| {
"repo_name": "dargueta/binobj",
"path": "tests/full_examples/wav_test.py",
"copies": "1",
"size": "3970",
"license": "bsd-3-clause",
"hash": -8689727697808208000,
"line_mean": 30.76,
"line_max": 82,
"alpha_frac": 0.637279597,
"autogenerated": false,
"ratio": 3.3473861720067455,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44846657690067454,
"avg_score": null,
"num_lines": null
} |
# An exception that any methods in exchange may raise.
class ExchangeException(Exception):
    """Wraps an underlying exception, tagging the message with its type."""

    def __init__(self, exception):
        formatted = '[%s] %s' % (type(exception).__name__, exception)
        super(ExchangeException, self).__init__(formatted)
# An order.
class Order(object):
    """A single bid (buy) or ask (sell) order on a market."""

    def __init__(self, market, order_id, bid_order, amount, price):
        (self._market,
         self._order_id,
         self._bid_order,
         self._amount,
         self._price) = (market, order_id, bid_order, amount, price)

    def GetMarket(self):
        """The Market this order was placed on."""
        return self._market

    def GetOrderId(self):
        """The exchange-assigned identifier of this order."""
        return self._order_id

    def IsBidOrder(self):
        """True for a bid/buy order, False for an ask/sell order."""
        return self._bid_order

    def GetAmount(self):
        """The amount of source currency in this order."""
        return self._amount

    def GetPrice(self):
        """The per-unit price of this order."""
        return self._price
# An available market.
class Market(object):
    """Abstract interface for one currency-pair market on an exchange."""

    def __init__(self, exchange):
        self._exchange = exchange

    def GetExchange(self):
        """Return the Exchange this market belongs to."""
        return self._exchange

    # Returns the currency that will be sold.
    def GetSourceCurrency(self):
        raise NotImplementedError

    # Returns the currency that will be bought.
    def GetTargetCurrency(self):
        raise NotImplementedError

    # Get the minimum amount of source currency that must be traded.
    def GetTradeMinimum(self):
        raise NotImplementedError

    # Returns a tuple of buy and sell Orders.
    def GetPublicOrders(self):
        raise NotImplementedError

    # Creates an order.
    # If 'bid_order' is True, this is a bid/buy order, otherwise an ask/sell order.
    # Returns an Order.
    def CreateOrder(self, bid_order, amount, price):
        raise NotImplementedError
# A base class for Exchanges.
class Exchange(object):
    """Abstract interface a concrete exchange implementation must fulfil."""

    # Returns the name of the exchange.
    @staticmethod
    def GetName():
        raise NotImplementedError

    # Returns a list of currencies, e.g. ['BTC', 'LTC', 'DOGE', '42'].
    def GetCurrencies(self):
        raise NotImplementedError

    # Returns an array of Markets.
    def GetMarkets(self):
        raise NotImplementedError

    # Returns a dict of currency to balance, e.g.
    # {
    #     'BTC': 173.23,
    #     'LTC': 19,347,
    # }
    def GetBalances(self):
        raise NotImplementedError
| {
"repo_name": "dtbartle/altcoin-autosell",
"path": "exchange_api.py",
"copies": "1",
"size": "2212",
"license": "apache-2.0",
"hash": 5423754842314280000,
"line_mean": 25.6506024096,
"line_max": 83,
"alpha_frac": 0.6365280289,
"autogenerated": false,
"ratio": 4.0962962962962965,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0017668074896990558,
"num_lines": 83
} |
""" An exception View equipped with traceback,
log output, and where to file a bug.
"""
from urwid import (Pile, Text, Filler, WidgetWrap, Divider)
from ubuntui.widgets.buttons import cancel_btn
from ubuntui.utils import Color, Padding
class ErrorViewException(Exception):
    """Raised when the error view itself runs into a problem."""
class ErrorView(WidgetWrap):
    """Full-screen urwid view shown when an install fails.

    Displays the error reason and a single Quit button; quitting raises
    SystemExit so the surrounding event loop unwinds.
    """

    def __init__(self, error):
        # 'error' is the human-readable reason string to display.
        body = [
            Padding.center_60(
                Text("Oops, there was a problem with your install:",
                     align="center")),
            Padding.center_95(
                Divider("\N{BOX DRAWINGS LIGHT HORIZONTAL}", 1, 1)),
            Padding.center_85(Text("Reason:")),
            Padding.center_80(Color.error_major(Text(error))),
            Padding.line_break(""),
            Padding.line_break(""),
            Padding.center_95(
                Divider("\N{BOX DRAWINGS LIGHT HORIZONTAL}", 1, 1)),
            Padding.center_20(self._build_buttons())
        ]
        super().__init__(Filler(Pile(body), valign="middle"))

    def _build_buttons(self):
        """Build the row holding the single 'Quit' button."""
        buttons = [
            Color.button_secondary(
                cancel_btn(label="Quit", on_press=self.cancel),
                focus_map="button_secondary focus")
        ]
        return Pile(buttons)

    def cancel(self, button):
        """Quit button handler: abort the install."""
        raise SystemExit("Install exited because of error.")
| {
"repo_name": "battlemidget/conjure-up",
"path": "ubuntui/views/error.py",
"copies": "1",
"size": "1366",
"license": "mit",
"hash": -116990739683668160,
"line_mean": 31.5238095238,
"line_max": 68,
"alpha_frac": 0.5805270864,
"autogenerated": false,
"ratio": 4.041420118343195,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5121947204743195,
"avg_score": null,
"num_lines": null
} |
"""An execution engine for Python processes."""
import copy
import inspect
import json
import os
import shlex
import shutil
from resolwe.flow.execution_engines.base import BaseExecutionEngine
from resolwe.flow.execution_engines.exceptions import ExecutionError
from resolwe.flow.models.utils import hydrate_input_references, hydrate_input_uploads
from resolwe.process.parser import SafeParser
PYTHON_RUNTIME_DIRNAME = 'python_runtime'
PYTHON_RUNTIME_ROOT = '/'
PYTHON_RUNTIME_VOLUME = os.path.join(PYTHON_RUNTIME_ROOT, PYTHON_RUNTIME_DIRNAME)
PYTHON_PROGRAM_ROOT = '/'
PYTHON_PROGRAM_FILENAME = 'python_process.py'
PYTHON_PROGRAM_VOLUME = os.path.join(PYTHON_PROGRAM_ROOT, PYTHON_PROGRAM_FILENAME)
PYTHON_INPUTS_FILENAME = 'inputs.json'
PYTHON_INPUTS_ROOT = '/'
PYTHON_INPUTS_VOLUME = os.path.join(PYTHON_INPUTS_ROOT, PYTHON_INPUTS_FILENAME)
class ExecutionEngine(BaseExecutionEngine):
    """An execution engine that outputs bash programs."""

    name = 'python'

    def discover_process(self, path):
        """Perform process discovery in given path.

        This method will be called during process registration and
        should return a list of dictionaries with discovered process
        schemas.
        """
        if not path.lower().endswith('.py'):
            return []

        # Use a context manager so the file handle is always closed; the
        # original 'open(path).read()' leaked it.
        with open(path) as handle:
            parser = SafeParser(handle.read())
        processes = parser.parse()
        return [process.to_schema() for process in processes]

    def evaluate(self, data):
        """Evaluate the code needed to compute a given Data object."""
        return 'PYTHONPATH="{runtime}" python3 -m resolwe.process {program} --slug {slug} --inputs {inputs}'.format(
            runtime=PYTHON_RUNTIME_VOLUME,
            program=PYTHON_PROGRAM_VOLUME,
            slug=shlex.quote(data.process.slug),
            inputs=PYTHON_INPUTS_VOLUME,
        )

    def prepare_runtime(self, runtime_dir, data):
        """Prepare runtime directory.

        Copies the Python runtime package, writes the process source and its
        serialized inputs, and returns the volume maps exposing them.
        """
        # Copy over Python process runtime (resolwe.process).
        import resolwe.process as runtime_package
        src_dir = os.path.dirname(inspect.getsourcefile(runtime_package))
        dest_package_dir = os.path.join(runtime_dir, PYTHON_RUNTIME_DIRNAME, 'resolwe', 'process')
        shutil.copytree(src_dir, dest_package_dir)
        os.chmod(dest_package_dir, 0o755)

        # Write python source file.
        source = data.process.run.get('program', '')
        program_path = os.path.join(runtime_dir, PYTHON_PROGRAM_FILENAME)
        with open(program_path, 'w') as file:
            file.write(source)
        os.chmod(program_path, 0o755)

        # Write serialized inputs.
        inputs = copy.deepcopy(data.input)
        hydrate_input_references(inputs, data.process.input_schema)
        hydrate_input_uploads(inputs, data.process.input_schema)
        inputs_path = os.path.join(runtime_dir, PYTHON_INPUTS_FILENAME)

        # XXX: Skip serialization of LazyStorageJSON. We should support
        # LazyStorageJSON in Python processes on the new communication protocol
        def default(obj):
            """Serialize otherwise non-JSON-serializable input values."""
            class_name = obj.__class__.__name__
            if class_name == 'LazyStorageJSON':
                return ''
            raise TypeError(f'Object of type {class_name} is not JSON serializable')

        with open(inputs_path, 'w') as file:
            json.dump(inputs, file, default=default)

        # Generate volume maps required to expose needed files.
        volume_maps = {
            PYTHON_RUNTIME_DIRNAME: PYTHON_RUNTIME_VOLUME,
            PYTHON_PROGRAM_FILENAME: PYTHON_PROGRAM_VOLUME,
            PYTHON_INPUTS_FILENAME: PYTHON_INPUTS_VOLUME,
        }

        return volume_maps
| {
"repo_name": "jberci/resolwe",
"path": "resolwe/flow/execution_engines/python/__init__.py",
"copies": "1",
"size": "3704",
"license": "apache-2.0",
"hash": -1939259999420230400,
"line_mean": 37.5833333333,
"line_max": 116,
"alpha_frac": 0.6695464363,
"autogenerated": false,
"ratio": 4.052516411378556,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5222062847678556,
"avg_score": null,
"num_lines": null
} |
"""An execution engine for Python processes."""
import os
from resolwe.flow.execution_engines.base import BaseExecutionEngine
from resolwe.process.parser import SafeParser
PYTHON_RUNTIME_DIRNAME = "python_runtime"
PYTHON_RUNTIME_ROOT = "/"
PYTHON_RUNTIME_VOLUME = os.path.join(PYTHON_RUNTIME_ROOT, PYTHON_RUNTIME_DIRNAME)
class ExecutionEngine(BaseExecutionEngine):
    """An execution engine that outputs bash programs."""

    name = "python"

    def discover_process(self, path):
        """Perform process discovery in given path.

        This method will be called during process registration and
        should return a list of dictionaries with discovered process
        schemas.
        """
        if not path.lower().endswith(".py"):
            return []

        # Use a context manager so the file handle is always closed; the
        # original 'open(path).read()' leaked it.
        with open(path) as handle:
            parser = SafeParser(handle.read())
        processes = parser.parse()
        return [process.to_schema() for process in processes]

    def evaluate(self, data):
        """Evaluate the code needed to compute a given Data object."""
        return 'PYTHONPATH="{runtime}" python3 -u -m resolwe.process'.format(
            runtime=PYTHON_RUNTIME_VOLUME
        )

    def prepare_volumes(self):
        """Mount additional volumes."""
        return {PYTHON_RUNTIME_DIRNAME: PYTHON_RUNTIME_VOLUME}
| {
"repo_name": "genialis/resolwe",
"path": "resolwe/flow/execution_engines/python/__init__.py",
"copies": "1",
"size": "1284",
"license": "apache-2.0",
"hash": -7637847016620552000,
"line_mean": 32.7894736842,
"line_max": 81,
"alpha_frac": 0.6760124611,
"autogenerated": false,
"ratio": 4.36734693877551,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0003209242618741977,
"num_lines": 38
} |
"""An execution engine that outputs bash programs."""
import copy
import shellescape
import yaml
from django.conf import settings
from resolwe.flow.execution_engines.base import BaseExecutionEngine
from resolwe.flow.execution_engines.exceptions import ExecutionError
from resolwe.flow.expression_engines import EvaluationError
from resolwe.flow.models.utils import hydrate_input_references, hydrate_input_uploads
class SafeString(str):
    """A str subclass marking a value as safe to skip shell escaping."""
class ExecutionEngine(BaseExecutionEngine):
    """An execution engine that outputs bash programs."""

    name = 'bash'

    def discover_process(self, path):
        """Perform process discovery in given path.

        This method will be called during process registration and
        should return a list of dictionaries with discovered process
        schemas.
        """
        if not path.lower().endswith(('.yml', '.yaml')):
            return []

        with open(path) as fn:
            # Pin the loader explicitly: bare yaml.load() is deprecated and
            # unsafe by default on old PyYAML releases.  FullLoader matches
            # the behaviour used elsewhere in this codebase.
            schemas = yaml.load(fn, Loader=yaml.FullLoader)
        if not schemas:
            # TODO: Logger.
            # self.stderr.write("Could not read YAML file {}".format(schema_file))
            return []

        process_schemas = []
        for schema in schemas:
            if 'run' not in schema:
                continue

            # NOTE: This currently assumes that 'bash' is the default.
            if schema['run'].get('language', 'bash') != 'bash':
                continue

            process_schemas.append(schema)

        return process_schemas

    def evaluate(self, data):
        """Evaluate the code needed to compute a given Data object."""
        try:
            inputs = copy.deepcopy(data.input)
            hydrate_input_references(inputs, data.process.input_schema)
            hydrate_input_uploads(inputs, data.process.input_schema)

            # Include special 'proc' variable in the context.
            inputs['proc'] = {
                'data_id': data.id,
                'data_dir': self.manager.get_executor().resolve_data_path(),
            }

            # Include special 'requirements' variable in the context.
            inputs['requirements'] = data.process.requirements
            # Inject default values and change resources according to
            # the current Django configuration.
            inputs['requirements']['resources'] = data.process.get_resource_limits()

            script_template = data.process.run.get('program', '')

            # Get the appropriate expression engine. If none is defined, do not evaluate
            # any expressions.
            expression_engine = data.process.requirements.get('expression-engine', None)
            if not expression_engine:
                return script_template

            return self.get_expression_engine(expression_engine).evaluate_block(
                script_template, inputs,
                escape=self._escape,
                safe_wrapper=SafeString,
            )
        except EvaluationError as error:
            raise ExecutionError('{}'.format(error))

    def _escape(self, value):
        """Escape given value unless it is safe."""
        if isinstance(value, SafeString):
            return value

        return shellescape.quote(value)
| {
"repo_name": "jberci/resolwe",
"path": "resolwe/flow/execution_engines/bash/__init__.py",
"copies": "1",
"size": "3260",
"license": "apache-2.0",
"hash": -4836073617358197000,
"line_mean": 32.9583333333,
"line_max": 88,
"alpha_frac": 0.6171779141,
"autogenerated": false,
"ratio": 4.752186588921282,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007318582786967363,
"num_lines": 96
} |
"""An execution engine that outputs bash programs."""
import copy
import shellescape
import yaml
from resolwe.flow.execution_engines.base import BaseExecutionEngine
from resolwe.flow.execution_engines.exceptions import ExecutionError
from resolwe.flow.expression_engines import EvaluationError
from resolwe.flow.models.utils import hydrate_input_references, hydrate_input_uploads
class SafeString(str):
    """String subclass whose instances are exempt from shell escaping."""
class ExecutionEngine(BaseExecutionEngine):
    """An execution engine that outputs bash programs."""

    name = "bash"

    def discover_process(self, path):
        """Perform process discovery in given path.

        This method will be called during process registration and
        should return a list of dictionaries with discovered process
        schemas.
        """
        if not path.lower().endswith((".yml", ".yaml")):
            return []

        with open(path) as fn:
            schemas = yaml.load(fn, Loader=yaml.FullLoader)
        if not schemas:
            # TODO: Logger.
            # self.stderr.write("Could not read YAML file {}".format(schema_file))
            return []

        process_schemas = []
        for schema in schemas:
            if "run" not in schema:
                continue

            # NOTE: This currently assumes that 'bash' is the default.
            if schema["run"].get("language", "bash") != "bash":
                continue

            process_schemas.append(schema)

        return process_schemas

    def evaluate(self, data):
        """Evaluate the code needed to compute a given Data object.

        Returns the rendered bash program as a string; raises ExecutionError
        if any template expression fails to evaluate.
        """
        try:
            inputs = copy.deepcopy(data.input)
            hydrate_input_references(inputs, data.process.input_schema)
            hydrate_input_uploads(inputs, data.process.input_schema)

            # Include special 'proc' variable in the context.
            inputs["proc"] = {
                "data_id": data.id,
            }

            # Include special 'requirements' variable in the context.
            inputs["requirements"] = data.process.requirements
            # Inject default values and change resources according to
            # the current Django configuration.
            inputs["requirements"]["resources"] = data.process.get_resource_limits()

            script_template = data.process.run.get("program", "")

            # Get the appropriate expression engine. If none is defined, do not evaluate
            # any expressions.
            expression_engine = data.process.requirements.get("expression-engine", None)
            if not expression_engine:
                return script_template

            return self.get_expression_engine(expression_engine).evaluate_block(
                script_template, inputs, escape=self._escape, safe_wrapper=SafeString
            )
        except EvaluationError as error:
            raise ExecutionError("{}".format(error))

    def _escape(self, value):
        """Escape given value unless it is safe."""
        if isinstance(value, SafeString):
            return value

        return shellescape.quote(value)
| {
"repo_name": "genialis/resolwe",
"path": "resolwe/flow/execution_engines/bash/__init__.py",
"copies": "1",
"size": "3130",
"license": "apache-2.0",
"hash": 7020067260480734000,
"line_mean": 34.1685393258,
"line_max": 88,
"alpha_frac": 0.6249201278,
"autogenerated": false,
"ratio": 4.742424242424242,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0009200708059733807,
"num_lines": 89
} |
"""An execution engine that supports workflow specifications."""
import collections
import yaml
from django.db import transaction
from resolwe.flow.execution_engines.base import BaseExecutionEngine
from resolwe.flow.execution_engines.exceptions import ExecutionError
from resolwe.flow.expression_engines import EvaluationError
from resolwe.flow.models import Data, DataDependency, Process
from resolwe.permissions.utils import copy_permissions
class ExecutionEngine(BaseExecutionEngine):
    """An execution engine that supports workflow specifications."""

    name = 'workflow'

    def discover_process(self, path):
        """Perform process discovery in given path.

        This method will be called during process registration and
        should return a list of dictionaries with discovered process
        schemas.
        """
        if not path.lower().endswith(('.yml', '.yaml')):
            return []

        with open(path) as fn:
            # Pin the loader explicitly: bare yaml.load() is deprecated and
            # unsafe by default on old PyYAML releases.  FullLoader matches
            # the behaviour used elsewhere in this codebase.
            schemas = yaml.load(fn, Loader=yaml.FullLoader)
        if not schemas:
            # TODO: Logger.
            # self.stderr.write("Could not read YAML file {}".format(schema_file))
            return []

        process_schemas = []
        for schema in schemas:
            if 'run' not in schema:
                continue

            # NOTE: This currently assumes that 'bash' is the default.
            if schema['run'].get('language', 'bash') != 'workflow':
                continue

            process_schemas.append(schema)

        return process_schemas

    def get_output_schema(self, process):
        """Return any additional output schema for the process."""
        return [
            {'name': 'steps', 'label': "Steps", 'type': 'list:data:'},
        ]

    def _evaluate_expressions(self, expression_engine, step_id, values, context):
        """Recursively evaluate expressions in a dictionary of values.

        Raises ExecutionError (with the offending step id) when any
        expression fails to evaluate.
        """
        if expression_engine is None:
            return values

        processed = {}
        for name, value in values.items():
            if isinstance(value, str):
                value = value.strip()

                try:
                    expression = expression_engine.get_inline_expression(value)
                    if expression is not None:
                        # Inline expression.
                        value = expression_engine.evaluate_inline(expression, context)
                    else:
                        # Block expression.
                        value = expression_engine.evaluate_block(value, context)
                except EvaluationError as error:
                    raise ExecutionError('Error while evaluating expression for step "{}":\n{}'.format(
                        step_id, error
                    ))
            elif isinstance(value, dict):
                # Nested dictionaries get the same treatment.
                value = self._evaluate_expressions(expression_engine, step_id, value, context)

            processed[name] = value

        return processed

    @transaction.atomic
    def evaluate(self, data):
        """Evaluate the code needed to compute a given Data object.

        Creates one child Data object per workflow step (all inside a single
        transaction), wires up dependencies/permissions/collections, and
        records the step ids in the workflow's output.
        """
        expression_engine = data.process.requirements.get('expression-engine', None)
        if expression_engine is not None:
            expression_engine = self.get_expression_engine(expression_engine)

        # Parse steps.
        steps = data.process.run.get('program', None)
        if steps is None:
            return

        if not isinstance(steps, list):
            raise ExecutionError('Workflow program must be a list of steps.')

        # Expression engine evaluation context.
        context = {
            'input': data.input,
            'steps': collections.OrderedDict(),
        }

        for index, step in enumerate(steps):
            try:
                step_id = step['id']
                step_slug = step['run']
            except KeyError as error:
                raise ExecutionError('Incorrect definition of step "{}", missing property "{}".'.format(
                    step.get('id', index), error
                ))

            # Fetch target process.
            process = Process.objects.filter(slug=step_slug).order_by('-version').first()
            if not process:
                raise ExecutionError('Incorrect definition of step "{}", invalid process "{}".'.format(
                    step_id, step_slug
                ))

            # Process all input variables.
            step_input = step.get('input', {})
            if not isinstance(step_input, dict):
                raise ExecutionError('Incorrect definition of step "{}", input must be a dictionary.'.format(
                    step_id
                ))

            data_input = self._evaluate_expressions(expression_engine, step_id, step_input, context)

            # Create the data object.
            data_object = Data.objects.create(
                process=process,
                contributor=data.contributor,
                tags=data.tags,
                input=data_input,
            )
            DataDependency.objects.create(
                parent=data,
                child=data_object,
                kind=DataDependency.KIND_SUBPROCESS,
            )

            # Copy permissions.
            copy_permissions(data, data_object)

            # Copy collections.
            for collection in data.collection_set.all():
                collection.data.add(data_object)

            context['steps'][step_id] = data_object.pk

        # Immediately set our status to done and output all data object identifiers.
        data.output = {
            'steps': list(context['steps'].values()),
        }
        data.status = Data.STATUS_DONE
| {
"repo_name": "jberci/resolwe",
"path": "resolwe/flow/execution_engines/workflow/__init__.py",
"copies": "1",
"size": "5636",
"license": "apache-2.0",
"hash": -4730300465726501000,
"line_mean": 35.1282051282,
"line_max": 109,
"alpha_frac": 0.5688431512,
"autogenerated": false,
"ratio": 5.0321428571428575,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6100986008342858,
"avg_score": null,
"num_lines": null
} |
# an exhaustive research on moving average strategy
import sys
import os
this_dir=os.path.dirname(__file__)
sys.path.append(this_dir+"\..\\")
from Back_Test import *
from Ticker_API import *
from Data_API import *
from Util import *
import Single_Algos
# Back-test window, expressed as negative offsets into the available
# price history (i.e. the last 2500 bars up to the most recent one).
test_start = -2500
test_end = -1
# Re-download cached market data when it is older than this many days.
Pricing_Database.lazy_update_data_period = 20
# Test 0 ---------------------------------
# Benchmark: buy-and-hold every S&P 500 underlyer, then aggregate the
# portfolio measures per ticker, per sector and for the whole index.
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()

Pricing_Database.lazy_update_data = True  # to use downloaded data in the past week.
count = 0
good_keys = set()  # set gives O(1) membership tests in the sector loops below
for k in sp500:
    try:
        if k == "JEC":
            raise Exception('Unadjusted ticker')
        cache.get_ticker_data(k)
        count += 1
        good_keys.add(k)
    # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still work.
    except Exception:
        print( "skipped {}".format(k) )

all_measures = []
all_measures_by_sector = []
all_measures_total = {}
for sector in sp500_by_sector:
    sector_measures = {}
    sector_counter = 0
    for k in sp500_by_sector[sector]:
        if k in good_keys:
            print(k)
            algo = back_test_single("portfolio.buy(ticker)", test_start, test_end, ticker=k)
            temp_measure = algo.portfolio.get_measures()
            # Accumulate per-sector and overall sums, keyed by measure name.
            sector_measures = {key: temp_measure[key] + sector_measures.get(key, 0) for key in temp_measure}
            all_measures_total = {key: temp_measure[key] + all_measures_total.get(key, 0) for key in temp_measure}
            temp_measure['ticker'] = k
            all_measures.append(temp_measure)
            sector_counter += 1
    # Turn the per-sector sums into averages.
    sector_measures = {key: sector_measures[key] / sector_counter for key in sector_measures}
    sector_measures['ticker'] = sector
    all_measures_by_sector.append(sector_measures)
all_measures_total = {key: all_measures_total[key] / count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"

list_for_csv = [all_measures_total] + all_measures_by_sector + all_measures
dict_array_to_csv(list_for_csv, "Bench_Mark_1.csv", fields=['ticker', 'return', 'volatility', 'draw_down', 'max_draw_down', 'sharpe'])
#
# Test 1 ---------------------------------
# Run the plain moving-average strategy on all S&P 500 underlyers and
# aggregate the portfolio measures per ticker, per sector and overall.
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()

Pricing_Database.lazy_update_data = True  # to use downloaded data in the past week.
count = 0
good_keys = set()  # set gives O(1) membership tests in the sector loops below
for k in sp500:
    try:
        if k == "JEC":
            raise Exception('Unadjusted ticker')
        cache.get_ticker_data(k)
        count += 1
        good_keys.add(k)
    # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still work.
    except Exception:
        print( "skipped {}".format(k) )

all_measures = []
all_measures_by_sector = []
all_measures_total = {}
for sector in sp500_by_sector:
    sector_measures = {}
    sector_counter = 0
    for k in sp500_by_sector[sector]:
        if k in good_keys:
            print(k)
            algo = back_test_single(Single_Algos.algos["moving average"], test_start, test_end, ticker=k)
            temp_measure = algo.portfolio.get_measures()
            # Accumulate per-sector and overall sums, keyed by measure name.
            sector_measures = {key: temp_measure[key] + sector_measures.get(key, 0) for key in temp_measure}
            all_measures_total = {key: temp_measure[key] + all_measures_total.get(key, 0) for key in temp_measure}
            temp_measure['ticker'] = k
            all_measures.append(temp_measure)
            sector_counter += 1
    # Turn the per-sector sums into averages.
    sector_measures = {key: sector_measures[key] / sector_counter for key in sector_measures}
    sector_measures['ticker'] = sector
    all_measures_by_sector.append(sector_measures)
all_measures_total = {key: all_measures_total[key] / count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"

list_for_csv = [all_measures_total] + all_measures_by_sector + all_measures
dict_array_to_csv(list_for_csv, "Moving_Average_Result_1.csv", fields=['ticker', 'return', 'volatility', 'draw_down', 'max_draw_down', 'sharpe'])
# # Test 2 ---------------------------------
# #run moving average with short selling on all SP500 underlyers
# cache = Cache()
# #Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
# sp500 = get_snp500()
# sp500_by_sector = get_snp500_by_sector()
#
# Pricing_Database.lazy_update_data = True # to use downloaded data in the past week.
# count = 0
# good_keys = []
# for k in sp500:
# try:
# if k == "JEC":
# raise Exception('Unadjusted ticker')
# cache.get_ticker_data(k)
# count += 1
# good_keys.append(k)
# except:
# print( "skipped {}".format(k) )
# pass
#
# all_measures = []
# all_measures_by_sector = []
# all_measures_total ={}
# for sector in sp500_by_sector:
# sector_measures = {}
# sector_counter = 0
# for k in sp500_by_sector[sector]:
# if k in good_keys:
# print(k)
# algo = back_test_single(Single_Algos.algos["moving average with short sell"],test_start,test_end,ticker=k)
# temp_measure = algo.portfolio.get_measures()
# sector_measures={ key:temp_measure[key]+sector_measures.get(key,0) for key in temp_measure}
# all_measures_total={ key:temp_measure[key]+all_measures_total.get(key,0) for key in temp_measure}
# temp_measure['ticker'] = k
# all_measures.append(temp_measure)
# sector_counter += 1
# sector_measures = {key:sector_measures[key]/sector_counter for key in sector_measures}
# sector_measures['ticker'] = sector
# all_measures_by_sector.append(sector_measures)
# all_measures_total = {key:all_measures_total[key]/count for key in all_measures_total}
# all_measures_total['ticker'] = "Avg_SP500"
#
# list_for_csv = [all_measures_total]+all_measures_by_sector+all_measures
# dict_array_to_csv(list_for_csv,"Moving_Average_With_Short_Sell_Result_1.csv",fields=['ticker','return','volatility','draw_down','sharpe'])
# Test 3 ---------------------------------
# Run the moving-average strategy with a support-price signal on all
# S&P 500 underlyers; aggregate measures per ticker, per sector, overall.
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()

Pricing_Database.lazy_update_data = True  # to use downloaded data in the past week.
count = 0
good_keys = set()  # set gives O(1) membership tests in the sector loops below
for k in sp500:
    try:
        if k == "JEC":
            raise Exception('Unadjusted ticker')
        cache.get_ticker_data(k)
        count += 1
        good_keys.add(k)
    # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still work.
    except Exception:
        print( "skipped {}".format(k) )

all_measures = []
all_measures_by_sector = []
all_measures_total = {}
for sector in sp500_by_sector:
    sector_measures = {}
    sector_counter = 0
    for k in sp500_by_sector[sector]:
        if k in good_keys:
            print(k)
            algo = back_test_single(Single_Algos.algos["moving average with support price"], test_start, test_end, ticker=k)
            temp_measure = algo.portfolio.get_measures()
            # Accumulate per-sector and overall sums, keyed by measure name.
            sector_measures = {key: temp_measure[key] + sector_measures.get(key, 0) for key in temp_measure}
            all_measures_total = {key: temp_measure[key] + all_measures_total.get(key, 0) for key in temp_measure}
            temp_measure['ticker'] = k
            all_measures.append(temp_measure)
            sector_counter += 1
    # Turn the per-sector sums into averages.
    sector_measures = {key: sector_measures[key] / sector_counter for key in sector_measures}
    sector_measures['ticker'] = sector
    all_measures_by_sector.append(sector_measures)
all_measures_total = {key: all_measures_total[key] / count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"

list_for_csv = [all_measures_total] + all_measures_by_sector + all_measures
dict_array_to_csv(list_for_csv, "Moving_Average_With_Support_Price_Result_1.csv", fields=['ticker', 'return', 'volatility', 'draw_down', 'max_draw_down', 'sharpe'])
# Test 4 ---------------------------------
# Run the moving-average strategy with support-price and volatility
# signals on all S&P 500 underlyers that pass a volatility filter.
cache = Cache()
#Cache.quandl_key = "Put your quandl key here if you want better download speed with Quandl"
sp500 = get_snp500()
sp500_by_sector = get_snp500_by_sector()
# print(sp500_by_sector['real_estate'])

Pricing_Database.lazy_update_data = True  # to use downloaded data in the past week.
count = 0
good_keys = set()  # set gives O(1) membership tests below
for k in sp500:
    try:
        if k == "JEC":
            raise Exception('Unadjusted ticker')
        cache.get_ticker_data(k)
        count += 1
        good_keys.add(k)
    # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still work.
    except Exception:
        print( "skipped {}".format(k) )

# In this strategy we first restrict the algo to less-volatile stocks.
# The vol filter uses history strictly before the start date, so there
# is no look-forward bias.
filtered_keys = {k for k in good_keys
                 if volatility(k, test_start - 250, test_start) < 0.25}

all_measures = []
all_measures_by_sector = []
all_measures_total = {}
processed_count = 0  # number of tickers actually back-tested
for sector in sp500_by_sector:
    sector_measures = {}
    sector_counter = 0
    for k in sp500_by_sector[sector]:
        if k in filtered_keys:
            print(k)
            algo = back_test_single(Single_Algos.algos["moving average support volatility"], test_start, test_end, ticker=k)
            temp_measure = algo.portfolio.get_measures()
            # Accumulate per-sector and overall sums, keyed by measure name.
            sector_measures = {key: temp_measure[key] + sector_measures.get(key, 0) for key in temp_measure}
            all_measures_total = {key: temp_measure[key] + all_measures_total.get(key, 0) for key in temp_measure}
            temp_measure['ticker'] = k
            all_measures.append(temp_measure)
            sector_counter += 1
            processed_count += 1
    # Turn the per-sector sums into averages.
    sector_measures = {key: sector_measures[key] / sector_counter for key in sector_measures}
    sector_measures['ticker'] = sector
    all_measures_by_sector.append(sector_measures)
# BUG FIX: the original divided by ``count`` (every downloadable ticker),
# but only tickers passing the volatility filter were back-tested, which
# understated the overall averages. Divide by the processed count instead.
if processed_count:
    all_measures_total = {key: all_measures_total[key] / processed_count for key in all_measures_total}
all_measures_total['ticker'] = "Avg_SP500"

list_for_csv = [all_measures_total] + all_measures_by_sector + all_measures
dict_array_to_csv(list_for_csv, "Moving_Average_Support_Volatility_Result.csv", fields=['ticker', 'return', 'volatility', 'draw_down', 'max_draw_down', 'sharpe'])
| {
"repo_name": "geome-mitbbs/QTS_Research",
"path": "Algo_Research/Moving_Average.py",
"copies": "1",
"size": "10491",
"license": "mit",
"hash": -4207640791727655000,
"line_mean": 38.98046875,
"line_max": 205,
"alpha_frac": 0.634925174,
"autogenerated": false,
"ratio": 3.1504504504504505,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9235497320976352,
"avg_score": 0.00997566069481956,
"num_lines": 256
} |
"""An EXIF library, written by Gene Cash.
Original file was available at Gene Cash's web site::
http://home.cfl.rr.com/genecash/
"""
# Library to extract EXIF information in digital camera image files
#
# To use this library call with:
# f=open(path_name, 'rb')
# tags=EXIF.process_file(f)
# tags will now be a dictionary mapping names of EXIF tags to their
# values in the file named by path_name. You can process the tags
# as you wish. In particular, you can iterate through all the tags with:
# for tag in tags.keys():
# if tag not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename',
# 'EXIF MakerNote'):
# print "Key: %s, value %s" % (tag, tags[tag])
# (This code uses the if statement to avoid printing out a few of the
# tags that tend to be long or boring.)
#
# The tags dictionary will include keys for all of the usual EXIF
# tags, and will also include keys for Makernotes used by some
# cameras, for which we have a good specification.
#
# Contains code from "exifdump.py" originally written by Thierry Bousch
# <bousch@topo.math.u-psud.fr> and released into the public domain.
#
# Updated and turned into general-purpose library by Gene Cash
#
# This copyright license is intended to be similar to the FreeBSD license.
#
# Copyright 2002 Gene Cash All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY GENE CASH ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# This means you may do anything you want with this code, except claim you
# wrote it. Also, if it breaks you get to keep both pieces.
#
# Patch Contributors:
# * Simon J. Gerraty <sjg@crufty.net>
# s2n fix & orientation decode
# * John T. Riedl <riedl@cs.umn.edu>
# Added support for newer Nikon type 3 Makernote format for D70 and some
# other Nikon cameras.
# * Joerg Schaefer <schaeferj@gmx.net>
# Fixed subtle bug when faking an EXIF header, which affected maker notes
# using relative offsets, and a fix for Nikon D100.
#
# 21-AUG-99 TB Last update by Thierry Bousch to his code.
# 17-JAN-02 CEC Discovered code on web.
# Commented everything.
# Made small code improvements.
# Reformatted for readability.
# 19-JAN-02 CEC Added ability to read TIFFs and JFIF-format JPEGs.
# Added ability to extract JPEG formatted thumbnail.
# Added ability to read GPS IFD (not tested).
# Converted IFD data structure to dictionaries indexed by
# tag name.
# Factored into library returning dictionary of IFDs plus
# thumbnail, if any.
# 20-JAN-02 CEC Added MakerNote processing logic.
# Added Olympus MakerNote.
# Converted data structure to single-level dictionary, avoiding
# tag name collisions by prefixing with IFD name. This makes
# it much easier to use.
# 23-JAN-02 CEC Trimmed nulls from end of string values.
# 25-JAN-02 CEC Discovered JPEG thumbnail in Olympus TIFF MakerNote.
# 26-JAN-02 CEC Added ability to extract TIFF thumbnails.
# Added Nikon, Fujifilm, Casio MakerNotes.
# 30-NOV-03 CEC Fixed problem with canon_decode_tag() not creating an
# IFD_Tag() object.
# 15-FEB-04 CEC Finally fixed bit shift warning by converting Y to 0L.
#
# field type descriptions as (length, abbreviation, full name) tuples
# Field type descriptors, indexed by the EXIF field-type code stored in
# each IFD entry: (size in bytes of one component, abbreviation, full name).
FIELD_TYPES=(
    (0, 'X', 'Proprietary'), # no such type
    (1, 'B', 'Byte'),
    (1, 'A', 'ASCII'),
    (2, 'S', 'Short'),
    (4, 'L', 'Long'),
    (8, 'R', 'Ratio'),
    (1, 'SB', 'Signed Byte'),
    (1, 'U', 'Undefined'),
    (2, 'SS', 'Signed Short'),
    (4, 'SL', 'Signed Long'),
    (8, 'SR', 'Signed Ratio')
    )
# dictionary of main EXIF tag names
# first element of tuple is tag name, optional second element is
# another dictionary giving names to values
EXIF_TAGS={
0x0100: ('ImageWidth', ),
0x0101: ('ImageLength', ),
0x0102: ('BitsPerSample', ),
0x0103: ('Compression',
{1: 'Uncompressed TIFF',
6: 'JPEG Compressed'}),
0x0106: ('PhotometricInterpretation', ),
0x010A: ('FillOrder', ),
0x010D: ('DocumentName', ),
0x010E: ('ImageDescription', ),
0x010F: ('Make', ),
0x0110: ('Model', ),
0x0111: ('StripOffsets', ),
0x0112: ('Orientation',
{1: 'Horizontal (normal)',
2: 'Mirrored horizontal',
3: 'Rotated 180',
4: 'Mirrored vertical',
5: 'Mirrored horizontal then rotated 90 CCW',
6: 'Rotated 90 CW',
7: 'Mirrored horizontal then rotated 90 CW',
8: 'Rotated 90 CCW'}),
0x0115: ('SamplesPerPixel', ),
0x0116: ('RowsPerStrip', ),
0x0117: ('StripByteCounts', ),
0x011A: ('XResolution', ),
0x011B: ('YResolution', ),
0x011C: ('PlanarConfiguration', ),
0x0128: ('ResolutionUnit',
{1: 'Not Absolute',
2: 'Pixels/Inch',
3: 'Pixels/Centimeter'}),
0x012D: ('TransferFunction', ),
0x0131: ('Software', ),
0x0132: ('DateTime', ),
0x013B: ('Artist', ),
0x013E: ('WhitePoint', ),
0x013F: ('PrimaryChromaticities', ),
0x0156: ('TransferRange', ),
0x0200: ('JPEGProc', ),
0x0201: ('JPEGInterchangeFormat', ),
0x0202: ('JPEGInterchangeFormatLength', ),
0x0211: ('YCbCrCoefficients', ),
0x0212: ('YCbCrSubSampling', ),
0x0213: ('YCbCrPositioning', ),
0x0214: ('ReferenceBlackWhite', ),
0x828D: ('CFARepeatPatternDim', ),
0x828E: ('CFAPattern', ),
0x828F: ('BatteryLevel', ),
0x8298: ('Copyright', ),
0x829A: ('ExposureTime', ),
0x829D: ('FNumber', ),
0x83BB: ('IPTC/NAA', ),
0x8769: ('ExifOffset', ),
0x8773: ('InterColorProfile', ),
0x8822: ('ExposureProgram',
{0: 'Unidentified',
1: 'Manual',
2: 'Program Normal',
3: 'Aperture Priority',
4: 'Shutter Priority',
5: 'Program Creative',
6: 'Program Action',
7: 'Portrait Mode',
8: 'Landscape Mode'}),
0x8824: ('SpectralSensitivity', ),
0x8825: ('GPSInfo', ),
0x8827: ('ISOSpeedRatings', ),
0x8828: ('OECF', ),
# print as string
0x9000: ('ExifVersion', lambda x: ''.join(map(chr, x))),
0x9003: ('DateTimeOriginal', ),
0x9004: ('DateTimeDigitized', ),
0x9101: ('ComponentsConfiguration',
{0: '',
1: 'Y',
2: 'Cb',
3: 'Cr',
4: 'Red',
5: 'Green',
6: 'Blue'}),
0x9102: ('CompressedBitsPerPixel', ),
0x9201: ('ShutterSpeedValue', ),
0x9202: ('ApertureValue', ),
0x9203: ('BrightnessValue', ),
0x9204: ('ExposureBiasValue', ),
0x9205: ('MaxApertureValue', ),
0x9206: ('SubjectDistance', ),
0x9207: ('MeteringMode',
{0: 'Unidentified',
1: 'Average',
2: 'CenterWeightedAverage',
3: 'Spot',
4: 'MultiSpot'}),
0x9208: ('LightSource',
{0: 'Unknown',
1: 'Daylight',
2: 'Fluorescent',
3: 'Tungsten',
10: 'Flash',
17: 'Standard Light A',
18: 'Standard Light B',
19: 'Standard Light C',
20: 'D55',
21: 'D65',
22: 'D75',
255: 'Other'}),
0x9209: ('Flash', {0: 'No',
1: 'Fired',
5: 'Fired (?)', # no return sensed
7: 'Fired (!)', # return sensed
9: 'Fill Fired',
13: 'Fill Fired (?)',
15: 'Fill Fired (!)',
16: 'Off',
24: 'Auto Off',
25: 'Auto Fired',
29: 'Auto Fired (?)',
31: 'Auto Fired (!)',
32: 'Not Available'}),
0x920A: ('FocalLength', ),
0x927C: ('MakerNote', ),
# print as string
0x9286: ('UserComment', lambda x: ''.join(map(chr, x))),
0x9290: ('SubSecTime', ),
0x9291: ('SubSecTimeOriginal', ),
0x9292: ('SubSecTimeDigitized', ),
# print as string
0xA000: ('FlashPixVersion', lambda x: ''.join(map(chr, x))),
0xA001: ('ColorSpace', ),
0xA002: ('ExifImageWidth', ),
0xA003: ('ExifImageLength', ),
0xA005: ('InteroperabilityOffset', ),
0xA20B: ('FlashEnergy', ), # 0x920B in TIFF/EP
0xA20C: ('SpatialFrequencyResponse', ), # 0x920C - -
0xA20E: ('FocalPlaneXResolution', ), # 0x920E - -
0xA20F: ('FocalPlaneYResolution', ), # 0x920F - -
0xA210: ('FocalPlaneResolutionUnit', ), # 0x9210 - -
0xA214: ('SubjectLocation', ), # 0x9214 - -
0xA215: ('ExposureIndex', ), # 0x9215 - -
0xA217: ('SensingMethod', ), # 0x9217 - -
0xA300: ('FileSource',
{3: 'Digital Camera'}),
0xA301: ('SceneType',
{1: 'Directly Photographed'}),
0xA302: ('CVAPattern',),
}
# interoperability tags
# Interoperability IFD tags: tag ID -> (tag name, ) in the same format
# as EXIF_TAGS above.
INTR_TAGS={
    0x0001: ('InteroperabilityIndex', ),
    0x0002: ('InteroperabilityVersion', ),
    0x1000: ('RelatedImageFileFormat', ),
    0x1001: ('RelatedImageWidth', ),
    0x1002: ('RelatedImageLength', ),
    }
# GPS tags (not used yet, haven't seen camera with GPS)
# GPS IFD tags: tag ID -> (tag name, ) in the same format as EXIF_TAGS.
# (Not exercised yet -- see the comment above: no GPS-equipped camera seen.)
GPS_TAGS={
    0x0000: ('GPSVersionID', ),
    0x0001: ('GPSLatitudeRef', ),
    0x0002: ('GPSLatitude', ),
    0x0003: ('GPSLongitudeRef', ),
    0x0004: ('GPSLongitude', ),
    0x0005: ('GPSAltitudeRef', ),
    0x0006: ('GPSAltitude', ),
    0x0007: ('GPSTimeStamp', ),
    0x0008: ('GPSSatellites', ),
    0x0009: ('GPSStatus', ),
    0x000A: ('GPSMeasureMode', ),
    0x000B: ('GPSDOP', ),
    0x000C: ('GPSSpeedRef', ),
    0x000D: ('GPSSpeed', ),
    0x000E: ('GPSTrackRef', ),
    0x000F: ('GPSTrack', ),
    0x0010: ('GPSImgDirectionRef', ),
    0x0011: ('GPSImgDirection', ),
    0x0012: ('GPSMapDatum', ),
    0x0013: ('GPSDestLatitudeRef', ),
    0x0014: ('GPSDestLatitude', ),
    0x0015: ('GPSDestLongitudeRef', ),
    0x0016: ('GPSDestLongitude', ),
    0x0017: ('GPSDestBearingRef', ),
    0x0018: ('GPSDestBearing', ),
    0x0019: ('GPSDestDistanceRef', ),
    0x001A: ('GPSDestDistance', )
    }
# Nikon E99x MakerNote Tags
# http://members.tripod.com/~tawba/990exif.htm
MAKERNOTE_NIKON_NEWER_TAGS={
0x0002: ('ISOSetting', ),
0x0003: ('ColorMode', ),
0x0004: ('Quality', ),
0x0005: ('Whitebalance', ),
0x0006: ('ImageSharpening', ),
0x0007: ('FocusMode', ),
0x0008: ('FlashSetting', ),
0x0009: ('AutoFlashMode', ),
0x000B: ('WhiteBalanceBias', ),
0x000C: ('WhiteBalanceRBCoeff', ),
0x000F: ('ISOSelection', ),
0x0012: ('FlashCompensation', ),
0x0013: ('ISOSpeedRequested', ),
0x0016: ('PhotoCornerCoordinates', ),
0x0018: ('FlashBracketCompensationApplied', ),
0x0019: ('AEBracketCompensationApplied', ),
0x0080: ('ImageAdjustment', ),
0x0081: ('ToneCompensation', ),
0x0082: ('AuxiliaryLens', ),
0x0083: ('LensType', ),
0x0084: ('LensMinMaxFocalMaxAperture', ),
0x0085: ('ManualFocusDistance', ),
0x0086: ('DigitalZoomFactor', ),
0x0088: ('AFFocusPosition',
{0x0000: 'Center',
0x0100: 'Top',
0x0200: 'Bottom',
0x0300: 'Left',
0x0400: 'Right'}),
0x0089: ('BracketingMode',
{0x00: 'Single frame, no bracketing',
0x01: 'Continuous, no bracketing',
0x02: 'Timer, no bracketing',
0x10: 'Single frame, exposure bracketing',
0x11: 'Continuous, exposure bracketing',
0x12: 'Timer, exposure bracketing',
0x40: 'Single frame, white balance bracketing',
0x41: 'Continuous, white balance bracketing',
0x42: 'Timer, white balance bracketing'}),
0x008D: ('ColorMode', ),
0x008F: ('SceneMode?', ),
0x0090: ('LightingType', ),
0x0092: ('HueAdjustment', ),
0x0094: ('Saturation',
{-3: 'B&W',
-2: '-2',
-1: '-1',
0: '0',
1: '1',
2: '2'}),
0x0095: ('NoiseReduction', ),
0x00A7: ('TotalShutterReleases', ),
0x00A9: ('ImageOptimization', ),
0x00AA: ('Saturation', ),
0x00AB: ('DigitalVariProgram', ),
0x0010: ('DataDump', )
}
MAKERNOTE_NIKON_OLDER_TAGS={
0x0003: ('Quality',
{1: 'VGA Basic',
2: 'VGA Normal',
3: 'VGA Fine',
4: 'SXGA Basic',
5: 'SXGA Normal',
6: 'SXGA Fine'}),
0x0004: ('ColorMode',
{1: 'Color',
2: 'Monochrome'}),
0x0005: ('ImageAdjustment',
{0: 'Normal',
1: 'Bright+',
2: 'Bright-',
3: 'Contrast+',
4: 'Contrast-'}),
0x0006: ('CCDSpeed',
{0: 'ISO 80',
2: 'ISO 160',
4: 'ISO 320',
5: 'ISO 100'}),
0x0007: ('WhiteBalance',
{0: 'Auto',
1: 'Preset',
2: 'Daylight',
3: 'Incandescent',
4: 'Fluorescent',
5: 'Cloudy',
6: 'Speed Light'})
}
# decode Olympus SpecialMode tag in MakerNote
def olympus_special_mode(v):
    """Render the three-component Olympus SpecialMode MakerNote value
    (mode, sequence number, panorama direction) as a readable string."""
    modes = {
        0: 'Normal',
        1: 'Unknown',
        2: 'Fast',
        3: 'Panorama'}
    directions = {
        0: 'Non-panoramic',
        1: 'Left to right',
        2: 'Right to left',
        3: 'Bottom to top',
        4: 'Top to bottom'}
    mode, sequence, direction = v[0], v[1], v[2]
    return '%s - sequence %d - %s' % (modes[mode], sequence, directions[direction])
# Olympus MakerNote tags: tag ID -> (name[, value decoder or value map]).
MAKERNOTE_OLYMPUS_TAGS={
    # ah HAH! those sneeeeeaky bastids! this is how they get past the fact
    # that a JPEG thumbnail is not allowed in an uncompressed TIFF file
    0x0100: ('JPEGThumbnail', ),
    0x0200: ('SpecialMode', olympus_special_mode),
    0x0201: ('JPEGQual',
             {1: 'SQ',
              2: 'HQ',
              3: 'SHQ'}),
    0x0202: ('Macro',
             {0: 'Normal',
              1: 'Macro'}),
    0x0204: ('DigitalZoom', ),
    0x0207: ('SoftwareRelease', ),
    0x0208: ('PictureInfo', ),
    # print as string
    0x0209: ('CameraID', lambda x: ''.join(map(chr, x))),
    0x0F00: ('DataDump', )
    }
MAKERNOTE_CASIO_TAGS={
0x0001: ('RecordingMode',
{1: 'Single Shutter',
2: 'Panorama',
3: 'Night Scene',
4: 'Portrait',
5: 'Landscape'}),
0x0002: ('Quality',
{1: 'Economy',
2: 'Normal',
3: 'Fine'}),
0x0003: ('FocusingMode',
{2: 'Macro',
3: 'Auto Focus',
4: 'Manual Focus',
5: 'Infinity'}),
0x0004: ('FlashMode',
{1: 'Auto',
2: 'On',
3: 'Off',
4: 'Red Eye Reduction'}),
0x0005: ('FlashIntensity',
{11: 'Weak',
13: 'Normal',
15: 'Strong'}),
0x0006: ('Object Distance', ),
0x0007: ('WhiteBalance',
{1: 'Auto',
2: 'Tungsten',
3: 'Daylight',
4: 'Fluorescent',
5: 'Shade',
129: 'Manual'}),
0x000B: ('Sharpness',
{0: 'Normal',
1: 'Soft',
2: 'Hard'}),
0x000C: ('Contrast',
{0: 'Normal',
1: 'Low',
2: 'High'}),
0x000D: ('Saturation',
{0: 'Normal',
1: 'Low',
2: 'High'}),
0x0014: ('CCDSpeed',
{64: 'Normal',
80: 'Normal',
100: 'High',
125: '+1.0',
244: '+3.0',
250: '+2.0',})
}
MAKERNOTE_FUJIFILM_TAGS={
0x0000: ('NoteVersion', lambda x: ''.join(map(chr, x))),
0x1000: ('Quality', ),
0x1001: ('Sharpness',
{1: 'Soft',
2: 'Soft',
3: 'Normal',
4: 'Hard',
5: 'Hard'}),
0x1002: ('WhiteBalance',
{0: 'Auto',
256: 'Daylight',
512: 'Cloudy',
768: 'DaylightColor-Fluorescent',
769: 'DaywhiteColor-Fluorescent',
770: 'White-Fluorescent',
1024: 'Incandescent',
3840: 'Custom'}),
0x1003: ('Color',
{0: 'Normal',
256: 'High',
512: 'Low'}),
0x1004: ('Tone',
{0: 'Normal',
256: 'High',
512: 'Low'}),
0x1010: ('FlashMode',
{0: 'Auto',
1: 'On',
2: 'Off',
3: 'Red Eye Reduction'}),
0x1011: ('FlashStrength', ),
0x1020: ('Macro',
{0: 'Off',
1: 'On'}),
0x1021: ('FocusMode',
{0: 'Auto',
1: 'Manual'}),
0x1030: ('SlowSync',
{0: 'Off',
1: 'On'}),
0x1031: ('PictureMode',
{0: 'Auto',
1: 'Portrait',
2: 'Landscape',
4: 'Sports',
5: 'Night',
6: 'Program AE',
256: 'Aperture Priority AE',
512: 'Shutter Priority AE',
768: 'Manual Exposure'}),
0x1100: ('MotorOrBracket',
{0: 'Off',
1: 'On'}),
0x1300: ('BlurWarning',
{0: 'Off',
1: 'On'}),
0x1301: ('FocusWarning',
{0: 'Off',
1: 'On'}),
0x1302: ('AEWarning',
{0: 'Off',
1: 'On'})
}
# Canon MakerNote tags: tag ID -> (tag name, ) in the same format as
# EXIF_TAGS. Tags 0x0001 and 0x0004 are decoded element-by-element via
# the MAKERNOTE_CANON_TAG_0x001 / _0x004 tables below.
MAKERNOTE_CANON_TAGS={
    0x0006: ('ImageType', ),
    0x0007: ('FirmwareVersion', ),
    0x0008: ('ImageNumber', ),
    0x0009: ('OwnerName', )
    }
# see http://www.burren.cx/david/canon.html by David Burren
# this is in element offset, name, optional value dictionary format
MAKERNOTE_CANON_TAG_0x001={
1: ('Macromode',
{1: 'Macro',
2: 'Normal'}),
2: ('SelfTimer', ),
3: ('Quality',
{2: 'Normal',
3: 'Fine',
5: 'Superfine'}),
4: ('FlashMode',
{0: 'Flash Not Fired',
1: 'Auto',
2: 'On',
3: 'Red-Eye Reduction',
4: 'Slow Synchro',
5: 'Auto + Red-Eye Reduction',
6: 'On + Red-Eye Reduction',
16: 'external flash'}),
5: ('ContinuousDriveMode',
{0: 'Single Or Timer',
1: 'Continuous'}),
7: ('FocusMode',
{0: 'One-Shot',
1: 'AI Servo',
2: 'AI Focus',
3: 'MF',
4: 'Single',
5: 'Continuous',
6: 'MF'}),
10: ('ImageSize',
{0: 'Large',
1: 'Medium',
2: 'Small'}),
11: ('EasyShootingMode',
{0: 'Full Auto',
1: 'Manual',
2: 'Landscape',
3: 'Fast Shutter',
4: 'Slow Shutter',
5: 'Night',
6: 'B&W',
7: 'Sepia',
8: 'Portrait',
9: 'Sports',
10: 'Macro/Close-Up',
11: 'Pan Focus'}),
12: ('DigitalZoom',
{0: 'None',
1: '2x',
2: '4x'}),
13: ('Contrast',
{0xFFFF: 'Low',
0: 'Normal',
1: 'High'}),
14: ('Saturation',
{0xFFFF: 'Low',
0: 'Normal',
1: 'High'}),
15: ('Sharpness',
{0xFFFF: 'Low',
0: 'Normal',
1: 'High'}),
16: ('ISO',
{0: 'See ISOSpeedRatings Tag',
15: 'Auto',
16: '50',
17: '100',
18: '200',
19: '400'}),
17: ('MeteringMode',
{3: 'Evaluative',
4: 'Partial',
5: 'Center-weighted'}),
18: ('FocusType',
{0: 'Manual',
1: 'Auto',
3: 'Close-Up (Macro)',
8: 'Locked (Pan Mode)'}),
19: ('AFPointSelected',
{0x3000: 'None (MF)',
0x3001: 'Auto-Selected',
0x3002: 'Right',
0x3003: 'Center',
0x3004: 'Left'}),
20: ('ExposureMode',
{0: 'Easy Shooting',
1: 'Program',
2: 'Tv-priority',
3: 'Av-priority',
4: 'Manual',
5: 'A-DEP'}),
23: ('LongFocalLengthOfLensInFocalUnits', ),
24: ('ShortFocalLengthOfLensInFocalUnits', ),
25: ('FocalUnitsPerMM', ),
28: ('FlashActivity',
{0: 'Did Not Fire',
1: 'Fired'}),
29: ('FlashDetails',
{14: 'External E-TTL',
13: 'Internal Flash',
11: 'FP Sync Used',
7: '2nd("Rear")-Curtain Sync Used',
4: 'FP Sync Enabled'}),
32: ('FocusMode',
{0: 'Single',
1: 'Continuous'})
}
MAKERNOTE_CANON_TAG_0x004={
7: ('WhiteBalance',
{0: 'Auto',
1: 'Sunny',
2: 'Cloudy',
3: 'Tungsten',
4: 'Fluorescent',
5: 'Flash',
6: 'Custom'}),
9: ('SequenceNumber', ),
14: ('AFPointUsed', ),
15: ('FlashBias',
{0XFFC0: '-2 EV',
0XFFCC: '-1.67 EV',
0XFFD0: '-1.50 EV',
0XFFD4: '-1.33 EV',
0XFFE0: '-1 EV',
0XFFEC: '-0.67 EV',
0XFFF0: '-0.50 EV',
0XFFF4: '-0.33 EV',
0X0000: '0 EV',
0X000C: '0.33 EV',
0X0010: '0.50 EV',
0X0014: '0.67 EV',
0X0020: '1 EV',
0X002C: '1.33 EV',
0X0030: '1.50 EV',
0X0034: '1.67 EV',
0X0040: '2 EV'}),
19: ('SubjectDistance', )
}
# extract multibyte integer in Motorola format (big endian)
# NOTE(review): the original comment said "little endian", but Motorola
# byte order is big-endian, which is what the shift-left/OR loop below
# actually implements.
def s2n_motorola(str):
    """Return the integer encoded big-endian in the character string."""
    x=0
    for c in str:
        # Each new byte becomes the least-significant one.
        x=(x << 8) | ord(c)
    return x
# extract multibyte integer in Intel format (little endian)
# NOTE: the original comment said "big endian", but Intel byte order is
# little-endian, which is what this loop implements.
def s2n_intel(str):
    """Return the integer encoded little-endian in the character string."""
    x=0
    # Was ``y=0L``: the Py2-only long literal is a syntax error on Py3;
    # a plain int has identical semantics on both.
    y=0
    for c in str:
        # Each new byte is more significant than the previous one.
        x=x | (ord(c) << y)
        y=y+8
    return x
# ratio object that eventually will be able to reduce itself to lowest
# common denominator for printing
def gcd(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm)."""
    if b == 0:
        return a
    else:
        return gcd(b, a % b)

class Ratio:
    """A rational number that reduces itself to lowest terms for printing."""

    def __init__(self, num, den):
        self.num=num
        self.den=den

    def __repr__(self):
        self.reduce()
        if self.den == 1:
            return str(self.num)
        return '%d/%d' % (self.num, self.den)

    def reduce(self):
        """Divide num and den by their GCD, in place."""
        div=gcd(self.num, self.den)
        if div > 1:
            # Floor division keeps the fields integral under Python 3 as
            # well; with plain ``/`` they would silently become floats.
            self.num=self.num//div
            self.den=self.den//div
# for ease of dealing with tags
class IFD_Tag:
    """One decoded IFD entry: the tag, its values and where it lives."""

    def __init__(self, printable, tag, field_type, values, field_offset,
                 field_length):
        self.printable=printable        # human-readable rendering of the data
        self.tag=tag                    # numeric tag ID
        self.field_type=field_type      # index into FIELD_TYPES
        self.field_offset=field_offset  # offset of field from start of IFD, bytes
        self.field_length=field_length  # length of the data field in bytes
        self.values=values              # either a string or a list of items

    def __str__(self):
        # The printable form is the string representation.
        return self.printable

    def __repr__(self):
        return '(0x%04X) %s=%s @ %d' % (self.tag,
                                        FIELD_TYPES[self.field_type][2],
                                        self.printable,
                                        self.field_offset)
# class that handles an EXIF header
class EXIF_header:
    def __init__(self, file, endian, offset, fake_exif, debug=0):
        """Hold the state needed to parse one EXIF header.

        file      -- open, seekable file object containing the image
        endian    -- 'I' selects Intel (little-endian) decoding in s2n();
                     any other value selects Motorola (big-endian)
        offset    -- byte offset of the start of the EXIF data in *file*
        fake_exif -- truthy when a fake EXIF header was prepended; adjusts
                     relative MakerNote offsets in dump_IFD()
        debug     -- debug verbosity flag
        """
        self.file=file
        self.endian=endian
        self.offset=offset
        self.fake_exif=fake_exif
        self.debug=debug
        # Decoded tags, keyed by '<IFD name> <tag name>'; filled by dump_IFD().
        self.tags={}
# convert slice to integer, based on sign and endian flags
# usually this offset is assumed to be relative to the beginning of the
# start of the EXIF information. For some cameras that use relative tags,
# this offset may be relative to some other starting point.
def s2n(self, offset, length, signed=0):
self.file.seek(self.offset+offset)
slice=self.file.read(length)
if self.endian == 'I':
val=s2n_intel(slice)
else:
val=s2n_motorola(slice)
# Sign extension ?
if signed:
msb=1L << (8*length-1)
if val & msb:
val=val-(msb << 1)
return val
# convert offset to string
def n2s(self, offset, length):
s=''
for i in range(length):
if self.endian == 'I':
s=s+chr(offset & 0xFF)
else:
s=chr(offset & 0xFF)+s
offset=offset >> 8
return s
    # return first IFD
    def first_IFD(self):
        """Return the offset of the first IFD, stored as a 4-byte integer
        at offset 4 of the TIFF header."""
        return self.s2n(4, 4)
    # return pointer to next IFD
    def next_IFD(self, ifd):
        """Given an IFD offset, return the offset of the following IFD
        (zero terminates the chain -- see list_IFDs())."""
        # An IFD is a 2-byte entry count, 12 bytes per entry, then a
        # 4-byte pointer to the next IFD.
        entries=self.s2n(ifd, 2)
        return self.s2n(ifd+2+12*entries, 4)
# return list of IFDs in header
def list_IFDs(self):
i=self.first_IFD()
a=[]
while i:
a.append(i)
i=self.next_IFD(i)
return a
# return list of entries in this IFD
def dump_IFD(self, ifd, ifd_name, dict=EXIF_TAGS, relative=0):
entries=self.s2n(ifd, 2)
for i in range(entries):
# entry is index of start of this IFD in the file
entry=ifd+2+12*i
tag=self.s2n(entry, 2)
# get tag name. We do it early to make debugging easier
tag_entry=dict.get(tag)
if tag_entry:
tag_name=tag_entry[0]
else:
tag_name='Tag 0x%04X' % tag
field_type=self.s2n(entry+2, 2)
if not 0 < field_type < len(FIELD_TYPES):
# unknown field type
raise ValueError, \
'unknown type %d in tag 0x%04X' % (field_type, tag)
typelen=FIELD_TYPES[field_type][0]
count=self.s2n(entry+4, 4)
offset=entry+8
if count*typelen > 4:
# offset is not the value; it's a pointer to the value
# if relative we set things up so s2n will seek to the right
# place when it adds self.offset. Note that this 'relative'
# is for the Nikon type 3 makernote. Other cameras may use
# other relative offsets, which would have to be computed here
# slightly differently.
if relative:
tmp_offset=self.s2n(offset, 4)
offset=tmp_offset+ifd-self.offset+4
if self.fake_exif:
offset=offset+18
else:
offset=self.s2n(offset, 4)
field_offset=offset
if field_type == 2:
# special case: null-terminated ASCII string
if count != 0:
self.file.seek(self.offset+offset)
values=self.file.read(count)
values=values.strip().replace('\x00','')
else:
values=''
else:
values=[]
signed=(field_type in [6, 8, 9, 10])
for j in range(count):
if field_type in (5, 10):
# a ratio
value_j=Ratio(self.s2n(offset, 4, signed),
self.s2n(offset+4, 4, signed))
else:
value_j=self.s2n(offset, typelen, signed)
values.append(value_j)
offset=offset+typelen
# now "values" is either a string or an array
if count == 1 and field_type != 2:
printable=str(values[0])
else:
printable=str(values)
# compute printable version of values
if tag_entry:
if len(tag_entry) != 1:
# optional 2nd tag element is present
if callable(tag_entry[1]):
# call mapping function
printable=tag_entry[1](values)
else:
printable=''
for i in values:
# use lookup table for this tag
printable+=tag_entry[1].get(i, repr(i))
self.tags[ifd_name+' '+tag_name]=IFD_Tag(printable, tag,
field_type,
values, field_offset,
count*typelen)
if self.debug:
print ' debug: %s: %s' % (tag_name,
repr(self.tags[ifd_name+' '+tag_name]))
# extract uncompressed TIFF thumbnail (like pulling teeth)
# we take advantage of the pre-existing layout in the thumbnail IFD as
# much as possible
def extract_TIFF_thumbnail(self, thumb_ifd):
entries=self.s2n(thumb_ifd, 2)
# this is header plus offset to IFD ...
if self.endian == 'M':
tiff='MM\x00*\x00\x00\x00\x08'
else:
tiff='II*\x00\x08\x00\x00\x00'
# ... plus thumbnail IFD data plus a null "next IFD" pointer
self.file.seek(self.offset+thumb_ifd)
tiff+=self.file.read(entries*12+2)+'\x00\x00\x00\x00'
# fix up large value offset pointers into data area
for i in range(entries):
entry=thumb_ifd+2+12*i
tag=self.s2n(entry, 2)
field_type=self.s2n(entry+2, 2)
typelen=FIELD_TYPES[field_type][0]
count=self.s2n(entry+4, 4)
oldoff=self.s2n(entry+8, 4)
# start of the 4-byte pointer area in entry
ptr=i*12+18
# remember strip offsets location
if tag == 0x0111:
strip_off=ptr
strip_len=count*typelen
# is it in the data area?
if count*typelen > 4:
# update offset pointer (nasty "strings are immutable" crap)
# should be able to say "tiff[ptr:ptr+4]=newoff"
newoff=len(tiff)
tiff=tiff[:ptr]+self.n2s(newoff, 4)+tiff[ptr+4:]
# remember strip offsets location
if tag == 0x0111:
strip_off=newoff
strip_len=4
# get original data and store it
self.file.seek(self.offset+oldoff)
tiff+=self.file.read(count*typelen)
# add pixel strips and update strip offset info
old_offsets=self.tags['Thumbnail StripOffsets'].values
old_counts=self.tags['Thumbnail StripByteCounts'].values
for i in range(len(old_offsets)):
# update offset pointer (more nasty "strings are immutable" crap)
offset=self.n2s(len(tiff), strip_len)
tiff=tiff[:strip_off]+offset+tiff[strip_off+strip_len:]
strip_off+=strip_len
# add pixel strip to end
self.file.seek(self.offset+old_offsets[i])
tiff+=self.file.read(old_counts[i])
self.tags['TIFFThumbnail']=tiff
# decode all the camera-specific MakerNote formats
# Note is the data that comprises this MakerNote. The MakerNote will
# likely have pointers in it that point to other parts of the file. We'll
# use self.offset as the starting point for most of those pointers, since
# they are relative to the beginning of the file.
#
# If the MakerNote is in a newer format, it may use relative addressing
# within the MakerNote. In that case we'll use relative addresses for the
# pointers.
#
# As an aside: it's not just to be annoying that the manufacturers use
# relative offsets. It's so that if the makernote has to be moved by the
# picture software all of the offsets don't have to be adjusted. Overall,
# this is probably the right strategy for makernotes, though the spec is
# ambiguous. (The spec does not appear to imagine that makernotes would
# follow EXIF format internally. Once they did, it's ambiguous whether
# the offsets should be from the header at the start of all the EXIF info,
# or from the header at the start of the makernote.)
def decode_maker_note(self):
note=self.tags['EXIF MakerNote']
make=self.tags['Image Make'].printable
model=self.tags['Image Model'].printable
# Nikon
# The maker note usually starts with the word Nikon, followed by the
# type of the makernote (1 or 2, as a short). If the word Nikon is
# not at the start of the makernote, it's probably type 2, since some
# cameras work that way.
if make in ('NIKON', 'NIKON CORPORATION'):
if note.values[0:7] == [78, 105, 107, 111, 110, 00, 01]:
if self.debug:
print "Looks like a type 1 Nikon MakerNote."
self.dump_IFD(note.field_offset+8, 'MakerNote',
dict=MAKERNOTE_NIKON_OLDER_TAGS)
elif note.values[0:7] == [78, 105, 107, 111, 110, 00, 02]:
if self.debug:
print "Looks like a labeled type 2 Nikon MakerNote"
if note.values[12:14] != [0, 42] and note.values[12:14] != [42L, 0L]:
raise ValueError, "Missing marker tag '42' in MakerNote."
# skip the Makernote label and the TIFF header
self.dump_IFD(note.field_offset+10+8, 'MakerNote',
dict=MAKERNOTE_NIKON_NEWER_TAGS, relative=1)
else:
# E99x or D1
if self.debug:
print "Looks like an unlabeled type 2 Nikon MakerNote"
self.dump_IFD(note.field_offset, 'MakerNote',
dict=MAKERNOTE_NIKON_NEWER_TAGS)
return
# Olympus
if make[:7] == 'OLYMPUS':
self.dump_IFD(note.field_offset+8, 'MakerNote',
dict=MAKERNOTE_OLYMPUS_TAGS)
return
# Casio
if make == 'Casio':
self.dump_IFD(note.field_offset, 'MakerNote',
dict=MAKERNOTE_CASIO_TAGS)
return
# Fujifilm
if make == 'FUJIFILM':
# bug: everything else is "Motorola" endian, but the MakerNote
# is "Intel" endian
endian=self.endian
self.endian='I'
# bug: IFD offsets are from beginning of MakerNote, not
# beginning of file header
offset=self.offset
self.offset+=note.field_offset
# process note with bogus values (note is actually at offset 12)
self.dump_IFD(12, 'MakerNote', dict=MAKERNOTE_FUJIFILM_TAGS)
# reset to correct values
self.endian=endian
self.offset=offset
return
# Canon
if make == 'Canon':
self.dump_IFD(note.field_offset, 'MakerNote',
dict=MAKERNOTE_CANON_TAGS)
for i in (('MakerNote Tag 0x0001', MAKERNOTE_CANON_TAG_0x001),
('MakerNote Tag 0x0004', MAKERNOTE_CANON_TAG_0x004)):
self.canon_decode_tag(self.tags[i[0]].values, i[1])
return
# decode Canon MakerNote tag based on offset within tag
# see http://www.burren.cx/david/canon.html by David Burren
def canon_decode_tag(self, value, dict):
for i in range(1, len(value)):
x=dict.get(i, ('Unknown', ))
if self.debug:
print i, x
name=x[0]
if len(x) > 1:
val=x[1].get(value[i], 'Unknown')
else:
val=value[i]
# it's not a real IFD Tag but we fake one to make everybody
# happy. this will have a "proprietary" type
self.tags['MakerNote '+name]=IFD_Tag(str(val), None, 0, None,
None, None)
# process an image file (expects an open file object)
# this is the function that has to deal with all the arbitrary nasty bits
# of the EXIF standard
def process_file(file, debug=0):
# determine whether it's a JPEG or TIFF
data=file.read(12)
if data[0:4] in ['II*\x00', 'MM\x00*']:
# it's a TIFF file
file.seek(0)
endian=file.read(1)
file.read(1)
offset=0
elif data[0:2] == '\xFF\xD8':
# it's a JPEG file
# skip JFIF style header(s)
fake_exif=0
while data[2] == '\xFF' and data[6:10] in ('JFIF', 'JFXX', 'OLYM'):
length=ord(data[4])*256+ord(data[5])
file.read(length-8)
# fake an EXIF beginning of file
data='\xFF\x00'+file.read(10)
fake_exif=1
if data[2] == '\xFF' and data[6:10] == 'Exif':
# detected EXIF header
offset=file.tell()
endian=file.read(1)
else:
# no EXIF information
return {}
else:
# file format not recognized
return {}
# deal with the EXIF info we found
if debug:
print {'I': 'Intel', 'M': 'Motorola'}[endian], 'format'
hdr=EXIF_header(file, endian, offset, fake_exif, debug)
ifd_list=hdr.list_IFDs()
ctr=0
for i in ifd_list:
if ctr == 0:
IFD_name='Image'
elif ctr == 1:
IFD_name='Thumbnail'
thumb_ifd=i
else:
IFD_name='IFD %d' % ctr
if debug:
print ' IFD %d (%s) at offset %d:' % (ctr, IFD_name, i)
hdr.dump_IFD(i, IFD_name)
# EXIF IFD
exif_off=hdr.tags.get(IFD_name+' ExifOffset')
if exif_off:
if debug:
print ' EXIF SubIFD at offset %d:' % exif_off.values[0]
hdr.dump_IFD(exif_off.values[0], 'EXIF')
# Interoperability IFD contained in EXIF IFD
intr_off=hdr.tags.get('EXIF SubIFD InteroperabilityOffset')
if intr_off:
if debug:
print ' EXIF Interoperability SubSubIFD at offset %d:' \
% intr_off.values[0]
hdr.dump_IFD(intr_off.values[0], 'EXIF Interoperability',
dict=INTR_TAGS)
# GPS IFD
gps_off=hdr.tags.get(IFD_name+' GPSInfo')
if gps_off:
if debug:
print ' GPS SubIFD at offset %d:' % gps_off.values[0]
hdr.dump_IFD(gps_off.values[0], 'GPS', dict=GPS_TAGS)
ctr+=1
# extract uncompressed TIFF thumbnail
thumb=hdr.tags.get('Thumbnail Compression')
if thumb and thumb.printable == 'Uncompressed TIFF':
hdr.extract_TIFF_thumbnail(thumb_ifd)
# JPEG thumbnail (thankfully the JPEG data is stored as a unit)
thumb_off=hdr.tags.get('Thumbnail JPEGInterchangeFormat')
if thumb_off:
file.seek(offset+thumb_off.values[0])
size=hdr.tags['Thumbnail JPEGInterchangeFormatLength'].values[0]
hdr.tags['JPEGThumbnail']=file.read(size)
# deal with MakerNote contained in EXIF IFD
if hdr.tags.has_key('EXIF MakerNote'):
hdr.decode_maker_note()
# Sometimes in a TIFF file, a JPEG thumbnail is hidden in the MakerNote
# since it's not allowed in a uncompressed TIFF IFD
if not hdr.tags.has_key('JPEGThumbnail'):
thumb_off=hdr.tags.get('MakerNote JPEGThumbnail')
if thumb_off:
file.seek(offset+thumb_off.values[0])
hdr.tags['JPEGThumbnail']=file.read(thumb_off.field_length)
return hdr.tags
# library test/debug function (dump given files)
if __name__ == '__main__':
    # command-line driver: dump the EXIF tags of each file named on the
    # command line (thumbnail data itself is skipped)
    import sys
    if len(sys.argv) < 2:
        print 'Usage: %s files...\n' % sys.argv[0]
        sys.exit(0)
    for filename in sys.argv[1:]:
        try:
            file=open(filename, 'rb')
        except:
            # unreadable file: report and move on to the next argument
            print filename, 'unreadable'
            print
            continue
        print filename+':'
        # data=process_file(file, 1) # with debug info
        data=process_file(file)
        if not data:
            print 'No EXIF information found'
            continue
        # print tags sorted by key, excluding the raw thumbnail blobs
        x=data.keys()
        x.sort()
        for i in x:
            if i in ('JPEGThumbnail', 'TIFFThumbnail'):
                continue
            try:
                print ' %s (%s): %s' % \
                      (i, FIELD_TYPES[data[i].field_type][2], data[i].printable)
            except:
                # tag that cannot be formatted; show it raw rather than crash
                print 'error', i, '"', data[i], '"'
        if data.has_key('JPEGThumbnail'):
            print 'File has JPEG thumbnail'
        print
| {
"repo_name": "dbaty/Lasco",
"path": "lasco/ext/exif.py",
"copies": "1",
"size": "42067",
"license": "bsd-3-clause",
"hash": -7022373962422283000,
"line_mean": 34.0558333333,
"line_max": 85,
"alpha_frac": 0.5145601065,
"autogenerated": false,
"ratio": 3.496259973404255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.947731076298149,
"avg_score": 0.006701863384552888,
"num_lines": 1200
} |
"""An experimental library for combining (g)VCFS into sparse matrix tables"""
# these are necessary for the driver script included at the end of this file
import math
import uuid
from typing import Optional, List, Tuple, Dict
import hail as hl
from hail import MatrixTable, Table
from hail.expr import StructExpression
from hail.expr.expressions import expr_bool, expr_str
from hail.genetics.reference_genome import reference_genome_type
from hail.ir import Apply, TableMapRows, MatrixKeyRowsBy, TopLevelReference
from hail.typecheck import oneof, sequenceof, typecheck
from hail.utils.java import info
_transform_rows_function_map = {}
_merge_function_map = {}
@typecheck(string=expr_str, has_non_ref=expr_bool)
def parse_as_ints(string, has_non_ref):
    """Parse a pipe-delimited allele-specific annotation into an array of int32.

    Empty or '.' entries become missing; the trailing <NON_REF> entry is
    dropped when `has_non_ref` is true.
    """
    pieces = string.split(r'\|')
    trimmed = hl.cond(has_non_ref, pieces[:-1], pieces)

    def to_int32(piece):
        is_blank = (hl.len(piece) == 0) | (piece == '.')
        return hl.cond(is_blank, hl.null(hl.tint32), hl.int32(piece))

    return trimmed.map(to_int32)
@typecheck(string=expr_str, has_non_ref=expr_bool)
def parse_as_doubles(string, has_non_ref):
    """Parse a pipe-delimited allele-specific annotation into an array of float64.

    Empty or '.' entries become missing; the trailing <NON_REF> entry is
    dropped when `has_non_ref` is true.
    """
    pieces = string.split(r'\|')
    trimmed = hl.cond(has_non_ref, pieces[:-1], pieces)

    def to_float64(piece):
        is_blank = (hl.len(piece) == 0) | (piece == '.')
        return hl.cond(is_blank, hl.null(hl.tfloat64), hl.float64(piece))

    return trimmed.map(to_float64)
@typecheck(string=expr_str, has_non_ref=expr_bool)
def parse_as_sb_table(string, has_non_ref):
    """Parse AS_SB_TABLE: pipe-delimited groups of comma-separated int32 counts.

    The trailing <NON_REF> group is dropped when `has_non_ref` is true.
    """
    groups = string.split(r'\|')
    kept = hl.cond(has_non_ref, groups[:-1], groups)
    return kept.map(lambda group: group.split(",").map(hl.int32))
@typecheck(string=expr_str, has_non_ref=expr_bool)
def parse_as_ranksum(string, has_non_ref):
    """Parse a RankSum annotation into an array of (float64, int32) tuples.

    Blank or '.' entries become missing, as do malformed entries that do
    not split into exactly two comma-separated parts (e.g. 'NaN').  The
    trailing <NON_REF> entry is dropped when `has_non_ref` is true.
    """
    typ = hl.ttuple(hl.tfloat64, hl.tint32)
    pieces = string.split(r'\|')
    pieces = hl.cond(has_non_ref, pieces[:-1], pieces)

    def to_tuple(piece):
        def from_parts(parts):
            # bad field, possibly 'NaN': just set it null
            return hl.cond(hl.len(parts) != 2,
                           hl.null(typ),
                           hl.tuple([hl.float64(parts[0]), hl.int32(parts[1])]))
        is_blank = (hl.len(piece) == 0) | (piece == '.')
        return hl.cond(is_blank, hl.null(typ), hl.rbind(piece.split(','), from_parts))

    return pieces.map(to_tuple)
# Dispatch table: allele-specific INFO fields that are encoded as
# pipe-delimited strings in GVCFs, mapped to the parser that decodes them.
_as_function_map = {
    'AS_QUALapprox': parse_as_ints,
    'AS_RAW_MQ': parse_as_doubles,
    'AS_RAW_MQRankSum': parse_as_ranksum,
    'AS_RAW_ReadPosRankSum': parse_as_ranksum,
    'AS_SB_TABLE': parse_as_sb_table,
    'AS_VarDP': parse_as_ints,
}
def parse_as_fields(info, has_non_ref):
    """Rebuild an INFO struct, decoding the allele-specific fields listed in
    _as_function_map and passing all other fields through unchanged."""
    parsed = {}
    for field in info:
        if field in _as_function_map:
            parsed[field] = _as_function_map[field](info[field], has_non_ref)
        else:
            parsed[field] = info[field]
    return hl.struct(**parsed)
def localize(mt):
    """Lower a MatrixTable to its localized Table form; pass Tables through."""
    if not isinstance(mt, MatrixTable):
        return mt
    return mt._localize_entries('__entries', '__cols')
def unlocalize(mt):
    """Raise a localized Table back to a MatrixTable; pass MatrixTables through."""
    if not isinstance(mt, Table):
        return mt
    return mt._unlocalize_entries('__entries', '__cols', ['s'])
@typecheck(mt=oneof(Table, MatrixTable), info_to_keep=sequenceof(str))
def transform_gvcf(mt, info_to_keep=[]) -> Table:
    """Transforms a gvcf into a sparse matrix table.

    The input to this should be some result of either :func:`.import_vcf` or
    :func:`.import_gvcfs` with ``array_elements_required=False``.

    There is an assumption that this function will be called on a matrix table
    with one column (or a localized table version of the same).

    Parameters
    ----------
    mt : :obj:`Union[Table, MatrixTable]`
        The gvcf being transformed, if it is a table, then it must be a localized matrix table with
        the entries array named ``__entries``
    info_to_keep : :obj:`List[str]`
        Any ``INFO`` fields in the gvcf that are to be kept and put in the ``gvcf_info`` entry
        field. By default, all ``INFO`` fields except ``END`` and ``DP`` are kept.

    Returns
    -------
    :obj:`.Table`
        A localized matrix table that can be used as part of the input to :func:`.combine_gvcfs`

    Notes
    -----
    This function will parse the following allele specific annotations from
    pipe delimited strings into proper values. ::

        AS_QUALapprox
        AS_RAW_MQ
        AS_RAW_MQRankSum
        AS_RAW_ReadPosRankSum
        AS_SB_TABLE
        AS_VarDP
    """
    if not info_to_keep:
        info_to_keep = [name for name in mt.info if name not in ['END', 'DP']]
    mt = localize(mt)
    # compile the transforming hail function once per row type and cache it
    if mt.row.dtype not in _transform_rows_function_map:
        def get_lgt(e, n_alleles, has_non_ref, row):
            # map a GT call to a local GT, nulling out calls that involve
            # the <NON_REF> allele
            index = e.GT.unphased_diploid_gt_index()
            n_no_nonref = n_alleles - hl.int(has_non_ref)
            triangle_without_nonref = hl.triangle(n_no_nonref)
            return (hl.case()
                    .when(index < triangle_without_nonref, e.GT)
                    .when(index < hl.triangle(n_alleles), hl.null('call'))
                    .or_error('invalid GT ' + hl.str(e.GT) + ' at site ' + hl.str(row.locus)))
        def make_entry_struct(e, alleles_len, has_non_ref, row):
            # build the sparse entry: localized fields (LA/LGT/LAD/LPGT/LPL),
            # RGQ, END, parsed gvcf_info, plus any unhandled FORMAT fields
            handled_fields = dict()
            handled_names = {'LA', 'gvcf_info',
                             'END',
                             'LAD', 'AD',
                             'LGT', 'GT',
                             'LPL', 'PL',
                             'LPGT', 'PGT'}
            if 'END' not in row.info:
                raise hl.utils.FatalError("the Hail GVCF combiner expects GVCFs to have an 'END' field in INFO.")
            if 'GT' not in e:
                raise hl.utils.FatalError("the Hail GVCF combiner expects GVCFs to have a 'GT' field in FORMAT.")
            handled_fields['LA'] = hl.range(0, alleles_len - hl.cond(has_non_ref, 1, 0))
            handled_fields['LGT'] = get_lgt(e, alleles_len, has_non_ref, row)
            if 'AD' in e:
                handled_fields['LAD'] = hl.cond(has_non_ref, e.AD[:-1], e.AD)
            if 'PGT' in e:
                handled_fields['LPGT'] = e.PGT
            if 'PL' in e:
                handled_fields['LPL'] = hl.cond(has_non_ref,
                                                hl.cond(alleles_len > 2,
                                                        e.PL[:-alleles_len],
                                                        hl.null(e.PL.dtype)),
                                                hl.cond(alleles_len > 1,
                                                        e.PL,
                                                        hl.null(e.PL.dtype)))
                handled_fields['RGQ'] = hl.cond(
                    has_non_ref,
                    e.PL[hl.call(0, alleles_len - 1).unphased_diploid_gt_index()],
                    hl.null(e.PL.dtype.element_type))
            handled_fields['END'] = row.info.END
            # gvcf_info is only kept at variant rows (rows without END)
            handled_fields['gvcf_info'] = (hl.case()
                                           .when(hl.is_missing(row.info.END),
                                                 hl.struct(**(
                                                     parse_as_fields(
                                                         row.info.select(*info_to_keep),
                                                         has_non_ref)
                                                 )))
                                           .or_missing())
            pass_through_fields = {k: v for k, v in e.items() if k not in handled_names}
            return hl.struct(**handled_fields, **pass_through_fields)
        f = hl.experimental.define_function(
            lambda row: hl.rbind(
                hl.len(row.alleles), '<NON_REF>' == row.alleles[-1],
                lambda alleles_len, has_non_ref: hl.struct(
                    locus=row.locus,
                    alleles=hl.cond(has_non_ref, row.alleles[:-1], row.alleles),
                    rsid=row.rsid,
                    __entries=row.__entries.map(
                        lambda e: make_entry_struct(e, alleles_len, has_non_ref, row)))),
            mt.row.dtype)
        _transform_rows_function_map[mt.row.dtype] = f
    transform_row = _transform_rows_function_map[mt.row.dtype]
    return Table(TableMapRows(mt._tir, Apply(transform_row._name, transform_row._ret_type, TopLevelReference('row'))))
def transform_one(mt, info_to_keep=[]) -> Table:
    """Thin wrapper that forwards to :func:`.transform_gvcf`."""
    return transform_gvcf(mt, info_to_keep=info_to_keep)
def combine(ts):
    """Merge a multi-way-zipped localized table (rows contain a 'data' array of
    per-input rows, globals contain 'g' with per-input '__cols') into a single
    localized sparse matrix table with unified alleles and renumbered entries.
    """
    def merge_alleles(alleles):
        # compute the merged allele list: pick the longest ref, extend each
        # input's alts onto it, and return both the global list and the
        # per-input local lists
        from hail.expr.functions import _num_allele_type, _allele_ints
        return hl.rbind(
            alleles.map(lambda a: hl.or_else(a[0], ''))
            .fold(lambda s, t: hl.cond(hl.len(s) > hl.len(t), s, t), ''),
            lambda ref:
            hl.rbind(
                alleles.map(
                    lambda al: hl.rbind(
                        al[0],
                        lambda r:
                        hl.array([ref]).extend(
                            al[1:].map(
                                lambda a:
                                hl.rbind(
                                    _num_allele_type(r, a),
                                    lambda at:
                                    hl.cond(
                                        (_allele_ints['SNP'] == at)
                                        | (_allele_ints['Insertion'] == at)
                                        | (_allele_ints['Deletion'] == at)
                                        | (_allele_ints['MNP'] == at)
                                        | (_allele_ints['Complex'] == at),
                                        a + ref[hl.len(r):],
                                        a)))))),
                lambda lal:
                hl.struct(
                    globl=hl.array([ref]).extend(hl.array(hl.set(hl.flatten(lal)).remove(ref))),
                    local=lal)))
    def renumber_entry(entry, old_to_new) -> StructExpression:
        # global index of alternate (non-ref) alleles
        return entry.annotate(LA=entry.LA.map(lambda lak: old_to_new[lak]))
    # compile the merge function once per (row, globals) type pair and cache it
    if (ts.row.dtype, ts.globals.dtype) not in _merge_function_map:
        f = hl.experimental.define_function(
            lambda row, gbl:
            hl.rbind(
                merge_alleles(row.data.map(lambda d: d.alleles)),
                lambda alleles:
                hl.struct(
                    locus=row.locus,
                    alleles=alleles.globl,
                    rsid=hl.find(hl.is_defined, row.data.map(lambda d: d.rsid)),
                    __entries=hl.bind(
                        lambda combined_allele_index:
                        hl.range(0, hl.len(row.data)).flatmap(
                            lambda i:
                            hl.cond(hl.is_missing(row.data[i].__entries),
                                    hl.range(0, hl.len(gbl.g[i].__cols))
                                    .map(lambda _: hl.null(row.data[i].__entries.dtype.element_type)),
                                    hl.bind(
                                        lambda old_to_new: row.data[i].__entries.map(
                                            lambda e: renumber_entry(e, old_to_new)),
                                        hl.range(0, hl.len(alleles.local[i])).map(
                                            lambda j: combined_allele_index[alleles.local[i][j]])))),
                        hl.dict(hl.range(0, hl.len(alleles.globl)).map(
                            lambda j: hl.tuple([alleles.globl[j], j])))))),
            ts.row.dtype, ts.globals.dtype)
        _merge_function_map[(ts.row.dtype, ts.globals.dtype)] = f
    merge_function = _merge_function_map[(ts.row.dtype, ts.globals.dtype)]
    ts = Table(TableMapRows(ts._tir, Apply(merge_function._name,
                                           merge_function._ret_type,
                                           TopLevelReference('row'),
                                           TopLevelReference('global'))))
    # concatenate the per-input column lists into one combined column list
    return ts.transmute_globals(__cols=hl.flatten(ts.g.map(lambda g: g.__cols)))
@typecheck(mts=sequenceof(oneof(Table, MatrixTable)))
def combine_gvcfs(mts):
    """Merges gvcfs and/or sparse matrix tables.

    Parameters
    ----------
    mts : :obj:`List[Union[Table, MatrixTable]]`
        The matrix tables (or localized versions) to combine

    Returns
    -------
    :class:`.MatrixTable`

    Notes
    -----
    All of the input tables/matrix tables must have the same partitioning. This
    module provides no method of repartitioning data.
    """
    localized = [localize(mt) for mt in mts]
    zipped = hl.Table.multi_way_zip_join(localized, 'data', 'g')
    return unlocalize(combine(zipped))
@typecheck(ht=hl.Table, n=int, reference_genome=reference_genome_type)
def calculate_new_intervals(ht, n, reference_genome):
    """takes a table, keyed by ['locus', ...] and produces a list of intervals suitable
    for repartitioning a combiner matrix table

    Parameters
    ----------
    ht : :class:`.Table`
        Table / Rows Table to compute new intervals for
    n : :obj:`int`
        Number of rows each partition should have, (last partition may be smaller)
    reference_genome: :obj:`str` or :class:`.ReferenceGenome`, optional
        Reference genome to use.

    Returns
    -------
    :obj:`List[Interval]`
    """
    assert list(ht.key) == ['locus']
    assert ht.locus.dtype == hl.tlocus(reference_genome=reference_genome)
    # sentinel end locus: the last position of the last contig
    end = hl.Locus(reference_genome.contigs[-1],
                   reference_genome.lengths[reference_genome.contigs[-1]],
                   reference_genome=reference_genome)
    n_rows = ht.count()
    if n_rows == 0:
        raise ValueError('empty table!')
    ht = ht.select()
    ht = ht.annotate(x=hl.scan.count())
    ht = ht.annotate(y=ht.x + 1)
    # keep only the last row of each n-row window, plus the very last row
    ht = ht.filter((ht.x // n != ht.y // n) | (ht.x == (n_rows - 1)))
    ht = ht.select()
    # each interval starts one position after the previous kept locus
    # (or at global position 0 for the first interval)
    ht = ht.annotate(start=hl.or_else(
        hl.scan._prev_nonnull(hl.locus_from_global_position(ht.locus.global_position() + 1,
                                                            reference_genome=reference_genome)),
        hl.locus_from_global_position(0, reference_genome=reference_genome)))
    ht = ht.key_by()
    ht = ht.select(interval=hl.interval(start=ht.start, end=ht.locus, includes_end=True))
    intervals = ht.aggregate(hl.agg.collect(ht.interval))
    # append a final interval covering everything after the last kept locus
    last_st = hl.eval(
        hl.locus_from_global_position(hl.literal(intervals[-1].end).global_position() + 1,
                                      reference_genome=reference_genome))
    interval = hl.Interval(start=last_st, end=end, includes_end=True)
    intervals.append(interval)
    return intervals
@typecheck(reference_genome=reference_genome_type)
def default_exome_intervals(reference_genome) -> List[hl.utils.Interval]:
    """Create a list of locus intervals suitable for importing and merging
    exome gvcfs: one whole-chromosome partition per contig, since exomes
    are small.

    Parameters
    ----------
    reference_genome: :obj:`str` or :class:`.ReferenceGenome`, optional
        Reference genome to use. NOTE: only GRCh37 and GRCh38 references
        are supported.

    Returns
    -------
    :obj:`List[Interval]`
    """
    if reference_genome.name == 'GRCh37':
        contigs = [str(i) for i in range(1, 23)] + ['X', 'Y', 'MT']
    elif reference_genome.name == 'GRCh38':
        contigs = ['chr' + str(i) for i in range(1, 23)] + ['chrX', 'chrY', 'chrM']
    else:
        raise ValueError(
            f"Invalid reference genome '{reference_genome.name}', only 'GRCh37' and 'GRCh38' are supported")
    intervals = []
    for contig in contigs:
        start = hl.Locus(contig=contig, position=1, reference_genome=reference_genome)
        stop = hl.Locus.parse(f'{contig}:END', reference_genome=reference_genome)
        intervals.append(hl.Interval(start=start, end=stop, includes_end=True))
    return intervals
# END OF VCF COMBINER LIBRARY, BEGINNING OF BEST PRACTICES SCRIPT #
class Merge(object):
    """A single merge operation in the combiner plan.

    Records which files feed the merge and their combined input size.
    """
    def __init__(self,
                 inputs: List[int],
                 input_total_size: int):
        # indices (into the current file list) of the inputs to this merge
        self.inputs: List[int] = inputs
        # combined size of those inputs
        self.input_total_size: int = input_total_size
class Job(object):
    """A batch of merges that are executed and written out together."""
    def __init__(self, merges: List[Merge]):
        self.merges: List[Merge] = merges
        # total input size across every merge in this job
        total = 0
        for merge in merges:
            total += merge.input_total_size
        self.input_total_size = total
class Phase(object):
    """One level of the combiner's merge tree: the jobs belonging to it."""
    def __init__(self, jobs: List[Job]):
        self.jobs: List[Job] = jobs
class CombinerPlan(object):
    """The full hierarchical merge plan produced by CombinerConfig.plan."""
    def __init__(self,
                 file_size: List[List[int]],
                 phases: List[Phase]):
        self.file_size = file_size  # per-phase list of file sizes
        self.phases = phases
        # len(file_size[0]) is the number of files entering the first phase
        self.merge_per_phase = len(file_size[0])
        self.total_merge = self.merge_per_phase * len(phases)
class CombinerConfig(object):
    """Tuning parameters for the hierarchical GVCF combiner and the planner
    that turns an input count into a tree of merge phases/jobs."""
    # class-level defaults, also used by run_combiner's keyword arguments
    default_branch_factor = 100
    default_batch_size = 100
    default_target_records = 30_000
    def __init__(self,
                 branch_factor: int = default_branch_factor,
                 batch_size: int = default_batch_size,
                 target_records: int = default_target_records):
        # branch_factor: max inputs per merge; batch_size: max merges per job;
        # target_records: desired rows per partition after the first phase
        self.branch_factor: int = branch_factor
        self.batch_size: int = batch_size
        self.target_records: int = target_records
    @classmethod
    def default(cls) -> 'CombinerConfig':
        """Return a config populated entirely with the class defaults."""
        return CombinerConfig()
    def plan(self, n_inputs: int) -> CombinerPlan:
        """Build and log the merge plan for `n_inputs` input files.

        Repeatedly groups the current file list into merges of at most
        `branch_factor` inputs, batched into jobs of at most `batch_size`
        merges, until a single file remains.
        """
        assert n_inputs > 0
        def int_ceil(x):
            return int(math.ceil(x))
        tree_height = int_ceil(math.log(n_inputs, self.branch_factor))
        phases: List[Phase] = []
        file_size: List[List[int]] = []  # List of file size per phase
        file_size.append([1 for _ in range(n_inputs)])
        while len(file_size[-1]) > 1:
            last_stage_files = file_size[-1]
            n = len(last_stage_files)
            i = 0
            jobs = []
            while (i < n):
                job = []
                job_i = 0
                while job_i < self.batch_size and i < n:
                    merge = []
                    merge_i = 0
                    merge_size = 0
                    while merge_i < self.branch_factor and i < n:
                        merge_size += last_stage_files[i]
                        merge.append(i)
                        merge_i += 1
                        i += 1
                    job.append(Merge(merge, merge_size))
                    job_i += 1
                jobs.append(Job(job))
            file_size.append([merge.input_total_size for job in jobs for merge in job.merges])
            phases.append(Phase(jobs))
        assert len(phases) == tree_height
        # sanity check: total size is conserved at every level of the tree
        for layer in file_size:
            assert sum(layer) == n_inputs
        phase_strs = []
        total_jobs = 0
        for i, phase in enumerate(phases):
            n = len(phase.jobs)
            job_str = hl.utils.misc.plural('job', n)
            n_files_produced = len(file_size[i + 1])
            adjective = 'final' if n_files_produced == 1 else 'intermediate'
            file_str = hl.utils.misc.plural('file', n_files_produced)
            phase_strs.append(
                f'\n Phase {i + 1}: {n} {job_str} corresponding to {n_files_produced} {adjective} output {file_str}.')
            total_jobs += n
        info(f"GVCF combiner plan:\n"
             f" Branch factor: {self.branch_factor}\n"
             f" Batch size: {self.batch_size}\n"
             f" Combining {n_inputs} input files in {tree_height} phases with {total_jobs} total jobs.{''.join(phase_strs)}\n")
        return CombinerPlan(file_size, phases)
def run_combiner(sample_paths: List[str],
                 out_file: str,
                 tmp_path: str,
                 intervals: Optional[List[hl.utils.Interval]] = None,
                 header: Optional[str] = None,
                 sample_names: Optional[List[str]] = None,
                 branch_factor: int = CombinerConfig.default_branch_factor,
                 batch_size: int = CombinerConfig.default_batch_size,
                 target_records: int = CombinerConfig.default_target_records,
                 overwrite: bool = False,
                 reference_genome: str = 'default',
                 contig_recoding: Optional[Dict[str, str]] = None,
                 key_by_locus_and_alleles: bool = False):
    """Run the Hail VCF combiner, performing a hierarchical merge to create a combined sparse matrix table.

    Parameters
    ----------
    sample_paths : :obj:`list` of :obj:`str`
        Paths to individual GVCFs.
    out_file : :obj:`str`
        Path to final combined matrix table.
    tmp_path : :obj:`str`
        Path for intermediate output.
    intervals : list of :class:`.Interval` or None
        Partitioning with which to import GVCFs in first phase of combiner.
    header : :obj:`str` or None
        External header file to use as GVCF header for all inputs. If defined, `sample_names` must be defined as well.
    sample_names: list of :obj:`str` or None
        Sample names, to be used with `header`.
    branch_factor : :obj:`int`
        Combiner branch factor.
    batch_size : :obj:`int`
        Combiner batch size.
    target_records : :obj:`int`
        Target records per partition in each combiner phase after the first.
    overwrite : :obj:`bool`
        Overwrite output file, if it exists.
    reference_genome : :obj:`str`
        Reference genome for GVCF import.
    contig_recoding: :obj:`dict` of (:obj:`str`, :obj:`str`), optional
        Mapping from contig name in gVCFs to contig name the reference
        genome. All contigs must be present in the
        `reference_genome`, so this is useful for mapping
        differently-formatted data onto known references.
    key_by_locus_and_alleles : :obj:`bool`
        Key by both locus and alleles in the final output.

    Returns
    -------
    None
    """
    # unique temporary directory for this run's intermediate matrix tables
    tmp_path += f'/combiner-temporary/{uuid.uuid4()}/'
    if header is not None:
        assert sample_names is not None
        assert len(sample_names) == len(sample_paths)
    # FIXME: this should be hl.default_reference().even_intervals_contig_boundary
    intervals = intervals or default_exome_intervals(reference_genome)
    config = CombinerConfig(branch_factor=branch_factor,
                            batch_size=batch_size,
                            target_records=target_records)
    plan = config.plan(len(sample_paths))
    files_to_merge = sample_paths
    n_phases = len(plan.phases)
    total_ops = len(files_to_merge) * n_phases
    total_work_done = 0
    for phase_i, phase in enumerate(plan.phases):
        phase_i += 1  # used for info messages, 1-indexed for readability
        n_jobs = len(phase.jobs)
        merge_str = 'input GVCFs' if phase_i == 1 else 'intermediate sparse matrix tables'
        job_str = hl.utils.misc.plural('job', n_jobs)
        info(f"Starting phase {phase_i}/{n_phases}, merging {len(files_to_merge)} {merge_str} in {n_jobs} {job_str}.")
        if phase_i > 1:
            # repartition later phases based on the row density of the
            # first intermediate file
            intervals = calculate_new_intervals(hl.read_matrix_table(files_to_merge[0]).rows(),
                                                config.target_records,
                                                reference_genome=reference_genome)
        new_files_to_merge = []
        for job_i, job in enumerate(phase.jobs):
            job_i += 1  # used for info messages, 1-indexed for readability
            n_merges = len(job.merges)
            merge_str = hl.utils.misc.plural('file', n_merges)
            pct_total = 100 * job.input_total_size / total_ops
            info(
                f"Starting phase {phase_i}/{n_phases}, job {job_i}/{len(phase.jobs)} to create {n_merges} merged {merge_str}, corresponding to ~{pct_total:.1f}% of total I/O.")
            merge_mts: List[MatrixTable] = []
            for merge in job.merges:
                inputs = [files_to_merge[i] for i in merge.inputs]
                # phase 1 reads raw GVCFs; later phases read intermediate
                # matrix tables written by the previous phase
                if phase_i == 1:
                    mts = [transform_gvcf(vcf)
                           for vcf in hl.import_gvcfs(inputs, intervals, array_elements_required=False,
                                                      _external_header=header,
                                                      _external_sample_ids=[sample_names[i] for i in
                                                                            merge.inputs] if header is not None else None,
                                                      reference_genome=reference_genome,
                                                      contig_recoding=contig_recoding)]
                else:
                    mts = [hl.read_matrix_table(path, _intervals=intervals) for path in inputs]
                merge_mts.append(combine_gvcfs(mts))
            if phase_i == n_phases:  # final merge!
                assert n_jobs == 1
                assert len(merge_mts) == 1
                [final_mt] = merge_mts
                if key_by_locus_and_alleles:
                    final_mt = MatrixTable(MatrixKeyRowsBy(final_mt._mir, ['locus', 'alleles'], is_sorted=True))
                final_mt.write(out_file, overwrite=overwrite)
                new_files_to_merge = [out_file]
                info(f"Finished phase {phase_i}/{n_phases}, job {job_i}/{len(phase.jobs)}, 100% of total I/O finished.")
                break
            # write this job's merged intermediates under a per-job directory
            tmp = f'{tmp_path}_phase{phase_i}_job{job_i}/'
            hl.experimental.write_matrix_tables(merge_mts, tmp, overwrite=True)
            pad = len(str(len(merge_mts)))
            new_files_to_merge.extend(tmp + str(n).zfill(pad) + '.mt' for n in range(len(merge_mts)))
            total_work_done += job.input_total_size
            info(
                f"Finished {phase_i}/{n_phases}, job {job_i}/{len(phase.jobs)}, {100 * total_work_done / total_ops:.1f}% of total I/O finished.")
        info(f"Finished phase {phase_i}/{n_phases}.")
        files_to_merge = new_files_to_merge
    assert files_to_merge == [out_file]
    info("Finished!")
def parse_sample_mapping(sample_map_path: str) -> Tuple[List[str], List[str]]:
    """Read a tab-separated sample map file into parallel (names, paths) lists.

    Each line of the file is '<sample name>\\t<gvcf path>'.
    """
    names: List[str] = []
    paths: List[str] = []
    with hl.hadoop_open(sample_map_path) as mapping_file:
        for line in mapping_file:
            name, path = line.strip().split('\t')
            names.append(name)
            paths.append(path)
    return names, paths
| {
"repo_name": "hail-is/hail",
"path": "hail/python/hail/experimental/vcf_combiner/vcf_combiner.py",
"copies": "1",
"size": "26027",
"license": "mit",
"hash": -1509807225512726000,
"line_mean": 40.8440514469,
"line_max": 176,
"alpha_frac": 0.5374418873,
"autogenerated": false,
"ratio": 3.737363584147042,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9770198420720027,
"avg_score": 0.0009214101454031727,
"num_lines": 622
} |
"""An experimental SVG renderer for the ReportLab graphics framework.
This will create SVG code from the ReportLab Graphics API (RLG).
To read existing SVG code and convert it into ReportLab graphics
objects download the svglib module here:
http://python.net/~gherman/#svglib
"""
import math, string, types, sys, os
from types import StringType
from operator import getitem
from reportlab.pdfbase.pdfmetrics import stringWidth # for font info
from reportlab.lib.utils import fp_str
from reportlab.lib.colors import black
from reportlab.graphics.renderbase import StateTracker, getStateDelta, Renderer, renderScaledDrawing
from reportlab.graphics.shapes import STATE_DEFAULTS, Path, UserNode
from reportlab.graphics.shapes import * # (only for test0)
from reportlab import rl_config
from reportlab.lib.utils import getStringIO
from xml.dom import getDOMImplementation
### some constants ###
sin = math.sin
cos = math.cos
pi = math.pi
LINE_STYLES = 'stroke-width stroke-linecap stroke fill stroke-dasharray'
TEXT_STYLES = 'font-family font-size'
### top-level user function ###
def drawToString(d, showBoundary=rl_config.showBoundary):
    "Returns a SVG as a string in memory, without touching the disk"
    buf = getStringIO()
    drawToFile(d, buf, showBoundary=showBoundary)
    return buf.getvalue()
def drawToFile(d, fn, showBoundary=rl_config.showBoundary):
    "Render drawing d as SVG to the open file object or filename fn."
    scaled = renderScaledDrawing(d)
    canvas = SVGCanvas((scaled.width, scaled.height))
    draw(scaled, canvas, 0, 0, showBoundary=showBoundary)
    canvas.save(fn)
def draw(drawing, canvas, x=0, y=0, showBoundary=rl_config.showBoundary):
    "Render a (scaled) drawing onto an SVGCanvas at position (x, y)."
    renderer = _SVGRenderer()
    renderer.draw(renderScaledDrawing(drawing), canvas, x, y, showBoundary=showBoundary)
### helper functions ###
def _pointsFromList(L):
"""
given a list of coordinates [x0, y0, x1, y1....]
produce a list of points [(x0,y0), (y1,y0),....]
"""
P=[]
for i in range(0,len(L), 2):
P.append((L[i], L[i+1]))
return P
def transformNode(doc, newTag, node=None, **attrDict):
    """Create a DOM element 'newTag' in 'doc', copying selected attributes.

    Each keyword argument names an attribute on the new node.  Without a
    source 'node' the keyword's value (stringified) is used directly.  With
    a source 'node', the stringified value is treated as an attribute name
    to look up on 'node'; if that lookup is empty, the string itself is
    used as a fallback.

    E.g.
        transformNode(doc, "node1", x=0, y=1+1)
            -> DOM node for <node1 x="0" y="2"/>
        transformNode(doc, "node1", node0, x="x0", y="y0")
            -> DOM node for <node1 x="[node0.x0]" y="[node0.y0]"/>
    """
    newNode = doc.createElement(newTag)
    for destName, srcValue in attrDict.items():
        text = str(srcValue)
        if node:
            copied = node.getAttribute(text)
            newNode.setAttribute(destName, copied or text)
        else:
            newNode.setAttribute(destName, text)
    return newNode
### classes ###
class SVGCanvas:
def __init__(self, size=(300,300)):
self.verbose = 0
self.width, self.height = self.size = size
# self.height = size[1]
self.code = []
self.style = {}
self.path = ''
self._strokeColor = self._fillColor = self._lineWidth = \
self._font = self._fontSize = self._lineCap = \
self._lineJoin = self._color = None
implementation = getDOMImplementation('minidom')
self.doc = implementation.createDocument(None, "svg", None)
self.svg = self.doc.documentElement
self.svg.setAttribute("width", str(size[0]))
self.svg.setAttribute("height", str(self.height))
title = self.doc.createElement('title')
text = self.doc.createTextNode('...')
title.appendChild(text)
self.svg.appendChild(title)
desc = self.doc.createElement('desc')
text = self.doc.createTextNode('...')
desc.appendChild(text)
self.svg.appendChild(desc)
self.setFont(STATE_DEFAULTS['fontName'], STATE_DEFAULTS['fontSize'])
self.setStrokeColor(STATE_DEFAULTS['strokeColor'])
self.setLineCap(2)
self.setLineJoin(0)
self.setLineWidth(1)
# Add a rectangular clipping path identical to view area.
clipPath = transformNode(self.doc, "clipPath", id="clip")
clipRect = transformNode(self.doc, "rect", x=0, y=0,
width=self.width, height=self.height)
clipPath.appendChild(clipRect)
self.svg.appendChild(clipPath)
self.groupTree = transformNode(self.doc, "g",
id="group",
transform="scale(1,-1) translate(0,-%d)" % self.height,
style="clip-path: url(#clip)")
self.svg.appendChild(self.groupTree)
self.currGroup = self.groupTree
def save(self, f=None):
if type(f) is StringType:
file = open(f, 'w')
else:
file = f
file.write("""\
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20000303 Stylable//EN" "http://www.w3.org/TR/2000/03/WD-SVG-20000303/DTD/svg-20000303-stylable.dtd" >\n""")
# use = self.doc.createElement('use')
# use.setAttribute("xlink:href", "#group")
# use.setAttribute("transform", "scale(1, -1)")
# self.svg.appendChild(use)
result = self.svg.toprettyxml(indent=" ")
file.write(result)
if file is not f:
file.close()
### helpers ###
def NOTUSED_stringWidth(self, s, font=None, fontSize=None):
"""Return the logical width of the string if it were drawn
in the current font (defaults to self.font).
"""
font = font or self._font
fontSize = fontSize or self._fontSize
return stringWidth(s, font, fontSize)
def _formatStyle(self, include=''):
str = ''
include = string.split(include)
keys = self.style.keys()
if include:
#2.1-safe version of the line below follows:
#keys = filter(lambda k: k in include, keys)
tmp = []
for word in keys:
if word in include:
tmp.append(word)
keys = tmp
items = []
for k in keys:
items.append((k, self.style[k]))
items = map(lambda i: "%s: %s"%(i[0], i[1]), items)
str = string.join(items, '; ') + ';'
return str
def _escape(self, s):
"""
return a copy of string s with special characters in postscript strings
escaped with backslashes.
Have not handled characters that are converted normally in python strings
i.e. \n -> newline
"""
str = string.replace(s, chr(0x5C), r'\\' )
str = string.replace(str, '(', '\(' )
str = string.replace(str, ')', '\)')
return str
def _genArcCode(self, x1, y1, x2, y2, startAng, extent):
"""Calculate the path for an arc inscribed in rectangle defined
by (x1,y1),(x2,y2)."""
return
#calculate semi-minor and semi-major axes of ellipse
xScale = abs((x2-x1)/2.0)
yScale = abs((y2-y1)/2.0)
#calculate centre of ellipse
x, y = (x1+x2)/2.0, (y1+y2)/2.0
codeline = 'matrix currentmatrix %s %s translate %s %s scale 0 0 1 %s %s %s setmatrix'
if extent >= 0:
arc='arc'
else:
arc='arcn'
data = (x,y, xScale, yScale, startAng, startAng+extent, arc)
return codeline % data
def _fillAndStroke(self, code, clip=0):
path = transformNode(self.doc, "path",
d=self.path, style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(path)
self.path = ''
return
"""
if self._fillColor or self._strokeColor or clip:
self.code.extend(code)
if self._fillColor:
if self._strokeColor or clip:
self.code.append("gsave")
self.setColor(self._fillColor)
self.code.append("eofill")
if self._strokeColor or clip:
self.code.append("grestore")
if self._strokeColor != None:
if clip: self.code.append("gsave")
self.setColor(self._strokeColor)
self.code.append("stroke")
if clip: self.code.append("grestore")
if clip:
self.code.append("clip")
self.code.append("newpath")
"""
### styles ###
def setLineCap(self, v):
vals = {0:'butt', 1:'round', 2:'square'}
if self._lineCap != v:
self._lineCap = v
self.style['stroke-linecap'] = vals[v]
def setLineJoin(self, v):
vals = {0:'miter', 1:'round', 2:'bevel'}
if self._lineJoin != v:
self._lineJoin = v
self.style['stroke-linecap'] = vals[v]
def setDash(self, array=[], phase=0):
"""Two notations. Pass two numbers, or an array and phase."""
join = string.join
if type(array) in (types.IntType, types.FloatType):
self.style['stroke-dasharray'] = join(map(str, ([array, phase])), ', ')
elif type(array) in (types.ListType, types.TupleType) and len(array) > 0:
assert phase >= 0, "phase is a length in user space"
self.style['stroke-dasharray'] = join(map(str, (array+[phase])), ', ')
def setStrokeColor(self, color):
self._strokeColor = color
self.setColor(color)
if color == None:
self.style['stroke'] = 'none'
else:
r, g, b = color.red, color.green, color.blue
self.style['stroke'] = 'rgb(%d%%,%d%%,%d%%)' % (r*100, g*100, b*100)
def setColor(self, color):
if self._color != color:
self._color = color
def setFillColor(self, color):
self._fillColor = color
self.setColor(color)
if color == None:
self.style['fill'] = 'none'
else:
r, g, b = color.red, color.green, color.blue
self.style['fill'] = 'rgb(%d%%,%d%%,%d%%)' % (r*100, g*100, b*100)
def setLineWidth(self, width):
if width != self._lineWidth:
self._lineWidth = width
self.style['stroke-width'] = width
def setFont(self, font, fontSize):
if self._font != font or self._fontSize != fontSize:
self._font, self._fontSize = (font, fontSize)
self.style['font-family'] = font
self.style['font-size'] = fontSize
### shapes ###
def rect(self, x1,y1, x2,y2, rx=8, ry=8):
"Draw a rectangle between x1,y1 and x2,y2."
if self.verbose: print "+++ SVGCanvas.rect"
rect = transformNode(self.doc, "rect",
x=x1, y=y1, width=x2-x1, height=y2-y1,
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(rect)
def roundRect(self, x1,y1, x2,y2, rx=8, ry=8):
"""Draw a rounded rectangle between x1,y1 and x2,y2.
Corners inset as ellipses with x-radius rx and y-radius ry.
These should have x1<x2, y1<y2, rx>0, and ry>0.
"""
rect = transformNode(self.doc, "rect",
x=x1, y=y1, width=x2-x1, height=y2-y1, rx=rx, ry=ry,
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(rect)
def drawString(self, s, x, y, angle=0):
if self.verbose: print "+++ SVGCanvas.drawString"
if self._fillColor != None:
self.setColor(self._fillColor)
s = self._escape(s)
st = self._formatStyle(TEXT_STYLES)
if angle != 0:
st = st + " rotate(%f %f %f);" % (angle, x, y)
st = st + " fill: %s;" % self.style['fill']
text = transformNode(self.doc, "text",
x=x, y=y, style=st,
transform="translate(0,%d) scale(1,-1)" % (2*y))
content = self.doc.createTextNode(s)
text.appendChild(content)
self.currGroup.appendChild(text)
def drawCentredString(self, s, x, y, angle=0,text_anchor='middle'):
if self.verbose: print "+++ SVGCanvas.drawCentredString"
if self._fillColor != None:
if not text_anchor in ['start', 'inherited']:
textLen = stringWidth(s,self._font,self._fontSize)
if text_anchor=='end':
x -= textLen
elif text_anchor=='middle':
x -= textLen/2.
else:
raise ValueError, 'bad value for text_anchor ' + str(text_anchor)
self.drawString(x,y,text,angle=angle)
def drawRightString(self, text, x, y, angle=0):
self.drawCentredString(text,x,y,angle=angle,text_anchor='end')
def comment(self, data):
"Add a comment."
comment = self.doc.createComment(data)
# self.currGroup.appendChild(comment)
def drawImage(self, image, x1, y1, x2=None, y2=None):
pass
def line(self, x1, y1, x2, y2):
if self._strokeColor != None:
if 0: # something is wrong with line in my SVG viewer...
line = transformNode(self.doc, "line",
x=x1, y=y1, x2=x2, y2=y2,
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(line)
path = transformNode(self.doc, "path",
d="M %f,%f L %f,%f Z" % (x1,y1,x2,y2),
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(path)
def ellipse(self, x1, y1, x2, y2):
"""Draw an orthogonal ellipse inscribed within the rectangle x1,y1,x2,y2.
These should have x1<x2 and y1<y2.
"""
ellipse = transformNode(self.doc, "ellipse",
cx=(x1+x2)/2.0, cy=(y1+y2)/2.0, rx=(x2-x1)/2.0, ry=(y2-y1)/2.0,
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(ellipse)
def circle(self, xc, yc, r):
circle = transformNode(self.doc, "circle",
cx=xc, cy=yc, r=r,
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(circle)
def drawCurve(self, x1, y1, x2, y2, x3, y3, x4, y4, closed=0):
pass
return
codeline = '%s m %s curveto'
data = (fp_str(x1, y1), fp_str(x2, y2, x3, y3, x4, y4))
if self._fillColor != None:
self.setColor(self._fillColor)
self.code.append((codeline % data) + ' eofill')
if self._strokeColor != None:
self.setColor(self._strokeColor)
self.code.append((codeline % data)
+ ((closed and ' closepath') or '')
+ ' stroke')
def drawArc(self, x1,y1, x2,y2, startAng=0, extent=360, fromcenter=0):
"""Draw a partial ellipse inscribed within the rectangle x1,y1,x2,y2.
Starting at startAng degrees and covering extent degrees. Angles
start with 0 to the right (+x) and increase counter-clockwise.
These should have x1<x2 and y1<y2.
"""
cx, cy = (x1+x2)/2.0, (y1+y2)/2.0
rx, ry = (x2-x1)/2.0, (y2-y1)/2.0
mx = rx * cos(startAng*pi/180) + cx
my = ry * sin(startAng*pi/180) + cy
ax = rx * cos((startAng+extent)*pi/180) + cx
ay = ry * sin((startAng+extent)*pi/180) + cy
str = ''
if fromcenter:
str = str + "M %f, %f L %f, %f " % (cx, cy, ax, ay)
if fromcenter:
str = str + "A %f, %f %d %d %d %f, %f " % \
(rx, ry, 0, extent>=180, 0, mx, my)
else:
str = str + "M %f, %f A %f, %f %d %d %d %f, %f Z " % \
(mx, my, rx, ry, 0, extent>=180, 0, mx, my)
if fromcenter:
str = str + "L %f, %f Z " % (cx, cy)
path = transformNode(self.doc, "path",
d=str, style=self._formatStyle())
self.currGroup.appendChild(path)
def polygon(self, points, closed=0):
assert len(points) >= 2, 'Polygon must have 2 or more points'
if self._strokeColor != None:
self.setColor(self._strokeColor)
pairs = []
for i in xrange(len(points)):
pairs.append("%f %f" % (points[i]))
pts = string.join(pairs, ', ')
polyline = transformNode(self.doc, "polygon",
points=pts, style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(polyline)
# self._fillAndStroke(polyCode)
def lines(self, lineList, color=None, width=None):
# print "### lineList", lineList
return
if self._strokeColor != None:
self._setColor(self._strokeColor)
codeline = '%s m %s l stroke'
for line in lineList:
self.code.append(codeline % (fp_str(line[0]), fp_str(line[1])))
def polyLine(self, points):
assert len(points) >= 1, 'Polyline must have 1 or more points'
if self._strokeColor != None:
self.setColor(self._strokeColor)
pairs = []
for i in xrange(len(points)):
pairs.append("%f %f" % (points[i]))
pts = string.join(pairs, ', ')
polyline = transformNode(self.doc, "polyline",
points=pts, style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(polyline)
### groups ###
def startGroup(self):
if self.verbose: print "+++ begin SVGCanvas.startGroup"
currGroup, group = self.currGroup, transformNode(self.doc, "g", transform="")
currGroup.appendChild(group)
self.currGroup = group
if self.verbose: print "+++ end SVGCanvas.startGroup"
return currGroup
def endGroup(self,currGroup):
if self.verbose: print "+++ begin SVGCanvas.endGroup"
self.currGroup = currGroup
if self.verbose: print "+++ end SVGCanvas.endGroup"
def transform(self, a, b, c, d, e, f):
if self.verbose: print "!!! begin SVGCanvas.transform", a, b, c, d, e, f
tr = self.currGroup.getAttribute("transform")
t = 'matrix(%f, %f, %f, %f, %f, %f)' % (a,b,c,d,e,f)
if (a, b, c, d, e, f) != (1, 0, 0, 1, 0, 0):
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
def translate(self, x, y):
# probably never used
print "!!! begin SVGCanvas.translate"
return
tr = self.currGroup.getAttribute("transform")
t = 'translate(%f, %f)' % (x, y)
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
def scale(self, x, y):
# probably never used
print "!!! begin SVGCanvas.scale"
return
tr = self.groups[-1].getAttribute("transform")
t = 'scale(%f, %f)' % (x, y)
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
### paths ###
def moveTo(self, x, y):
self.path = self.path + 'M %f %f ' % (x, y)
def lineTo(self, x, y):
self.path = self.path + 'L %f %f ' % (x, y)
def curveTo(self, x1, y1, x2, y2, x3, y3):
self.path = self.path + 'C %f %f %f %f %f %f ' % (x1, y1, x2, y2, x3, y3)
def closePath(self):
self.path = self.path + 'Z '
def saveState(self):
pass
def restoreState(self):
pass
class _SVGRenderer(Renderer):
    """This draws onto an SVG document.

    Walks the RLG node tree recursively (drawNode), pushing/popping
    graphics-state deltas via a StateTracker and forwarding each shape
    to the corresponding SVGCanvas method.
    """

    def __init__(self):
        self._tracker = StateTracker()
        self.verbose = 0

    def drawNode(self, node):
        """This is the recursive method called for each node in the tree.

        Applies the node's state delta, dispatches to the draw* method,
        then restores the canvas attributes listed in _restores.
        """
        if self.verbose: print "### begin _SVGRenderer.drawNode"
        self._canvas.comment('begin node %s'%`node`)
        color = self._canvas._color
        if not (isinstance(node, Path) and node.isClipPath):
            pass # self._canvas.saveState()

        #apply state changes
        deltas = getStateDelta(node)
        self._tracker.push(deltas)
        self.applyStateChanges(deltas, {})

        #draw the object, or recurse
        self.drawNodeDispatcher(node)

        rDeltas = self._tracker.pop()
        if not (isinstance(node, Path) and node.isClipPath):
            pass # self._canvas.restoreState()
        self._canvas.comment('end node %s'%`node`)
        self._canvas._color = color

        #restore things we might have lost (without actually doing anything).
        for k, v in rDeltas.items():
            if self._restores.has_key(k):
                setattr(self._canvas,self._restores[k],v)
        if self.verbose: print "### end _SVGRenderer.drawNode"

    # Maps tracker state keys to the SVGCanvas attributes they shadow.
    _restores = {'strokeColor':'_strokeColor','strokeWidth': '_lineWidth','strokeLineCap':'_lineCap',
                 'strokeLineJoin':'_lineJoin','fillColor':'_fillColor','fontName':'_font',
                 'fontSize':'_fontSize'}

    def drawGroup(self, group):
        """Render a Group node: open a <g>, draw children, apply the CTM."""
        if self.verbose: print "### begin _SVGRenderer.drawGroup"
        currGroup = self._canvas.startGroup()
        a, b, c, d, e, f = self._tracker.getCTM()
        for childNode in group.getContents():
            if isinstance(childNode, UserNode):
                node2 = childNode.provideNode()
            else:
                node2 = childNode
            self.drawNode(node2)
        self._canvas.transform(a, b, c, d, e, f)
        self._canvas.endGroup(currGroup)
        if self.verbose: print "### end _SVGRenderer.drawGroup"

    def drawRect(self, rect):
        """Render a Rect node (plain or rounded)."""
        if rect.rx == rect.ry == 0:
            #plain old rectangle
            self._canvas.rect(
                rect.x, rect.y,
                rect.x+rect.width, rect.y+rect.height)
        else:
            #cheat and assume ry = rx; better to generalize
            #pdfgen roundRect function.  TODO
            self._canvas.roundRect(
                rect.x, rect.y,
                rect.x+rect.width, rect.y+rect.height,
                rect.rx, rect.ry
                )

    def drawString(self, stringObj):
        """Render a String node, adjusting x for the tracked textAnchor."""
        if self._canvas._fillColor:
            S = self._tracker.getState()
            text_anchor, x, y, text = S['textAnchor'], stringObj.x, stringObj.y, stringObj.text
            if not text_anchor in ['start', 'inherited']:
                font, fontSize = S['fontName'], S['fontSize']
                textLen = stringWidth(text, font,fontSize)
                if text_anchor=='end':
                    x = x-textLen
                elif text_anchor=='middle':
                    x = x - textLen/2
                else:
                    raise ValueError, 'bad value for text_anchor ' + str(text_anchor)
            self._canvas.drawString(text,x,y)

    def drawLine(self, line):
        if self._canvas._strokeColor:
            self._canvas.line(line.x1, line.y1, line.x2, line.y2)

    def drawCircle(self, circle):
        self._canvas.circle( circle.cx, circle.cy, circle.r)

    def drawWedge(self, wedge):
        """Render a Wedge node as a from-center arc on the canvas."""
        centerx, centery, radius, startangledegrees, endangledegrees = \
         wedge.centerx, wedge.centery, wedge.radius, wedge.startangledegrees, wedge.endangledegrees
        yradius = wedge.yradius or wedge.radius
        (x1, y1) = (centerx-radius, centery-yradius)
        (x2, y2) = (centerx+radius, centery+yradius)
        extent = endangledegrees - startangledegrees
        self._canvas.drawArc(x1, y1, x2, y2, startangledegrees, extent, fromcenter=1)

    def drawPolyLine(self, p):
        if self._canvas._strokeColor:
            self._canvas.polyLine(_pointsFromList(p.points))

    def drawEllipse(self, ellipse):
        #need to convert to pdfgen's bounding box representation
        x1 = ellipse.cx - ellipse.rx
        x2 = ellipse.cx + ellipse.rx
        y1 = ellipse.cy - ellipse.ry
        y2 = ellipse.cy + ellipse.ry
        self._canvas.ellipse(x1,y1,x2,y2)

    def drawPolygon(self, p):
        self._canvas.polygon(_pointsFromList(p.points), closed=1)

    def drawPath(self, path):
        """Render a Path node by replaying its segments into the canvas."""
        # print "### drawPath", path.points
        from reportlab.graphics.shapes import _renderPath
        c = self._canvas
        drawFuncs = (c.moveTo, c.lineTo, c.curveTo, c.closePath)
        isClosed = _renderPath(path, drawFuncs)
        if not isClosed:
            # open paths are stroked only, never filled
            c._fillColor = None
        c._fillAndStroke([], clip=path.isClipPath)

    def applyStateChanges(self, delta, newState):
        """This takes a set of states, and outputs the operators
        needed to set those properties"""
        for key, value in delta.items():
            if key == 'transform':
                pass
                #self._canvas.transform(value[0], value[1], value[2], value[3], value[4], value[5])
            elif key == 'strokeColor':
                self._canvas.setStrokeColor(value)
            elif key == 'strokeWidth':
                self._canvas.setLineWidth(value)
            elif key == 'strokeLineCap':  #0,1,2
                self._canvas.setLineCap(value)
            elif key == 'strokeLineJoin':
                self._canvas.setLineJoin(value)
            elif key == 'strokeDashArray':
                if value:
                    self._canvas.setDash(value)
                else:
                    self._canvas.setDash()
            elif key == 'fillColor':
                self._canvas.setFillColor(value)
            elif key in ['fontSize', 'fontName']:
                # both attributes map onto a single setFont call; pull the
                # missing half of the pair from the canvas's current state
                fontname = delta.get('fontName', self._canvas._font)
                fontsize = delta.get('fontSize', self._canvas._fontSize)
                self._canvas.setFont(fontname, fontsize)
def test0(outdir='svgout'):
    "Render every getDrawing* drawing from the test module into outdir."
    if not os.path.isdir(outdir):
        os.mkdir(outdir)

    #grab all drawings from the test module
    from reportlab.graphics import testshapes

    drawings = []
    for funcname in dir(testshapes):
        #if funcname[0:11] == 'getDrawing2':
        #    print 'hacked to only show drawing 2'
        if funcname[0:10] == 'getDrawing':
            func = getattr(testshapes, funcname)
            drawings.append((func(), func.__doc__))

    i = 0
    for (d, docstring) in drawings:
        drawToFile(d, outdir + os.sep + 'renderSVG_%d.svg' % i)
        i = i + 1
def test1():
    "Render the first test drawing to svgout/test.svg."
    from reportlab.graphics.testshapes import getDrawing01
    drawToFile(getDrawing01(), "svgout/test.svg")
def test2():
    "Render the ReportLab corporate logo to svgout/corplogo.svg."
    from reportlab.lib.corp import RL_CorpLogo
    from reportlab.graphics.shapes import Drawing

    logo = RL_CorpLogo()
    drawing = Drawing(logo.width, logo.height)
    drawing.add(logo)
    drawToFile(drawing, "svgout/corplogo.svg")
if __name__=='__main__':
    # Run all the module's self-tests when executed as a script.
    test0()
    test1()
    test2()
| {
"repo_name": "alexissmirnov/donomo",
"path": "donomo_archive/lib/reportlab/graphics/renderSVG.py",
"copies": "2",
"size": "26974",
"license": "bsd-3-clause",
"hash": 3255077663741253600,
"line_mean": 31.577294686,
"line_max": 145,
"alpha_frac": 0.5615407429,
"autogenerated": false,
"ratio": 3.5371098872279045,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5098650630127904,
"avg_score": null,
"num_lines": null
} |
"""An experimental XPath-based streaming filter for ElementTree's iterparse
For details see:
http://dalkescientific.com/writings/diary/archive/2006/11/06/iterparse_filter.html
"""
# I have got to rearrange my site to use shorter URLs.
__version__ = "0.9-experimental"
import re
dtd_validation = False
try:
from lxml import etree
dtd_validation = True
except ImportError:
try:
# Python 2.5
import xml.etree.cElementTree as etree
except ImportError:
try:
# Python 2.5
import xml.etree.ElementTree as etree
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree
except ImportError:
# normal ElementTree install
import elementtree.ElementTree as etree
# define "letter" as "any character except /:[]()@={}* or in \s"
# (XXX make it match the XML spec)
# A URI is:
# letter+
# letter+ ':' letter+ --- a namespace prefixed term, like xml:space
# '{' [^}]* '}' letter+ --- a Clark namespace term, like {http://a}b
# Can also use a '*' in place of a URI or in the tag part of a namespaced field
#
# URIs are separated only be '/' and '//'.
# These may not occur together, eg, '///' is not allowed.
# Basing this tokenization method in part on elementtree.ElementPath
# Basing this tokenization on elementtree.ElementPath; the compiled
# pattern is kept so tokenize() can report true character offsets.
_xpath_tokenizer_re = re.compile( r"""
    (// | / )  # separators
    | (?:      # namespaced term
        ([^\/\:\[\]\(\)\@\=\{\}\*\s]+) :   # namespace
        ([^\/\:\[\]\(\)\@\=\{\}\*\s]+|\*)  # tag
      )
    | (?:
        \{([^}]*)\}                        # namespace in Clark notation
        ([^\/\:\[\]\(\)\@\=\{\}\*\s]+|\*)  # tag
      )
    | ([^\/\:\[\]\(\)\@\=\{\}\*\s]+|\*)    # tag with no namespace
    | (.)      # everything else; used to identify errors
 """, re.X)
# """"" # fix emacs cruft; having too many special characters fools it

# Backwards-compatible alias: the bound findall method this module
# previously exposed under this name.
xpath_tokenizer = _xpath_tokenizer_re.findall

def tokenize(s):
    """Yield (op, args, pos) triples for the xpath-like string 's'.

    op is one of "/", "//" (args is None), "namespace" (args is
    (prefix, tag)), "clark" (args is (uri, tag)) or "default" (args is
    the bare tag).  pos is the character offset of the token in 's'
    (bug fix: this was previously always reported as 0).

    Raises SyntaxError on characters no alternative can match.
    """
    for m in _xpath_tokenizer_re.finditer(s):
        pos = m.start()
        sep, ns_prefix, ns_tag, clark_ns, clark_tag, tag, error = m.groups()
        if sep in ("/", "//"):
            yield (sep, None, pos)
        elif ns_prefix:
            yield ("namespace", (ns_prefix, ns_tag), pos)
        elif clark_ns:
            yield ("clark", (clark_ns, clark_tag), pos)
        elif tag:
            yield ("default", tag, pos)
        elif error:
            raise SyntaxError("Unknown symbol %r at position %d" %
                              (error, pos))
        else:
            raise AssertionError("Unknown token: %r" % (m.groups(),))
def _make_original_tag(op, args):
if op == "namespace":
return "%s:%s" % (args[0], args[1])
if op == "clark":
return "{%s}:%s" % (args[0], args[1])
if op == "default":
return args
raise AssertionError("Strange: %r %r" % (op, args))
def _verify_ordering(tokens):
if not tokens:
raise SyntaxError(
"empty xpath not supported (don't know how to handle that case)")
pos = 0
prev = None
SEP = 1
URI = 2
# Check that the path alternates between separator and uri
for op, args, pos in tokens:
if op in ("/", "//"):
if prev == SEP:
raise SyntaxError(
"separator %r may not follow separator at position %d" %
(op, pos))
prev = SEP
elif op in ("namespace", "clark", "default"):
if prev == URI:
errmsg = _make_original_tag(op, args)
raise SyntaxError(
"%r may not follow a separator at position %d" %
(errormsg, pos))
prev = URI
else:
raise AssertionError("Unknown op: %r, %r, %r" % (op, args, pos))
if tokens[-1][0] == "//":
raise AssertionError("xpath may not end with '//'")
# There are further optimizations. For example, if this
# returned a match function instead of the regex then it
# could special case terms like /blah//* to mean "startswith('/blah/')"
# The small performance advantages for most cases doesn't
# currently warrant the extra work.
def to_regexp(s, namespaces={}, default_namespace=None):
    """Compile the xpath-like expression 's' into a regexp source string.

    The returned pattern matches element-stack strings of the form
    "/tag1/tag2/.../", where namespaced tags are in Clark notation
    ("{uri}tag").  'namespaces' maps prefixes (as used in 's') to URIs;
    'default_namespace' is applied to unprefixed tags.

    Raises SyntaxError for a malformed path or an unknown prefix.
    NOTE: the {} default is safe here because 'namespaces' is only read.
    """
    tokens = list(tokenize(s))
    _verify_ordering(tokens)

    ### Process the tokens
    re_terms = []
    # A leading "/" anchors the pattern at the document root.
    if tokens[0][0] == "/":
        re_terms.append("^")
        tokens.pop(0)

    for op, args, pos in tokens:
        if op == "/":
            pass
        elif op == "//":
            # zero or more intervening elements
            re_terms.append("(/[^/]+)*")
        elif op in ("namespace", "clark", "default"):
            # Break each apart to get the correct namespace and tag
            if op == "namespace":
                namespace, tag = args
                try:
                    full_namespace = namespaces[namespace]
                except KeyError:
                    raise SyntaxError("Unknown namespace %r at position %d" %
                                      (namespace, pos))
            elif op == "clark":
                full_namespace, tag = args
            elif op == "default":
                full_namespace = default_namespace
                tag = args

            # Figure out which pattern to use for the combination
            # of (namespace, namespace==None) x (tag, tag=='*')
            if full_namespace is None:
                # No namespace specified
                if tag == "*":
                    # Select everything between the /s
                    re_terms.append("/[^/]+")
                else:
                    # Select exactly the tag, no namespace
                    re_terms.append("/%s" % (re.escape(tag),))
            else:
                # namespace specified
                if tag == "*":
                    # Select only fields in the given namespace
                    re_terms.append("/" +
                                    re.escape("{%s}" % (full_namespace,)) +
                                    "[^/]+")
                else:
                    # Must match namespace and tag, exactly
                    re_terms.append("/" +
                                    re.escape("{%s}%s" % (full_namespace, tag)))
        else:
            raise AssertionError("Unknown op %r" % (op,))

    # Must be a complete match
    re_terms.append("/$")
    return "".join(re_terms)
class IterParseFilter(object):
    """Registry of XPath-like handlers and iterator paths for iterparse.

    Handlers registered with on_*() are called during parsing; paths
    registered with iter_*() cause matching (event, element) pairs to be
    yielded.  create_fa() compiles the registrations into a
    FilterAutomata, which parse()/iterparse()/handler_parse() delegate to.
    """

    def __init__(self, namespaces=None, default_namespace=None, validate_dtd=False):
        # namespaces: prefix -> URI map used when compiling paths;
        # default_namespace: applied to unprefixed tags in paths;
        # validate_dtd: request DTD validation (honored only with lxml).
        if namespaces is None:
            namespaces = {}
        self.namespaces = namespaces
        self.default_namespace = default_namespace
        self.validate_dtd = validate_dtd

        self._start_document_handlers = []
        self._end_document_handlers = []
        self._start_filters = []
        self._end_filters = []
        self._default_start_filters = []
        self._default_end_filters = []
        self._iter_start_filters = []
        self._iter_end_filters = []
        self._start_ns_handlers = []
        self._end_ns_handlers = []
        self._iter_start_ns = False
        self._iter_end_ns = False

    def on_start_document(self, handler):
        """Call handler("start-document", None, state) before parsing."""
        self._start_document_handlers.append(handler)

    def on_end_document(self, handler):
        """Call handler("end-document", None, state) after parsing."""
        self._end_document_handlers.append(handler)

    def _add_handler(self, filters, path, handler):
        # Compile 'path' and store (path, match-function, handler).
        path_re = to_regexp(path,
                            namespaces = self.namespaces,
                            default_namespace = self.default_namespace)
        filters.append( (path, re.compile(path_re).search, handler) )

    def on_start(self, path, handler):
        """Call handler on the start event of elements matching 'path'."""
        self._add_handler(self._start_filters, path, handler)

    def on_end(self, path, handler):
        """Call handler on the end event of elements matching 'path'."""
        self._add_handler(self._end_filters, path, handler)

    def on_start_default(self, path, handler):
        """Like on_start, but only consulted when no on_start path matched."""
        self._add_handler(self._default_start_filters, path, handler)

    def on_end_default(self, path, handler):
        """Like on_end, but only consulted when no on_end path matched."""
        self._add_handler(self._default_end_filters, path, handler)

    def _add_yielder(self, yielders, path):
        # Compile 'path' and store (path, match-function).
        path_re = to_regexp(path,
                            namespaces = self.namespaces,
                            default_namespace = self.default_namespace)
        yielders.append( (path, re.compile(path_re).search) )

    def iter_start(self, path):
        """Yield ("start", elem) pairs for elements matching 'path'."""
        self._add_yielder(self._iter_start_filters, path)

    def iter_end(self, path):
        """Yield ("end", elem) pairs for elements matching 'path'."""
        self._add_yielder(self._iter_end_filters, path)

    def on_start_ns(self, handler):
        """Call handler on namespace-declaration start events."""
        self._start_ns_handlers.append(handler)

    def on_end_ns(self, handler):
        """Call handler on namespace-declaration end events."""
        self._end_ns_handlers.append(handler)

    def iter_start_ns(self):
        """Yield ("start-ns", ...) events from parsing."""
        self._iter_start_ns = True

    def iter_end_ns(self):
        """Yield ("end-ns", ...) events from parsing."""
        self._iter_end_ns = True

    def _get_filter_info(self, category):
        # NOTE(review): this method looks dead/broken -- no 'self.filters'
        # attribute is ever assigned anywhere in this class, and the stored
        # filter tuples have three elements, not four, so calling it would
        # raise AttributeError.  Left untouched pending confirmation.
        for (_, _, pat, handler) in self.filters[category]:
            yield (pat, handler)

    def create_fa(self):
        """Snapshot the current registrations into a FilterAutomata."""
        # Make copies of everything to emphasize that they must
        # not be changed during processing.
        return FilterAutomata(
            start_document_handlers = self._start_document_handlers,
            end_document_handlers = self._end_document_handlers[::-1], # reverse!
            start_filters = self._start_filters[:],
            end_filters = self._end_filters[::-1],  # reversing here!
            default_start_filters = self._default_start_filters[:],
            default_end_filters = self._default_end_filters[::-1], # reversing!
            iter_start_filters = self._iter_start_filters[:],
            iter_end_filters = self._iter_end_filters[:],
            start_ns_handlers = self._start_ns_handlers[:],
            end_ns_handlers = self._end_ns_handlers[::-1],  # reversing here!
            iter_start_ns = self._iter_start_ns,
            iter_end_ns = self._iter_end_ns)

    # These forward to the underlying automata; make a new one each time.
    def parse(self, file, state=None):
        """Parse 'file', calling handlers and yielding registered events."""
        return self.create_fa().parse(file, state, self.validate_dtd)

    # Experimental
    def iterparse(self, file):
        """Parse 'file', yielding registered events (no handler state)."""
        return self.create_fa().iterparse(file, self.validate_dtd)

    # I need a better name
    def handler_parse(self, file, state=None):
        """Parse 'file' for handler side effects only (nothing yielded)."""
        return self.create_fa().handler_parse(file, state)
class FilterAutomata(object):
def __init__(self,
start_document_handlers,
end_document_handlers,
start_filters,
end_filters,
default_start_filters,
default_end_filters,
iter_start_filters,
iter_end_filters,
start_ns_handlers,
end_ns_handlers,
iter_start_ns,
iter_end_ns):
self.start_document_handlers = start_document_handlers
self.end_document_handlers = end_document_handlers
self.start_filters = start_filters
self.end_filters = end_filters
self.default_start_filters = default_start_filters
self.default_end_filters = default_end_filters
self.iter_start_filters = iter_start_filters
self.iter_end_filters = iter_end_filters
self.start_ns_handlers = start_ns_handlers
self.end_ns_handlers = end_ns_handlers
self.iter_start_ns = iter_start_ns
self.iter_end_ns = iter_end_ns
# Can cache results over multiple invocations
# NOTE: not thread-safe. Though given the GIL
# this shouldn't be a problem.
self.dfa = {}
def _new_node(self, stack_as_path):
start_handlers = []
for (path, matcher, handler) in self.start_filters:
if matcher(stack_as_path):
start_handlers.append(handler)
if not start_handlers:
# Any defaults?
for (path, matcher, handler) in self.default_start_filters:
if matcher(stack_as_path):
start_handlers.append(handler)
end_handlers = []
for (path, matcher, handler) in self.end_filters:
if matcher(stack_as_path):
end_handlers.append(handler)
if not end_handlers:
# Any defaults?
for (path, matcher, handler) in self.default_end_filters:
if matcher(stack_as_path):
end_handlers.append(handler)
# Have all the handlers, now check for yields
iter_start = False
for (path, matcher) in self.iter_start_filters:
if matcher(stack_as_path):
iter_start = True
break
iter_end = False
for (path, matcher) in self.iter_end_filters:
if matcher(stack_as_path):
iter_end = True
break
new_node = ({}, start_handlers, end_handlers, iter_start, iter_end)
return new_node
def _needed_actions(self, iter=False, handler=False):
if (not handler) and (not cb):
raise AssertionError("must specify one")
actions = ("start", "end")
if ( (handler and self.start_ns_handlers) or
(iter and self.iter_start_ns) ):
actions = actions + ("start-ns",)
if ( (handler and self.end_ns_handlers) or
(iter and self.iter_end_ns) ):
actions = actions + ("end-ns",)
return actions
# I plan to implement 'handler_parse' as a near copy of 'parse'
# but without any yield statements.
def handler_parse(self, file, state=None):
for x in self.parse(file, state):
pass
# Iterator-only variant of 'parse' (a callback-free copy is planned; for
# now it simply forwards with no handler state).
def iterparse(self, file, validate_dtd=False):
    """Yield (event, element) pairs; no callback state is threaded through."""
    return self.parse(file, state=None, validate_dtd=validate_dtd)
def parse(self, file, state=None, validate_dtd=False):
    """Parse *file*, dispatching registered start/end handlers and
    yielding (event, element) pairs for registered iterator patterns.

    *state* is passed unchanged to every handler.  Synthetic
    "start-document" / "end-document" events bracket the parse.

    Fix: the end-ns branch previously yielded based on
    ``self.iter_start_ns``; it now correctly checks ``self.iter_end_ns``.
    """
    if not dtd_validation:
        # 'dtd_validation' is a module-level capability flag (defined
        # earlier in the file); presumably False when the underlying
        # ElementTree cannot validate -- TODO confirm.
        validate_dtd = False
    node_stack = []
    node_stack_append = node_stack.append
    tag_stack = []
    tag_stack_append = tag_stack.append
    # children, start handlers, end handlers, iter start, iter end
    node = (self.dfa, [], [], False, False)
    # synthesize start-document events
    for handler in self.start_document_handlers:
        handler("start-document", None, state)
    # figure out if I also need start-ns and/or end-ns events
    needed_actions = self._needed_actions(True, True)
    kwargs = {}
    if validate_dtd:
        kwargs = dict(dtd_validation=True)
    for (event, ele) in etree.iterparse(file, needed_actions, **kwargs):
        if event == "start":
            tag = ele.tag
            # Descend into node; track where I am
            tag_stack_append(tag)
            node_stack_append(node)
            try:
                node = node[0][tag]
            except KeyError:
                # No child exists; build (and cache) a node for this path.
                stack_as_path = "/" + ("/".join(tag_stack)) + "/"
                new_node = self._new_node(stack_as_path)
                node[0][tag] = new_node
                node = new_node
            # call the start handlers then yield the element
            for start_handler in node[1]:
                start_handler(event, ele, state)
            if node[3]:
                yield (event, ele)
        elif event == "end":
            # call the end handlers then yield the element
            for end_handler in node[2]:
                end_handler(event, ele, state)
            if node[4]:
                yield (event, ele)
            del tag_stack[-1]
            node = node_stack.pop()
        elif event == "start-ns":
            for handler in self.start_ns_handlers:
                handler(event, ele, state)
            if self.iter_start_ns:
                yield (event, ele)
        elif event == "end-ns":
            for handler in self.end_ns_handlers:
                handler(event, ele, state)
            # FIX: was 'self.iter_start_ns' (copy-paste bug)
            if self.iter_end_ns:
                yield (event, ele)
    for handler in self.end_document_handlers:
        handler("end-document", None, state)
#### An incomplete test suite ####
def test_path(path, args):
    """Return True if the XPath-like *path* matches the tag stack *args*."""
    matcher = re.compile(to_regexp(path))
    stack = "/" + "/".join(args) + "/"
    return bool(matcher.search(stack))
def test_ns_path(path, args):
    """Like test_path, but with namespace prefixes xml:/das2: registered."""
    pattern = to_regexp(
        path,
        namespaces={
            "xml": "http://www.w3.org/XML/1998/namespace",
            "das2": "http://biodas.org/documents/das2"},
        # the empty namespace is not the same as no namespace!
        default_namespace="")
    matcher = re.compile(pattern)
    stack = "/" + "/".join(args) + "/"
    return bool(matcher.search(stack))
def test_syntax():
    """Exercise the XPath-to-regexp translation on known match/no-match cases."""
    plain_cases = (
        ("A", ["A"], 1),
        ("A", ["AA"], 0),
        ("A", ["B", "A"], 1),
        ("/A", ["B", "A"], 0),
        ("/B", ["B", "A"], 0),
        ("//A", ["B", "A"], 1),
        ("A//B", ["A", "B"], 1),
        ("A//B", ["C", "A", "B"], 1),
        ("/A//B", ["C", "A", "B"], 0),
        ("/B/*", ["B", "A"], 1),
        # Test back-tracking; both greedy and non-greedy cases
        ("A//B//C//D", ["A", "B", "C", "B", "D"], 1),
        ("A//B/D", ["A", "B", "C", "B", "D"], 1),
        # Clark namespace tests
        ("{http://x.com}A", ["{http://x.com}A"], 1),
        ("{http://x.org}A", ["{http://x.com}A"], 0),
        ("{http://x.org}A", ["{http://x.com}B", "{http://x.org}A"], 1),
        ("*", ["{http://x.com}A"], 1),
        ("{http://x.com}*", ["{http://x.com}A"], 1),
        ("{http://x.com}*", ["{http://x.org}A"], 0),
    )
    ns_cases = (
        # various namespace checks
        ("xml:A", ["{http://www.w3.org/XML/1998/namespace}A"], 1),
        ("xml:A", ["{http://www.w3.org/XML/1998/namespace2}A"], 0),
        ("xml:A", ["{http://www.w3.org/XML/1998/namespace}AA"], 0),
        ("xml:A", ["{http://www.w3.org/XML/1998/namespace}B",
                   "{http://www.w3.org/XML/1998/namespace}A"], 1),
        ("xml:B", ["{http://www.w3.org/XML/1998/namespace}B",
                   "{http://www.w3.org/XML/1998/namespace}A"], 0),
        ("A", ["{}A"], 1),
        ("A", ["A"], 0),
        ("*", ["A"], 0),
        ("*", ["{}A"], 1),
        ("das2:*", ["{http://biodas.org/documents/das2}AAA"], 1),
        ("das2:*", ["{}AAA"], 0),
        ("xml:*/das2:*", ["{http://www.w3.org/XML/1998/namespace}ABC",
                          "{http://biodas.org/documents/das2}ABC"], 1),
        ("das2:*/xml:*", ["{http://www.w3.org/XML/1998/namespace}ABC",
                          "{http://biodas.org/documents/das2}ABC"], 0),
    )
    # Plain patterns first, then the namespace-aware ones -- same order
    # (and same failure message) as the original two loops.
    for checker, cases in ((test_path, plain_cases), (test_ns_path, ns_cases)):
        for (xpath, tag_list, expect) in cases:
            got = checker(xpath, tag_list)
            if got != expect:
                raise AssertionError("xpath %r against %r got %r, expected %r" %
                                     (xpath, tag_list, got, bool(expect)))
def test_filtering():
    """Exercise IterParseFilter callbacks and event ordering on a tiny doc.

    Fix: the function ended with a bare ``f.seek`` -- a no-op attribute
    access -- so the second filter (the must_match_B assertions) was
    configured but never executed.  It now rewinds and runs the parse.
    """
    import cStringIO as StringIO   # NOTE: Python 2 module
    f = StringIO.StringIO("""\
<A><AA>
<B xmlns="http://z/"><C/><spam:D xmlns:spam="http://spam/">eggs</spam:D></B>
<B x='6'>foo<B y='7'>bar</B>baz</B>
</AA></A>""")
    # Sentinel object threaded through as handler state.
    special = object()
    class Capture(object):
        def __init__(self):
            self.history = []
        def __call__(self, event, ele, state):
            if state is not special:
                raise AssertionError("Did not get expected state")
            self.history.append( (event, ele) )
    filter = IterParseFilter()
    capture_all = Capture()
    filter.on_start_document(capture_all)
    filter.on_start("*", capture_all)
    filter.on_end("*", capture_all)
    filter.on_end_document(capture_all)
    filter.on_start_ns(capture_all)
    filter.on_end_ns(capture_all)
    # No iterator patterns were registered, so nothing may be yielded.
    for x in filter.parse(f, state=special):
        raise AssertionError("should not yield %r" % (x,))
    expect_history = (
        ("start-document", None),
        ("start", "A"),
        ("start", "AA"),
        ("start-ns", ("", "http://z/")),
        ("start", "{http://z/}B"),
        ("start", "{http://z/}C"),
        ("end", "{http://z/}C"),
        ("start-ns", ("spam", "http://spam/")),
        ("start", "{http://spam/}D"),
        ("end", "{http://spam/}D"),
        ("end-ns", None),
        ("end", "{http://z/}B"),
        ("end-ns", None),
        ("start", "B"),
        ("start", "B"),
        ("end", "B"),
        ("end", "B"),
        ("end", "AA"),
        ("end","A"),
        ("end-document", None),
    )
    for (got, expect) in zip(capture_all.history, expect_history):
        event, ele = got
        # start-ns/end-ns events carry tuples/None rather than elements.
        tag = getattr(ele, "tag", ele)
        if (event, tag) != expect:
            raise AssertionError("Expected %r Got %r" % (expect, (event, tag)))
    if len(capture_all.history) != len(expect_history):
        raise AssertionError("Length mismatch")
    f.seek(0)
    filter = IterParseFilter()
    def must_match_B(event, ele, state):
        if ele.tag != "B":
            raise AssertionError("%r is not B" % (ele.tag,))
    def must_match_B_y7(event, ele, state):
        if ele.tag != "B":
            raise AssertionError("%r is not B" % (ele.tag,))
        if ele.attrib["y"] != "7":
            raise AssertionError("%r is not the correct B" % (ele.tag,))
    filter.on_start("B", must_match_B)
    filter.on_start("B/B", must_match_B_y7)
    # FIX: was the no-op statement 'f.seek'; actually run the assertions.
    filter.handler_parse(f)
def test_parse():
    # Smoke test against a local iTunes library file; skipped when absent.
    # NOTE: uses Python 2 syntax ('print' statements, iterator .next()).
    import os
    filename = "/Users/dalke/Music/iTunes/iTunes Music Library.xml"
    if not os.path.exists(filename):
        print "Cannot find %r: skipping test" % (filename,)
        return
    # Work through callbacks
    ef = IterParseFilter()
    def print_info(event, ele, state):
        # plist dicts alternate <key>/<value> child elements; pair them up.
        d = {}
        children = iter(ele)
        for child in children:
            key = child.text
            value = children.next().text
            d[key] = value
        print "%r is by %r" % (d["Name"], d.get("Artist", "<unknown>"))
        # Free the element's children to keep memory flat on large files.
        ele.clear()
    ef.on_end("/plist/dict/dict/dict", print_info)
    ef.handler_parse(open(filename))
    # Work through iterators
    ef = IterParseFilter()
    ef.iter_end("/plist/dict/dict/dict")
    for (event, ele) in ef.iterparse(open(filename)):
        # Same key/value pairing as print_info above.
        d = {}
        children = iter(ele)
        for child in children:
            key = child.text
            value = children.next().text
            d[key] = value
        print "%r is a %r song" % (d["Name"], d.get("Genre", "<unknown>"))
        ele.clear()
def test():
test_syntax()
test_filtering()
test_parse()
# Run the (incomplete) test suite when executed as a script.
if __name__ == "__main__":
    test()
    print "All tests passed."   # Python 2 print statement
| {
"repo_name": "jerrylei98/graphene",
"path": "curation/dblp/iterparse_filter.py",
"copies": "1",
"size": "23248",
"license": "apache-2.0",
"hash": -1222795375246253000,
"line_mean": 34.9319938176,
"line_max": 84,
"alpha_frac": 0.5136355816,
"autogenerated": false,
"ratio": 3.831877369375309,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9805403780280738,
"avg_score": 0.008021834138914071,
"num_lines": 647
} |
# An expert module that knows how file names are structured on a number of
# platforms; it handles them mostly as strings, which of course they
# are...
import os
def windows_environment_vars_to_unix(token):
    """Transmogrify Windows environment tokens (e.g. %WINDIR%) into the
    UNIX form ($WINDIR) so Python environment expansion can handle them.

    Raises RuntimeError when the '%' signs are unbalanced.
    """
    if token.count("%") % 2:
        raise RuntimeError("must have even number of % tokens")
    # Splitting on '%' alternates literal text (even indices) with
    # variable names (odd indices).
    pieces = token.split("%")
    return "".join(
        piece if index % 2 == 0 else "$%s" % piece
        for index, piece in enumerate(pieces)
    )
def expand_path(path):
    """Expand '~' and environment variables in *path* to give a full path.

    Returns None unchanged.  On Windows, %VAR% tokens are first rewritten
    to $VAR so os.path.expandvars can process them.
    """
    if path is None:
        return None
    if os.name == "nt":
        path = windows_environment_vars_to_unix(path)
    return os.path.expandvars(os.path.expanduser(path))
| {
"repo_name": "xia2/xia2",
"path": "src/xia2/Experts/Filenames.py",
"copies": "1",
"size": "1070",
"license": "bsd-3-clause",
"hash": 7933161707017330000,
"line_mean": 24.4761904762,
"line_max": 72,
"alpha_frac": 0.608411215,
"autogenerated": false,
"ratio": 3.9051094890510947,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0009523809523809524,
"num_lines": 42
} |
"""An exporter that converts scenes / animations to shareable HTML files.
"""
from klampt import *
from klampt.model import trajectory
from klampt import robotsim
import json
import pkg_resources
# Placeholder tokens that HTMLSharePath.end() substitutes into the
# boilerplate HTML template.
_title_id = '__TITLE__'
_scene_id = '__SCENE_JSON__'
_path_id = '__PATH_JSON__'
_rpc_id = '__RPC_JSON__'
_compressed_id = '__COMPRESSED__'
_dt_id = '__TIMESTEP__'
_frontend_load_id = '__KLAMPT_FRONTEND_LOAD__'
def make_fixed_precision(obj, digits):
    """Recursively round every float in *obj* to *digits* decimal places.

    Lists and dicts are modified in place (and returned); tuples are
    returned as new lists; other types pass through unchanged.
    """
    if isinstance(obj, float):
        return round(obj, digits)
    if isinstance(obj, tuple):
        # Note: tuples deliberately come back as lists (JSON-friendly).
        return [make_fixed_precision(item, digits) for item in obj]
    if isinstance(obj, list):
        for index, item in enumerate(obj):
            obj[index] = make_fixed_precision(item, digits)
    elif isinstance(obj, dict):
        for key, val in obj.items():
            obj[key] = make_fixed_precision(val, digits)
    return obj
class HTMLSharePath:
    """An exporter that converts scenes / animations to shareable HTML files.

    Examples::

        sharer = HTMLSharePath("mypath.html",name="My spiffy path")
        sharer.start(sim) #can accept a sim or a world
        while [simulation is running]:
            #do whatever control you wish to do here
            sim.simulate(...)
            sharer.animate()
        sharer.end() #this saves to the filename given in the constructor
    """
    def __init__(self,filename=None,name="Klamp't Three.js app",boilerplate='auto',libraries='static'):
        """
        Args:
            filename (str, optional): the HTML file to generate. If None, then
                the end() method returns the HTML string.
            name (str): the title of the HTML page
            boilerplate (str): the location of the boilerplate HTML file. If
                'auto', it's automatically found in the ``klampt/data`` folder.
            libraries (str): either 'static' or 'dynamic'. In the latter case,
                the html file loads the libraries from the Klamp't website
                dynamically. This reduces the size of the HTML file by about
                600kb, but the viewer needs an internet connection
        """
        self.name = name
        if boilerplate == 'auto':
            boilerplate = pkg_resources.resource_filename('klampt','data/share_path_boilerplate.html')
        f = open(boilerplate,'r')
        self.boilerplate_file = ''.join(f.readlines())
        f.close()
        if libraries == 'static':
            self.klampt_frontend_load_script = pkg_resources.resource_filename('klampt','data/klampt_frontend_load_static.js')
        else:
            if libraries != 'dynamic':
                raise ValueError("The libraries argument must either be 'static' or 'dynamic'")
            self.klampt_frontend_load_script = pkg_resources.resource_filename('klampt','data/klampt_frontend_load_dynamic.js')
        # Sanity-check that the template contains every substitution tag.
        if any(v not in self.boilerplate_file for v in [_scene_id,_path_id,_rpc_id,_compressed_id,_dt_id,_frontend_load_id]):
            raise RuntimeError("Boilerplate file does not contain the right tags")
        self.fn = filename
        self.scene = 'null'      # JSON string of the Three.js scene
        self.transforms = {}     # object name -> per-frame matrix list (None = unchanged)
        self.rpc = []            # per-frame extra RPC call lists (or None)
        self.dt = 0              # fixed frame time step, inferred on first animate()
        self.last_t = 0          # time of the last recorded frame
    def start(self,world):
        """Begins the path saving with the given WorldModel or Simulator"""
        if isinstance(world,Simulator):
            self.sim = world
            self.world = world.world
            self.last_t = world.getTime()
        else:
            self.sim = None
            self.world = world
        if self.world is not None:
            # Serialize the initial scene once; animate() only records
            # per-frame transform deltas afterwards.
            self.scene = robotsim.ThreeJSGetScene(self.world)
    def animate(self,time=None,rpc=None):
        """Updates the path from the world. If the world wasn't a simulator, the time
        argument needs to be provided.

        If you want to include extra things, provide them in the rpc argument (as a list
        of KlamptFrontend rpc calls)
        """
        if self.sim is not None and time is None:
            time = self.sim.getTime()
            self.sim.updateWorld()
        if time is None:
            raise ValueError("Time needs to be provided")
        dt = time - self.last_t
        # The first call fixes the frame time step for the whole path.
        if self.dt == 0:
            self.dt = dt
        if self.dt == 0:
            return
        # Snap tiny float drift onto the nominal step.
        if abs(dt - self.dt) <= 1e-6:
            dt = self.dt
        numadd = 0
        # Emit one frame per elapsed dt; >1 iteration duplicates frames
        # to keep the timeline evenly spaced.
        while dt >= self.dt:
            numadd += 1
            if self.world is not None:
                transforms = json.loads(robotsim.ThreeJSGetTransforms(self.world))
            else:
                transforms = {'object':[]}
            for update in transforms['object']:
                n = update['name']
                mat = make_fixed_precision(update['matrix'],4)
                matpath = self.transforms.setdefault(n,[])
                assert len(matpath) == len(self.rpc)
                # Find the most recent non-None matrix for this object.
                lastmat = None
                for m in matpath[::-1]:
                    if m != None:
                        lastmat = m
                        break
                # Store None when unchanged to keep the JSON compact.
                if lastmat != mat:
                    matpath.append(mat)
                else:
                    matpath.append(None)
            # Extra RPC calls attach only to the first duplicated frame.
            if numadd == 1:
                if rpc is not None:
                    assert isinstance(rpc,(list,tuple)),"rpc argument must be a list or a tuple"
                    self.rpc.append(rpc)
                else:
                    self.rpc.append(None)
            else:
                self.rpc.append(None)
            dt -= self.dt
            self.last_t += self.dt
        if numadd > 1:
            print("HTMLSharePath: Note, uneven time spacing, duplicating frame",numadd,"times")
    def end(self,rpc=None):
        # Substitute scene/path/rpc JSON into the boilerplate and either
        # return the HTML string (fn is None) or write it to disk.
        # NOTE(review): when animate() stored None for the last frame,
        # 'self.rpc[-1] += rpc' raises TypeError -- confirm intended usage.
        if len(self.rpc)==0:
            self.rpc = [rpc]
        elif rpc is not None:
            self.rpc[-1] += rpc
        data = self.boilerplate_file.replace(_title_id,self.name)
        data = data.replace(_scene_id,self.scene)
        data = data.replace(_path_id,json.dumps(self.transforms))
        data = data.replace(_rpc_id,json.dumps(self.rpc))
        data = data.replace(_compressed_id,'true')
        data = data.replace(_dt_id,str(self.dt))
        f = open(self.klampt_frontend_load_script,'r')
        load_script = ''.join(f.readlines())
        f.close()
        data = data.replace(_frontend_load_id,load_script)
        if self.fn is None:
            return data
        else:
            print("Path with",len(self.rpc),"frames saved to",self.fn)
            f = open(self.fn,'w')
            f.write(data)
            f.close()
if __name__ == '__main__':
    import sys
    import os
    from klampt import trajectory
    world = WorldModel()
    if len(sys.argv) == 1:
        # No arguments: run a canned 2-second simulation of the ATHLETE
        # robot and export it as HTML.
        world.readFile("../../data/athlete_plane.xml")
        q = world.robot(0).getConfig()
        q[2] = 2    # lift the robot 2m above the plane before simulating
        world.robot(0).setConfig(q)
        sim = Simulator(world)
        share = HTMLSharePath(name="Klamp't simulation path")
        share.start(sim)
        for i in range(100):
            sim.simulate(0.02)
            share.animate()
        share.end()
    else:
        # Two arguments: replay a stored robot trajectory in a given world.
        assert len(sys.argv) == 3,"Usage: sharepath.py world.xml robot_path"
        world.readFile(sys.argv[1])
        traj = trajectory.RobotTrajectory(world.robot(0))
        traj.load(sys.argv[2])
        world.robot(0).setConfig(traj.milestones[0])
        dt = 0.02       # frame time step
        excess = 1.0    # seconds to hold past the trajectory's end
        share = HTMLSharePath(name="Klamp't path "+os.path.split(sys.argv[2])[1])
        share.start(world)
        share.dt = dt
        t = traj.times[0]
        while t < traj.times[-1] + excess:
            world.robot(0).setConfig(traj.eval(t))
            share.animate(t)
            t += dt
        share.end()
| {
"repo_name": "krishauser/Klampt",
"path": "Python/klampt/io/html.py",
"copies": "1",
"size": "7673",
"license": "bsd-3-clause",
"hash": -4166459910219164700,
"line_mean": 37.1741293532,
"line_max": 127,
"alpha_frac": 0.5581910596,
"autogenerated": false,
"ratio": 3.8654911838790933,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9883923993281281,
"avg_score": 0.007951650039562556,
"num_lines": 201
} |
"""An exposure time calculator for LSST. Uses GalSim to draw a galaxy with specified magnitude,
shape, etc, and then uses the same image as the optimal weight function. Derived from D. Kirkby's
notes on deblending.
"""
import numpy as np
import galsim
# Some constants
# --------------
# Module-level photometric constants used by the ETC class below.
#
# LSST effective area in meters^2
A = 319/9.6 # etendue / FoV. I *think* this includes vignetting
# zeropoints from DK notes in photons per second per pixel
# should eventually compute these on the fly from filter throughput functions.
s0 = {'u': A*0.732,
      'g': A*2.124,
      'r': A*1.681,
      'i': A*1.249,
      'z': A*0.862,
      'Y': A*0.452}
# Sky brightnesses in AB mag / arcsec^2.
# stole these from http://www.lsst.org/files/docs/gee_137.28.pdf
# should eventually construct a sky SED (varies with the moon phase) and integrate to get these
B = {'u': 22.8,
     'g': 22.2,
     'r': 21.3,
     'i': 20.3,
     'z': 19.1,
     'Y': 18.1}
# number of visits
# From LSST Science Book
fiducial_nvisits = {'u': 56,
                    'g': 80,
                    'r': 180,
                    'i': 180,
                    'z': 164,
                    'Y': 164}
# exposure time per visit (presumably seconds -- TODO confirm)
visit_time = 30.0
# Sky brightness per arcsec^2 per second, from s0 and B above
sbar = {}
for k in B:
    sbar[k] = s0[k] * 10**(-0.4*(B[k]-24.0))
# And some random numbers for drawing (fixed seed for reproducibility)
bd = galsim.BaseDeviate(1)
class ETC(object):
    """Exposure-time calculator.

    Draws the requested profile at the requested magnitude and then uses
    the noiseless image itself as the (optimal) SNR weight function.
    """
    def __init__(self, band, pixel_scale=None, stamp_size=None, threshold=0.0,
                 nvisits=None):
        self.band = band
        self.pixel_scale = pixel_scale
        self.stamp_size = stamp_size
        self.threshold = threshold
        # Total integration time: fiducial visit count unless overridden.
        visits = fiducial_nvisits[band] if nvisits is None else nvisits
        self.exptime = visits * visit_time
        # Mean sky level (photons per pixel) over the full exposure.
        self.sky = sbar[band] * self.exptime * self.pixel_scale**2
        self.sigma_sky = np.sqrt(self.sky)
        self.s0 = s0[band]
    def flux(self, mag):
        """Expected source photons collected at AB magnitude *mag*."""
        zp_scale = 10**(-0.4*(mag - 24.0))
        return self.s0 * zp_scale * self.exptime
    def draw(self, profile, mag, noise=False):
        """Render *profile* at magnitude *mag*; optionally add sky noise."""
        stamp = galsim.ImageD(self.stamp_size, self.stamp_size,
                              scale=self.pixel_scale)
        scaled = profile.withFlux(self.flux(mag))
        scaled.drawImage(image=stamp)
        if noise:
            sky_noise = galsim.GaussianNoise(bd, sigma=self.sigma_sky)
            stamp.addNoise(sky_noise)
        return stamp
    def SNR(self, profile, mag):
        """Optimal-weight signal-to-noise ratio of the noiseless image."""
        stamp = self.draw(profile, mag, noise=False)
        # Only pixels above the threshold (in sky-sigma units) contribute.
        keep = stamp.array > (self.threshold * self.sigma_sky)
        weighted = stamp.array**2*keep
        signal = weighted.sum()
        noise = np.sqrt((weighted * self.sky).sum())
        return signal / noise
    def err(self, profile, mag):
        """Magnitude uncertainty implied by the SNR."""
        snr = self.SNR(profile, mag)
        return 2.5 / np.log(10) / snr
    def display(self, profile, mag, noise=True):
        """Show the (optionally noisy) stamp with matplotlib."""
        stamp = self.draw(profile, mag, noise)
        import matplotlib.pyplot as plt
        import matplotlib.cm as cm
        plt.imshow(stamp.array, cmap=cm.Greens)
        plt.colorbar()
        plt.show()
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
# Filter
parser.add_argument("--band", default='i',
help="band for simulation (Default 'i')")
# PSF structural arguments
PSF_profile = parser.add_mutually_exclusive_group()
PSF_profile.add_argument("--kolmogorov", action="store_true",
help="Use Kolmogorov PSF (Default Gaussian)")
PSF_profile.add_argument("--moffat", action="store_true",
help="Use Moffat PSF (Default Gaussian)")
parser.add_argument("--PSF_beta", type=float, default=3.0,
help="Set beta parameter of Moffat profile PSF. (Default 2.5)")
parser.add_argument("--PSF_FWHM", type=float, default=0.67,
help="Set FWHM of PSF in arcsec (Default 0.67).")
parser.add_argument("--PSF_phi", type=float, default=0.0,
help="Set position angle of PSF in degrees (Default 0.0).")
parser.add_argument("--PSF_ellip", type=float, default=0.0,
help="Set ellipticity of PSF (Default 0.0)")
# Galaxy structural arguments
parser.add_argument("-n", "--sersic_n", type=float, default=1.0,
help="Sersic index (Default 1.0)")
parser.add_argument("--gal_ellip", type=float, default=0.3,
help="Set ellipticity of galaxy (Default 0.3)")
parser.add_argument("--gal_phi", type=float, default=0.0,
help="Set position angle of galaxy in radians (Default 0.0)")
parser.add_argument("--gal_HLR", type=float, default=0.2,
help="Set galaxy half-light-radius. (default 0.5 arcsec)")
# Simulation input arguments
parser.add_argument("--pixel_scale", type=float, default=0.2,
help="Set pixel scale in arcseconds (Default 0.2)")
parser.add_argument("--stamp_size", type=int, default=31,
help="Set postage stamp size in pixels (Default 31)")
# Magnitude!
parser.add_argument("--mag", type=float, default=25.3,
help="magnitude of galaxy")
# threshold
parser.add_argument("--threshold", type=float, default=0.0,
help="Threshold, in sigma-sky units, above which to include pixels")
# Observation characteristics
parser.add_argument("--nvisits", type=int, default=None)
# draw the image!
parser.add_argument("--display", action='store_true',
help="Display image used to compute SNR.")
args = parser.parse_args()
if args.kolmogorov:
psf = galsim.Kolmogorov(fwhm=args.PSF_FWHM)
elif args.moffat:
psf = galsim.Moffat(fwhm=args.PSF_FWHM, beta=args.PSF_beta)
else:
psf = galsim.Gaussian(fwhm=args.PSF_FWHM)
psf = psf.shear(e=args.PSF_ellip, beta=args.PSF_phi*galsim.radians)
gal = galsim.Sersic(n=args.sersic_n, half_light_radius=args.gal_HLR)
gal = gal.shear(e=args.gal_ellip, beta=args.gal_phi*galsim.radians)
profile = galsim.Convolve(psf, gal)
etc = ETC(args.band, pixel_scale=args.pixel_scale, stamp_size=args.stamp_size,
threshold=args.threshold, nvisits=args.nvisits)
print
print "input"
print "------"
print "band: {}".format(args.band)
print "magnitude: {}".format(args.mag)
print
print "output"
print "------"
print "signal: {}".format(etc.flux(args.mag))
print "sky noise: {}".format(etc.sigma_sky)
print "SNR: {}".format(etc.SNR(profile, args.mag))
print "mag err: {}".format(etc.err(profile, args.mag))
if args.display:
etc.display(profile, args.mag)
| {
"repo_name": "DarkEnergyScienceCollaboration/BremertonRoundTrip",
"path": "galsim/lsstetc.py",
"copies": "1",
"size": "6774",
"license": "mit",
"hash": 2269566649429024000,
"line_mean": 35.6162162162,
"line_max": 98,
"alpha_frac": 0.5922645409,
"autogenerated": false,
"ratio": 3.3092330239374697,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9398465125890056,
"avg_score": 0.0006064877894826517,
"num_lines": 185
} |
"""an extended chroot equivalent"""
from __future__ import absolute_import, unicode_literals
import argparse
import errno
from functools import partial
import os
import sys
from pychroot.base import Chroot
from pychroot.exceptions import ChrootError
from snakeoil.cli import arghparse
def bindmount(s, recursive=False, readonly=False):
    """Build the mountpoint mapping for source *s* with the given options."""
    return {s: {'recursive': recursive, 'readonly': readonly}}
class mountpoints(argparse.Action):
    """argparse action that merges repeated mount options into one dict."""
    def __call__(self, parser, namespace, values, option_string=None):
        existing = getattr(namespace, 'mountpoints', False)
        if not existing:
            namespace.mountpoints = {}
        namespace.mountpoints.update(values)
argparser = arghparse.ArgumentParser(
color=False, debug=False, quiet=False, verbose=False,
description=__doc__)
argparser.add_argument('path', help='path to newroot')
argparser.add_argument(
'command', nargs=argparse.REMAINDER, help='optional command to run',
docs="""
Optional command to run.
Similar to chroot(1), if unspecified this defaults to $SHELL from the
host environment and if that's unset it executes /bin/sh.
""")
argparser.add_argument(
'--no-mounts', action='store_true',
help='disable the default bind mounts',
docs="""
Disable the default bind mounts which can be used to obtain a standard
chroot environment that you'd expect when using chroot(1).
""")
argparser.add_argument(
'--hostname', type=str, help='specify the chroot hostname',
docs="""
Specify the chroot hostname. In order to set the domain name as well,
pass an FQDN instead of a singular hostname.
""")
argparser.add_argument(
'--skip-chdir', action='store_true',
help="do not change working directory to '/'",
docs="""
Do not change the current working directory to '/'.
Unlike chroot(1), this currently doesn't limit you to only using it
when the new root isn't '/'. In other words, you can use a new chroot
environment on the current host system rootfs with one caveat: any
absolute paths will use the new rootfs.
""")
argparser.add_argument(
'-B', '--bind', type=bindmount, action=mountpoints,
metavar='SRC[:DEST]', help='specify custom bind mount',
docs="""
Specify a custom bind mount.
In order to mount the same source to multiple destinations, use the
SRC:DEST syntax. For example, the following will bind mount '/srv/data'
to /srv/data and /home/user/data in the chroot::
pychroot -B /srv/data -B /srv/data:/home/user/data /path/to/chroot
""")
argparser.add_argument(
'-R', '--rbind', type=partial(bindmount, recursive=True),
action=mountpoints, metavar='SRC[:DEST]',
help='specify custom recursive bind mount')
argparser.add_argument(
'--ro', '--readonly', type=partial(bindmount, readonly=True),
action=mountpoints, metavar='SRC[:DEST]',
help='specify custom readonly bind mount',
docs="""
Specify a custom readonly bind mount.
Readonly, recursive bind mounts aren't currently supported on Linux so
this has to be a standalone option for now. Once they are, support for
them and other mount attributes will be added as an extension to the
mount point argument syntax.
""")
def parse_args(args):
    """Parse command-line *args*, applying fallbacks.

    The command defaults to an interactive $SHELL (or /bin/sh), a missing
    mountpoints attribute becomes None, and --no-mounts clears Chroot's
    default mounts.
    """
    opts = argparser.parse_args(args)
    if not opts.command:
        shell = os.getenv('SHELL', '/bin/sh')
        opts.command = [shell, '-i']
    opts.mountpoints = getattr(opts, 'mountpoints', None)
    if opts.no_mounts:
        Chroot.default_mounts = {}
    return opts
def main(args):
    """Enter the chroot at opts.path and exec the requested command.

    Exits via SystemExit with a readable message when the command cannot
    be found (ENOENT) or the chroot itself fails to set up.
    """
    opts = parse_args(args)
    command = opts.command
    try:
        with Chroot(opts.path, mountpoints=opts.mountpoints,
                    hostname=opts.hostname, skip_chdir=opts.skip_chdir):
            os.execvp(command[0], command)
    except EnvironmentError as e:
        # Anything other than "command not found" propagates unchanged.
        if e.errno != errno.ENOENT:
            raise
        raise SystemExit(
            "{}: failed to run command '{}': {}".format(
                os.path.basename(sys.argv[0]), command[0], e.strerror))
    except ChrootError as e:
        raise SystemExit('{}: {}'.format(os.path.basename(sys.argv[0]), str(e)))
| {
"repo_name": "radhermit/pychroot",
"path": "pychroot/scripts/pychroot.py",
"copies": "1",
"size": "4248",
"license": "bsd-3-clause",
"hash": 4580546519955035000,
"line_mean": 32.7142857143,
"line_max": 80,
"alpha_frac": 0.6560734463,
"autogenerated": false,
"ratio": 4.0534351145038165,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5209508560803816,
"avg_score": null,
"num_lines": null
} |
""" An extended complex number library, with a richer API and compatible with Python numeric types.
It includes functions that are equivalent to the ones found in the cmath module. It also includes an
AlComplex object, which wraps a complex number, with a better API.
Every function and mathematical operation available is all compatible with Python own int, float and complex numeric types.
Also, all the overhead has been suppressed as much as possible.
"""
import math as m
import cmath as cm
from itertools import chain
from functools import wraps
def use_j(option=True):
    """Select the letter used to print the imaginary unit.

    Parameters
    ----------
    option : bool
        True selects 'j', False selects 'i'.
    """
    if option:
        AlComplex.symbol = 'j'
    else:
        AlComplex.symbol = 'i'
def real_to_complex(z):
    """Coerce a Python numeric to AlComplex; AlComplex inputs pass through.

    Parameters
    ----------
    z : Python numeric type or AlComplex

    Returns
    -------
    AlComplex
    """
    if isinstance(z, AlComplex):
        return z
    return AlComplex(z.real, z.imag)
def complexize_argument(fun):
    """Decorator: coerce the first positional argument to AlComplex.

    Fix: the wrapper previously accepted only positional extras, silently
    dropping any keyword arguments passed to the decorated function; they
    are now forwarded unchanged.

    Parameters
    ----------
    fun : The function to decorate

    Returns
    -------
    The decorated function.
    """
    @wraps(fun)
    def wrapper(z, *args, **kwargs):
        return fun(real_to_complex(z), *args, **kwargs)
    return wrapper
# --------- BASIC COMPLEX FUNCTIONS ---------
def conjugate(z):
    """Complex conjugate of *z*, as an AlComplex.

    Parameters
    ----------
    z : Python numeric type or AlComplex

    Returns
    -------
    AlComplex
    """
    re_part, im_part = z.real, z.imag
    return AlComplex(re_part, -im_part)
def modulus(z):
    """Return |z|, the complex modulus of *z*.

    Fix: uses math.hypot instead of sqrt(re**2 + im**2); hypot avoids
    the spurious overflow/underflow of squaring components near the
    float limits while giving the same result otherwise.

    Parameters
    ----------
    z : Python numeric type or AlComplex

    Returns
    -------
    float
    """
    return m.hypot(z.real, z.imag)
def phase(z):
    """Principal argument of *z*, computed with atan2.

    Returns
    -------
    float
    """
    y, x = z.imag, z.real
    return m.atan2(y, x)

def real(z):
    """Real part of *z*, as a plain number.

    Returns
    -------
    float
    """
    re_part = z.real
    return re_part

def imaginary(z):
    """Imaginary part of *z*, as a plain number.

    Returns
    -------
    float
    """
    im_part = z.imag
    return im_part
# --------- SINGLE VALUED FUNCTIONS ---------
@complexize_argument
def exp(z):
    """AlComplex-compatible complex exponential function."""
    w = cm.exp(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def Ln(z):
    """Principal branch of the complex natural logarithm.

    Uses the main argument of the given number.

    See Also
    --------
    ln_n_branch : a specific branch value, using one of the possible arguments.
    ln_values : multiple values between given branches.
    """
    w = cm.log(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def inverse(z):
    """Multiplicative inverse of *z*: the number w with z*w == 1.

    Equivalent to 1/z and z**-1.
    """
    return z ** -1

@complexize_argument
def sqrt(z):
    """Principal square root of *z* (equivalent to z**(1/2))."""
    w = cm.sqrt(z.to_python_complex())
    return AlComplex.from_python_complex(w)
# --------- TRIGONOMETRIC ---------
@complexize_argument
def sin(z):
    """AlComplex-compatible sine."""
    w = cm.sin(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def cos(z):
    """AlComplex-compatible cosine."""
    w = cm.cos(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def tan(z):
    """AlComplex-compatible tangent."""
    w = cm.tan(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def sec(z):
    """AlComplex-compatible secant (reciprocal of the cosine)."""
    w = cm.cos(z.to_python_complex()) ** -1
    return AlComplex.from_python_complex(w)

@complexize_argument
def csc(z):
    """AlComplex-compatible cosecant (reciprocal of the sine)."""
    w = cm.sin(z.to_python_complex()) ** -1
    return AlComplex.from_python_complex(w)

@complexize_argument
def cot(z):
    """AlComplex-compatible cotangent (reciprocal of the tangent)."""
    w = cm.tan(z.to_python_complex()) ** -1
    return AlComplex.from_python_complex(w)
# --------- HYPERBOLIC FUNCTIONS ---------
@complexize_argument
def sinh(z):
    """AlComplex-compatible hyperbolic sine."""
    w = cm.sinh(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def cosh(z):
    """AlComplex-compatible hyperbolic cosine."""
    w = cm.cosh(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def tanh(z):
    """AlComplex-compatible hyperbolic tangent."""
    w = cm.tanh(z.to_python_complex())
    return AlComplex.from_python_complex(w)

@complexize_argument
def sech(z):
    """AlComplex-compatible hyperbolic secant (1/cosh)."""
    w = cm.cosh(z.to_python_complex()) ** -1
    return AlComplex.from_python_complex(w)

@complexize_argument
def csch(z):
    """AlComplex-compatible hyperbolic cosecant (1/sinh)."""
    w = cm.sinh(z.to_python_complex()) ** -1
    return AlComplex.from_python_complex(w)

@complexize_argument
def coth(z):
    """AlComplex-compatible hyperbolic cotangent (1/tanh).

    Fix: the original body re-applied real_to_complex(z) even though
    @complexize_argument already coerces the argument; the redundant
    (no-op) conversion has been removed for consistency with the other
    hyperbolic wrappers.
    """
    w = cm.tanh(z.to_python_complex()) ** -1
    return AlComplex.from_python_complex(w)
# ----- INVERSE FUNCTIONS -----
@complexize_argument
def asin(z):
    """Arcsine (principal value) over AlComplex values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    Returns
    -------
    AlComplex
    """
    image = cm.asin(z.to_python_complex())
    return AlComplex.from_python_complex(image)
@complexize_argument
def acos(z):
    """Arccosine (principal value) over AlComplex values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    Returns
    -------
    AlComplex
    """
    image = cm.acos(z.to_python_complex())
    return AlComplex.from_python_complex(image)
@complexize_argument
def atan(z):
    """Arctangent (principal value) over AlComplex values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    Returns
    -------
    AlComplex
    """
    image = cm.atan(z.to_python_complex())
    return AlComplex.from_python_complex(image)
@complexize_argument
def asinh(z):
    """Hyperbolic arcsine (principal value) over AlComplex values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    Returns
    -------
    AlComplex
    """
    image = cm.asinh(z.to_python_complex())
    return AlComplex.from_python_complex(image)
@complexize_argument
def acosh(z):
    """Hyperbolic arccosine (principal value) over AlComplex values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    Returns
    -------
    AlComplex
    """
    image = cm.acosh(z.to_python_complex())
    return AlComplex.from_python_complex(image)
@complexize_argument
def atanh(z):
    """Hyperbolic arctangent (principal value) over AlComplex values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    Returns
    -------
    AlComplex
    """
    image = cm.atanh(z.to_python_complex())
    return AlComplex.from_python_complex(image)
# --------- MULTIPLE VALUED FUNCTIONS ---------
@complexize_argument
def int_roots(z, n, include_self=False):
    """ Generates all the complex n-roots of a number.
    For now, n can only be an integer. Similar to z**(1/n) but this yields all the other branches values.
    Parameters
    ----------
    z : Python numeric type or AlComplex
    n : int
    include_self : bool, optional
        Whether z itself will be yielded first.
    Returns
    -------
    A generator that yields AlComplex
    Raises
    ------
    ValueError:
        If n is not an integer greater than 0.
    """
    if not isinstance(n, int) or n <= 0:
        raise ValueError('Expected second parameter to be an integer greater than zero. Got {} instead'.format(n))
    polar = z.to_polar()
    magnitude = polar[0]**(1/n)
    arg = polar[1]/n
    # Successive roots are evenly spaced by 2*pi/n around the circle.
    growth = 2*m.pi/n
    # BUG FIX: `(z)` is just z, not a 1-tuple, so chain() raised TypeError
    # while iterating whenever include_self=True. The trailing comma makes
    # it a real tuple.
    first_value = (z,) if include_self else ()
    values_generator = (AlComplex.polar(magnitude, arg+k*growth) for k in range(n))
    return chain(first_value, values_generator)
@complexize_argument
def ln_values(z, n_start=0, n_finish=None):
    """ Generates all the possible complex natural logarithm values between certain branches.
    The complex logarithm function is defined as ln(z) = log|z| + i(phase(z)+2pi*n), where n is an integer.
    This function yields the values obtained by varying n.
    Also note that ln_values(z, n1, n2) is a reversed ln_values(z, n2, n1).
    Parameters
    ----------
    z : Python numeric type or AlComplex
    n_start : int, optional
        The beginning n in the formula above (inclusive).
    n_finish : int or None, optional
        The bound on n in the formula above (exclusive). If None, the sequence
        is unbounded. If smaller than n_start, n decreases in the sequence
        given by the formula above.
    Returns
    -------
    A generator that yields AlComplex
    Raises
    ------
    ValueError:
        If n_start is not an integer.
        If n_finish is provided but is not an integer.
    """
    if not isinstance(n_start, int):
        raise ValueError('Expected starting value to be an integer. Got {} instead'.format(n_start))
    if n_finish is not None:
        if not isinstance(n_finish, int):
            raise ValueError('Expected finishing value to be an integer. Got {} instead'.format(n_finish))
    else:
        # No finishing branch: iterate forever (upper_bound becomes +inf).
        n_finish = float('inf')
    real = m.log(z.abs())
    arg = z.phase()
    double_pi = 2*m.pi
    # step's sign lets a single while-condition cover both ascending and
    # descending sequences.
    step = 1 if (n_start <= n_finish) else -1
    def values_generator():
        counter = n_start
        upper_bound = n_finish*step
        # counter*step < upper_bound keeps n_finish itself out of the
        # sequence in both directions (n_finish is exclusive).
        while counter*step < upper_bound:
            yield AlComplex(real, arg+ double_pi*counter)
            counter += step
    return values_generator()
def ln_n_branch(z, n):
    """ Gets the specific value of the complex logarithm in a certain branch.
    The complex logarithm function is defined as ln(z) = log|z| + i(phase(z)+2pi*n), where n is an integer.
    This function returns the specific value of the function for the given n.
    Parameters
    ----------
    z : Python numeric value or AlComplex
    n : int
    Returns
    -------
    AlComplex
    Raises
    ------
    ValueError:
        If n is not an integer.
    """
    if not isinstance(n, int):
        raise ValueError('Expected function argument to be an integer. Got {} instead'.format(n))
    # Ln is the principal-branch logarithm and i the imaginary unit, both
    # defined at module level.
    return Ln(z) + 2*m.pi*n*i
class AlComplex():
    """ Creates a complex number with rectangular coordinates.
    Attributes
    ---------
    symbol : str
        How the imaginary unit will be represented (i or j).
    precision : float
        The error margin of complex numbers components. Used for calculating equalities.
    real : float
        The real part of the complex number.
    imag : float
        The imaginary part of the complex number.
    Parameters
    ----------
    r : int or float
        The real part of the complex number.
    i : int or float, optional
        The imaginary part of the complex number
    """
    def __init__(self, r, i=0):
        # Accept AlComplex components as long as they are purely real
        # (to_float raises otherwise).
        if isinstance(r, AlComplex):
            r = r.to_float()
        if isinstance(i, AlComplex):
            i = i.to_float()
        # Since sin(pi) != 0 thanks to float precision, but a number very very small, we put this guard.
        if abs(r) < AlComplex.precision:
            r = 0
        if abs(i) < AlComplex.precision:
            i = 0
        self.imag = float(i)
        self.real = float(r)
    # Class-level attributes. Placed after __init__ in the body, but the whole
    # class body runs before any instantiation, so __init__ can use them.
    symbol = 'i'
    precision = 1e-14
    @staticmethod
    def polar(r, arg):
        """ Creates an AlComplex number from the given polar coordinates.
        Parameters
        ----------
        r : int or float
            The modulo of the desired complex number.
        arg : int or float
            The argument in radians of the decided complex number.
        Returns
        -------
        AlComplex
        """
        # Rounding to 15 decimals smooths cos/sin float noise before the
        # constructor's own precision guard runs.
        return AlComplex(round(r*m.cos(arg), 15), round(r*m.sin(arg), 15))
    @staticmethod
    def from_python_complex(n):
        """ Wraps a Python standard complex number in an AlComplex number.
        Parameters
        ----------
        n : complex
        Returns
        -------
        AlComplex
        """
        return AlComplex(n.real, n.imag)
    def to_polar(self):
        """ Gives the polar representation of the AlComplex number.
        Returns
        -------
        (float, float)
            A tuple of the form (modulus, main argument).
        """
        return self.abs(), self.phase()
    def to_rect_coord(self):
        """ Gives the rectangular coordinates representation of the AlComplex number.
        Returns
        -------
        (float, float)
            A tuple of the form (real part, imaginary part).
        """
        return self.real, self.imag
    def to_python_complex(self):
        """ Forms a standard Python complex number from the AlComplex number components.
        Returns
        -------
        complex
        """
        return self.real + self.imag*1.j
    def to_float(self):
        """ Converts an AlComplex number to a float if it only has a real part.
        Returns
        -------
        float
        Raises
        ------
        TypeError
            If the imaginary part of the AlComplex number is not zero.
        """
        if self.imag == 0:
            return float(self.real)
        else:
            raise TypeError('Cannot convert to float. Imaginary part is not zero.')
    def to_int(self):
        """ Converts an AlComplex number to an int if it only has a real part.
        Returns
        -------
        int
        Raises
        ------
        TypeError
            If the imaginary part of the AlComplex number is not zero.
        """
        if self.imag == 0:
            return int(self.real)
        else:
            raise TypeError('Cannot convert to int. Imaginary part is not zero.')
    def abs(self):
        """ Calculates the modulus of self.
        Returns
        -------
        float
        See Also
        --------
        modulus, magnitude
        """
        # Delegates to the module-level modulus() helper.
        return modulus(self)
    def modulus(self):
        """ Calculates the modulus of self.
        Returns
        -------
        float
        See Also
        --------
        abs, magnitude
        """
        return modulus(self)
    def magnitude(self):
        """ Calculates the modulus of self.
        Returns
        -------
        float
        See Also
        --------
        modulus, abs
        """
        return modulus(self)
    def phase(self):
        """ Finds the principal argument in radians of self.
        Returns
        -------
        float
        See Also
        --------
        arg, angle
        """
        # Delegates to the module-level phase() helper.
        return phase(self)
    def arg(self):
        """ Finds the principal argument in radians of self.
        Returns
        -------
        float
        See Also
        --------
        phase, angle
        """
        return phase(self)
    def angle(self):
        """ Finds the principal argument in radians of self.
        Returns
        -------
        float
        See Also
        --------
        arg, phase
        """
        return phase(self)
    def conjugate(self):
        """ Gives the conjugate of self as an AlComplex number.
        Returns
        -------
        AlComplex
        """
        return conjugate(self)
    # Operator Overloading
    def __abs__(self):
        return modulus(self)
    def __neg__(self):
        return AlComplex(-self.real, -self.imag)
    def __add__(self, z):
        # real_to_complex lets plain Python numbers mix with AlComplex.
        z = real_to_complex(z)
        return AlComplex(self.real + z.real, self.imag + z.imag)
    def __radd__(self, z):
        return self + z
    def __mul__(self, z):
        z = real_to_complex(z)
        # (a+bi)(c+di) = (ac-bd) + (ad+bc)i
        return AlComplex(self.real*z.real - self.imag*z.imag,
                         self.real*z.imag + self.imag*z.real)
    def __rmul__(self, z):
        return self*z
    def __sub__(self, z):
        z = real_to_complex(z)
        return AlComplex(self.real - z.real, self.imag - z.imag)
    def __rsub__(self, z):
        return -self + z
    def __truediv__(self, z):
        z = real_to_complex(z)
        # Division implemented as multiplication by the inverse (via __pow__).
        return self*z**-1
    def __rtruediv__(self, z):
        return self**-1*z
    def __pow__(self, z):
        z = real_to_complex(z)
        # self**z = exp(z * Ln(self)), with Ln the principal logarithm.
        return exp(z*Ln(self))
    def __rpow__(self, z):
        z = real_to_complex(z)
        return exp(self*Ln(z))
    # NOTE(review): __eq__ is defined without __hash__, so instances are
    # unhashable under Python 3's default — confirm that is intended.
    def __eq__(self, z):
        z = real_to_complex(z)
        # isclose() absorbs the typical floating point imprecision in the
        # components, using the class-wide `precision` tolerance.
        return (m.isclose(z.real,self.real, abs_tol=self.precision)
                and m.isclose(z.imag ,self.imag, abs_tol=self.precision))
    def __repr__(self):
        return str(self)
    def __str__(self):
        # Always renders both parts, e.g. "1.0 + 2.0i" / "1.0 - 2.0i".
        sign = ' - ' if self.imag < 0 else ' + '
        return str(self.real) + sign + str(abs(self.imag)) + AlComplex.symbol
""" A shorter alias for constructing complex numbers.
"""
C = AlComplex
""" The complex unit.
"""
j = i = I = J = AlComplex(0, 1) | {
"repo_name": "Jexan/AlComplex",
"path": "AlComplex/__init__.py",
"copies": "1",
"size": "20277",
"license": "mit",
"hash": -587384492263944800,
"line_mean": 21.6569832402,
"line_max": 124,
"alpha_frac": 0.5603886176,
"autogenerated": false,
"ratio": 4.258979206049149,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5319367823649149,
"avg_score": null,
"num_lines": null
} |
"""An extended version of the log_settings module from zamboni:
https://github.com/jbalogh/zamboni/blob/master/log_settings.py
"""
from __future__ import absolute_import
from tornado.log import LogFormatter as TornadoLogFormatter
import logging, logging.handlers
import os.path
import types
from logconfig import dictconfig
# Pulled from commonware.log we don't have to import that, which drags with
# it Django dependencies.
class RemoteAddressFormatter(logging.Formatter):
    """Formatter that guarantees a REMOTE_ADDR field is present on records."""
    def format(self, record):
        # Only backfill when the format string actually asks for the field.
        wants_addr = '%(REMOTE_ADDR)' in self._fmt
        if wants_addr and 'REMOTE_ADDR' not in record.__dict__:
            record.__dict__['REMOTE_ADDR'] = None
        return logging.Formatter.format(self, record)
class UTF8SafeFormatter(RemoteAddressFormatter):
    """Formatter that encodes formatted output to bytes (Python 2 only).

    NOTE(review): relies on Python 2 names (types.StringType/UnicodeType,
    str.decode); it will fail at runtime on Python 3.
    """
    def __init__(self, fmt=None, datefmt=None, encoding='utf-8'):
        logging.Formatter.__init__(self, fmt, datefmt)
        self.encoding = encoding  # codec used for the encode/decode below
    def formatException(self, e):
        r = logging.Formatter.formatException(self, e)
        if type(r) in [types.StringType]:
            r = r.decode(self.encoding, 'replace') # Convert to unicode
        return r
    def format(self, record):
        t = RemoteAddressFormatter.format(self, record)
        if type(t) in [types.UnicodeType]:
            t = t.encode(self.encoding, 'replace')
        return t
class NullHandler(logging.Handler):
    """Handler that discards every record (predates logging.NullHandler, added in 2.7)."""
    def emit(self, record):
        pass
def initialize_logging(syslog_tag, syslog_facility, loggers,
                       log_level=logging.INFO, use_syslog=False):
    """Configure the logging tree through dictConfig.

    Parameters: `syslog_tag` prefixes syslog lines; `syslog_facility` is
    passed to SysLogHandler; `loggers` maps top-level cfg section names
    (e.g. 'loggers') to dicts merged into the base config; `log_level` and
    `use_syslog` supply defaults for loggers that do not set their own.
    """
    # BUG FIX: this variant never defined a 'syslog' handler, so
    # use_syslog=True made dictConfig fail on the dangling handler name
    # (and syslog_facility was ignored). Mirror the sibling implementations,
    # with a fallback to SysLogHandler's documented default (UDP to
    # localhost:514) when no local syslog device file exists.
    if os.path.exists('/dev/log'):
        syslog_device = '/dev/log'
    elif os.path.exists('/var/run/syslog'):
        syslog_device = '/var/run/syslog'
    else:
        syslog_device = ('localhost', logging.handlers.SYSLOG_UDP_PORT)
    base_fmt = ('%(name)s:%(levelname)s %(message)s:%(pathname)s:%(lineno)s')
    cfg = {
        'version': 1,
        'filters': {},
        'formatters': {
            'debug': {
                '()': UTF8SafeFormatter,
                # BUG FIX: '%s' is a platform-dependent strftime code (epoch
                # seconds on glibc); seconds-of-minute is '%S'.
                'datefmt': '%H:%M:%S',
                'format': '%(asctime)s ' + base_fmt,
            },
            'prod': {
                '()': UTF8SafeFormatter,
                'datefmt': '%H:%M:%S',
                'format': '%s: [%%(REMOTE_ADDR)s] %s' % (syslog_tag, base_fmt),
            },
            'tornado': {
                '()': TornadoLogFormatter,
                'color': True
            },
        },
        'handlers': {
            'console': {
                '()': logging.StreamHandler,
                'formatter': 'tornado'
            },
            'null': {
                '()': NullHandler,
            },
            'syslog': {
                '()': logging.handlers.SysLogHandler,
                'facility': syslog_facility,
                'address': syslog_device,
                'formatter': 'prod',
            },
        },
        'loggers': {
        }
    }
    # NOTE(review): keys of `loggers` are treated as top-level cfg sections
    # whose dicts get merged in — verify against the callers.
    for key, value in loggers.items():
        cfg[key].update(value)
    # Set the level and handlers for all loggers.
    for logger in cfg['loggers'].values():
        if 'handlers' not in logger:
            logger['handlers'] = ['syslog' if use_syslog else 'console']
        if 'level' not in logger:
            logger['level'] = log_level
        if 'propagate' not in logger:
            logger['propagate'] = False
    dictconfig.dictConfig(cfg)
| {
"repo_name": "shizhz/tutu",
"path": "logconfig/logconfig.py",
"copies": "1",
"size": "2974",
"license": "mit",
"hash": -6657438961477350000,
"line_mean": 30.9784946237,
"line_max": 79,
"alpha_frac": 0.5558170814,
"autogenerated": false,
"ratio": 4.008086253369272,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005900938809252849,
"num_lines": 93
} |
"""An extended version of the log_settings module from zamboni:
https://github.com/jbalogh/zamboni/blob/master/log_settings.py
"""
from __future__ import absolute_import
import logging
import logging.handlers
import os.path
import types
from tornado.log import LogFormatter as TornadoLogFormatter
from logconfig import dictconfig
# Pulled from commonware.log we don't have to import that, which drags with
# it Django dependencies.
class RemoteAddressFormatter(logging.Formatter):
    """Formatter that guarantees a REMOTE_ADDR field is present on records."""
    def format(self, record):
        # Only backfill when the format string actually asks for the field.
        wants_addr = '%(REMOTE_ADDR)' in self._fmt
        if wants_addr and 'REMOTE_ADDR' not in record.__dict__:
            record.__dict__['REMOTE_ADDR'] = None
        return logging.Formatter.format(self, record)
class UTF8SafeFormatter(RemoteAddressFormatter):
    """Formatter that encodes formatted output to bytes (Python 2 only).

    NOTE(review): relies on Python 2 names (types.StringType/UnicodeType,
    str.decode); it will fail at runtime on Python 3.
    """
    def __init__(self, fmt=None, datefmt=None, encoding='utf-8'):
        logging.Formatter.__init__(self, fmt, datefmt)
        self.encoding = encoding  # codec used for the encode/decode below
    def formatException(self, e):
        r = logging.Formatter.formatException(self, e)
        if type(r) in [types.StringType]:
            r = r.decode(self.encoding, 'replace') # Convert to unicode
        return r
    def format(self, record):
        t = RemoteAddressFormatter.format(self, record)
        if type(t) in [types.UnicodeType]:
            t = t.encode(self.encoding, 'replace')
        return t
class NullHandler(logging.Handler):
    """Handler that discards every record (predates logging.NullHandler, added in 2.7)."""
    def emit(self, record):
        pass
def initialize_logging(syslog_tag, syslog_facility, loggers,
                       log_level=logging.INFO, use_syslog=False):
    """Configure the logging tree through dictConfig.

    Parameters: `syslog_tag` prefixes syslog lines; `syslog_facility` is
    passed to SysLogHandler; `loggers` maps top-level cfg section names
    (e.g. 'loggers') to dicts merged into the base config; `log_level` and
    `use_syslog` supply defaults for loggers that do not set their own.
    """
    if os.path.exists('/dev/log'):
        syslog_device = '/dev/log'
    elif os.path.exists('/var/run/syslog'):
        syslog_device = '/var/run/syslog'
    else:
        # BUG FIX: syslog_device was left unbound when neither device file
        # exists (containers, Windows), raising NameError when cfg is built.
        # Fall back to SysLogHandler's documented default: UDP to
        # localhost:514.
        syslog_device = ('localhost', logging.handlers.SYSLOG_UDP_PORT)
    base_fmt = ('%(name)s:%(levelname)s %(message)s:%(pathname)s:%(lineno)s')
    cfg = {
        'version': 1,
        'filters': {},
        'formatters': {
            'debug': {
                '()': UTF8SafeFormatter,
                # BUG FIX: '%s' is a platform-dependent strftime code (epoch
                # seconds on glibc); seconds-of-minute is '%S'.
                'datefmt': '%H:%M:%S',
                'format': '%(asctime)s ' + base_fmt,
            },
            'prod': {
                '()': UTF8SafeFormatter,
                'datefmt': '%H:%M:%S',
                'format': '%s: [%%(REMOTE_ADDR)s] %s' % (syslog_tag, base_fmt),
            },
            'tornado': {
                '()': TornadoLogFormatter,
                'color': True
            },
        },
        'handlers': {
            'console': {
                '()': logging.StreamHandler,
                'formatter': 'tornado'
            },
            'null': {
                '()': NullHandler,
            },
            'syslog': {
                '()': logging.handlers.SysLogHandler,
                'facility': syslog_facility,
                'address': syslog_device,
                'formatter': 'prod',
            },
        },
        'loggers': {
        }
    }
    # NOTE(review): keys of `loggers` are treated as top-level cfg sections
    # whose dicts get merged in — verify against the callers.
    for key, value in loggers.items():
        cfg[key].update(value)
    # Set the level and handlers for all loggers.
    for logger in cfg['loggers'].values():
        if 'handlers' not in logger:
            logger['handlers'] = ['syslog' if use_syslog else 'console']
        if 'level' not in logger:
            logger['level'] = log_level
        if 'propagate' not in logger:
            logger['propagate'] = False
    dictconfig.dictConfig(cfg)
| {
"repo_name": "LetSpotify/letspotify-server",
"path": "logconfig/logconfig.py",
"copies": "1",
"size": "3364",
"license": "mit",
"hash": -2570820611584533500,
"line_mean": 29.5818181818,
"line_max": 79,
"alpha_frac": 0.5460760999,
"autogenerated": false,
"ratio": 4.02874251497006,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5074818614870059,
"avg_score": null,
"num_lines": null
} |
"""An extended version of the log_settings module from zamboni:
https://github.com/jbalogh/zamboni/blob/master/log_settings.py
"""
from tornado.log import LogFormatter as TornadoLogFormatter
import logging, logging.handlers
import os.path
import types
import dictconfig
# Pulled from commonware.log we don't have to import that, which drags with
# it Django dependencies.
class RemoteAddressFormatter(logging.Formatter):
    """Formatter that guarantees a REMOTE_ADDR field is present on records."""
    def format(self, record):
        # Only backfill when the format string actually asks for the field.
        wants_addr = '%(REMOTE_ADDR)' in self._fmt
        if wants_addr and 'REMOTE_ADDR' not in record.__dict__:
            record.__dict__['REMOTE_ADDR'] = None
        return logging.Formatter.format(self, record)
class UTF8SafeFormatter(RemoteAddressFormatter):
    """Formatter that encodes formatted output to bytes (Python 2 only).

    NOTE(review): relies on Python 2 names (types.StringType/UnicodeType,
    str.decode); it will fail at runtime on Python 3.
    """
    def __init__(self, fmt=None, datefmt=None, encoding='utf-8'):
        logging.Formatter.__init__(self, fmt, datefmt)
        self.encoding = encoding  # codec used for the encode/decode below
    def formatException(self, e):
        r = logging.Formatter.formatException(self, e)
        if type(r) in [types.StringType]:
            r = r.decode(self.encoding, 'replace') # Convert to unicode
        return r
    def format(self, record):
        t = RemoteAddressFormatter.format(self, record)
        if type(t) in [types.UnicodeType]:
            t = t.encode(self.encoding, 'replace')
        return t
class NullHandler(logging.Handler):
    """Handler that discards every record (predates logging.NullHandler, added in 2.7)."""
    def emit(self, record):
        pass
def initialize_logging(syslog_tag, syslog_facility, loggers,
                       log_level=logging.INFO, use_syslog=False):
    """Configure the logging tree through dictConfig.

    Parameters: `syslog_tag` prefixes syslog lines; `syslog_facility` is
    passed to SysLogHandler; `loggers` maps top-level cfg section names
    (e.g. 'loggers') to dicts merged into the base config; `log_level` and
    `use_syslog` supply defaults for loggers that do not set their own.
    """
    if os.path.exists('/dev/log'):
        syslog_device = '/dev/log'
    elif os.path.exists('/var/run/syslog'):
        syslog_device = '/var/run/syslog'
    else:
        # BUG FIX: syslog_device was left unbound when neither device file
        # exists (containers, Windows), raising NameError when cfg is built.
        # Fall back to SysLogHandler's documented default: UDP to
        # localhost:514.
        syslog_device = ('localhost', logging.handlers.SYSLOG_UDP_PORT)
    base_fmt = ('%(name)s:%(levelname)s %(message)s:%(pathname)s:%(lineno)s')
    cfg = {
        'version': 1,
        'filters': {},
        'formatters': {
            'debug': {
                '()': UTF8SafeFormatter,
                # BUG FIX: '%s' is a platform-dependent strftime code (epoch
                # seconds on glibc); seconds-of-minute is '%S'.
                'datefmt': '%H:%M:%S',
                'format': '%(asctime)s ' + base_fmt,
            },
            'prod': {
                '()': UTF8SafeFormatter,
                'datefmt': '%H:%M:%S',
                'format': '%s: [%%(REMOTE_ADDR)s] %s' % (syslog_tag, base_fmt),
            },
            'tornado': {
                '()': TornadoLogFormatter,
                'color': True
            },
        },
        'handlers': {
            'console': {
                '()': logging.StreamHandler,
                'formatter': 'tornado'
            },
            'null': {
                '()': NullHandler,
            },
            'syslog': {
                '()': logging.handlers.SysLogHandler,
                'facility': syslog_facility,
                'address': syslog_device,
                'formatter': 'prod',
            },
        },
        'loggers': {
        }
    }
    # NOTE(review): keys of `loggers` are treated as top-level cfg sections
    # whose dicts get merged in — verify against the callers.
    for key, value in loggers.items():
        cfg[key].update(value)
    # Set the level and handlers for all loggers.
    for logger in cfg['loggers'].values():
        if 'handlers' not in logger:
            logger['handlers'] = ['syslog' if use_syslog else 'console']
        if 'level' not in logger:
            logger['level'] = log_level
        if 'propagate' not in logger:
            logger['propagate'] = False
    dictconfig.dictConfig(cfg)
| {
"repo_name": "tijmenNL/iptv-grabber",
"path": "logconfig/logconfig.py",
"copies": "2",
"size": "3294",
"license": "mit",
"hash": -7171974828646311000,
"line_mean": 31.2941176471,
"line_max": 79,
"alpha_frac": 0.5428051002,
"autogenerated": false,
"ratio": 4.021978021978022,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005380267737848186,
"num_lines": 102
} |
"""An extended version of the log_settings module from zamboni:
https://github.com/jbalogh/zamboni/blob/master/log_settings.py
"""
from tornado.options import _LogFormatter as TornadoLogFormatter
import logging, logging.handlers
import os.path
import types
import dictconfig
# Pulled from commonware.log we don't have to import that, which drags with
# it Django dependencies.
class RemoteAddressFormatter(logging.Formatter):
    """Formatter that guarantees a REMOTE_ADDR field is present on records."""
    def format(self, record):
        # Only backfill when the format string actually asks for the field.
        wants_addr = '%(REMOTE_ADDR)' in self._fmt
        if wants_addr and 'REMOTE_ADDR' not in record.__dict__:
            record.__dict__['REMOTE_ADDR'] = None
        return logging.Formatter.format(self, record)
class UTF8SafeFormatter(RemoteAddressFormatter):
    """Formatter that encodes formatted output to bytes (Python 2 only).

    NOTE(review): relies on Python 2 names (types.StringType/UnicodeType,
    str.decode); it will fail at runtime on Python 3.
    """
    def __init__(self, fmt=None, datefmt=None, encoding='utf-8'):
        logging.Formatter.__init__(self, fmt, datefmt)
        self.encoding = encoding  # codec used for the encode/decode below
    def formatException(self, e):
        r = logging.Formatter.formatException(self, e)
        if type(r) in [types.StringType]:
            r = r.decode(self.encoding, 'replace') # Convert to unicode
        return r
    def format(self, record):
        t = RemoteAddressFormatter.format(self, record)
        if type(t) in [types.UnicodeType]:
            t = t.encode(self.encoding, 'replace')
        return t
class NullHandler(logging.Handler):
    """Handler that discards every record (predates logging.NullHandler, added in 2.7)."""
    def emit(self, record):
        pass
def initialize_logging(syslog_tag, syslog_facility, loggers,
                       log_level=logging.INFO, use_syslog=False):
    """Configure the logging tree through dictConfig.

    Parameters: `syslog_tag` prefixes syslog lines; `syslog_facility` is
    passed to SysLogHandler; `loggers` maps top-level cfg section names
    (e.g. 'loggers') to dicts merged into the base config; `log_level` and
    `use_syslog` supply defaults for loggers that do not set their own.
    """
    if os.path.exists('/dev/log'):
        syslog_device = '/dev/log'
    elif os.path.exists('/var/run/syslog'):
        syslog_device = '/var/run/syslog'
    else:
        # BUG FIX: syslog_device was left unbound when neither device file
        # exists (containers, Windows), raising NameError when cfg is built.
        # Fall back to SysLogHandler's documented default: UDP to
        # localhost:514.
        syslog_device = ('localhost', logging.handlers.SYSLOG_UDP_PORT)
    base_fmt = ('%(name)s:%(levelname)s %(message)s:%(pathname)s:%(lineno)s')
    # cfg->formatters->tornado->color was originally True but was raising
    # errors, and we should investigate why that was happening. in the
    # meantime, pretty logging will not be happening.
    cfg = {
        'version': 1,
        'filters': {},
        'formatters': {
            'debug': {
                '()': UTF8SafeFormatter,
                # BUG FIX: '%s' is a platform-dependent strftime code (epoch
                # seconds on glibc); seconds-of-minute is '%S'.
                'datefmt': '%H:%M:%S',
                'format': '%(asctime)s ' + base_fmt,
            },
            'prod': {
                '()': UTF8SafeFormatter,
                'datefmt': '%H:%M:%S',
                'format': '%s: [%%(REMOTE_ADDR)s] %s' % (syslog_tag, base_fmt),
            },
            'tornado': {
                '()': TornadoLogFormatter,
                'color': False
            },
        },
        'handlers': {
            'console': {
                '()': logging.StreamHandler,
                'formatter': 'tornado'
            },
            'null': {
                '()': NullHandler,
            },
            'syslog': {
                '()': logging.handlers.SysLogHandler,
                'facility': syslog_facility,
                'address': syslog_device,
                'formatter': 'prod',
            },
        },
        'loggers': {
        }
    }
    # NOTE(review): keys of `loggers` are treated as top-level cfg sections
    # whose dicts get merged in — verify against the callers.
    for key, value in loggers.items():
        cfg[key].update(value)
    # Set the level and handlers for all loggers.
    for logger in cfg['loggers'].values():
        if 'handlers' not in logger:
            logger['handlers'] = ['syslog' if use_syslog else 'console']
        if 'level' not in logger:
            logger['level'] = log_level
        if 'propagate' not in logger:
            logger['propagate'] = False
    dictconfig.dictConfig(cfg)
| {
"repo_name": "scrbrd/scoreboard",
"path": "logconfig/logconfig.py",
"copies": "1",
"size": "3500",
"license": "mit",
"hash": 2281988867910946300,
"line_mean": 32.0188679245,
"line_max": 79,
"alpha_frac": 0.554,
"autogenerated": false,
"ratio": 4.041570438799076,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005177238766608632,
"num_lines": 106
} |
"""An extensible dictionary."""
from contextlib import contextmanager
def extend_as_list(original_dict, **kwargs):
    """Merge **kwargs into a copy of *original_dict*, collecting collisions into lists.

    For a colliding key, the existing value becomes (or is copied as) a list
    and the new value is appended/extended onto it; strings count as scalars,
    not iterables of characters. Returns the merged dict.

    BUG FIX: the previous version mutated *original_dict* — and, worse,
    extended its contained lists in place — even though its only caller
    (ExtensibleDict.update) applies the *return value*. That in-place list
    mutation corrupted the shallow `previous` snapshot taken by
    ExtensibleDict.__call__, so the context manager could not restore the
    old content. The function is now pure.
    """
    new_dict = dict(original_dict)
    for k, v in kwargs.items():
        if k in new_dict:
            oldv = new_dict[k]
            # Copy list-like values so the caller's objects are untouched.
            oldv = list(oldv) if hasattr(oldv, 'extend') else [oldv]
            if isinstance(v, str):
                v = [v]
            if hasattr(v, '__iter__'):
                oldv.extend(v)
            else:
                oldv.append(v)
            new_dict[k] = oldv
        else:
            new_dict[k] = v
    return new_dict
class ExtensibleDict:
    """A dictionary that provides temporary, scoped modification.

    Calling an instance returns a context manager: inside the ``with`` block
    the dictionary contains the supplied overrides; on exit the previous
    content is restored.
    """
    # Class-level defaults kept for backward compatibility with code that
    # inspects them on the class itself.
    _current = dict()
    _extender_fn = None
    def __init__(self, extender_fn=None):
        # BUG FIX: _current used to exist only as the class-level dict above,
        # so every instance shared (and leaked) state into every other one.
        # Each instance now owns its mapping.
        self._current = dict()
        self._extender_fn = extender_fn
        super().__init__()
    @contextmanager
    def __call__(self, *args, extender_fn=None, **kwargs):
        """
        A context manager to temporarily modify the content of the dict.

        Entering the context updates the dictionary with **kwargs; leaving it
        restores the previous content.

        If ``extender_fn`` is not None it receives the current content plus
        the kwargs, and its result is what gets stored temporarily. When it
        is None, the instance's default extender (if any) is used instead.
        """
        previous = self._current.copy()  # shallow: values restored by identity
        if extender_fn is None:
            extender_fn = self._extender_fn
        self.update(extender_fn, **kwargs)
        try:
            yield
        finally:
            self._current = previous
    def __iter__(self):
        # NOTE: iterates (key, value) pairs, mirroring items(), not keys.
        return iter(self._current.items())
    def __len__(self):
        return len(self._current)
    def __contains__(self, name):
        return name in self._current
    def __getitem__(self, name):
        return self._current[name]
    def keys(self):
        return self._current.keys()
    def values(self):
        return self._current.values()
    def items(self):
        return self._current.items()
    def get(self, name, *default):
        return self._current.get(name, *default)
    def __delitem__(self, name):
        del self._current[name]
    def __setitem__(self, name, value):
        self._current[name] = value
    def pop(self, name, *default):
        return self._current.pop(name, *default)
    def clear(self):
        self._current.clear()
    def update(self, extender_fn, *args, **kwargs):
        # With an extender, let it merge kwargs against the current content
        # and apply its result; otherwise plain dict.update semantics.
        if extender_fn is not None:
            self._current.update(*args, **extender_fn(self._current, **kwargs))
        else:
            self._current.update(*args, **kwargs)
    def getdict(self):
        """Return a plain dict with every value stringified."""
        return dict((k, str(v)) for k, v in self._current.items())
    def __str__(self):
        return str(self._current)
    def __repr__(self):
        return repr(self._current)
| {
"repo_name": "PolyJIT/benchbuild",
"path": "benchbuild/utils/dict.py",
"copies": "1",
"size": "3054",
"license": "mit",
"hash": -7912648611071020000,
"line_mean": 25.7894736842,
"line_max": 79,
"alpha_frac": 0.564178127,
"autogenerated": false,
"ratio": 4.1214574898785425,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5185635616878542,
"avg_score": null,
"num_lines": null
} |
"""An extensible library for opening URLs using a variety of protocols
The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below). It opens the URL and returns the results as file-like
object; the returned object has some extra methods described below.
The OpenerDirector manages a collection of Handler objects that do
all the actual work. Each Handler implements a particular protocol or
option. The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL. For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns. The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.
urlopen(url, data=None) -- basic usage is the same as original
urllib. pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of URL. Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- function that creates a new OpenerDirector instance.
will install the default handlers. accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate. if one of the argument is a subclass of the default
handler, the argument will be installed instead of the default.
install_opener -- installs a new opener as the default opener.
objects of interest:
OpenerDirector --
Request -- an object that encapsulates the state of a request. the
state can be a simple as the URL. it can also include extra HTTP
headers, e.g. a User-Agent.
BaseHandler --
exceptions:
URLError-- a subclass of IOError, individual protocols have their own
specific subclass
HTTPError-- also a valid HTTP response, so you can treat an HTTP error
as an exceptional event or valid response
internals:
BaseHandler and parent
_call_chain conventions
Example usage:
import urllib2
# set up authentication info
authinfo = urllib2.HTTPBasicAuthHandler()
authinfo.add_password('realm', 'host', 'username', 'password')
proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
# build a new opener that adds authentication and caching FTP handlers
opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
# install it
urllib2.install_opener(opener)
f = urllib2.urlopen('http://www.python.org/')
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows what the problem was, e.g., that it didn't know
# the hash algorithm requested in the challenge, it would be good to
# pass that information along to the client, too.
# XXX to do:
# name!
# documentation (getting there)
# complex proxies
# abstract factory for opener
# ftp errors aren't handled cleanly
# gopher can return a socket.error
# check digest against correct (i.e. non-apache) implementation
import socket
import httplib
import inspect
import re
import base64
import urlparse
import md5
import mimetypes
import mimetools
import rfc822
import ftplib
import sys
import time
import os
import gopherlib
import posixpath
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
import sha
except ImportError:
# need 1.5.2 final
sha = None
# not sure how many of these need to be gotten rid of
from urllib import unwrap, unquote, splittype, splithost, \
addinfourl, splitport, splitgophertype, splitquery, \
splitattr, ftpwrapper, noheaders
# support for proxies via environment variables
from urllib import getproxies
# support for FileHandler
from urllib import localhost, url2pathname
__version__ = "2.0a1"
# Module-wide default opener; created lazily by urlopen() and replaceable
# via install_opener().
_opener = None
def urlopen(url, data=None):
    """Open *url* (a string or Request) with the module-wide opener.

    The default opener is built on first use; *data*, when supplied, is
    posted with the request.
    """
    global _opener
    opener = _opener
    if opener is None:
        opener = _opener = build_opener()
    return opener.open(url, data)
def install_opener(opener):
    """Install *opener* as the module-wide opener used by urlopen()."""
    global _opener
    _opener = opener
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
    """Base exception of this module; wraps a single *reason* object.

    Subtypes IOError so existing except-clauses keep working, but shares
    none of its implementation.
    """
    # URLError is a sub-type of IOError, but it doesn't share any of
    # the implementation. need to override __init__ and __str__
    def __init__(self, reason):
        self.reason = reason
    def __str__(self):
        return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    # Name-mangled alias so a subclass overriding __init__ cannot shadow it.
    __super_init = addinfourl.__init__
    def __init__(self, url, code, msg, hdrs, fp):
        self.code = code          # numeric HTTP status, e.g. 404
        self.msg = msg            # reason phrase from the status line
        self.hdrs = hdrs          # response headers object
        self.fp = fp              # response body file object; may be None
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object. In some cases, the HTTPError may not have a valid
        # file object. If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url)
    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)
    def __del__(self):
        # XXX is this safe? what if user catches exception, then
        # extracts fp and discards exception?
        if self.fp:
            self.fp.close()
class GopherError(URLError):
    """URLError specialization for gopher URLs (see the gopherlib import); no extra state."""
    pass
class Request:
    """Encapsulates one request: a URL plus optional POST data and headers.

    URL components (type/host/selector) are split lazily by the get_*
    accessors and cached on the instance.
    """
    def __init__(self, url, data=None, headers={}):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self.data = data
        self.headers = {}
        for key, value in headers.items():
            self.add_header(key, value)
    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order. this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        # (attr arrives already name-mangled, hence the _Request__r_ prefix.)
        if attr[:12] == '_Request__r_':
            name = attr[12:]
            if hasattr(Request, 'get_' + name):
                getattr(self, 'get_' + name)()
                return getattr(self, attr)
        raise AttributeError, attr
    def get_method(self):
        # POST when body data is present, GET otherwise.
        if self.has_data():
            return "POST"
        else:
            return "GET"
    def add_data(self, data):
        self.data = data
    def has_data(self):
        return self.data is not None
    def get_data(self):
        return self.data
    def get_full_url(self):
        return self.__original
    def get_type(self):
        # Lazily split the scheme off the URL; cached in self.type.
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError, "unknown url type: %s" % self.__original
        return self.type
    def get_host(self):
        # Lazily split (and unquote) the host; cached in self.host.
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host
    def get_selector(self):
        return self.__r_host
    def set_proxy(self, host, type):
        # Route through a proxy: the full original URL becomes the selector.
        self.host, self.type = host, type
        self.__r_host = self.__original
    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val
class OpenerDirector:
    def __init__(self):
        # Advertise ourselves in the default User-agent header.
        server_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', server_version)]
        # manage the individual handlers
        self.handlers = []        # every registered handler, kept sorted
        self.handle_open = {}     # protocol -> [handlers with <proto>_open]
        self.handle_error = {}    # protocol -> {error kind -> [handlers]}
def add_handler(self, handler):
added = 0
for meth in dir(handler):
if meth[-5:] == '_open':
protocol = meth[:-5]
if protocol in self.handle_open:
self.handle_open[protocol].append(handler)
self.handle_open[protocol].sort()
else:
self.handle_open[protocol] = [handler]
added = 1
continue
i = meth.find('_')
j = meth[i+1:].find('_') + i + 1
if j != -1 and meth[i+1:j] == 'error':
proto = meth[:i]
kind = meth[j+1:]
try:
kind = int(kind)
except ValueError:
pass
dict = self.handle_error.get(proto, {})
if kind in dict:
dict[kind].append(handler)
dict[kind].sort()
else:
dict[kind] = [handler]
self.handle_error[proto] = dict
added = 1
continue
if added:
self.handlers.append(handler)
self.handlers.sort()
handler.add_parent(self)
def __del__(self):
self.close()
def close(self):
for handler in self.handlers:
handler.close()
self.handlers = []
def _call_chain(self, chain, kind, meth_name, *args):
# XXX raise an exception if no one else should try to handle
# this url. return None if you can't but someone else could.
handlers = chain.get(kind, ())
for handler in handlers:
func = getattr(handler, meth_name)
result = func(*args)
if result is not None:
return result
def open(self, fullurl, data=None):
# accept a URL or a Request object
if isinstance(fullurl, basestring):
req = Request(fullurl, data)
else:
req = fullurl
if data is not None:
req.add_data(data)
result = self._call_chain(self.handle_open, 'default',
'default_open', req)
if result:
return result
type_ = req.get_type()
result = self._call_chain(self.handle_open, type_, type_ + \
'_open', req)
if result:
return result
return self._call_chain(self.handle_open, 'unknown',
'unknown_open', req)
def error(self, proto, *args):
if proto in ['http', 'https']:
# XXX http[s] protocols are special-cased
dict = self.handle_error['http'] # https is not different than http
proto = args[2] # YUCK!
meth_name = 'http_error_%d' % proto
http_err = 1
orig_args = args
else:
dict = self.handle_error
meth_name = proto + '_error'
http_err = 0
args = (dict, proto, meth_name) + args
result = self._call_chain(*args)
if result:
return result
if http_err:
args = (dict, 'default', 'http_error_default') + orig_args
return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP and FTP.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)
    skip = []
    for klass in default_classes:
        for check in handlers:
            if inspect.isclass(check):
                if issubclass(check, klass):
                    # Bug fix: break so a class is skipped at most once;
                    # two matching handlers used to append the same class
                    # twice, making the remove() below raise ValueError.
                    skip.append(klass)
                    break
            elif isinstance(check, klass):
                skip.append(klass)
                break
    for klass in skip:
        default_classes.remove(klass)
    # Defaults first, then the caller's handlers (instantiating classes).
    for klass in default_classes:
        opener.add_handler(klass())
    for h in handlers:
        if inspect.isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
class BaseHandler:
    """Common plumbing shared by every protocol handler."""
    # Sorting weight; lower values are consulted earlier in a chain.
    handler_order = 500

    def add_parent(self, parent):
        # Called by OpenerDirector when the handler is registered.
        self.parent = parent

    def close(self):
        # Drop the back-reference so director/handler cycles collect.
        self.parent = None

    def __lt__(self, other):
        # Objects without a handler_order (old custom handler classes
        # unaware of the attribute) always sort after the defaults.
        return (not hasattr(other, "handler_order")
                or self.handler_order < other.handler_order)
class HTTPDefaultErrorHandler(BaseHandler):
    """Catch-all: turn any otherwise unhandled HTTP status into HTTPError."""
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        # Only redirect "safe" method/status combinations.
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case).  In practice,
            # essentially all clients do redirect in this case, so we
            # do the same.
            return Request(newurl, headers=req.headers)
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)
    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Prefer the Location header; fall back to the older URI form.
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        newurl = urlparse.urljoin(req.get_full_url(), newurl)
        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return
        # loop detection: give up after more than 10 hops, or when the
        # same URL is seen twice in one redirect chain.
        new.error_302_dict = {}
        if hasattr(req, 'error_302_dict'):
            if len(req.error_302_dict)>10 or \
               newurl in req.error_302_dict:
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
            new.error_302_dict.update(req.error_302_dict)
        new.error_302_dict[newurl] = newurl
        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()
        return self.parent.open(new)
    http_error_301 = http_error_303 = http_error_307 = http_error_302
    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100
    def __init__(self, proxies=None):
        # proxies maps a scheme name to a proxy URL; defaults come from
        # the environment via getproxies().
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # Synthesize a <scheme>_open method per configured proxy; the
        # lambda defaults freeze the current url/type for each scheme.
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))
    def proxy_open(self, req, proxy, type):
        orig_type = req.get_type()
        type, r_type = splittype(proxy)
        # XXX is a throwaway name for the discarded path component.
        host, XXX = splithost(r_type)
        # NOTE(review): assumes the proxy URL always yields a host; a
        # None host would make the '@' test raise -- confirm inputs.
        if '@' in host:
            user_pass, host = host.split('@', 1)
            if ':' in user_pass:
                # Forward proxy credentials as a Basic auth header.
                user, password = user_pass.split(':', 1)
                user_pass = base64.encodestring('%s:%s' % (unquote(user),
                                                           unquote(password)))
                req.add_header('Proxy-authorization', 'Basic ' + user_pass)
        host = unquote(host)
        req.set_proxy(host, type)
        if orig_type == type:
            # let other handlers take care of it
            # XXX this only makes sense if the proxy is before the
            # other handlers
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            return self.parent.open(req)
# feature suggested by Duncan Booth
# XXX custom is not a good name
class CustomProxy:
    """Per-request proxy rule: a predicate plus the proxy address to use."""
    # either pass a function to the constructor or override handle

    def __init__(self, proto, func=None, proxy_addr=None):
        self.proto, self.func, self.addr = proto, func, proxy_addr

    def handle(self, req):
        # Truthy predicate result means "route req through this proxy".
        verdict = self.func and self.func(req)
        if verdict:
            return 1

    def get_proxy(self):
        return self.addr
class CustomProxyHandler(BaseHandler):
    """Routes requests through CustomProxy rules, grouped by protocol."""
    # Proxies must be in front
    handler_order = 100

    def __init__(self, *proxies):
        # Bug fix: the positional CustomProxy arguments were previously
        # accepted and then silently discarded; register them.
        self.proxies = {}
        for proxy in proxies:
            self.add_proxy(proxy)

    def proxy_open(self, req):
        # The first CustomProxy that claims the request wins.
        proto = req.get_type()
        try:
            proxies = self.proxies[proto]
        except KeyError:
            return None
        for p in proxies:
            if p.handle(req):
                # NOTE(review): Request.set_proxy() takes (host, type);
                # this one-argument call would raise TypeError at
                # runtime -- confirm intended usage before relying on it.
                req.set_proxy(p.get_proxy())
                return self.parent.open(req)
        return None

    def do_proxy(self, p, req):
        return self.parent.open(req)

    def add_proxy(self, cpo):
        # Group proxy rules by their protocol name.
        if cpo.proto in self.proxies:
            self.proxies[cpo.proto].append(cpo)
        else:
            self.proxies[cpo.proto] = [cpo]
class HTTPPasswordMgr:
    """Maps (realm, URI prefix) pairs to (user, password) credentials."""

    def __init__(self):
        # realm -> {tuple of reduced URIs: (user, password)}
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        reduced = tuple(map(self.reduce_uri, uri))
        if realm not in self.passwd:
            self.passwd[realm] = {}
        self.passwd[realm][reduced] = (user, passwd)

    def find_user_password(self, realm, authuri):
        # Scan every URI registered under the realm for a prefix match.
        domains = self.passwd.get(realm, {})
        target = self.reduce_uri(authuri)
        for uris, authinfo in domains.iteritems():
            for uri in uris:
                if self.is_suburi(uri, target):
                    return authinfo
        return None, None

    def reduce_uri(self, uri):
        """Accept netloc or URI and extract only the netloc and path"""
        parts = urlparse.urlparse(uri)
        if parts[1]:
            # Full URI: keep netloc plus path (default the path to '/').
            return parts[1], parts[2] or '/'
        else:
            # A bare netloc lands in the path slot of the parse result.
            return parts[2], '/'

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        prefix = posixpath.commonprefix((base[1], test[1]))
        return len(prefix) == len(base[1])
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to a catch-all realm of None."""

    def find_user_password(self, realm, authuri):
        # Try the exact realm first, then the default (None) realm.
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is None:
            user, password = HTTPPasswordMgr.find_user_password(self, None,
                                                                authuri)
        return user, password
class AbstractBasicAuthHandler:
    """Shared logic for retrying a request with HTTP Basic credentials."""

    # Matches: <scheme> realm="<realm>"
    rx = re.compile('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', re.I)

    # XXX there can actually be multiple auth-schemes in a
    # www-authenticate header.  should probably be a lot more careful
    # in parsing them to extract multiple alternatives

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # XXX could be multiple headers
        challenge = headers.get(authreq, None)
        if not challenge:
            return None
        match = AbstractBasicAuthHandler.rx.match(challenge)
        if match is None:
            return None
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            return None
        return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if not pw:
            return None
        raw = "%s:%s" % (user, pw)
        auth = 'Basic %s' % base64.encodestring(raw).strip()
        if req.headers.get(self.auth_header, None) == auth:
            # The same credentials already failed once; don't loop.
            return None
        req.add_header(self.auth_header, auth)
        return self.parent.open(req)
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Retries 401 responses against the origin server with Basic auth."""
    auth_header = 'Authorization'
    def http_error_401(self, req, fp, code, msg, headers):
        # The URL's netloc names the protection-space host.
        host = urlparse.urlparse(req.get_full_url())[1]
        return self.http_error_auth_reqed('www-authenticate',
                                          host, req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Retries 407 responses from a proxy with Basic auth."""
    auth_header = 'Proxy-authorization'
    def http_error_407(self, req, fp, code, msg, headers):
        host = req.get_host()
        return self.http_error_auth_reqed('proxy-authenticate',
                                          host, req, headers)
class AbstractDigestAuthHandler:
    """Shared machinery for retrying requests with Digest credentials."""

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # Only a Digest challenge is handled here; Basic lives elsewhere.
        authreq = headers.get(self.auth_header, None)
        if authreq:
            kind = authreq.split()[0]
            if kind == 'Digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # Identical credentials already failed once; give up
                # rather than loop.
                return None
            req.add_header(self.auth_header, auth_val)
            resp = self.parent.open(req)
            return resp

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None
        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            # Unsupported digest algorithm.
            return None
        user, pw = self.passwd.find_user_password(realm,
                                                  req.get_full_url())
        if user is None:
            return None
        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None
        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.has_data() and 'POST' or 'GET',
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base = base + ', opaque="%s"' % opaque
        if entdig:
            base = base + ', digest="%s"' % entdig
        if algorithm != 'MD5':
            base = base + ', algorithm="%s"' % algorithm
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        # Bug fix: initialize H so an unsupported algorithm yields a
        # None hash function -- which get_authorization() already
        # checks for -- instead of raising UnboundLocalError.
        H = None
        if algorithm == 'MD5':
            H = lambda x, e=encode_digest:e(md5.new(x).digest())
        elif algorithm == 'SHA':
            H = lambda x, e=encode_digest:e(sha.new(x).digest())
        # XXX MD5-sess
        KD = lambda s, d, H=H: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """
    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse.urlparse(req.get_full_url())[1]
        # Bug fix: return the retried response (or None) instead of
        # computing and discarding it.
        return self.http_error_auth_reqed('www-authenticate',
                                          host, req, headers)
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Retries 407 responses from a proxy with Digest auth."""
    auth_header = 'Proxy-Authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.get_host()
        # Bug fix: return the retried response (or None) instead of
        # computing and discarding it.
        return self.http_error_auth_reqed('proxy-authenticate',
                                          host, req, headers)
def encode_digest(digest):
    """Return the lowercase hexadecimal representation of a digest string."""
    nibbles = []
    for ch in digest:
        byte = ord(ch)
        # high nibble first, then low nibble; hex(n)[-1] is '0'..'f'
        nibbles.append(hex((byte >> 4) & 0xf)[-1])
        nibbles.append(hex(byte & 0xf)[-1])
    return ''.join(nibbles)
class AbstractHTTPHandler(BaseHandler):
    # XXX Should rewrite do_open() to use the new httplib interface,
    # which would be a little simpler.
    def do_open(self, http_class, req):
        # Issue the request via http_class (httplib.HTTP or HTTPS):
        # returns an addinfourl on status 200, otherwise delegates to
        # the director's http error chain.
        host = req.get_host()
        if not host:
            raise URLError('no host given')
        h = http_class(host) # will parse host:port
        if req.has_data():
            data = req.get_data()
            h.putrequest('POST', req.get_selector())
            # POST bodies default to form encoding unless the caller
            # supplied the headers explicitly.
            if not 'Content-type' in req.headers:
                h.putheader('Content-type',
                            'application/x-www-form-urlencoded')
            if not 'Content-length' in req.headers:
                h.putheader('Content-length', '%d' % len(data))
        else:
            h.putrequest('GET', req.get_selector())
        # When going through a proxy the selector is a full URL, so the
        # Host header must name the URL's own host, not the proxy.
        scheme, sel = splittype(req.get_selector())
        sel_host, sel_path = splithost(sel)
        h.putheader('Host', sel_host or host)
        # Opener-wide headers are defaults; per-request headers win.
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if name not in req.headers:
                h.putheader(name, value)
        for k, v in req.headers.items():
            h.putheader(k, v)
        # httplib will attempt to connect() here. be prepared
        # to convert a socket error to a URLError.
        try:
            h.endheaders()
        except socket.error, err:
            raise URLError(err)
        if req.has_data():
            h.send(data)
        code, msg, hdrs = h.getreply()
        fp = h.getfile()
        if code == 200:
            return addinfourl(fp, hdrs, req.get_full_url())
        else:
            return self.parent.error('http', req, fp, code, msg, hdrs)
class HTTPHandler(AbstractHTTPHandler):
    """Opens http:// URLs via the shared do_open() machinery."""
    def http_open(self, req):
        return self.do_open(httplib.HTTP, req)
# Offer HTTPS support only when httplib was built with SSL.
if hasattr(httplib, 'HTTPS'):
    class HTTPSHandler(AbstractHTTPHandler):
        def https_open(self, req):
            return self.do_open(httplib.HTTPS, req)
class UnknownHandler(BaseHandler):
    """Last resort: any scheme nobody else claimed is reported as an error."""

    def unknown_open(self, req):
        # 'scheme' avoids shadowing the builtin 'type'.
        scheme = req.get_type()
        raise URLError('unknown url type: %s' % scheme)
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    result = {}
    for item in l:
        name, value = item.split('=', 1)
        # Strip one level of surrounding double quotes, if present.
        if value[0] == '"' and value[-1] == '"':
            value = value[1:-1]
        result[name] = value
    return result
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.
    """
    # XXX this function could probably use more testing
    # Scans for the next comma/quote, tracking whether the cursor is
    # currently inside a quoted-string (inquote).
    list = []
    end = len(s)
    i = 0
    inquote = 0
    start = 0
    while i < end:
        cur = s[i:]
        c = cur.find(',')
        q = cur.find('"')
        if c == -1:
            # no more commas: the remainder is the final element
            list.append(s[start:])
            break
        if q == -1:
            if inquote:
                raise ValueError, "unbalanced quotes"
            else:
                list.append(s[start:i+c])
                i = i + c + 1
                continue
        if inquote:
            if q < c:
                # closing quote precedes the comma: element ends there
                list.append(s[start:i+c])
                i = i + c + 1
                start = i
                inquote = 0
            else:
                # the comma is inside the quoted-string; skip past it
                i = i + q
        else:
            if c < q:
                list.append(s[start:i+c])
                i = i + c + 1
                start = i
            else:
                inquote = 1
                i = i + q + 1
    return map(lambda x: x.strip(), list)
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        # file://host/... with a non-local host is retried as FTP.
        if url[:2] == '//' and url[2:3] != '/':
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)
    # names for the localhost
    names = None
    def get_names(self):
        # Resolve and cache the local addresses once per process
        # (stored on the class, shared by all instances).
        if FileHandler.names is None:
            FileHandler.names = (socket.gethostbyname('localhost'),
                                 socket.gethostbyname(socket.gethostname()))
        return FileHandler.names
    # not entirely sure what the rules are here
    def open_local_file(self, req):
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        stats = os.stat(localfile)
        size = stats.st_size
        modified = rfc822.formatdate(stats.st_mtime)
        mtype = mimetypes.guess_type(file)[0]
        # Synthesize minimal response headers for the local file.
        headers = mimetools.Message(StringIO(
            'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        if host:
            host, port = splitport(host)
        # Serve only when no host was given, or when the (portless)
        # host resolves to one of the local addresses.
        if not host or \
           (not port and socket.gethostbyname(host) in self.get_names()):
            return addinfourl(open(localfile, 'rb'),
                              headers, 'file:'+file)
        raise URLError('file not on local host')
class FTPHandler(BaseHandler):
def ftp_open(self, req):
host = req.get_host()
if not host:
raise IOError, ('ftp error', 'no host given')
# XXX handle custom username & password
try:
host = socket.gethostbyname(host)
except socket.error, msg:
raise URLError(msg)
host, port = splitport(host)
if port is None:
port = ftplib.FTP_PORT
path, attrs = splitattr(req.get_selector())
path = unquote(path)
dirs = path.split('/')
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]:
dirs = dirs[1:]
user = passwd = '' # XXX
try:
fw = self.connect_ftp(user, passwd, host, port, dirs)
type = file and 'I' or 'D'
for attr in attrs:
attr, value = splitattr(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
fp, retrlen = fw.retrfile(file, type)
headers = ""
mtype = mimetypes.guess_type(req.get_full_url())[0]
if mtype:
headers += "Content-type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-length: %d\n" % retrlen
sf = StringIO(headers)
headers = mimetools.Message(sf)
return addinfourl(fp, headers, req.get_full_url())
except ftplib.all_errors, msg:
raise IOError, ('ftp error', msg), sys.exc_info()[2]
def connect_ftp(self, user, passwd, host, port, dirs):
fw = ftpwrapper(user, passwd, host, port, dirs)
## fw.ftp.set_debuglevel(1)
return fw
class CacheFTPHandler(FTPHandler):
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}      # (user, passwd, host, port) -> ftpwrapper
        self.timeout = {}    # same key -> absolute expiry time
        self.soonest = 0     # earliest expiry seen; 0 forces a scan
        self.delay = 60      # seconds a cached connection stays alive
        self.max_conns = 16  # cap on simultaneous cached connections
    def setTimeout(self, t):
        self.delay = t
    def setMaxConns(self, m):
        self.max_conns = m
    def connect_ftp(self, user, passwd, host, port, dirs):
        # Reuse a live connection when available; refresh its expiry.
        key = user, passwd, host, port
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]
    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in self.timeout.items():
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            # NOTE(review): min() raises ValueError if every entry just
            # expired -- confirm an empty cache cannot reach this line.
            self.soonest = min(self.timeout.values())
        # then check the size
        if len(self.cache) == self.max_conns:
            # Evict one entry with the earliest expiry.
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(self.timeout.values())
class GopherHandler(BaseHandler):
    """Opens gopher:// URLs via the gopherlib module."""
    def gopher_open(self, req):
        host = req.get_host()
        if not host:
            raise GopherError('no host given')
        host = unquote(host)
        selector = req.get_selector()
        # The selector begins with the one-character gopher item type.
        type, selector = splitgophertype(selector)
        selector, query = splitquery(selector)
        selector = unquote(selector)
        if query:
            query = unquote(query)
            fp = gopherlib.send_query(selector, query, host)
        else:
            fp = gopherlib.send_selector(selector, host)
        return addinfourl(fp, noheaders(), req.get_full_url())
#bleck! don't use this yet
class OpenerFactory:
    """Experimental builder for OpenerDirector instances (unfinished)."""

    default_handlers = [UnknownHandler, HTTPHandler,
                        HTTPDefaultErrorHandler, HTTPRedirectHandler,
                        FTPHandler, FileHandler]
    handlers = []
    replacement_handlers = []

    def add_handler(self, h):
        # Rebind rather than mutate so the class-level list is never
        # altered by instance additions.
        self.handlers = self.handlers + [h]

    def replace_handler(self, h):
        # XXX not implemented yet
        pass

    def build_opener(self):
        """Instantiate the default handlers and return the opener.

        Bug fix: the assembled opener was previously built and then
        discarded; it is now returned to the caller.
        """
        opener = OpenerDirector()
        for ph in self.default_handlers:
            if inspect.isclass(ph):
                ph = ph()
            opener.add_handler(ph)
        return opener
if __name__ == "__main__":
    # Smoke test: fetch a list of URLs across gopher/file/ftp/http.
    # XXX some of the test code depends on machine configurations that
    # are internal to CNRI.  Need to set up a public server with the
    # right authentication configuration for test purposes.
    if socket.gethostname() == 'bitdiddle':
        localhost = 'bitdiddle.cnri.reston.va.us'
    elif socket.gethostname() == 'bitdiddle.concentric.net':
        localhost = 'localhost'
    else:
        localhost = None
    urls = [
        # Thanks to Fred for finding these!
        'gopher://gopher.lib.ncsu.edu/11/library/stacks/Alex',
        'gopher://gopher.vt.edu:10010/10/33',
        'file:/etc/passwd',
        'file://nonsensename/etc/passwd',
        'ftp://www.python.org/pub/python/misc/sousa.au',
        'ftp://www.python.org/pub/tmp/blat',
        'http://www.espn.com/', # redirect
        'http://www.python.org/Spanish/Inquistion/',
        ('http://www.python.org/cgi-bin/faqw.py',
         'query=pythonistas&querytype=simple&casefold=yes&req=search'),
        'http://www.python.org/',
        'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC/research-reports/00README-Legal-Rules-Regs',
        ]
##    if localhost is not None:
##        urls = urls + [
##            'file://%s/etc/passwd' % localhost,
##            'http://%s/simple/' % localhost,
##            'http://%s/digest/' % localhost,
##            'http://%s/not/found.h' % localhost,
##            ]
##    bauth = HTTPBasicAuthHandler()
##    bauth.add_password('basic_test_realm', localhost, 'jhylton',
##                       'password')
##    dauth = HTTPDigestAuthHandler()
##    dauth.add_password('digest_test_realm', localhost, 'jhylton',
##                       'password')
    # Cache FTP connections, with a short 1-second keep-alive.
    cfh = CacheFTPHandler()
    cfh.setTimeout(1)
##    # XXX try out some custom proxy objects too!
##    def at_cnri(req):
##        host = req.get_host()
##        print host
##        if host[-18:] == '.cnri.reston.va.us':
##            return 1
##    p = CustomProxy('http', at_cnri, 'proxy.cnri.reston.va.us')
##    ph = CustomProxyHandler(p)
##    install_opener(build_opener(dauth, bauth, cfh, GopherHandler, ph))
    install_opener(build_opener(cfh, GopherHandler))
    # A tuple entry is (url, POST data); plain strings are GETs.
    for url in urls:
        if isinstance(url, tuple):
            url, req = url
        else:
            req = None
        print url
        try:
            f = urlopen(url, req)
        except IOError, err:
            print "IOError:", err
        except socket.error, err:
            print "socket.error:", err
        else:
            buf = f.read()
            f.close()
            print "read %d bytes" % len(buf)
        print
        time.sleep(0.1)
| {
"repo_name": "MalloyPower/parsing-python",
"path": "front-end/testsuite-python-lib/Python-2.3/Lib/urllib2.py",
"copies": "1",
"size": "38857",
"license": "mit",
"hash": -371795576136770940,
"line_mean": 32.3823024055,
"line_max": 102,
"alpha_frac": 0.5736675502,
"autogenerated": false,
"ratio": 3.9629780724120347,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001778352341604084,
"num_lines": 1164
} |
"""An extensible library for opening URLs using a variety of protocols
The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below). It opens the URL and returns the results as file-like
object; the returned object has some extra methods described below.
The OpenerDirector manages a collection of Handler objects that do
all the actual work. Each Handler implements a particular protocol or
option. The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL. For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns. The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.
urlopen(url, data=None) -- basic usage is the same as original
urllib. pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of URL. Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- function that creates a new OpenerDirector instance.
will install the default handlers. accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate.  if one of the arguments is a subclass of a default
handler, the argument will be installed instead of the default.
install_opener -- installs a new opener as the default opener.
objects of interest:
OpenerDirector --
Request -- an object that encapsulates the state of a request. the
state can be a simple as the URL. it can also include extra HTTP
headers, e.g. a User-Agent.
BaseHandler --
exceptions:
URLError-- a subclass of IOError, individual protocols have their own
specific subclass
HTTPError-- also a valid HTTP response, so you can treat an HTTP error
as an exceptional event or valid response
internals:
BaseHandler and parent
_call_chain conventions
Example usage:
import urllib2
# set up authentication info
authinfo = urllib2.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
uri='https://mahler:8092/site-updates.py',
user='klem',
passwd='geheim$parole')
proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
# build a new opener that adds authentication and caching FTP handlers
opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
# install it
urllib2.install_opener(opener)
f = urllib2.urlopen('http://www.python.org/')
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows what the problem was, e.g., that it didn't know
# the hash algorithm that was requested in the challenge, it would be
# good to pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
import base64
import hashlib
import httplib
import mimetools
import os
import posixpath
import random
import re
import socket
import sys
import time
import urlparse
import bisect
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from urllib import (unwrap, unquote, splittype, splithost, quote,
addinfourl, splitport, splitgophertype, splitquery,
splitattr, ftpwrapper, noheaders, splituser, splitpasswd, splitvalue)
# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies
# used in User-Agent header sent
__version__ = sys.version[:3]
_opener = None
def urlopen(url, data=None):
    """Open url (a string or Request object) via the default opener."""
    global _opener
    # Lazily build the module-wide opener on first use.
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data)
def install_opener(opener):
    """Make opener the default used by urlopen()."""
    global _opener
    _opener = opener
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
    """Error raised for any failure to open a URL.

    Subclasses IOError only so callers can catch it as one; none of
    the IOError implementation is reused.  args is populated for
    EnvironmentError compatibility, but it is a 1-tuple holding the
    reason rather than the usual (errno, strerror) layout.
    """

    def __init__(self, reason):
        self.args = (reason,)
        self.reason = reason

    def __str__(self):
        return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = addinfourl.__init__
    def __init__(self, url, code, msg, hdrs, fp):
        self.code = code
        self.msg = msg
        self.hdrs = hdrs
        self.fp = fp
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object.  In some cases, the HTTPError may not have a valid
        # file object.  If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url)
    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)
class GopherError(URLError):
    """Error raised for gopher protocol failures."""
    pass
# copied from cookielib.py
# Matches a trailing ":port" on a netloc.
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.
    """
    url = request.get_full_url()
    host = urlparse.urlparse(url)[1]
    if host == "":
        # Relative URL: fall back to the request's Host header.
        host = request.get_header("Host", "")
    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
class Request:
    """A URL request: url, optional data, headers, RFC 2965 origin info."""
    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # NOTE(review): the mutable {} default is shared across calls,
        # but it is only read from here, never mutated, so it is safe.
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self.data = data
        self.headers = {}
        for key, value in headers.items():
            self.add_header(key, value)
        # Headers that must not survive a redirect (e.g. auth retries).
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order. this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        # _Request__r_* attributes are computed lazily by the matching
        # get_*() accessor: trigger it, then retry the lookup.
        if attr[:12] == '_Request__r_':
            name = attr[12:]
            if hasattr(Request, 'get_' + name):
                getattr(self, 'get_' + name)()
                return getattr(self, attr)
        raise AttributeError, attr
    def get_method(self):
        # POST iff data was supplied, GET otherwise.
        if self.has_data():
            return "POST"
        else:
            return "GET"
    # XXX these helper methods are lame
    def add_data(self, data):
        self.data = data
    def has_data(self):
        return self.data is not None
    def get_data(self):
        return self.data
    def get_full_url(self):
        return self.__original
    def get_type(self):
        # Lazily split the scheme off the original URL and cache it.
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError, "unknown url type: %s" % self.__original
        return self.type
    def get_host(self):
        # Lazily split the (unquoted) host out of the scheme-less rest.
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host
    def get_selector(self):
        return self.__r_host
    def set_proxy(self, host, type):
        # Re-point the request at a proxy: host/type now describe the
        # proxy and the full original URL becomes the selector.
        self.host, self.type = host, type
        self.__r_host = self.__original
    def get_origin_req_host(self):
        return self.origin_req_host
    def is_unverifiable(self):
        return self.unverifiable
    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val
    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val
    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)
    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))
    def header_items(self):
        # Regular headers take precedence over unredirected ones.
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return hdrs.items()
class OpenerDirector:
    """Manage a chain of handlers and route URL opens through them.

    Handlers expose protocol-specific methods named by convention
    (``<proto>_open``, ``<proto>_request``, ``<proto>_response``,
    ``<proto>_error_<code>``); add_handler() discovers them by name and
    files each handler into the matching lookup table.
    """
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        # Default headers added to every outgoing request.
        self.addheaders = [('User-agent', client_version)]
        # manage the individual handlers
        self.handlers = []
        self.handle_open = {}        # proto -> [handlers with proto_open]
        self.handle_error = {}       # proto -> {code -> [handlers]}
        self.process_response = {}   # proto -> [response processors]
        self.process_request = {}    # proto -> [request preprocessors]
    def add_handler(self, handler):
        """Register *handler*, indexing its protocol methods by name."""
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))
        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue
            # Split "<protocol>_<condition>" method names, e.g. "http_open".
            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]
            if condition.startswith("error"):
                # "<proto>_error_<kind>": index by protocol then error kind
                # (an int status code where the suffix parses as one).
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue
            handlers = lookup.setdefault(kind, [])
            if handlers:
                # Keep each chain sorted by handler_order (via __lt__).
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True
        if added:
            # XXX why does self.handlers need to be sorted?
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)
    def close(self):
        # Only exists for backwards compatibility.
        pass
    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could. Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result
    def open(self, fullurl, data=None):
        """Open a URL or Request: preprocess, open, then postprocess."""
        # accept a URL or a Request object
        if isinstance(fullurl, basestring):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.add_data(data)
        protocol = req.get_type()
        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)
        response = self._open(req, data)
        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)
        return response
    def _open(self, req, data=None):
        # Dispatch order: default_open, then <protocol>_open, then
        # unknown_open as a last resort.
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result
        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result
        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)
    def error(self, proto, *args):
        """Dispatch an error to the handlers registered for *proto*."""
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2] # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result
        if http_err:
            # No handler claimed the specific code; fall back to default.
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.
    The opener will use several default handlers, including support
    for HTTP and FTP.
    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        # Python 2 has both classic classes (types.ClassType) and
        # new-style classes; the __bases__ check catches the latter.
        return isinstance(obj, types.ClassType) or hasattr(obj, "__bases__")
    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        # Offer HTTPS only when httplib was built with SSL support.
        default_classes.append(HTTPSHandler)
    skip = []
    # Drop any default whose role is taken over by a user-supplied handler
    # (either a subclass passed as a class, or an instance of a subclass).
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.append(klass)
            elif isinstance(check, klass):
                skip.append(klass)
    for klass in skip:
        default_classes.remove(klass)
    for klass in default_classes:
        opener.add_handler(klass())
    # User handlers go in last; classes are instantiated with no args.
    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
class BaseHandler:
    """Common base class for all opener handlers.

    Handlers sort by ``handler_order`` (ascending; lower runs earlier),
    which lets OpenerDirector keep its chains ordered via bisect.
    """
    handler_order = 500

    def add_parent(self, parent):
        """Remember the OpenerDirector this handler was registered with."""
        self.parent = parent

    def close(self):
        """No-op; retained only for backwards compatibility."""
        pass

    def __lt__(self, other):
        """Order handlers by handler_order."""
        try:
            other_order = other.handler_order
        except AttributeError:
            # Legacy user classes without handler_order sort after the
            # defaults, preserving the historical insertion behaviour.
            return True
        return self.handler_order < other_order
class HTTPErrorProcessor(BaseHandler):
    """Route non-success HTTP responses into the opener's error chain."""
    # Run after every other response processor has had its turn.
    handler_order = 1000

    def http_response(self, request, response):
        """Return 200/206 responses unchanged; escalate everything else.

        Non-success statuses are handed to ``self.parent.error('http',
        ...)``, which dispatches to the registered http_error_* handlers.
        """
        code, msg, hdrs = response.code, response.msg, response.info()
        if code in (200, 206):
            return response
        return self.parent.error(
            'http', request, response, code, msg, hdrs)

    https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Last resort for HTTP errors nobody else handled: raise HTTPError."""
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
    """Follow 301/302/303/307 redirects, with loop detection."""
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.
        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case). In practice,
            # essentially all clients do redirect in this case, so we
            # do the same.
            # be conciliant with URIs containing a space
            newurl = newurl.replace(' ', '%20')
            # unverifiable=True: the user did not request the new URL
            # directly (matters for cookie acceptance, RFC 2965).
            return Request(newurl,
                           headers=req.headers,
                           origin_req_host=req.get_origin_req_host(),
                           unverifiable=True)
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)
    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        """Build and re-open the redirected request, guarding loops."""
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if 'location' in headers:
            newurl = headers.getheaders('location')[0]
        elif 'uri' in headers:
            newurl = headers.getheaders('uri')[0]
        else:
            return
        # Location may be relative; resolve against the current URL.
        newurl = urlparse.urljoin(req.get_full_url(), newurl)
        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return
        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1
        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()
        return self.parent.open(new)
    http_error_301 = http_error_303 = http_error_307 = http_error_302
    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.
    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:
    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
    The first three items of the returned tuple may be None.
    Examples of authority parsing:
    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')
    The authority component may optionally include userinfo (assumed to be
    username:password):
    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')
    Same examples, but with URLs instead:
    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')
    Everything after the authority is ignored:
    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')
    Test for no trailing '/' case:
    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')
    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
        # and 3.3.), path is empty or starts with '/'
        # Slice the authority out: everything between '//' and the next
        # '/', or to the end of the string when there is no path.
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    # Peel userinfo (user[:password]@) off the front of the authority.
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    """Rewrite requests so they travel through the configured proxies."""
    # Proxies must be in front
    handler_order = 100
    def __init__(self, proxies=None):
        # proxies maps scheme -> proxy URL; defaults come from the
        # environment (http_proxy and friends) via getproxies().
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # Grow a <scheme>_open method per configured scheme; the lambda
        # defaults bind the loop variables so each closure keeps its own
        # proxy URL and scheme.
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                        meth(r, proxy, type))
    def proxy_open(self, req, proxy, type):
        """Point *req* at the proxy, adding credentials when present."""
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type
        if user and password:
            # Pre-emptively send the proxy credentials as Basic auth.
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type:
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req)
class HTTPPasswordMgr:
    """Store (user, password) pairs keyed by realm and reduced URI."""
    def __init__(self):
        # realm -> {tuple of reduced URIs -> (user, password)}
        self.passwd = {}
    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        if not realm in self.passwd:
            self.passwd[realm] = {}
        # Index under both the default-port and the literal spelling of
        # the authority so later lookups match either form.
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)
    def find_user_password(self, realm, authuri):
        # Return the first stored credential whose URI prefix covers
        # authuri, or (None, None) when nothing matches.
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.iteritems():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None
    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlparse.urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            # Normalise by appending the scheme's well-known port.
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path
    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree
        Both args must be URIs in reduced (authority, path) form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        # Same authority: base matches when its path is a prefix of test's.
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager with a wildcard realm.

    Credentials stored under realm ``None`` match any realm for which no
    realm-specific entry exists.
    """

    def find_user_password(self, realm, authuri):
        """Look up (user, password), trying *realm* first, then None."""
        user, password = HTTPPasswordMgr.find_user_password(
            self, realm, authuri)
        if user is None:
            # No realm-specific match; fall back to the default realm.
            return HTTPPasswordMgr.find_user_password(self, None, authuri)
        return user, password
class AbstractBasicAuthHandler:
    """Shared machinery for Basic auth (RFC 2617) on servers and proxies."""
    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.
    # Matches 'scheme realm="..."' inside a (WWW-/Proxy-)Authenticate value.
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', re.I)
    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).
    def __init__(self, password_mgr=None):
        # Defaults to a fresh HTTPPasswordMgr; add_password is re-exported
        # so callers can register credentials directly on the handler.
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
    def http_error_auth_reqed(self, authreq, host, req, headers):
        """Retry with Basic credentials if *authreq* names that scheme.

        *authreq* is the name of the challenge header to inspect.
        """
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)
        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, realm = mo.groups()
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)
    def retry_http_basic_auth(self, host, req, realm):
        # Re-issue the request with an auth header -- unless the identical
        # credentials were already sent, which would mean a retry loop.
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = 'Basic %s' % base64.b64encode(raw).strip()
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_header(self.auth_header, auth)
            return self.parent.open(req)
        else:
            return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Handle 401 responses by retrying with Basic credentials."""
    auth_header = 'Authorization'
    def http_error_401(self, req, fp, code, msg, headers):
        # The full URL (not just the host) keys the password lookup.
        url = req.get_full_url()
        return self.http_error_auth_reqed('www-authenticate',
                                          url, req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Handle 407 responses by retrying with proxy Basic credentials."""
    auth_header = 'Proxy-authorization'
    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib2 does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.get_host()
        return self.http_error_auth_reqed('proxy-authenticate',
                                          authority, req, headers)
def randombytes(n):
    """Return n random bytes."""
    # Use /dev/urandom if it is available.  Fall back to random module
    # if not.  It might be worthwhile to extend this function to use
    # other platform-specific mechanisms for getting random bytes.
    # NOTE(review): the device is opened in text mode, which is fine on
    # Python 2 where str is bytes; this would not port to Python 3.
    if os.path.exists("/dev/urandom"):
        f = open("/dev/urandom")
        s = f.read(n)
        f.close()
        return s
    else:
        # Non-cryptographic fallback: the random module is a PRNG, not a
        # secure source, but this is only used for digest-auth cnonces.
        L = [chr(random.randrange(0, 256)) for i in range(n)]
        return "".join(L)
class AbstractDigestAuthHandler:
    """Shared machinery for Digest auth on servers and proxies."""
    # Digest authentication is specified in RFC 2617.
    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.
    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.
    # XXX qop="auth-int" supports is shaky
    def __init__(self, passwd=None):
        # passwd defaults to a fresh HTTPPasswordMgr.  retried guards
        # against endless 401 retries; nonce_count feeds the nc= field.
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
    def reset_retry_count(self):
        # Called by the concrete handlers once a request cycle completes.
        self.retried = 0
    def http_error_auth_reqed(self, auth_header, host, req, headers):
        """Retry with Digest credentials if the challenge names Digest."""
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
    def retry_http_digest_auth(self, req, auth):
        # Parse the challenge, build an Authorization value, re-open.
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # Identical credentials already sent: stop to avoid a loop.
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req)
            return resp
    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
                                            randombytes(8))).hexdigest()
        return dig[:16]
    def get_authorization(self, req, chal):
        """Build the Authorization header value from challenge params.

        Follows RFC 2617 section 3.2.2; returns None when the challenge
        is malformed, the algorithm is unknown, or no credentials exist.
        """
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None
        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None
        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None
        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None
        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            self.nonce_count += 1
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)
        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            # Only reachable when qop == 'auth' (other values raised
            # above), so ncvalue and cnonce are guaranteed to be bound.
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base
    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        # NOTE(review): an unrecognised algorithm leaves H unbound, so the
        # return statement raises UnboundLocalError instead of returning
        # None -- confirm whether callers ever pass other algorithms.
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD
    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069
    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """
    auth_header = 'Authorization'
    handler_order = 490 # before Basic auth
    def http_error_401(self, req, fp, code, msg, headers):
        # Key the password lookup by the URL's network location.
        host = urlparse.urlparse(req.get_full_url())[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Handle 407 responses by retrying with proxy Digest credentials."""
    auth_header = 'Proxy-Authorization'
    handler_order = 490 # before Basic auth
    def http_error_407(self, req, fp, code, msg, headers):
        host = req.get_host()
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry
class AbstractHTTPHandler(BaseHandler):
    """Shared request preparation and opening logic for HTTP(S)."""
    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel
    def set_http_debuglevel(self, level):
        # Forwarded to the underlying httplib connection in do_open().
        self._debuglevel = level
    def do_request_(self, request):
        """Fill in required headers: Content-type/length, Host, defaults."""
        host = request.get_host()
        if not host:
            raise URLError('no host given')
        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))
        # Derive the Host header from the selector when it is a full URL
        # (the proxy case), falling back to the connection host.
        scheme, sel = splittype(request.get_selector())
        sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host or host)
        # Apply the opener-wide default headers (e.g. User-agent) without
        # clobbering anything the caller set.
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)
        return request
    def do_open(self, http_class, req):
        """Return an addinfourl object for the request, using http_class.
        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:
        - info(): return a mimetools.Message object for the headers
        - geturl(): return the original request URL
        - code: HTTP status code
        """
        host = req.get_host()
        if not host:
            raise URLError('no host given')
        h = http_class(host)  # will parse host:port
        h.set_debuglevel(self._debuglevel)
        # Unredirected headers are the base; normal headers override them.
        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict(
            (name.title(), val) for name, val in headers.items())
        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            r = h.getresponse()
        except socket.error, err: # XXX what error?
            raise URLError(err)
        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.
        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.
        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.
        r.recv = r.read
        fp = socket._fileobject(r, close=True)
        resp = addinfourl(fp, r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason
        return resp
class HTTPHandler(AbstractHTTPHandler):
    """Open http: URLs via httplib.HTTPConnection."""
    def http_open(self, req):
        return self.do_open(httplib.HTTPConnection, req)
    # Request preprocessing is the shared implementation, re-exported
    # under the name the OpenerDirector dispatch convention expects.
    http_request = AbstractHTTPHandler.do_request_
# Define an https handler only when httplib was compiled with SSL support.
if hasattr(httplib, 'HTTPS'):
    class HTTPSHandler(AbstractHTTPHandler):
        """Open https: URLs via httplib.HTTPSConnection."""
        def https_open(self, req):
            return self.do_open(httplib.HTTPSConnection, req)
        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    """Add cookies to outgoing requests; harvest them from responses."""
    def __init__(self, cookiejar=None):
        import cookielib
        # Defaults to a fresh, empty in-memory jar.
        if cookiejar is None:
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar
    def http_request(self, request):
        # Attach matching Cookie headers before the request is sent.
        self.cookiejar.add_cookie_header(request)
        return request
    def http_response(self, request, response):
        # Record any Set-Cookie headers from the response into the jar.
        self.cookiejar.extract_cookies(response, request)
        return response
    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Last-resort handler: reject URL schemes nobody else claimed."""
    def unknown_open(self, req):
        type = req.get_type()
        raise URLError('unknown url type: %s' % type)
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated.

    One level of surrounding double quotes is stripped from each value.
    Later duplicate keys overwrite earlier ones.

    Raises ValueError if an element contains no '=' separator.
    """
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        # Strip a matched pair of surrounding double quotes.  The length
        # guard fixes an IndexError the original raised on empty values
        # ("key=") and stops a lone '"' being treated as a matched pair.
        if len(v) > 1 and v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    Splits a comma-separated list whose elements may include
    quoted-strings (which may themselves contain commas).  Backslash
    escapes are honoured inside quotes; only double quotes count.
    Returned elements are stripped of surrounding whitespace but keep
    their quotes.
    """
    items = []
    buf = []                 # characters of the element being built
    in_quote = False         # inside a double-quoted string
    in_escape = False        # previous char was a backslash inside quotes
    for ch in s:
        if in_escape:
            buf.append(ch)
            in_escape = False
        elif in_quote:
            if ch == '\\':
                in_escape = True
            else:
                if ch == '"':
                    in_quote = False
                buf.append(ch)
        elif ch == ',':
            # Element boundary (only outside quotes).
            items.append(''.join(buf))
            buf = []
        else:
            if ch == '"':
                in_quote = True
            buf.append(ch)
    # Flush the trailing element, if any.
    if buf:
        items.append(''.join(buf))
    return [item.strip() for item in items]
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        """Serve file: URLs; a non-local host component is retried as FTP."""
        url = req.get_selector()
        if url[:2] == '//' and url[2:3] != '/':
            # file://host/path with a real host: hand off to the FTP chain.
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)
    # names for the localhost
    names = None
    def get_names(self):
        # Lazily computed, class-wide tuple of addresses meaning "this
        # machine"; shared across all FileHandler instances.
        if FileHandler.names is None:
            try:
                FileHandler.names = (socket.gethostbyname('localhost'),
                                     socket.gethostbyname(socket.gethostname()))
            except socket.gaierror:
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names
    # not entirely sure what the rules are here
    def open_local_file(self, req):
        """Open a local path, synthesising HTTP-like headers from stat()."""
        import email.Utils
        import mimetypes
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        stats = os.stat(localfile)
        size = stats.st_size
        modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(file)[0]
        headers = mimetools.Message(StringIO(
            'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        if host:
            host, port = splitport(host)
        # Serve the file only when the URL names no host, or names this
        # machine (and no explicit port).
        if not host or \
           (not port and socket.gethostbyname(host) in self.get_names()):
            return addinfourl(open(localfile, 'rb'),
                              headers, 'file:'+file)
        raise URLError('file not on local host')
class FTPHandler(BaseHandler):
    """Open ftp: URLs, optionally with user:password in the authority."""
    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.get_host()
        if not host:
            raise IOError, ('ftp error', 'no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)
        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')
        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise URLError(msg)
        # Split the selector into directory components, a filename, and
        # any trailing ;attr=value attributes (e.g. ;type=i).
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs)
            # 'I' = binary (image) transfer when a file is named,
            # 'D' = directory listing otherwise; ;type= can override.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # Re-raise as IOError while preserving the original traceback.
            raise IOError, ('ftp error', msg), sys.exc_info()[2]
    def connect_ftp(self, user, passwd, host, port, dirs):
        # Hook point: CacheFTPHandler overrides this to reuse connections.
        fw = ftpwrapper(user, passwd, host, port, dirs)
        ## fw.ftp.set_debuglevel(1)
        return fw
class CacheFTPHandler(FTPHandler):
    """FTPHandler that keeps a small pool of recently used connections."""
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}      # key -> ftpwrapper connection
        self.timeout = {}    # key -> absolute expiry time
        self.soonest = 0     # earliest known expiry; gates cache sweeps
        self.delay = 60      # seconds a cached connection stays alive
        self.max_conns = 16
    def setTimeout(self, t):
        self.delay = t
    def setMaxConns(self, m):
        self.max_conns = m
    def connect_ftp(self, user, passwd, host, port, dirs):
        # Reuse a live connection for the same (user, host, port, path),
        # refreshing its expiry on every use.
        key = user, host, port, '/'.join(dirs)
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]
    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in self.timeout.items():
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            # NOTE(review): if every entry has just expired, self.timeout
            # is empty here and min() raises ValueError -- confirm whether
            # this path can be reached in practice.
            self.soonest = min(self.timeout.values())
        # then check the size
        if len(self.cache) == self.max_conns:
            # Evict the connection that would expire first.
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(self.timeout.values())
class GopherHandler(BaseHandler):
    """Open gopher: URLs via the (deprecated) gopherlib module."""
    def gopher_open(self, req):
        # XXX can raise socket.error
        import gopherlib  # this raises DeprecationWarning in 2.5
        host = req.get_host()
        if not host:
            raise GopherError('no host given')
        host = unquote(host)
        selector = req.get_selector()
        # Peel the gopher item type and any search query off the selector.
        type, selector = splitgophertype(selector)
        selector, query = splitquery(selector)
        selector = unquote(selector)
        if query:
            query = unquote(query)
            fp = gopherlib.send_query(selector, query, host)
        else:
            fp = gopherlib.send_selector(selector, host)
        return addinfourl(fp, noheaders(), req.get_full_url())
| {
"repo_name": "ericlink/adms-server",
"path": "playframework-dist/1.1-src/python/Lib/urllib2.py",
"copies": "2",
"size": "48945",
"license": "mit",
"hash": -2031839721669424600,
"line_mean": 33.9889705882,
"line_max": 82,
"alpha_frac": 0.5733169885,
"autogenerated": false,
"ratio": 4.174057649667406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5747374638167405,
"avg_score": null,
"num_lines": null
} |
""" An extension of optparse.OptionParser that has custom error and exit handling
Classes:
:py:class:`OptionParser`
the extension class of :py:class:`optparse.OptionParser`
"""
import optparse
class OptionParser(optparse.OptionParser):
    """optparse.OptionParser variant with pluggable error/exit behaviour.

    By default optparse prints a message and terminates the process on
    errors.  This subclass lets callers install their own handlers
    instead, preventing the automatic exit:

    :py:meth:`~OptionParser.set_error_handler`
        replace the default error() behaviour
    :py:meth:`~OptionParser.set_exit_handler`
        replace the default exit() behaviour
    """

    def __init__(self, *args, **kwdargs):
        """Initialise exactly like optparse.OptionParser; no handlers yet."""
        optparse.OptionParser.__init__(self, *args, **kwdargs)
        self._errorhandler = None
        self._exithandler = None

    def set_error_handler(self, handler):
        """Install *handler*, a callable taking an error-message string."""
        self._errorhandler = handler

    def set_exit_handler(self, handler):
        """Install *handler*, a callable taking (exit code, message)."""
        self._exithandler = handler

    def error(self, msg):
        """Report a user error, via the installed handler when one exists.

        Falls back to optparse's default (print usage and exit) when no
        handler has been installed.
        """
        handler = self._errorhandler
        if handler is None:
            return optparse.OptionParser.error(self, msg)
        return handler(msg)

    def exit(self, code=0, msg=None):
        """Exit the parser, via the installed handler when one exists.

        Often called by OptionParser.error(); falls back to optparse's
        default (sys.exit) when no handler has been installed.
        """
        handler = self._exithandler
        if handler is None:
            return optparse.OptionParser.exit(self, code, msg)
        return handler(code, msg)
| {
"repo_name": "gsmcwhirter/simulations",
"path": "src/simulations/utils/optionparser.py",
"copies": "1",
"size": "2161",
"license": "mit",
"hash": -6333160788913152000,
"line_mean": 21.9893617021,
"line_max": 81,
"alpha_frac": 0.578898658,
"autogenerated": false,
"ratio": 4.812917594654788,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00012973533990659055,
"num_lines": 94
} |
"""An extension to the basic_association.py example, which illustrates
the usage of sqlalchemy.ext.associationproxy.
"""
from datetime import datetime
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
String, DateTime, Float, ForeignKey, and_)
from sqlalchemy.orm import mapper, relationship, Session
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.associationproxy import association_proxy
Base = declarative_base()
class Order(Base):
    """An order placed by a customer.

    ``items`` proxies the ``OrderItem`` association objects so callers can
    append ``Item`` instances directly.
    """
    __tablename__ = 'order'

    order_id = Column(Integer, primary_key=True)
    customer_name = Column(String(30), nullable=False)
    # Pass the callable, not its result: ``datetime.now()`` would be
    # evaluated once at import time, stamping every order with the same
    # date. SQLAlchemy invokes a callable default per-INSERT.
    order_date = Column(DateTime, nullable=False, default=datetime.now)
    order_items = relationship("OrderItem", cascade="all, delete-orphan",
                            backref='order')
    items = association_proxy("order_items", "item")

    def __init__(self, customer_name):
        self.customer_name = customer_name
class Item(Base):
    """A catalog item with a description and a list price."""
    __tablename__ = 'item'

    item_id = Column(Integer, primary_key=True)
    description = Column(String(30), nullable=False)
    price = Column(Float, nullable=False)

    def __init__(self, description, price):
        self.description = description
        self.price = price

    def __repr__(self):
        return 'Item(%r, %r)' % (self.description, self.price)
class OrderItem(Base):
    """Association object linking an Order to an Item at a sale price."""
    __tablename__ = 'orderitem'

    order_id = Column(Integer, ForeignKey('order.order_id'), primary_key=True)
    item_id = Column(Integer, ForeignKey('item.item_id'), primary_key=True)
    price = Column(Float, nullable=False)

    def __init__(self, item, price=None):
        self.item = item
        # Default to the item's list price when no explicit price is given.
        self.price = price or item.price

    item = relationship(Item, lazy='joined')
if __name__ == '__main__':
    # Demo: build an in-memory catalog, place an order via the association
    # proxy, then run a few queries.
    # NOTE: this file uses Python 2 print statements.
    engine = create_engine('sqlite://')  # in-memory SQLite database
    Base.metadata.create_all(engine)
    session = Session(engine)
    # create catalog
    tshirt, mug, hat, crowbar = (
        Item('SA T-Shirt', 10.99),
        Item('SA Mug', 6.50),
        Item('SA Hat', 8.99),
        Item('MySQL Crowbar', 16.99)
    )
    session.add_all([tshirt, mug, hat, crowbar])
    session.commit()
    # create an order
    order = Order('john smith')
    # add items via the association proxy.
    # the OrderItem is created automatically.
    order.items.append(mug)
    order.items.append(hat)
    # add an OrderItem explicitly.
    order.order_items.append(OrderItem(crowbar, 10.99))
    session.add(order)
    session.commit()
    # query the order, print items
    order = session.query(Order).filter_by(customer_name='john smith').one()
    # print items based on the OrderItem collection directly
    print [(assoc.item.description, assoc.price, assoc.item.price)
           for assoc in order.order_items]
    # print items based on the "proxied" items collection
    print [(item.description, item.price)
           for item in order.items]
    # print customers who bought 'MySQL Crowbar' on sale
    # (i.e. where the sale price recorded on the OrderItem is below list)
    orders = session.query(Order).\
        join('order_items', 'item').\
        filter(Item.description == 'MySQL Crowbar').\
        filter(Item.price > OrderItem.price)
    print [o.customer_name for o in orders]
| {
"repo_name": "rclmenezes/sqlalchemy",
"path": "examples/association/proxied_association.py",
"copies": "1",
"size": "3262",
"license": "mit",
"hash": -4647414569573261000,
"line_mean": 31.297029703,
"line_max": 78,
"alpha_frac": 0.6440833844,
"autogenerated": false,
"ratio": 3.824150058616647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.995412964969105,
"avg_score": 0.0028207586651194778,
"num_lines": 101
} |
""" An extractor is a callable that returns or yields data. For example:
.. code-block:: python
def extract(response):
return "something"
The ``response`` parameter here is an instance of
:class:`wex.response.Response`.
Extractors can be combined in various ways.
"""
from __future__ import absolute_import, unicode_literals, print_function
from .value import yield_values
OMITTED = object()  # sentinel distinguishing "not passed" from None — no use visible in this chunk; TODO confirm callers
class Chained(object):
    """An extractor that chains the output of several sub-extractors.

    Calling the instance yields every value produced by each extractor in
    turn, re-seeking the response to offset 0 before each one.
    """

    # Optional debugging hook; assign e.g. pdb.set_trace to break on call.
    set_trace = None

    def __init__(self, *extractors):
        self.extractors = list(extractors)

    @property
    def __name__(self):
        return repr(self)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.extractors)

    def chained(self, *args, **kw):
        if self.set_trace:
            # Give a hook for debugging
            self.set_trace()
        # Chained extractors are used in wex.entrypoints.
        # We re-seek the response to position 0 for each
        # extractor in the chain for convenience.
        seek = args and getattr(args[0], 'seek', None)
        for extractor in self.extractors:
            if seek:
                seek(0)
            for value in yield_values(extractor, *args, **kw):
                yield value

    __call__ = chained

    def append(self, extractor):
        """Append ``extractor``; returns it so this works as a decorator."""
        self.extractors.append(extractor)
        return extractor

    def insert(self, index, extractor=None):
        """Insert ``extractor`` at ``index``; usable as a decorator.

        Bug fix: the previous implementation recursed into ``self.insert``
        instead of inserting into ``self.extractors``, causing infinite
        recursion (and, when used as a decorator, it returned None).
        """
        def decorator(func):
            self.extractors.insert(index, func)
            return func
        if extractor is None:
            return decorator
        return decorator(extractor)
def chained(*extractors):
    """Build an extractor whose output is the concatenated output of
    ``extractors``, run in the order given.

    :param extractors: an iterable of extractor callables to chain

    Given, for example::

        def extract1(response):
            yield "one"

        def extract2(response):
            yield "two"

        extract = chained(extract1, extract2)

    the extraction output would be::

        $ wex http://example.net/
        "one"
        "two"
    """
    return Chained(*extractors)
class Named(object):
    """A collection of named sub-extractors.

    Sub-extractors may be supplied as keyword arguments at construction
    time, or registered later with :meth:`.add`.  Each value produced by
    a sub-extractor is labelled with that extractor's name.  For example::

        extract = Named(
            name1 = (lambda response: "one"),
            name2 = (lambda response: "two"),
        )

    would produce output something like::

        $ wex http://example.net/
        "name1" "one"
        "name2" "two"

    The ordering of sub-extractor output is arbitrary.
    """

    # Optional debugging hook; assign e.g. pdb.set_trace to break on call.
    set_trace = None

    def __init__(self, **kw):
        self.extractors = {}
        for name, extractor in kw.items():
            self.add(extractor, name)

    @property
    def __name__(self):
        return repr(self)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.extractors.keys())

    def __len__(self):
        return len(self.extractors)

    def named(self, *args, **kwargs):
        if self.set_trace:
            # debugging hook
            self.set_trace()
        for name, extractor in self.extractors.items():
            for value in yield_values(extractor, *args, **kwargs):
                yield value.label(name)

    __call__ = named

    def add(self, extractor, label=None):
        """Register ``extractor`` under ``label`` and return it.

        :param callable extractor: The extractor to be added.
        :param str label: The label for the extractor; when None the
            extractor's ``__name__`` attribute is used, which makes this
            method usable as a decorator::

                attrs = Named()

                @attrs.add
                def attr1(response):
                    return "one"
        """
        if label is None:
            label = extractor.__name__
        self.extractors[label] = extractor
        return extractor
def named(**kw):
    """Build a :class:`.Named` collection from keyword arguments."""
    return Named(**kw)
class Labelled(object):
    """Wraps an extractor, labelling every value it produces.

    Labels may be literals or callables; callables are invoked with the
    same arguments as the extractor to produce the label.  If any
    resolved label is false, the wrapped extractor produces no output.
    """

    # Optional debugging hook.
    set_trace = None

    def __init__(self, labels, extractor):
        self.labels = labels
        self.extractor = extractor

    def get_labels(self, *args, **kw):
        """Resolve each label, calling any callable ones."""
        return [label(*args, **kw) if callable(label) else label
                for label in self.labels]

    def labelled(self, *args, **kw):
        if self.set_trace:
            self.set_trace()
        labels = self.get_labels(*args, **kw)
        # don't yield if any labels are false
        if not all(labels):
            return
        for value in yield_values(self.extractor, *args, **kw):
            yield value.label(*labels)

    __call__ = labelled
def labelled(*args):
    """Return an extractor that labels the output of another extractor.

    :param literals_or_callables: An iterable of labels or callables.

    The final positional argument is the extractor; everything before it
    is a label.  Each label may be a literal or a callable; callables are
    invoked with the same parameters as the extractor and their return
    value is used as the label.  For example::

        def extract1(response):
            yield "one"

        def label2(response):
            return "label2"

        extract = label("label1", label2)(extract1)

    would produce::

        $ wex http://example.net/
        "label1" "label2" "one"

    Note that if any of the labels are
    `false <https://docs.python.org/2/library/stdtypes.html#truth-value-testing>`_
    then no output will be generated from that extractor.
    """
    labels, extractor = args[:-1], args[-1]
    return Labelled(labels, extractor)
def label(*labels):
    """Decorator factory: ``label(l1, l2)(extractor)`` labels the output
    of ``extractor`` with ``l1`` and ``l2``."""
    def decorator(extractor):
        return labelled(*(labels + (extractor,)))
    return decorator
class If(object):
    """Chooses between two extractors based on a condition callable.

    When the condition (called with the same arguments as the extractor)
    is true, ``if_true`` runs; otherwise ``if_false`` runs.  A ``None``
    extractor yields nothing.
    """

    def __init__(self, cond, if_true, if_false):
        self.cond = cond
        self.if_true = if_true
        self.if_false = if_false

    def if_(self, *args, **kw):
        extractor = self.if_true if self.cond(*args, **kw) else self.if_false
        if extractor is None:
            return
        for value in yield_values(extractor, *args, **kw):
            yield value

    __call__ = if_
def if_(cond, if_true, if_false=None):
    """Return an :class:`.If` extractor; ``if_false`` defaults to nothing."""
    return If(cond, if_true, if_false)
| {
"repo_name": "eBay/wextracto",
"path": "wex/extractor.py",
"copies": "3",
"size": "7029",
"license": "bsd-3-clause",
"hash": -8664217992233106000,
"line_mean": 23.9255319149,
"line_max": 82,
"alpha_frac": 0.5837245696,
"autogenerated": false,
"ratio": 4.211503894547634,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6295228464147634,
"avg_score": null,
"num_lines": null
} |
"""An extractor of boolean trigram queries from a regex, such that a superset
of the docs matching the regex are returned. You can then run the actual regex
over just the returned docs, saving a lot of computation and IO.
We support a subset of the PCRE regex language at the moment, lacking
lookarounds and noncapturing parens. We can add those later after we decide
the consequences they have for substring extraction. We may be selective about
what we support in order to avoid regex-based DOS attacks, but, aside from
that, DXR's flavor of regex should approach some more popular flavor as
closely as possible.
Junghoo Ch and Sridhar Rajagopalan, in "A fast regular expression indexing
engine", descibe an intuitive method for accelerating regex searching with a
trigram index. This is roughly an implementation of that.
Russ Cox, in http://swtch.com/~rsc/regexp/regexp4.html, refines
that to {(1) extract use from runs of less than 3 static chars and (2) extract
trigrams that cross the boundaries between subexpressions} by keeping track of
prefix and suffix information while chewing through a pattern and effectively
merging adjacent subpatterns. This is a direction we may go in the future.
"""
from itertools import chain
from parsimonious import Grammar, NodeVisitor
NGRAM_LENGTH = 3  # trigram length: literals shorter than this can't hit the trigram index
class NoTrigrams(Exception):
    """Raised when no trigram-or-longer substrings could be extracted
    from a regex, so the trigram index cannot narrow the search."""
# We should parse a regex. Then go over the tree and turn things like c+ into cc*, perhaps, as it makes it easier to see trigrams to extract.
# TODO: Parse normal regex syntax, but spit out Lucene-compatible syntax, with " escaped. And all special chars escaped even in character classes, in accordance with https://lucene.apache.org/core/4_6_0/core/org/apache/lucene/util/automaton/RegExp.html?is-external=true.
# TODO: Expand positive char classes so we can get trigrams out of [sp][rne]
# (trilite expands char classes of up to 10 chars but does nothing for larger
# ones), and be able to get trigrams out of sp(rint) as well. Production
# currently does that much. It is not, however, smart enough to expand
# spr{1,3}, nor spr+. An easy way would be to keep track of prefixes and
# suffixes (and trigram-or-better infixes) for each node, then work our way up
# the tree.
class SubstringTree(list):
    """A node specifying a boolean operator, with strings or more such nodes as
    its children"""

    def __init__(self, iterable=()):
        self.extend(iterable)

    def __str__(self):
        return repr(self)

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        # Equality requires the exact same class, so And([...]) never
        # equals Or([...]) even with identical children.
        return (self.__class__ is other.__class__ and
                super(SubstringTree, self).__eq__(other))

    def simplified(self, min_length=NGRAM_LENGTH):
        """Return a smaller but equivalent tree structure or a string.

        Simplify by turning nodes with only 1 child into mere strings and
        removing nodes with 0. If the top-level node ends up having 0
        children, the final result is ``u''``.
        """
        def simplified(tree_or_string):
            """Typewise dispatcher to turn short strings into '' and
            recursively descend Ands and Ors"""
            if isinstance(tree_or_string, basestring):
                return (tree_or_string if len(tree_or_string) >= min_length
                        else '')
            return tree_or_string.simplified(min_length=min_length)
        # TODO: Think about implementing the Cox method. I now see that I'm
        # going to have to write some kind of theorems into even the FREE
        # method if I want to be able to extract trigrams from ab[cd]
        # (prefixes, cross products), so I might as well use Cox's. We can
        # code his theorems right into the visitor. I don't think it will get
        # too messy. Low-level nodes' visitation will just cast strings to
        # ints, etc., and high-level ones will just apply Cox theorems. Btw,
        # http://code.ohloh.net/file?fid=rfNSbmGXJxqJhWDMLp3VaEMUlgQ&cid=
        # eDOmLT58hyw&s=&fp=305491&mp=&projSelected=true#L0 is PG's
        # explanation of their simplification stuff.

        # Filter out empty strings and empty subtrees, both of which are
        # equally useless. (Remember, adjacent strings in an And don't mean
        # adjacent strings in the found text, so a '' in an Or doesn't help us
        # narrow down the result set at all.)
        # NOTE(review): this relies on Python 2 semantics — basestring, and
        # filter() returning a list so len() works below.
        simple_children = filter(None,
                                 (simplified(n) for n in self))
        if len(simple_children) > 1:
            return self.__class__(simple_children)
        elif len(simple_children) == 1:
            return simple_children[0]
        else:  # Empty nodes occur at empty regex branches.
            return u''
class Useless(SubstringTree):
    """Doubles as the singleton USELESS and a "ruined" Or: appending or
    extending with anything just yields USELESS back.

    Don't construct any more of these.
    """

    def __repr__(self):
        return 'USELESS'

    def appended(self, branch):
        return self

    def extended(self, branches):
        return self
# Stand-in for a subpattern that's useless for producing trigrams. It is opaque
# for our purposes, either intrinsically or just because we're not yet smart
# enough to shatter it into a rain of ORed literals. USELESS breaks the
# continuity between two things we *can* extract trigrams from, meaning we
# shouldn't try making any trigrams that span the two.
# (Singleton: code compares against it with ``is``.)
USELESS = Useless()
class And(SubstringTree):
    """A conjunction node: every child (string or sub-node) occurs in any
    text matching this node.

    Child strings are not necessarily contiguous in the matched text, but
    two strings appended one after another are taken to be contiguous and
    are merged into a single literal.
    """

    # When True, the next string child starts a fresh literal instead of
    # being glued onto the previous one:
    string_was_interrupted = True

    def __repr__(self):
        return 'And(%s)' % super(And, self).__repr__()

    def appended(self, thing):
        """Add ``thing`` (string, And, or Or) as a child; return self.

        Adjacent string literals are merged. USELESS children are dropped
        entirely, though they still break literal contiguity.
        """
        if thing is USELESS:  # TODO: Doesn't handle Ors. Why not?
            # ANDs eat USELESSes; just note the interruption.
            self.string_was_interrupted = True
            return self
        if isinstance(thing, basestring):
            if self.string_was_interrupted:
                self.append(thing)
                self.string_was_interrupted = False
            else:
                # Contiguous with the previous literal: glue them together.
                self[-1] += thing
            return self
        # An And or Or node:
        self.append(thing)
        self.string_was_interrupted = True
        return self

    def extended(self, things):
        """``appended()`` for each of ``things``; returns the final node."""
        node = self
        for thing in things:
            node = node.appended(thing)
        return node
class Or(SubstringTree):
    """A disjunction node: at least one child (string or sub-node) occurs
    in any text matching this node."""

    def __repr__(self):
        return 'Or(%s)' % super(Or, self).__repr__()

    def appended(self, branch):
        """Add one child; return self, or USELESS when ruined.

        Or-ing with a branch we can't extract trigrams from makes the
        whole disjunction useless.
        """
        if branch is USELESS:
            return USELESS
        self.append(branch)
        return self

    def extended(self, branches):
        """Like ``appended`` but for multiple children"""
        if USELESS in branches:
            return USELESS
        self.extend(branches)
        return self
class BadRegex(Exception):
    """Raised when a user-provided regular expression is invalid."""
# Sequences that represent something fancier than just a single, unchanging
# char:
BACKSLASH_METAS = 'AbBdDsSwWZ'
# Single chars that have to be backslashed in regexes lest they mean something
# else:
NONLITERALS = r'][^$?*+(){}|\.'
# This recognizes a subset of Python's regex language, minus lookaround
# assertions, non-greedy quantifiers, and named and other special sorts of
# groups. Lucene doesn't support those, though we might be able to fake it
# later via some transformation. [We're no longer using Lucene regexes, so it
# doesn't matter.]
regex_grammar = Grammar(r"""
regexp = branch more_branches
more_branches = another_branch* # TODO: If I merge this into regexp, why does generic_visit() start getting called for it?
branch = piece*
another_branch = "|" branch
piece = quantified / atom
quantified = atom quantifier
quantifier = "*" / "+" / "?" / repeat
repeat = "{" repeat_range "}"
repeat_range = number ("," number)?
number = ~r"\d+"
# By making each parenthesized subexpr just a "regexp", visit_regexp can
# assign group numbers, starting from 0, and the top-level expression
# conveniently ends up in the conventional group 0.
atom = group / inverted_class / class / hat / dollars / dot / char # Optimize: vacuum up any harmless sequence of chars in one regex, first: [^()[\]^$.?*+{}]+
group = "(" regexp ")"
hat = "^"
dollars = "$"
dot = "."
inverted_class = "[^" class_contents "]"
class = "[" !"^" class_contents "]"
# An unescaped ] is treated as a literal when the first char of a positive
# or inverted character class:
class_contents = "]"? class_items # ['x', USELESS, ('a', 'z')]
class_items = class_item*
class_item = char_range / class_char
char_range = class_char "-" class_char # ('a', 'z') or USELESS
# Chars like $ that are ordinarily special are not special inside classes.
class_char = backslash_char / literal_class_char # 'x' or USELESS
literal_class_char = ~"[^]]"
char = backslash_char / literal_char
backslash_char = "\\" backslash_operand
backslash_operand = backslash_special / backslash_hex / backslash_normal
# We require escaping ]{} even though these are tolerated unescaped by
# Python's re parser:
literal_char = ~r"[^""" +
# \ inside a Python regex char class is an escape char. Escape it:
NONLITERALS.replace('\\', r'\\') + r"""]"
# Char class abbreviations and untypeable chars:
backslash_special = ~r"[""" + BACKSLASH_METAS + r"""aefnrtv]"
backslash_hex = ~r"x[0-9a-fA-F]{2}"
# Normal char with no special meaning:
backslash_normal = ~"."
""")
class SubstringTreeVisitor(NodeVisitor):
"""Visitor that converts a parsed ``regex_grammar`` tree into one suitable
for extracting boolean substring queries from.
In the returned tree, strings represent literal strings, ruling out any
fancy meanings like "*" would have.
I throw away any information that can't contribute to trigrams. In the
future, we might throw away less, expanding things like ``[ab]`` to
``Or(['a', 'b'])``.
"""
unwrapped_exceptions = (BadRegex,)
visit_piece = visit_atom = visit_char = visit_class_char = \
visit_class_item = visit_backslash_operand = NodeVisitor.lift_child
# Not only does a ^ or a $ break up two otherwise contiguous literal
# strings, but there is no text which matches a^b or a$b.
visit_hat = visit_dollars = visit_dot = visit_inverted_class = \
lambda self, node, children: USELESS
backslash_specials = {'a': '\a',
'e': '\x1B', # for PCRE compatibility
'f': '\f',
'n': '\n',
'r': '\r',
't': '\t',
'v': '\v'} # TODO: What about \s and such?
quantifier_expansions = {'*': (0, ''),
'+': (1, ''),
'?': (0, 1)}
def generic_visit(self, node, children):
"""Return the node verbatim if we have nothing better to do.
These will all be thrown away.
"""
return node
def visit_regexp(self, regexp, (branch, other_branches)):
o = Or().appended(branch)
o = o.extended(other_branches)
return o
def visit_branch(self, branch, pieces):
"""Merge adjacent literals (anything we could turn into a string).
Return an And.
"""
# All this thing's possible children return strings, Ors, or USELESS.
a = And().extended(pieces)
if not a:
# Represent a 0-length And with an empty string, for consistency.
a.append('')
return a
def visit_more_branches(self, more_branches, branches):
return branches
def visit_another_branch(self, another_branch, (pipe, branch)):
return branch
def visit_quantified(self, quantified, (atom, (min, max))):
# TODO: This is one place to make smarter. Return USELESS less often.
# At the moment, we just return one copy of ourselves iff we have a min
# of at least 1.
return atom if min else USELESS
def visit_quantifier(self, or_, (quantifier,)):
"""Return a tuple of (min, max), where '' means infinity."""
# It'll either be in the hash, or it will have already been broken
# down into a tuple by visit_repeat_range.
return self.quantifier_expansions.get(quantifier.text, quantifier)
def visit_repeat(self, repeat, (brace, repeat_range, end_brace)):
return repeat_range
def visit_repeat_range(self, repeat_range, children):
"""Return a tuple of (min, max) representing a repeat range.
If max is unspecified (open-ended), return '' for max.
"""
min, comma, max = repeat_range.text.partition(',')
return int(min), (max if max == '' else int(max))
def visit_number(self, number, children):
return int(number)
def visit_group(self, group, (paren, regexp, end_paren)):
return regexp
def visit_class(self, class_, (bracket, no_hat, contents, end_bracket)):
"""Return an Or of unicode chars and 2-tuples of unicode chars.
If the class has too many members, to the point where we guess the
expense of checking so many Or branches in ES would be greater than
the selectivity benefit, return USELESS.
"""
MAX_ORS = 5 # Wild guess. Tune.
if USELESS in contents: # Or-ing with USELESS = USELESS.
return USELESS
if len(contents) > MAX_ORS:
return USELESS
if sum((1 if isinstance(x, basestring) else ord(x[1]) - ord(x[0]) + 1)
for x in contents) > MAX_ORS:
return USELESS
return Or(chain.from_iterable(x if isinstance(x, basestring) else
(unichr(y) for y in xrange(ord(x[0]),
ord(x[1]) + 1))
for x in contents))
def visit_class_contents(self, class_contents, (maybe_bracket,
class_items)):
"""Return a list of unicode chars, USELESS, and 2-tuples of unicode
chars."""
items = [']'] if maybe_bracket.text else []
items.extend(getattr(i, 'text', i) for i in class_items)
return items
def visit_class_items(self, class_item, items):
"""Keep class_item from using visit_generic, which would do the wrong
thing."""
return items
def visit_char_range(self, char_range, (start, _, end)):
"""Return (start char, end char) bounding a char range or USELESS."""
if start is USELESS or end is USELESS:
return USELESS
if start.text > end.text:
raise BadRegex(u'Out-of-order character range: %s-%s' %
(start.text, end.text))
return start.text, end.text
def visit_literal_char(self, literal_char, children):
return literal_char.text
def visit_backslash_special(self, backslash_special, children):
"""Return a char if there is a char equivalent. Otherwise, return a
BackslashSpecial."""
# TODO: Don't return USELESS so much.
return self.backslash_specials.get(backslash_special.text, USELESS)
def visit_backslash_char(self, backslash_char, (backslash, operand)):
"""Return the visited char or special thing. Lose the backslash."""
return operand
def visit_backslash_hex(self, backslash_hex, children):
"""Return the character specified by the hex code."""
return unichr(backslash_hex.text[1:])
def visit_backslash_normal(self, backslash_normal, children):
return backslash_normal.text
class JsRegexVisitor(NodeVisitor):
    """Visitor for converting a parsed DXR-flavored regex to a JS equivalent"""

    # All specials but these just stay the same between DXR-flavored and
    # JS-flavored regexes:
    backslash_specials = {'a': r'\x07',
                          'e': r'\x1B'}

    def text_of_node(self, node, children):
        """Return the node's matched text unchanged."""
        return node.text

    visit_piece = visit_atom = visit_class_item = visit_class_char = \
        visit_char = visit_backslash_operand = NodeVisitor.lift_child

    visit_literal_char = visit_dot = visit_dollars = visit_hat = text_of_node

    # Sequence nodes just concatenate their already-converted children:
    visit_regexp = visit_more_branches = visit_branch = visit_quantified = \
        visit_class_items = lambda self, node, children: u''.join(children)

    def generic_visit(self, node, children):
        """We ignore some nodes and handle them higher up the tree."""
        return node

    def visit_another_branch(self, another_branch, (pipe, branch)):
        return u'|{0}'.format(branch)

    def visit_quantifier(self, quantifier, children):
        """All quantifiers are the same in JS as in DXR-flavored regexes."""
        return quantifier.text

    def visit_group(self, group, (paren, regexp, end_paren)):
        return u'({0})'.format(regexp)

    def visit_inverted_class(self, class_, (bracket_and_hat,
                                            contents,
                                            end_bracket)):
        return u'[^{0}]'.format(u''.join(contents))

    def visit_class(self, class_, (bracket, no_hat, contents, end_bracket)):
        return u'[{0}]'.format(u''.join(contents))

    def visit_class_contents(self, class_contents, (maybe_bracket,
                                                    class_items)):
        # A leading "]" is a literal class member; reattach it.
        bracket = u']' if maybe_bracket.text else u''
        return bracket + u''.join(class_items)

    def visit_char_range(self, char_range, (start, _, end)):
        return u'{0}-{1}'.format(start, end)

    def visit_literal_class_char(self, literal_class_char, children):
        """Turn a boring, normal class char into text."""
        return literal_class_char.text

    def visit_backslash_char(self, backslash_char, (backslash, operand)):
        """We reapply the backslash at lower-level nodes than this so we don't
        accidentally preserve backslashes on chars that don't need them, like
        c.

        That would be bad for c, because \\c is special in JS (but not in
        DXR-flavored regexes.
        """
        return operand

    def visit_backslash_special(self, backslash_special, children):
        """Return the backslash-escaped char, translating \\a and \\e to hex
        escapes (via ``backslash_specials``) and passing the rest through
        unchanged."""
        return u'\\' + self.backslash_specials.get(backslash_special.text,
                                                   backslash_special.text)

    def visit_backslash_hex(self, backslash_hex, children):
        return u'\\' + backslash_hex.text

    def visit_backslash_normal(self, backslash_normal, children):
        """Take unnecessary backslashes away so we don't end up treading on
        metas that are special only in JS, like \\c."""
        char = backslash_normal.text
        return ur'\{0}'.format(char) if char in NONLITERALS else char
class PythonRegexVisitor(JsRegexVisitor):
    """Visitor for converting a parsed DXR-flavored regex to a Python
    equivalent, for highlighting.

    Within the feature set DXR implements, Python's regex language differs
    from JS's in only one relevant spot: Python understands \a natively,
    so only \e needs rewriting. (Python also tolerates unescaped ], {, and
    } where we insist on backslash-escaping, but that difference goes the
    other direction and so doesn't matter when translating DXR to Python.)
    """
    # \e has no native Python spelling; everything else passes through.
    backslash_specials = {'e': r'\x1B'}
def boolean_filter_tree(substrings, trigram_field):
    """Return a (probably nested) ES filter clause expressing the boolean
    constraints embodied in ``substrings``.

    :arg substrings: A SubstringTree (or a bare literal string)
    :arg trigram_field: The ES property under which a trigram index of the
        field to match is stored
    """
    # A bare string is a literal that must appear in the field:
    if isinstance(substrings, basestring):
        return {
            'query': {
                'match_phrase': {
                    trigram_field: substrings
                }
            }
        }
    # Otherwise recurse, combining children with the node's operator:
    operator = 'and' if isinstance(substrings, And) else 'or'
    return {operator: [boolean_filter_tree(child, trigram_field)
                       for child in substrings]}
def es_regex_filter(parsed_regex, raw_field, is_case_sensitive):
    """Return an efficient ES filter to find matches to a regex.

    Looks for fields of which ``regex`` matches a substring. (^ and $ do
    anchor the pattern to the beginning or end of the field, however.)

    :arg parsed_regex: A regex pattern as an AST from regex_grammar
    :arg raw_field: The name of an ES property to match against. The
        lowercase-folded trigram field is assumed to be
        raw_field.trigrams_lower, and the non-folded version
        raw_field.trigrams.
    :arg is_case_sensitive: Whether the match should be performed
        case-sensitive

    Raises NoTrigrams if no trigram-or-longer literal can be extracted,
    so the trigram index cannot narrow the search at all.
    """
    trigram_field = ('%s.trigrams' if is_case_sensitive else
                     '%s.trigrams_lower') % raw_field
    # Boil the parse tree down to the boolean substring constraints:
    substrings = SubstringTreeVisitor().visit(parsed_regex).simplified()
    # If tree is a string, just do a match_phrase. Otherwise, add .* to the
    # front and back, and build some boolean algebra.
    if isinstance(substrings, basestring) and len(substrings) < NGRAM_LENGTH:
        raise NoTrigrams
        # We could alternatively consider doing an unaccelerated Lucene regex
        # query at this point. It would be slower but tolerable on a
        # moz-central-sized codebase: perhaps 500ms rather than 80.
    else:
        # Should be fine even if the regex already starts or ends with .*:
        js_regex = JsRegexVisitor().visit(parsed_regex)
        return {
            'and': [
                # Cheap trigram prefilter narrows the candidate docs first:
                boolean_filter_tree(substrings, trigram_field),
                {
                    'script': {
                        'lang': 'js',
                        # test() tests for containment, not matching:
                        'script': '(new RegExp(pattern, flags)).test(doc["%s"][0])' % raw_field,
                        'params': {
                            'pattern': js_regex,
                            'flags': '' if is_case_sensitive else 'i'
                        }
                    }
                }
            ]
        }
| {
"repo_name": "jbradberry/dxr",
"path": "dxr/trigrammer.py",
"copies": "4",
"size": "23461",
"license": "mit",
"hash": 2493588180821778400,
"line_mean": 38.563237774,
"line_max": 270,
"alpha_frac": 0.6259323984,
"autogenerated": false,
"ratio": 4.0554883318928265,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6681420730292826,
"avg_score": null,
"num_lines": null
} |
"""An extremely asynch approach to unzipping files. This allows you
to unzip a little bit of a file at a time, which means it can
integrate nicely with a reactor.
"""
from __future__ import generators
import zipfile
import os.path
import binascii
import zlib
import struct
class ChunkingZipFile(zipfile.ZipFile):
    """A ZipFile object which, with readfile(), also gives you access
    to a filelike object for each entry.
    """

    def readfile(self, name):
        """Return file-like object for name.

        Seeks the archive to the member's data and returns a ZipFileEntry
        (stored) or DeflatedZipFileEntry (deflated) positioned at the
        start of the member's (possibly compressed) bytes.
        """
        if self.mode not in ("r", "a"):
            raise RuntimeError, 'read() requires mode "r" or "a"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to read ZIP archive that was already closed"
        zinfo = self.getinfo(name)
        self.fp.seek(zinfo.header_offset, 0)
        # Skip the file header:
        fheader = self.fp.read(30)
        if fheader[0:4] != zipfile.stringFileHeader:
            raise zipfile.BadZipfile, "Bad magic number for file header"
        fheader = struct.unpack(zipfile.structFileHeader, fheader)
        fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])
        # Skip the variable-length extra field, if present:
        if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
            self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])
        # Cross-check the local header's name against the central directory:
        if fname != zinfo.orig_filename:
            raise zipfile.BadZipfile, \
                  'File name in directory "%s" and header "%s" differ.' % (
                zinfo.orig_filename, fname)
        if zinfo.compress_type == zipfile.ZIP_STORED:
            return ZipFileEntry(self.fp, zinfo.compress_size)
        elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
            # Mirrors the stdlib's optional-zlib guard; zlib is imported
            # unconditionally in this module, so this always passes here.
            if not zlib:
                raise RuntimeError, \
                      "De-compression requires the (missing) zlib module"
            return DeflatedZipFileEntry(self.fp, zinfo.compress_size)
        else:
            raise zipfile.BadZipfile, \
                  "Unsupported compression method %d for file %s" % \
                  (zinfo.compress_type, name)

    def read(self, name):
        """Return file bytes (as a string) for name.

        Reads the whole entry via readfile() and verifies its CRC-32
        against the central-directory record.
        """
        f = self.readfile(name)
        zinfo = self.getinfo(name)
        bytes = f.read()
        crc = binascii.crc32(bytes)
        if crc != zinfo.CRC:
            raise zipfile.BadZipfile, "Bad CRC-32 for file %s" % name
        return bytes
class ZipFileEntry:
    """File-like object used to read an uncompressed entry in a ZipFile"""
    def __init__(self, fp, length):
        self.fp = fp            # shared archive file pointer
        self.readBytes = 0      # bytes consumed so far
        self.length = length    # total size of this entry
        self.finished = 0       # set once the entry is exhausted
    def tell(self):
        """Return the number of bytes read from this entry."""
        return self.readBytes
    def read(self, n=None):
        """Read up to n bytes (all remaining bytes when n is None)."""
        remaining = self.length - self.readBytes
        if n is None:
            n = remaining
        if self.finished or n == 0:
            return ''
        # Never read past the end of this entry in the shared stream.
        data = self.fp.read(min(n, remaining))
        self.readBytes += len(data)
        if self.readBytes == self.length or len(data) < n:
            self.finished = 1
        return data
    def close(self):
        """Mark the entry finished and drop the file-pointer reference."""
        self.finished = 1
        del self.fp
class DeflatedZipFileEntry:
    """File-like object used to read a deflated entry in a ZipFile"""
    def __init__(self, fp, length):
        self.fp = fp                          # shared archive file pointer
        self.returnedBytes = 0                # decompressed bytes handed to caller
        self.readBytes = 0                    # compressed bytes consumed
        # -15 window bits: raw deflate stream (no zlib header), as used in zip.
        self.decomp = zlib.decompressobj(-15)
        self.buffer = ""                      # decompressed-but-unreturned data
        self.length = length                  # compressed size of this entry
        self.finished = 0
    def tell(self):
        # Position is in terms of *decompressed* bytes returned.
        return self.returnedBytes
    def read(self, n=None):
        if self.finished:
            return ""
        if n is None:
            # Slurp the rest of the compressed stream in one go.
            result = [self.buffer,]
            result.append(self.decomp.decompress(self.fp.read(self.length - self.readBytes)))
            # Feeding a dummy byte before flush() coaxes out any remaining
            # buffered output — a long-standing zlib idiom (py2 str API).
            result.append(self.decomp.decompress("Z"))
            result.append(self.decomp.flush())
            self.buffer = ""
            self.finished = 1
            result = "".join(result)
            self.returnedBytes += len(result)
            return result
        else:
            # Decompress in <=1024-byte compressed chunks until we have
            # at least n decompressed bytes buffered (or input runs dry).
            while len(self.buffer) < n:
                data = self.fp.read(min(n, 1024, self.length - self.readBytes))
                self.readBytes += len(data)
                if not data:
                    # Input exhausted: flush and return whatever is left.
                    result = self.buffer + self.decomp.decompress("Z") + self.decomp.flush()
                    self.finished = 1
                    self.buffer = ""
                    self.returnedBytes += len(result)
                    return result
                else:
                    self.buffer += self.decomp.decompress(data)
            result = self.buffer[:n]
            self.buffer = self.buffer[n:]
            self.returnedBytes += len(result)
            return result
    def close(self):
        self.finished = 1
        del self.fp
def unzip(filename, directory=".", overwrite=0):
    """Unzip the file, synchronously.
    @param filename: the name of the zip file
    @param directory: the directory into which the files will be
    extracted
    @param overwrite: if on, overwrite files when they exist.  You can
    still get an error if you try to create a directory over a file
    with the same name or vice-versa.
    """
    # Drain the incremental generator; each step extracts one file.
    for _ in unzipIter(filename, directory, overwrite):
        pass
DIR_BIT=16  # bit in ZipInfo.external_attr marking a directory entry

def unzipIter(filename, directory='.', overwrite=0):
    """Return a generator for the zipfile.  This implementation will
    yield after every file.

    The value it yields is the number of files left to unzip.

    @param filename: the name of the zip file
    @param directory: destination directory (created if missing)
    @param overwrite: if true, existing files are overwritten
    """
    zf = zipfile.ZipFile(filename, 'r')
    names = zf.namelist()
    if not os.path.exists(directory):
        os.makedirs(directory)
    remaining = countZipFileEntries(filename)
    for entry in names:
        remaining = remaining - 1
        isdir = zf.getinfo(entry).external_attr & DIR_BIT
        f = os.path.join(directory, entry)
        if isdir:
            # overwrite flag only applies to files
            if not os.path.exists(f):
                os.makedirs(f)
        else:
            # create the directory the file will be in first,
            # since we can't guarantee it exists
            fdir = os.path.split(f)[0]
            if not os.path.exists(fdir):
                # BUG FIX: previously created os.makedirs(f) — the file
                # path itself — which then made the open() below fail.
                os.makedirs(fdir)
            if overwrite or not os.path.exists(f):
                # open() instead of the py2-only file() builtin.
                outfile = open(f, 'wb')
                outfile.write(zf.read(entry))
                outfile.close()
        yield remaining
def countZipFileChunks(filename, chunksize):
    """Predict the number of chunks that will be extracted from the
    entire zipfile, given chunksize blocks.
    """
    archive = ChunkingZipFile(filename)
    # Sum the per-entry chunk counts over every member of the archive.
    total = 0
    for member in archive.infolist():
        total += countFileChunks(member, chunksize)
    return total
def countFileChunks(zipinfo, chunksize):
    """Return the number of chunks a member will be extracted in.

    @param zipinfo: a ZipInfo-like object with a file_size attribute
    @param chunksize: extraction block size in bytes

    FIX: use floor division — under Python 3 the original ``size/chunksize``
    produced a float count; divmod keeps the result an int on both 2 and 3.
    """
    count, leftover = divmod(zipinfo.file_size, chunksize)
    if leftover > 0:
        count += 1
    # each file counts as at least one chunk
    return count or 1
def countZipFileEntries(filename):
    """Return the number of members stored in the zip archive."""
    archive = zipfile.ZipFile(filename)
    return len(archive.namelist())
def unzipIterChunky(filename, directory='.', overwrite=0,
                    chunksize=4096):
    """Return a generator for the zipfile.  This implementation will
    yield after every chunksize uncompressed bytes, or at the end of a
    file, whichever comes first.

    The value it yields is the number of chunks left to unzip.
    """
    czf = ChunkingZipFile(filename, 'r')
    if not os.path.exists(directory):
        os.makedirs(directory)
    remaining = countZipFileChunks(filename, chunksize)
    names = czf.namelist()
    infos = czf.infolist()
    for entry, info in zip(names, infos):
        isdir = info.external_attr & DIR_BIT
        f = os.path.join(directory, entry)
        if isdir:
            # overwrite flag only applies to files
            if not os.path.exists(f):
                os.makedirs(f)
            remaining = remaining - 1
            assert remaining >= 0
            yield remaining
        else:
            # create the directory the file will be in first,
            # since we can't guarantee it exists
            fdir = os.path.split(f)[0]
            if not os.path.exists(fdir):
                # BUG FIX: previously os.makedirs(f) created a directory at
                # the *file* path, breaking the subsequent open().
                os.makedirs(fdir)
            if overwrite or not os.path.exists(f):
                outfile = open(f, 'wb')  # open() replaces py2-only file()
                fp = czf.readfile(entry)
                # Empty members produce no chunks in the loop below, so
                # account for their single "chunk" up front.
                if info.file_size == 0:
                    remaining = remaining - 1
                    assert remaining >= 0
                    yield remaining
                # Bind hot-loop lookups to locals.
                fread = fp.read
                ftell = fp.tell
                owrite = outfile.write
                size = info.file_size
                while ftell() < size:
                    hunk = fread(chunksize)
                    owrite(hunk)
                    remaining = remaining - 1
                    assert remaining >= 0
                    yield remaining
                outfile.close()
            else:
                # Skipped file: still burn down its predicted chunk count.
                remaining = remaining - countFileChunks(info, chunksize)
                assert remaining >= 0
                yield remaining
| {
"repo_name": "santisiri/popego",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/python/zipstream.py",
"copies": "1",
"size": "8977",
"license": "bsd-3-clause",
"hash": 1152075640708284500,
"line_mean": 33.3946360153,
"line_max": 93,
"alpha_frac": 0.5756934388,
"autogenerated": false,
"ratio": 4.232437529467233,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013983628591072446,
"num_lines": 261
} |
""" An extremely hack script that was used to render the dock guide icons.
"""
from enaml.qt.QtCore import *
from enaml.qt.QtGui import *
class GuidePad(object):
    """Paints a single dock-guide button (rounded pad plus indicator art).

    FIX: guidePosition() previously returned ``self._guide_pos``, an
    attribute that is never assigned anywhere in the class (calling it
    raised AttributeError); the setter stores into ``self._position``,
    so the getter now reads that attribute.
    """
    # Enum of the supported guide positions / indicator styles.
    LeftPosition = 0
    TopPosition = 1
    RightPosition = 2
    BottomPosition = 3
    CenterTop = 4
    SplitLeft = 5
    SplitTop = 6
    SplitRight = 7
    SplitBottom = 8
    SplitHorizontal = 9
    SplitVertical = 10
    CenterLeft = 11
    CenterRight = 12
    CenterBottom = 13

    @staticmethod
    def makePath(size):
        """Return the rounded-rect outline path for a pad of the given size."""
        path = QPainterPath()
        rect = QRectF(0, 0, size.width(), size.height())
        path.addRoundedRect(rect, 2.0, 2.0)
        return path

    @staticmethod
    def makeTriPath():
        """Return the small triangle path used as a direction arrow."""
        path = QPainterPath()
        path.moveTo(0.0, 0.0)
        path.lineTo(9.0, 0.0)
        path.lineTo(5.0, 4.0)
        path.lineTo(4.0, 4.0)
        path.lineTo(0.0, 0.0)
        return path

    def __init__(self, rect, position=None):
        self._rect = rect
        self._position = position
        self._opacity = 0.8
        self._path = GuidePad.makePath(rect.size())
        self._tri_path = GuidePad.makeTriPath()
        # Background gradient for the pad itself.
        grad = QLinearGradient(0.0, 0.0, 0.0, 1.0)
        grad.setCoordinateMode(QGradient.ObjectBoundingMode)
        grad.setColorAt(0.0, QColor(0xF5, 0xF8, 0xFB))
        grad.setColorAt(0.33, QColor(0xF0, 0xF3, 0xF6))
        grad.setColorAt(0.66, QColor(0xE5, 0xE8, 0xEE))
        grad.setColorAt(1.0, QColor(0xDE, 0xE2, 0xE9))
        self._brush = QBrush(grad)
        # Amber gradient used to fill the "target area" of the indicator.
        grad = QLinearGradient(0.0, 0.0, 0.0, 1.0)
        grad.setCoordinateMode(QGradient.ObjectBoundingMode)
        grad.setColorAt(0.0, QColor(0xFC, 0xEC, 0xBE))
        grad.setColorAt(1.0, QColor(0xF7, 0xC7, 0x73))
        self._fill_brush = QBrush(grad)
        self._pen = QPen(QColor(0x8A, 0x91, 0x9C))

    def rect(self):
        return self._rect

    def setRect(self, rect):
        # Rebuild the outline path only when the size actually changed;
        # an invalid rect disables painting entirely.
        old = self._rect
        self._rect = rect
        if rect.isValid():
            if self._path is None or old.size() != rect.size():
                self._path = GuidePad.makePath(rect.size())
        else:
            self._path = None

    def contains(self, pos):
        return self._rect.contains(pos)

    def intersects(self, rect):
        return self._rect.intersects(rect)

    def guidePosition(self):
        # FIXED: was `return self._guide_pos` — that attribute is never
        # set; setGuidePosition() stores into self._position.
        return self._position

    def setGuidePosition(self, position):
        self._position = position

    def brush(self):
        return self._brush

    def setBrush(self, brush):
        self._brush = brush

    def pen(self):
        return self._pen

    def setPen(self, pen):
        self._pen = pen

    def opacity(self):
        return self._opacity

    def setOpacity(self, opacity):
        self._opacity = opacity

    def paint(self, painter):
        """Paint the pad background and its position-specific indicator."""
        rect = self._rect
        if not rect.isValid():
            return
        painter.save()
        painter.translate(rect.x(), rect.y())
        # Draw the background
        painter.setOpacity(1.0)#self._opacity)
        painter.fillPath(self._path, self._brush)
        painter.setPen(self._pen)
        painter.drawPath(self._path)
        color = QColor(0x44, 0x58, 0x79)
        fill_brush = self._fill_brush
        painter.setPen(color)
        position = self._position
        if position == self.TopPosition:
            width = rect.width() - 8
            height = rect.height() / 2 - 4
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            painter.fillRect(QRect(5, 8, width - 1, height - 4), fill_brush)
            painter.setRenderHint(QPainter.Antialiasing)
            w = rect.width() / 2 + 5
            h = rect.height() - 5
            painter.translate(w, h)
            painter.rotate(180)
            painter.fillPath(self._tri_path, color)
        elif position == self.BottomPosition:
            width = rect.width() - 8
            height = rect.height() / 2 - 4
            painter.drawRect(QRect(4, height + 4, width, height))
            painter.fillRect(QRect(5, height + 5, width - 1, 3), color)
            painter.fillRect(QRect(5, height + 8, width - 1, height - 4), fill_brush)
            painter.setRenderHint(QPainter.Antialiasing)
            w = rect.width() / 2 - 4
            painter.translate(w, 6)
            painter.fillPath(self._tri_path, color)
        elif position == self.LeftPosition:
            width = rect.width() / 2 - 4
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            painter.fillRect(QRect(5, 8, width - 1, height - 4), fill_brush)
            painter.setRenderHint(QPainter.Antialiasing)
            w = rect.width() - 5
            h = rect.height() / 2 - 4
            painter.translate(w, h)
            painter.rotate(90)
            painter.fillPath(self._tri_path, color)
        elif position == self.RightPosition:
            width = rect.width() / 2 - 4
            height = rect.height() - 8
            painter.drawRect(QRect(width + 4, 4, width, height))
            painter.fillRect(QRect(width + 5, 5, width - 1, 3), color)
            painter.fillRect(QRect(width + 5, 8, width - 1, height - 4), fill_brush)
            painter.setRenderHint(QPainter.Antialiasing)
            h = rect.height() / 2 + 5
            painter.translate(6, h)
            painter.rotate(-90)
            painter.fillPath(self._tri_path, color)
        elif position == self.CenterTop:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            painter.fillRect(QRect(5, 8, width - 1, height - 4), fill_brush)
        elif position == self.CenterBottom:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5 + height - 4, width - 1, 3), color)
            painter.fillRect(QRect(5, 5, width - 1, height - 4), fill_brush)
        elif position == self.CenterLeft:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, 3, height), color)
            painter.fillRect(QRect(8, 5, width - 4, height - 1), fill_brush)
        elif position == self.CenterRight:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(width + 1, 5, 3, height - 1), color)
            painter.fillRect(QRect(5, 5, width - 4, height - 1), fill_brush)
        elif position == self.SplitTop:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            painter.fillRect(QRect(5, 8, width - 1, height / 2 - 2), fill_brush)
            pen = QPen(color, 0, Qt.DotLine)
            pen.setDashPattern([1, 1])
            painter.setPen(pen)
            painter.drawLine(5, 8 + height / 2 - 3, 5 + width - 1, 8 + height / 2 - 3)
        elif position == self.SplitBottom:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            h = height / 2 - 2
            painter.fillRect(QRect(5, 8 + h, width - 1, h), fill_brush)
            pen = QPen(color, 0, Qt.DotLine)
            pen.setDashPattern([1, 1])
            painter.setPen(pen)
            painter.drawLine(5, 8 + height / 2 - 2, 5 + width - 1, 8 + height / 2 - 2)
        elif position == self.SplitLeft:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            w = width / 2
            h = height - 4
            painter.fillRect(QRect(5, 8, w - 1, h), fill_brush)
            pen = QPen(color, 0, Qt.DotLine)
            pen.setDashPattern([1, 1])
            pen.setDashOffset(1)
            painter.setPen(pen)
            painter.drawLine(3 + w, 8, 3 + w, 8 + h)
        elif position == self.SplitRight:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            w = width / 2
            h = height - 4
            painter.fillRect(QRect(5 + w, 8, w - 1, h), fill_brush)
            pen = QPen(color, 0, Qt.DotLine)
            pen.setDashPattern([1, 1])
            pen.setDashOffset(1)
            painter.setPen(pen)
            painter.drawLine(5 + w, 8, 5 + w, 8 + h)
        elif position == self.SplitHorizontal:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            w = width / 4
            h = height - 4
            painter.fillRect(QRect(6 + w, 8, 2 * w - 1, h), fill_brush)
            pen = QPen(color, 0, Qt.DotLine)
            pen.setDashPattern([1, 1])
            pen.setDashOffset(1)
            painter.setPen(pen)
            painter.drawLine(6 + w, 8, 6 + w, 8 + h)
            painter.drawLine(4 + 3 * w, 8, 4 + 3 * w, 8 + h)
        elif position == self.SplitVertical:
            width = rect.width() - 8
            height = rect.height() - 8
            painter.drawRect(QRect(4, 4, width, height))
            painter.fillRect(QRect(5, 5, width - 1, 3), color)
            h = height / 4
            painter.fillRect(QRect(5, 8 + h, width - 1, 2 * h - 2), fill_brush)
            pen = QPen(color, 0, Qt.DotLine)
            pen.setDashPattern([1, 1])
            painter.setPen(pen)
            painter.drawLine(5, 8 + h, 4 + width, 8 + h)
            painter.drawLine(5, 10 + 2 * h, 4 + width, 10 + 2 * h)
        # Draw the indicator
        painter.restore()
def render_cross(painter):
    """Render the compact plus-shaped dock-cross outline."""
    corners = [
        (35.0, 0.0), (75.0, 0.0), (75.0, 25.0), (85.0, 35.0),
        (110.0, 35.0), (110.0, 75.0), (85.0, 75.0), (75.0, 85.0),
        (75.0, 110.0), (35.0, 110.0), (35.0, 85.0), (25.0, 75.0),
        (0.0, 75.0), (0.0, 35.0), (25.0, 35.0), (35.0, 25.0), (35.0, 0.0),
    ]
    path = QPainterPath()
    path.moveTo(*corners[0])
    for x, y in corners[1:]:
        path.lineTo(x, y)
    painter.fillPath(path, QColor(0xFF, 0xFF, 0xFF, 0x99))
    painter.setPen(QPen(QColor(0x77, 0x77, 0x77), 1.0))
    painter.drawPath(path)
def render_cross_ex(painter):
    """Render the extended (larger) plus-shaped dock-cross outline."""
    corners = [
        (49.0, 0.0), (89.0, 0.0), (89.0, 39.0), (99.0, 49.0),
        (138.0, 49.0), (138.0, 89.0), (99.0, 89.0), (89.0, 99.0),
        (89.0, 138.0), (49.0, 138.0), (49.0, 99.0), (39.0, 89.0),
        (0.0, 89.0), (0.0, 49.0), (39.0, 49.0), (49.0, 39.0), (49.0, 0.0),
    ]
    path = QPainterPath()
    path.moveTo(*corners[0])
    for x, y in corners[1:]:
        path.lineTo(x, y)
    painter.fillPath(path, QColor(0xFF, 0xFF, 0xFF, 0x99))
    painter.setPen(QPen(QColor(0x77, 0x77, 0x77), 1.0))
    painter.drawPath(path)
def render_north_cross(painter):
    """Render the north-cross outline.

    NOTE(review): this is coordinate-for-coordinate identical to
    render_cross(); kept as a separate function to preserve the script's
    public names.
    """
    corners = [
        (35.0, 0.0), (75.0, 0.0), (75.0, 25.0), (85.0, 35.0),
        (110.0, 35.0), (110.0, 75.0), (85.0, 75.0), (75.0, 85.0),
        (75.0, 110.0), (35.0, 110.0), (35.0, 85.0), (25.0, 75.0),
        (0.0, 75.0), (0.0, 35.0), (25.0, 35.0), (35.0, 25.0), (35.0, 0.0),
    ]
    path = QPainterPath()
    path.moveTo(*corners[0])
    for x, y in corners[1:]:
        path.lineTo(x, y)
    painter.fillPath(path, QColor(0xFF, 0xFF, 0xFF, 0x99))
    painter.setPen(QPen(QColor(0x77, 0x77, 0x77), 1.0))
    painter.drawPath(path)
def render_box(painter):
    """Render a plain 40x40 box outline with a translucent white fill."""
    corners = [(0.0, 0.0), (40.0, 0.0), (40.0, 40.0), (0.0, 40.0), (0.0, 0.0)]
    path = QPainterPath()
    path.moveTo(*corners[0])
    for x, y in corners[1:]:
        path.lineTo(x, y)
    painter.fillPath(path, QColor(0xFF, 0xFF, 0xFF, 0x99))
    painter.setPen(QPen(QColor(0x77, 0x77, 0x77), 1.0))
    painter.drawPath(path)
def render_vbar(painter):
    """Render the small vertical splitter-handle icon (9x30 pad)."""
    outline = QPainterPath()
    outline.addRoundedRect(QRectF(0, 0, 9, 30), 2.0, 2.0)
    gradient = QLinearGradient(0.0, 0.0, 0.0, 1.0)
    gradient.setCoordinateMode(QGradient.ObjectBoundingMode)
    for stop, rgb in ((0.0, (0xF5, 0xF8, 0xFB)),
                      (0.33, (0xF0, 0xF3, 0xF6)),
                      (0.66, (0xE5, 0xE8, 0xEE)),
                      (1.0, (0xDE, 0xE2, 0xE9))):
        gradient.setColorAt(stop, QColor(*rgb))
    painter.fillPath(outline, QBrush(gradient))
    painter.setPen(QPen(QColor(0x8A, 0x91, 0x9C)))
    painter.drawPath(outline)
    # Dark accent stripe down the middle of the handle.
    painter.fillRect(QRect(4, 4, 2, 23), QColor(0x44, 0x58, 0x79))
def render_hbar(painter):
    """Render the small horizontal splitter-handle icon (30x9 pad)."""
    outline = QPainterPath()
    outline.addRoundedRect(QRectF(0, 0, 30, 9), 2.0, 2.0)
    gradient = QLinearGradient(0.0, 0.0, 0.0, 1.0)
    gradient.setCoordinateMode(QGradient.ObjectBoundingMode)
    for stop, rgb in ((0.0, (0xF5, 0xF8, 0xFB)),
                      (0.33, (0xF0, 0xF3, 0xF6)),
                      (0.66, (0xE5, 0xE8, 0xEE)),
                      (1.0, (0xDE, 0xE2, 0xE9))):
        gradient.setColorAt(stop, QColor(*rgb))
    painter.fillPath(outline, QBrush(gradient))
    painter.setPen(QPen(QColor(0x8A, 0x91, 0x9C)))
    painter.drawPath(outline)
    # Dark accent stripe across the middle of the handle.
    accent = QColor(0x44, 0x58, 0x79)
    painter.setPen(accent)
    painter.fillRect(QRect(4, 4, 23, 2), accent)
def render_background(painter):
    """Paint the dotted dark/light checker texture used as icon backdrop."""
    dark = QBrush(QColor(0x00, 0x00, 0x00, 0x10), Qt.Dense6Pattern)
    painter.fillRect(QRect(0, 0, 129, 129), dark)
    # The light pass is offset one pixel down to interleave the dots.
    light = QBrush(QColor(0xFF, 0xFF, 0xFF, 0x10), Qt.Dense6Pattern)
    painter.translate(0, 1)
    painter.fillRect(QRect(0, 0, 129, 129), light)
# Script entry point: render one icon into a 128x128 ARGB image and save it
# next to this file.  Uncomment exactly one render_* call to pick the icon.
app = QApplication([])
image = QImage(QSize(128, 128), QImage.Format_ARGB32_Premultiplied)
image.fill(0)
painter = QPainter(image)
#render_box(painter)
#render_cross(painter)
#render_cross_ex(painter)
#render_vbar(painter)
#render_hbar(painter)
render_background(painter)
#pad = GuidePad(QRect(0, 0, 30, 30), GuidePad.CenterQuads)
#pad.paint(painter)
painter.end()  # must end the painter before the QImage can be saved
import os
path = os.path.join(os.path.dirname(__file__), 'background.png')
image.save(path)
| {
"repo_name": "ContinuumIO/ashiba",
"path": "enaml/enaml/qt/docking/dock_images/guide_render.py",
"copies": "1",
"size": "14712",
"license": "bsd-3-clause",
"hash": -1723708703575212500,
"line_mean": 33.1345707657,
"line_max": 86,
"alpha_frac": 0.5685834693,
"autogenerated": false,
"ratio": 3.042812823164426,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4111396292464426,
"avg_score": null,
"num_lines": null
} |
"""An extremely simple interface to the signing/verifying capabilities
of gnupg.
You must already have the key in the keyring.
"""
from subprocess import PIPE, Popen
from xmlrpc.client import dumps, loads
# see also myplc/plc.d/gpg
import os.path
# Prefer the legacy gpg1 binary when it is installed, else plain gpg.
GPG = '/usr/bin/gpg1' if os.path.exists("/usr/bin/gpg1") else "/usr/bin/gpg"
def _popen_gpg(*args):
    """Return a Popen object to GPG."""
    command = (GPG, '--batch', '--no-tty') + args
    return Popen(command, stdin=PIPE, stdout=PIPE,
                 stderr=PIPE, close_fds=True)
def sign(data):
    """Return <data> signed with the default GPG key.

    Returns the armored, signed message as bytes (raw gpg stdout).
    """
    msg = dumps((data,), methodresponse=True)
    p = _popen_gpg('--armor', '--sign', '--keyring',
                   '/etc/planetlab/secring.gpg', '--no-default-keyring')
    # FIX: dumps() returns str but Popen pipes are binary on Python 3;
    # writing str raised TypeError.  Encode before writing.
    p.stdin.write(msg.encode())
    p.stdin.close()
    signed_msg = p.stdout.read()
    p.stdout.close()
    p.stderr.close()
    p.wait()
    return signed_msg
def verify(signed_msg):
    """If <signed_msg> is a valid signed document, return its contents. Otherwise, return None."""
    p = _popen_gpg('--decrypt', '--keyring', '/usr/boot/pubring.gpg',
                   '--no-default-keyring')
    p.stdin.write(signed_msg)
    p.stdin.close()
    msg = p.stdout.read()
    p.stdout.close()
    p.stderr.close()
    # A non-zero exit status from gpg means the signature did not check out.
    if p.wait():
        return None  # verification failed
    data, = loads(msg)[0]
    return data
| {
"repo_name": "dreibh/planetlab-lxc-nodemanager",
"path": "ticket.py",
"copies": "1",
"size": "1355",
"license": "bsd-3-clause",
"hash": -3717718730675223600,
"line_mean": 30.511627907,
"line_max": 106,
"alpha_frac": 0.6302583026,
"autogenerated": false,
"ratio": 3.2729468599033815,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4403205162503382,
"avg_score": null,
"num_lines": null
} |
"""An extremely simple interface to the signing/verifying capabilities
of gnupg.
You must already have the key in the keyring.
"""
from subprocess import PIPE, Popen
from xmlrpclib import dumps, loads
GPG = '/usr/bin/gpg'  # absolute path to the gpg binary
def _popen_gpg(*args):
    """Return a Popen object to GPG."""
    command = (GPG, '--batch', '--no-tty') + args
    return Popen(command, stdin=PIPE, stdout=PIPE,
                 stderr=PIPE, close_fds=True)
def sign(data):
    """Return <data> signed with the default GPG key."""
    # Wrap the payload as an XML-RPC method response, then pipe it to gpg.
    msg = dumps((data,), methodresponse = True)
    p = _popen_gpg('--armor', '--sign', '--keyring',
                   '/etc/planetlab/secring.gpg', '--no-default-keyring')
    p.stdin.write(msg)
    p.stdin.close()
    signed_msg = p.stdout.read()
    p.stdout.close()
    p.stderr.close()
    p.wait()
    return signed_msg
def verify(signed_msg):
    """If <signed_msg> is a valid signed document, return its contents. Otherwise, return None."""
    p = _popen_gpg('--decrypt', '--keyring', '/usr/boot/pubring.gpg',
                   '--no-default-keyring')
    p.stdin.write(signed_msg)
    p.stdin.close()
    msg = p.stdout.read()
    p.stdout.close()
    p.stderr.close()
    # Non-zero gpg exit status means the signature failed to verify.
    if p.wait():
        return None  # verification failed
    data, = loads(msg)[0]
    return data
| {
"repo_name": "wangyang2013/NodeManager",
"path": "ticket.py",
"copies": "2",
"size": "1253",
"license": "apache-2.0",
"hash": -6229091246316007000,
"line_mean": 29.5609756098,
"line_max": 106,
"alpha_frac": 0.6241021548,
"autogenerated": false,
"ratio": 3.3864864864864863,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5010588641286486,
"avg_score": null,
"num_lines": null
} |
"""A nfqueu that listen for CPS/CPA messages"""
from copy import deepcopy
import sys
from NDprotector.Log import warn
from NDprotector.CertCache import CertCache
from NDprotector.NeighCache import NeighCache
from scapy6send.scapy6 import *
import NDprotector
# The bundled nfqueue bindings live in the project's local "lib" directory;
# make sure it is importable before importing them.
if "lib" not in sys.path:
    sys.path.append("lib")
import nfqueue
def callback(i,payload):
    """a callback function called on each ingoing packets

    Handles SEND Certification Path Solicitation/Advertisement (CPS/CPA)
    messages pulled from an nfqueue.  Routers answer CPS with CPA chains;
    hosts feed received CPA certificates to the CertCache.  The packet is
    always dropped (the verdict is set up-front) since the kernel cannot
    parse these messages itself.
    """
    data = payload.get_data()
    packet = IPv6(data)
    # if something goes wrong and makes this callback crash,
    # the packet is dropped
    payload.set_verdict(nfqueue.NF_DROP)
    # receiving interface
    # NOTE(review): assumes nfqueue's 1-based indev index maps onto
    # get_if_list() order — confirm against the nfqueue bindings.
    interface = get_if_list()[ payload.get_indev() - 1]
    # extract all the TA option from the CPS/CPA
    list_of_node_trustanchor = []
    ta = packet[ICMPv6NDOptTrustAnchor]
    while ta:
        if ta.nametype == 1:
            # we get the name field of the TA option
            list_of_node_trustanchor.append(ta.name_field)
        ta = ta.payload[ICMPv6NDOptTrustAnchor]
    if NDprotector.is_router:
        # we have a CPS
        # (filtering rules were set accordingly)
        # CPS's message ID
        req_id = packet[ICMPv6SEND_CPS].id
        dest_node = packet[IPv6].src
        if dest_node == "::" :
            # if the origin is the unspecified address
            # the answer is on the All-Node multicast address
            dest_node = "ff02::1"
        src_addr = "::"
        if packet[IPv6].dst != "ff02::1" :
            src_addr = packet[IPv6].dst
        else:
            # lookup for an adress on this interface:
            nc = NeighCache()
            configured_addresses = nc.dump_addresses()
            for address in configured_addresses :
                if address.get_interface() == interface :
                    src_addr = str(address)
                    break
        # send multiple as many Certification Path
        # as there is Trust Anchor options
        for path in deepcopy(NDprotector.certification_path):
            trust_anchor = path[0]
            skip_ta = True
            if list_of_node_trustanchor == []:
                # no TA options in the CPS: every path is acceptable
                skip_ta = False
            else:
                # check if this trust anchor is trusted by the node
                # if it isn't, we check for the next cert in the path
                while path and skip_ta:
                    trust_anchor = path[0]
                    for ta in list_of_node_trustanchor:
                        # we found the correct trust anchor
                        if ta in str(Cert(trust_anchor)):
                            skip_ta=False
                            break
                    else:
                        # for/else: no TA matched — pop the head and retry
                        try:
                            del path[0]
                        except IndexError:
                            warn("CertPath.py - callback - this is likely to be a bug\n")
            if skip_ta:
                # we do not have a Certification Path
                # down to this Trust Anchor
                continue
            # number of certificates to send
            # (we does not count the TA as we do not send it
            num_components = len(path) - 2
            # send as many CPA as there is certificates in the Certification Path
            for cert in path[1:]:
                c = Cert(cert)
                warn("sending a CPA message\n")
                p = Ether(src=get_if_hwaddr(interface)) / \
                        IPv6(src=src_addr,dst=dest_node)/ \
                        ICMPv6SEND_CPA(id=req_id,comp=num_components,allcomp=len(path) -1)/ \
                        ICMPv6NDOptCertificate(cert=str(c))
                sendp(p,iface=interface,verbose=NDprotector.verbose)
                num_components -= 1
    else: # we have a CPA
        # connect to the Certificate Cache for future decisions
        certcache = CertCache()
        warn("Receiving a CPA message\n")
        req_id = packet[ICMPv6SEND_CPA].id
        # we only accept CPA if they are destined to all the nodes or
        # if they are destined to our node
        if (packet[IPv6].dst == "ff02::1" and req_id ==0 ) or certcache.id_match(req_id):
            lastCPA = (packet[ICMPv6SEND_CPA].comp == 0)
            # extract all the certificates and feed them to the cache
            certopt = packet[ICMPv6NDOptCertificate]
            while certopt:
                cert = certopt.cert
                cert = cert.output("PEM")
                certcache.storecert(req_id,cert)
                certopt = certopt.payload[ICMPv6NDOptCertificate]
            # when this is the last CPA message, we ask for the
            # certificate path validation process
            if lastCPA:
                certcache.checkcertpath(req_id)
    # regardless the content, we drop them
    # the kernel can not parse them anyway...
    # already done at the beginning of the callback
    # payload.set_verdict(nfqueue.NF_DROP)
def cpscpa_queue():
    """setup the NF_queue to "rule" the CPS/CPA messages
    return the NFQueue object"""
    queue = nfqueue.queue()
    queue.open()
    # AF_INET6 unbind/bind only needs to happen once and is done in In.py.
    queue.set_callback(callback)
    # queue #3 is reserved for the Certificate Path Validation messages
    queue.create_queue(3)
    queue.set_queue_maxlen(5000)
    return queue
| {
"repo_name": "daveti/NDprotector",
"path": "NDprotector/CertPath.py",
"copies": "2",
"size": "5378",
"license": "bsd-3-clause",
"hash": -3444410381783469600,
"line_mean": 29.5568181818,
"line_max": 89,
"alpha_frac": 0.5585719598,
"autogenerated": false,
"ratio": 4.004467609828741,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.556303956962874,
"avg_score": null,
"num_lines": null
} |
"""An FTP client class and some helper functions.
Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds
Example:
>>> from ftplib import FTP
>>> ftp = FTP('ftp.python.org') # connect to host, default port
>>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@
'230 Guest login ok, access restrictions apply.'
>>> ftp.retrlines('LIST') # list directory contents
total 9
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 ..
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc
d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming
drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib
drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub
drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr
-rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg
'226 Transfer complete.'
>>> ftp.quit()
'221 Goodbye.'
>>>
A nice test that reveals some of the network dialogue would be:
python ftplib.py -d localhost -l -p -l
"""
#
# Changes and improvements suggested by Steve Majewski.
# Modified by Jack to work on the mac.
# Modified by Siebren to support docstrings and PASV.
# Modified by Phil Schwartz to add storbinary and storlines callbacks.
# Modified by Giampaolo Rodola' to add TLS support.
#
import os
import sys
import socket
import warnings
from socket import _GLOBAL_DEFAULT_TIMEOUT
__all__ = ["FTP", "Netrc"]
# Magic number from <socket.h>
MSG_OOB = 0x1                           # Process data out of band
# The standard FTP server control port
FTP_PORT = 21
# The sizehint parameter passed to readline() calls
MAXLINE = 8192                          # guards against unbounded server lines
# Exception raised when an error or invalid response is received
class Error(Exception): pass            # base class of all ftplib errors
class error_reply(Error): pass          # unexpected [123]xx reply
class error_temp(Error): pass           # 4xx errors
class error_perm(Error): pass           # 5xx errors
class error_proto(Error): pass          # response does not begin with [1-5]
# All exceptions (hopefully) that may be raised here and that aren't
# (always) programming errors on our side
all_errors = (Error, OSError, EOFError)
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'                           # text form, for the control channel
B_CRLF = b'\r\n'                        # bytes form, for binary data
# The class itself
class FTP:
'''An FTP client class.
To create a connection, call the class using these arguments:
host, user, passwd, acct, timeout
The first four arguments are all strings, and have default value ''.
timeout must be numeric and defaults to None if not passed,
meaning that no timeout will be set on any ftp socket(s)
If a timeout is passed, then this is now the default timeout for all ftp
socket operations for this instance.
Then use self.connect() with optional host and port argument.
To download a file, use ftp.retrlines('RETR ' + filename),
or ftp.retrbinary() with slightly different arguments.
To upload a file, use ftp.storlines() or ftp.storbinary(),
which have an open file as argument (see their definitions
below for details).
The download/upload functions first issue appropriate TYPE
and PORT or PASV commands.
'''
debugging = 0
host = ''
port = FTP_PORT
maxline = MAXLINE
sock = None
file = None
welcome = None
passiveserver = 1
encoding = "latin-1"
# Initialization method (called by class instantiation).
# Initialize host to localhost, port to standard ftp port
# Optional arguments are host (for connect()),
# and user, passwd, acct (for login())
    def __init__(self, host='', user='', passwd='', acct='',
                 timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
        # Record socket options first; connect() consults both.
        self.source_address = source_address
        self.timeout = timeout
        # Convenience: a non-empty host connects immediately, and a
        # non-empty user additionally performs the login sequence.
        if host:
            self.connect(host)
        if user:
            self.login(user, passwd, acct)
    def __enter__(self):
        # Context-manager entry: the connection object itself is the resource.
        return self
# Context management protocol: try to quit() if active
def __exit__(self, *args):
if self.sock is not None:
try:
self.quit()
except (OSError, EOFError):
pass
finally:
if self.sock is not None:
self.close()
def connect(self, host='', port=0, timeout=-999, source_address=None):
'''Connect to host. Arguments are:
- host: hostname to connect to (string, default previous host)
- port: port to connect to (integer, default previous port)
- timeout: the timeout to set against the ftp socket(s)
- source_address: a 2-tuple (host, port) for the socket to bind
to as its source address before connecting.
'''
if host != '':
self.host = host
if port > 0:
self.port = port
if timeout != -999:
self.timeout = timeout
if source_address is not None:
self.source_address = source_address
self.sock = socket.create_connection((self.host, self.port), self.timeout,
source_address=self.source_address)
self.af = self.sock.family
self.file = self.sock.makefile('r', encoding=self.encoding)
self.welcome = self.getresp()
return self.welcome
def getwelcome(self):
'''Get the welcome message from the server.
(this is read and squirreled away by connect())'''
if self.debugging:
print('*welcome*', self.sanitize(self.welcome))
return self.welcome
    def set_debuglevel(self, level):
        '''Set the debugging level.
        The required argument level means:
        0: no debugging output (default)
        1: print commands and responses but not body text etc.
        2: also print raw lines read and sent before stripping CR/LF'''
        self.debugging = level
    debug = set_debuglevel  # historical alias
    def set_pasv(self, val):
        '''Use passive or active mode for data transfers.
        With a false argument, use the normal PORT mode,
        With a true argument, use the PASV command.'''
        # Stored as-is; only its truthiness is consulted later.
        self.passiveserver = val
# Internal: "sanitize" a string for printing
def sanitize(self, s):
if s[:5] in {'pass ', 'PASS '}:
i = len(s.rstrip('\r\n'))
s = s[:5] + '*'*(i-5) + s[i:]
return repr(s)
# Internal: send one line to the server, appending CRLF
def putline(self, line):
if '\r' in line or '\n' in line:
raise ValueError('an illegal newline character should not be contained')
line = line + CRLF
if self.debugging > 1:
print('*put*', self.sanitize(line))
self.sock.sendall(line.encode(self.encoding))
# Internal: send one command to the server (through putline())
def putcmd(self, line):
if self.debugging: print('*cmd*', self.sanitize(line))
self.putline(line)
# Internal: return one line from the server, stripping CRLF.
# Raise EOFError if the connection is closed
def getline(self):
line = self.file.readline(self.maxline + 1)
if len(line) > self.maxline:
raise Error("got more than %d bytes" % self.maxline)
if self.debugging > 1:
print('*get*', self.sanitize(line))
if not line:
raise EOFError
if line[-2:] == CRLF:
line = line[:-2]
elif line[-1:] in CRLF:
line = line[:-1]
return line
# Internal: get a response from the server, which may possibly
# consist of multiple lines. Return a single string with no
# trailing CRLF. If the response consists of multiple lines,
# these are separated by '\n' characters in the string
def getmultiline(self):
line = self.getline()
if line[3:4] == '-':
code = line[:3]
while 1:
nextline = self.getline()
line = line + ('\n' + nextline)
if nextline[:3] == code and \
nextline[3:4] != '-':
break
return line
# Internal: get a response from the server.
# Raise various errors if the response indicates an error
def getresp(self):
resp = self.getmultiline()
if self.debugging:
print('*resp*', self.sanitize(resp))
self.lastresp = resp[:3]
c = resp[:1]
if c in {'1', '2', '3'}:
return resp
if c == '4':
raise error_temp(resp)
if c == '5':
raise error_perm(resp)
raise error_proto(resp)
def voidresp(self):
"""Expect a response beginning with '2'."""
resp = self.getresp()
if resp[:1] != '2':
raise error_reply(resp)
return resp
    def abort(self):
        '''Abort a file transfer. Uses out-of-band data.
        This does not follow the procedure from the RFC to send Telnet
        IP and Synch; that doesn't seem to work with the servers I've
        tried. Instead, just send the ABOR command as OOB data.'''
        line = b'ABOR' + B_CRLF
        if self.debugging > 1:
            print('*put urgent*', self.sanitize(line))
        # MSG_OOB marks the data "urgent" so the server notices the
        # command even while a transfer is in progress.
        self.sock.sendall(line, MSG_OOB)
        resp = self.getmultiline()
        # 426 = transfer aborted; 225/226 = no transfer pending / complete.
        if resp[:3] not in {'426', '225', '226'}:
            raise error_proto(resp)
        return resp
def sendcmd(self, cmd):
'''Send a command and return the response.'''
self.putcmd(cmd)
return self.getresp()
def voidcmd(self, cmd):
"""Send a command and expect a response beginning with '2'."""
self.putcmd(cmd)
return self.voidresp()
def sendport(self, host, port):
'''Send a PORT command with the current host and the given
port number.
'''
hbytes = host.split('.')
pbytes = [repr(port//256), repr(port%256)]
bytes = hbytes + pbytes
cmd = 'PORT ' + ','.join(bytes)
return self.voidcmd(cmd)
def sendeprt(self, host, port):
'''Send an EPRT command with the current host and the given port number.'''
af = 0
if self.af == socket.AF_INET:
af = 1
if self.af == socket.AF_INET6:
af = 2
if af == 0:
raise error_proto('unsupported address family')
fields = ['', repr(af), host, repr(port), '']
cmd = 'EPRT ' + '|'.join(fields)
return self.voidcmd(cmd)
    def makeport(self):
        '''Create a new socket and send a PORT command for it.'''
        err = None
        sock = None
        # Try every address family/socket type getaddrinfo offers until
        # a bind succeeds; remember the last error for reporting.
        for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
            af, socktype, proto, canonname, sa = res
            try:
                sock = socket.socket(af, socktype, proto)
                sock.bind(sa)
            except OSError as _:
                err = _
                if sock:
                    sock.close()
                sock = None
                continue
            break
        if sock is None:
            if err is not None:
                raise err
            else:
                raise OSError("getaddrinfo returns an empty list")
        sock.listen(1)
        port = sock.getsockname()[1]  # Get proper port
        host = self.sock.getsockname()[0]  # Get proper host
        # Advertise the listening endpoint: PORT for IPv4, EPRT otherwise.
        if self.af == socket.AF_INET:
            resp = self.sendport(host, port)
        else:
            resp = self.sendeprt(host, port)
        if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
            sock.settimeout(self.timeout)
        return sock
def makepasv(self):
if self.af == socket.AF_INET:
host, port = parse227(self.sendcmd('PASV'))
else:
host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
return host, port
    def ntransfercmd(self, cmd, rest=None):
        """Initiate a transfer over the data connection.
        If the transfer is active, send a port command and the
        transfer command, and accept the connection. If the server is
        passive, send a pasv command, connect to it, and start the
        transfer command. Either way, return the socket for the
        connection and the expected size of the transfer. The
        expected size may be None if it could not be determined.
        Optional `rest' argument can be a string that is sent as the
        argument to a REST command. This is essentially a server
        marker used to tell the server to skip over any data up to the
        given marker.
        """
        size = None
        if self.passiveserver:
            host, port = self.makepasv()
            conn = socket.create_connection((host, port), self.timeout,
                                            source_address=self.source_address)
            try:
                if rest is not None:
                    self.sendcmd("REST %s" % rest)
                resp = self.sendcmd(cmd)
                # Some servers apparently send a 200 reply to
                # a LIST or STOR command, before the 150 reply
                # (and way before the 226 reply). This seems to
                # be in violation of the protocol (which only allows
                # 1xx or error messages for LIST), so we just discard
                # this response.
                if resp[0] == '2':
                    resp = self.getresp()
                if resp[0] != '1':
                    raise error_reply(resp)
            except:
                # Never leak the freshly opened data socket on failure.
                conn.close()
                raise
        else:
            # Active mode: listen locally, tell the server where, then
            # accept its incoming data connection.
            with self.makeport() as sock:
                if rest is not None:
                    self.sendcmd("REST %s" % rest)
                resp = self.sendcmd(cmd)
                # See above.
                if resp[0] == '2':
                    resp = self.getresp()
                if resp[0] != '1':
                    raise error_reply(resp)
                conn, sockaddr = sock.accept()
                if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                    conn.settimeout(self.timeout)
        if resp[:3] == '150':
            # this is conditional in case we received a 125
            size = parse150(resp)
        return conn, size
def transfercmd(self, cmd, rest=None):
"""Like ntransfercmd() but returns only the socket."""
return self.ntransfercmd(cmd, rest)[0]
def login(self, user = '', passwd = '', acct = ''):
'''Login, default anonymous.'''
if not user:
user = 'anonymous'
if not passwd:
passwd = ''
if not acct:
acct = ''
if user == 'anonymous' and passwd in {'', '-'}:
# If there is no anonymous ftp password specified
# then we'll just use anonymous@
# We don't send any other thing because:
# - We want to remain anonymous
# - We want to stop SPAM
# - We don't want to let ftp sites to discriminate by the user,
# host or country.
passwd = passwd + 'anonymous@'
resp = self.sendcmd('USER ' + user)
if resp[0] == '3':
resp = self.sendcmd('PASS ' + passwd)
if resp[0] == '3':
resp = self.sendcmd('ACCT ' + acct)
if resp[0] != '2':
raise error_reply(resp)
return resp
    def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
        """Retrieve data in binary mode. A new port is created for you.
        Args:
          cmd: A RETR command.
          callback: A single parameter callable to be called on each
                    block of data read.
          blocksize: The maximum number of bytes to read from the
                     socket at one time. [default: 8192]
          rest: Passed to transfercmd(). [default: None]
        Returns:
          The response code.
        """
        self.voidcmd('TYPE I')
        with self.transfercmd(cmd, rest) as conn:
            while 1:
                data = conn.recv(blocksize)
                if not data:
                    break
                callback(data)
            # shutdown ssl layer
            # (a clean TLS closure is required before reading the final
            # control-channel reply on protected data connections)
            if _SSLSocket is not None and isinstance(conn, _SSLSocket):
                conn.unwrap()
        return self.voidresp()
    def retrlines(self, cmd, callback = None):
        """Retrieve data in line mode. A new port is created for you.
        Args:
          cmd: A RETR, LIST, or NLST command.
          callback: An optional single parameter callable that is called
                    for each line with the trailing CRLF stripped.
                    [default: print_line()]
        Returns:
          The response code.
        """
        if callback is None:
            callback = print_line
        resp = self.sendcmd('TYPE A')
        with self.transfercmd(cmd) as conn, \
                conn.makefile('r', encoding=self.encoding) as fp:
            while 1:
                # maxline + 1 lets an over-long line be detected below.
                line = fp.readline(self.maxline + 1)
                if len(line) > self.maxline:
                    raise Error("got more than %d bytes" % self.maxline)
                if self.debugging > 2:
                    print('*retr*', repr(line))
                if not line:
                    break
                if line[-2:] == CRLF:
                    line = line[:-2]
                elif line[-1:] == '\n':
                    line = line[:-1]
                callback(line)
            # shutdown ssl layer
            if _SSLSocket is not None and isinstance(conn, _SSLSocket):
                conn.unwrap()
        return self.voidresp()
    def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
        """Store a file in binary mode. A new port is created for you.
        Args:
          cmd: A STOR command.
          fp: A file-like object with a read(num_bytes) method.
          blocksize: The maximum data size to read from fp and send over
                     the connection at once. [default: 8192]
          callback: An optional single parameter callable that is called on
                    each block of data after it is sent. [default: None]
          rest: Passed to transfercmd(). [default: None]
        Returns:
          The response code.
        """
        self.voidcmd('TYPE I')
        with self.transfercmd(cmd, rest) as conn:
            while 1:
                buf = fp.read(blocksize)
                if not buf:
                    break
                conn.sendall(buf)
                if callback:
                    callback(buf)
            # shutdown ssl layer
            if _SSLSocket is not None and isinstance(conn, _SSLSocket):
                conn.unwrap()
        return self.voidresp()
    def storlines(self, cmd, fp, callback=None):
        """Store a file in line mode. A new port is created for you.
        Args:
          cmd: A STOR command.
          fp: A file-like object with a readline() method.
          callback: An optional single parameter callable that is called on
                    each line after it is sent. [default: None]
        Returns:
          The response code.
        """
        self.voidcmd('TYPE A')
        with self.transfercmd(cmd) as conn:
            while 1:
                buf = fp.readline(self.maxline + 1)
                if len(buf) > self.maxline:
                    raise Error("got more than %d bytes" % self.maxline)
                if not buf:
                    break
                # Normalize any lone CR or LF terminator to CRLF on the wire.
                if buf[-2:] != B_CRLF:
                    if buf[-1] in B_CRLF: buf = buf[:-1]
                    buf = buf + B_CRLF
                conn.sendall(buf)
                if callback:
                    callback(buf)
            # shutdown ssl layer
            if _SSLSocket is not None and isinstance(conn, _SSLSocket):
                conn.unwrap()
        return self.voidresp()
def acct(self, password):
'''Send new account name.'''
cmd = 'ACCT ' + password
return self.voidcmd(cmd)
def nlst(self, *args):
'''Return a list of files in a given directory (default the current).'''
cmd = 'NLST'
for arg in args:
cmd = cmd + (' ' + arg)
files = []
self.retrlines(cmd, files.append)
return files
def dir(self, *args):
'''List a directory in long form.
By default list current directory to stdout.
Optional last argument is callback function; all
non-empty arguments before it are concatenated to the
LIST command. (This *should* only be used for a pathname.)'''
cmd = 'LIST'
func = None
if args[-1:] and type(args[-1]) != type(''):
args, func = args[:-1], args[-1]
for arg in args:
if arg:
cmd = cmd + (' ' + arg)
self.retrlines(cmd, func)
    def mlsd(self, path="", facts=[]):
        '''List a directory in a standardized format by using MLSD
        command (RFC-3659). If path is omitted the current directory
        is assumed. "facts" is a list of strings representing the type
        of information desired (e.g. ["type", "size", "perm"]).
        Return a generator object yielding a tuple of two elements
        for every file found in path.
        First element is the file name, the second one is a dictionary
        including a variable number of "facts" depending on the server
        and whether "facts" argument has been provided.
        '''
        # NOTE(review): the mutable default `facts=[]` is harmless here
        # because the list is only read, never mutated.
        if facts:
            self.sendcmd("OPTS MLST " + ";".join(facts) + ";")
        if path:
            cmd = "MLSD %s" % path
        else:
            cmd = "MLSD"
        lines = []
        self.retrlines(cmd, lines.append)
        for line in lines:
            # Each MLSD line is "fact1=v1;fact2=v2; name".
            facts_found, _, name = line.rstrip(CRLF).partition(' ')
            entry = {}
            # facts_found ends with ';', hence the [:-1] trim.
            for fact in facts_found[:-1].split(";"):
                key, _, value = fact.partition("=")
                entry[key.lower()] = value
            yield (name, entry)
def rename(self, fromname, toname):
'''Rename a file.'''
resp = self.sendcmd('RNFR ' + fromname)
if resp[0] != '3':
raise error_reply(resp)
return self.voidcmd('RNTO ' + toname)
def delete(self, filename):
'''Delete a file.'''
resp = self.sendcmd('DELE ' + filename)
if resp[:3] in {'250', '200'}:
return resp
else:
raise error_reply(resp)
def cwd(self, dirname):
'''Change to a directory.'''
if dirname == '..':
try:
return self.voidcmd('CDUP')
except error_perm as msg:
if msg.args[0][:3] != '500':
raise
elif dirname == '':
dirname = '.' # does nothing, but could return error
cmd = 'CWD ' + dirname
return self.voidcmd(cmd)
def size(self, filename):
'''Retrieve the size of a file.'''
# The SIZE command is defined in RFC-3659
resp = self.sendcmd('SIZE ' + filename)
if resp[:3] == '213':
s = resp[3:].strip()
return int(s)
def mkd(self, dirname):
'''Make a directory, return its full pathname.'''
resp = self.voidcmd('MKD ' + dirname)
# fix around non-compliant implementations such as IIS shipped
# with Windows server 2003
if not resp.startswith('257'):
return ''
return parse257(resp)
def rmd(self, dirname):
'''Remove a directory.'''
return self.voidcmd('RMD ' + dirname)
def pwd(self):
'''Return current working directory.'''
resp = self.voidcmd('PWD')
# fix around non-compliant implementations such as IIS shipped
# with Windows server 2003
if not resp.startswith('257'):
return ''
return parse257(resp)
def quit(self):
'''Quit, and close the connection.'''
resp = self.voidcmd('QUIT')
self.close()
return resp
def close(self):
'''Close the connection without assuming anything about it.'''
try:
file = self.file
self.file = None
if file is not None:
file.close()
finally:
sock = self.sock
self.sock = None
if sock is not None:
sock.close()
try:
import ssl
except ImportError:
_SSLSocket = None
else:
_SSLSocket = ssl.SSLSocket
class FTP_TLS(FTP):
    '''A FTP subclass which adds TLS support to FTP as described
    in RFC-4217.
    Connect as usual to port 21 implicitly securing the FTP control
    connection before authenticating.
    Securing the data connection requires user to explicitly ask
    for it by calling prot_p() method.
    Usage example:
    >>> from ftplib import FTP_TLS
    >>> ftps = FTP_TLS('ftp.python.org')
    >>> ftps.login() # login anonymously previously securing control channel
    '230 Guest login ok, access restrictions apply.'
    >>> ftps.prot_p() # switch to secure data connection
    '200 Protection level set to P'
    >>> ftps.retrlines('LIST') # list directory content securely
    total 9
    drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .
    drwxr-xr-x 8 root wheel 1024 Jan 3 1994 ..
    drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin
    drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc
    d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming
    drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib
    drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub
    drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr
    -rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg
    '226 Transfer complete.'
    >>> ftps.quit()
    '221 Goodbye.'
    >>>
    '''
    # Default protocol; PROTOCOL_SSLv23 negotiates the best mutual version.
    ssl_version = ssl.PROTOCOL_SSLv23
    def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
                 certfile=None, context=None,
                 timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
        """Create the client; a pre-built ssl.SSLContext is mutually
        exclusive with the legacy keyfile/certfile pair."""
        if context is not None and keyfile is not None:
            raise ValueError("context and keyfile arguments are mutually "
                             "exclusive")
        if context is not None and certfile is not None:
            raise ValueError("context and certfile arguments are mutually "
                             "exclusive")
        self.keyfile = keyfile
        self.certfile = certfile
        if context is None:
            context = ssl._create_stdlib_context(self.ssl_version,
                                                 certfile=certfile,
                                                 keyfile=keyfile)
        self.context = context
        self._prot_p = False
        FTP.__init__(self, host, user, passwd, acct, timeout, source_address)
    def login(self, user='', passwd='', acct='', secure=True):
        # Secure the control channel (AUTH) before sending credentials,
        # unless it is already wrapped or the caller opted out.
        if secure and not isinstance(self.sock, ssl.SSLSocket):
            self.auth()
        return FTP.login(self, user, passwd, acct)
    def auth(self):
        '''Set up secure control connection by using TLS/SSL.'''
        if isinstance(self.sock, ssl.SSLSocket):
            raise ValueError("Already using TLS")
        # 'AUTH TLS' for modern protocol versions, 'AUTH SSL' otherwise.
        if self.ssl_version >= ssl.PROTOCOL_SSLv23:
            resp = self.voidcmd('AUTH TLS')
        else:
            resp = self.voidcmd('AUTH SSL')
        self.sock = self.context.wrap_socket(self.sock,
                                             server_hostname=self.host)
        # Rebuild the buffered reader on top of the wrapped socket.
        self.file = self.sock.makefile(mode='r', encoding=self.encoding)
        return resp
    def ccc(self):
        '''Switch back to a clear-text control connection.'''
        if not isinstance(self.sock, ssl.SSLSocket):
            raise ValueError("not using TLS")
        resp = self.voidcmd('CCC')
        self.sock = self.sock.unwrap()
        return resp
    def prot_p(self):
        '''Set up secure data connection.'''
        # PROT defines whether or not the data channel is to be protected.
        # Though RFC-2228 defines four possible protection levels,
        # RFC-4217 only recommends two, Clear and Private.
        # Clear (PROT C) means that no security is to be used on the
        # data-channel, Private (PROT P) means that the data-channel
        # should be protected by TLS.
        # PBSZ command MUST still be issued, but must have a parameter of
        # '0' to indicate that no buffering is taking place and the data
        # connection should not be encapsulated.
        self.voidcmd('PBSZ 0')
        resp = self.voidcmd('PROT P')
        self._prot_p = True
        return resp
    def prot_c(self):
        '''Set up clear text data connection.'''
        resp = self.voidcmd('PROT C')
        self._prot_p = False
        return resp
    # --- Overridden FTP methods
    def ntransfercmd(self, cmd, rest=None):
        # Wrap the data connection in TLS when PROT P is active.
        conn, size = FTP.ntransfercmd(self, cmd, rest)
        if self._prot_p:
            conn = self.context.wrap_socket(conn,
                                            server_hostname=self.host)
        return conn, size
    def abort(self):
        # overridden as we can't pass MSG_OOB flag to sendall()
        line = b'ABOR' + B_CRLF
        self.sock.sendall(line)
        resp = self.getmultiline()
        if resp[:3] not in {'426', '225', '226'}:
            raise error_proto(resp)
        return resp
# Export FTP_TLS now that ssl is known to be importable.
__all__.append('FTP_TLS')
# Extend the catch-all error tuple with SSL errors for TLS connections.
all_errors = (Error, OSError, EOFError, ssl.SSLError)
# Compiled lazily on first use; the 150 reply is only seen for transfers.
_150_re = None
def parse150(resp):
    '''Parse the '150' response for a RETR request.
    Returns the expected transfer size or None; size is not guaranteed to
    be present in the 150 message.
    '''
    if resp[:3] != '150':
        raise error_reply(resp)
    global _150_re
    if _150_re is None:
        import re
        # Raw string: '\(' and '\d' are regex escapes.  A plain literal
        # here is an invalid string escape (DeprecationWarning, and a
        # SyntaxWarning/error in newer Python versions).
        _150_re = re.compile(
            r"150 .* \((\d+) bytes\)", re.IGNORECASE | re.ASCII)
    m = _150_re.match(resp)
    if not m:
        return None
    return int(m.group(1))
# Compiled lazily on first use.
_227_re = None
def parse227(resp):
    '''Parse the '227' response for a PASV request.
    Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)'
    Return ('host.addr.as.numbers', port#) tuple.'''
    if resp[:3] != '227':
        raise error_reply(resp)
    global _227_re
    if _227_re is None:
        import re
        _227_re = re.compile(r'(\d+),(\d+),(\d+),(\d+),(\d+),(\d+)', re.ASCII)
    match = _227_re.search(resp)
    if match is None:
        raise error_proto(resp)
    h1, h2, h3, h4, p1, p2 = match.groups()
    host = '.'.join((h1, h2, h3, h4))
    port = (int(p1) << 8) + int(p2)
    return host, port
def parse229(resp, peer):
    '''Parse the '229' response for an EPSV request.
    Raises error_proto if it does not contain '(|||port|)'
    Return ('host.addr.as.numbers', port#) tuple.'''
    if resp[:3] != '229':
        raise error_reply(resp)
    left = resp.find('(')
    if left < 0:
        raise error_proto(resp)
    right = resp.find(')', left + 1)
    if right < 0:
        raise error_proto(resp)  # should contain '(|||port|)'
    # The first and last characters inside the parens are the delimiter.
    delim = resp[left + 1]
    if delim != resp[right - 1]:
        raise error_proto(resp)
    parts = resp[left + 1:right].split(delim)
    if len(parts) != 5:
        raise error_proto(resp)
    # EPSV carries only the port; the host is the control-connection peer.
    return peer[0], int(parts[3])
def parse257(resp):
    '''Parse the '257' response for a MKD or PWD request.
    This is a response to a MKD or PWD request: a directory name.
    Returns the directoryname in the 257 reply.'''
    if resp[:3] != '257':
        raise error_reply(resp)
    if resp[3:5] != ' "':
        return ''  # Not compliant to RFC 959, but UNIX ftpd does this
    # Per RFC 959, quotes inside the name are doubled (""): undouble
    # them while scanning; a lone closing quote ends the name.
    chars = []
    pos = 5
    end = len(resp)
    while pos < end:
        ch = resp[pos]
        pos += 1
        if ch == '"':
            if pos >= end or resp[pos] != '"':
                break
            pos += 1
        chars.append(ch)
    return ''.join(chars)
def print_line(line):
    """Default retrlines callback: write *line* to stdout."""
    print(line)
def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
    '''Copy file from one FTP-instance to another.'''
    # Server-to-server (FXP-style) copy: the source goes passive and the
    # target connects to it directly; no data flows through this client.
    if not targetname:
        targetname = sourcename
    type = 'TYPE ' + type
    source.voidcmd(type)
    target.voidcmd(type)
    sourcehost, sourceport = parse227(source.sendcmd('PASV'))
    target.sendport(sourcehost, sourceport)
    # RFC 959: the user must "listen" [...] BEFORE sending the
    # transfer request.
    # So: STOR before RETR, because here the target is a "user".
    treply = target.sendcmd('STOR ' + targetname)
    if treply[:3] not in {'125', '150'}:
        raise error_proto  # RFC 959
    sreply = source.sendcmd('RETR ' + sourcename)
    if sreply[:3] not in {'125', '150'}:
        raise error_proto  # RFC 959
    source.voidresp()
    target.voidresp()
class Netrc:
    """Class to parse & provide access to 'netrc' format files.
    See the netrc(4) man page for information on the file format.
    WARNING: This class is obsolete -- use module netrc instead.
    """
    # Fallback credentials filled in by a 'default' entry, if any.
    __defuser = None
    __defpasswd = None
    __defacct = None
    def __init__(self, filename=None):
        """Parse *filename* (default: $HOME/.netrc) into host and macro
        tables.  Raises OSError if no filename is given and $HOME is unset."""
        warnings.warn("This class is deprecated, use the netrc module instead",
                      DeprecationWarning, 2)
        if filename is None:
            if "HOME" in os.environ:
                filename = os.path.join(os.environ["HOME"],
                                        ".netrc")
            else:
                raise OSError("specify file to load or set $HOME")
        self.__hosts = {}
        self.__macros = {}
        fp = open(filename, "r")
        in_macro = 0
        while 1:
            line = fp.readline()
            if not line:
                break
            # Inside a macdef, non-blank lines belong to the macro; the
            # first blank line terminates it.
            if in_macro and line.strip():
                macro_lines.append(line)
                continue
            elif in_macro:
                self.__macros[macro_name] = tuple(macro_lines)
                in_macro = 0
            words = line.split()
            host = user = passwd = acct = None
            default = 0
            i = 0
            # Scan keyword/value pairs; w2 is the token following w1.
            while i < len(words):
                w1 = words[i]
                if i+1 < len(words):
                    w2 = words[i + 1]
                else:
                    w2 = None
                if w1 == 'default':
                    default = 1
                elif w1 == 'machine' and w2:
                    host = w2.lower()
                    i = i + 1
                elif w1 == 'login' and w2:
                    user = w2
                    i = i + 1
                elif w1 == 'password' and w2:
                    passwd = w2
                    i = i + 1
                elif w1 == 'account' and w2:
                    acct = w2
                    i = i + 1
                elif w1 == 'macdef' and w2:
                    macro_name = w2
                    macro_lines = []
                    in_macro = 1
                    break
                i = i + 1
            if default:
                # Later 'default' lines only fill fields still unset.
                self.__defuser = user or self.__defuser
                self.__defpasswd = passwd or self.__defpasswd
                self.__defacct = acct or self.__defacct
            if host:
                # Merge with any earlier entry for the same machine.
                if host in self.__hosts:
                    ouser, opasswd, oacct = \
                           self.__hosts[host]
                    user = user or ouser
                    passwd = passwd or opasswd
                    acct = acct or oacct
                self.__hosts[host] = user, passwd, acct
        fp.close()
    def get_hosts(self):
        """Return a list of hosts mentioned in the .netrc file."""
        return self.__hosts.keys()
    def get_account(self, host):
        """Returns login information for the named host.
        The return value is a triple containing userid,
        password, and the accounting field.
        """
        host = host.lower()
        user = passwd = acct = None
        if host in self.__hosts:
            user, passwd, acct = self.__hosts[host]
        # Fall back to the 'default' entry for any unset field.
        user = user or self.__defuser
        passwd = passwd or self.__defpasswd
        acct = acct or self.__defacct
        return user, passwd, acct
    def get_macros(self):
        """Return a list of all defined macro names."""
        return self.__macros.keys()
    def get_macro(self, macro):
        """Return a sequence of lines which define a named macro."""
        return self.__macros[macro]
def test():
    '''Test program.
    Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ...
    -d dir
    -l list
    -p password
    '''
    # Interactive smoke test driven entirely from the command line.
    if len(sys.argv) < 2:
        print(test.__doc__)
        sys.exit(0)
    debugging = 0
    rcfile = None
    # Each leading -d bumps the debug level by one.
    while sys.argv[1] == '-d':
        debugging = debugging+1
        del sys.argv[1]
    if sys.argv[1][:2] == '-r':
        # get name of alternate ~/.netrc file:
        rcfile = sys.argv[1][2:]
        del sys.argv[1]
    host = sys.argv[1]
    ftp = FTP(host)
    ftp.set_debuglevel(debugging)
    userid = passwd = acct = ''
    try:
        netrc = Netrc(rcfile)
    except OSError:
        if rcfile is not None:
            sys.stderr.write("Could not open account file"
                             " -- using anonymous login.")
    else:
        try:
            userid, passwd, acct = netrc.get_account(host)
        except KeyError:
            # no account for host
            sys.stderr.write(
                    "No account -- using anonymous login.")
    ftp.login(userid, passwd, acct)
    # Remaining arguments are interpreted in order: -l lists, -d changes
    # directory, -p toggles passive mode, anything else is retrieved.
    for file in sys.argv[2:]:
        if file[:2] == '-l':
            ftp.dir(file[2:])
        elif file[:2] == '-d':
            cmd = 'CWD'
            if file[2:]: cmd = cmd + ' ' + file[2:]
            resp = ftp.sendcmd(cmd)
        elif file == '-p':
            ftp.set_pasv(not ftp.passiveserver)
        else:
            ftp.retrbinary('RETR ' + file, \
                           sys.stdout.write, 1024)
    ftp.quit()
if __name__ == '__main__':
    test()
| {
"repo_name": "IronLanguages/ironpython3",
"path": "Src/StdLib/Lib/ftplib.py",
"copies": "2",
"size": "38532",
"license": "apache-2.0",
"hash": -5585125010265389000,
"line_mean": 34.2534309241,
"line_max": 98,
"alpha_frac": 0.5352693865,
"autogenerated": false,
"ratio": 4.1330043977260535,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0011024139348155956,
"num_lines": 1093
} |
"""An FTP client class and some helper functions.
Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds
Example:
>>> from ftplib import FTP
>>> ftp = FTP('ftp.python.org') # connect to host, default port
>>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@
'230 Guest login ok, access restrictions apply.'
>>> ftp.retrlines('LIST') # list directory contents
total 9
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 ..
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc
d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming
drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib
drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub
drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr
-rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg
'226 Transfer complete.'
>>> ftp.quit()
'221 Goodbye.'
>>>
A nice test that reveals some of the network dialogue would be:
python ftplib.py -d localhost -l -p -l
"""
#
# Changes and improvements suggested by Steve Majewski.
# Modified by Jack to work on the mac.
# Modified by Siebren to support docstrings and PASV.
# Modified by Phil Schwartz to add storbinary and storlines callbacks.
# Modified by Giampaolo Rodola' to add TLS support.
#
import os
import sys
# Import SOCKS module if it exists, else standard socket module socket
try:
import SOCKS; socket = SOCKS; del SOCKS # import SOCKS as socket
from socket import getfqdn; socket.getfqdn = getfqdn; del getfqdn
except ImportError:
import socket
from socket import _GLOBAL_DEFAULT_TIMEOUT
__all__ = ["FTP","Netrc"]
# Magic number from <socket.h>
MSG_OOB = 0x1 # Process data out of band
# The standard FTP server control port
FTP_PORT = 21
# The sizehint parameter passed to readline() calls
MAXLINE = 8192
# Exception raised when an error or invalid response is received
class Error(Exception): pass
class error_reply(Error): pass # unexpected [123]xx reply
class error_temp(Error): pass # 4xx errors
class error_perm(Error): pass # 5xx errors
class error_proto(Error): pass # response does not begin with [1-5]
# All exceptions (hopefully) that may be raised here and that aren't
# (always) programming errors on our side
all_errors = (Error, IOError, EOFError)
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
# The class itself
class FTP:
'''An FTP client class.
To create a connection, call the class using these arguments:
host, user, passwd, acct, timeout
The first four arguments are all strings, and have default value ''.
timeout must be numeric and defaults to None if not passed,
meaning that no timeout will be set on any ftp socket(s)
If a timeout is passed, then this is now the default timeout for all ftp
socket operations for this instance.
Then use self.connect() with optional host and port argument.
To download a file, use ftp.retrlines('RETR ' + filename),
or ftp.retrbinary() with slightly different arguments.
To upload a file, use ftp.storlines() or ftp.storbinary(),
which have an open file as argument (see their definitions
below for details).
The download/upload functions first issue appropriate TYPE
and PORT or PASV commands.
'''
debugging = 0
host = ''
port = FTP_PORT
maxline = MAXLINE
sock = None
file = None
welcome = None
passiveserver = 1
# Initialization method (called by class instantiation).
# Initialize host to localhost, port to standard ftp port
# Optional arguments are host (for connect()),
# and user, passwd, acct (for login())
def __init__(self, host='', user='', passwd='', acct='',
timeout=_GLOBAL_DEFAULT_TIMEOUT):
self.timeout = timeout
if host:
self.connect(host)
if user:
self.login(user, passwd, acct)
def connect(self, host='', port=0, timeout=-999):
'''Connect to host. Arguments are:
- host: hostname to connect to (string, default previous host)
- port: port to connect to (integer, default previous port)
'''
if host != '':
self.host = host
if port > 0:
self.port = port
if timeout != -999:
self.timeout = timeout
self.sock = socket.create_connection((self.host, self.port), self.timeout)
self.af = self.sock.family
self.file = self.sock.makefile('rb')
self.welcome = self.getresp()
return self.welcome
def getwelcome(self):
'''Get the welcome message from the server.
(this is read and squirreled away by connect())'''
if self.debugging:
print '*welcome*', self.sanitize(self.welcome)
return self.welcome
def set_debuglevel(self, level):
'''Set the debugging level.
The required argument level means:
0: no debugging output (default)
1: print commands and responses but not body text etc.
2: also print raw lines read and sent before stripping CR/LF'''
self.debugging = level
debug = set_debuglevel
def set_pasv(self, val):
'''Use passive or active mode for data transfers.
With a false argument, use the normal PORT mode,
With a true argument, use the PASV command.'''
self.passiveserver = val
# Internal: "sanitize" a string for printing
def sanitize(self, s):
if s[:5] == 'pass ' or s[:5] == 'PASS ':
i = len(s)
while i > 5 and s[i-1] in '\r\n':
i = i-1
s = s[:5] + '*'*(i-5) + s[i:]
return repr(s)
# Internal: send one line to the server, appending CRLF
def putline(self, line):
line = line + CRLF
if self.debugging > 1: print '*put*', self.sanitize(line)
self.sock.sendall(line)
# Internal: send one command to the server (through putline())
def putcmd(self, line):
if self.debugging: print '*cmd*', self.sanitize(line)
self.putline(line)
# Internal: return one line from the server, stripping CRLF.
# Raise EOFError if the connection is closed
def getline(self):
line = self.file.readline(self.maxline + 1)
if len(line) > self.maxline:
raise Error("got more than %d bytes" % self.maxline)
if self.debugging > 1:
print '*get*', self.sanitize(line)
if not line: raise EOFError
if line[-2:] == CRLF: line = line[:-2]
elif line[-1:] in CRLF: line = line[:-1]
return line
# Internal: get a response from the server, which may possibly
# consist of multiple lines. Return a single string with no
# trailing CRLF. If the response consists of multiple lines,
# these are separated by '\n' characters in the string
def getmultiline(self):
line = self.getline()
if line[3:4] == '-':
code = line[:3]
while 1:
nextline = self.getline()
line = line + ('\n' + nextline)
if nextline[:3] == code and \
nextline[3:4] != '-':
break
return line
# Internal: get a response from the server.
# Raise various errors if the response indicates an error
def getresp(self):
resp = self.getmultiline()
if self.debugging: print '*resp*', self.sanitize(resp)
self.lastresp = resp[:3]
c = resp[:1]
if c in ('1', '2', '3'):
return resp
if c == '4':
raise error_temp, resp
if c == '5':
raise error_perm, resp
raise error_proto, resp
def voidresp(self):
"""Expect a response beginning with '2'."""
resp = self.getresp()
if resp[:1] != '2':
raise error_reply, resp
return resp
def abort(self):
'''Abort a file transfer. Uses out-of-band data.
This does not follow the procedure from the RFC to send Telnet
IP and Synch; that doesn't seem to work with the servers I've
tried. Instead, just send the ABOR command as OOB data.'''
line = 'ABOR' + CRLF
if self.debugging > 1: print '*put urgent*', self.sanitize(line)
self.sock.sendall(line, MSG_OOB)
resp = self.getmultiline()
if resp[:3] not in ('426', '225', '226'):
raise error_proto, resp
def sendcmd(self, cmd):
'''Send a command and return the response.'''
self.putcmd(cmd)
return self.getresp()
def voidcmd(self, cmd):
"""Send a command and expect a response beginning with '2'."""
self.putcmd(cmd)
return self.voidresp()
def sendport(self, host, port):
'''Send a PORT command with the current host and the given
port number.
'''
hbytes = host.split('.')
pbytes = [repr(port//256), repr(port%256)]
bytes = hbytes + pbytes
cmd = 'PORT ' + ','.join(bytes)
return self.voidcmd(cmd)
def sendeprt(self, host, port):
'''Send a EPRT command with the current host and the given port number.'''
af = 0
if self.af == socket.AF_INET:
af = 1
if self.af == socket.AF_INET6:
af = 2
if af == 0:
raise error_proto, 'unsupported address family'
fields = ['', repr(af), host, repr(port), '']
cmd = 'EPRT ' + '|'.join(fields)
return self.voidcmd(cmd)
def makeport(self):
'''Create a new socket and send a PORT command for it.'''
err = None
sock = None
for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
af, socktype, proto, canonname, sa = res
try:
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
except socket.error, err:
if sock:
sock.close()
sock = None
continue
break
if sock is None:
if err is not None:
raise err
else:
raise socket.error("getaddrinfo returns an empty list")
sock.listen(1)
port = sock.getsockname()[1] # Get proper port
host = self.sock.getsockname()[0] # Get proper host
if self.af == socket.AF_INET:
resp = self.sendport(host, port)
else:
resp = self.sendeprt(host, port)
if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(self.timeout)
return sock
def makepasv(self):
if self.af == socket.AF_INET:
host, port = parse227(self.sendcmd('PASV'))
else:
host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
return host, port
def ntransfercmd(self, cmd, rest=None):
    """Initiate a transfer over the data connection.

    If the transfer is active, send a port command and the
    transfer command, and accept the connection.  If the server is
    passive, send a pasv command, connect to it, and start the
    transfer command.  Either way, return the socket for the
    connection and the expected size of the transfer.  The
    expected size may be None if it could not be determined.

    Optional `rest' argument can be a string that is sent as the
    argument to a REST command.  This is essentially a server
    marker used to tell the server to skip over any data up to the
    given marker.
    """
    size = None
    if self.passiveserver:
        host, port = self.makepasv()
        conn = socket.create_connection((host, port), self.timeout)
        try:
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            # Some servers apparently send a 200 reply to
            # a LIST or STOR command, before the 150 reply
            # (and way before the 226 reply). This seems to
            # be in violation of the protocol (which only allows
            # 1xx or error messages for LIST), so we just discard
            # this response.
            if resp[0] == '2':
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
        except:
            # Never leak the data connection on a failed setup.
            conn.close()
            raise
    else:
        # Active mode: we listen locally and the server connects to us.
        sock = self.makeport()
        try:
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            # See above.
            if resp[0] == '2':
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
            conn, sockaddr = sock.accept()
            if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                conn.settimeout(self.timeout)
        finally:
            # The listening socket is no longer needed once the data
            # connection has been accepted (or setup has failed).
            sock.close()
    if resp[:3] == '150':
        # this is conditional in case we received a 125
        size = parse150(resp)
    return conn, size
def transfercmd(self, cmd, rest=None):
    """Like ntransfercmd() but return only the data socket."""
    conn, size = self.ntransfercmd(cmd, rest)
    return conn
def login(self, user = '', passwd = '', acct = ''):
'''Login, default anonymous.'''
if not user: user = 'anonymous'
if not passwd: passwd = ''
if not acct: acct = ''
if user == 'anonymous' and passwd in ('', '-'):
# If there is no anonymous ftp password specified
# then we'll just use anonymous@
# We don't send any other thing because:
# - We want to remain anonymous
# - We want to stop SPAM
# - We don't want to let ftp sites to discriminate by the user,
# host or country.
passwd = passwd + 'anonymous@'
resp = self.sendcmd('USER ' + user)
if resp[0] == '3': resp = self.sendcmd('PASS ' + passwd)
if resp[0] == '3': resp = self.sendcmd('ACCT ' + acct)
if resp[0] != '2':
raise error_reply, resp
return resp
def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
    """Retrieve data in binary mode.  A new port is created for you.

    Args:
      cmd: A RETR command.
      callback: A single parameter callable to be called on each
                block of data read.
      blocksize: The maximum number of bytes to read from the
                 socket at one time.  [default: 8192]
      rest: Passed to transfercmd().  [default: None]

    Returns:
      The response code.
    """
    self.voidcmd('TYPE I')
    conn = self.transfercmd(cmd, rest)
    try:
        while 1:
            data = conn.recv(blocksize)
            if not data:
                break
            callback(data)
    finally:
        # Fix: close the data connection even when recv() or the
        # callback raises (the FTP_TLS override already does this).
        conn.close()
    return self.voidresp()
def retrlines(self, cmd, callback = None):
"""Retrieve data in line mode. A new port is created for you.
Args:
cmd: A RETR, LIST, NLST, or MLSD command.
callback: An optional single parameter callable that is called
for each line with the trailing CRLF stripped.
[default: print_line()]
Returns:
The response code.
"""
if callback is None: callback = print_line
resp = self.sendcmd('TYPE A')
conn = self.transfercmd(cmd)
fp = conn.makefile('rb')
while 1:
line = fp.readline(self.maxline + 1)
if len(line) > self.maxline:
raise Error("got more than %d bytes" % self.maxline)
if self.debugging > 2: print '*retr*', repr(line)
if not line:
break
if line[-2:] == CRLF:
line = line[:-2]
elif line[-1:] == '\n':
line = line[:-1]
callback(line)
fp.close()
conn.close()
return self.voidresp()
def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
    """Store a file in binary mode.  A new port is created for you.

    Args:
      cmd: A STOR command.
      fp: A file-like object with a read(num_bytes) method.
      blocksize: The maximum data size to read from fp and send over
                 the connection at once.  [default: 8192]
      callback: An optional single parameter callable that is called on
                each block of data after it is sent.  [default: None]
      rest: Passed to transfercmd().  [default: None]

    Returns:
      The response code.
    """
    self.voidcmd('TYPE I')
    conn = self.transfercmd(cmd, rest)
    try:
        while 1:
            buf = fp.read(blocksize)
            if not buf:
                break
            conn.sendall(buf)
            if callback:
                callback(buf)
    finally:
        # Fix: close the data connection even when read()/sendall()
        # or the callback raises (matches the FTP_TLS override).
        conn.close()
    return self.voidresp()
def storlines(self, cmd, fp, callback=None):
    """Store a file in line mode.  A new port is created for you.

    Args:
      cmd: A STOR command.
      fp: A file-like object with a readline() method.
      callback: An optional single parameter callable that is called on
                each line after it is sent.  [default: None]

    Returns:
      The response code.
    """
    self.voidcmd('TYPE A')
    conn = self.transfercmd(cmd)
    try:
        while 1:
            buf = fp.readline(self.maxline + 1)
            if len(buf) > self.maxline:
                raise Error("got more than %d bytes" % self.maxline)
            if not buf:
                break
            # Normalise the line ending to CRLF before sending.
            if buf[-2:] != CRLF:
                if buf[-1] in CRLF:
                    buf = buf[:-1]
                buf = buf + CRLF
            conn.sendall(buf)
            if callback:
                callback(buf)
    finally:
        # Fix: close the data connection even on error
        # (matches the FTP_TLS override).
        conn.close()
    return self.voidresp()
def acct(self, password):
    '''Send an ACCT command carrying the new account name.'''
    return self.voidcmd('ACCT ' + password)
def nlst(self, *args):
    '''Return a list of file names in a given directory (default the current).'''
    cmd = ' '.join(('NLST',) + args)
    files = []
    self.retrlines(cmd, files.append)
    return files
def dir(self, *args):
    '''List a directory in long form.

    By default the current directory is listed to stdout.  If the
    last argument is not a string it is taken as the callback passed
    to retrlines(); all non-empty string arguments before it are
    concatenated to the LIST command.  (This *should* only be used
    for a pathname.)
    '''
    func = None
    if args[-1:] and type(args[-1]) != type(''):
        args, func = args[:-1], args[-1]
    cmd = ' '.join(['LIST'] + [arg for arg in args if arg])
    self.retrlines(cmd, func)
def rename(self, fromname, toname):
'''Rename a file.'''
resp = self.sendcmd('RNFR ' + fromname)
if resp[0] != '3':
raise error_reply, resp
return self.voidcmd('RNTO ' + toname)
def delete(self, filename):
'''Delete a file.'''
resp = self.sendcmd('DELE ' + filename)
if resp[:3] in ('250', '200'):
return resp
else:
raise error_reply, resp
def cwd(self, dirname):
    '''Change the working directory on the server.'''
    if dirname == '..':
        try:
            # Prefer the dedicated CDUP command for going up one level.
            return self.voidcmd('CDUP')
        except error_perm, msg:
            # Only fall back to a literal 'CWD ..' when the server says
            # 500 (command unrecognized); other errors propagate.
            if msg.args[0][:3] != '500':
                raise
    elif dirname == '':
        dirname = '.' # does nothing, but could return error
    cmd = 'CWD ' + dirname
    return self.voidcmd(cmd)
def size(self, filename):
    '''Retrieve the size of a file via the SIZE command (RFC 3659).

    Returns the size as an integer, or None when the server does not
    answer with a 213 reply.
    '''
    resp = self.sendcmd('SIZE ' + filename)
    if resp[:3] != '213':
        return None
    value = resp[3:].strip()
    try:
        return int(value)
    except (OverflowError, ValueError):
        return long(value)
def mkd(self, dirname):
    '''Make a directory; return its full pathname from the 257 reply.'''
    return parse257(self.sendcmd('MKD ' + dirname))
def rmd(self, dirname):
    '''Remove a directory on the server.'''
    return self.voidcmd('RMD ' + dirname)
def pwd(self):
    '''Return the server's current working directory.'''
    return parse257(self.sendcmd('PWD'))
def quit(self):
    '''Politely end the session: send QUIT, then close the connection.'''
    resp = self.voidcmd('QUIT')
    self.close()
    return resp
def close(self):
    '''Close the connection without assuming anything about it.'''
    if self.file is not None:
        self.file.close()
    if self.sock is not None:
        self.sock.close()
    # Drop both references so a repeated close() is a harmless no-op.
    self.file = self.sock = None
try:
import ssl
except ImportError:
pass
else:
class FTP_TLS(FTP):
    '''A FTP subclass which adds TLS support to FTP as described
    in RFC-4217.

    Connect as usual to port 21 implicitly securing the FTP control
    connection before authenticating.

    Securing the data connection requires user to explicitly ask
    for it by calling prot_p() method.

    Usage example:
    >>> from ftplib import FTP_TLS
    >>> ftps = FTP_TLS('ftp.python.org')
    >>> ftps.login() # login anonymously previously securing control channel
    '230 Guest login ok, access restrictions apply.'
    >>> ftps.prot_p() # switch to secure data connection
    '200 Protection level set to P'
    >>> ftps.retrlines('LIST') # list directory content securely
    total 9
    drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .
    drwxr-xr-x 8 root wheel 1024 Jan 3 1994 ..
    drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin
    drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc
    d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming
    drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib
    drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub
    drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr
    -rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg
    '226 Transfer complete.'
    >>> ftps.quit()
    '221 Goodbye.'
    >>>
    '''
    # TLS protocol version used for both control and data channels.
    ssl_version = ssl.PROTOCOL_TLSv1

    def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
                 certfile=None, timeout=_GLOBAL_DEFAULT_TIMEOUT):
        # keyfile/certfile are handed straight to ssl.wrap_socket().
        self.keyfile = keyfile
        self.certfile = certfile
        # The data channel starts out unprotected until prot_p().
        self._prot_p = False
        FTP.__init__(self, host, user, passwd, acct, timeout)

    def login(self, user='', passwd='', acct='', secure=True):
        # Secure the control channel (AUTH) before sending credentials,
        # unless it is already wrapped or the caller opted out.
        if secure and not isinstance(self.sock, ssl.SSLSocket):
            self.auth()
        return FTP.login(self, user, passwd, acct)

    def auth(self):
        '''Set up secure control connection by using TLS/SSL.'''
        if isinstance(self.sock, ssl.SSLSocket):
            raise ValueError("Already using TLS")
        if self.ssl_version == ssl.PROTOCOL_TLSv1:
            resp = self.voidcmd('AUTH TLS')
        else:
            resp = self.voidcmd('AUTH SSL')
        # Wrap the existing control socket and rebuild the buffered
        # file object on top of the new TLS socket.
        self.sock = ssl.wrap_socket(self.sock, self.keyfile, self.certfile,
                                    ssl_version=self.ssl_version)
        self.file = self.sock.makefile(mode='rb')
        return resp

    def prot_p(self):
        '''Set up secure data connection.'''
        # PROT defines whether or not the data channel is to be protected.
        # Though RFC-2228 defines four possible protection levels,
        # RFC-4217 only recommends two, Clear and Private.
        # Clear (PROT C) means that no security is to be used on the
        # data-channel, Private (PROT P) means that the data-channel
        # should be protected by TLS.
        # PBSZ command MUST still be issued, but must have a parameter of
        # '0' to indicate that no buffering is taking place and the data
        # connection should not be encapsulated.
        self.voidcmd('PBSZ 0')
        resp = self.voidcmd('PROT P')
        self._prot_p = True
        return resp

    def prot_c(self):
        '''Set up clear text data connection.'''
        resp = self.voidcmd('PROT C')
        self._prot_p = False
        return resp

    # --- Overridden FTP methods

    def ntransfercmd(self, cmd, rest=None):
        # Wrap the data socket in TLS when PROT P is active.
        conn, size = FTP.ntransfercmd(self, cmd, rest)
        if self._prot_p:
            conn = ssl.wrap_socket(conn, self.keyfile, self.certfile,
                                   ssl_version=self.ssl_version)
        return conn, size

    def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
        self.voidcmd('TYPE I')
        conn = self.transfercmd(cmd, rest)
        try:
            while 1:
                data = conn.recv(blocksize)
                if not data:
                    break
                callback(data)
            # shutdown ssl layer
            if isinstance(conn, ssl.SSLSocket):
                conn.unwrap()
        finally:
            conn.close()
        return self.voidresp()

    def retrlines(self, cmd, callback = None):
        if callback is None: callback = print_line
        resp = self.sendcmd('TYPE A')
        conn = self.transfercmd(cmd)
        fp = conn.makefile('rb')
        try:
            while 1:
                # Bounded readline guards against unterminated lines.
                line = fp.readline(self.maxline + 1)
                if len(line) > self.maxline:
                    raise Error("got more than %d bytes" % self.maxline)
                if self.debugging > 2: print '*retr*', repr(line)
                if not line:
                    break
                if line[-2:] == CRLF:
                    line = line[:-2]
                elif line[-1:] == '\n':
                    line = line[:-1]
                callback(line)
            # shutdown ssl layer
            if isinstance(conn, ssl.SSLSocket):
                conn.unwrap()
        finally:
            fp.close()
            conn.close()
        return self.voidresp()

    def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
        self.voidcmd('TYPE I')
        conn = self.transfercmd(cmd, rest)
        try:
            while 1:
                buf = fp.read(blocksize)
                if not buf: break
                conn.sendall(buf)
                if callback: callback(buf)
            # shutdown ssl layer
            if isinstance(conn, ssl.SSLSocket):
                conn.unwrap()
        finally:
            conn.close()
        return self.voidresp()

    def storlines(self, cmd, fp, callback=None):
        self.voidcmd('TYPE A')
        conn = self.transfercmd(cmd)
        try:
            while 1:
                buf = fp.readline(self.maxline + 1)
                if len(buf) > self.maxline:
                    raise Error("got more than %d bytes" % self.maxline)
                if not buf: break
                # Normalise the line ending to CRLF before sending.
                if buf[-2:] != CRLF:
                    if buf[-1] in CRLF: buf = buf[:-1]
                    buf = buf + CRLF
                conn.sendall(buf)
                if callback: callback(buf)
            # shutdown ssl layer
            if isinstance(conn, ssl.SSLSocket):
                conn.unwrap()
        finally:
            conn.close()
        return self.voidresp()
# Register the TLS subclass and widen the catch-all error tuple so
# code catching all_errors also catches SSL failures.
__all__.append('FTP_TLS')
all_errors = (Error, IOError, EOFError, ssl.SSLError)
_150_re = None
def parse150(resp):
'''Parse the '150' response for a RETR request.
Returns the expected transfer size or None; size is not guaranteed to
be present in the 150 message.
'''
if resp[:3] != '150':
raise error_reply, resp
global _150_re
if _150_re is None:
import re
_150_re = re.compile("150 .* \((\d+) bytes\)", re.IGNORECASE)
m = _150_re.match(resp)
if not m:
return None
s = m.group(1)
try:
return int(s)
except (OverflowError, ValueError):
return long(s)
_227_re = None
def parse227(resp):
'''Parse the '227' response for a PASV request.
Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)'
Return ('host.addr.as.numbers', port#) tuple.'''
if resp[:3] != '227':
raise error_reply, resp
global _227_re
if _227_re is None:
import re
_227_re = re.compile(r'(\d+),(\d+),(\d+),(\d+),(\d+),(\d+)')
m = _227_re.search(resp)
if not m:
raise error_proto, resp
numbers = m.groups()
host = '.'.join(numbers[:4])
port = (int(numbers[4]) << 8) + int(numbers[5])
return host, port
def parse229(resp, peer):
'''Parse the '229' response for a EPSV request.
Raises error_proto if it does not contain '(|||port|)'
Return ('host.addr.as.numbers', port#) tuple.'''
if resp[:3] != '229':
raise error_reply, resp
left = resp.find('(')
if left < 0: raise error_proto, resp
right = resp.find(')', left + 1)
if right < 0:
raise error_proto, resp # should contain '(|||port|)'
if resp[left + 1] != resp[right - 1]:
raise error_proto, resp
parts = resp[left + 1:right].split(resp[left+1])
if len(parts) != 5:
raise error_proto, resp
host = peer[0]
port = int(parts[3])
return host, port
def parse257(resp):
'''Parse the '257' response for a MKD or PWD request.
This is a response to a MKD or PWD request: a directory name.
Returns the directoryname in the 257 reply.'''
if resp[:3] != '257':
raise error_reply, resp
if resp[3:5] != ' "':
return '' # Not compliant to RFC 959, but UNIX ftpd does this
dirname = ''
i = 5
n = len(resp)
while i < n:
c = resp[i]
i = i+1
if c == '"':
if i >= n or resp[i] != '"':
break
i = i+1
dirname = dirname + c
return dirname
def print_line(line):
    '''Default retrlines callback: print one line to stdout.'''
    print line
def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
    '''Copy a file from one FTP instance to another without local storage.'''
    if not targetname:
        targetname = sourcename
    type = 'TYPE ' + type
    source.voidcmd(type)
    target.voidcmd(type)
    sourcehost, sourceport = parse227(source.sendcmd('PASV'))
    target.sendport(sourcehost, sourceport)
    # RFC 959: the user must "listen" [...] BEFORE sending the
    # transfer request.
    # So: STOR before RETR, because here the target is a "user".
    treply = target.sendcmd('STOR ' + targetname)
    if treply[:3] not in ('125', '150'):
        raise error_proto # RFC 959
    sreply = source.sendcmd('RETR ' + sourcename)
    if sreply[:3] not in ('125', '150'):
        raise error_proto # RFC 959
    source.voidresp()
    target.voidresp()
class Netrc:
    """Class to parse & provide access to 'netrc' format files.

    See the netrc(4) man page for information on the file format.

    WARNING: This class is obsolete -- use module netrc instead.
    """
    __defuser = None
    __defpasswd = None
    __defacct = None
    # Fix: __init__ reads lines with self.maxline, but this class never
    # defined the attribute, so any parse raised AttributeError.  Use
    # the same 8192-byte limit the FTP class uses (module MAXLINE).
    maxline = 8192

    def __init__(self, filename=None):
        if filename is None:
            if "HOME" in os.environ:
                filename = os.path.join(os.environ["HOME"],
                                        ".netrc")
            else:
                raise IOError("specify file to load or set $HOME")
        self.__hosts = {}
        self.__macros = {}
        fp = open(filename, "r")
        in_macro = 0
        while 1:
            line = fp.readline(self.maxline + 1)
            if len(line) > self.maxline:
                raise Error("got more than %d bytes" % self.maxline)
            if not line:
                break
            # Non-blank lines inside a macro definition belong to it;
            # a blank line terminates the macro.
            if in_macro and line.strip():
                macro_lines.append(line)
                continue
            elif in_macro:
                self.__macros[macro_name] = tuple(macro_lines)
                in_macro = 0
            words = line.split()
            host = user = passwd = acct = None
            default = 0
            i = 0
            # Scan keyword/value pairs on this line.
            while i < len(words):
                w1 = words[i]
                if i + 1 < len(words):
                    w2 = words[i + 1]
                else:
                    w2 = None
                if w1 == 'default':
                    default = 1
                elif w1 == 'machine' and w2:
                    host = w2.lower()
                    i = i + 1
                elif w1 == 'login' and w2:
                    user = w2
                    i = i + 1
                elif w1 == 'password' and w2:
                    passwd = w2
                    i = i + 1
                elif w1 == 'account' and w2:
                    acct = w2
                    i = i + 1
                elif w1 == 'macdef' and w2:
                    macro_name = w2
                    macro_lines = []
                    in_macro = 1
                    break
                i = i + 1
            if default:
                self.__defuser = user or self.__defuser
                self.__defpasswd = passwd or self.__defpasswd
                self.__defacct = acct or self.__defacct
            if host:
                # Merge with any earlier entry for the same machine.
                if host in self.__hosts:
                    ouser, opasswd, oacct = \
                           self.__hosts[host]
                    user = user or ouser
                    passwd = passwd or opasswd
                    acct = acct or oacct
                self.__hosts[host] = user, passwd, acct
        fp.close()

    def get_hosts(self):
        """Return a list of hosts mentioned in the .netrc file."""
        return self.__hosts.keys()

    def get_account(self, host):
        """Returns login information for the named host.

        The return value is a triple containing userid,
        password, and the accounting field.  Missing fields fall
        back to the 'default' entry's values.
        """
        host = host.lower()
        user = passwd = acct = None
        if host in self.__hosts:
            user, passwd, acct = self.__hosts[host]
        user = user or self.__defuser
        passwd = passwd or self.__defpasswd
        acct = acct or self.__defacct
        return user, passwd, acct

    def get_macros(self):
        """Return a list of all defined macro names."""
        return self.__macros.keys()

    def get_macro(self, macro):
        """Return a sequence of lines which define a named macro."""
        return self.__macros[macro]
def test():
    '''Test program.

    Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ...

    -d dir
    -l list
    -p password
    '''
    if len(sys.argv) < 2:
        print test.__doc__
        sys.exit(0)

    debugging = 0
    rcfile = None
    # Each leading '-d' argument bumps the debug level by one.
    while sys.argv[1] == '-d':
        debugging = debugging+1
        del sys.argv[1]
    if sys.argv[1][:2] == '-r':
        # get name of alternate ~/.netrc file:
        rcfile = sys.argv[1][2:]
        del sys.argv[1]
    host = sys.argv[1]
    ftp = FTP(host)
    ftp.set_debuglevel(debugging)
    userid = passwd = acct = ''
    try:
        netrc = Netrc(rcfile)
    except IOError:
        if rcfile is not None:
            sys.stderr.write("Could not open account file"
                             " -- using anonymous login.")
    else:
        try:
            userid, passwd, acct = netrc.get_account(host)
        except KeyError:
            # no account for host
            sys.stderr.write(
                    "No account -- using anonymous login.")
    ftp.login(userid, passwd, acct)
    # Remaining arguments are options and file names to retrieve.
    for file in sys.argv[2:]:
        if file[:2] == '-l':
            ftp.dir(file[2:])
        elif file[:2] == '-d':
            cmd = 'CWD'
            if file[2:]: cmd = cmd + ' ' + file[2:]
            resp = ftp.sendcmd(cmd)
        elif file == '-p':
            # Toggle passive/active transfer mode.
            ftp.set_pasv(not ftp.passiveserver)
        else:
            # Default action: binary-retrieve the file to stdout.
            ftp.retrbinary('RETR ' + file, \
                           sys.stdout.write, 1024)
    ftp.quit()
# Run the interactive test driver when executed as a script.
if __name__ == '__main__':
    test()
| {
"repo_name": "hexlism/xx_net",
"path": "python27/1.0/lib/ftplib.py",
"copies": "3",
"size": "38027",
"license": "bsd-2-clause",
"hash": -5930315979643732000,
"line_mean": 33.8407163054,
"line_max": 98,
"alpha_frac": 0.5174744261,
"autogenerated": false,
"ratio": 4.174204171240395,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0022451108398400784,
"num_lines": 1061
} |
#AngeCryption: getting valid files after encryption
#takes any file as input, and a standard PDF/PNG/JPG as target
#will create a result_file that is source_file with appended 'garbage'
#and once ENcrypted with the chosen algorithm with the supplied script, it will show target_file
#any block cipher is supported as long as the block size matches the target type's header
#Ange Albertini 2014, BSD Licence - with the help of Jean-Philippe Aumasson
# - Added FLV support
import struct
import sys
import binascii
# Magic signatures used to recognise the requested target format.
PNGSIG = '\x89PNG\r\n\x1a\n'
JPGSIG = "\xff\xd8"
FLVSIG = "FLV"

source_file, target_file, result_file, encryption_key, algo = sys.argv[1:6]

if algo.lower() == "aes":
    from Crypto.Cipher import AES
    algo = AES
    BS = 16
else:
    from Crypto.Cipher import DES3 # will work only with JPEG as others require 16 bytes block size
    algo = DES3
    BS = 8

# PKCS#7-style padding up to a whole number of cipher blocks.
pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS) # non-standard padding might be preferred for PDF

#from Crypto import Random
#key = Random.new().read(16)
key = encryption_key

with open(source_file, "rb") as f:
    s = pad(f.read())
with open(target_file, "rb") as f:
    t = pad(f.read())

p = s[:BS] # our first plaintext block
# ECB decryptor used to back-compute the ciphertext block that must come
# first, so an IV can be derived that encrypts p into the wanted header.
ecb_dec = algo.new(key, algo.MODE_ECB)
# we need to generate our first cipher block, depending on the target type
if t.startswith(PNGSIG): #PNG
    assert BS >= 16
    size = len(s) - BS
    # our dummy chunk type
    # 4 letters, first letter should be lowercase to be ignored
    chunktype = 'aaaa'
    # PNG signature, chunk size, our dummy chunk type
    c = PNGSIG + struct.pack(">I",size) + chunktype
    c = ecb_dec.decrypt(c)
    # IV chosen so that CBC-encrypting p produces exactly this header.
    IV = "".join([chr(ord(c[i]) ^ ord(p[i])) for i in range(BS)])
    cbc_enc = algo.new(key, algo.MODE_CBC, IV)
    result = cbc_enc.encrypt(s)
    #write the CRC of the remaining of s at the end of our dummy block
    result = result + struct.pack(">I", binascii.crc32(result[12:]) % 0x100000000)
    #and append the actual data of t, skipping the sig
    result = result + t[8:]
elif t.startswith(JPGSIG): #JPG
    assert BS >= 2
    size = len(s) - BS # we could make this shorter, but then could require padding again
    # JPEG Start of Image, COMment segment marker, segment size, padding
    c = JPGSIG + "\xFF\xFE" + struct.pack(">H",size) + "\0" * 10
    c = ecb_dec.decrypt(c)
    IV = "".join([chr(ord(c[i]) ^ ord(p[i])) for i in range(BS)])
    cbc_enc = algo.new(key, algo.MODE_CBC, IV)
    result = cbc_enc.encrypt(s)
    #and append the actual data of t, skipping the sig
    result = result + t[2:]
elif t.startswith(FLVSIG):
    assert BS >= 9
    size = len(s) - BS # we could make this shorter, but then could require padding again
    # reusing FLV's sig and type, data offset, padding
    c = t[:5] + struct.pack(">I",size + 16) + "\0" * 7
    c = ecb_dec.decrypt(c)
    IV = "".join([chr(ord(c[i]) ^ ord(p[i])) for i in range(BS)])
    cbc_enc = algo.new(key, algo.MODE_CBC, IV)
    result = cbc_enc.encrypt(s)
    #and append the actual data of t, skipping the sig
    result = result + t[9:]
elif t.find("%PDF-") > -1:
    assert BS >= 16
    size = len(s) - BS # we take the whole first 16 bits
    #truncated signature, dummy stream object start
    c = "%PDF-\0obj\nstream"
    c = ecb_dec.decrypt(c)
    IV = "".join([chr(ord(c[i]) ^ ord(p[i])) for i in range(BS)])
    cbc_enc = algo.new(key, algo.MODE_CBC, IV)
    result = cbc_enc.encrypt(s)
    #close the dummy object and append the whole t
    #(we don't know where the sig is, we can't skip anything)
    result = result + "\nendstream\nendobj\n" + t
else:
    print "file type not supported"
    sys.exit()
#we have our result, key and IV
#generate the result file
# The stored file is the CBC *decryption* of the crafted ciphertext, so
# that encrypting it again (with the emitted script) yields the target.
cbc_dec = algo.new(key, algo.MODE_CBC, IV)
with open(result_file, "wb") as f:
    f.write(cbc_dec.decrypt(pad(result)))

#generate the script
print """from Crypto.Cipher import %(algo)s
algo = %(algo)s.new(%(key)s, %(algo)s.MODE_CBC, %(IV)s)
with open(%(source)s, "rb") as f:
    d = f.read()
d = algo.encrypt(d)
with open("dec-" + %(target)s, "wb") as f:
    f.write(d)""" % {
    'algo': algo.__name__.split(".")[-1],
    'key':`key`,
    'IV':`IV`,
    'source':`result_file`,
    'target':`target_file`}
| {
"repo_name": "kanpol/angeak",
"path": "polyglot/angecrypt.py",
"copies": "3",
"size": "4404",
"license": "mit",
"hash": 4755520906795119000,
"line_mean": 28.7972027972,
"line_max": 112,
"alpha_frac": 0.6153496821,
"autogenerated": false,
"ratio": 3.062586926286509,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.517793660838651,
"avg_score": null,
"num_lines": null
} |
"""Angelina URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.views.static import serve
import xadmin
from Angelina import settings
from users.views import *
urlpatterns = [
    url(r'^$', IndexView.as_view(), name='index'),  # home page
    url(r'^xadmin/', xadmin.site.urls),  # admin backend
    url(r'^captcha/', include('captcha.urls')),  # captcha
    url(r'^media/(?P<path>.*)/$', serve, {'document_root': settings.MEDIA_ROOT}),  # serve uploaded media files
    url(r'^login/$', LoginView.as_view(), name="login"),  # login
    url(r'^register/$', RegisterView.as_view(), name='register'),  # registration
    url(r'^logout/$', LogoutView.as_view(), name="logout"),  # logout
    url(r'^active/(?P<active_code>.*)/$', ActiveUserView.as_view(), name="active"),  # account activation link
    url(r'^forgetpwd/$', ForgetPwdView.as_view(), name="forgetpwd"),  # forgotten-password request
    url(r'^reset/(?P<reset_code>.*)/$', ResetView.as_view(), name="reset_pwd"),  # password reset link
    url(r'^modify_pwd/$', ModifyPwdView.as_view(), name="modify_pwd"),  # password change endpoint
    url(r'^org/', include('organization.urls', namespace="org")),  # course-organization URLs
    url(r'^course/', include('courses.urls', namespace="course")),  # course URLs
    url(r'^users/', include('users.urls', namespace="user")),  # user URLs
]

# Global error-handler views (404 / 403 / 500)
handler404 = 'users.views.page_not_found'
handler403 = 'users.views.forbidden'
handler500 = 'users.views.page_error'
| {
"repo_name": "anshengme/Angelina",
"path": "Angelina/urls.py",
"copies": "1",
"size": "2096",
"license": "mit",
"hash": 7916763786252801000,
"line_mean": 44.7674418605,
"line_max": 94,
"alpha_frac": 0.6600609756,
"autogenerated": false,
"ratio": 2.8114285714285714,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8966271648303246,
"avg_score": 0.0010435797450650268,
"num_lines": 43
} |
import math
import pygame, sys, colorsys
from pygame.locals import *
pygame.init()
WIDTH, HEIGHT = 700, 700  # alter the screen dimensions here
screen = pygame.display.set_mode((WIDTH, HEIGHT))
position = [0, 0]  # NOTE(review): never read below — candidate for removal
def degrees_to_radians(angle):
    """Convert *angle* in degrees to radians, normalised to [0, 2*pi)."""
    rad = angle * math.pi / 180
    return rad % (2 * math.pi)

def angle_to_point(start, length, angle):
    """Return the point *length* away from *start* at *angle* degrees.

    Fix: the original mutated the caller's *start* list in place; a new
    two-element list is returned instead (all callers in this script use
    only the return value).  Coordinates are still rounded to 5 decimal
    places, exactly as before.
    """
    angle = degrees_to_radians(angle)
    x = start[0] + length * math.cos(angle)
    y = start[1] + length * math.sin(angle)
    # Same rounding scheme as the original (format to 5 decimals).
    x = float("{0:.5f}".format(x))
    y = float("{0:.5f}".format(y))
    return [x, y]
a = 0  # initial angle (degrees)
h = 0  # initial hue
while True:
    #finds the coordiate along the circle to plot, try changing the setting the
    #centre of the circle to be a point along a different circle for interesting
    #effects eg:
    #x, y = angle_to_point(angle_to_point([WIDTH/2,HEIGHT/2], 100, a*5), 50, a*0.25)
    #which create a donut
    #the following example just draws a cicle centered to the screen radius 100
    x, y = angle_to_point([WIDTH/2,HEIGHT/2], 100, a)
    x = int(x) #converts the x and y coodiates into integers to be plotted
    y = int(y)
    # NOTE(review): operator precedence makes this h + (0.0005 % 1), i.e.
    # h grows without bound instead of wrapping at 1 — probably meant
    # (h + 0.0005) % 1; confirm intended behaviour.
    h = h + 0.0005 % 1 #alters the hue but ensures it's never above 1, the smaller the increment the slower the gradient
    colour = colorsys.hsv_to_rgb(h,1,1) #converts the hue into an rgb value
    colour = (colour[0] * 255, colour[1] * 255, colour[2] * 255) #converts the rgb value into on pygame understands
    screen.set_at((x,y), colour) #you can either plot a single point using this line
    pygame.draw.circle(screen, colour, (x, y), 2) # or if you want a thicker line you can plot circles, alter the radius for different thicknesses
    a += 0.1 #increments the angle, the smaller this value, the more solid the line
    pygame.display.update() #updates the screen, if you want it to draw faster only update if i % a value == 0 eg:
    #if i % 18 ==0:
    #pygame.display.update()
    for event in pygame.event.get():
        if event.type == QUIT: #checks to see if you've pressed the x button
            pygame.quit() #if you have its shuts the program down
            sys.exit()
| {
"repo_name": "MiniGirlGeek/Spirograph",
"path": "angle.py",
"copies": "1",
"size": "2281",
"license": "mit",
"hash": -7730023757864572000,
"line_mean": 40.4727272727,
"line_max": 146,
"alpha_frac": 0.6733888645,
"autogenerated": false,
"ratio": 3.263233190271817,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4436622054771817,
"avg_score": null,
"num_lines": null
} |
# NOTE(review): fragment — depends on names not defined in this chunk:
# radians (math.radians?), np (numpy), plt (matplotlib.pyplot), cm and
# mpl (matplotlib), plus some_array_of_azimuth_directions from the caller.
angle = radians(10.)  # angular bin width: 10 degrees, in radians
# NOTE(review): this quotient is a float, but `[0]*patches` and
# `np.ones(patches)` below need an integer count — looks like this
# should be int(round(...)); confirm against the original script.
patches = radians(360.)/angle
theta = np.arange(0,radians(360.),angle)  # left edge of each bin
count = [0]*patches
# Histogram the azimuth directions into the angular bins.
for i, item in enumerate(some_array_of_azimuth_directions):
    temp = int((item - item%angle)/angle)
    count[temp] += 1
width = angle * np.ones(patches)
# force square figure and square axes looks better for polar, IMO
fig = plt.figure(figsize=(8,8))
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8], polar=True)
rmax = max(count) + 1
ax.set_rlim(0,rmax)
ax.set_theta_offset(np.pi/2)  # put zero at the top (compass-style)
ax.set_thetagrids(np.arange(0,360,10))
ax.set_theta_direction(-1)  # angles increase clockwise
# project strike distribution as histogram bars
bars = ax.bar(theta, count, width=width)
r_values = []
colors = []
# Colour each bar by its height relative to the tallest bar.
for r,bar in zip(count, bars):
    r_values.append(r/float(max(count)))
    colors.append(cm.jet(r_values[-1], alpha=0.5))
    bar.set_facecolor(colors[-1])
    bar.set_edgecolor('grey')
    bar.set_alpha(0.5)
# Add colorbar, make sure to specify tick locations to match desired ticklabels
colorlist = []
r_values.sort()
values = []
# Build a discrete colormap with one entry per distinct bar height.
# NOTE(review): the membership test compares the normalised val against
# the *scaled* entries appended below — verify this de-dup logic.
for val in r_values:
    if val not in values:
        values.append(val*float(max(count)))
        color = cm.jet(val, alpha=0.5)
        if color not in colorlist:
            colorlist.append(color)
cpt = mpl.colors.ListedColormap(colorlist)
bounds = range(max(count)+1)
norm = mpl.colors.BoundaryNorm(values, cpt.N-1)
cax = fig.add_axes([0.97, 0.3, 0.03, 0.4])
cb = mpl.colorbar.ColorbarBase(cax, cmap=cpt,
                               norm=norm,
                               boundaries=bounds,
                               # Make the length of each extension
                               # the same as the length of the
                               # interior colors:
                               extendfrac='auto',
                               ticks=[bounds[i] for i in range(0, len(bounds), 2)],
                               #ticks=bounds,
                               spacing='uniform')
"repo_name": "Leviyu/Maligaro",
"path": "cpp_lib/backup/10_plotly/rose.py",
"copies": "1",
"size": "1966",
"license": "mit",
"hash": -7444637095276663000,
"line_mean": 32.8965517241,
"line_max": 89,
"alpha_frac": 0.5696846389,
"autogenerated": false,
"ratio": 3.320945945945946,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4390630584845946,
"avg_score": null,
"num_lines": null
} |
"""Angles and anomalies.
"""
from astropy import units as u
from poliastro.core.angles import (D_to_nu as D_to_nu_fast, nu_to_D as nu_to_D_fast,
E_to_nu as E_to_nu_fast, nu_to_E as nu_to_E_fast,
F_to_nu as F_to_nu_fast, nu_to_F as nu_to_F_fast,
M_to_D as M_to_D_fast, D_to_M as D_to_M_fast,
M_to_E as M_to_E_fast, E_to_M as E_to_M_fast,
M_to_F as M_to_F_fast, F_to_M as F_to_M_fast,
fp_angle as fp_angle_fast,
M_to_nu as M_to_nu_fast, nu_to_M as nu_to_M_fast)
@u.quantity_input(D=u.rad)
def D_to_nu(D):
    """True anomaly from parabolic eccentric anomaly.

    Parameters
    ----------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.

    Returns
    -------
    nu : ~astropy.units.Quantity
        True anomaly, expressed in the same unit as *D*.

    Notes
    -----
    Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
    "Robust resolution of Kepler's equation in all eccentricity regimes."
    Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
    """
    D_rad = D.to(u.rad).value
    nu = D_to_nu_fast(D_rad) * u.rad
    return nu.to(D.unit)
@u.quantity_input(nu=u.rad)
def nu_to_D(nu):
    """Parabolic eccentric anomaly from true anomaly.

    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.

    Returns
    -------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly, expressed in the same unit as *nu*.

    Notes
    -----
    Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
    "Robust resolution of Kepler's equation in all eccentricity regimes."
    Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
    """
    nu_rad = nu.to(u.rad).value
    D = nu_to_D_fast(nu_rad) * u.rad
    return D.to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_E(nu, ecc):
    """Eccentric anomaly from true anomaly.

    .. versionadded:: 0.4.0

    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.

    Returns
    -------
    E : ~astropy.units.Quantity
        Eccentric anomaly, expressed in the same unit as *nu*.
    """
    nu_rad = nu.to(u.rad).value
    E = nu_to_E_fast(nu_rad, ecc.value) * u.rad
    return E.to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_F(nu, ecc):
"""Hyperbolic eccentric anomaly from true anomaly.
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
Note
-----
Taken from Curtis, H. (2013). *Orbital mechanics for engineering students*. 167
"""
return (nu_to_F_fast(nu.to(u.rad).value, ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_nu(E, ecc):
"""True anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : ~astropy.units.Quantity
Eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
"""
return (E_to_nu_fast(E.to(u.rad).value, ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_nu(F, ecc):
"""True anomaly from hyperbolic eccentric anomaly.
Parameters
----------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
"""
return (F_to_nu_fast(F.to(u.rad).value, ecc.value) * u.rad).to(F.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_E(M, ecc):
    """Eccentric anomaly from mean anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    E : ~astropy.units.Quantity
        Eccentric anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (M_to_E_fast(M.to(u.rad).value, ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_F(M, ecc):
    """Hyperbolic eccentric anomaly from mean anomaly.
    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (>1).
    Returns
    -------
    F : ~astropy.units.Quantity
        Hyperbolic eccentric anomaly.
    """
    return (M_to_F_fast(M.to(u.rad).value, ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_D(M, ecc):
    """Parabolic eccentric anomaly from mean anomaly.
    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (near-parabolic regime; the original "(>1)" note was a
        copy-paste from the hyperbolic case).
    Returns
    -------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
    """
    return (M_to_D_fast(M.to(u.rad).value, ecc.value) * u.rad).to(M.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_M(E, ecc):
    """Mean anomaly from eccentric anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    E : ~astropy.units.Quantity
        Eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (E_to_M_fast(E.to(u.rad).value, ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_M(F, ecc):
    """Mean anomaly from hyperbolic eccentric anomaly.
    Parameters
    ----------
    F : ~astropy.units.Quantity
        Hyperbolic eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (>1).
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    return (F_to_M_fast(F.to(u.rad).value, ecc.value) * u.rad).to(F.unit)
@u.quantity_input(D=u.rad, ecc=u.one)
def D_to_M(D, ecc):
    """Mean anomaly from parabolic eccentric anomaly.
    Parameters
    ----------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    return (D_to_M_fast(D.to(u.rad).value, ecc.value) * u.rad).to(D.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_nu(M, ecc, delta=1e-2):
    """True anomaly from mean anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    delta : float (optional)
        threshold of near-parabolic regime definition (from Davide Farnocchia et al)
    Returns
    -------
    nu : ~astropy.units.Quantity
        True anomaly.
    Examples
    --------
    >>> M_to_nu(30.0 * u.deg, 0.06 * u.one)
    <Quantity 33.67328493 deg>
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (M_to_nu_fast(M.to(u.rad).value, ecc.value, delta) * u.rad).to(M.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_M(nu, ecc, delta=1e-2):
    """Mean anomaly from true anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    delta : float (optional)
        threshold of near-parabolic regime definition (from Davide Farnocchia et al)
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    return (nu_to_M_fast(nu.to(u.rad).value, ecc.value, delta) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def fp_angle(nu, ecc):
    """Flight path angle.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Note
    -----
    Algorithm taken from Vallado 2007, pp. 113.
    """
    return (fp_angle_fast(nu.to(u.rad).value, ecc.value) * u.rad).to(nu.unit)
| {
"repo_name": "newlawrence/poliastro",
"path": "src/poliastro/twobody/angles.py",
"copies": "1",
"size": "8044",
"license": "mit",
"hash": 256057497540889380,
"line_mean": 22.4402332362,
"line_max": 84,
"alpha_frac": 0.5722636816,
"autogenerated": false,
"ratio": 2.9667896678966788,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40390533494966785,
"avg_score": null,
"num_lines": null
} |
"""Angles and anomalies.
"""
import numpy as np
from astropy import coordinates, units as u
from poliastro import constants
from poliastro.core.angles import (
D_to_M as D_to_M_fast,
D_to_nu as D_to_nu_fast,
E_to_M as E_to_M_fast,
E_to_nu as E_to_nu_fast,
F_to_M as F_to_M_fast,
F_to_nu as F_to_nu_fast,
M_to_D as M_to_D_fast,
M_to_E as M_to_E_fast,
M_to_F as M_to_F_fast,
fp_angle as fp_angle_fast,
nu_to_D as nu_to_D_fast,
nu_to_E as nu_to_E_fast,
nu_to_F as nu_to_F_fast,
)
@u.quantity_input(D=u.rad)
def D_to_nu(D):
    """True anomaly from parabolic eccentric anomaly.
    Parameters
    ----------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
    Returns
    -------
    nu : ~astropy.units.Quantity
        True anomaly.
    Notes
    -----
    Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
    "Robust resolution of Kepler’s equation in all eccentricity regimes."
    Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (D_to_nu_fast(D.to(u.rad).value) * u.rad).to(D.unit)
@u.quantity_input(nu=u.rad)
def nu_to_D(nu):
    """Parabolic eccentric anomaly from true anomaly.
    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    Returns
    -------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
    Notes
    -----
    Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
    "Robust resolution of Kepler’s equation in all eccentricity regimes."
    Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
    """
    return (nu_to_D_fast(nu.to(u.rad).value) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_E(nu, ecc):
    """Eccentric anomaly from true anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    E : ~astropy.units.Quantity
        Eccentric anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (nu_to_E_fast(nu.to(u.rad).value, ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_F(nu, ecc):
    """Hyperbolic eccentric anomaly from true anomaly.
    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (>1).
    Returns
    -------
    F : ~astropy.units.Quantity
        Hyperbolic eccentric anomaly.
    Note
    -----
    Taken from Curtis, H. (2013). *Orbital mechanics for engineering students*. 167
    """
    return (nu_to_F_fast(nu.to(u.rad).value, ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_nu(E, ecc):
    """True anomaly from eccentric anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    E : ~astropy.units.Quantity
        Eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    nu : ~astropy.units.Quantity
        True anomaly.
    """
    return (E_to_nu_fast(E.to(u.rad).value, ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_nu(F, ecc):
    """True anomaly from hyperbolic eccentric anomaly.
    Parameters
    ----------
    F : ~astropy.units.Quantity
        Hyperbolic eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (>1).
    Returns
    -------
    nu : ~astropy.units.Quantity
        True anomaly.
    """
    return (F_to_nu_fast(F.to(u.rad).value, ecc.value) * u.rad).to(F.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_E(M, ecc):
    """Eccentric anomaly from mean anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    E : ~astropy.units.Quantity
        Eccentric anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (M_to_E_fast(M.to(u.rad).value, ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_F(M, ecc):
    """Hyperbolic eccentric anomaly from mean anomaly.
    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (>1).
    Returns
    -------
    F : ~astropy.units.Quantity
        Hyperbolic eccentric anomaly.
    """
    return (M_to_F_fast(M.to(u.rad).value, ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad)
def M_to_D(M):
    """Parabolic eccentric anomaly from mean anomaly.

    The decorator previously declared ``ecc=u.one`` although the signature
    no longer takes an eccentricity argument; the stale entry is removed.

    Parameters
    ----------
    M : ~astropy.units.Quantity
        Mean anomaly.
    Returns
    -------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (M_to_D_fast(M.to(u.rad).value) * u.rad).to(M.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_M(E, ecc):
    """Mean anomaly from eccentric anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    E : ~astropy.units.Quantity
        Eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (E_to_M_fast(E.to(u.rad).value, ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_M(F, ecc):
    """Mean anomaly from hyperbolic eccentric anomaly.
    Parameters
    ----------
    F : ~astropy.units.Quantity
        Hyperbolic eccentric anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity (>1).
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    return (F_to_M_fast(F.to(u.rad).value, ecc.value) * u.rad).to(F.unit)
@u.quantity_input(D=u.rad)
def D_to_M(D):
    """Mean anomaly from parabolic eccentric anomaly.

    The decorator previously declared ``ecc=u.one`` although the signature
    no longer takes an eccentricity argument; the stale entry is removed.

    Parameters
    ----------
    D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
    Returns
    -------
    M : ~astropy.units.Quantity
        Mean anomaly.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (D_to_M_fast(D.to(u.rad).value) * u.rad).to(D.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def fp_angle(nu, ecc):
    """Flight path angle.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : ~astropy.units.Quantity
        True anomaly.
    ecc : ~astropy.units.Quantity
        Eccentricity.
    Note
    -----
    Algorithm taken from Vallado 2007, pp. 113.
    """
    # Strip units, delegate to the fast core kernel, restore the caller's unit.
    return (fp_angle_fast(nu.to(u.rad).value, ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(ltan=u.hourangle)
def raan_from_ltan(epoch, ltan=12.0):
    """RAAN angle from LTAN for SSO around the earth
    Parameters
    ----------
    epoch : ~astropy.time.Time
        Value of time to calculate the RAAN for
    ltan: ~astropy.units.Quantity
        Decimal hour between 0 and 24
    Returns
    -------
    RAAN: ~astropy.units.Quantity
        Right ascension of the ascending node angle in GCRS
    Note
    ----
    Calculations of the sun mean longitude and equation of time
    follow "Fundamentals of Astrodynamics and Applications"
    Fourth edition by Vallado, David A.
    """
    # Julian centuries since J2000 in the UT1 and TDB timescales.
    # NOTE(review): tagging these with u.deg looks odd — it makes the
    # products with the degree-valued coefficients below dimensionally
    # convenient, but confirm against Vallado's formulation.
    T_UT1 = ((epoch.ut1 - constants.J2000).value / 36525.0) * u.deg
    T_TDB = ((epoch.tdb - constants.J2000).value / 36525.0) * u.deg
    # Apparent sun position
    sun_position = coordinates.get_sun(epoch)
    # Calculate the sun apparent local time
    salt = sun_position.ra + 12 * u.hourangle
    # Use the equation of time to calculate the mean sun local time (fictional sun without anomalies)
    # Sun mean anomaly
    M_sun = 357.5291092 * u.deg + 35999.05034 * T_TDB
    # Sun mean longitude
    l_sun = 280.460 * u.deg + 36000.771 * T_UT1
    l_ecliptic_part2 = 1.914666471 * u.deg * np.sin(
        M_sun
    ) + 0.019994643 * u.deg * np.sin(2 * M_sun)
    l_ecliptic = l_sun + l_ecliptic_part2
    eq_time = (
        -l_ecliptic_part2
        + 2.466 * u.deg * np.sin(2 * l_ecliptic)
        - 0.0053 * u.deg * np.sin(4 * l_ecliptic)
    )
    # Calculate sun mean local time
    smlt = salt + eq_time
    # Desired angle between sun and ascending node
    alpha = (coordinates.Angle(ltan).wrap_at(24 * u.hourangle)).to(u.rad)
    # Use the mean sun local time calculate needed RAAN for given LTAN
    raan = smlt + alpha
    return raan
| {
"repo_name": "poliastro/poliastro",
"path": "src/poliastro/twobody/angles.py",
"copies": "1",
"size": "8269",
"license": "mit",
"hash": -6682440819581601000,
"line_mean": 22.0222841226,
"line_max": 101,
"alpha_frac": 0.5962492438,
"autogenerated": false,
"ratio": 2.939189189189189,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9035136083637192,
"avg_score": 0.00006046987039941324,
"num_lines": 359
} |
"""Angles and anomalies.
"""
import numpy as np
from astropy import units as u
from scipy import optimize
def _kepler_equation(E, M, ecc):
return E - ecc * np.sin(E) - M
def _kepler_equation_prime(E, M, ecc):
return 1 - ecc * np.cos(E)
def _kepler_equation_hyper(F, M, ecc):
return -F + ecc * np.sinh(F) - M
def _kepler_equation_prime_hyper(F, M, ecc):
return ecc * np.cosh(F) - 1
def nu_to_E(nu, ecc):
    """Eccentric anomaly from true anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : float
        True anomaly (rad).
    ecc : float
        Eccentricity.
    Returns
    -------
    E : float
        Eccentric anomaly.
    """
    # Standard half-angle relation: tan(E/2) = sqrt((1-e)/(1+e)) tan(nu/2).
    scale = np.sqrt((1 - ecc) / (1 + ecc))
    half_tangent = np.tan(nu / 2)
    return 2 * np.arctan(scale * half_tangent)
def nu_to_F(nu, ecc):
    """Hyperbolic eccentric anomaly from true anomaly.
    Parameters
    ----------
    nu : float
        True anomaly (rad).
    ecc : float
        Eccentricity (>1).
    Returns
    -------
    F : float
        Hyperbolic eccentric anomaly.
    Note
    -----
    Taken from Curtis, H. (2013). *Orbital mechanics for engineering students*. 167
    """
    # tanh(F/2) = sqrt((e-1)/(e+1)) tan(nu/2), solved for F via log;
    # the result is tagged with astropy radians for downstream unit math.
    F = np.log((np.sqrt(ecc + 1) + np.sqrt(ecc - 1) * np.tan(nu / 2)) /
               (np.sqrt(ecc + 1) - np.sqrt(ecc - 1) * np.tan(nu / 2))) * u.rad
    return F
def E_to_nu(E, ecc):
    """True anomaly from eccentric anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    E : float
        Eccentric anomaly (rad).
    ecc : float
        Eccentricity.
    Returns
    -------
    nu : float
        True anomaly (rad).
    """
    # Inverse of the half-angle relation used by nu_to_E.
    scale = np.sqrt((1 + ecc) / (1 - ecc))
    half_tangent = np.tan(E / 2)
    return 2 * np.arctan(scale * half_tangent)
def F_to_nu(F, ecc):
    """True anomaly from hyperbolic eccentric anomaly.
    Parameters
    ----------
    F : float
        Hyperbolic eccentric anomaly (rad).
    ecc : float
        Eccentricity (>1).
    Returns
    -------
    nu : float
        True anomaly (rad).
    """
    # F may carry astropy radian units (see nu_to_F); the equivalency
    # context lets np.exp accept a dimensionless-angle Quantity.
    with u.set_enabled_equivalencies(u.dimensionless_angles()):
        nu = 2 * np.arctan((np.exp(F) * np.sqrt(ecc + 1) - np.sqrt(ecc + 1)) /
                           (np.exp(F) * np.sqrt(ecc - 1) + np.sqrt(ecc - 1)))
    return nu
def M_to_E(M, ecc):
    """Eccentric anomaly from mean anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    M : float
        Mean anomaly (rad).
    ecc : float
        Eccentricity.
    Returns
    -------
    E : float
        Eccentric anomaly.
    """
    # Newton iteration on Kepler's equation, seeded with E0 = M.
    with u.set_enabled_equivalencies(u.dimensionless_angles()):
        E = optimize.newton(_kepler_equation, M, _kepler_equation_prime,
                            args=(M, ecc))
    return E
def M_to_F(M, ecc):
    """Hyperbolic eccentric anomaly from mean anomaly.
    Parameters
    ----------
    M : float
        Mean anomaly (rad).
    ecc : float
        Eccentricity (>1).
    Returns
    -------
    F : float
        Hyperbolic eccentric anomaly.
    """
    # Newton iteration on the hyperbolic Kepler equation, seeded with F0 = M.
    with u.set_enabled_equivalencies(u.dimensionless_angles()):
        F = optimize.newton(_kepler_equation_hyper, M, _kepler_equation_prime_hyper,
                            args=(M, ecc))
    return F
def E_to_M(E, ecc):
    """Mean anomaly from eccentric anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    E : float
        Eccentric anomaly (rad).
    ecc : float
        Eccentricity.
    Returns
    -------
    M : float
        Mean anomaly (rad).
    """
    # Direct evaluation: M = E - ecc*sin(E) (the residual with M set to 0).
    with u.set_enabled_equivalencies(u.dimensionless_angles()):
        M = _kepler_equation(E, 0.0 * u.rad, ecc)
    return M
def F_to_M(F, ecc):
    """Mean anomaly from hyperbolic eccentric anomaly.
    Parameters
    ----------
    F : float
        Hyperbolic eccentric anomaly (rad).
    ecc : float
        Eccentricity (>1).
    Returns
    -------
    M : float
        Mean anomaly (rad).
    """
    # Direct evaluation: M = ecc*sinh(F) - F (the residual with M set to 0).
    with u.set_enabled_equivalencies(u.dimensionless_angles()):
        M = _kepler_equation_hyper(F, 0.0 * u.rad, ecc)
    return M
def M_to_nu(M, ecc):
    """True anomaly from mean anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    M : float
        Mean anomaly (rad).
    ecc : float
        Eccentricity.
    Returns
    -------
    nu : float
        True anomaly (rad).
    Examples
    --------
    >>> nu = M_to_nu(np.radians(30.0), 0.06)
    >>> np.rad2deg(nu)
    33.673284930211658
    """
    # Route through the hyperbolic or elliptic anomaly depending on regime.
    if ecc > 1:
        return F_to_nu(M_to_F(M, ecc), ecc)
    return E_to_nu(M_to_E(M, ecc), ecc)
def nu_to_M(nu, ecc):
    """Mean anomaly from true anomaly.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : float
        True anomaly (rad).
    ecc : float
        Eccentricity.
    Returns
    -------
    M : float
        Mean anomaly (rad).
    """
    # Route through the hyperbolic or elliptic anomaly depending on regime.
    if ecc > 1:
        return F_to_M(nu_to_F(nu, ecc), ecc)
    return E_to_M(nu_to_E(nu, ecc), ecc)
def fp_angle(nu, ecc):
    """Flight path angle.
    .. versionadded:: 0.4.0
    Parameters
    ----------
    nu : float
        True anomaly (rad).
    ecc : float
        Eccentricity.
    Note
    -----
    Algorithm taken from Vallado 2007, pp. 113.
    """
    numerator = ecc * np.sin(nu)
    denominator = 1 + ecc * np.cos(nu)
    return np.arctan2(numerator, denominator)
| {
"repo_name": "anhiga/poliastro",
"path": "src/poliastro/twobody/angles.py",
"copies": "1",
"size": "5312",
"license": "mit",
"hash": -945965548803119500,
"line_mean": 17.6385964912,
"line_max": 84,
"alpha_frac": 0.5124246988,
"autogenerated": false,
"ratio": 3.1581450653983354,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4170569764198335,
"avg_score": null,
"num_lines": null
} |
"""Anglit distribution."""
import numpy
from ..baseclass import SimpleDistribution, ShiftScaleDistribution
class anglit(SimpleDistribution):
    """Anglit distribution (raw kernel on the interval [-pi/4, pi/4])."""
    def __init__(self):
        super(anglit, self).__init__()
    def _pdf(self, x):
        # Probability density: cos(2x) on the support.
        return numpy.cos(2*x)
    def _cdf(self, x):
        # Cumulative distribution: sin^2(x + pi/4).
        return numpy.sin(x+numpy.pi/4)**2.0
    def _ppf(self, q):
        # Inverse CDF (quantile function).
        return (numpy.arcsin(numpy.sqrt(q))-numpy.pi/4)
    def _lower(self):
        # Lower bound of the support.
        return -numpy.pi/4
    def _upper(self):
        # Upper bound of the support.
        return numpy.pi/4
class Anglit(ShiftScaleDistribution):
    """
    Anglit distribution.
    Args:
        loc (float, Distribution):
            Location parameter
        scale (float, Distribution):
            Scaling parameter
    Examples:
        >>> distribution = chaospy.Anglit()
        >>> distribution
        Anglit()
        >>> uloc = numpy.linspace(0, 1, 6)
        >>> uloc
        array([0. , 0.2, 0.4, 0.6, 0.8, 1. ])
        >>> xloc = distribution.inv(uloc)
        >>> xloc.round(3)
        array([-0.785, -0.322, -0.101,  0.101,  0.322,  0.785])
        >>> numpy.allclose(distribution.fwd(xloc), uloc)
        True
        >>> distribution.pdf(xloc).round(3)
        array([0.  , 0.8 , 0.98, 0.98, 0.8 , 0.  ])
        >>> distribution.sample(4).round(3)
        array([ 0.156, -0.439,  0.561, -0.018])
    """
    def __init__(self, scale=1, shift=0):
        # Wrap the raw anglit kernel with an affine shift/scale transform.
        super(Anglit, self).__init__(dist=anglit(), scale=scale, shift=shift)
| {
"repo_name": "jonathf/chaospy",
"path": "chaospy/distributions/collection/anglit.py",
"copies": "1",
"size": "1484",
"license": "mit",
"hash": 5427485560274517000,
"line_mean": 24.1525423729,
"line_max": 77,
"alpha_frac": 0.5444743935,
"autogenerated": false,
"ratio": 3.2472647702407,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42917391637407,
"avg_score": null,
"num_lines": null
} |
"""AnGroDeto URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from market_blog import urls as market_urls
from market_blog import views
from django.conf import settings
from django.conf.urls.static import static
# URL routing table: admin site, site index, and the market blog app.
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^$', views.index, name='index'),
    url(r'^market/', include(market_urls)),
]
# Serve user-uploaded media through Django itself only in development;
# truthiness test replaces the non-idiomatic `is True` comparison.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| {
"repo_name": "SHARPRISE/AnGroDeto",
"path": "AnGroDeto/urls.py",
"copies": "1",
"size": "1105",
"license": "apache-2.0",
"hash": -2290727541376042500,
"line_mean": 35.8333333333,
"line_max": 80,
"alpha_frac": 0.7140271493,
"autogenerated": false,
"ratio": 3.4968354430379747,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47108625923379743,
"avg_score": null,
"num_lines": null
} |
#AngryBirds
#The original code was modified into an object-oriented setting, creating the class AngryBirdsGame. We define the basic methods that
# will let us extract all game features and manipulate the underlying game mechanics- we will now be able to set levels, get scores,
# extract pig positions, perform an action, run a certain number of frames, and so on.
import os
import sys
import math
import time
import pygame
current_path = os.getcwd()
sys.path.insert(0, os.path.join(current_path, "../pymunk-4.0.0"))
import pymunk as pm
from characters import Bird
from level import Level
class AngryBirdsGame:
show = False
def getMaxScore(self):
max_score = 10000*len(self.level.pigs) + 10000*len(self.level.number_of_birds-1) + 5000*(len(self.level.beams) + len(self.level.columns))
return max_score
def getScore(self):
return self.score
def getNumberRemainingBirds(self):
return self.level.number_of_birds + len(self.birds)
def getBirds(self):
# Returns a list of Bird objects
return self.birds
def getBirdPositions(self):
return [bird.getPosition() for bird in self.getBirds()]
def getBirdRadii(self):
return [bird.getRadius() for bird in self.getBirds()]
def getBirdPositionsAndRadius(self):
return [(bird.getPosition(), bird.getRadius()) for bird in self.getBirds()]
def getPigs(self):
# Returns a list of Pig objects
return self.level.pigs
def getPigPositions(self): # TODO note (LR): this is currently inconsistent to other getters, but useful...
return [(pig.getPosition()[0], pig.getPosition()[1]) for pig in self.getPigs()]
def getPigRadii(self):
return [pig.getRadius() for pig in self.getPigs()]
def getPigPositionsAndRadius(self):
return [(pig.getPosition(), pig.getRadius()) for pig in self.getPigs()]
def getPolys(self):
#Some useful methods: poly.shape
return self.beams + self.columns
def getPolyFeatures(self):
return [(poly.getPosition(), poly.getVertices(), poly.getRadius()) for poly in self.getPolys()]
def getLines(self):
return self.static_lines1
def getScore(self):
return self.score
def runFrames(self,nFrames,show=False):
#Runs the Angry Birds Emulator for nFrames number of frames
self.show=show
for _ in range(nFrames):
self.run()
def runUntilStatic(self, show=False):
"""
Runs the game (i.e. n frames) until it has become static again. Useful to wait for the successor state of an action
"""
self.show=show
while (not self.isStatic() and not self.game_state==4):
self.run()
def isStatic(self):
"""
Checks that every bird, polygon and pig are static.
"""
# TODO - polys and pigs may have velocity zero but because of breaking point. This is unlikely but would like to check.
if len(self.getBirds())>0:
return False
for pig in self.getPigs():
if pig.getVelocity()>8:
return False
for poly in self.getPolys():
if poly.getVelocity()>8:
return False
return True
    def humanPlay(self):
        # Allows a human player to play the game: enable rendering and the
        # mouse-driven controls, then loop frames until the game quits.
        self.show = True
        self.humanPlaying = True
        while self.running:
            self.run()

    def restartGame(self):
        # Reset the current level: clear all physics objects, reload the
        # level definition, and zero score/trajectory/game-state.
        self.restart()
        self.level.load_level()
        self.game_state = 0
        self.bird_path = []
        self.score = 0
    def performAction(self,angle,distance):
        # Given an angle and some distance, performs the given action on the game.
        # The sling release is emulated by setting the launch parameters and
        # raising the takeAction flag, which run() consumes on the next frame.
        self.angle = angle
        self.mouse_distance = distance
        self.takeAction = True
        self.run() # Need to run one frame for this to take effect.

    def getRopeLength(self):
        # NOTE: the attribute keeps the original misspelling "rope_lenght";
        # renaming it would break other code that reads the attribute.
        return self.rope_lenght
def startNewLevel(self):
self.restart()
self.level.number += 1
self.game_state = 0
self.level.load_level()
self.score = 0
self.bird_path = []
self.bonus_score_once = True
def startAtLevel(self,level):
self.restart()
self.level.number=level
self.game_state = 0
self.level.load_level()
self.score = 0
self.bird_path = []
self.bonus_score_once = True
def getLevel(self):
return self.level.number
    def __init__(self):
        """Initialise pygame, load all sprites, build the pymunk physics
        world and load the first level."""
        pygame.init()
        self.screen = pygame.display.set_mode((1200, 650))
        # --- Sprite loading -------------------------------------------------
        self.redbird = pygame.image.load(
            "../resources/images/red-bird3.png").convert_alpha()
        self.background2 = pygame.image.load(
            "../resources/images/background3.png").convert_alpha()
        self.sling_image = pygame.image.load(
            "../resources/images/sling-3.png").convert_alpha()
        self.full_sprite = pygame.image.load(
            "../resources/images/full-sprite.png").convert_alpha()
        rect = pygame.Rect(181, 1050, 50, 50)
        self.cropped = self.full_sprite.subsurface(rect).copy()
        self.pig_image = pygame.transform.scale(self.cropped, (30, 30))
        self.buttons = pygame.image.load(
            "../resources/images/selected-buttons.png").convert_alpha()
        self.pig_happy = pygame.image.load(
            "../resources/images/pig_failed.png").convert_alpha()
        self.stars = pygame.image.load(
            "../resources/images/stars-edited.png").convert_alpha()
        # Star rating sprites cropped out of the sheet.
        rect = pygame.Rect(0, 0, 200, 200)
        self.star1 = self.stars.subsurface(rect).copy()
        rect = pygame.Rect(204, 0, 200, 200)
        self.star2 = self.stars.subsurface(rect).copy()
        rect = pygame.Rect(426, 0, 200, 200)
        self.star3 = self.stars.subsurface(rect).copy()
        # UI buttons cropped out of the button sheet.
        rect = pygame.Rect(164, 10, 60, 60)
        self.pause_button = self.buttons.subsurface(rect).copy()
        rect = pygame.Rect(24, 4, 100, 100)
        self.replay_button = self.buttons.subsurface(rect).copy()
        rect = pygame.Rect(142, 365, 130, 100)
        self.next_button = self.buttons.subsurface(rect).copy()
        self.clock = pygame.time.Clock()
        rect = pygame.Rect(18, 212, 100, 100)
        self.play_button = self.buttons.subsurface(rect).copy()
        self.clock = pygame.time.Clock()
        self.running = True
        self.humanPlaying =False
        # the base of the physics
        self.space = pm.Space()
        self.space.gravity = (0.0, -700.0)
        # --- Game-object collections and launch state ----------------------
        self.pigs = []
        self.birds = []
        self.balls = []
        self.polys = []
        self.beams = []
        self.columns = []
        self.poly_points = []
        self.ball_number = 0
        self.mouse_distance = 0
        self.rope_lenght = 90   # NOTE: misspelling kept; read elsewhere as-is
        self.angle = 0
        self.x_mouse = 0
        self.y_mouse = 0
        self.count = 0
        self.takeAction =False
        self.mouse_pressed = False
        self.t1 = 0
        self.tick_to_next_circle = 10
        # Colours and sling anchor coordinates.
        self.RED = (255, 0, 0)
        self.BLUE = (0, 0, 255)
        self.BLACK = (0, 0, 0)
        self.WHITE = (255, 255, 255)
        self.sling_x, self.sling_y = 135, 450
        self.sling2_x, self.sling2_y = 160, 450
        self.score = 0
        self.game_state = 0
        self.bird_path = []
        self.counter = 0
        self.restart_counter = False
        self.bonus_score_once = True
        self.bold_font = pygame.font.SysFont("arial", 30, bold=True)
        self.bold_font2 = pygame.font.SysFont("arial", 40, bold=True)
        self.bold_font3 = pygame.font.SysFont("arial", 50, bold=True)
        self.wall = True
        # Static floor
        self.static_body = pm.Body()
        self.static_lines = [pm.Segment(self.static_body, (0.0, 060.0), (1200.0, 060.0), 0.0)]
        self.static_lines1 = [pm.Segment(self.static_body, (1200.0, 060.0), (1200.0, 800.0), 0.0)]
        for line in self.static_lines:
            line.elasticity = 0.95
            line.friction = 1
            line.collision_type = 3
        for line in self.static_lines1:
            line.elasticity = 0.95
            line.friction = 1
            line.collision_type = 3
        self.space.add(self.static_lines)
        # --- Collision handlers (types: 0=bird, 1=pig, 2=wood, 3=static) ----
        # bird and pigs
        self.space.add_collision_handler(0, 1, post_solve= lambda x,y: self.post_solve_bird_pig(x,y,self.screen))
        # bird and wood
        self.space.add_collision_handler(0, 2, post_solve=self.post_solve_bird_wood)
        # pig and wood
        self.space.add_collision_handler(1, 2, post_solve=self.post_solve_pig_wood)
        self.level = Level(self.pigs, self.columns, self.beams, self.space)
        self.level.load_level()
def to_pygame(self,p):
"""Convert pymunk to pygame coordinates"""
return int(p.x), int(-p.y+600)
def vector(self,p0, p1):
"""Return the vector of the points
p0 = (xo,yo), p1 = (x1,y1)"""
a = p1[0] - p0[0]
b = p1[1] - p0[1]
return (a, b)
def unit_vector(self,v):
"""Return the unit vector of the points
v = (a,b)"""
h = ((v[0]**2)+(v[1]**2))**0.5
if h == 0:
h = 0.000000000000001
ua = v[0] / h
ub = v[1] / h
return (ua, ub)
def distance(self,xo, yo, x, y):
"""distance between points"""
dx = x - xo
dy = y - yo
d = ((dx ** 2) + (dy ** 2)) ** 0.5
return d
    def sling_action(self):
        """Set up sling behavior: draw the stretched rope and bird while the
        mouse drags, and compute the launch angle from the drag direction."""
        # Fixing bird to the sling rope
        v = self.vector((self.sling_x, self.sling_y), (self.x_mouse, self.y_mouse))
        uv = self.unit_vector(v)
        uv1 = uv[0]
        uv2 = uv[1]
        self.mouse_distance = self.distance(self.sling_x,self.sling_y, self.x_mouse, self.y_mouse)
        # Point on the rope at maximum stretch along the drag direction.
        pu = (uv1*self.rope_lenght+self.sling_x, uv2*self.rope_lenght+self.sling_y)
        bigger_rope = 102
        x_redbird = self.x_mouse - 20
        y_redbird = self.y_mouse - 20
        if self.mouse_distance > self.rope_lenght:
            # Drag exceeds rope length: clamp the bird to the rope's end.
            pux, puy = pu
            pux -= 20
            puy -= 20
            pul = pux, puy
            self.screen.blit(self.redbird, pul)
            pu2 = (uv1*bigger_rope+self.sling_x, uv2*bigger_rope+self.sling_y)
            pygame.draw.line(self.screen, (0, 0, 0), (self.sling2_x, self.sling2_y), pu2, 5)
            self.screen.blit(self.redbird, pul)
            pygame.draw.line(self.screen, (0, 0, 0), (self.sling_x, self.sling_y), pu2, 5)
        else:
            # Within rope length: bird follows the mouse directly.
            self.mouse_distance += 10
            pu3 = (uv1*self.mouse_distance+self.sling_x, uv2*self.mouse_distance+self.sling_y)
            pygame.draw.line(self.screen, (0, 0, 0), (self.sling2_x, self.sling2_y), pu3, 5)
            self.screen.blit(self.redbird, (x_redbird, y_redbird))
            pygame.draw.line(self.screen, (0, 0, 0), (self.sling_x, self.sling_y), pu3, 5)
        # Angle of impulse
        dy = self.y_mouse - self.sling_y
        dx = self.x_mouse - self.sling_x
        if dx == 0:
            # Avoid division by zero for a perfectly vertical drag.
            dx = 0.00000000000001
        self.angle = math.atan((float(dy))/dx)
    def draw_level_cleared(self):
        """Draw level cleared: award the unused-bird bonus once, switch to
        game state 4 and render the star rating / replay / next buttons."""
        self.level_cleared = self.bold_font3.render("Level Cleared!", 1, self.WHITE)
        self.score_level_cleared = self.bold_font2.render(str(self.score), 1, self.WHITE)
        if self.level.number_of_birds >= 0 and len(self.pigs) == 0:
            if self.bonus_score_once:
                # 10000 bonus per spare bird, awarded exactly once per level.
                self.score += (self.level.number_of_birds-1) * 10000
            self.bonus_score_once = False
            self.game_state = 4
            rect = pygame.Rect(300, 0, 600, 800)
            pygame.draw.rect(self.screen, self.BLACK, rect)
            self.screen.blit(self.level_cleared, (450, 90))
            # Star rating depends on the score thresholds of the level.
            if self.score >= self.level.one_star and self.score <= self.level.two_star:
                self.screen.blit(self.star1, (310, 190))
            if self.score >= self.level.two_star and self.score <= self.level.three_star:
                self.screen.blit(self.star1, (310, 190))
                self.screen.blit(self.star2, (500, 170))
            if self.score >= self.level.three_star:
                self.screen.blit(self.star1, (310, 190))
                self.screen.blit(self.star2, (500, 170))
                self.screen.blit(self.star3, (700, 200))
            self.screen.blit(self.score_level_cleared, (550, 400))
            self.screen.blit(self.replay_button, (510, 480))
            self.screen.blit(self.next_button, (620, 480))
    def draw_level_failed(self):
        """Draw level failed: after a 5 second grace period with no birds
        left and pigs alive, switch to game state 3 and show the replay UI."""
        self.failed = self.bold_font3.render("Level Failed", 1, self.WHITE)
        if self.level.number_of_birds <= 0 and time.time() - self.t2 > 5 and len(self.pigs) > 0:
            self.game_state = 3
            rect = pygame.Rect(300, 0, 600, 800)
            pygame.draw.rect(self.screen, self.BLACK, rect)
            self.screen.blit(self.failed, (450, 90))
            self.screen.blit(self.pig_happy, (380, 120))
            self.screen.blit(self.replay_button, (520, 460))
def restart(self):
"""Delete all objects of the level"""
pigs_to_remove = []
birds_to_remove = []
columns_to_remove = []
beams_to_remove = []
for pig in self.pigs:
pigs_to_remove.append(pig)
for pig in pigs_to_remove:
self.space.remove(pig.shape, pig.shape.body)
self.pigs.remove(pig)
for bird in self.birds:
birds_to_remove.append(bird)
for bird in birds_to_remove:
self.space.remove(bird.shape, bird.shape.body)
self.birds.remove(bird)
for column in self.columns:
columns_to_remove.append(column)
for column in columns_to_remove:
self.space.remove(column.shape, column.shape.body)
self.columns.remove(column)
for beam in self.beams:
beams_to_remove.append(beam)
for beam in beams_to_remove:
self.space.remove(beam.shape, beam.shape.body)
self.beams.remove(beam)
def post_solve_bird_pig(self,space, arbiter, surface):
"""Collision between bird and pig"""
a, b = arbiter.shapes
bird_body = a.body
pig_body = b.body
p = self.to_pygame(bird_body.position)
p2 = self.to_pygame(pig_body.position)
r = 30
pygame.draw.circle(surface, self.BLACK, p, r, 4)
pygame.draw.circle(surface, self.RED, p2, r, 4)
pigs_to_remove = []
for pig in self.pigs:
if pig_body == pig.body:
pig.life -= 20
pigs_to_remove.append(pig)
self.score += 10000
for pig in pigs_to_remove:
space.remove(pig.shape, pig.shape.body)
self.pigs.remove(pig)
def post_solve_bird_wood(self,space, arbiter):
"""Collision between bird and wood"""
poly_to_remove = []
if arbiter.total_impulse.length > 1100:
a, b = arbiter.shapes
for column in self.columns:
if b == column.shape:
poly_to_remove.append(column)
for beam in self.beams:
if b == beam.shape:
poly_to_remove.append(beam)
for poly in poly_to_remove:
if poly in self.columns:
self.columns.remove(poly)
if poly in self.beams:
self.beams.remove(poly)
space.remove(b, b.body)
self.score += 5000
def post_solve_pig_wood(self,space, arbiter):
"""Collision between pig and wood"""
pigs_to_remove = []
if arbiter.total_impulse.length > 700:
pig_shape, wood_shape = arbiter.shapes
for pig in self.pigs:
if pig_shape == pig.shape:
pig.life -= 20
self.score += 10000
if pig.life <= 0:
pigs_to_remove.append(pig)
for pig in pigs_to_remove:
space.remove(pig.shape, pig.shape.body)
self.pigs.remove(pig)
    def run(self): # runs one frame of the game
        """Advance the game by exactly one frame.

        Handles queued agent actions and pygame input events, releases birds
        from the sling, culls objects that left the screen, draws the whole
        scene, steps the pymunk physics once and presents the frame.
        """
        if True:  # no-op guard; kept as-is (dead construct from an earlier edit)
            if (self.takeAction):
                # Release new bird because of a take Action call
                self.takeAction = False
                if self.level.number_of_birds > 0:
                    self.level.number_of_birds -= 1
                    self.t1 = time.time()*1000
                    # Sling anchor the bird is launched from.
                    xo = 154
                    yo = 156
                    if self.x_mouse < self.sling_x+5 and self.humanPlaying:
                        bird = Bird(self.mouse_distance, self.angle, xo, yo, self.space)
                        self.birds.append(bird)
                    else:
                        bird = Bird(-self.mouse_distance, self.angle, xo, yo, self.space)
                        self.birds.append(bird)
                    if self.level.number_of_birds == 0:
                        # Remember when the last bird left, for the fail timer.
                        self.t2 = time.time()
            #Input handling
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.running = False
                elif event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                    self.running = False
                elif event.type == pygame.KEYDOWN and event.key == pygame.K_w:
                    # Toggle wall
                    if self.wall:
                        self.space.remove(self.static_lines1)
                        self.wall = False
                        print('Wall off')
                    else:
                        self.space.add(self.static_lines1)
                        self.wall = True
                        print('Wall on')
                elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                    print('Now in space mode')
                    self.space.gravity = (0.0, -10.0)
                    self.level.bool_space = True
                elif event.type == pygame.KEYDOWN and event.key == pygame.K_n:
                    print('Back in normal mode')
                    self.space.gravity = (0.0, -700.0)
                    self.level.bool_space = False
                # Mouse press inside the sling area starts a drag.
                if (pygame.mouse.get_pressed()[0] and self.x_mouse > 100 and
                        self.x_mouse < 250 and self.y_mouse > 370 and self.y_mouse < 550):
                    self.mouse_pressed = True
                if (event.type == pygame.MOUSEBUTTONUP and
                        event.button == 1 and self.mouse_pressed):
                    # Release new bird
                    self.mouse_pressed = False
                    if self.level.number_of_birds > 0:
                        self.level.number_of_birds -= 1
                        self.t1 = time.time()*1000
                        xo = 154
                        yo = 156
                        if self.mouse_distance > self.rope_lenght:
                            self.mouse_distance = self.rope_lenght
                        # Bird is initiated with power ~ distance, angle, x, y, space):
                        if self.x_mouse < self.sling_x+5 and self.humanPlaying:
                            bird = Bird(self.mouse_distance, self.angle, xo, yo, self.space)
                            self.birds.append(bird)
                        else:
                            bird = Bird(-self.mouse_distance, self.angle, xo, yo, self.space)
                            self.birds.append(bird)
                        if self.level.number_of_birds == 0:
                            self.t2 = time.time()
                if event.type == pygame.MOUSEBUTTONUP and event.button == 1:
                    # Pause button (top-left corner).
                    if (self.x_mouse < 60 and self.y_mouse < 155 and self.y_mouse > 90):
                        self.game_state = 1
                    if self.game_state == 1:
                        if self.x_mouse > 500 and self.y_mouse > 200 and self.y_mouse < 300:
                            # Resume in the paused screen
                            self.game_state = 0
                        if self.x_mouse > 500 and self.y_mouse > 300:
                            # Restart in the paused screen
                            self.restart()
                            self.level.load_level()
                            self.game_state = 0
                            self.bird_path = []
                    if self.game_state == 3:
                        # Restart in the failed level screen
                        if self.x_mouse > 500 and self.x_mouse < 620 and self.y_mouse > 450:
                            self.restart()
                            self.level.load_level()
                            self.game_state = 0
                            self.bird_path = []
                            self.score = 0
                    if self.game_state == 4:
                        # Build next level
                        if self.x_mouse > 610 and self.y_mouse > 450:
                            self.restart()
                            self.level.number += 1
                            self.game_state = 0
                            self.level.load_level()
                            self.score = 0
                            self.bird_path = []
                            self.bonus_score_once = True
                        if self.x_mouse < 610 and self.x_mouse > 500 and self.y_mouse > 450:
                            # Restart in the level cleared screen
                            self.restart()
                            self.level.load_level()
                            self.game_state = 0
                            self.bird_path = []
                            self.score = 0
            self.x_mouse, self.y_mouse = pygame.mouse.get_pos()
            # Draw background
            self.screen.fill((130, 200, 100))
            self.screen.blit(self.background2, (0, -50))
            # Draw first part of the sling
            rect = pygame.Rect(50, 0, 70, 220)
            self.screen.blit(self.sling_image, (138, 420), rect)
            # Draw the trail left behind
            for point in self.bird_path:
                pygame.draw.circle(self.screen, self.WHITE, point, 5, 0)
            # Draw the birds in the wait line
            if self.level.number_of_birds > 0:
                for i in range(self.level.number_of_birds-1):
                    x = 100 - (i*35)
                    self.screen.blit(self.redbird, (x, 508))
            # Draw sling behavior
            if self.mouse_pressed and self.level.number_of_birds > 0:
                self.sling_action()
            else:
                if time.time()*1000 - self.t1 > 300 and self.level.number_of_birds > 0:
                    self.screen.blit(self.redbird, (130, 426))
                else:
                    pygame.draw.line(self.screen, (0, 0, 0), (self.sling_x, self.sling_y-8),
                                     (self.sling2_x, self.sling2_y-7), 5)
            birds_to_remove = []
            pigs_to_remove = []
            columns_to_remove = []
            beams_to_remove=[]
            self.counter += 1
            #Remove polygons that went out of the screen:
            for column in self.columns:
                column_y = column.shape.body.position.y
                column_x = column.shape.body.position.x
                if column_y<.01 or column_x<.01 or column_x>1200 or column_y>650:
                    columns_to_remove.append(column)
            for beam in self.beams:
                beam_y = beam.shape.body.position.y
                beam_x = beam.shape.body.position.x
                if beam_y<.01 or beam_x<.01 or beam_x>1200 or beam_y>650:
                    beams_to_remove.append(beam)
            for column in columns_to_remove:
                self.space.remove(column.shape, column.shape.body)
                self.columns.remove(column)
            for beam in beams_to_remove:
                self.space.remove(beam.shape, beam.shape.body)
                self.beams.remove(beam)
            # Draw birds
            for bird in self.birds:
                if bird.shape.body.position.y < 0:
                    birds_to_remove.append(bird)
                if bird.dead():
                    birds_to_remove.append(bird)
                else:
                    bird.ageWhenStatic()
                p = self.to_pygame(bird.shape.body.position)
                x, y = p
                x -= 22
                y -= 20
                self.screen.blit(self.redbird, (x, y))
                pygame.draw.circle(self.screen, self.BLUE,
                                   p, int(bird.shape.radius), 2)
                if self.counter >= 3 and time.time() - self.t1 < 5:
                    self.bird_path.append(p)
                    # NOTE(review): this assigns a *local* restart_counter,
                    # but the check below reads self.restart_counter -- looks
                    # like a missing 'self.'; confirm intended behaviour.
                    restart_counter = True
            if self.restart_counter:
                self.counter = 0
                self.restart_counter = False
            # Remove birds and pigs
            for bird in birds_to_remove:
                self.space.remove(bird.shape, bird.shape.body)
                self.birds.remove(bird)
            # Draw static lines
            for line in self.static_lines:
                body = line.body
                pv1 = body.position + line.a.rotated(body.angle)
                pv2 = body.position + line.b.rotated(body.angle)
                p1 = self.to_pygame(pv1)
                p2 = self.to_pygame(pv2)
                pygame.draw.lines(self.screen, (150, 150, 150), False, [p1, p2])
            i = 0
            # Draw pigs
            for pig in self.pigs:
                i += 1
                if pig.shape.body.position.y < 0:
                    pigs_to_remove.append(pig)
                if pig.shape.body.position.x < 0 or pig.shape.body.position.x >1200:
                    pigs_to_remove.append(pig)
                p = self.to_pygame(pig.shape.body.position)
                x, y = p
                x -= 22
                y -= 20
                self.screen.blit(self.pig_image, (x+7, y+4))
                pygame.draw.circle(self.screen, self.BLUE, p, int(pig.radius), 2)
            for pig in pigs_to_remove:
                self.space.remove(pig.shape, pig.shape.body)
                self.pigs.remove(pig)
            # Draw columns and Beams
            for column in self.columns:
                column.draw_poly('columns', self.screen)
            for beam in self.beams:
                beam.draw_poly('beams', self.screen)
            # Update physics
            dt = 1.0/60.0
            for x in range(1):
                self.space.step(dt)
            # Drawing second part of the sling
            rect = pygame.Rect(0, 0, 60, 200)
            self.screen.blit(self.sling_image, (120, 420), rect)
            # Draw score
            score_font = self.bold_font.render("SCORE", 1, self.WHITE)
            number_font = self.bold_font.render(str(self.score), 1, self.WHITE)
            self.screen.blit(score_font, (1060, 90))
            if self.score == 0:
                self.screen.blit(number_font, (1100, 130))
            else:
                self.screen.blit(number_font, (1060, 130))
            self.screen.blit(self.pause_button, (10, 90))
            # Pause option
            if self.game_state == 1:
                self.screen.blit(self.play_button, (500, 200))
                self.screen.blit(self.replay_button, (500, 300))
            self.draw_level_cleared()
            self.draw_level_failed()
            if (self.show==True):
                pygame.display.flip()
            self.clock.tick(500)
            pygame.display.set_caption("fps: " + str(self.clock.get_fps()))
# Launch an interactive, human-controlled game when run as a script.
if __name__=='__main__':
    ab = AngryBirdsGame()
    ab.humanPlay()
| {
"repo_name": "imanolarrieta/angrybirds",
"path": "src/AngryBirds.py",
"copies": "1",
"size": "27868",
"license": "mit",
"hash": -1910434652767243300,
"line_mean": 39.5647743814,
"line_max": 145,
"alpha_frac": 0.5213506531,
"autogenerated": false,
"ratio": 3.6505108724128896,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9655466170842881,
"avg_score": 0.00327907093400166,
"num_lines": 687
} |
""" Angular momentum module. """
from __future__ import division
from scipy import floor, sqrt
from scipy.special import factorial
from numpy import arange
def calc_clebsch_hf(J_a, I_a, F_a, mF_a, J_b, I_b, F_b, mF_b, q):
    """ Clebsch-Gordan coefficient for the hyperfine transition dipole
    matrix element.
    Args:
        J_a, I_a, F_a, mF_a, J_b, I_b, F_b, mF_b: Angular momentum numbers
            of the coupled hyperfine levels a (lower) and b (upper).
        q (int): Polarisation. Choose from [-1, 0, 1].
    Returns:
        (double) The Clebsch-Gordan coefficient.
    Notes:
        - The transition is from level a to level b.
        - Equation references are to Daniel A. Steck, 'Rubidium 87 D Line
          Data', http://steck.us/alkalidata (revision 2.1.4, 23 Dec 2010).
    """
    # Reduce F onto J via a 6j symbol (Steck, Eqn 36).
    reduce_F = ((-1)**(F_b + J_a + 1 + I_a)
                * sqrt((2*F_b + 1)*(2*J_a + 1))
                * wigner_6j(J_a, J_b, 1, F_b, F_a, I_a))
    # Project onto the magnetic sublevels via a 3j symbol (Steck, Eqn 35).
    project_mF = ((-1)**(F_b - 1 + mF_a)
                  * sqrt(2*F_a + 1)
                  * wigner_3j(F_b, 1, F_a, mF_b, q, -mF_a))
    return project_mF * reduce_F
def wigner_3j(j1, j2, j3, m1, m2, m3):
    """ Compute the Wigner 3j symbol using the Racah formula.
    Args:
        / j1 j2 j3 \
        |          |
        \ m1 m2 m3 /
    Returns:
        (double) The value of the symbol, or 0.0 whenever one of the
        selection rules is violated.
    Raises:
        ValueError: If any argument is not an integer or half-integer.
    """
    # All arguments must be (half-)integers.
    if any(2 * x != floor(2 * x) for x in (j1, j2, j3, m1, m2, m3)):
        raise ValueError('All arguments must be integers or half-integers.')
    # Selection rules: the symbol vanishes unless all of them hold.
    if m1 + m2 + m3 != 0:
        return 0.0
    for j, m in ((j1, m1), (j2, m2), (j3, m3)):
        # 2*j and 2*m must have the same parity.
        if j - m != floor(j - m):
            return 0.0
    if (j3 > j1 + j2) | (j3 < abs(j1 - j2)):
        # Triangle rule on (j1, j2, j3).
        return 0.0
    for j, m in ((j1, m1), (j2, m2), (j3, m3)):
        if abs(m) > j:
            return 0.0
    # Summation limits of the Racah formula.
    k1 = j2 - m1 - j3
    k2 = j1 + m2 - j3
    k3 = j1 + j2 - j3
    k4 = j1 - m1
    k5 = j2 + m2
    k_lo = max(0, k1, k2)
    k_hi = min(k3, k4, k5)
    total = 0
    for k in arange(k_lo, k_hi + 1, 1):
        total += (-1)**k / (factorial(k) * factorial(k - k1) *
                            factorial(k - k2) * factorial(k3 - k) *
                            factorial(k4 - k) * factorial(k5 - k))
    prefactor = ((-1)**(j1 - j2 - m3) *
                 sqrt(factorial(j1 + j2 - j3) *
                      factorial(j1 - j2 + j3) * factorial(-j1 + j2 + j3) /
                      factorial(j1 + j2 + j3 + 1) *
                      factorial(j1 + m1) * factorial(j1 - m1) *
                      factorial(j2 + m2) * factorial(j2 - m2) *
                      factorial(j3 + m3) * factorial(j3 - m3)))
    return total * prefactor
def wigner_6j(j1, j2, j3, J1, J2, J3):
    """ Compute the Wigner 6j factor using the Racah formula.
    Args:
        / j1 j2 j3 \
       <            >
        \ J1 J2 J3 /
    Returns:
        (double) The value of the symbol, or 0 whenever a triangularity
        selection rule is violated.
    Raises:
        ValueError: If any argument is not an integer or half-integer.
    Note:
        https://en.wikipedia.org/wiki/Racah_W-coefficient
    """
    # Check that the js and Js are only integer or half integer.
    # BUGFIX: the original tested 2*j2 twice and never validated j3, so a
    # malformed j3 slipped through this check; the third test now uses j3.
    if ((2 * j1 != round(2 * j1)) |
        (2 * j2 != round(2 * j2)) |
        (2 * j3 != round(2 * j3)) |
        (2 * J1 != round(2 * J1)) |
        (2 * J2 != round(2 * J2)) |
        (2 * J3 != round(2 * J3))):
        raise ValueError('All arguments must be integers or half-integers.')
    # Check if the 4 triads ( (j1 j2 j3), (j1 J2 J3), (J1 j2 J3), (J1 J2 j3) )
    # satisfy the triangular inequalities
    if ((abs(j1 - j2) > j3) |
        (j1 + j2 < j3) |
        (abs(j1 - J2) > J3) |
        (j1 + J2 < J3) |
        (abs(J1 - j2) > J3) |
        (J1 + j2 < J3) |
        (abs(J1 - J2) > j3) |
        (J1 + J2 < j3)):
        # print '6j-Symbol is not triangular!'
        return 0.0
    # Check if the sum of the elements of each traid is an integer
    if ((2 * (j1 + j2 + j3) != round(2 * (j1 + j2 + j3))) |
        (2 * (j1 + J2 + J3) != round(2 * (j1 + J2 + J3))) |
        (2 * (J1 + j2 + J3) != round(2 * (J1 + j2 + J3))) |
        (2 * (J1 + J2 + j3) != round(2 * (J1 + J2 + j3)))):
        # print '6j-Symbol is not triangular!'
        return 0
    # Arguments for the factorials
    t1 = j1 + j2 + j3
    t2 = j1 + J2 + J3
    t3 = J1 + j2 + J3
    t4 = J1 + J2 + j3
    t5 = j1 + j2 + J1 + J2
    t6 = j2 + j3 + J2 + J3
    t7 = j1 + j3 + J1 + J3
    # Finding summation borders
    tmin = max(0, max(t1, max(t2, max(t3, t4))))
    tmax = min(t5, min(t6, t7))
    tvec = arange(tmin, tmax + 1, 1)
    # Calculation the sum part of the 6j-Symbol
    WignerReturn = 0
    for t in tvec:
        WignerReturn += ((-1)**t * factorial(t + 1) / (factorial(t - t1) *
            factorial(t - t2) * factorial(t - t3) * factorial(t - t4) *
            factorial(t5 - t) * factorial(t6 - t) * factorial(t7 - t)))
    # Calculation of the 6j-Symbol: sum times the four triangle coefficients.
    return WignerReturn * sqrt(triangle_coeff(j1, j2, j3) *
        triangle_coeff(j1, J2, J3) * triangle_coeff(J1, j2, J3) *
        triangle_coeff(J1, J2, j3))
def triangle_coeff(a, b, c):
""" Calculate the triangle coefficient. """
return (factorial(a + b - c) * factorial(a - b + c) * factorial(-a + b + c)
/factorial(a + b + c + 1))
| {
"repo_name": "tommyogden/maxwellbloch",
"path": "maxwellbloch/angmom.py",
"copies": "1",
"size": "5766",
"license": "mit",
"hash": 3415979549837137400,
"line_mean": 29.3473684211,
"line_max": 79,
"alpha_frac": 0.4915019077,
"autogenerated": false,
"ratio": 2.678123548536925,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8636337544821101,
"avg_score": 0.006657582283164674,
"num_lines": 190
} |
#An HDF5 file is a container for two kinds of objects:
# * datasets (array-like collections of data)
# * groups (folder-like containers that hold datasets).
# Groups work like dictionaries, and datasets work like NumPy arrays
from __future__ import division
import numpy as np
def read_rst_h5py(filename=None):
    """Read the screened-interaction matrix (W_c) from an HDF5 restart file.

    Args:
        filename: Path to the .hdf5 file; when None, the current working
            directory is searched for one.
    Returns:
        (data, msg): the first dataset in the file as a list, plus a
        human-readable restart message.
    """
    import h5py
    import os
    if filename is None:
        # NOTE(review): `find` is not defined or imported in this module;
        # as written this branch raises NameError -- confirm where `find`
        # is meant to come from.
        filename = find('*.hdf5', os.getcwd())
    with h5py.File(filename, 'r') as f:
        # Grab whatever dataset was written first (the file holds one).
        first_key = list(f.keys())[0]
        data = list(f[first_key])
        msg = 'RESTART: Full matrix elements of screened interactions (W_c) was read from {}'.format(filename)
    return data, msg
def write_rst_h5py(data, filename = None):
    """Write the screened-interaction matrix (W_c) to an HDF5 restart file.

    Args:
        data: Array-like matrix elements stored under the 'W_c' dataset.
        filename: Target path; defaults to 'SCREENED_COULOMB.hdf5'.
    Returns:
        A human-readable message describing what was stored.
    """
    import h5py
    if filename is None:
        filename = 'SCREENED_COULOMB.hdf5'
    with h5py.File(filename, 'w') as data_file:
        # Best effort: report (rather than raise) when the data cannot be
        # serialised, preserving the original's tolerant behaviour, but
        # catch Exception instead of a bare except so KeyboardInterrupt
        # and SystemExit still propagate.
        try:
            data_file.create_dataset('W_c', data=data)
        except Exception:
            print("failed writting data to SCREENED_COULOMB.hdf5")
            print(type(data))
        # The `with` block closes the file; the original's bare
        # `data_file.close` attribute access was a no-op and was removed.
    msg = 'Full matrix elements of screened interactions (W_c) stored in {}'.format(filename)
    return msg
def write_rst_yaml(data, filename=None):
    """Dump the screened-interaction matrix elements to a YAML restart file.

    Args:
        data: Object to serialise.
        filename: Target path; defaults to 'SCREENED_COULOMB.yaml'.
    Returns:
        A human-readable message describing what was stored.
    """
    import yaml
    if filename is None:
        filename = 'SCREENED_COULOMB.yaml'
    with open(filename, 'w+', encoding='utf8') as outfile:
        yaml.dump(data, outfile, default_flow_style=False, allow_unicode=True)
    return 'Full matrix elements of screened interactions stored in {}'.format(filename)
def read_rst_yaml(filename=None):
    """Read the screened-interaction matrix elements from a YAML restart file.

    Args:
        filename: Path to the .yaml file; when None, the current working
            directory is searched for one.
    Returns:
        (data, msg) on success, or the yaml.YAMLError instance on a
        parse failure (note the asymmetric return shape).
    """
    import os
    import yaml
    if filename is None:
        # NOTE(review): `find` is not defined or imported in this module;
        # as written this branch raises NameError -- confirm where `find`
        # is meant to come from.
        filename = find('*.yaml', os.getcwd())
    with open(filename, 'r') as stream:
        try:
            # SECURITY: yaml.load without an explicit Loader can construct
            # arbitrary Python objects; consider yaml.safe_load if the file
            # may come from an untrusted source.
            data = yaml.load(stream)
            msg = 'RESTART: Full matrix elements of screened interactions (W_c) was read from {}'.format(filename)
            return data, msg
        except yaml.YAMLError as exc:
            return exc
| {
"repo_name": "gkc1000/pyscf",
"path": "pyscf/nao/m_restart.py",
"copies": "1",
"size": "2079",
"license": "apache-2.0",
"hash": 6185923045166163000,
"line_mean": 32,
"line_max": 114,
"alpha_frac": 0.6301106301,
"autogenerated": false,
"ratio": 3.5660377358490565,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46961483659490566,
"avg_score": null,
"num_lines": null
} |
'''An helper file for the pydev debugger (REPL) console
'''
from code import InteractiveConsole
import sys
import traceback
from _pydev_bundle import _pydev_completer
from _pydevd_bundle.pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle.pydevd_vars import make_valid_xml_value
from _pydev_bundle.pydev_imports import Exec
from _pydevd_bundle.pydevd_io import IOBuf
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn
from _pydev_bundle.pydev_override import overrides
from _pydevd_bundle import pydevd_save_locals
# Message-type tags used when serialising console messages to XML.
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Accumulates console output/error lines and renders them as XML."""
    def __init__(self):
        # True when the interpreter needs more input to complete the command.
        self.more = False
        # Accumulated (message_type, line) tuples, e.g. ('error', 'message').
        self.console_messages = []
    def add_console_message(self, message_type, message):
        """Split *message* on newlines and store every non-blank line."""
        self.console_messages.extend(
            (message_type, line) for line in message.split("\n") if line.strip())
    def update_more(self, more):
        """Record whether further input is required from the user."""
        self.more = more
    def to_xml(self):
        """Render the collected state as
        <xml><more>...</more><type message="..."></type>...</xml>."""
        parts = ['<xml><more>%s</more>' % (self.more,)]
        for message_type, message in self.console_messages:
            parts.append('<%s message="%s"></%s>'
                         % (message_type, make_valid_xml_value(message), message_type))
        parts.append('</xml>')
        return ''.join(parts)
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    # Stdin replacement for the debug console: interactive reads are not
    # supported, so every readline() warns and returns an empty line.
    # NOTE(review): `overrides(...)` below is a bare call, not a decorator;
    # its result is discarded. It looks like a missing '@' -- confirm
    # against upstream pydevd.
    overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        # Warn on stderr and behave as if the user just hit Enter.
        sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n')
        return '\n'
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console
    """
    # NOTE(review): the `overrides(...)` lines in this class are bare calls,
    # not decorators; their results are discarded. They look like missing
    # '@' decorators -- confirm against upstream pydevd.
    overrides(BaseInterpreterInterface.create_std_in)
    def create_std_in(self):
        # Hand out the real stdin only when output is not being buffered.
        # self.__buffer_output (name-mangled) is first set in push(), so it
        # may not exist yet -- hence the broad try/except around the read.
        try:
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass
        return DebugConsoleStdIn() #If buffered, raw_input is not supported in this console.
    overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins
        :param buffer_output: if False won't redirect the output.
        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            # Remember the real streams so the finally block can restore them.
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                # runcode() below executes in this frame's namespaces.
                self.frame = frame
                if buffer_output:
                    # Capture everything the command prints.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            #Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr
        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []
    overrides(BaseInterpreterInterface.do_add_exec)
    def do_add_exec(self, line):
        # Presumably the hook BaseInterpreterInterface.add_exec delegates to
        # (not visible in this file); reuse InteractiveConsole's buffering
        # and compilation logic.
        return InteractiveConsole.push(self, line)
    overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        try:
            # Run in the debugged frame's globals/locals so assignments are
            # visible there; save_locals presumably persists f_locals
            # modifications back into the frame -- implemented elsewhere.
            Exec(code, self.frame.f_globals, self.frame.f_locals)
            pydevd_save_locals.save_locals(self.frame)
        except SystemExit:
            raise
        except:
            self.showtraceback()
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    # Module-level cache of the single console instance; it is reused only
    # while both the thread id and frame id match the cached values.
    thread_id = None
    frame_id = None
    interactive_console_instance = None
#Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """Return the cached global interactive console for (thread_id, frame_id).

    A fresh DebugConsole is created -- and a "[Current context]" line is
    appended to *console_message* -- whenever the requested thread/frame
    pair differs from the cached one.
    """
    cache = InteractiveConsoleCache
    if cache.thread_id == thread_id and cache.frame_id == frame_id:
        return cache.interactive_console_instance
    cache.interactive_console_instance = DebugConsole()
    cache.thread_id = thread_id
    cache.frame_id = frame_id
    stack = traceback.extract_stack(frame, limit=1)
    if stack:
        top = stack[0]  # top entry from the stacktrace
        context_message = 'File "%s", line %s, in %s' % (top[0], top[1], top[2])
        console_message.add_console_message(
            CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,))
    return cache.interactive_console_instance
def clear_interactive_console():
    """Reset the console cache so the next request builds a fresh console."""
    InteractiveConsoleCache.thread_id = None
    InteractiveConsoleCache.frame_id = None
    InteractiveConsoleCache.interactive_console_instance = None
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Run *line* on the cached interactive console and collect its output.

    Returns a ConsoleMessage holding the captured stdout/stderr lines and
    the 'more input required' flag.
    """
    result = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, result)
    more, out_lines, err_lines = console.push(line, frame, buffer_output)
    result.update_more(more)
    for out_line in out_lines:
        result.add_console_message(CONSOLE_OUTPUT, out_line)
    for err_line in err_lines:
        result.add_console_message(CONSOLE_ERROR, err_line)
    return result
def get_completions(frame, act_tok):
    """ fetch all completions, create xml for the same
    return the completions xml
    """
    # Delegate to the completer, which inspects the frame's namespaces and
    # serialises the matches for act_tok as XML.
    return _pydev_completer.generate_completions_as_xml(frame, act_tok)
| {
"repo_name": "mrknow/filmkodi",
"path": "plugin.video.mrknow/mylib/_pydevd_bundle/pydevd_console.py",
"copies": "9",
"size": "8356",
"license": "apache-2.0",
"hash": -1376898324150235400,
"line_mean": 35.3304347826,
"line_max": 120,
"alpha_frac": 0.5762326472,
"autogenerated": false,
"ratio": 4.715575620767495,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9791808267967496,
"avg_score": null,
"num_lines": null
} |
'''An helper file for the pydev debugger (REPL) console
'''
from code import InteractiveConsole
import sys
import traceback
import _pydev_completer
from pydevd_tracing import GetExceptionTracebackStr
from pydevd_vars import makeValidXmlValue
from pydev_imports import Exec
from pydevd_io import IOBuf
from pydev_console_utils import BaseInterpreterInterface, BaseStdIn
from pydev_override import overrides
import pydevd_save_locals
# Message-type tags used when serialising console messages to XML.
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Accumulates console output/error lines and renders them as XML."""
    def __init__(self):
        # True when the interpreter needs more input to complete the command.
        self.more = False
        # Accumulated (message_type, line) tuples, e.g. ('error', 'message').
        self.console_messages = []
    def add_console_message(self, message_type, message):
        """Split *message* on newlines and store every non-blank line."""
        self.console_messages.extend(
            (message_type, line) for line in message.split("\n") if line.strip())
    def update_more(self, more):
        """Record whether further input is required from the user."""
        self.more = more
    def toXML(self):
        """Render the collected state as
        <xml><more>...</more><type message="..."></type>...</xml>."""
        parts = ['<xml><more>%s</more>' % (self.more,)]
        for message_type, message in self.console_messages:
            parts.append('<%s message="%s"></%s>'
                         % (message_type, makeValidXmlValue(message), message_type))
        parts.append('</xml>')
        return ''.join(parts)
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    # Stdin replacement for the debug console: interactive reads are not
    # supported, so every readline() warns and returns an empty line.
    # NOTE(review): `overrides(...)` below is a bare call, not a decorator;
    # its result is discarded. It looks like a missing '@' -- confirm
    # against upstream pydevd.
    overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        # Warn on stderr and behave as if the user just hit Enter.
        sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n')
        return '\n'
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console
    """
    # NOTE(review): the `overrides(...)` lines in this class are bare calls,
    # not decorators; their results are discarded. They look like missing
    # '@' decorators -- confirm against upstream pydevd.
    overrides(BaseInterpreterInterface.createStdIn)
    def createStdIn(self):
        # Hand out the real stdin only when output is not being buffered.
        # self.__buffer_output (name-mangled) is first set in push(), so it
        # may not exist yet -- hence the broad try/except around the read.
        try:
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass
        return DebugConsoleStdIn() #If buffered, raw_input is not supported in this console.
    overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins
        :param buffer_output: if False won't redirect the output.
        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            # Remember the real streams so the finally block can restore them.
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                # runcode() below executes in this frame's namespaces.
                self.frame = frame
                if buffer_output:
                    # Capture everything the command prints.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.addExec(line)
            except Exception:
                exc = GetExceptionTracebackStr()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            #Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr
        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []
    overrides(BaseInterpreterInterface.doAddExec)
    def doAddExec(self, line):
        # Presumably the hook BaseInterpreterInterface.addExec delegates to
        # (not visible in this file); reuse InteractiveConsole's buffering
        # and compilation logic.
        return InteractiveConsole.push(self, line)
    overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        try:
            # Run in the debugged frame's globals/locals so assignments are
            # visible there; save_locals presumably persists f_locals
            # modifications back into the frame -- implemented elsewhere.
            Exec(code, self.frame.f_globals, self.frame.f_locals)
            pydevd_save_locals.save_locals(self.frame)
        except SystemExit:
            raise
        except:
            self.showtraceback()
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    # Module-level cache of the single console instance; it is reused only
    # while both the thread id and frame id match the cached values.
    thread_id = None
    frame_id = None
    interactive_console_instance = None
#Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """Return the cached global interactive console for (thread_id, frame_id).

    A fresh DebugConsole is created -- and a "[Current context]" line is
    appended to *console_message* -- whenever the requested thread/frame
    pair differs from the cached one.
    """
    cache = InteractiveConsoleCache
    if cache.thread_id == thread_id and cache.frame_id == frame_id:
        return cache.interactive_console_instance
    cache.interactive_console_instance = DebugConsole()
    cache.thread_id = thread_id
    cache.frame_id = frame_id
    stack = traceback.extract_stack(frame, limit=1)
    if stack:
        top = stack[0]  # top entry from the stacktrace
        context_message = 'File "%s", line %s, in %s' % (top[0], top[1], top[2])
        console_message.add_console_message(
            CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,))
    return cache.interactive_console_instance
def clear_interactive_console():
    """Reset the console cache so the next request builds a fresh console."""
    InteractiveConsoleCache.thread_id = None
    InteractiveConsoleCache.frame_id = None
    InteractiveConsoleCache.interactive_console_instance = None
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Run *line* on the cached interactive console and collect its output.

    Returns a ConsoleMessage holding the captured stdout/stderr lines and
    the 'more input required' flag.
    """
    result = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, result)
    more, out_lines, err_lines = console.push(line, frame, buffer_output)
    result.update_more(more)
    for out_line in out_lines:
        result.add_console_message(CONSOLE_OUTPUT, out_line)
    for err_line in err_lines:
        result.add_console_message(CONSOLE_ERROR, err_line)
    return result
def get_completions(frame, act_tok):
    """ fetch all completions, create xml for the same
    return the completions xml
    """
    # Delegate to the completer, which inspects the frame's namespaces and
    # serialises the matches for act_tok as XML.
    return _pydev_completer.GenerateCompletionsAsXML(frame, act_tok)
| {
"repo_name": "SylvainCorlay/PyDev.Debugger",
"path": "pydevd_console.py",
"copies": "11",
"size": "8205",
"license": "epl-1.0",
"hash": -2551930944242683400,
"line_mean": 34.6739130435,
"line_max": 120,
"alpha_frac": 0.5746496039,
"autogenerated": false,
"ratio": 4.815140845070423,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
'''An helper file for the pydev debugger (REPL) console
'''
import sys
import traceback
from code import InteractiveConsole
from _pydev_bundle import _pydev_completer
from _pydev_bundle.pydev_code_executor import BaseCodeExecutor
from _pydev_bundle.pydev_imports import Exec
from _pydev_bundle.pydev_override import overrides
from _pydev_bundle.pydev_stdin import BaseStdIn
from _pydevd_bundle import pydevd_save_locals
from _pydevd_bundle.pydevd_io import IOBuf
from _pydevd_bundle.pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle.pydevd_xml import make_valid_xml_value
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Collects the result of one console command: typed message lines plus
    the interpreter's 'more input required' flag, serializable to XML.
    """
    def __init__(self):
        # True while the interpreter still needs further input to complete the statement.
        self.more = False
        # List of (message_type, line) tuples, e.g. [('error', 'error_message'), ('message_list', 'output_message')]
        self.console_messages = []
    def add_console_message(self, message_type, message):
        """Record every non-blank line of *message* under *message_type*."""
        self.console_messages.extend(
            (message_type, line) for line in message.split("\n") if line.strip())
    def update_more(self, more):
        """Set whether further input is required from the user (True) or not (False)."""
        self.more = more
    def to_xml(self):
        """Serialize as:
        <xml><more>true/false</more><TYPE message="..."></TYPE>...</xml>
        """
        parts = ['<xml><more>%s</more>' % (self.more,)]
        for msg_type, msg in self.console_messages:
            parts.append('<%s message="%s"></%s>' % (msg_type, make_valid_xml_value(msg), msg_type))
        parts.append('</xml>')
        return ''.join(parts)
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    # BUG FIX: 'overrides(...)' was a bare statement -- the decorator it
    # returns was discarded, so the override check never ran.  Apply it as a
    # decorator (as later pydevd versions do).
    @overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        """Stdin reads are unsupported here; warn on stderr and act like an empty line."""
        sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n')
        return '\n'
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseCodeExecutor):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console.
    Code is executed against the globals/locals of the frame currently being
    debugged (see runcode/get_namespace).
    """
    # BUG FIX: the 'overrides(...)' markers below were bare statements -- the
    # decorator each returns was discarded, so the signature check never ran.
    # They are now applied as decorators, matching later pydevd versions.
    @overrides(BaseCodeExecutor.create_std_in)
    def create_std_in(self, *args, **kwargs):
        """Return the real sys.stdin when output is unbuffered, else a warning stub."""
        try:
            # Name-mangled attribute set in push(); may not exist before the
            # first push() call, hence the broad except.
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass
        return DebugConsoleStdIn() #If buffered, raw_input is not supported in this console.
    @overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins
        :param buffer_output: if False won't redirect the output.
        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                self.frame = frame
                if buffer_output:
                    # Capture everything written while the line executes.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            #Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr
        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []
    @overrides(BaseCodeExecutor.do_add_exec)
    def do_add_exec(self, line):
        """Delegate actual compile/execute of *line* to InteractiveConsole.push."""
        return InteractiveConsole.push(self, line)
    @overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        try:
            Exec(code, self.frame.f_globals, self.frame.f_locals)
            # Persist changes made to f_locals back into the live frame.
            pydevd_save_locals.save_locals(self.frame)
        except SystemExit:
            raise
        except:
            self.showtraceback()
    def get_namespace(self):
        """Return a merged dict of the debugged frame's globals and locals."""
        dbg_namespace = {}
        dbg_namespace.update(self.frame.f_globals)
        dbg_namespace.update(self.frame.f_locals)  # locals later because it has precedence over the actual globals
        return dbg_namespace
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    # Process-wide cache of the single active debug console and the
    # thread/frame it was created for (see get_interactive_console below).
    thread_id = None  # thread id the cached console belongs to
    frame_id = None  # frame id the cached console belongs to
    interactive_console_instance = None  # cached DebugConsole instance, or None
#Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """Return the global interactive console for (thread_id, frame_id).
    Reuses the cached DebugConsole when thread/frame match; otherwise a fresh
    console is created, cached, and a "[Current context]" line is appended to
    *console_message*.
    :rtype: DebugConsole
    """
    cache = InteractiveConsoleCache
    if (cache.thread_id, cache.frame_id) == (thread_id, frame_id):
        return cache.interactive_console_instance
    cache.interactive_console_instance = DebugConsole()
    cache.thread_id = thread_id
    cache.frame_id = frame_id
    stack = traceback.extract_stack(frame, limit=1)
    if stack:
        top = stack[0]  # top entry from stacktrace
        context = 'File "%s", line %s, in %s' % (top[0], top[1], top[2])
        console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context,))
    return cache.interactive_console_instance
def clear_interactive_console():
    """Drop the cached console so the next request builds a fresh one."""
    for attr in ('thread_id', 'frame_id', 'interactive_console_instance'):
        setattr(InteractiveConsoleCache, attr, None)
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Fetch (or create) the interactive console for (thread_id, frame_id),
    push *line* to it, and return a ConsoleMessage describing the result."""
    result = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, result)
    more, out_lines, err_lines = console.push(line, frame, buffer_output)
    result.update_more(more)
    # Record outputs first, then errors, preserving the interpreter's order.
    for msg_type, lines in ((CONSOLE_OUTPUT, out_lines), (CONSOLE_ERROR, err_lines)):
        for text in lines:
            result.add_console_message(msg_type, text)
    return result
def get_description(frame, thread_id, frame_id, expression):
    """Evaluate *expression* in the console bound to (thread_id, frame_id) and
    return its description, always clearing the console's frame reference."""
    console_message = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, console_message)
    console.frame = frame
    try:
        return console.getDescription(expression)
    finally:
        # Never keep the frame alive past this call.
        console.frame = None
def get_completions(frame, act_tok):
    """Collect all completions for *act_tok* in *frame* and return them as XML."""
    completions_xml = _pydev_completer.generate_completions_as_xml(frame, act_tok)
    return completions_xml
| {
"repo_name": "paplorinc/intellij-community",
"path": "python/helpers/pydev/_pydevd_bundle/pydevd_console.py",
"copies": "4",
"size": "8987",
"license": "apache-2.0",
"hash": 6408743953949661000,
"line_mean": 35.2379032258,
"line_max": 120,
"alpha_frac": 0.5835095137,
"autogenerated": false,
"ratio": 4.661307053941909,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.724481656764191,
"avg_score": null,
"num_lines": null
} |
'''A helper file for the pydev debugger (REPL) console
'''
import sys
import traceback
from code import InteractiveConsole
from _pydev_bundle import _pydev_completer
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn
from _pydev_bundle.pydev_imports import Exec
from _pydev_bundle.pydev_override import overrides
from _pydevd_bundle import pydevd_save_locals
from _pydevd_bundle.pydevd_io import IOBuf
from _pydevd_bundle.pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle.pydevd_xml import make_valid_xml_value
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Collects the result of one console command: typed message lines plus
    the interpreter's 'more input required' flag, serializable to XML.
    """
    def __init__(self):
        # True while the interpreter still needs further input to complete the statement.
        self.more = False
        # List of (message_type, line) tuples, e.g. [('error', 'error_message'), ('message_list', 'output_message')]
        self.console_messages = []
    def add_console_message(self, message_type, message):
        """Record every non-blank line of *message* under *message_type*."""
        self.console_messages.extend(
            (message_type, line) for line in message.split("\n") if line.strip())
    def update_more(self, more):
        """Set whether further input is required from the user (True) or not (False)."""
        self.more = more
    def to_xml(self):
        """Serialize as:
        <xml><more>true/false</more><TYPE message="..."></TYPE>...</xml>
        """
        parts = ['<xml><more>%s</more>' % (self.more,)]
        for msg_type, msg in self.console_messages:
            parts.append('<%s message="%s"></%s>' % (msg_type, make_valid_xml_value(msg), msg_type))
        parts.append('</xml>')
        return ''.join(parts)
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    # BUG FIX: 'overrides(...)' was a bare statement -- the decorator it
    # returns was discarded, so the override check never ran.  Apply it as a
    # decorator (as later pydevd versions do).
    @overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        """Stdin reads are unsupported here; warn on stderr and act like an empty line."""
        sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n')
        return '\n'
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console.
    Code is executed against the globals/locals of the frame currently being
    debugged (see runcode/get_namespace).
    """
    # BUG FIX: the 'overrides(...)' markers below were bare statements -- the
    # decorator each returns was discarded, so the signature check never ran.
    # They are now applied as decorators, matching later pydevd versions.
    @overrides(BaseInterpreterInterface.create_std_in)
    def create_std_in(self, *args, **kwargs):
        """Return the real sys.stdin when output is unbuffered, else a warning stub."""
        try:
            # Name-mangled attribute set in push(); may not exist before the
            # first push() call, hence the broad except.
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass
        return DebugConsoleStdIn() #If buffered, raw_input is not supported in this console.
    @overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins
        :param buffer_output: if False won't redirect the output.
        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                self.frame = frame
                if buffer_output:
                    # Capture everything written while the line executes.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            #Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr
        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []
    @overrides(BaseInterpreterInterface.do_add_exec)
    def do_add_exec(self, line):
        """Delegate actual compile/execute of *line* to InteractiveConsole.push."""
        return InteractiveConsole.push(self, line)
    @overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        try:
            Exec(code, self.frame.f_globals, self.frame.f_locals)
            # Persist changes made to f_locals back into the live frame.
            pydevd_save_locals.save_locals(self.frame)
        except SystemExit:
            raise
        except:
            self.showtraceback()
    def get_namespace(self):
        """Return a merged dict of the debugged frame's globals and locals."""
        dbg_namespace = {}
        dbg_namespace.update(self.frame.f_globals)
        dbg_namespace.update(self.frame.f_locals)  # locals later because it has precedence over the actual globals
        return dbg_namespace
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    # Process-wide cache of the single active debug console and the
    # thread/frame it was created for (see get_interactive_console below).
    thread_id = None  # thread id the cached console belongs to
    frame_id = None  # frame id the cached console belongs to
    interactive_console_instance = None  # cached DebugConsole instance, or None
#Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """Return the global interactive console for (thread_id, frame_id).
    Reuses the cached DebugConsole when thread/frame match; otherwise a fresh
    console is created, cached, and a "[Current context]" line is appended to
    *console_message*.
    :rtype: DebugConsole
    """
    cache = InteractiveConsoleCache
    if (cache.thread_id, cache.frame_id) == (thread_id, frame_id):
        return cache.interactive_console_instance
    cache.interactive_console_instance = DebugConsole()
    cache.thread_id = thread_id
    cache.frame_id = frame_id
    stack = traceback.extract_stack(frame, limit=1)
    if stack:
        top = stack[0]  # top entry from stacktrace
        context = 'File "%s", line %s, in %s' % (top[0], top[1], top[2])
        console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context,))
    return cache.interactive_console_instance
def clear_interactive_console():
    """Drop the cached console so the next request builds a fresh one."""
    for attr in ('thread_id', 'frame_id', 'interactive_console_instance'):
        setattr(InteractiveConsoleCache, attr, None)
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Fetch (or create) the interactive console for (thread_id, frame_id),
    push *line* to it, and return a ConsoleMessage describing the result."""
    result = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, result)
    more, out_lines, err_lines = console.push(line, frame, buffer_output)
    result.update_more(more)
    # Record outputs first, then errors, preserving the interpreter's order.
    for msg_type, lines in ((CONSOLE_OUTPUT, out_lines), (CONSOLE_ERROR, err_lines)):
        for text in lines:
            result.add_console_message(msg_type, text)
    return result
def get_description(frame, thread_id, frame_id, expression):
    """Evaluate *expression* in the console bound to (thread_id, frame_id) and
    return its description, always clearing the console's frame reference."""
    console_message = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, console_message)
    console.frame = frame
    try:
        return console.getDescription(expression)
    finally:
        # Never keep the frame alive past this call.
        console.frame = None
def get_completions(frame, act_tok):
    """Collect all completions for *act_tok* in *frame* and return them as XML."""
    completions_xml = _pydev_completer.generate_completions_as_xml(frame, act_tok)
    return completions_xml
| {
"repo_name": "ThiagoGarciaAlves/intellij-community",
"path": "python/helpers/pydev/_pydevd_bundle/pydevd_console.py",
"copies": "16",
"size": "8982",
"license": "apache-2.0",
"hash": 3262138753901514000,
"line_mean": 35.3643724696,
"line_max": 120,
"alpha_frac": 0.5839456691,
"autogenerated": false,
"ratio": 4.673257023933402,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.006829673536183366,
"num_lines": 247
} |
'''A helper file for the pydev debugger (REPL) console
'''
import sys
import traceback
from code import InteractiveConsole
from _pydev_bundle import _pydev_completer
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn
from _pydev_bundle.pydev_imports import Exec
from _pydev_bundle.pydev_override import overrides
from _pydevd_bundle import pydevd_save_locals
from _pydevd_bundle.pydevd_io import IOBuf
from pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle.pydevd_xml import make_valid_xml_value
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Collects the result of one console command: typed message lines plus
    the interpreter's 'more input required' flag, serializable to XML.
    """
    def __init__(self):
        # True while the interpreter still needs further input to complete the statement.
        self.more = False
        # List of (message_type, line) tuples, e.g. [('error', 'error_message'), ('message_list', 'output_message')]
        self.console_messages = []
    def add_console_message(self, message_type, message):
        """Record every non-blank line of *message* under *message_type*."""
        self.console_messages.extend(
            (message_type, line) for line in message.split("\n") if line.strip())
    def update_more(self, more):
        """Set whether further input is required from the user (True) or not (False)."""
        self.more = more
    def to_xml(self):
        """Serialize as:
        <xml><more>true/false</more><TYPE message="..."></TYPE>...</xml>
        """
        parts = ['<xml><more>%s</more>' % (self.more,)]
        for msg_type, msg in self.console_messages:
            parts.append('<%s message="%s"></%s>' % (msg_type, make_valid_xml_value(msg), msg_type))
        parts.append('</xml>')
        return ''.join(parts)
#=======================================================================================================================
# _DebugConsoleStdIn
#=======================================================================================================================
class _DebugConsoleStdIn(BaseStdIn):
    @overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        """Reading from stdin is unsupported; warn and behave like an empty line."""
        warning = 'Warning: Reading from stdin is still not supported in this console.\n'
        sys.stderr.write(warning)
        return '\n'
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console

    Code runs against the globals/locals of the frame currently being
    debugged (see runcode/get_namespace).
    """

    @overrides(BaseInterpreterInterface.create_std_in)
    def create_std_in(self, *args, **kwargs):
        # Return the real sys.stdin when output is unbuffered, else a stub
        # that only warns.
        try:
            # Name-mangled attribute set in push(); may not exist before the
            # first push() call, hence the broad except.
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass

        return _DebugConsoleStdIn()  # If buffered, raw_input is not supported in this console.

    @overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins

        :param buffer_output: if False won't redirect the output.

        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                self.frame = frame
                if buffer_output:
                    # Capture everything written while the line executes.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            # Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr
        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []

    @overrides(BaseInterpreterInterface.do_add_exec)
    def do_add_exec(self, line):
        """Delegate actual compile/execute of *line* to InteractiveConsole.push."""
        return InteractiveConsole.push(self, line)

    @overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        try:
            Exec(code, self.frame.f_globals, self.frame.f_locals)
            # Persist changes made to f_locals back into the live frame.
            pydevd_save_locals.save_locals(self.frame)
        except SystemExit:
            raise
        except:
            # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
            # (showtraceback does it on python 3.5 onwards)
            sys.excepthook = sys.__excepthook__
            try:
                self.showtraceback()
            finally:
                # NOTE(review): this writes sys.excepthook back into
                # sys.__excepthook__ instead of restoring sys.excepthook to
                # its pre-try value -- looks suspicious; confirm against the
                # upstream PyDev-877 fix before changing.
                sys.__excepthook__ = sys.excepthook

    def get_namespace(self):
        """Return a merged dict of the debugged frame's globals and locals."""
        dbg_namespace = {}
        dbg_namespace.update(self.frame.f_globals)
        dbg_namespace.update(self.frame.f_locals)  # locals later because it has precedence over the actual globals
        return dbg_namespace
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    # Process-wide cache of the single active debug console and the
    # thread/frame it was created for (see get_interactive_console below).
    thread_id = None  # thread id the cached console belongs to
    frame_id = None  # frame id the cached console belongs to
    interactive_console_instance = None  # cached DebugConsole instance, or None
# Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """Return the global interactive console for (thread_id, frame_id).
    Reuses the cached DebugConsole when thread/frame match; otherwise a fresh
    console is created, cached, and a "[Current context]" line is appended to
    *console_message*.
    :rtype: DebugConsole
    """
    cache = InteractiveConsoleCache
    if (cache.thread_id, cache.frame_id) == (thread_id, frame_id):
        return cache.interactive_console_instance
    cache.interactive_console_instance = DebugConsole()
    cache.thread_id = thread_id
    cache.frame_id = frame_id
    stack = traceback.extract_stack(frame, limit=1)
    if stack:
        top = stack[0]  # top entry from stacktrace
        context = 'File "%s", line %s, in %s' % (top[0], top[1], top[2])
        console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context,))
    return cache.interactive_console_instance
def clear_interactive_console():
    """Drop the cached console so the next request builds a fresh one."""
    for attr in ('thread_id', 'frame_id', 'interactive_console_instance'):
        setattr(InteractiveConsoleCache, attr, None)
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Fetch (or create) the interactive console for (thread_id, frame_id),
    push *line* to it, and return a ConsoleMessage describing the result."""
    result = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, result)
    more, out_lines, err_lines = console.push(line, frame, buffer_output)
    result.update_more(more)
    # Record outputs first, then errors, preserving the interpreter's order.
    for msg_type, lines in ((CONSOLE_OUTPUT, out_lines), (CONSOLE_ERROR, err_lines)):
        for text in lines:
            result.add_console_message(msg_type, text)
    return result
def get_description(frame, thread_id, frame_id, expression):
    """Evaluate *expression* in the console bound to (thread_id, frame_id) and
    return its description, always clearing the console's frame reference."""
    console_message = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, console_message)
    console.frame = frame
    try:
        return console.getDescription(expression)
    finally:
        # Never keep the frame alive past this call.
        console.frame = None
def get_completions(frame, act_tok):
    """Collect all completions for *act_tok* in *frame* and return them as XML."""
    completions_xml = _pydev_completer.generate_completions_as_xml(frame, act_tok)
    return completions_xml
| {
"repo_name": "glenngillen/dotfiles",
"path": ".vscode/extensions/ms-python.python-2021.5.842923320/pythonFiles/lib/python/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py",
"copies": "1",
"size": "9545",
"license": "mit",
"hash": -245404088064549760,
"line_mean": 36.4879032258,
"line_max": 120,
"alpha_frac": 0.5674174961,
"autogenerated": false,
"ratio": 4.68811394891945,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0036345778559234983,
"num_lines": 248
} |
"""An HTML5 Canvas backend for matplotlib.
Simon Ratcliffe (sratcliffe@ska.ac.za)
Ludwig Schwardt (ludwig@ska.ac.za)
Copyright (c) 2010-2013, SKA South Africa
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of SKA South Africa nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import division
import sys
import math
import webbrowser
import time
import thread
import numpy as np
from matplotlib._pylab_helpers import Gcf
from matplotlib.backend_bases import RendererBase, GraphicsContextBase, FigureManagerBase, FigureCanvasBase
from matplotlib.figure import Figure
from matplotlib.transforms import Affine2D
from matplotlib.path import Path
from matplotlib.colors import colorConverter, rgb2hex
from matplotlib.cbook import maxdict
from matplotlib.ft2font import FT2Font, LOAD_NO_HINTING
from matplotlib.font_manager import findfont
from matplotlib.mathtext import MathTextParser
from matplotlib import _png, is_interactive
import simple_server
import management_server
import uuid
from mplh5canvas import MANAGEMENT_PORT_BASE, MANAGEMENT_LIMIT, FIGURE_LIMIT
import logging
logger = logging.getLogger("mplh5canvas.backed_h5canvas")
# Mapping from matplotlib line-cap style names to HTML5 canvas lineCap values.
_capstyle_d = {'projecting' : 'square', 'butt' : 'butt', 'round': 'round',}
# Module-level figure counter (presumably incremented per new figure --
# usage is outside this chunk; verify).
figure_number = 0
# Registry of websocket port -> canvas for live figures; the special 'count'
# key records how many ports have been handed out (ports are not reused yet).
_figure_ports = {}
_figure_ports['count'] = 0
#_request_handlers = {}
_frame = ""
# Diagnostic switches (test mode / metrics reporting).
_test = False
_metrics = False
# Start a new management server once, at import time.
h5m = management_server.H5Manager(MANAGEMENT_PORT_BASE, MANAGEMENT_LIMIT)
# Base port to use for websocket connections. Each distinct management
# instance can handle 98 figures.
BASE_PORT = h5m.port + 1
def new_web_port():
    """Allocate and return the next websocket port for a new figure.
    TODO: needs to handle reuse of ports once figures are closed.
    """
    next_index = _figure_ports['count'] + 1
    _figure_ports['count'] = next_index
    return BASE_PORT + next_index
def register_web_server(port, canvas):
    """Expose *canvas* on *port*: register it with the management server and
    record it in the local port map."""
    h5m.add_figure(port, canvas)
    _figure_ports[port] = canvas
def deregister_web_server(port):
    """Remove the figure on *port* from the management server and the local
    port map."""
    h5m.remove_figure(port)
    _figure_ports.pop(port)
# not particularly intelligent as we can't reuse ports. some form of map required.
def mpl_to_css_color(color, alpha=None, isRGB=True):
    """Convert Matplotlib color spec (or rgb tuple + alpha) to CSS color string."""
    if not isRGB:
        # Resolve an arbitrary Matplotlib color spec to an RGB triple + alpha.
        r, g, b, alpha = colorConverter.to_rgba(color)
        color = (r, g, b)
    if alpha is None and len(color) == 4:
        # RGBA tuple: pull alpha out of the fourth component.
        alpha = color[3]
    if alpha is None:
        # Fully opaque -> compact hex form.
        return rgb2hex(color[:3])
    red, green, blue = color[0] * 255, color[1] * 255, color[2] * 255
    return 'rgba(%d, %d, %d, %.3g)' % (red, green, blue, alpha)
class WebPNG(object):
    """Very simple file-like object for use with the write_png method.
    Captures output that would have gone to a regular file so it can be
    post-processed, e.g. base64 encoded for embedding in a web page.
    """
    def __init__(self):
        # Accumulated PNG data (Python 2 byte string).
        self.buffer = ""
    def write(self, s):
        # File-like interface: append each chunk to the in-memory buffer.
        self.buffer = self.buffer + s
    def get_b64(self):
        """Return the buffered data encoded as base64."""
        import base64
        return base64.b64encode(self.buffer)
class H5Frame(object):
def __init__(self, frame_number=0, context_name='c'):
self._frame_number = frame_number
# the frame number in the current animated sequence
self._context_name = context_name
# the name of the context to use for drawing
self._content = ""
# a full frame of script ready for rendering
self._extra = ""
self._header = "frame_body_%s();" % self._context_name
self._custom_header = False
def _convert_obj(self, obj):
return (isinstance(obj, unicode) and repr(obj.replace("'","`"))[1:] or (isinstance(obj, float) and '%.2f' % obj or repr(obj)))
def __getattr__(self, method_name):
# when frame is called in .<method_name>(<argument>) context
def h5_method(*args):
self._content += '%s.%s(%s);\n' % (self._context_name, method_name, ','.join([self._convert_obj(obj) for obj in args]))
return h5_method
def __setattr__(self, prop, value):
# when frame properties are assigned to .<prop> = <value>
if prop.startswith('_'):
self.__dict__[prop] = value
return
self._content += '%s.%s=%s;\n' % (self._context_name, prop, self._convert_obj(value))
def moveTo(self, x, y):
self._content += '%s.%s(%.2f,%.2f);\n' % (self._context_name, "moveTo", x, y)
def lineTo(self, x, y):
self._content += '%s.%s(%.2f,%.2f);\n' % (self._context_name, "lineTo", x, y)
#self._content = self._content + self._context_name + ".lineTo(" + str(x) + "," + str(y) + ");\n"
# options for speed...
def dashedLine(self, x1, y1, x2, y2, dashes):
"""Draw dashed line from (x1, y1) to (x2, y2), given dashes structure, and return new dash offset."""
length = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
if length <= 0.0:
return dashes[0]
dash_length = np.sum(dashes[1])
# Wrap offset to fall in interval [-dash_length..0], and do one dash period extra to ensure dashed line has no gaps
offset, num_periods = -(dashes[0] % dash_length), int(length // dash_length) + 2
unit_x, unit_y = (x2 - x1) / length, (y2 - y1) / length
# The rest of the function can be implemented in Javascript instead, to compress the string being sent across the network
self.moveTo(x1, y1)
for n in xrange(num_periods):
for m, dash_step in enumerate(dashes[1]):
# Clip start of dash segment if it straddles (x1, y1)
if offset < 0.0 and (offset + dash_step) > 0.0:
dash_step += offset
offset = 0.0
# Clip end of dash segment if it straddles (x2, y2)
if offset < length and (offset + dash_step) > length:
dash_step = length - offset
# Advance to end of current dash segment
offset += dash_step
if offset >= 0.0 and offset <= length:
# Alternately draw dash and move to start of next dash
if m % 2 == 0:
self.lineTo(x1 + unit_x * offset, y1 + unit_y * offset)
else:
self.moveTo(x1 + unit_x * offset, y1 + unit_y * offset)
return dashes[0] + (length % dash_length)
def beginPath(self):
self._content += '%s.%s();\n' % (self._context_name, "beginPath")
def stroke(self):
self._content += '%s.%s();\n' % (self._context_name, "stroke")
def closePath(self):
self._content += '%s.%s();\n' % (self._context_name, "closePath")
def add_header(self, s, start=False):
if not self._custom_header:
self._custom_header = True
self._header = ""
if start: self._header = "%s\n" % s + self._header
else: self._header += "%s\n" % s
def write_extra(self, s):
self._extra += '%s\n' % s
def write(self, s):
self._content += '%s\n' % s
def get_frame(self):
return "function frame_body_%s() { %s }\n" % (self._context_name, self._content)
def get_frame_extra(self):
return "function frame_body_%s() { %s\n%s }\n" % (self._context_name, self._extra, self._content)
def get_header(self):
return "function frame_header() { %s }\n" % self._header
    def get_extra(self):
        """Return the raw extra (non-frame-body) script commands."""
        return self._extra
class RendererH5Canvas(RendererBase):
    """The renderer handles drawing/rendering operations.

    Translates matplotlib drawing commands into HTML5 canvas javascript
    directives which are written into the supplied context object.
    """
    # Font cache shared by all renderer instances (bounded dict from the
    # matplotlib helpers imported elsewhere in this file)
    fontd = maxdict(50)

    def __init__(self, width, height, ctx, dpi=72):
        self.width = width
        self.height = height
        self.dpi = dpi
        self.ctx = ctx
        self._image_count = 0
        # used to uniquely label each image created in this figure...
        # define the js context
        self.ctx.width = width
        self.ctx.height = height
        #self.ctx.textAlign = "center";
        self.ctx.textBaseline = "alphabetic"
        # Flip y so matplotlib's bottom-left origin maps to the canvas top-left
        self.flip = Affine2D().scale(1, -1).translate(0, height)
        self.mathtext_parser = MathTextParser('bitmap')
        # Cumulative profiling timers (seconds) for the draw_* entry points
        self._path_time = 0
        self._text_time = 0
        self._marker_time = 0
        self._sub_time = 0
        # Cached clip state so identical clip regions are not re-emitted
        self._last_clip = None
        self._last_clip_path = None
        self._clip_count = 0

    def _set_style(self, gc, rgbFace=None):
        """Apply fill/stroke colour, cap style and line width from *gc* to the js context."""
        ctx = self.ctx
        if rgbFace is not None:
            ctx.fillStyle = mpl_to_css_color(rgbFace, gc.get_alpha())
        ctx.strokeStyle = mpl_to_css_color(gc.get_rgb(), gc.get_alpha())
        if gc.get_capstyle():
            ctx.lineCap = _capstyle_d[gc.get_capstyle()]
        ctx.lineWidth = self.points_to_pixels(gc.get_linewidth())

    def _path_to_h5(self, ctx, path, transform, clip=None, stroke=True, dashes=(None, None)):
        """Iterate over a path and produce h5 drawing directives."""
        transform = transform + self.flip
        ctx.beginPath()
        current_point = None
        dash_offset, dash_pattern = dashes
        if dash_pattern is not None:
            # Dash parameters arrive in points; the canvas works in pixels
            dash_offset = self.points_to_pixels(dash_offset)
            dash_pattern = tuple([self.points_to_pixels(dash) for dash in dash_pattern])
        for points, code in path.iter_segments(transform, clip=clip):
            # Shift all points by half a pixel, so that integer coordinates are aligned with pixel centers instead of edges
            # This prevents lines that are one pixel wide and aligned with the pixel grid from being rendered as a two-pixel wide line
            # This happens because HTML Canvas defines (0, 0) as the *top left* of a pixel instead of the center,
            # which causes all integer-valued coordinates to fall exactly between pixels
            points += 0.5
            if code == Path.MOVETO:
                ctx.moveTo(points[0], points[1])
                current_point = (points[0], points[1])
            elif code == Path.LINETO:
                t = time.time()
                if (dash_pattern is None) or (current_point is None):
                    ctx.lineTo(points[0], points[1])
                else:
                    # dashedLine returns the new dash offset so the pattern
                    # continues seamlessly across consecutive segments
                    dash_offset = ctx.dashedLine(current_point[0], current_point[1], points[0], points[1], (dash_offset, dash_pattern))
                self._sub_time += time.time() - t
                current_point = (points[0], points[1])
            elif code == Path.CURVE3:
                ctx.quadraticCurveTo(*points)
                current_point = (points[2], points[3])
            elif code == Path.CURVE4:
                ctx.bezierCurveTo(*points)
                current_point = (points[4], points[5])
            else:
                # CLOSEPOLY and any other codes are ignored
                pass
        if stroke: ctx.stroke()

    def _do_path_clip(self, ctx, clip):
        """Restrict subsequent drawing to the rectangle (x0, y0, x1, y1) in *clip*."""
        self._clip_count += 1
        ctx.save()
        ctx.beginPath()
        ctx.moveTo(clip[0],clip[1])
        ctx.lineTo(clip[2],clip[1])
        ctx.lineTo(clip[2],clip[3])
        ctx.lineTo(clip[0],clip[3])
        ctx.clip()

    def draw_path(self, gc, path, transform, rgbFace=None):
        """Draw a Path instance, updating cached clip rectangle/path state as needed."""
        t = time.time()
        self._set_style(gc, rgbFace)
        clip = self._get_gc_clip_svg(gc)
        clippath, cliptrans = gc.get_clip_path()
        ctx = self.ctx
        if clippath is not None and self._last_clip_path != clippath:
            ctx.restore()
            ctx.save()
            self._path_to_h5(ctx, clippath, cliptrans, None, stroke=False)
            ctx.clip()
            self._last_clip_path = clippath
        if self._last_clip != clip and clip is not None and clippath is None:
            ctx.restore()
            self._do_path_clip(ctx, clip)
            self._last_clip = clip
        if clip is None and clippath is None and (self._last_clip is not None or self._last_clip_path is not None): self._reset_clip()
        # Unfilled, unhatched paths are clipped to the figure bounds
        if rgbFace is None and gc.get_hatch() is None:
            figure_clip = (0, 0, self.width, self.height)
        else:
            figure_clip = None
        self._path_to_h5(ctx, path, transform, figure_clip, dashes=gc.get_dashes())
        if rgbFace is not None:
            ctx.fill()
            ctx.fillStyle = '#000000'
        self._path_time += time.time() - t

    def _get_gc_clip_svg(self, gc):
        """Return the gc clip rectangle as (x0, y0, x1, y1) in canvas coords, or None."""
        cliprect = gc.get_clip_rectangle()
        if cliprect is not None:
            x, y, w, h = cliprect.bounds
            # Flip y from mpl (bottom-left origin) to canvas (top-left origin)
            y = self.height-(y+h)
            return (x,y,x+w,y+h)
        return None

    def draw_markers(self, gc, marker_path, marker_trans, path, trans, rgbFace=None):
        """Draw marker_path once at each vertex of *path*."""
        t = time.time()
        for vertices, codes in path.iter_segments(trans, simplify=False):
            if len(vertices):
                # The marker anchor is the last (x, y) pair of the segment
                x,y = vertices[-2:]
                self._set_style(gc, rgbFace)
                clip = self._get_gc_clip_svg(gc)
                ctx = self.ctx
                self._path_to_h5(ctx, marker_path, marker_trans + Affine2D().translate(x, y), clip)
                if rgbFace is not None:
                    ctx.fill()
                    ctx.fillStyle = '#000000'
        self._marker_time += time.time() - t

    def _slipstream_png(self, x, y, im_buffer, width, height):
        """Insert image directly into HTML canvas as base64-encoded PNG."""
        # Shift x, y (top left corner) to the nearest CSS pixel edge, to prevent resampling and consequent image blurring
        x = math.floor(x + 0.5)
        y = math.floor(y + 1.5)
        # Write the image into a WebPNG object
        f = WebPNG()
        _png.write_png(im_buffer, width, height, f)
        # Write test PNG as file as well
        #_png.write_png(im_buffer, width, height, 'canvas_image_%d.png' % (self._image_count,))
        # Extract the base64-encoded PNG and send it to the canvas
        uname = str(uuid.uuid1()).replace("-","") #self.ctx._context_name + str(self._image_count)
        # try to use a unique image name
        enc = "var canvas_image_%s = 'data:image/png;base64,%s';" % (uname, f.get_b64())
        s = "function imageLoaded_%s(ev) {\nim = ev.target;\nim_left_to_load_%s -=1;\nif (im_left_to_load_%s == 0) frame_body_%s();\n}\ncanv_im_%s = new Image();\ncanv_im_%s.onload = imageLoaded_%s;\ncanv_im_%s.src = canvas_image_%s;\n" % \
            (uname, self.ctx._context_name, self.ctx._context_name, self.ctx._context_name, uname, uname, uname, uname, uname)
        self.ctx.add_header(enc)
        self.ctx.add_header(s)
        # Once the base64 encoded image has been received, draw it into the canvas
        self.ctx.write("%s.drawImage(canv_im_%s, %g, %g, %g, %g);" % (self.ctx._context_name, uname, x, y, width, height))
        # draw the image as loaded into canv_im_%d...
        self._image_count += 1

    def _reset_clip(self):
        """Drop any active clip region and forget the cached clip state."""
        self.ctx.restore()
        self._last_clip = None
        self._last_clip_path = None

    #<1.0.0: def draw_image(self, x, y, im, bbox, clippath=None, clippath_trans=None):
    #1.0.0 and up: def draw_image(self, gc, x, y, im, clippath=None):
    #API for draw image changed between 0.99 and 1.0.0
    def draw_image(self, *args, **kwargs):
        """Draw an image; handles both the pre-1.0.0 and 1.0.0+ matplotlib signatures."""
        x, y, im = args[:3]
        try:
            h,w = im.get_size_out()
        except AttributeError:
            # 1.0.0+ passes gc first, shifting the positional arguments by one
            x, y, im = args[1:4]
            h,w = im.get_size_out()
        # BUG FIX: clippath_trans was previously never assigned, which raised
        # a NameError whenever a clip path was supplied; both values now come
        # from the keyword arguments (works for the 'has_key' py2 case too).
        clippath = kwargs.get('clippath')
        clippath_trans = kwargs.get('clippath_trans')
        if self._last_clip is not None or self._last_clip_path is not None: self._reset_clip()
        if clippath is not None:
            self._path_to_h5(self.ctx,clippath, clippath_trans, stroke=False)
            self.ctx.save()
            self.ctx.clip()
        (x,y) = self.flip.transform((x,y))
        im.flipud_out()
        rows, cols, im_buffer = im.as_rgba_str()
        self._slipstream_png(x, (y-h), im_buffer, cols, rows)
        if clippath is not None:
            self.ctx.restore()

    def _get_font(self, prop):
        """Return a sized FT2Font for *prop*, caching by both property hash and filename."""
        key = hash(prop)
        font = self.fontd.get(key)
        if font is None:
            fname = findfont(prop)
            font = self.fontd.get(fname)
            if font is None:
                font = FT2Font(str(fname))
                self.fontd[fname] = font
            self.fontd[key] = font
        font.clear()
        font.set_size(prop.get_size_in_points(), self.dpi)
        return font

    def draw_tex(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
        """Not implemented; logs an error so the user knows the element was dropped."""
        logger.error("Tex support is currently not implemented. Text element '%s' will not be displayed..." % s)

    def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
        """Draw the text instance *s* at (x, y) rotated by *angle* degrees."""
        if self._last_clip is not None or self._last_clip_path is not None: self._reset_clip()
        t = time.time()
        if ismath:
            self._draw_mathtext(gc, x, y, s, prop, angle)
            return
        angle = math.radians(angle)
        width, height, descent = self.get_text_width_height_descent(s, prop, ismath)
        # Shift the anchor so the baseline (not the descent box) sits at (x, y)
        x -= math.sin(angle) * descent
        y -= math.cos(angle) * descent
        ctx = self.ctx
        if angle != 0:
            ctx.save()
            ctx.translate(x, y)
            ctx.rotate(-angle)
            ctx.translate(-x, -y)
        font_size = self.points_to_pixels(prop.get_size_in_points())
        font_str = '%s %s %.3gpx %s, %s' % (prop.get_style(), prop.get_weight(), font_size, prop.get_name(), prop.get_family()[0])
        ctx.font = font_str
        # Set the text color, draw the text and reset the color to black afterwards
        ctx.fillStyle = mpl_to_css_color(gc.get_rgb(), gc.get_alpha())
        ctx.fillText(unicode(s), x, y)
        ctx.fillStyle = '#000000'
        if angle != 0:
            ctx.restore()
        # BUG FIX: accumulate like the other profiling timers instead of
        # overwriting, so the total reported in draw() is meaningful.
        self._text_time += time.time() - t

    def _draw_mathtext(self, gc, x, y, s, prop, angle):
        """Draw math text using matplotlib.mathtext."""
        # Render math string as an image at the configured DPI, and get the image dimensions and baseline depth
        rgba, descent = self.mathtext_parser.to_rgba(s, color=gc.get_rgb(), dpi=self.dpi, fontsize=prop.get_size_in_points())
        height, width, tmp = rgba.shape
        angle = math.radians(angle)
        # Shift x, y (top left corner) to the nearest CSS pixel edge, to prevent resampling and consequent image blurring
        x = math.floor(x + 0.5)
        y = math.floor(y + 1.5)
        ctx = self.ctx
        if angle != 0:
            ctx.save()
            ctx.translate(x, y)
            ctx.rotate(-angle)
            ctx.translate(-x, -y)
        # Insert math text image into stream, and adjust x, y reference point to be at top left of image
        self._slipstream_png(x, y - height, rgba.tostring(), width, height)
        if angle != 0:
            ctx.restore()

    def flipy(self):
        """True: y increases downwards in the canvas coordinate system."""
        return True

    def get_canvas_width_height(self):
        """Return the canvas width and height in display coords."""
        return self.width, self.height

    def get_text_width_height_descent(self, s, prop, ismath):
        """Return the extent and descent of *s* with font properties *prop*."""
        if ismath:
            image, d = self.mathtext_parser.parse(s, self.dpi, prop)
            w, h = image.get_width(), image.get_height()
        else:
            font = self._get_font(prop)
            font.set_text(s, 0.0, flags=LOAD_NO_HINTING)
            w, h = font.get_width_height()
            w /= 64.0  # convert from subpixels
            h /= 64.0
            d = font.get_descent() / 64.0
        return w, h, d

    def new_gc(self):
        """Return a fresh GraphicsContext for this backend."""
        return GraphicsContextH5Canvas()

    def points_to_pixels(self, points):
        """Convert points to display units (pixels) at this renderer's dpi."""
        # The standard desktop-publishing (Postscript) point is 1/72 of an inch
        return points/72.0 * self.dpi
class GraphicsContextH5Canvas(GraphicsContextBase):
    """
    The graphics context provides the color, line styles, etc.

    A backend can either wrap a native GC object (as the GTK backend does)
    and override the setter methods, or do all of the attribute mapping at
    the renderer level (as the postscript backend does), in which case no
    GC methods need to be overridden. This backend takes the latter
    approach: RendererH5Canvas._set_style reads cap style, line width and
    colors straight off the context, so the base class behaviour is enough.

    The base GraphicsContext stores colors as an RGB tuple on the unit
    interval, e.g. (0.5, 0.0, 1.0), which may need mapping to colors
    appropriate for the backend.
    """
    pass
########################################################################
#
# The following functions and classes are for pylab and implement
# window/figure managers, etc...
#
########################################################################
def draw_if_interactive():
    """In interactive mode, redraw and show the active figure (if any)."""
    if not is_interactive():
        return
    figManager = Gcf.get_active()
    if figManager is not None:
        figManager.show()
        show(block=False)
        # enforce a local show...
def show(block=True, layout='', open_plot=False):
"""
This show is typically called via pyplot.show.
In general usage a script will have a sequence of figure creation followed by a pyplot.show which
effectively blocks and leaves the figures open for the user.
We suspect this blocking is because the mainloop thread of the GUI is not setDaemon and thus halts
python termination.
To simulate this we create a non daemon dummy thread and instruct the user to use Ctrl-C to finish...
"""
Gcf.get_active().canvas.draw()
# update the current figure
# open the browser with the current active figure shown...
if not _test and open_plot:
try:
webbrowser.open_new_tab(h5m.url + "/" + str(layout))
except:
logger.warning("Failed to open figure page in your browser. Please browse to %s/%s" % (h5m.url,str(Gcf.get_active().canvas.figure.number)))
if block and not _test:
print "Showing figures. Hit Ctrl-C to finish script and close figures..."
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
def new_figure_manager(num, *args, **kwargs):
    """
    Create a new figure manager instance.

    If a main-level app must be created, this is the usual place to do it
    (see backend_wx, backend_wxagg and backend_tkagg for examples); not all
    GUIs require explicit instantiation of a main-level app.
    """
    fig_class = kwargs.pop('FigureClass', Figure)
    fig = fig_class(*args, **kwargs)
    canvas = FigureCanvasH5Canvas(fig)
    manager = FigureManagerH5Canvas(canvas, num)
    # Graft canvas helpers onto the figure instance so that fig.show() is
    # basically just a canvas refresh...
    fig.__dict__['show'] = canvas.draw
    fig.__dict__['close'] = canvas.close
    fig.__dict__['show_browser'] = canvas.show_browser
    return manager
class FigureCanvasH5Canvas(FigureCanvasBase):
    """
    The canvas the figure renders into. Calls the draw and print fig
    methods, creates the renderers, etc...
    Public attribute
      figure - A Figure instance
    Note GUI templates will want to connect events for button presses,
    mouse movements and key presses to functions that call the base
    class methods button_press_event, button_release_event,
    motion_notify_event, key_press_event, and key_release_event. See,
    eg backend_gtk.py, backend_wx.py and backend_tkagg.py

    Each canvas runs its own websocket server; connected browsers receive
    javascript frames (see draw) and send back commands that are dispatched
    to the handle_* methods (see parse_web_cmd).
    """
    def __init__(self, figure):
        # Enforce a global cap on concurrently served figures.
        # NOTE(review): returning from __init__ does not prevent instance
        # creation, so this leaves a half-initialised canvas behind —
        # confirm callers cope with that.
        if _figure_ports['count'] >= FIGURE_LIMIT:
            logger.warning("Figure limit of %i reached. Returning NULL figure" % FIGURE_LIMIT)
            return None
        FigureCanvasBase.__init__(self, figure)
        self.frame_count = 0
        self._user_event = None       # optional user callback for client events
        self._user_cmd_ret = None     # optional user callback for command results
        self._server_port = new_web_port()
        self._request_handlers = {}   # websocket request -> client address
        self._frame = None            # most recently rendered frame script
        self._header = ""
        self._home_x = {}             # per-axes original x limits (for 'home')
        self._home_y = {}             # per-axes original y limits (for 'home')
        self._zoomed = False
        self._panned = False
        self._first_frame = True
        self._custom_content = None
        self._width, self._height = self.get_width_height()
        # Flip y so figure coordinates match the canvas top-left origin
        self.flip = Affine2D().scale(1, -1).translate(0, self._height)
        logger.debug("Initialising figure of width: %i, height: %i" % (self._width, self._height))
        logger.debug("Creating canvas web server on port %i" % self._server_port)
        try:
            self._server = simple_server.WebSocketServer(('', self._server_port), self.web_socket_transfer_data, simple_server.WebSocketRequestHandler)
            self._thread = thread.start_new_thread(self._server.serve_forever, ())
            register_web_server(self._server_port, self)
        except Exception, e:
            logger.error("Failed to create webserver. (%s)" % str(e))
            sys.exit(1)
    def register_request_handler(self, request):
        """Track a newly connected websocket client."""
        self._request_handlers[request] = request.connection.remote_addr[0]
        # if we have a lurking frame, send it on
        if self._frame is not None:
            self.send_frame(self._header + self._frame_extra)
    def parse_web_cmd(self, s):
        """Decode a client message and dispatch it to the matching handle_* method."""
        if s is None:
            raise ValueError("Received empty web command - connection probably closed on client side")
        action = None
        try:
            # Assumed message shape: "<action> ... args='a,b,...'" followed by
            # two trailing characters — TODO confirm against the js client.
            action = s[1:s.find(" ")]
            args = s[s.find("args='")+6:-2].split(",")
            method = getattr(self, "handle_%s" % action)
            method(*args)
        except AttributeError:
            logger.warning("Cannot find request method handle_%s", action)
    def show_browser(self):
        """Render the current figure and open it in a new browser tab."""
        self.draw()
        webbrowser.open_new_tab(h5m.url + "/" + str(self.figure.number))
    def handle_user_cmd_ret(self, *args):
        """Forward a client command result to the registered user callback, if any."""
        if self._user_cmd_ret is not None:
            try:
                self._user_cmd_ret(*args)
            except Exception, e:
                logger.warning("User cmd ret exception %s" % str(e))
    def handle_user_event(self, *args):
        """Forward a client event to the registered user callback, if any."""
        if self._user_event is not None:
            try:
                self._user_event(*args)
            except Exception, e:
                logger.warning("User event exception %s" % str(e))
        else: logger.info("User event called but no callback registered to handle it...")
    def handle_click(self, x, y, button):
        """Translate a client click into matplotlib press+release events."""
        self.button_press_event(float(x), float(y), int(button))
        self.button_release_event(float(x),float(y),int(button))
        # currently we do not distinguish between press and release on the javascript side. So call both :)
    def handle_resize(self, width, height):
        """Resize the figure to match the client's reported canvas size (pixels)."""
        width, height = float(width), float(height)
        if math.isnan(width) or math.isnan(height): ## XXXX: more fixin needed
            ## Some clientside horror happened? Skip it
            print "E: NaN resize (%r, %r)" % (width, height)
            return
        # Convert the pixel size to inches at the figure's dpi
        width_in = width / self.figure.dpi
        height_in = height / self.figure.dpi
        self.figure.set_size_inches(width_in, height_in)
        self.draw()
        # set the figure and force a redraw...
    def handle_close(self, *args):
        """Close the figure and stop this canvas's web server."""
        self.figure.close()
        self._stop_server()
    def handle_home(self, *args):
        # reset the plot to it's home coordinates
        for i in self._home_x.keys():
            self.figure.axes[i].set_xlim(self._home_x[i][0], self._home_x[i][1])
            self.figure.axes[i].set_ylim(self._home_y[i][0], self._home_y[i][1])
        self._zoomed = False
        self._panned = False
        self.draw()
    def calculate_transform(self, ax, x0, y0, x1, y1):
        # convert pixel coordinates into data coordinates
        inverse = self.figure.axes[int(ax)].transData.inverted()
        lastx, lasty = inverse.transform_point((float(x0), float(y0)))
        x, y = inverse.transform_point((float(x1), float(y1)))
        return (lastx, lasty, x, y)
    def preserve_home(self, ax):
        """Remember the axes limits before the first zoom/pan, for handle_home."""
        ax = int(ax)
        if not (self._zoomed or self._panned):
            self._home_x[ax] = self.figure.axes[ax].get_xlim()
            self._home_y[ax] = self.figure.axes[ax].get_ylim()
    def handle_pan(self, ax, x0, y0, x1, y1):
        """Pan axes *ax* by the data-space delta between two client points."""
        ax = int(ax)
        self.preserve_home(ax)
        self._panned = True
        (lastx, lasty, x, y) = self.calculate_transform(ax, x0, y0, x1, y1)
        xdiff = lastx - x
        ydiff = y - lasty
        (x0,x1) = self.figure.axes[ax].get_xlim()
        (y0,y1) = self.figure.axes[ax].get_ylim()
        self.figure.axes[ax].set_xlim((x0+xdiff, x1+xdiff))
        self.figure.axes[ax].set_ylim((y0+ydiff, y1+ydiff))
        self.draw()
    def handle_zoom(self, ax, x0, y0, x1, y1):
        """Zoom axes *ax* to the rectangle dragged by the client, clamped to the current limits."""
        ax = int(ax)
        self.preserve_home(ax)
        self._zoomed = True
        (lastx, lasty, x, y) = self.calculate_transform(ax, x0, y0, x1, y1)
        x0, y0, x1, y1 = self.figure.axes[ax].viewLim.frozen().extents
        Xmin,Xmax=self.figure.axes[ax].get_xlim()
        Ymin,Ymax=self.figure.axes[ax].get_ylim()
        twinx, twiny = False, False
        # need to figure out how to detect twin axis here TODO
        if twinx:
            x0, x1 = Xmin, Xmax
        else:
            # Order the selection, then clamp it to the axis range; the second
            # branch handles inverted (decreasing) axes
            if Xmin < Xmax:
                if x<lastx: x0, x1 = x, lastx
                else: x0, x1 = lastx, x
                if x0 < Xmin: x0=Xmin
                if x1 > Xmax: x1=Xmax
            else:
                if x>lastx: x0, x1 = x, lastx
                else: x0, x1 = lastx, x
                if x0 > Xmin: x0=Xmin
                if x1 < Xmax: x1=Xmax
        if twiny:
            y0, y1 = Ymin, Ymax
        else:
            if Ymin < Ymax:
                if y<lasty: y0, y1 = y, lasty
                else: y0, y1 = lasty, y
                if y0 < Ymin: y0=Ymin
                if y1 > Ymax: y1=Ymax
            else:
                if y>lasty: y0, y1 = y, lasty
                else: y0, y1 = lasty, y
                if y0 > Ymin: y0=Ymin
                if y1 < Ymax: y1=Ymax
        self.figure.axes[ax].set_xlim((x0, x1))
        self.figure.axes[ax].set_ylim((y0, y1))
        self.draw()
    def deregister_request_handler(self, request):
        """Forget a disconnected websocket client."""
        del self._request_handlers[request]
    def web_socket_transfer_data(self, request):
        """Per-connection loop: receive client commands until the connection dies."""
        self.register_request_handler(request)
        while True:
            if request.client_terminated:
                self.deregister_request_handler(request)
                return
            try:
                line = request.ws_stream.receive_message()
                logger.debug("Received web cmd: %s" % line)
                self.parse_web_cmd(line)
            except Exception, e:
                logger.exception("Caught exception. Removing registered handler")
                self.deregister_request_handler(request)
                return
    def close(self):
        """Shut down this canvas's web server."""
        self._stop_server()
    def _stop_server(self):
        logger.debug("Stopping canvas web server...")
        self._server.shutdown()
        deregister_web_server(self._server_port)
    def draw(self, ctx_override='c', *args, **kwargs):
        """
        Draw the figure using the renderer

        Renders the figure into a javascript frame and pushes it to all
        connected clients.
        """
        ts = time.time()
        width, height = self.get_width_height()
        ctx = H5Frame(context_name=ctx_override)
        # the context to write the js in...
        renderer = RendererH5Canvas(width, height, ctx, dpi=self.figure.dpi)
        ctx.write_extra("resize_canvas(id," + str(width) + "," + str(height) + ");")
        ctx.write_extra("native_w[id] = " + str(width) + ";")
        ctx.write_extra("native_h[id] = " + str(height) + ";")
        #ctx.write("// Drawing frame " + str(self.frame_count))
        #ctx.write(ctx_override + ".width = " + ctx_override + ".width;")
        # clear the canvas...
        t = time.time()
        self.figure.draw(renderer)
        logger.debug("Render took %s s" % (time.time() - t))
        logger.debug("Path time: %s, Text time: %s, Marker time: %s, Sub time: %s" % (renderer._path_time, renderer._text_time, renderer._marker_time, renderer._sub_time))
        self.frame_count+=1
        # Publish each axes' bounding box and data limits so the client can
        # map mouse positions back to axes
        for i,ax in enumerate(self.figure.axes):
            corners = ax.bbox.corners()
            bb_str = ""
            for corner in corners: bb_str += str(corner[0]) + "," + str(corner[1]) + ","
            ctx.add_header("ax_bb[%d] = [%s];" % (i, bb_str[:-1]))
            datalim_str = ','.join([('%s' % (dl,)) for dl in ax.axis()])
            ctx.add_header("ax_datalim[%d] = [%s];" % (i, datalim_str))
        if renderer._image_count > 0:
            # The client waits until all images have loaded before drawing
            ctx.add_header("var im_left_to_load_%s = %i;" % (ctx._context_name, renderer._image_count), start=True)
        else:
            ctx.add_header("frame_body_%s();" % ctx._context_name)
        # if no image we can draw the frame body immediately..
        self._header = ctx.get_header()
        self._frame = ctx.get_frame()
        self._frame_extra = ctx.get_frame_extra()
        # additional script commands needed for handling functions other than drawing
        self._width, self._height = self.get_width_height()
        # redo my height and width...
        self.send_frame(self._header + self._frame_extra)
        # if we have a frame ready, send it on...
        if self._first_frame:
            h5m.tell()
            self._first_frame = False
        logger.debug("Overall draw took %s s, with %i clipcount" % ((time.time() - ts), renderer._clip_count))
    def send_cmd(self, cmd):
        """Send a string of javascript to be executed on the client side of each connected user."""
        self.send_frame("/*exec_user_cmd*/ %s" % cmd)
    def send_frame(self, frame):
        """Broadcast *frame* (a javascript string) to every connected client."""
        for r in self._request_handlers.keys():
            try:
                r.ws_stream.send_message(frame.decode('utf-8'))
            except AttributeError:
                # connection has gone
                logger.info("Connection %s has gone. Closing..." % r.connection.remote_addr[0])
            except Exception, e:
                logger.warning("Failed to send message (%s)" % str(e))
    def show(self):
        """Not used in this backend; frames are pushed to clients by draw()."""
        logger.info("Show called... Not implemented in this function...")
    # File types supported by print_figure (via print_js below)
    filetypes = {'js': 'HTML5 Canvas'}
    def print_js(self, filename, *args, **kwargs):
        """Render the figure's javascript drawing commands into *filename*."""
        logger.debug("Print js called with args %s and kwargs %s" % (str(args), str(kwargs)))
        width, height = self.get_width_height()
        writer = open(filename, 'w')
        renderer = RendererH5Canvas(width, height, writer, dpi=self.figure.dpi)
        self.figure.draw(renderer)
    def get_default_filetype(self):
        """Default extension for saved output."""
        return 'js'
class FigureManagerH5Canvas(FigureManagerBase):
    """
    Wrap everything up into a window for the pylab interface.

    For non interactive backends, the base class does all the work.
    """
    def __init__(self, canvas, num):
        self.canvas = canvas
        FigureManagerBase.__init__(self, canvas, num)

    def destroy(self, *args):
        """Stop the canvas web server when this manager is torn down."""
        self.canvas._stop_server()
        logger.debug("Destroy called on figure manager")

    def show(self):
        """Log the call; this backend pushes frames to clients on draw()."""
        logger.debug("Show called for figure manager")


FigureManager = FigureManagerH5Canvas
| {
"repo_name": "HoverHell/mplh5canvas",
"path": "mplh5canvas/backend_h5canvas.py",
"copies": "1",
"size": "36543",
"license": "bsd-3-clause",
"hash": -4258252920205791700,
"line_mean": 41.4425087108,
"line_max": 755,
"alpha_frac": 0.5934105027,
"autogenerated": false,
"ratio": 3.617044442244878,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9676807775338196,
"avg_score": 0.0067294339213360655,
"num_lines": 861
} |
'''An HTML conversion action for the Coda Plugin Skeleton'''
import cp_actions as cp
from markdown import markdown
from textile import textile
from html2text import html2text
from html2textile import html2textile
from rest2html import rest2html
from html2rest_wrapper import html2rest
def act(controller, bundle, options):
    '''
    Required action method

    Converts the current selection (or the whole document when nothing is
    selected) from one markup language to another via an HTML intermediate.
    '''
    context = cp.get_context(controller)
    from_lang = cp.get_option(options, 'from', 'markdown').lower()
    to_lang = cp.get_option(options, 'to', 'html').lower()
    selection, range = cp.selection_and_range(context)
    # Fall back to the entire document when there is no selection
    if range.length == 0:
        selection = context.string()
        range = cp.new_range(0, len(selection))
    # Stage 1: convert the source markup to HTML
    to_html = {
        'markdown': markdown,
        'textile': textile,
        'rest': rest2html,
        'html': lambda markup: markup,
    }
    if from_lang not in to_html:
        return
    html = to_html[from_lang](selection)
    # Stage 2: convert the HTML to the requested target markup
    from_html = {
        'markdown': html2text,
        'textile': html2textile,
        'rest': html2rest,
        'html': lambda markup: markup,
    }
    if to_lang not in from_html:
        return
    text = from_html[to_lang](html)
cp.insert_text(context, text, range) | {
"repo_name": "bobthecow/ManipulateCoda",
"path": "src/Support/Scripts/HTMLConvert.py",
"copies": "1",
"size": "1423",
"license": "mit",
"hash": -7521880641688528000,
"line_mean": 25.8679245283,
"line_max": 66,
"alpha_frac": 0.6317638791,
"autogenerated": false,
"ratio": 3.696103896103896,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4827867775203896,
"avg_score": null,
"num_lines": null
} |
""" An HTTP proxy that routes requests through a number of Tor circuits. """
import logging
import sys
from argparse import ArgumentParser
from shutil import rmtree
from tempfile import mkdtemp
from time import sleep
from miproxy.proxy import AsyncMitmProxy
from proctor.vendor.exit import handle_exit
LOG_FORMAT = '%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s'
def get_args_parser():
    """Build the ArgumentParser shared by the proctor entry points.

    Returns an ArgumentParser configured with the proxy and Tor-swarm
    options; callers may add further arguments before parsing (see
    parse_args).
    """
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('-d', '--work-dir', help='Working directory')
    parser.add_argument('-p', '--port', type=int, default=8080,
                        help='Proxy server listening port')
    parser.add_argument('-s', '--base-socks-port', type=int, default=19050,
                        help='Base socks port for the Tor processes')
    parser.add_argument('-c', '--base-control-port', type=int, default=18118,
                        help='Base control port for the Tor processes')
    parser.add_argument('-n', '--instances', type=int, default=2,
                        help='Number of Tor processes to launch')
    parser.add_argument('-m', '--max-use', type=int,
                        help='Max number of requests before replacing '
                             'Tor processes')
    # BUG FIX: the -t help text was a copy-paste of the -n help ('Number of
    # Tor processes to launch'); it now describes the actual option (it is
    # passed to run_proxy as conn_time_avg_max).
    parser.add_argument('-t', '--max-conn-time', type=float, default=2,
                        help='Max average connection time (in seconds) '
                             'before replacing Tor processes')
    return parser
def parse_args():
    """Parse the proctor command line, adding log-level control to the shared parser."""
    parser = get_args_parser()
    levels = ('CRITICAL', 'ERROR', 'WARN', 'INFO', 'DEBUG')
    parser.add_argument('-l', '--loglevel', default='INFO', choices=levels,
                        help='Display messages above this log level')
    return parser.parse_args()
def run_proxy(port, base_socks_port, base_control_port, work_dir,
              num_instances, sockets_max, **kwargs):
    """Start a swarm of Tor processes and serve an HTTP proxy on *port*.

    Blocks serving requests until interrupted; extra keyword arguments are
    passed through to TorSwarm.
    """
    # Imported here so that the logging module could be initialized by another
    # script that would import from the present module. Not sure that's the
    # best way to accomplish this though.
    from .tor import TorSwarm
    from .proxy import tor_proxy_handler_factory
    log = logging.getLogger(__name__)
    proxy = None
    tor_swarm = None
    def kill_handler():
        # Invoked by handle_exit on interruption: close the proxy socket
        # first, then always stop the Tor processes.
        log.warn('Interrupted, stopping server')
        try:
            if proxy:
                proxy.server_close()
        finally:
            if tor_swarm is not None:
                tor_swarm.stop()
    with handle_exit(kill_handler):
        tor_swarm = TorSwarm(base_socks_port, base_control_port, work_dir,
                             sockets_max, **kwargs)
        tor_instances = tor_swarm.start(num_instances)
        log.debug('Waiting for at least one connected Tor instance...')
        # Poll until at least one Tor instance connects; bail out if every
        # instance has already terminated.
        while not [t for t in tor_instances if t.connected]:
            if len(list(i for i in tor_instances if not i.terminated)) == 0:
                log.critical('No alive Tor instance left. Bailing out.')
                sys.exit(1)
            sleep(0.25)
        handler_factory = tor_proxy_handler_factory(tor_swarm)
        proxy = AsyncMitmProxy(server_address=('', port),
                               RequestHandlerClass=handler_factory)
        log.info('Starting proxy server on port %s' % port)
        proxy.serve_forever()
def main():
    """Script entry point: configure logging, run the proxy, clean up temp dirs."""
    args = parse_args()
    # Use the requested working directory, or a throwaway temporary one
    work_dir = args.work_dir or mkdtemp()
    logging.basicConfig(format=LOG_FORMAT,
                        level=getattr(logging, args.loglevel))
    try:
        run_proxy(args.port, args.base_socks_port, args.base_control_port,
                  work_dir, args.instances, args.max_use,
                  conn_time_avg_max=args.max_conn_time)
    finally:
        # Only remove the working directory if we created it ourselves
        if not args.work_dir:
            rmtree(work_dir)


if __name__ == '__main__':
    main()
| {
"repo_name": "ncadou/proctor",
"path": "proctor/scripts.py",
"copies": "1",
"size": "3739",
"license": "bsd-3-clause",
"hash": 3191877486931096000,
"line_mean": 37.1530612245,
"line_max": 79,
"alpha_frac": 0.5982883124,
"autogenerated": false,
"ratio": 4.05971769815418,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.515800601055418,
"avg_score": null,
"num_lines": null
} |
"""An HTTP proxy that supports IPv6 as well as the HTTP CONNECT method, among
other things."""
# Standard library imports
import socket
import thread
import select
__version__ = '0.1.0 Draft 1'
# Number of bytes requested per socket recv() call
BUFFER_LENGTH = 8192
# Proxy-agent identification string sent in CONNECT responses
VERSION = 'Python Proxy/{}'.format(__version__)
# HTTP version advertised in responses generated by the proxy itself
HTTP_VERSION = 'HTTP/1.1'
class ConnectionHandler(object):
    """Handles connections between the HTTP client and HTTP server."""
    def __init__(self, connection, _, timeout):
        # connection: accepted client socket; second argument (the client
        # address tuple from accept()) is unused
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.target = None
        method, path, protocol = self.get_base_header()
        if method == 'CONNECT':
            self.method_connect(path)
        else:
            self.method_others(method, path, protocol)
    def get_base_header(self):
        """Return a tuple of (method, path, protocol) from the received
        message."""
        # Read until the first request line is complete.
        # NOTE(review): if the client closes before sending a newline,
        # recv() returns '' and this loops forever — confirm.
        while 1:
            self.client_buffer += self.client.recv(BUFFER_LENGTH)
            end = self.client_buffer.find('\n')
            if end != -1:
                break
        print '{}'.format(self.client_buffer[:end])
        # Split the request line into its parts; keep the remainder buffered
        data = (self.client_buffer[:end+1]).split()
        self.client_buffer = self.client_buffer[end+1:]
        return data
    def method_connect(self, path):
        """Handle HTTP CONNECT messages."""
        # Open a tunnel to the requested host, acknowledge to the client,
        # then relay bytes in both directions
        self._connect_target(path)
        self.client.send('{http_version} 200 Connection established\n'
                         'Proxy-agent: {version}\n\n'.format(
                             http_version=HTTP_VERSION,
                             version=VERSION))
        self.client_buffer = ''
        self._read_write()
    def method_others(self, method, path, protocol):
        """Handle all non-HTTP CONNECT messages."""
        # Strip the 'http://' scheme prefix, then split host from path
        path = path[7:]
        i = path.find('/')
        host = path[:i]
        path = path[i:]
        self._connect_target(host)
        # Forward the rewritten request line plus any already-buffered data
        self.target.send('{method} {path} {protocol}\n{client_buffer}'.format(
            method=method,
            path=path,
            protocol=protocol,
            client_buffer=self.client_buffer))
        self.client_buffer = ''
        self._read_write()
    def _connect_target(self, host):
        """Create a connection to the HTTP server specified by *host*."""
        # Extract an explicit port if present, defaulting to 80
        i = host.find(':')
        if i != -1:
            port = int(host[i+1:])
            host = host[:i]
        else:
            port = 80
        # getaddrinfo also resolves the right address family (IPv4/IPv6)
        (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
        self.target = socket.socket(soc_family)
        self.target.connect(address)
    def _read_write(self):
        """Read data from client connection and forward to server
        connection."""
        # select() blocks up to 3s per iteration, so timeout/3 idle
        # iterations approximate self.timeout seconds without traffic
        time_out_max = self.timeout/3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
                    data = in_.recv(BUFFER_LENGTH)
                    # Forward whatever arrived to the opposite endpoint
                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    if data:
                        out.send(data)
                        count = 0
            if count == time_out_max:
                break
        self.client.close()
        self.target.close()
def start_server(host='localhost', port=8080, ipv_6=False, timeout=60,
                 handler=ConnectionHandler):
    """Start the HTTP proxy server.

    Binds to (host, port) — IPv6 when *ipv_6* is true — then accepts
    connections forever, spawning a new thread running *handler* for
    each accepted client.
    """
    if ipv_6:
        soc_type = socket.AF_INET6
    else:
        soc_type = socket.AF_INET
    soc = socket.socket(soc_type)
    soc.bind((host, port))
    print 'Serving on {0}:{1}.'.format(host, port)
    soc.listen(0)
    while 1:
        # handler is invoked as handler(conn, addr, timeout) in a new thread.
        thread.start_new_thread(handler, soc.accept()+(timeout,))
# Run the proxy with default settings when executed as a script.
if __name__ == '__main__':
    start_server()
| {
"repo_name": "jeffknupp/kickstarter_video_two",
"path": "proxy.py",
"copies": "1",
"size": "3925",
"license": "apache-2.0",
"hash": 1161910399718794800,
"line_mean": 31.7083333333,
"line_max": 78,
"alpha_frac": 0.5299363057,
"autogenerated": false,
"ratio": 4.046391752577319,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00030864197530864197,
"num_lines": 120
} |
# ANI2102A16_Tripod.py | Python programming with Maya | coding=utf-8
# Example of generating the hierarchical skeleton structure of a character made of several animation joints.
# The character is a 'tripod', a creature whose skeleton has a head and three legs.
# Script parameters.
offset = 10
angle = 45
ratio = 0.618
# 1. Create an animation joint connected to no other joint.
# Clear the current selection so the next joint becomes a new hierarchy root.
maya.cmds.select(deselect=True)
# Create a joint at the scene origin.
maya.cmds.joint(position=(0, 0, 0), name='Origin')
# 2. Create a chain of animation joints for the character's spine and a ground anchor point.
# Clear the current selection so the next joint becomes a new hierarchy root.
maya.cmds.select(deselect=True)
# Create the character's hierarchy root (anchor point).
maya.cmds.joint(position=(0, 0, 0), relative=True, name='Tripod')
# Create a joint for the character's pelvis.
maya.cmds.joint(position=(0, offset*ratio, 0), relative=True, name='TripodPelvis')
# Create a joint for the character's spine.
maya.cmds.joint(position=(0, offset*ratio**1, 0), relative=True, name='TripodSpine')
# Create a joint for the character's neck.
maya.cmds.joint(position=(0, offset*ratio**2, 0), relative=True, name='TripodNeck')
# Create a joint for the character's head.
maya.cmds.joint(position=(0, offset*ratio**3, 0), relative=True, name='TripodHead')
# 3. Create a chain of animation joints for the character's legs.
# Clear the current selection so the next joint becomes a new hierarchy root.
maya.cmds.select(deselect=True)
# Size proportions determining the position of each joint in the chain.
listPosition = [0, offset*ratio, offset*ratio, offset*ratio, offset*ratio]
# Rotations determining the angle between consecutive joints in the chain.
listRotation = [0, angle, -angle, -angle, -angle]
# Loop over each of the three legs.
for indexLeg in range(3):
    # Loop to generate the joints of each leg.
    for indexJoint in range(len(listPosition)):
        maya.cmds.joint(name='TripodLeg%s%s' % (indexLeg+1, indexJoint+1))
        maya.cmds.rotate(listRotation[indexJoint], rotateZ=True, relative=True, objectSpace=True)
        maya.cmds.move (listPosition[indexJoint], moveX=True, relative=True, objectSpace=True)
    # Clear the current selection so the next joint becomes a new hierarchy root.
    maya.cmds.select(deselect=True)
# 4. Transform the three legs and connect them with the rest of the character's body.
# Query the pelvis position.
positionPelvis = maya.cmds.xform('TripodPelvis', query=True, translation=True, worldSpace=True)
# Assign the pelvis position to each leg.
maya.cmds.xform('TripodLeg11', worldSpace=True, translation=positionPelvis)
maya.cmds.xform('TripodLeg21', worldSpace=True, translation=positionPelvis)
maya.cmds.xform('TripodLeg31', worldSpace=True, translation=positionPelvis)
# Orient the three legs (0, 210 and 330 degrees around Y).
maya.cmds.xform('TripodLeg11', worldSpace=True, rotation=(0, 90, 0))
maya.cmds.xform('TripodLeg21', worldSpace=True, rotation=(0, 30*7, 0))
maya.cmds.xform('TripodLeg31', worldSpace=True, rotation=(0, 30*11, 0))
# Connect each leg to the pelvis.
maya.cmds.connectJoint('TripodLeg11', 'TripodPelvis', parentMode=True)
maya.cmds.connectJoint('TripodLeg21', 'TripodPelvis', parentMode=True)
maya.cmds.connectJoint('TripodLeg31', 'TripodPelvis', parentMode=True)
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module09/EXE03/ANI2012A17_Tripod.py",
"copies": "1",
"size": "3705",
"license": "mit",
"hash": 8816204246977481000,
"line_mean": 44.3209876543,
"line_max": 122,
"alpha_frac": 0.7676382457,
"autogenerated": false,
"ratio": 2.795887281035796,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4063525526735796,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Attribute.py | Programmation Python avec Maya | coding=utf-8
# Exemples de manipulations d'attributs d'animation assignés à des noeuds dans une scène Maya.
def attribute_has(nodeName, attributeName):
    """Return True when node *nodeName* carries an attribute named
    *attributeName*, False otherwise."""
    plug = "%s.%s" % (nodeName, attributeName)
    return bool(maya.cmds.objExists(plug))
def attribute_add(nodeName, attributeName):
    """Add a new numeric attribute to a node.

    No-op (with a console message) when the attribute already exists.
    """
    # Only add the attribute if it is not already present on the node.
    if not maya.cmds.objExists("%s.%s" % (nodeName, attributeName)):
        # Add a bounded attribute with default 0.0 in [0.0, 100.0].
        maya.cmds.addAttr(nodeName, longName=attributeName, minValue=0.0, maxValue=100.0, defaultValue=0.0)
    else:
        print u"<fonction 'attribute_add' annulée : l'attribut '%s' existe déjà sur le noeud '%s'>" % (attributeName, nodeName)
def attribute_remove(nodeName, attributeName):
    """Delete an attribute assigned to a node.

    No-op (with a console message) when the attribute does not exist.
    """
    # Only delete the attribute if it is actually present on the node.
    if maya.cmds.objExists("%s.%s" % (nodeName, attributeName)):
        # Delete the attribute.
        maya.cmds.deleteAttr(nodeName, attribute=attributeName)
    else:
        print u"<fonction 'attribute_remove' annulée : l'attribut '%s' n'existe pas sur le noeud '%s'>" % (attributeName, nodeName)
def attribute_read(nodeName, attributeName):
    """Read the value of an attribute assigned to a node.

    Returns the attribute value, or None (with a console message) when
    the attribute does not exist.
    """
    # Validate that the attribute exists on the node.
    if maya.cmds.objExists("%s.%s" % (nodeName, attributeName)):
        # Fetch and return the attribute value.
        return maya.cmds.getAttr("%s.%s" % (nodeName, attributeName))
    else:
        print u"<fonction 'attribute_read' annulée : l'attribut '%s' n'existe pas sur le noeud '%s'>" % (attributeName, nodeName)
        return None
def attribute_write(nodeName, attributeName, attributeValue):
    """Write *attributeValue* into an attribute assigned to a node.

    No-op (with a console message) when the attribute does not exist.
    """
    # Validate that the attribute exists on the node.
    if maya.cmds.objExists("%s.%s" % (nodeName, attributeName)):
        # Assign the new attribute value.
        maya.cmds.setAttr("%s.%s" % (nodeName, attributeName), attributeValue)
    else:
        print u"<fonction 'attribute_write' annulée : l'attribut '%s' n'existe pas sur le noeud '%s'>" % (attributeName, nodeName)
def attribute_connect(nodeName1, attributeName1, nodeName2, attributeName2):
"""fonction pour créer une connection entre deux attributs de deux noeuds différents"""
attribute1 = "%s.%s" % (nodeName1, attributeName1)
attribute2 = "%s.%s" % (nodeName2, attributeName2)
# valider si les attributs existent déjà sur les deux objets
if not maya.cmds.objExists(attribute1):
print u"<fonction 'attribute_connect' annulée : l'attribut '%s' n'existe pas sur le noeud '%s'>" % (attributeName1, nodeName1)
if not maya.cmds.objExists(attribute2):
print u"<fonction 'attribute_connect' annulée : l'attribut '%s' n'existe pas sur le noeud '%s'>" % (attributeName2, nodeName2)
# création d'une connexion entre les deux attributs
maya.cmds.connectAttr(attribute1, attribute2)
print "\n<début de l'exécution>\n"
# création de deux primitives géométriques (cube et sphère)
newCube = maya.cmds.polyCube();
newSphere = maya.cmds.polySphere()
# aller chercher le premier noeud des deux primitves géométriques (noeud de transformation)
node1 = newCube[0]
node2 = newSphere[0]
# 1. définition du nom d'un nouvel attribut
myAttributeName = 'customAttributeName'
# 2. tentative de manipulation d'un attribut inexistant
attribute_read(node1, myAttributeName)
attribute_write(node1, myAttributeName, 0.0)
attribute_remove(node1, myAttributeName)
print u"<le noeud '%s' possède l'attribut '%s' ? %s>" % (node1, myAttributeName, attribute_has(node1, myAttributeName))
# 3. modifier un attribut de transformation
attribute_write(node1, 'translateZ', 5.0)
attribute_write(node2, 'translateZ', -5.0)
# 4. ajouter un nouvel attribut sur le cube
attribute_add(node1, myAttributeName)
# 5. ajouter un nouvel attribut sur la sphère
attribute_add(node2, myAttributeName)
# 6. supprimer l'attribut sur la sphère
attribute_remove(node2, myAttributeName)
# 7. ajouter un nouvel attribut sur le cube
attribute_add(node1, myAttributeName)
# 8. ajouter un nouvel attribut sur la sphère
attribute_add(node2, myAttributeName)
# 9. valider l'existence de l'attribut sur les deux objets
print u"<le noeud '%s' possède l'attribut '%s' ? %s>" % (node1, myAttributeName, attribute_has(node1, myAttributeName))
print u"<le noeud '%s' possède l'attribut '%s' ? %s>" % (node2, myAttributeName, attribute_has(node2, myAttributeName))
# 10. écrire une valeur numérique dans l'attribut du cube
attribute_write(node1, myAttributeName, 12.3)
# 11. écrire une nouvelle valeur numérique dans l'attribut du cube
attribute_write(node1, myAttributeName, 45.6)
# 12. écrire une valeur numérique dans l'attribut de la sphère
attribute_write(node2, myAttributeName, 78.9)
# 13. connecter la translation en X du cube sur la translation en Y de la sphère
attribute_connect(node1, 'translateX', node2, 'translateY')
# 14. connecter la translation en Z de la sphère sur la rotation en Y du cube
attribute_connect(node2, 'translateZ', node1, 'rotateY')
# 15. connecter le nouvel attribut des deux objets
attribute_connect(node1, myAttributeName, node2, myAttributeName)
# 16. lire la valeur de l'attribut sur les deux objets
print u"<la valeur de l'attribut '%s' du noeud '%s' est %s>" % (myAttributeName, node1, attribute_read(node1, myAttributeName))
print u"<la valeur de l'attribut '%s' du noeud '%s' est %s>" % (myAttributeName, node2, attribute_read(node2, myAttributeName))
print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE05/ANI2012A17_Attribute.py",
"copies": "1",
"size": "5920",
"license": "mit",
"hash": -3631828363795796000,
"line_mean": 40.8714285714,
"line_max": 130,
"alpha_frac": 0.7386557489,
"autogenerated": false,
"ratio": 2.8511673151750974,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4089823064075097,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Collection.py | Python programming with Maya | coding=utf-8
# Examples of manipulating different collection types (tuple, list and dictionary).
import sys
print "\n<début de l'exécution>\n"
def enumerate(sequence):
    """Print every element of *sequence* to the console, one per line.

    NOTE(review): this shadows the built-in enumerate() for the rest of
    the script — consider renaming.
    """
    print "%s" % str(sequence)
    count = len(sequence)
    print "\n<enumerate>"
    if count == 0:
        # Nothing to list; the closing tag is skipped as well.
        return
    else:
        for index in range(0, count):
            print "\t%s" % str(sequence[index])
    print "<enumerate>\n"
# 1. Tuples.
print "\n<ex1: tuples d'éléments de même type>\n"
tupleOfInteger = (1, 2, 3)
tupleOfFloat = (1.5, 2.5, 3.5)
tupleOfBoolean = (True, False, True, False)
tupleOfString = ('abc', 'xyz', 'rgb')
# tupleOfInteger[0] = 4 # impossible: a tuple is immutable
enumerate(tupleOfInteger)
enumerate(tupleOfFloat)
enumerate(tupleOfBoolean)
enumerate(tupleOfString)
# 2. Lists.
print "\n<ex2: listes d'éléments de même type>\n"
listOfInteger = [1, 2, 3]
listOfFloat = [1.5, 2.5, 3.5]
listOfBoolean = [True, False, True, False]
listOfString = ['abc', 'xyz', 'rgb']
listOfInteger[0] = 4 # possible: a list is mutable
enumerate(listOfInteger)
enumerate(listOfFloat)
enumerate(listOfBoolean)
enumerate(listOfString)
# 3. Tuple vs list.
print "\n<ex3: comparaison d'une même séquence sous forme de tuple et de liste>\n"
numberSequenceAsTuple = tuple(range(32))
numberSequenceAsList = list(range(32))
print "<séquence sous forme de tuple : %s>" % str(numberSequenceAsTuple)
print "<séquence sous forme de liste : %s>\n" % str(numberSequenceAsList)
# Compare the shallow memory footprint of the two representations.
sizeAsTuple = sys.getsizeof(numberSequenceAsTuple)
sizeAsList = sys.getsizeof(numberSequenceAsList)
print "<taille en mémoire de numberSequenceAsTuple : %d octets>" % sizeAsTuple
print "<taille en mémoire de numberSequenceAsList : %d octets>\n" % sizeAsList
if sizeAsTuple < sizeAsList:
    print "<conclusion : la version sous forme de tuple prend moins de mémoire que la version sous forme de liste>\n"
else:
    print "<conclusion : la version sous forme de liste prend moins de mémoire que la version sous forme de tuple>\n"
print "<cependant le tuple est immuable contrairement à la liste qui est mutable>\n"
# 4. Lists holding elements of different types.
print "\n<ex4: listes d'éléments avec des types différents>\n"
listOfDifferentType1 = [True, 1, 2.3, 'text']
listOfDifferentType2 = ['abc', "xyz", '''rgb''', 6]
listOfDifferentType3 = [1+2, 3/4, len('text')]
enumerate(listOfDifferentType1)
enumerate(listOfDifferentType2)
enumerate(listOfDifferentType3)
# 5. Nested lists.
print "\n<ex5: listes imbriquées>\n"
listOfList1 = [[1, 2, 3], [1.5, 2.5, 3.5], [True, False, True, False]]
listOfList2 = [tupleOfInteger, tupleOfFloat, tupleOfBoolean]
listOfList3 = [listOfInteger, listOfFloat, listOfBoolean]
listOfList4 = [listOfList1, [tupleOfInteger, tupleOfFloat, tupleOfBoolean], [listOfInteger, listOfFloat, listOfBoolean]]
print "<listOfList1 : %s >" % str(listOfList1)
print "<listOfList2 : %s >" % str(listOfList2)
print "<listOfList3 : %s >" % str(listOfList3)
print "<listOfList4 : %s >" % str(listOfList4)
enumerate(listOfList1)
enumerate(listOfList3)
enumerate(listOfList4)
enumerate(listOfList2)
# 6. Dictionary of keys and values.
print "\n<ex6: création d'une dictionnaire des 6 possibilités d'ordre de rotation {clé : ordre de rotation}>\n"
dictRotationOrder = {
    0 : 'xyz',
    1 : 'yzx',
    2 : 'zxy',
    3 : 'xzy',
    4 : 'yxz',
    5 : 'zyx'
}
enumerate(dictRotationOrder)
# Iterate over the key/value pairs of the dictionary (Python 2 iteritems).
print "<dictionnaire>"
for key, value in dictRotationOrder.iteritems():
    print "\t%s : %s" % (key, value)
print "<dictionnaire>\n"
print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE02/ANI2012A17_Collection.py",
"copies": "1",
"size": "3814",
"license": "mit",
"hash": 3924258987913293300,
"line_mean": 25.780141844,
"line_max": 120,
"alpha_frac": 0.7208686441,
"autogenerated": false,
"ratio": 2.7263537906137185,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39472224347137186,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Lambda.py | Python programming with Maya | coding=utf-8
# Examples of list manipulation with lambda, map, reduce, filter and their list-comprehension equivalents.
print "\n<début de l'exécution>\n"
# Define a few lists of numeric values.
listA = [1, 2, 3, 4, 5]
listB = [6, 7, 8, 9, 10]
listC = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377]
# 1. Lambda (x+1).
print "\n<ex1: application sur une liste d'une fonction lambda qui incrémente de 1 la valeur de chaque élément de la liste>\n"
# Version 1: map + lambda.
result = map(lambda x : x+1, listA)
print "<résultat 1.1 : %s (avec fonction map et lambda)>\n" % result
# Version 2: list comprehension.
result = [x+1 for x in listA]
print "<résultat 1.2 : %s (avec technique de compréhension de liste)>\n" % result
# 2. Lambda (x*2).
print "\n<ex2: application sur une liste d'une fonction lambda qui multiplie par 2 la valeur de chaque élément de la liste>\n"
# Version 1: map + lambda.
result = map(lambda x : x*2, listA)
print "<résultat 2.1 : %s (avec fonction map et lambda)>\n" % result
# Version 2: list comprehension.
result = [x*2 for x in listA]
print "<résultat 2.2 : %s (avec technique de compréhension de liste)>\n" % result
# 3. Lambda (x*x).
print "\n<ex3: application sur une liste d'une fonction lambda qui multiplie par elle-même la valeur de chaque élément de la liste>\n"
# Version 1: map + lambda.
result = map(lambda x : x*x, listA)
print "<résultat 3.1 : %s (avec fonction map et lambda)>\n" % result
# Version 2: list comprehension.
result = [x*x for x in listA]
print "<résultat 3.2 : %s (avec technique de compréhension de liste)>\n" % result
# 4. Lambda (counters).
print "\n<ex4: application sur une liste de listes d'une fonction lambda qui détermine le nombre d'éléments dans chaque sous-listes>\n"
# Version 1: map + lambda.
result = map(lambda x : len(x), [listA, listB, listC])
print "<résultat 4.1 : %s (avec fonction map et lambda)>\n" % result
# Version 2: list comprehension.
result = [len(x) for x in [listA, listB, listC]]
print "<résultat 4.2 : %s (avec technique de compréhension de liste)>\n" % result
# 5. Lambda, map and reduce (sum of the counters).
print "\n<ex5: détermine la somme du nombre d'éléments dans chaque sous-listes>\n"
# Version 1: reduce over the mapped lengths.
result = reduce(lambda x, y: x+y, map(lambda x : len(x), [listA, listB, listC]))
print "<résultat 5.1 : %s (avec fonction map, reduce et lambda)>\n" % result
# Version 2: no simple comprehension equivalent for this case.
result = "pas d'équivalent simple pour ce cas"
print "<résultat 5.2 : %s (avec technique de compréhension de liste)>\n" % result
# 6. Lambda and filtering (even numbers).
print "\n<ex6: application d'un filtre qui retourne la sous-liste des nombres paires d'une liste>\n"
# Version 1: filter + lambda.
result = filter(lambda x: x % 2 == 0, listC)
print "<résultat 6.1 : %s (avec fonction map, filter et lambda)>\n" % result
# Version 2: list comprehension with a condition.
result = [x for x in listC if x % 2 == 0]
print "<résultat 6.2 : %s (avec technique de compréhension de liste)>\n" % result
print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE03/ANI2012A17_Lambda.py",
"copies": "1",
"size": "3074",
"license": "mit",
"hash": -1336408682116153600,
"line_mean": 25.3739130435,
"line_max": 146,
"alpha_frac": 0.6884272997,
"autogenerated": false,
"ratio": 2.4420289855072466,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.861921117541222,
"avg_score": 0.0022490219590054826,
"num_lines": 115
} |
# ANI2102A17_Oscillator.py | Python programming with Maya | coding=utf-8
# Create an animation sequence by oscillating several attributes.
import math
# Animation parameters.
frameFirst = 1
frameLast = 240
frequency = 0.1
amplitude = 10
# List of keyable animation attributes.
keyframeAttributes = [
    'translateX', 'translateY', 'translateZ',
    'rotateX', 'rotateY', 'rotateZ',
    'scaleX', 'scaleY', 'scaleZ']
def oscillator(time=0, frequency=1, amplitude=1):
    """Return the value of a sine oscillation at *time*.

    The wave has angular frequency *frequency* (radians per time unit)
    and peak value *amplitude*.
    """
    phase = time * frequency
    return math.sin(phase) * amplitude
# Take the current selection (the first element is the keyed target).
target = maya.cmds.ls(selection=True)
print "<génération des poses clés dans l'intervale [%s, %s]>" % (frameFirst, frameLast)
# Generate the key poses.
if len(target) > 0:
    for index in range(frameFirst, frameLast):
        # Update the oscillator for the current frame.
        oscillation = oscillator(index, frequency, amplitude)
        print "<oscillation: %s>" % str(oscillation)
        # Key three different attributes from the oscillator's current value.
        maya.cmds.setKeyframe(target[0], attribute=keyframeAttributes[0], value=oscillation * 1.0, time=index)
        maya.cmds.setKeyframe(target[0], attribute=keyframeAttributes[4], value=oscillation * 5.0, time=index)
        maya.cmds.setKeyframe(target[0], attribute=keyframeAttributes[8], value=oscillation * 0.2, time=index)
else:
    print "<faire une sélection avant d'exécuter le script>"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE07/ANI2012A17_Oscillator.py",
"copies": "1",
"size": "1544",
"license": "mit",
"hash": 4926521807189459000,
"line_mean": 32.152173913,
"line_max": 106,
"alpha_frac": 0.7298360656,
"autogenerated": false,
"ratio": 2.893738140417457,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41235742060174574,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Pythonic_Iteration.py | Python programming with Maya | coding=utf-8
# Examples of iterative control structures in Python.
print "\n<début de l'exécution>"
#
# Examples of iteration with the 'for' loop.
#
# 1. 'for' loop example.
print "\n<boucle 'for' de 1 à 5 avec séquence explicite>"
for index in [1, 2, 3, 4, 5]:
    print "\t<boucle %i>" % (index)
# 2. 'for' loop example.
print "\n<boucle 'for' de 6 à 10 avec séquence explicite>"
for index in [6, 7, 8, 9, 10]:
    print "\t<boucle %i>" % (index)
# 3. 'for' loop example (non-contiguous sequence).
print "\n<boucle 'for' de 1 à 21 avec séquence explicite non continue>"
for index in [1, 2, 3, 5, 8, 13, 21]:
    print "\t<boucle %i>" % (index)
# 4. 'for' loop example.
print "\n<boucle 'for' de -1 à 1 avec séquence explicite>"
for index in [-1, 0, 1]:
    print "\t<boucle %i>" % (index)
# 5. 'for' loop example (non-contiguous sequence).
print "\n<boucle 'for' de -3 à 3 avec séquence explicite non continue>"
for index in [-3, 0, 3]:
    print "\t<boucle %i>" % (index)
# 6. 'for' loop example (range starting at 0).
print "\n<boucle 'for' de 0 à 9 avec intervalle qui commence à 0>"
for index in range(10):
    print "\t<boucle %i>" % (index)
# 7. 'for' loop example (range with an explicit start).
print "\n<boucle 'for' de 1 à 9 avec intervalle qui commence à un seuil spécifique>"
for index in range(1, 10):
    print "\t<boucle %i>" % (index)
# 8. 'for' loop example (range with an explicit start).
print "\n<boucle 'for' de 5 à 10 avec intervalle qui commence à un seuil spécifique>"
for index in range(5, 11):
    print "\t<boucle %i>" % (index)
# 9. 'for' loop example (step of 3 per iteration).
print "\n<boucle 'for' de 0 à 18 avec intervalle et décalage de 3 nombres par boucle>"
for index in range(0, 20, 3):
    print "\t<boucle %i>" % (index)
# 10. 'for' loop example (step of 2, even numbers).
print "\n<boucle 'for' de 0 à 8 avec intervalle et décalage de 2 nombres par boucle (pair)>"
for index in range(0, 10, 2):
    print "\t<boucle %i>" % (index)
# 11. 'for' loop example (step of 2, odd numbers).
print "\n<boucle 'for' de 1 à 9 avec intervalle et décalage de 2 nombres par boucle (impair)>"
for index in range(1, 10, 2):
    print "\t<boucle %i>" % (index)
# 12. 'for' loop example (descending range).
print "\n<boucle 'for' de 10 à 1 avec intervalle décroissant>"
for index in range(10, 0, -1):
    print "\t<boucle %i>" % (index)
# 13. 'for' loop example (descending range into negatives).
print "\n<boucle 'for' de 0 à -10 avec intervalle décroissant>"
for index in range(0, -11, -1):
    print "\t<boucle %i>" % (index)
# 14. 'for' loop example (range filtered by slicing).
print "\n<boucle 'for' de 2 à 3 avec intervalle et filtrage par séquence>"
for index in range(5)[2:4]:
    print "\t<boucle %i>" % (index)
# 15. 'for' loop example (slice bounded below).
print "\n<boucle 'for' de 1 à 4 avec intervalle et filtrage par séquence sur la borne inférieure>"
for index in range(5)[1:]:
    print "\t<boucle %i>" % (index)
# 16. 'for' loop example (slice bounded above).
print "\n<boucle 'for' de 0 à 3 avec intervalle et filtrage par séquence sur la borne supérieure>"
for index in range(5)[:4]:
    print "\t<boucle %i>" % (index)
# 17. 'for' loop example (over the characters of a string).
print "\n<boucle 'for' sur chaque caractère d'une chaîne de caractères>"
for character in "python":
    print "\t<%s>" % character
# 18. 'for' loop example (over the keys of a dictionary).
print "\n<boucle 'for' sur les clés d'un dictionnaire>"
for key in {'x': 1, 'y': 2, 'z': 3}:
    print "\t<%s>" % key
# 19. 'for' loop example (driven by a generator function).
print "\n<boucle 'for' à partir d'une fonction génératrice>"
# Generator function.
def iterateWithGenerator(n):
    """Yield the integers 0, 1, 2, ... while they remain below *n*."""
    current = 0
    while not (current >= n):
        yield current
        current = current + 1
# Use the generator function as the iteration source.
for index in iterateWithGenerator(10):
    print "\t<boucle %i>" % index
#
# Examples of iteration with the 'while' loop.
#
# 20. 'while' loop example (zero iterations: condition is falsy).
print "\n<boucle 'while' avec aucune itération>"
while (0):
    print "\t<trace impossible>"
print "\t<aucune trace>"
# 21. 'while' loop example (commented out: infinite loop).
#while (1):
#    print "\t<boucle infinie>"
# 22. 'while' loop example (commented out: infinite loop).
#while (True):
#    print "\t<boucle infinie>"
# 23. 'while' loop example (counting up).
print "\n<boucle 'while' de 0 à 4>"
index = 0
while (index < 5):
    print "\t<boucle %i>" % (index)
    index += 1
# 24. 'while' loop example (counting down).
print "\n<boucle 'while' de 5 à 1>"
index = 5
while (index > 0):
    print "\t<boucle %i>" % (index)
    index -= 1
print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module07/EXE06/ANI2012A17_Pythonic_Iteration.py",
"copies": "1",
"size": "4723",
"license": "mit",
"hash": 2045251210773427500,
"line_mean": 19.301369863,
"line_max": 98,
"alpha_frac": 0.6184351554,
"autogenerated": false,
"ratio": 2.5,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8591596847934702,
"avg_score": 0.005367661493059621,
"num_lines": 219
} |
# ANI2102A17_Selection.py | Python programming with Maya | coding=utf-8#
# Examples related to listing and selecting elements present in a Maya scene.
def enumerate(sequence):
    """Print every element of *sequence* to the console, one per line.

    NOTE(review): this shadows the built-in enumerate() for the rest of
    the script — consider renaming.
    """
    print "%s" % str(sequence)
    count = len(sequence)
    print "\n<enumerate>"
    if count == 0:
        # Nothing to list; the closing tag is skipped as well.
        return
    else:
        for index in range(0, count):
            print "\t%s" % str(sequence[index])
    print "<enumerate>\n"
print "\n<début de l'exécution>\n"
# 1.
print "<ex1: lister tous les éléments de la scène>\n"
sequence = maya.cmds.ls()
if len(sequence) == 0:
print "<la scène est vide>\n"
else:
enumerate(sequence)
# 2.
print "\n<ex2: lister tous les éléments sélectionnés de la scène>\n"
sequence = maya.cmds.ls(selection=True)
if len(sequence) == 0:
print "<il n'y a pas d'élément sélectionné dans la scène>\n"
else:
enumerate(sequence)
# 3.
print "\n<ex3: extraire le premier élément d'une sélection>\n"
sequence = maya.cmds.ls(selection=True, head=1)
if len(sequence) == 0:
print "<pas de premier élément car il n'y a pas d'élément sélectionné dans la scène>\n"
else:
enumerate(sequence)
# 4.
print "\n<ex4: extraire les 2 premiers éléments d'une sélection>\n"
sequence = maya.cmds.ls(selection=True, head=2)
if len(sequence) < 2:
print "<pas assez d'éléments sélectionnés dans la scène>\n"
else:
enumerate(sequence)
# 5.
print "\n<ex5: extraire le dernier élément d'une sélection>\n"
sequence = maya.cmds.ls(selection=True, tail=1)
if len(sequence) == 0:
print "<pas de dernier élément car il n'y a pas d'élément sélectionné dans la scène>\n"
else:
enumerate(sequence)
# 6.
print "\n<ex6: extraire les 2 derniers éléments d'une sélection>\n"
sequence = maya.cmds.ls(selection=True, tail=2)
if len(sequence) < 2:
print "<pas assez d'éléments sélectionnés dans la scène>\n"
else:
enumerate(sequence)
# 7.
print "\n<ex7: sélectionner tous les éléments de la scène>\n"
maya.cmds.select(all=True)
sequence = maya.cmds.ls(selection=True)
if len(sequence) == 0:
print "<la scène est vide>\n"
else:
enumerate(sequence)
# 8.
print "\n<ex8: déselectionner tous les éléments présents dans la sélection active>\n"
maya.cmds.select(clear=True)
sequence = maya.cmds.ls(selection=True)
if len(sequence) == 0:
print "<il n'y a pas d'élément sélectionné dans la scène>\n"
else:
enumerate(sequence) # pas supposé être exécuté car on vient de tout déselectionner
# 9.
print "\n<ex9: ajouter un élément à la sélection par nom>\n"
maya.cmds.polySphere(radius=10, name='sphere')
maya.cmds.select('sphere', add=True)
sequence = maya.cmds.ls(selection=True)
if len(sequence) == 0:
print "<il n'y a pas d'élément appelé 'sphere' dans la scène>\n"
else:
enumerate(sequence)
# 10.
print "\n<ex10: sélectionner toutes les transformations géométriques de la scène (transform)>\n"
sequence = maya.cmds.ls(type='transform')
if len(sequence) == 0:
print "<il n'y a pas de transformation géométrique dans la scène>\n"
else:
enumerate(sequence)
# 11.
print "\n<ex11: sélectionner toutes les formes visuelles de la scène (shape)>\n"
sequence = maya.cmds.ls(type='shape')
if len(sequence) == 0:
print "<il n'y a pas de forme visuelle dans la scène>\n"
else:
enumerate(sequence)
# 12.
print "\n<ex12: sélectionner tous les maillages géométriques de la scène (mesh)>\n"
sequence = maya.cmds.ls(type='mesh')
if len(sequence) == 0:
print "<il n'y a pas de maillage géométrique dans la scène>\n"
else:
enumerate(sequence)
# 13.
print "\n<ex13: sélectionner toutes les lumières de la scène (light)>\n"
sequence = maya.cmds.ls(type='light')
if len(sequence) == 0:
print "<il n'y a pas de lumière dans la scène>\n"
else:
enumerate(sequence)
# 14.
print "\n<ex14: sélectionner tous les joints d'animation de la scène>\n"
sequence = maya.cmds.ls(type='joint')
if len(sequence) == 0:
print "<il n'y a pas de joint d'animation dans la scène>\n"
else:
enumerate(sequence)
# 15.
print "\n<ex15: extraire la position et la rotation du premier élément d'une sélection>\n"
index = 0;
sequence = maya.cmds.ls(selection=True, type='transform')
# valider s'il y a au moins un élément sélectionné
if len(sequence) == 0:
# transformation par défaut si aucun élément sélectionné
selectionPosition = (0, 0, 0)
selectionRotation = (0, 0, 0)
else:
# extraire la position et l'orientation du premier élément de la liste des éléments sélectionnés
selectionPosition = maya.cmds.xform(sequence[index], query=True, worldSpace=True, translation=True)
selectionRotation = maya.cmds.xform(sequence[index], query=True, worldSpace=True, rotation=True)
print "<position : %s>" % selectionPosition
print "<rotation : %s>" % selectionRotation
print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE04/ANI2012A17_Selection.py",
"copies": "1",
"size": "4983",
"license": "mit",
"hash": 5544459855380067000,
"line_mean": 21.4398148148,
"line_max": 101,
"alpha_frac": 0.7066226532,
"autogenerated": false,
"ratio": 2.575451647183847,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37820743003838475,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Skeleton.py | Python programming with Maya | coding=utf-8
# Example of manipulating lists holding the names of the different animation joints of a biped.
def enumerate(sequence):
    """Print every element of *sequence* to the console, one per line.

    NOTE(review): this shadows the built-in enumerate() for the rest of
    the script — consider renaming.
    """
    print "%s" % str(sequence)
    count = len(sequence)
    print "\n<enumerate>"
    if count == 0:
        # Nothing to list; the closing tag is skipped as well.
        return
    else:
        for index in range(0, count):
            print "\t%s" % str(sequence[index])
    print "<enumerate>\n"
print "\n<début de l'exécution>\n"
# 1. création d'une liste pour chaque type de joint du squelette
# liste des joints de la tête
listJointHead = ['Neck', 'Head']
# liste des joints du torse
listJointTorso = ['Pelvis', 'Spine1', 'Spine2', 'Spine3']
# liste des joints d'un bras
listJointArm = ['Hand', 'Elbow', 'Shoulder']
# liste des joints d'une jambe
listJointLeg = ['Hip', 'Knee', 'Foot']
enumerate(listJointHead)
enumerate(listJointTorso)
enumerate(listJointArm)
enumerate(listJointLeg)
# 2. créer des variations des listes de joints (bras et jambe, gauche et droit)
# création de quelques listes vides
listJointArmLeft = []
listJointArmRight = []
listJointLegLeft = []
listJointLegRight = []
# remplir les 4 variations de liste par renommage
print "<liste des joints du bras gauche et droit>"
for joint in listJointArm:
listJointArmLeft.append ('Left' + joint)
listJointArmRight.append('Right' + joint)
enumerate(listJointArmLeft)
enumerate(listJointArmRight)
print "<liste des joints de la jambe gauche et droite>"
for joint in listJointLeg:
listJointLegLeft.append ('Left' + joint)
listJointLegRight.append('Right' + joint)
enumerate(listJointLegLeft)
enumerate(listJointLegRight)
# 3. fusion des listes dans une seule liste
# fusion de toutes les listes de joints en une seule liste
listSkeleton = listJointHead + listJointTorso + listJointArmLeft + listJointArmRight + listJointLegLeft + listJointLegRight
enumerate(listSkeleton)
# 4. parcours de la liste et création des joints
# boucler sur la liste pour la génération des joints du squelette
for index in range(len(listSkeleton)):
maya.cmds.joint(name='%s' % listSkeleton[index])
maya.cmds.select(deselect=True)
# notez qu'à ce stade, les joints ne sont ni transformés, ni connectés, ils sont seulement instanciés dans la scène
# TODO transformation initiale des joints
# TODO définir les relations hiérarchiques entre les joints
print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module09/EXE02/ANI2012A17_Skeleton.py",
"copies": "1",
"size": "2508",
"license": "mit",
"hash": 8906514391757763000,
"line_mean": 26.0108695652,
"line_max": 123,
"alpha_frac": 0.7448692153,
"autogenerated": false,
"ratio": 2.792134831460674,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40370040467606744,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_System.py | Programmation Python avec Maya | coding=utf-8
# Exemples de scripts qui permettent d'extraire de l'information sur le système hôte.
import os
import sys
import platform
import maya.cmds
import maya.utils
# Fix: Python-2-only print statements converted to single-argument print()
# calls — output is identical under Python 2 and the script becomes runnable
# under Python 3 (Maya 2022+).
print("\n<début de l'exécution>\n")
# 1. operating system
print("\n<déterminer quel est le système d'exploitation>\n")
if os.name == 'posix':
    print("\n<os : posix (linux ou macos)>")
else:
    # any non-posix value of os.name (normally 'nt') is reported as windows
    print("\n<os : windows>")
# 2. environment versions
print("\n<information sur les versions de l'environnement où le script est exécuté>\n")
print("\n<system version: sys.version>")
print("%s" % sys.version)
print("\n<system version: sys.version_info>")
print("%s" % sys.version_info)
# 3. system analysis
print("\n<informations sur le système hôte où le script est exécuté>\n")
def system_analysis():
    """Print a report about the host system (OS, hardware, platform).

    Reads os.name and the platform module only; returns None.
    Fix: Python-2-only print statements converted to print() calls,
    valid and identical in both Python 2 and Python 3.
    """
    print("\n<system analysis>")
    print("\tos: %s" % os.name)
    print("\tmachine: %s" % platform.machine())
    print("\tprocessor: %s" % platform.processor())
    print("\tsystem: %s" % platform.system())
    print("\trelease: %s" % platform.release())
    print("\tversion: %s" % platform.version())
    print("\tnode: %s" % platform.node())
    print("<system analysis>")
system_analysis()
# 4. path delimiter
print("\n<déterminer quel est le délimiteur de chemin d'accès en fonction du système d'exploitation>\n")
# os.pathsep already is ':' on posix and ';' on windows — use the stdlib
# constant instead of re-deriving it from os.name
delimitor = os.pathsep
print("\n<delimitor: '%s'>" % delimitor)
# 5. environment variables
# show 'MAYA_SCRIPT_PATH' (directories where MEL scripts are found);
# robustness fix: `or ''` keeps an unset variable from raising
# AttributeError on None.split()
print("\n<environment variable: 'MAYA_SCRIPT_PATH'>")
for path in (os.getenv("MAYA_SCRIPT_PATH") or '').split(delimitor):
    print("\t%s" % path)
print("<environment variable>")
# show 'PYTHONPATH' (directories where Python scripts are found)
print("\n<environment variable: 'PYTHONPATH'>")
for path in (os.getenv("PYTHONPATH") or '').split(delimitor):
    print("\t%s" % path)
print("<environment variable>")
# 6. paths used by Maya
print("\n<afficher les chemins d'accès utilisés par Maya>")
print("\n<maya path>")
print("\t%s" % maya.cmds.internalVar(userAppDir = True))
print("\t%s" % maya.cmds.internalVar(userScriptDir = True))
print("\t%s" % maya.cmds.internalVar(userPrefDir = True))
print("\t%s" % maya.cmds.internalVar(userPresetsDir = True))
print("\t%s" % maya.cmds.internalVar(userShelfDir = True))
print("\t%s" % maya.cmds.internalVar(userMarkingMenuDir = True))
print("\t%s" % maya.cmds.internalVar(userBitmapsDir = True))
print("\t%s" % maya.cmds.internalVar(userTmpDir = True))
print("\t%s" % maya.cmds.internalVar(userWorkspaceDir = True))
print("<maya path>")
print("\n<afficher la liste des chemins d'accès du système>")
def print_system_paths():
    """Print every entry of the interpreter search path (sys.path).

    Returns None. Fix: Python-2-only print statements converted to
    print() calls, identical output under Python 2 and Python 3.
    """
    print("\n<system path>")
    for path in sys.path:
        print("\t%s" % path)
    print("<system path>")
print_system_paths()
# 7. symbols
print("\n<afficher la liste des symboles de portée globale du système>\n")
def print_global_symbols():
    """Print the name of every symbol defined in the module's global scope.

    Returns None. Fix: Python-2-only print statements converted to
    print() calls; list(...) snapshots the keys so iteration is safe
    under Python 3 even if globals change while printing.
    """
    print("\n<global symbols>")
    for symbol in list(globals().keys()):
        print("\t%s" % symbol)
    print("<global symbols>")
print_global_symbols()
print("\n<fin de l'exécution>\n")
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE01/ANI2012A17_System.py",
"copies": "1",
"size": "3568",
"license": "mit",
"hash": 6501109126017649000,
"line_mean": 25.328358209,
"line_max": 121,
"alpha_frac": 0.6887755102,
"autogenerated": false,
"ratio": 2.8246597277822256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8957194103202426,
"avg_score": 0.011248226955959945,
"num_lines": 134
} |
# ANI2102A17_Timeline.py | Python programming with Maya | coding=utf-8
# Example program that creates an animation made of two timelines holding sets of keyframes.
import random
# program parameters (speed and theta feed the transform tuples further down;
# count and ratio appear unused in this chunk — TODO confirm against full file)
count = 10
speed = 1
theta = 45
ratio = 0.5
# classe de type Keyframe
class Keyframe:
    """A single animation key: a (time, attribute, value) triple.

    Fix: Python-2-only print statement converted to a print() call,
    identical output under Python 2 and Python 3.
    """
    def __init__(self, t, a, v):
        """Store the key time, the attribute name, and the keyed value."""
        self.time = t
        self.attribute = a
        self.value = v
    def trace(self):
        """Print this keyframe's data to the console; returns None."""
        print("\t\t<keyframe time: %s attribute: %s value: %s >" % (self.time, self.attribute, self.value))
# classe de type Timeline
class Timeline:
    """An ordered collection of keyframes applied to one target node.

    Fix: Python-2-only print statements converted to print() calls,
    identical output under Python 2 and Python 3.
    """
    def __init__(self, n, t, s):
        """Store the timeline name, its target node, and its start frame."""
        self.name = n
        self.target = t
        self.start = s
        self.keyframes = []  # keys kept in insertion order
    def add_keyframe(self, k):
        """Append one keyframe to the timeline."""
        self.keyframes.append(k)
    def trace(self):
        """Print the timeline's keyframes to the console; returns None."""
        print("\n\t<timeline>")
        for keyframe in self.keyframes:
            keyframe.trace()
        print("\t<timeline>\n")
    def bake(self):
        """Write the keyframes into Maya's animation system.

        Skipped (with a console message) when no target node was given.
        """
        if self.target is not None:
            for keyframe in self.keyframes:
                maya.cmds.setKeyframe(
                    self.target,
                    time = keyframe.time,
                    attribute = keyframe.attribute,
                    value = keyframe.value)
        else:
            print("<annulé car aucun objet sur lequel appliquer les poses clés>")
# classe de type Animation
class Animation:
    """A named set of timelines keyed by each timeline's name.

    Fixes: Python-2-only iteritems() replaced by items() (identical
    iteration in Python 2, required for Python 3) and print statements
    converted to print() calls.
    """
    def __init__(self, n):
        """Store the animation name and start with no timelines."""
        self.name = n
        self.timelines = {}
    def add_timeline(self, t):
        """Register a timeline under its own name (replaces any duplicate)."""
        self.timelines.update({t.name : t})
    def trace(self):
        """Print all of the animation's data to the console; returns None."""
        print("\n<animation>")
        for key, timeline in self.timelines.items():
            print("\n\t<%s : %s>" % (key, timeline))
            timeline.trace()
        print("<animation>\n")
    def bake(self):
        """Write every timeline's keys into Maya's animation system."""
        print("\n<bake animation>\n")
        for key, timeline in self.timelines.items():
            timeline.bake()
# Fixes: Python-2-only print statements converted to print() calls; the five
# repetitive keyframe1..keyframe5 assignments collapsed into one data-driven
# loop; the transforms list built as a literal instead of six append calls.
print("\n<début de l'exécution>\n")
# 1. animated scene initialization
print("\n<instanciation d'une pyramide sur laquelle l'animation sera appliquée>\n")
target = maya.cmds.polyPyramid(name='pyramid', numberOfSides=4, subdivisionsHeight=1, subdivisionsCaps=1)
print("\n<instanciation d'une nouvelle animation>\n")
animation = Animation('nom animation')
print("\n<instanciation des lignes de temps>\n")
timeline1 = Timeline('ligne de temps 1', target, 1)
timeline2 = Timeline('ligne de temps 2', target, 16)
print("\n<ajouter la ligne de temps à l'animation>\n")
animation.add_timeline(timeline1)
animation.add_timeline(timeline2)
# 2. generate keyframes one at a time
print("\n<ex1: ajouter les poses clés dans la première ligne de temps une à la fois>\n")
print("<l'attribut et la valeur sont spécifiés directement>\n")
# same five keys as the original explicit assignments (times 1..5, values 1,2,3,5,8)
for keyTime, keyValue in [(1, 1), (2, 2), (3, 3), (4, 5), (5, 8)]:
    timeline1.add_keyframe(Keyframe(keyTime, 'translateX', keyValue))
# 3. generate keyframes from an iterative structure
print("\n<ex2: ajouter les poses clés dans la première ligne de temps par itération>")
print("<l'attribut et la valeur sont déterminés au hasard >\n")
# animation attribute names to pick from at random
keyframeAttributes = [
    'translateX', 'translateY', 'translateZ',
    'rotateX', 'rotateY', 'rotateZ',
    'scaleX', 'scaleY', 'scaleZ']
for index in range(6, 11):
    timeline1.add_keyframe(
        Keyframe(
            index,
            random.choice(keyframeAttributes),
            random.randint(-5, 5)))
# 4. generate keyframes with a list comprehension
print("\n<ex3: ajouter les poses clés dans la première ligne de temps par technique de compréhension de liste>\n")
print("<l'attribut est le même que la séquence précédente mais la valeur est doublée>\n")
# NOTE(review): the message above says the value is doubled, but the code
# multiplies by 3 — confirm intended factor against the course material
sequence = [Keyframe(keyframe.time + 5, keyframe.attribute, keyframe.value * 3) for keyframe in timeline1.keyframes[5:10]]
for keyframe in sequence:
    timeline1.add_keyframe(keyframe)
# 5. generate keyframes with a generator function
print("\n<ex4: ajouter les poses clés dans la seconde ligne de temps avec une fonction génératrice>\n")
print("<l'attribut est choisi au hasard parmi une liste de transformations représentées sous forme de tuple>\n")
# transform primitives as (name, attribute, value) tuples
transforms = [
    ('move_forward', 'translateZ', speed),
    ('move_backward', 'translateZ', -speed),
    ('move_right', 'translateX', speed),
    ('move_left', 'translateX', -speed),
    ('rotate_right', 'rotateY', theta),
    ('rotate_left', 'rotateY', -theta),
]
def frame_generator(count):
    """Yield `count` random keyframes, one per frame from timeline2.start.

    Each key uses a transform tuple (name, attribute, value) picked at
    random from the module-level `transforms` list.
    Fix: the manual while-loop with a hand-maintained counter is replaced
    by the idiomatic range() loop — identical sequence of yields.
    """
    for index in range(count):
        transform = random.choice(transforms)
        yield Keyframe(timeline2.start + index, transform[1], transform[2])
# Fixes: Python-2-only print statement converted to print(); index-based
# loop replaced by direct iteration over the generated sequence.
sequence = list(frame_generator(100))
for keyframe in sequence:
    timeline2.add_keyframe(keyframe)
# print the animation content as text in the console
animation.trace()
# write the animation's keyframes into the maya scene
animation.bake()
print("\n<fin de l'exécution>\n")
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE10/maya/ANI2012A17_Timeline.py",
"copies": "1",
"size": "5955",
"license": "mit",
"hash": -7314184533377667000,
"line_mean": 27.1626794258,
"line_max": 139,
"alpha_frac": 0.6980971797,
"autogenerated": false,
"ratio": 2.908102766798419,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4106199946498419,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Tool.py | Programmation Python avec Maya | coding=utf-8
# Exemple d'un outil avec interface graphique qui est directement intégré dans Maya.
# Fonctionnalités :
# + Assignation de transformations géométriques
# + Assignation d'une couleur de sommet
# + Validation ajout et retrait d'un attribut
# + Diverses commandes à activer avec un bouton
# + Sauvegarde et le chargement de données dans un fichier externe.
import random
import cPickle
class CustomToolWindow(object):
    """A Maya tool window with transform, vertex-color, command and attribute panels.

    Provides menu-driven save/load of the tool's values via cPickle, a set of
    command buttons (reset / random / extrude), and add/has/remove handling of
    a custom string attribute on the selected object.
    """
    @classmethod
    def showUI(cls):
        """Instantiate the window class, build the UI, and return the instance."""
        # instantiate an instance of the class
        win = cls()
        # invoke the window construction function
        win.create()
        # return the reference to the window instance
        return win
    def __init__(self):
        """Initialize the window's configuration values."""
        # unique identifier of the window
        self.window = 'CustomToolWindow'
        # window title
        self.title = 'Custom Tool Window'
        # window dimensions (width, height)
        self.size = (520, 360)
        # label of the action button
        self.actionName = 'Apply && Close'
        # whether the window should be docked rather than floating
        self.isDockable = False
        # data dictionary used by the save/load operations
        self.dictionaryData = {}
    def create(self):
        """Build the window, its root layout, menus, buttons and controls."""
        # check whether an instance of the window already exists
        if maya.cmds.window(self.window, exists=True):
            # if so, delete the previous window instance
            maya.cmds.deleteUI(self.window, window=True)
        # instantiate and configure a window control
        self.window = maya.cmds.window(
            self.window,
            title=self.title,
            widthHeight=self.size,
            menuBar=True,
            maximizeButton=False,
            toolbox=True,
            resizeToFitChildren=True,
            sizeable=False
        )
        # root form layout of the window
        self.layoutFormRoot = maya.cmds.formLayout(numberOfDivisions=100)
        # build the menus
        self.createMenu()
        # build the buttons
        self.createButton()
        # build the controls
        self.createControl()
        if self.isDockable:
            # dock the window inside the Maya interface
            allowedAreas = ['right', 'left']
            self.windowDock = maya.cmds.dockControl(area='right', label=self.title, content=self.window, allowedArea=allowedAreas)
        # display the active (just-created) window
        maya.cmds.showWindow()
    def createMenu(self):
        """Build the menu bar ('Edit' and 'Help' menus)."""
        # 'Edit' menu (begin)
        self.menuEdit = maya.cmds.menu(label='Edit')
        self.menuEditItemReset = maya.cmds.menuItem(
            label='Reset',
            command=self.callbackMenuReset)
        maya.cmds.menuItem(divider=True) # divider marker
        self.menuEditItemSave = maya.cmds.menuItem(
            label='Save',
            command=self.callbackMenuSave)
        self.menuEditItemLoad = maya.cmds.menuItem(
            label='Load',
            command=self.callbackMenuLoad)
        maya.cmds.menuItem(divider=True)
        self.menuEditQuit = maya.cmds.menuItem(
            label='Quit',
            command=self.callbackMenuQuit)
        # 'Edit' menu (end)
        # 'Help' menu (begin)
        self.menuHelp = maya.cmds.menu(label='Help')
        self.menuHelpItem = maya.cmds.menuItem(
            label='Help on %s' %self.title,
            command=self.callbackMenuHelp)
        # 'Help' menu (end)
    def createButton(self):
        """Build the bottom action buttons and their layout constraints."""
        # button size (width, height), accounting for the margins
        self.commonBtnSize = ((self.size[0]-(5+5+4+4))/3, 26)
        # create the buttons
        self.buttonAction = maya.cmds.button(
            label=self.actionName,
            height=self.commonBtnSize[1],
            command=self.callbackButtonAction)
        self.buttonApply = maya.cmds.button(
            label='Apply',
            height=self.commonBtnSize[1],
            command=self.callbackButtonApply)
        self.buttonClose = maya.cmds.button(
            label='Close',
            height=self.commonBtnSize[1],
            command=self.callbackButtonClose)
        # layout constraints for the buttons
        maya.cmds.formLayout(
            self.layoutFormRoot, edit=True,
            attachForm=(
                [self.buttonAction, 'left' , 5],
                [self.buttonAction, 'bottom', 5],
                [self.buttonApply , 'bottom', 5],
                [self.buttonClose , 'bottom', 5],
                [self.buttonClose , 'right' , 5]),
            attachPosition=(
                [self.buttonAction, 'right', 1, 33],
                [self.buttonClose , 'left' , 0, 67]),
            attachControl=(
                [self.buttonApply,'left' , 4, self.buttonAction],
                [self.buttonApply,'right', 4, self.buttonClose]),
            attachNone=(
                [self.buttonAction,'top'],
                [self.buttonApply, 'top'],
                [self.buttonClose, 'top'])
        )
        # tab layout
        self.layoutTab = maya.cmds.tabLayout(scrollable=True, tabsVisible=False, height=1, childResizable=True)
        # layout constraints for the tab layout
        maya.cmds.formLayout(
            self.layoutFormRoot, edit=True,
            attachForm=(
                [self.layoutTab,'top' , 0],
                [self.layoutTab,'left' , 2],
                [self.layoutTab,'right', 2]),
            attachControl=(
                [self.layoutTab,'bottom', 5, self.buttonApply])
        )
    def createControl(self):
        """Build the tool's interactive controls (transform, shading, commands, attributes)."""
        # form layout for the content
        self.layoutFormContent = maya.cmds.formLayout(numberOfDivisions=100)
        # 'Transformation' collapsible panel (begin)
        self.layoutFrameTransform = maya.cmds.frameLayout(label='Transformation', collapsable=True)
        # layout constraints
        maya.cmds.formLayout(
            self.layoutFormContent, edit=True,
            attachForm=(
                [self.layoutFrameTransform,'left' , 0],
                [self.layoutFrameTransform,'right', 0],
                [self.layoutFrameTransform,'top' , 0])
        )
        # layout used inside the frame layout
        maya.cmds.columnLayout()
        # numeric fields holding the transform values (translation)
        self.textboxTranslation = maya.cmds.floatFieldGrp(
            label='Translation ',
            numberOfFields=3, # note: 4 values must still be provided
            value=[0.0, 0.0, 0.0, -1.0])
        # numeric fields holding the transform values (rotation)
        self.textboxRotation = maya.cmds.floatFieldGrp(
            label='Rotation ',
            numberOfFields=3,
            value=[0.0, 0.0, 0.0, -1.0])
        # numeric fields holding the transform values (scale)
        self.textboxDimension = maya.cmds.floatFieldGrp(
            label='Dimension ',
            numberOfFields=3,
            value=[1.0, 1.0, 1.0, -1.0])
        # 'Transformation' collapsible panel (end)
        # go up 2 levels in the hierarchy
        maya.cmds.setParent('..') # frame > subform
        maya.cmds.setParent('..') # subform > form
        # 'Shading' collapsible panel (begin)
        self.layoutFrameShading = maya.cmds.frameLayout(label='Shading', collapsable=True)
        # layout constraints
        maya.cmds.formLayout(
            self.layoutFormContent, edit=True,
            attachForm=(
                [self.layoutFrameShading,'left' , 0],
                [self.layoutFrameShading,'right', 0],
                [self.layoutFrameShading,'top' , 5]),
            attachControl=(
                [self.layoutFrameShading,'top', 0, self.layoutFrameTransform])
        )
        # layout used inside the frame layout
        maya.cmds.columnLayout()
        # control allowing the selection of a color and its opacity
        self.colorPicker = maya.cmds.colorSliderGrp(label='Vertex Color ')
        # 'Shading' collapsible panel (end)
        # go up 2 levels in the hierarchy
        maya.cmds.setParent('..') # frame > subform
        maya.cmds.setParent('..') # subform > form
        # 'Command' collapsible panel (begin)
        self.layoutFrameCommand = maya.cmds.frameLayout(label='Commands', collapsable=True)
        # layout constraints
        maya.cmds.formLayout(
            self.layoutFormContent, edit=True,
            attachForm=(
                [self.layoutFrameCommand,'left' , 0],
                [self.layoutFrameCommand,'right', 0],
                [self.layoutFrameCommand,'top' , 0]),
            attachControl=(
                [self.layoutFrameCommand,'top', 0, self.layoutFrameShading])
        )
        # layout used inside the frame layout
        maya.cmds.rowColumnLayout(numberOfColumns=5)
        # button size (width, height)
        self.buttonWidth = 96
        self.buttonHeight = 32
        # create the command buttons
        self.buttonCommand1 = maya.cmds.button(
            label='Reset',
            width=self.buttonWidth,
            height=self.buttonHeight,
            command=self.callbackButtonCommand1)
        self.buttonCommand2 = maya.cmds.button(
            label='Random',
            width=self.buttonWidth,
            height=self.buttonHeight,
            command=self.callbackButtonCommand2)
        self.buttonCommand3 = maya.cmds.button(
            label='Extrude',
            width=self.buttonWidth,
            height=self.buttonHeight,
            command=self.callbackButtonCommand3)
        self.buttonCommand4 = maya.cmds.button(
            label='-',
            width=self.buttonWidth,
            height=self.buttonHeight,
            command=self.callbackButtonCommand4)
        self.buttonCommand5 = maya.cmds.button(
            label='-',
            width=self.buttonWidth,
            height=self.buttonHeight,
            command=self.callbackButtonCommand5)
        # 'Command' collapsible panel (end)
        # go up 2 levels in the hierarchy
        maya.cmds.setParent('..') # frame > subform
        maya.cmds.setParent('..') # subform > form
        # 'Attribute' collapsible panel (begin)
        self.layoutFrameAttribute = maya.cmds.frameLayout(label='Attributes ', collapsable=True)
        # layout constraints
        maya.cmds.formLayout(
            self.layoutFormContent, edit=True,
            attachForm=(
                [self.layoutFrameAttribute,'left' , 0],
                [self.layoutFrameAttribute,'right', 0],
                [self.layoutFrameAttribute,'top' , 5]),
            attachControl=(
                [self.layoutFrameAttribute,'top', 0, self.layoutFrameCommand])
        )
        # layout used inside the frame layout
        maya.cmds.flowLayout()
        # field for entering an attribute name
        maya.cmds.text( label=' Name ' )
        self.textboxAttributeName = maya.cmds.textField(width=110)
        # field for showing and entering an attribute value
        maya.cmds.text( label=' Value ' )
        self.textboxAttributeValue = maya.cmds.textField(width=110)
        # button validating that the attribute exists on the selected object
        self.buttonAttributeHas = maya.cmds.button(
            label='Has',
            width=60,
            height=20,
            command=self.callbackButtonAttributeHas)
        # button adding the attribute to the selected object
        self.buttonAttributeAdd = maya.cmds.button(
            label='Add',
            width=60,
            height=20,
            command=self.callbackButtonAttributeAdd)
        # button removing the attribute from the selected object
        self.buttonAddAttributeRemove = maya.cmds.button(
            label='Remove',
            width=60,
            height=20,
            command=self.callbackButtonAttributeRemove)
        # 'Attribute' collapsible panel (end)
    def destroy(self):
        """Delete the window's controls (and the dock control when dockable)."""
        # check for an existing floating window
        if maya.cmds.window(self.window, exists=True):
            maya.cmds.deleteUI(self.window, window=True)
            print "destroy window"
        if self.isDockable:
            # check for an existing dockable window
            if maya.cmds.dockControl(self.windowDock, exists=True):
                maya.cmds.deleteUI(self.windowDock, control=True)
                print "destroy dock"
    def reset(self):
        """Restore the tool's controls to their default values."""
        # reset the text fields (a floatFieldGrp holds 4 values even when only 3 are shown)
        maya.cmds.floatFieldGrp(self.textboxTranslation, edit=True, value=[0.0, 0.0, 0.0, -1.0])
        maya.cmds.floatFieldGrp(self.textboxRotation, edit=True, value=[0.0, 0.0, 0.0, -1.0])
        maya.cmds.floatFieldGrp(self.textboxDimension, edit=True, value=[1.0, 1.0, 1.0, -1.0])
        # reset the color to white
        maya.cmds.colorSliderGrp(self.colorPicker, edit=True, rgbValue=[1.0, 1.0, 1.0])
    def selectTarget(self):
        """Capture the current selection; True when at least one object is selected.

        head=2 keeps at most the first two items of the selection list;
        later code only uses self.target[0].
        """
        self.target = maya.cmds.ls(selection=True, head=2)
        # validate that at least one element is selected
        if len(self.target) > 0:
            return True
        else:
            return False
    def applyToSelection(self):
        """Apply the tool's transform and vertex-color values to the selected object."""
        if self.selectTarget():
            print "Selected node:", (self.target)
            # read the translation value
            valueTranslation = maya.cmds.floatFieldGrp(self.textboxTranslation, query=True, value=True)
            # read the rotation value
            valueRotation = maya.cmds.floatFieldGrp(self.textboxRotation, query=True, value=True)
            # read the scale value
            valueDimension = maya.cmds.floatFieldGrp(self.textboxDimension, query=True, value=True)
            # apply the transformation
            maya.cmds.xform(self.target[0], objectSpace=True, translation=valueTranslation, rotation=valueRotation, scale=valueDimension)
            # read the color value
            color = maya.cmds.colorSliderGrp(self.colorPicker, query=True, rgbValue=True)
            # assign it to the object as vertex color
            maya.cmds.polyColorPerVertex(self.target[0], colorRGB=color, colorDisplayOption=True)
        else:
            print "Selection is empty"
    # menu callback functions
    def callbackMenuReset(self, *args):
        """'Reset' menu: restore the tool's default values."""
        self.reset()
    def callbackMenuSave(self, *args):
        """'Save' menu: serialize the tool's data to an external file."""
        filePath = ''
        fileExtension = 'dat'
        fileFilter = 'Custom data format (*.%s)' % fileExtension
        # show the file-selection dialog
        try: # 'fileDialog2' for Maya 2011 and later
            filePath = maya.cmds.fileDialog2(fileFilter=fileFilter, fileMode=0)
        except: # fall back to 'fileDialog' otherwise
            filePath = maya.cmds.fileDialog(directoryMask='*.%s' % fileExtension, mode=1)
        # abort when no path is returned
        if filePath is None or len(filePath) < 1:
            return
        # take the first element when a list of paths was returned
        if isinstance(filePath, list):
            filePath = filePath[0]
        # open the file for writing
        try:
            file = open(filePath, 'w')
        except:
            maya.cmds.confirmDialog(title='Error', button=['OK'], message='Unable to write file: %s' % filePath)
            raise
        # read the tool's data
        valueTranslation = maya.cmds.floatFieldGrp(self.textboxTranslation, query=True, value=True)
        valueRotation = maya.cmds.floatFieldGrp(self.textboxRotation, query=True, value=True)
        valueDimension = maya.cmds.floatFieldGrp(self.textboxDimension, query=True, value=True)
        valueColor = maya.cmds.colorSliderGrp(self.colorPicker, query=True, rgbValue=True)
        # store the values read from the tool into the dictionary
        self.dictionaryData.update({
            'translation': valueTranslation,
            'rotation': valueRotation,
            'dimension': valueDimension,
            'color': valueColor})
        # serialize the dictionary into the file
        cPickle.dump(self.dictionaryData, file)
        # close the file
        file.close()
    def callbackMenuLoad(self, *args):
        """'Load' menu: deserialize previously saved tool data from a file."""
        filePath = ''
        fileExtension = 'dat'
        fileFilter = 'Custom data format (*.%s)' % fileExtension
        # show the file-selection dialog
        try: # 'fileDialog2' for Maya 2011 and later
            filePath = maya.cmds.fileDialog2(fileFilter=fileFilter, fileMode=1)
        except: # fall back to 'fileDialog' otherwise
            filePath = maya.cmds.fileDialog(directoryMask='*.%s' % fileExtension, mode=0)
        # abort when no path is returned
        if filePath is None or len(filePath) < 1:
            return
        # take the first element when a list of paths was returned
        if isinstance(filePath, list):
            filePath = filePath[0]
        # open the file for reading
        try:
            file = open(filePath, 'r')
        except:
            maya.cmds.confirmDialog(title='Error', button=['OK'], message='Unable to read file: %s' % filePath)
            raise
        # deserialize the file's data into the dictionary
        # NOTE(review): cPickle.load on an arbitrary user-chosen file executes
        # whatever the pickle contains — only load trusted files
        self.dictionaryData = cPickle.load(file)
        # close the file
        file.close()
        # read the values back from the dictionary
        valueTranslation = self.dictionaryData['translation']
        valueRotation = self.dictionaryData['rotation']
        valueDimension = self.dictionaryData['dimension']
        valueColor = self.dictionaryData['color']
        # append a 4th value for the floatFieldGrp (even though only 3 are used)
        valueTranslation.append(-1.0)
        valueRotation.append(-1.0)
        valueDimension.append(-1.0)
        # push the values into the tool's controls
        maya.cmds.floatFieldGrp(self.textboxTranslation, edit=True, value=valueTranslation)
        maya.cmds.floatFieldGrp(self.textboxRotation, edit=True, value=valueRotation)
        maya.cmds.floatFieldGrp(self.textboxDimension, edit=True, value=valueDimension)
        maya.cmds.colorSliderGrp(self.colorPicker, edit=True, rgbValue=valueColor)
    def callbackMenuQuit(self, *args):
        """'Quit' menu: close the tool."""
        self.destroy()
    def callbackMenuHelp(self, *args):
        """'Help' menu: open a web page that could hold help about the tool."""
        maya.cmds.launch(web='http://wikipedia.com')
    # button callback functions
    def callbackButtonCommand1(self, *args):
        """Button 1 ('Reset'): restore defaults and apply them to the selection."""
        print 'command: RESET'
        self.reset()
        self.applyToSelection()
    def callbackButtonCommand2(self, *args):
        """Button 2 ('Random'): randomize the translation and apply it."""
        print 'command: RANDOM'
        # extent of the space in which the random position is chosen
        rangePosition = 10
        tresholdA = -rangePosition
        tresholdB = rangePosition
        # generate a random position on the three axes within the threshold
        randomX = random.uniform(tresholdA, tresholdB)
        randomY = random.uniform(tresholdA, tresholdB)
        randomZ = random.uniform(tresholdA, tresholdB)
        # assign the random position to the floatFieldGrp text fields
        maya.cmds.floatFieldGrp(self.textboxTranslation, edit=True, value=[randomX, randomY, randomZ, -1.0])
        self.applyToSelection()
    def callbackButtonCommand3(self, *args):
        """Button 3 ('Extrude'): double-extrude every face of the selected mesh."""
        print 'command: EXTRUDE'
        if self.selectTarget():
            # select the model
            shape = self.target[0]
            # count the model's faces
            polycount = maya.cmds.polyEvaluate(shape, face=True)
            # extrude the model's faces
            for index in range(0, polycount):
                face = '%s.f[%s]'%(shape, index)
                maya.cmds.polyExtrudeFacet(face, localTranslateZ=1, constructionHistory=0)
                maya.cmds.polyExtrudeFacet(face, localTranslateZ=1, constructionHistory=0, localScale=[0.5, 0.5, 0.5])
    def callbackButtonCommand4(self, *args):
        """Button 4: placeholder, no action."""
        print 'command: NOTHING'
        print "<button 4 placeholder>"
    def callbackButtonCommand5(self, *args):
        """Button 5: placeholder, no action."""
        print 'command: NOTHING'
        print "<button 5 placeholder>"
    def callbackButtonAttributeHas(self, *args):
        """'Has' button: show the attribute's value if it exists on the selection."""
        if self.selectTarget():
            # take the object's first node
            objectName = self.target[0]
            # read the attribute name from the text field
            attributeName = maya.cmds.textField(self.textboxAttributeName, query=True, text=True)
            # check whether the attribute already exists on the object
            if maya.cmds.objExists("%s.%s" % (objectName, attributeName)):
                # read the attribute value
                attributeValue = maya.cmds.getAttr("%s.%s" % (objectName, attributeName))
                # write the attribute value into the text field
                maya.cmds.textField(self.textboxAttributeValue, edit=True, text=attributeValue)
            else:
                message = "attibute not found"
                maya.cmds.textField(self.textboxAttributeValue, edit=True, text=message)
                print message
        else:
            print "Selection is empty"
    def callbackButtonAttributeAdd(self, *args):
        """'Add' button: create the attribute if missing, then write its value."""
        if self.selectTarget():
            # take the object's first node
            objectName = self.target[0]
            # read the attribute name from the text field
            attributeName = maya.cmds.textField(self.textboxAttributeName, query=True, text=True)
            # read the attribute value from the text field
            attributeValue = str(maya.cmds.textField(self.textboxAttributeValue, query=True, text=True))
            # check whether the attribute already exists on the object
            if not maya.cmds.objExists("%s.%s" % (objectName, attributeName)):
                # add the attribute with a default value
                maya.cmds.addAttr(objectName, longName=attributeName, dataType='string')
            # write the attribute value
            # (indentation reconstructed — TODO confirm setAttr runs even when the attribute pre-exists)
            maya.cmds.setAttr("%s.%s" % (objectName, attributeName), attributeValue, type='string')
        else:
            print "Selection is empty"
    def callbackButtonAttributeRemove(self, *args):
        """'Remove' button: delete the attribute from the selected object."""
        if self.selectTarget():
            # take the object's first node
            objectName = self.target[0]
            # read the attribute name from the text field
            attributeName = maya.cmds.textField(self.textboxAttributeName, query=True, text=True)
            # check whether the attribute exists on the object
            if maya.cmds.objExists("%s.%s" % (objectName, attributeName)):
                # delete the attribute
                maya.cmds.deleteAttr(objectName, attribute=attributeName)
            else:
                message = "attibute not found"
                maya.cmds.textField(self.textboxAttributeValue, edit=True, text=message)
                print message
        else:
            print "Selection is empty"
    def callbackButtonAction(self, *args):
        """'Apply & Close' button: apply the values, then close the window."""
        self.applyToSelection()
        self.destroy()
    def callbackButtonApply(self, *args):
        """'Apply' button: apply the values to the selection."""
        self.applyToSelection()
    def callbackButtonClose(self, *args):
        """'Close' button: close the window."""
        self.destroy()
# Fix: Python-2-only print statements converted to print() calls.
print("\n<début de l'exécution>\n")
# instantiate the tool window
tool = CustomToolWindow()
# build and display the window
tool.showUI()
print("\n<fin de l'exécution>\n")
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module09/EXE05/ANI2012A17_Tool.py",
"copies": "1",
"size": "24325",
"license": "mit",
"hash": -2442210005626527000,
"line_mean": 32.4355062413,
"line_max": 139,
"alpha_frac": 0.6887626001,
"autogenerated": false,
"ratio": 3.2354046436719903,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44241672437719903,
"avg_score": null,
"num_lines": null
} |
# ANI2102A17_Tween.py | Python programming with Maya | coding=utf-8
# Examples of numeric value interpolation (linear and smoothstep).
# Fix: Python-2-only print statements converted to print() calls.
# program parameters
size = 10
# minimum and maximum values of the transition
positionFrom = -20
positionTo = 20
# duration of the interpolation (in frames)
duration = 120
print("\n<début de l'exécution>\n")
# 1. linear interpolation
print("<interpolation linéaire entre deux bornes (lerp)>\n")
# create a cube
maya.cmds.polyCube(name='cube', width=size, height=size, depth=size)
# clear the selection
maya.cmds.select(clear=True)
# select the cube
maya.cmds.select('cube', add=True)
# grab the cube's transform node
target = maya.cmds.ls(selection=True, head=True)[0]
def lerp(thresholdMin, thresholdMax, value):
    """Linearly interpolate between two bounds.

    The interpolation parameter `value` is effectively clamped to [0, 1]:
    anything at or below 0 yields thresholdMin, anything at or above 1
    yields thresholdMax; values strictly in between blend the two bounds.
    """
    # Guard clauses replace the original nested if/else.
    if value <= 0.0:
        return thresholdMin
    if value >= 1.0:
        return thresholdMax
    # Weighted blend of the two bounds.
    return thresholdMin * (1.0 - value) + thresholdMax * value
print "<lerp>"
for index in range(duration+1):
    # Playhead position in [0, 1] for this loop index.
    playhead = index / float(duration)
    # Interpolated value for the current playhead position.
    position = lerp(positionFrom, positionTo, playhead)
    # Keyframe the cube's translateX attribute at the current frame.
    maya.cmds.setKeyframe(target, time=index, attribute='translateX', value=position)
    # Trace the values used for the interpolation computation.
    print "\t<from:\t%s\tto:\t%s\tplayhead:\t%0.2f\tposition:\t%0.2f>" % (positionFrom, positionTo, playhead, position)
print "<lerp>\n"
# 2. Interpolation with acceleration and deceleration.
print "<interpolation entre deux bornes avec accélération et décélération (smoothstep)>\n"

# Create a sphere (radius scaled by 0.618).
maya.cmds.polySphere(name='sphere', radius=size * 0.618)
# Clear the selection.
maya.cmds.select(clear=True)
# Select the sphere.
maya.cmds.select('sphere', add=True)
# Extract the sphere's transform node (first item of the selection).
target = maya.cmds.ls(selection=True, head=True)[0]
def smoothstep(thresholdMin, thresholdMax, value):
    """Interpolate between two bounds with ease-in/ease-out (smoothstep).

    Returns 0.0 when the bounds are not strictly increasing or when `value`
    is at or below the lower bound, 1.0 at or above the upper bound, and the
    cubic Hermite curve 3t^2 - 2t^3 of the normalized position in between.
    """
    # Degenerate or reversed interval: the original returns 0 here.
    if thresholdMin >= thresholdMax:
        return 0.0
    if value <= thresholdMin:
        return 0.0
    if value >= thresholdMax:
        return 1.0
    # Normalize into [0, 1] and clamp against floating-point drift.
    t = (value - thresholdMin) / float(thresholdMax - thresholdMin)
    t = max(0.0, min(1.0, t))
    # Cubic Hermite ease curve.
    return t * t * (3.0 - 2.0 * t)
print "<smoothstep>"
for index in range(duration + 1):
    # Playhead position within [positionFrom, positionTo] for this index.
    playhead = positionFrom + index / float(duration) * (positionTo - positionFrom)
    # Eased interpolation factor in [0, 1] for the current playhead.
    value = smoothstep(positionFrom, positionTo, playhead)
    # Blend the two bounds with the eased factor to get the position.
    position = (positionFrom * (1.0 - value)) + (positionTo * value)
    # Keyframe the sphere's translateX attribute at the current frame.
    maya.cmds.setKeyframe(target, time=index, attribute='translateX', value=position)
    # Trace the values used for the interpolation computation.
    print "\t<from:\t%s\tto:\t%s\tplayhead:\t%0.2f\tvalue:\t%0.2f\tposition:\t%0.2f>" % (positionFrom, positionTo, playhead, value, position)
print "<smoothstep>\n"

print "\n<fin de l'exécution>\n"
| {
"repo_name": "philvoyer/ANI2012A17",
"path": "Module08/EXE09/ANI2012A17_Tween.py",
"copies": "1",
"size": "3984",
"license": "mit",
"hash": -3825321818809536500,
"line_mean": 32.2966101695,
"line_max": 144,
"alpha_frac": 0.7284296259,
"autogenerated": false,
"ratio": 3.003822629969419,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9183059143190994,
"avg_score": 0.009838622535685038,
"num_lines": 118
} |
"""aniauth accounts app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib.auth.views import LogoutView
import logintokens.views as views
# URL routes for the token-based login flow (see logintokens.views).
urlpatterns = [
    # Django's built-in logout view.
    url(r'^logout/$', LogoutView.as_view(),
        name='logout'),
    # Log a user in via TokenLoginView.
    url(r'^token_login/$', views.TokenLoginView.as_view(),
        name='token_login'),
    # Request a login token via SendTokenView.
    url(r'^send_token/$', views.SendTokenView.as_view(),
        name='send_token'),
    # Confirmation page shown after a token was sent.
    url(r'^send_token/done/$', views.SendTokenDoneView.as_view(),
        name='send_token_done'),
]
| {
"repo_name": "randomic/aniauth-tdd",
"path": "logintokens/urls.py",
"copies": "1",
"size": "1131",
"license": "mit",
"hash": -7763168502605513000,
"line_mean": 35.4838709677,
"line_max": 79,
"alpha_frac": 0.6790450928,
"autogenerated": false,
"ratio": 3.448170731707317,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9627215824507318,
"avg_score": 0,
"num_lines": 31
} |
from anibots import *
import breve
class AnibotPhysicsSim( breve.PhysicalControl ):
    """Breve physics controller driving anibot agents that push a block.

    NOTE(review): indentation was reconstructed from a whitespace-stripped
    source; verify block nesting (especially in iterate()) against the
    original file.  Also note that despite subclassing breve.PhysicalControl,
    __init__ calls breve.Control.__init__ directly -- confirm intentional.
    """

    def __init__( self ):
        breve.Control.__init__( self )
        self.bots = breve.objectList()
        # Number of simulation ticks each chosen action is held for.
        self.actionDuration = 45
        # Start at the threshold so the first iterate() immediately picks an action.
        self.iterCount=self.actionDuration
        self.videoLog = breve.Movie()
        self.block = None
        #configure the anibots
        self.env = None
        self.numBots = 1
        self.iterations = 20
        # Search depth passed to the agents' environment (AgentEnv).
        self.kDepth = 3
        self.takeTurns = True
        #self.anibotConfig = AnibotConfig("../exp_0_save/0.1.A/01a-graf.dat", "../exp_0_save/0.1.A/01a-pedge.dat","../exp_0_save/0.1.A/01a-pweights.dat")
        # Load graph, proxy-edge and proxy-weight data for experiment 0.2.B.
        self.anibotConfig = AnibotConfig("../exp_0_save/0.2.B/02b-graf.dat", "../exp_0_save/0.2.B/02b-pedge.dat","../exp_0_save/0.2.B/02b-pweights.dat")
        self.anibotConfig.proxify = False
        # bool proxyWeightsProportional;
        # float proxyWeightsProportion;
        #bool randomizeEdges
        self.anibotConfig.randomize = False
        #self.anibotConfig.quant = 11
        #self.anibotConfig.quantDiff = 1
        #anibotConfig.quantIrregular;
        #self.anibotConfig.randMin = 0
        #self.anibotConfig.randMax = 10
        # bool singleTops;
        AnibotPhysicsSim.init( self )

    def init( self ):
        print '''Setting up Anibot environment'''
        # start the anibots environment (mental simulation)
        self.env = AgentEnv("anibots_breve_exp0-single.py",self.kDepth,self.takeTurns)
        self.env.NewAnibot(self.numBots,self.anibotConfig)
        self.env.InitLoner(0)
        #self.env.InitTransaction(0,1)
        print '''Setting up Physics Sim.'''
        # start the visual/physical environment in Breve
        self.setDisplayText( "Anibots Sim", -1.0, 0.8, 1 )
        self.setRandomSeedFromDevRandom()
        self.enableFastPhysics()
        self.setFastPhysicsIterations( 15 )
        #self.setGravity( breve.vector(0.0,-3.0,0.0) )
        self.enableLighting()
        self.enableSmoothDrawing()
        self.moveLight( breve.vector( 20, 30, 20 ) )
        floor = breve.createInstances( breve.Floor, 1 )
        floor.catchShadows()
        #floor.setE( 1.000000 )
        # Frictionless floor so the block slides freely.
        floor.setMu(0.0)
        #floor.showAxis()
        self.cloudTexture = breve.createInstances( breve.Image, 1 ).load( 'images/clouds.png' )
        self.enableShadowVolumes()
        self.enableReflections()
        self.setBackgroundColor( breve.vector( 0.400000, 0.600000, 0.900000 ) )
        self.setBackgroundTextureImage( self.cloudTexture )
        #self.offsetCamera( breve.vector( 3, 13, -13 ) )
        self.pointCamera( breve.vector( 0, 0, 0 ), breve.vector( 20, 20, 60 ) )
        # the virtual bodies
        self.bots = breve.createInstances( breve.AnibotBody, 1 )
        self.bots.move( breve.vector( -7.5, self.bots.radius, 14 ) )
        self.env.Next()
        # the block the bots push around
        self.block = breve.createInstances( breve.Mobile, 1 )
        shape = breve.createInstances( breve.Cube, 1 ).initWith( breve.vector(15,3,4) )
        shape.setMass(0.5)
        self.block.setShape(shape)
        self.block.setColor(breve.vector( 1.0, 0.5 ,0.0 ))
        self.block.move( breve.vector( 0.0, 1.5 ,0.0 ) )
        self.block.setMu(0.0)
        #self.block.setE(0.1)
        self.block.enablePhysics()
        print self.block.getMass()
        #self.block.setForce( breve.vector( 500.0, 500.0 , 500.0 ) )
        #self.block.setVelocity( breve.vector( 0, 0, -10 ) )
        #self.watch( self.bots[0] )
        self.watch( self.block )
        self.videoLog.record("anibots-0_1_B.mpg")

    def iterate( self ):
        self.iterCount = self.iterCount + 1
        # Every actionDuration ticks, query the agent environment for the
        # next action and steer the bot accordingly.
        if self.iterCount > self.actionDuration:
            action="foo"
            self.iterCount=0
            self.env.Next()
            i = 0
            # Action codes 67/76/82 are ASCII 'C'/'L'/'R', matching the
            # "center"/"left"/"right" labels below.
            if self.env.CurrentAction(i) == 67:
                action = "center"
                self.bots.moveX(0.0)
            elif self.env.CurrentAction(i) == 76:
                action = "left"
                self.bots.moveX(-3.7)
            elif self.env.CurrentAction(i) == 82:
                action = "right"
                self.bots.moveX(3.7)
            else:
                action = "unknown"
                self.bots[i].moveX(8.0)
            j=4
            s = "bot"+ str(i)+"action: " + action
            print s
            self.setDisplayText(s, -1.0, 0.7-(i/10.0), i+2 )
            # Show how far the block has been pushed along -Z.
            s2 = "block dist: %.2f" % (-self.block.getLocation()[2])
            self.setDisplayText(s2, -1.0, 0.5, 6)
        breve.Control.iterate( self )

# Register the class with the breve namespace (breve convention).
breve.AnibotPhysicsSim = AnibotPhysicsSim
class AnibotBody( breve.Mobile ):
    """Physical body of an anibot: a dense sphere that drifts along -Z.

    NOTE(review): indentation reconstructed from a whitespace-stripped
    source; verify nesting against the original file.
    """

    def __init__( self ):
        breve.Mobile.__init__( self )
        # Sphere radius; also used as the constant Y (height) when moving.
        self.radius = 1.5
        AnibotBody.init( self )

    def init( self ):
        shape = breve.createInstances( breve.Sphere, 1 ).initWith( self.radius )
        shape.setDensity(100)
        self.setShape( shape )
        #self.setShape( breve.createInstances( breve.Cube, 1 ).initWith( breve.vector(self.radius,self.radius,self.radius) ))
        #self.setColor( breve.randomExpression( breve.vector( 1.000000, 1.000000, 1.000000 ) ) )
        self.setColor(breve.vector( 0.70, 0.2 ,0.6 ))
        #self.move( breve.vector( breve.randomExpression(8.0)-4.0, self.radius, breve.randomExpression(20.0) + 8.0 ) )
        self.move( breve.vector( 0.0, self.radius, 14.0 ) )
        print self.getMass()
        self.enablePhysics()
        #self.setVelocity( breve.vector( 0.0, 0.0, -2.0 ) )
        #self.setForce( breve.vector( 0.0, 0.0, -100.0 ) )

    def moveX( self, x ):
        # Teleport to lane x (keeping height), stepping 2 units forward in Z.
        if self.getLocation()[0] != x:
            z = self.getLocation()[2]
            self.move( breve.vector( x, self.radius, z+2 ) )

    def iterate( self ):
        #print self.getVelocity()
        # Constant drift toward the block (-Z) every tick.
        self.setVelocity( breve.vector( 0.0, 0.0, -2.0 ) )

# Register the class with the breve namespace (breve convention).
breve.AnibotBody = AnibotBody
# Create an instance of our controller object to initialize the simulation
# (breve runs the controller created at module load time).
AnibotPhysicsSim()
| {
"repo_name": "SynapticNulship/Anibots",
"path": "sim_py/anibots_breve_exp0-single.py",
"copies": "1",
"size": "5739",
"license": "mit",
"hash": -574097927540259200,
"line_mean": 32.3662790698,
"line_max": 147,
"alpha_frac": 0.6820003485,
"autogenerated": false,
"ratio": 2.624142661179698,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8280933322540975,
"avg_score": 0.10504193742774459,
"num_lines": 172
} |
# Requires Python >= 2.4
import os
import filecmp
import difflib  # imported for the planned fine-grained diff (not used yet)
import subprocess

# Banner printed when the regression suite starts.
print("Anibots Regression Test\n")
def runFileRegression(expectedFile, testFile):
    """Compare a test output file against an expected reference file.

    Both paths are normalized before comparison.  The comparison is a full
    byte-for-byte content check (filecmp.cmp with shallow=False); a success
    or failure notice is printed.  Returns None.
    """
    expected = os.path.normpath(expectedFile)
    test = os.path.normpath(testFile)
    # shallow=False forces a content comparison instead of an os.stat() check.
    coarse_test = filecmp.cmp(expected, test, False)
    # Idiomatic truth test instead of the original '== True'.
    if coarse_test:
        print("Test succeeded.\n")
    else:
        print("Coarse test failed!\nRunning fine test for more info...\n")
        # TODO: fine-grained diff (e.g. difflib) for detailed failure output
#meta-test: test the regression tester:
# Comparing a file against itself must always report success.
print("Meta test...")
expectedPath = "./expected/case1/"
expected = expectedPath + "behavior-log.txt"
testOutput = expected
runFileRegression(expected, testOutput)

# test case 1 is based on exp0\0.2a\alternating
print("Test Case 1")
# anibots_cli -f 02a-graf.dat -p 02a-pedge.dat -k 2 -v -i 10 -r 0 10 -q 11 1
# Run the CLI under test; its behavior log is captured via shell redirection.
# NOTE(review): shell=True with a fixed command string -- acceptable for a
# local test harness, but never pass untrusted input through it.
testcmd = "anibots_cli.exe -f ./expected/case1/r0-graf.dat -p ./expected/case1/r0-pedge.dat -pw ./expected/case1/r0-pweights.dat -k 2 -v -i 10 -q 11 1 > behavior-log.txt"
ret = subprocess.check_output(testcmd, shell=True)
#print(ret)
expectedPath = "./expected/case1/"
expected = expectedPath + "behavior-log.txt"
testOutput = "behavior-log.txt"
runFileRegression(expected, testOutput)
#todo: clean the test output
"repo_name": "SynapticNulship/Anibots",
"path": "regression/regtest.py",
"copies": "1",
"size": "1440",
"license": "mit",
"hash": -3461717275489422000,
"line_mean": 29.0208333333,
"line_max": 170,
"alpha_frac": 0.7347222222,
"autogenerated": false,
"ratio": 2.9508196721311477,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.911228087274033,
"avg_score": 0.01465220431816349,
"num_lines": 48
} |
""" An ice cream stand is a specific kind of restaurant. Write a class called IceCreamStand that inherits from the Restaurant class you wrote
in Exercise 9-1 (page 166) or Exercise 9-4 (page 171). Either version of the class will work; just pick the one you like better.
Add an attribute called flavors that stores a list of ice cream flavors. Write a method that displays
these flavors. Create an instance of IceCreamStand, and call this method. """
class Restaurant(object):
    """A generic restaurant: name, cuisine type and served-customer count."""

    def __init__(self, restaurant_name, cuisine_type):
        """Store the restaurant's name and cuisine; nobody served yet."""
        self.restaurant_name = restaurant_name
        self.cuisine_type = cuisine_type
        self.number_served = 0

    def describe_restaurant(self):
        """Return a two-line description (title-cased name, then cuisine)."""
        return "Nombre del restaurante: {0}\nTipo de cocina: {1}".format(
            self.restaurant_name.title(), self.cuisine_type)

    def open_restaurant(self):
        """Announce that the restaurant is open."""
        print("El restaurante está abierto")

    def read_number_served(self):
        """Print how many customers have been served so far."""
        print("Se han atendido " + str(self.number_served) + " persona(s)")

    def set_number_served(self, number):
        """Overwrite the served-customer counter."""
        self.number_served = number

    def increment_number_server(self, more_served):
        """Add `more_served` to the served-customer counter.

        (Name kept as-is -- 'server' -- for interface compatibility.)
        """
        self.number_served += more_served
class IceCreamStand(Restaurant):
    """A specialized restaurant that offers a list of ice-cream flavors."""

    def __init__(self, restaurant_name, cuisine_type):
        """Initialize like a Restaurant, starting with no flavors."""
        super(IceCreamStand, self).__init__(restaurant_name, cuisine_type)
        self.flavors = []

    def set_flavors(self, *list_flavors):
        """Replace the flavor list with the given flavors."""
        self.flavors = list_flavors

    def read_flavors(self):
        """Print every available flavor as a bulleted list."""
        print("\nSabores de helados:")
        for item in self.flavors:
            print("- " + item)
# Demo: build an ice-cream stand, describe it, then list its flavors.
ice_cream_stand=IceCreamStand('helados morelia', 'helados')
print(ice_cream_stand.describe_restaurant())
ice_cream_stand.set_flavors('chocolate', 'vainilla', 'fresa', 'coco')
ice_cream_stand.read_flavors()
| {
"repo_name": "AnhellO/DAS_Sistemas",
"path": "Ene-Jun-2019/Karla Berlanga/Practica 1/restaurant.py",
"copies": "1",
"size": "1918",
"license": "mit",
"hash": 1637752311033574100,
"line_mean": 39.7872340426,
"line_max": 141,
"alpha_frac": 0.6922274387,
"autogenerated": false,
"ratio": 3.4918032786885247,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46840307173885243,
"avg_score": null,
"num_lines": null
} |
#An ice cream stand is a specific kind of restaurant. Write
#a class called IceCreamStand that inherits from the Restaurant class you wrote
#in Exercise 9-1 (page 166) or Exercise 9-4 (page 171). Either version of
#the class will work; just pick the one you like better. Add an attribute called
#flavors that stores a list of ice cream flavors. Write a method that displays
#these flavors. Create an instance of IceCreamStand, and call this method.
class Restaurant():
    """A restaurant with a title-cased name, a cuisine and a served count."""

    def __init__(self, name, cuisine_type):
        """Store the title-cased name and the cuisine; nobody served yet."""
        self.name = name.title()
        self.cuisine_type = cuisine_type
        self.number_served = 0

    def describe_restaurant(self):
        """Print a one-sentence description of the restaurant."""
        print("\n" + self.name + " sirve un delicioso " + self.cuisine_type + ".")

    def open_restaurant(self):
        """Print an 'open for business' message."""
        print("\n" + self.name + " is open. Come on in!")

    def set_number_served(self, number_served):
        """Overwrite the count of customers served."""
        self.number_served = number_served

    def increment_number_served(self, additional_served):
        """Increase the count of customers served."""
        self.number_served += additional_served
class IceCreamStand(Restaurant):
    """An ice-cream stand: a Restaurant that also tracks flavors."""

    def __init__(self, name, cuisine_type='helado doble!!'):
        """Initialize like a Restaurant, starting with no flavors."""
        super().__init__(name, cuisine_type)
        self.flavors = []

    def show_flavors(self):
        """Print the available flavors, one per bulleted line."""
        print("\nSabores disponibles:")
        for item in self.flavors:
            print("- " + item.title())
# Demo: create a stand, assign flavors directly, describe it and list them.
helado_feliz = IceCreamStand('Helado Feliz')
helado_feliz.flavors = ['Vanilla', 'Chocolate', 'Fresa']
helado_feliz.describe_restaurant()
helado_feliz.show_flavors()
"repo_name": "AnhellO/DAS_Sistemas",
"path": "Ene-Jun-2019/NoemiFlores/PrimerParcial/PrimeraPractica/IceCream.py",
"copies": "1",
"size": "1803",
"license": "mit",
"hash": -3328454368342488600,
"line_mean": 34.3529411765,
"line_max": 80,
"alpha_frac": 0.6553829079,
"autogenerated": false,
"ratio": 3.4389312977099236,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45943142056099234,
"avg_score": null,
"num_lines": null
} |
# A nicer way to build up index tuples for arrays.
#
# You can do all this with slice() plus a few special objects,
# but there's a lot to remember. This version is simpler because
# it uses the standard array indexing syntax.
#
# Written by Konrad Hinsen <hinsen@cnrs-orleans.fr>
# last revision: 1999-7-23
#
"""This module provides a convenient method for constructing
array indices algorithmically. It provides one importable object,
'index_expression'.
For any index combination, including slicing and axis insertion,
'a[indices]' is the same as 'a[index_expression[indices]]' for any
array 'a'. However, 'index_expression[indices]' can be used anywhere
in Python code and returns a tuple of indexing objects that can be
used in the construction of complex index expressions.
Sole restriction: Slices must be specified in the double-colon
form, i.e. a[::] is allowed, whereas a[:] is not.
"""
class _index_expression_class:
    """Helper whose indexing operator returns its indices as a tuple.

    Python 2 only: relies on sys.maxint and the __getslice__ protocol,
    both of which were removed in Python 3.
    """
    import sys
    # Huge sentinel length; Python 2 passes sys.maxint for an omitted
    # slice bound, which __getslice__ detects below.
    maxint = sys.maxint

    def __getitem__(self, item):
        # Always hand back a tuple, even for a single index.
        if type(item) != type(()):
            return (item,)
        else:
            return item

    def __len__(self):
        # Report a huge length so arbitrary slice bounds are accepted.
        return self.maxint

    def __getslice__(self, start, stop):
        # An omitted stop arrives as maxint; translate it back to None and
        # re-enter __getitem__ via a double-colon (extended) slice.
        if stop == self.maxint:
            stop = None
        return self[start:stop:None]

# The single importable instance.
index_expression = _index_expression_class()
| {
"repo_name": "OS2World/DEV-PYTHON-UTIL-ScientificPython",
"path": "src/Lib/site-packages/Scientific/indexing.py",
"copies": "1",
"size": "1338",
"license": "isc",
"hash": -4005946570136556000,
"line_mean": 29.4090909091,
"line_max": 68,
"alpha_frac": 0.6890881913,
"autogenerated": false,
"ratio": 3.867052023121387,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5056140214421387,
"avg_score": null,
"num_lines": null
} |
"""An IDLE extension to avoid having very long texts printed in the shell.
A common problem in IDLE's interactive shell is printing of large amounts of
text into the shell. This makes looking at the previous history difficult.
Worse, this can cause IDLE to become very slow, even to the point of being
completely unusable.
This extension will automatically replace long texts with a small button.
Double-clicking this button will remove it and insert the original text instead.
Middle-clicking will copy the text to the clipboard. Right-clicking will open
the text in a separate viewing window.
Additionally, any output can be manually "squeezed" by the user. This includes
output written to the standard error stream ("stderr"), such as exception
messages and their tracebacks.
"""
import re
import tkinter as tk
import tkinter.messagebox as tkMessageBox
from idlelib.config import idleConf
from idlelib.textview import view_text
from idlelib.tooltip import Hovertip
from idlelib import macosx
def count_lines_with_wrapping(s, linewidth=80):
    """Count the number of display lines in a given string.

    Lines are counted as if the string were wrapped so that no line exceeds
    linewidth characters.  Tabs advance to the next multiple of the tab
    width (8, currently always true in Shell).
    """
    tabwidth = 8  # Currently always true in Shell.
    scanned = 0   # index just past the last processed character
    lines = 1
    column = 0    # width of the current (partial) line

    for found in re.finditer(r"[\t\n]", s):
        # Account for the plain characters before this tab/newline.
        plain = found.start() - scanned
        scanned += plain
        column += plain

        if s[scanned] == '\n':
            # A line of exactly linewidth chars does not wrap, so subtract
            # one before dividing; skip the division entirely for short or
            # empty lines (also avoids the column == 0 edge case).
            if column > linewidth:
                lines += (column - 1) // linewidth
            lines += 1
            column = 0
        else:
            assert s[scanned] == '\t'
            # Advance to the next tab stop; a tab passing the end of the
            # line is counted entirely on the following line.
            column += tabwidth - (column % tabwidth)
            if column > linewidth:
                lines += 1
                column = tabwidth
        scanned += 1  # step over the tab or newline itself

    # Remaining characters after the final tab/newline.
    column += len(s) - scanned
    if column > 0:
        # Avoid divmod(-1, linewidth).
        lines += (column - 1) // linewidth
    else:
        # Text ended with a newline; don't count an extra line after it.
        lines -= 1

    return lines
class ExpandingButton(tk.Button):
    """Class for the "squeezed" text buttons used by Squeezer

    These buttons are displayed inside a Tk Text widget in place of text. A
    user can then use the button to replace it with the original text, copy
    the original text to the clipboard or view the original text in a separate
    window.

    Each button is tied to a Squeezer instance, and it knows to update the
    Squeezer instance when it is expanded (and therefore removed).
    """
    def __init__(self, s, tags, numoflines, squeezer):
        self.s = s                      # the original (squeezed) text
        self.tags = tags                # text tags to restore on expand
        self.numoflines = numoflines
        self.squeezer = squeezer
        self.editwin = editwin = squeezer.editwin
        self.text = text = editwin.text
        # The base Text widget is needed to change text before iomark.
        self.base_text = editwin.per.bottom

        line_plurality = "lines" if numoflines != 1 else "line"
        button_text = f"Squeezed text ({numoflines} {line_plurality})."
        tk.Button.__init__(self, text, text=button_text,
                           background="#FFFFC0", activebackground="#FFFFE0")

        button_tooltip_text = (
            "Double-click to expand, right-click for more options."
        )
        Hovertip(self, button_tooltip_text, hover_delay=80)

        self.bind("<Double-Button-1>", self.expand)
        if macosx.isAquaTk():
            # AquaTk defines <2> as the right button, not <3>.
            self.bind("<Button-2>", self.context_menu_event)
        else:
            self.bind("<Button-3>", self.context_menu_event)
        self.selection_handle(  # X windows only.
            lambda offset, length: s[int(offset):int(offset) + int(length)])

        # Computed lazily (needs winfo_width, so after layout).
        self.is_dangerous = None
        self.after_idle(self.set_is_dangerous)

    def set_is_dangerous(self):
        # Heuristic: expansion is "dangerous" (could freeze IDLE) when the
        # text is very long, has very many lines, or has extremely long lines.
        dangerous_line_len = 50 * self.text.winfo_width()
        self.is_dangerous = (
            self.numoflines > 1000 or
            len(self.s) > 50000 or
            any(
                len(line_match.group(0)) >= dangerous_line_len
                for line_match in re.finditer(r'[^\n]+', self.s)
            )
        )

    def expand(self, event=None):
        """expand event handler

        This inserts the original text in place of the button in the Text
        widget, removes the button and updates the Squeezer instance.

        If the original text is dangerously long, i.e. expanding it could
        cause a performance degradation, ask the user for confirmation.
        """
        if self.is_dangerous is None:
            self.set_is_dangerous()
        if self.is_dangerous:
            confirm = tkMessageBox.askokcancel(
                title="Expand huge output?",
                message="\n\n".join([
                    "The squeezed output is very long: %d lines, %d chars.",
                    "Expanding it could make IDLE slow or unresponsive.",
                    "It is recommended to view or copy the output instead.",
                    "Really expand?"
                ]) % (self.numoflines, len(self.s)),
                default=tkMessageBox.CANCEL,
                parent=self.text)
            if not confirm:
                return "break"

        # Restore the original text with its tags, then remove the button.
        self.base_text.insert(self.text.index(self), self.s, self.tags)
        self.base_text.delete(self)
        self.squeezer.expandingbuttons.remove(self)

    def copy(self, event=None):
        """copy event handler

        Copy the original text to the clipboard.
        """
        self.clipboard_clear()
        self.clipboard_append(self.s)

    def view(self, event=None):
        """view event handler

        View the original text in a separate text viewer window.
        """
        view_text(self.text, "Squeezed Output Viewer", self.s,
                  modal=False, wrap='none')

    rmenu_specs = (
        # Item structure: (label, method_name).
        ('copy', 'copy'),
        ('view', 'view'),
    )

    def context_menu_event(self, event):
        # Build and show the right-click menu from rmenu_specs.
        self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
        rmenu = tk.Menu(self.text, tearoff=0)
        for label, method_name in self.rmenu_specs:
            rmenu.add_command(label=label, command=getattr(self, method_name))
        rmenu.tk_popup(event.x_root, event.y_root)
        return "break"
class Squeezer:
    """Replace long outputs in the shell with a simple button.

    This avoids IDLE's shell slowing down considerably, and even becoming
    completely unresponsive, when very long outputs are written.
    """
    @classmethod
    def reload(cls):
        """Load class variables from config."""
        cls.auto_squeeze_min_lines = idleConf.GetOption(
            "main", "PyShell", "auto-squeeze-min-lines",
            type="int", default=50,
        )

    def __init__(self, editwin):
        """Initialize settings for Squeezer.

        editwin is the shell's Editor window.
        self.text is the editor window text widget.
        self.base_test is the actual editor window Tk text widget, rather than
        EditorWindow's wrapper.
        self.expandingbuttons is the list of all buttons representing
        "squeezed" output.
        """
        self.editwin = editwin
        self.text = text = editwin.text

        # Get the base Text widget of the PyShell object, used to change
        # text before the iomark. PyShell deliberately disables changing
        # text before the iomark via its 'text' attribute, which is
        # actually a wrapper for the actual Text widget. Squeezer,
        # however, needs to make such changes.
        self.base_text = editwin.per.bottom

        # Twice the text widget's border width and internal padding;
        # pre-calculated here for the get_line_width() method.
        self.window_width_delta = 2 * (
            int(text.cget('border')) +
            int(text.cget('padx'))
        )

        self.expandingbuttons = []

        # Replace the PyShell instance's write method with a wrapper,
        # which inserts an ExpandingButton instead of a long text.
        def mywrite(s, tags=(), write=editwin.write):
            # Only auto-squeeze text which has just the "stdout" tag.
            if tags != "stdout":
                return write(s, tags)

            # Only auto-squeeze text with at least the minimum
            # configured number of lines.
            auto_squeeze_min_lines = self.auto_squeeze_min_lines
            # First, a very quick check to skip very short texts:
            # a string with fewer than N chars cannot span N lines.
            if len(s) < auto_squeeze_min_lines:
                return write(s, tags)
            # Now the full line-count check.
            numoflines = self.count_lines(s)
            if numoflines < auto_squeeze_min_lines:
                return write(s, tags)

            # Create an ExpandingButton instance.
            expandingbutton = ExpandingButton(s, tags, numoflines, self)

            # Insert the ExpandingButton into the Text widget.
            text.mark_gravity("iomark", tk.RIGHT)
            text.window_create("iomark", window=expandingbutton,
                               padx=3, pady=5)
            text.see("iomark")
            text.update()
            text.mark_gravity("iomark", tk.LEFT)

            # Add the ExpandingButton to the Squeezer's list.
            self.expandingbuttons.append(expandingbutton)

        editwin.write = mywrite

    def count_lines(self, s):
        """Count the number of lines in a given text.

        Before calculation, the tab width and line length of the text are
        fetched, so that up-to-date values are used.

        Lines are counted as if the string was wrapped so that lines are never
        over linewidth characters long.

        Tabs are considered tabwidth characters long.
        """
        return count_lines_with_wrapping(s, self.editwin.width)

    def squeeze_current_text_event(self, event):
        """squeeze-current-text event handler

        Squeeze the block of text inside which contains the "insert" cursor.

        If the insert cursor is not in a squeezable block of text, give the
        user a small warning and do nothing.
        """
        # Set tag_name to the first valid tag found on the "insert" cursor.
        tag_names = self.text.tag_names(tk.INSERT)
        for tag_name in ("stdout", "stderr"):
            if tag_name in tag_names:
                break
        else:
            # The insert cursor doesn't have a "stdout" or "stderr" tag.
            self.text.bell()
            return "break"

        # Find the range to squeeze.
        start, end = self.text.tag_prevrange(tag_name, tk.INSERT + "+1c")
        s = self.text.get(start, end)

        # If the last char is a newline, remove it from the range.
        if len(s) > 0 and s[-1] == '\n':
            end = self.text.index("%s-1c" % end)
            s = s[:-1]

        # Delete the text.
        self.base_text.delete(start, end)

        # Prepare an ExpandingButton.
        numoflines = self.count_lines(s)
        expandingbutton = ExpandingButton(s, tag_name, numoflines, self)

        # insert the ExpandingButton to the Text
        self.text.window_create(start, window=expandingbutton,
                                padx=3, pady=5)

        # Insert the ExpandingButton to the list of ExpandingButtons,
        # while keeping the list ordered according to the position of
        # the buttons in the Text widget.
        i = len(self.expandingbuttons)
        while i > 0 and self.text.compare(self.expandingbuttons[i-1],
                                          ">", expandingbutton):
            i -= 1
        self.expandingbuttons.insert(i, expandingbutton)

        return "break"
# Load the auto-squeeze configuration once at import time.
Squeezer.reload()


if __name__ == "__main__":
    from unittest import main
    main('idlelib.idle_test.test_squeezer', verbosity=2, exit=False)

    # Add htest.
| {
"repo_name": "batermj/algorithm-challenger",
"path": "code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/idlelib/squeezer.py",
"copies": "2",
"size": "12840",
"license": "apache-2.0",
"hash": 1343460937836105500,
"line_mean": 36.2173913043,
"line_max": 80,
"alpha_frac": 0.6066199377,
"autogenerated": false,
"ratio": 4.091778202676864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5698398140376865,
"avg_score": null,
"num_lines": null
} |
"""An Image file reader object.
"""
# Author: KK Rai (kk.rai [at] iitb.ac.in)
# R. Ambareesha (ambareesha [at] iitb.ac.in)
# Chandrashekhar Kaushik
# Suyog Dutt Jain <suyog.jain [at] aero.iitb.ac.in>
# Prabhu Ramachandran <prabhu [at] aero.iitb.ac.in>
# Copyright (c) 2007-2015, Enthought, Inc.
# License: BSD Style.
from os.path import basename
# Enthought library imports.
from traits.api import Instance, Str, Dict
from traitsui.api import View, Group, Item, Include
from tvtk.api import tvtk
# Local imports.
from mayavi.core.file_data_source import FileDataSource
from mayavi.core.pipeline_info import PipelineInfo
########################################################################
# `ImageReader` class
########################################################################
class ImageReader(FileDataSource):
"""A Image file reader. The reader supports all the
different types of Image files.
"""
# The version of this class. Used for persistence.
__version__ = 0
# The Image data file reader.
reader = Instance(tvtk.Object, allow_none=False, record=True)
# Information about what this object can produce.
output_info = PipelineInfo(datasets=['image_data'])
# Our view.
view = View(Group(Include('time_step_group'),
Item(name='base_file_name'),
Item(name='reader',
style='custom',
resizable=True),
show_labels=False),
resizable=True)
######################################################################
# Private Traits
_image_reader_dict = Dict(Str, Instance(tvtk.Object))
######################################################################
# `object` interface
######################################################################
def __init__(self, **traits):
d = {'bmp':tvtk.BMPReader(),
'jpg':tvtk.JPEGReader(),
'png':tvtk.PNGReader(),
'pnm':tvtk.PNMReader(),
'dcm':tvtk.DICOMImageReader(),
'tiff':tvtk.TIFFReader(),
'ximg':tvtk.GESignaReader(),
'dem':tvtk.DEMReader(),
'mha':tvtk.MetaImageReader(),
'mhd':tvtk.MetaImageReader(),
}
# Account for pre 5.2 VTk versions, without MINC reader
if hasattr(tvtk, 'MINCImageReader'):
d['mnc'] = tvtk.MINCImageReader()
d['jpeg'] = d['jpg']
self._image_reader_dict = d
# Call parent class' init.
super(ImageReader, self).__init__(**traits)
def __set_pure_state__(self, state):
# The reader has its own file_name which needs to be fixed.
state.reader.file_name = state.file_path.abs_pth
# Now call the parent class to setup everything.
super(ImageReader, self).__set_pure_state__(state)
######################################################################
# `FileDataSource` interface
######################################################################
def update(self):
self.reader.update()
if len(self.file_path.get()) == 0:
return
self.render()
def has_output_port(self):
""" Return True as the reader has output port."""
return True
def get_output_object(self):
""" Return the reader output port."""
return self.reader.output_port
######################################################################
# Non-public interface
######################################################################
def _file_path_changed(self, fpath):
    # Trait handler fired whenever `file_path` changes: pick a concrete
    # VTK reader from the file extension, rewire the render callback from
    # the old reader to the new one, and refresh the outputs.
    value = fpath.get()
    # Nothing to do for an empty path.
    if len(value) == 0:
        return
    # Extract the file extension
    splitname = value.strip().split('.')
    extension = splitname[-1].lower()
    # Select image reader based on file type
    old_reader = self.reader
    if extension in self._image_reader_dict:
        self.reader = self._image_reader_dict[extension]
    else:
        # Unknown extension: fall back to the generic VTK ImageReader.
        self.reader = tvtk.ImageReader()
    self.reader.file_name = value.strip()
    self.reader.update()
    self.reader.update_information()
    if old_reader is not None:
        # Stop the previous reader from triggering renders.
        old_reader.on_trait_change(self.render, remove=True)
    # NOTE(review): this registers `render` on every path change; if the
    # same cached reader is re-selected the listener may be added twice --
    # confirm whether traits de-duplicates handlers.
    self.reader.on_trait_change(self.render)
    self.outputs = [self.reader.output]
    # Change our name on the tree view
    self.name = self._get_name()
def _get_name(self):
""" Returns the name to display on the tree view. Note that
this is not a property getter.
"""
fname = basename(self.file_path.get())
ret = "%s"%fname
if len(self.file_list) > 1:
ret += " (timeseries)"
if '[Hidden]' in self.name:
ret += ' [Hidden]'
return ret
| {
"repo_name": "dmsurti/mayavi",
"path": "mayavi/sources/image_reader.py",
"copies": "1",
"size": "4929",
"license": "bsd-3-clause",
"hash": 6654195044652316000,
"line_mean": 33.7112676056,
"line_max": 74,
"alpha_frac": 0.5017244877,
"autogenerated": false,
"ratio": 4.209222886421862,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0025671966133462157,
"num_lines": 142
} |
# Sixteen adjectives for the animal line of an IPv4 haiku.  Word order is
# positionally significant to the encoding -- never reorder or respell.
animal_adjectives = (
    'agile bashful clever clumsy drowsy fearful graceful hungry '
    'lonely morose placid ruthless silent thoughtful vapid weary'
).split()
# Sixteen colour words for the animal line; order is significant.
animal_colours = (
    'beige black blue bright bronze brown dark drab '
    'green gold grey jade pale pink red white'
).split()
# Sixteen animal nouns; order is significant.
animal_nouns = (
    'ape bear crow dove frog goat hawk lamb '
    'mouse newt owl pig rat snake toad wolf'
).split()
# Sixteen verbs for the animal line; order is significant.
animal_verbs = (
    'aches basks cries dives eats fights groans hunts '
    'jumps lies prowls runs sleeps thrives wakes yawns'
).split()
# Sixteen adjectives for the nature line; order is significant.
nature_adjectives = (
    'ancient barren blazing crowded distant empty foggy fragrant '
    'frozen moonlit peaceful quiet rugged serene sunlit wind-swept'
).split()
# Sixteen landscape nouns for the nature line; order is significant.
nature_nouns = (
    'canyon clearing desert foothills forest grasslands jungle meadow '
    'mountains prairie river rockpool sand-dune tundra valley wetlands'
).split()
# Sixteen (multi-word) plant phrases; split on commas because the entries
# contain spaces.  Order is significant.
plant_nouns = (
    'autumn colors,cherry blossoms,chrysanthemums,crabapple blooms,'
    'dry palm fronds,fat horse chestnuts,forget-me-nots,jasmine petals,'
    'lotus flowers,ripe blackberries,the maple seeds,the pine needles,'
    'tiger lillies,water lillies,willow branches,yellowwood leaves'
).split(',')
# Sixteen verbs for the plant line; order is significant.
plant_verbs = (
    'blow crunch dance drift drop fall grow pile '
    'rest roll show spin stir sway turn twist'
).split()
# IPv6 dictionaries.  Each of the three lists below holds exactly 256
# words, so one word encodes one byte of an address.  Order and spelling
# (including oddities like 'scortched') are part of the encoding --
# never edit, reorder or "fix" entries.
adjectives = (
    'ace apt arched ash bad bare beige big black bland '
    'bleak blond blue blunt blush bold bone both bound brash '
    'brass brave brief brisk broad bronze brushed burned calm ceil '
    'chaste cheap chilled clean coarse cold cool corn crass crazed '
    'cream crisp crude cruel cursed cute daft damp dark dead '
    'deaf dear deep dense dim drab dry dull faint fair '
    'fake false famed far fast fat fierce fine firm flat '
    'flawed fond foul frail free fresh full fun glum good '
    'grave gray great green grey grim gruff hard harsh high '
    'hoarse hot huge hurt ill jade jet jinxed keen kind '
    'lame lank large last late lean lewd light limp live '
    'loath lone long loose lost louche loud low lush mad '
    'male masked mean meek mild mint moist mute near neat '
    'new nice nude numb odd old pained pale peach pear '
    'peeved pink piqued plain plum plump plush poor posed posh '
    'prim prime prompt prone proud prune puce pure quaint quartz '
    'quick rare raw real red rich ripe rough rude rushed '
    'rust sad safe sage sane scortched shaped sharp sheared short '
    'shrewd shrill shrunk shy sick skilled slain slick slight slim '
    'slow small smart smooth smug snide snug soft sore sought '
    'sour spare sparse spent spoilt spry squat staid stale stary '
    'staunch steep stiff strange straw stretched strict striped strong suave '
    'sure svelte swank sweet swift tall tame tan tart taut '
    'teal terse thick thin tight tiny tired toothed torn tough '
    'trim trussed twin used vague vain vast veiled vexed vile '
    'warm weak webbed wrong wry young'
).split()
# 256 nouns used to encode IPv6 address bytes.  Order and spelling are
# part of the encoding -- never edit, reorder or "fix" entries.
nouns = (
    'ants apes asps balls barb barbs bass bats beads beaks '
    'bears bees bells belts birds blades blobs blooms boars boats '
    'bolts books bowls boys bream brides broods brooms brutes bucks '
    'bulbs bulls busks cakes calfs calves cats char chests choirs '
    'clams clans clouds clowns cod coins colts cones cords cows '
    'crabs cranes crows cults czars darts dates deer dholes dice '
    'discs does dogs doors dopes doves dreams drones ducks dunes '
    'dwarves eels eggs elk elks elms elves ewes eyes faces '
    'facts fawns feet ferns fish fists flames fleas flocks flutes '
    'foals foes fools fowl frogs fruits gangs gar geese gems '
    'germs ghosts gnomes goats grapes grooms grouse grubs guards gulls '
    'hands hares hawks heads hearts hens herbs hills hogs holes '
    'hordes ide jars jays kids kings kites lads lakes lambs '
    'larks lice lights limbs looms loons mares masks mice mimes '
    'minks mists mites mobs molds moles moons moths newts nymphs '
    'orbs orcs owls pearls pears peas perch pigs pikes pines '
    'plains plants plums pools prawns prunes pugs punks quail quails '
    'queens quills rafts rains rams rats rays ribs rocks rooks '
    'ruffs runes sands seals seas seeds serfs shards sharks sheep '
    'shells ships shoals shrews shrimp skate skies skunks sloths slugs '
    'smew smiles snails snakes snipes sole songs spades sprats sprouts '
    'squabs squads squares squid stars stoats stones storks strays suns '
    'swans swarms swells swifts tars teams teeth terns thorns threads '
    'thrones ticks toads tools trees tribes trolls trout tunes tusks '
    'veins verbs vines voles wasps waves wells whales whelks whiffs '
    'winds wolves worms wraiths wrens yaks'
).split()
# 256 verbs used to encode IPv6 address bytes.  Order and spelling
# (including 'cloack' and 'lugg') are part of the encoding -- never
# edit, reorder or "fix" entries.
verbs = (
    'aid arm awe axe bag bait bare bash bathe beat '
    'bid bilk blame bleach bleed bless bluff blur boast boost '
    'boot bore botch breed brew bribe brief brine broil browse '
    'bruise build burn burst call calm carve chafe chant charge '
    'chart cheat check cheer chill choke chomp choose churn cite '
    'clamp clap clasp claw clean cleanse clip cloack clone clutch '
    'coax crack crave crunch cry cull cure curse cuss dare '
    'daze dent dig ding doubt dowse drag drain drape draw '
    'dread dredge drill drink drip drive drop drown dry dump '
    'eat etch face fail fault fear feed feel fetch fight '
    'find fix flap flay flee fling flip float foil forge '
    'free freeze frisk gain glimpse gnaw goad gouge grab grasp '
    'graze grieve grip groom guard guards guide gulp gush halt '
    'harm hate haul haunt have heal hear help herd hex '
    'hire hit hoist hound hug hurl irk jab jeer join '
    'jolt keep kick kill kiss lash leash leave lift like '
    'love lugg lure maim make mask meet melt mend miss '
    'mould move nab name need oust paint paw pay peck '
    'peeve pelt please pluck poach poll praise prick print probe '
    'prod prompt punch quash quell quote raid raise raze ride '
    'roast rouse rule scald scalp scar scathe score scorn scour '
    'scuff sear see seek seize send sense serve shake shear '
    'shift shoot shun slap slay slice smack smash smell smite '
    'snare snatch sniff snub soak spare splash split spook spray '
    'squash squeeze stab stain starve steal steer sting strike stun '
    'tag tame taste taunt teach tend'
).split()
# Dictionary pairs consumed per output position when encoding an address.
# NOTE(review): each IPv6 pair draws from 256-word lists, so one word can
# presumably encode one byte of the 16-byte address -- confirm against the
# hipku encoder that consumes these tables.
ipv6_key = [
    (adjectives, nouns),
    (adjectives, nouns),
    (verbs, adjectives),
    (adjectives, adjectives),
    (adjectives, adjectives),
    (nouns, adjectives),
    (nouns, verbs),
    (adjectives, nouns)
]
# IPv4 addresses use four pairs drawn from the 16-word themed lists above.
ipv4_key = [
    (animal_adjectives, animal_colours),
    (animal_nouns, animal_verbs),
    (nature_adjectives, nature_nouns),
    (plant_nouns, plant_verbs)
]
# Haiku templates; the '{}' placeholders are filled with selected words.
ipv4_schema = "The {} {} {}\n{} in the {} {}.\n{} {}."
ipv6_schema = "{} {} and {} {}\n{} {} {} {} {} {} {}.\n{} {} {} {} {}."
| {
"repo_name": "Drakekin/hipku",
"path": "hipku/dictionaries.py",
"copies": "1",
"size": "12392",
"license": "mit",
"hash": -145063107033376900,
"line_mean": 12.0168067227,
"line_max": 71,
"alpha_frac": 0.3933989671,
"autogenerated": false,
"ratio": 2.6405284466226293,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.3533927413722629,
"avg_score": null,
"num_lines": null
} |
# LPTHW exercise 42: modelling is-a / has-a relationships with classes.


class Animal(object):
    """Root of the animal hierarchy (Animal is-a object)."""
    pass


class Dog(Animal):
    """A dog; has-a name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """A cat; has-a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """A person; has-a name and optionally a pet."""

    def __init__(self, name):
        self.name = name
        # No pet until one is assigned.
        self.pet = None


class Employee(Person):
    """An employee is-a person with a salary."""

    def __init__(self, name, salary):
        # Delegate the name handling to Person.
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    """Root of the fish hierarchy."""
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


# Example instances exercising the relationships above.
rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
mary.pet = satan
frank = Employee("Frank", 120000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "nchristiny/python",
"path": "ex42.py",
"copies": "1",
"size": "1374",
"license": "mit",
"hash": 6051829211600861000,
"line_mean": 18.3521126761,
"line_max": 74,
"alpha_frac": 0.6128093159,
"autogenerated": false,
"ratio": 2.8155737704918034,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39283830863918034,
"avg_score": null,
"num_lines": null
} |
# Exercise 42: class hierarchies (is-a) and attributes (has-a).


class Animal(object):
    """Animal is-a object."""
    pass


class Dog(Animal):
    """Dog is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """Cat is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """Person has-a name and an (initially empty) pet slot."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """Employee is-a Person; also has-a salary."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
print(rover)
satan = Cat("Satan")
print(satan)
mary = Person("Mary")
mary.pet = satan
frank = Employee("Frank", 120000)
frank.pet = rover
print(frank.pet)
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "SunWalter/Hard",
"path": "ex42.py",
"copies": "1",
"size": "1330",
"license": "apache-2.0",
"hash": -3863981951236563500,
"line_mean": 16.5,
"line_max": 58,
"alpha_frac": 0.6060150376,
"autogenerated": false,
"ratio": 2.788259958071279,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.38942749956712785,
"avg_score": null,
"num_lines": null
} |
# Exercise 42: is-a (inheritance) versus has-a (attributes).


class Animal(object):
    """Base animal type."""
    pass


class Dog(Animal):
    """Dog is-a Animal with a name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """Cat is-a Animal with a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """Person with a name and an optional pet."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """Salaried Person."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
frank = Employee("Frank", 120000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "githubfun/lphw",
"path": "ex42.py",
"copies": "1",
"size": "1178",
"license": "mit",
"hash": 7128417490403029000,
"line_mean": 12.6976744186,
"line_max": 44,
"alpha_frac": 0.6018675722,
"autogenerated": false,
"ratio": 2.758782201405152,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8857881224324975,
"avg_score": 0.0005537098560354374,
"num_lines": 86
} |
# Exercise 42: is-a / has-a relationships.


class Animal(object):
    """Base animal; exposes the latin name of the kingdom."""

    @staticmethod
    def latin_name():
        # Class-wide constant; no instance state required.
        return "Animal"


class Dog(Animal):
    """Dog is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name

    def eater(self):
        """Return a (diet, kingdom latin name, dog name) tuple."""
        return "Carnivore", Animal.latin_name(), self.name


class Cat(Animal):
    """Cat is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """Person has-a name and an optional pet."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """Employee is-a Person with a salary."""

    def __init__(self, name, salary):
        # Call constructor of the super class for the name.
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
mary.pet = satan
frank = Employee("Frank", 12000)
frank.pet = rover
crouse = Salmon()
harry = Halibut()
# BUG FIX: the original used the Python-2-only statement form
# `print rover.eater()`, which is a SyntaxError on Python 3.  The call
# form below prints the same tuple on both Python 2 and Python 3.
print(rover.eater())
| {
"repo_name": "aurelo/lphw",
"path": "source/ex42.py",
"copies": "1",
"size": "1389",
"license": "mit",
"hash": -643607310038790900,
"line_mean": 15.1511627907,
"line_max": 58,
"alpha_frac": 0.6126709863,
"autogenerated": false,
"ratio": 2.8877338877338876,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40004048740338877,
"avg_score": null,
"num_lines": null
} |
# Exercise 42: is-a / has-a relationships.


class Animal(object):
    """Animal is-a object."""
    pass


class Dog(Animal):
    """Dog is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """Cat is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """Person has-a name and an optional pet."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """Employee is-a Person with a salary."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        # BUG FIX: the original accepted `salary` but never stored it, so
        # Employee("Frank", 120000) silently discarded the salary.
        self.salary = salary


class Fish(object):
    """Fish is-a object."""
    pass


# BUG FIX: the original declared `class Salmon(fish)` and
# `class Halibut(fish)` -- lowercase `fish` is undefined and raised a
# NameError at import time.  The base class is `Fish`.
class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
mary.pet = satan
frank = Employee("Frank", 120000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "kaitlinahrens/learn-python-the-hard-way",
"path": "ex42.py",
"copies": "1",
"size": "1299",
"license": "apache-2.0",
"hash": -8546893483490669000,
"line_mean": 10.9174311927,
"line_max": 69,
"alpha_frac": 0.6343341032,
"autogenerated": false,
"ratio": 2.587649402390438,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3721983505590438,
"avg_score": null,
"num_lines": null
} |
# Exercise 42: classes and instances, is-a versus has-a.


class Animal(object):
    """Animal is-a object."""
    pass


class Dog(Animal):
    """Named dog."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """Named cat."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """Named person; may own a pet."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """Person drawing a salary."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
frank = Employee("Frank", 120000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "michsien/Learning_python",
"path": "exercises _1-44/ex42.py",
"copies": "1",
"size": "1165",
"license": "mit",
"hash": 7075806384504754000,
"line_mean": 16.1323529412,
"line_max": 67,
"alpha_frac": 0.6686695279,
"autogenerated": false,
"ratio": 2.549234135667396,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8585908644668263,
"avg_score": 0.026399003779826548,
"num_lines": 68
} |
# Animal "IS-A" object
class Animal(object):
pass
# Dog "IS-A" Animal
class Dog(Animal):
def __init__(self, name):
# Dog "has-a" name
self.name = name
# Cat 'is-a' Animal
class Cat(Animal):
def __init__(self, name):
# Cat 'has-a' name
self.name = name
# Person 'is-a' object. How dare you objectify a Person!
class Person(object):
def __init__(self, name):
# Person 'has-a' name
self.name = name
# Person "HAS-A" pet of some kind
self.pet = None
# Employee 'is-a' Person
class Employee(Person):
def __init__(self, name, salary):
# ?? What is this strange magic?
super(Employee, self).__init__(name)
# ?? Employee 'has-a' salary
self.salary = salary
# Fish 'is-a' object
class Fish(object):
pass
# Salmon 'is-a' Fish
class Salmon(Fish):
pass
# Halibut 'is-a' Fish
class Halibut(Fish):
pass
# rover "IS-A" dog
rover = Dog("Rover")
# satan 'is-a' Cat
satan = Cat("Satan")
# mary 'is-a' Person
mary = Person("Mary")
# mary 'has-a' pet; Satan
mary.pet = satan
# frank 'is-a' Employee with a salary of 120000. Wish I was Frank.
frank = Employee("Frank", 120000)
# frank 'has-a' pet; rover
frank.pet = rover
# flipper 'is-a' Fish
flipper = Fish()
# crouse 'is-a' Salmon
crouse = Salmon()
# harry 'is-a' Halibut
harry = Halibut()
| {
"repo_name": "Chris-Wales/LearnPython",
"path": "ex42.py",
"copies": "1",
"size": "1445",
"license": "mit",
"hash": -5510732723873922000,
"line_mean": 17.527027027,
"line_max": 66,
"alpha_frac": 0.5653979239,
"autogenerated": false,
"ratio": 2.6709796672828094,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37363775911828095,
"avg_score": null,
"num_lines": null
} |
# Exercise 42 with __str__ methods: each object describes its own place
# in the hierarchy when printed.


class Animal(object):
    """Animal is-a object."""

    def __str__(self):
        # Called whenever the string form of the object is requested.
        return "I am a animal!"


class Dog(Animal):
    """Dog is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name

    def __str__(self):
        return f"{self.name} is-a Dog which is a type of Animal."


class Cat(Animal):
    """Cat is-a Animal; has-a name."""

    def __init__(self, name):
        self.name = name

    def __str__(self):
        return f"{self.name} is-a Cat which is a type of Animal."


class Person(object):
    """Person has-a name and an optional pet."""

    def __init__(self, name):
        self.name = name
        self.pet = None

    def __str__(self):
        if self.pet is None:
            return f"{self.name} is-a Person and does not have a pet"
        return f"{self.name} is-a Person and has-a pet:\n\t{self.pet}"


class Employee(Person):
    """Employee is-a Person; has-a salary."""

    def __init__(self, name, salary):
        # Delegate name handling to the Person initialiser.
        super(Employee, self).__init__(name)
        self.salary = salary

    def __str__(self):
        return ("%s is-a Employee with a salary of $%d. An Employee is-a "
                "Person." % (self.name, self.salary))


class Fish(object):
    def __str__(self):
        return "Fish is-a object."


class Salmon(Fish):
    def __str__(self):
        return "Salmon is-a Fish."


class Halibut(Fish):
    def __str__(self):
        return "Halibut is-a Fish."


rover = Dog("Rover")
print(rover)
satan = Cat("Satan")
print(satan)
mary = Person("Mary")
print(mary)
mary.pet = satan
print(mary)
frank = Employee("Frank", 120000)
print(frank)
frank.pet = rover
flipper = Fish()
print(f"Flipper says '{flipper}'")
crouse = Salmon()
print(f"Crouse says '{crouse}'")
harry = Halibut()
print(f"Harry say '{harry}'")
| {
"repo_name": "Paul-Haley/LPTHW_python3",
"path": "ex42_printing.py",
"copies": "1",
"size": "2712",
"license": "mit",
"hash": -3065945102559683000,
"line_mean": 22.7894736842,
"line_max": 79,
"alpha_frac": 0.5884955752,
"autogenerated": false,
"ratio": 3.095890410958904,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.908051963019108,
"avg_score": 0.020773271193564914,
"num_lines": 114
} |
# Exercise 42: building a small object model with inheritance.


class Animal(object):
    """Animal is-a object."""
    pass


class Dog(Animal):
    """Dog is-a Animal; carries a name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """Cat is-a Animal; carries a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """Person carries a name and may own a pet."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """Employee is-a Person and carries a salary."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
mary.pet = satan
frank = Employee("Frank", 120000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "udoyen/pythonlearning",
"path": "chp42/ex42.py",
"copies": "1",
"size": "1350",
"license": "mit",
"hash": -1187606278516857000,
"line_mean": 17.75,
"line_max": 71,
"alpha_frac": 0.6059259259,
"autogenerated": false,
"ratio": 2.800829875518672,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3906755801418672,
"avg_score": null,
"num_lines": null
} |
# Exercise 42: is-a (subclassing) and has-a (attributes) demonstrated.


class Animal(object):
    """Top of the animal hierarchy."""
    pass


class Dog(Animal):
    """A dog with a name."""

    def __init__(self, name):
        self.name = name


class Cat(Animal):
    """A cat with a name."""

    def __init__(self, name):
        self.name = name


class Person(object):
    """A named person; pet defaults to None."""

    def __init__(self, name):
        self.name = name
        self.pet = None


class Employee(Person):
    """A person who also earns a salary."""

    def __init__(self, name, salary):
        super(Employee, self).__init__(name)
        self.salary = salary


class Fish(object):
    pass


class Salmon(Fish):
    pass


class Halibut(Fish):
    pass


rover = Dog("Rover")
satan = Cat("Satan")
mary = Person("Mary")
mary.pet = satan
frank = Employee("Frank", 120000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
| {
"repo_name": "alexliew/learn_python_the_hard_way",
"path": "ex42.py",
"copies": "1",
"size": "1322",
"license": "mit",
"hash": -5335614642775248000,
"line_mean": 17.6197183099,
"line_max": 71,
"alpha_frac": 0.6096822995,
"autogenerated": false,
"ratio": 2.8552915766738662,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8870562159064075,
"avg_score": 0.018882343421958252,
"num_lines": 71
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.