text
stringlengths 4
1.02M
| meta
dict |
|---|---|
import json
import threading
import tornado.websocket
import shm
class SwitchWatcherThread(threading.Thread):
    """Background thread that pushes kill-switch state to websocket clients.

    Runs until the shared watcher set becomes empty; every shm update is
    serialized to JSON and broadcast to each registered websocket.
    """

    def __init__(self, lock, watchers, *args, **kwargs):
        threading.Thread.__init__(self, *args, **kwargs)
        self.lock = lock          # guards access to the shared watcher set
        self.watchers = watchers  # set of currently-open websockets

    def run(self):
        shm_watcher = shm.watchers.watcher()
        shm_watcher.watch(shm.switches)
        while True:
            # Exit once the last client has disconnected.
            with self.lock:
                if not self.watchers:
                    return
            # Block until shm reports a change on the switches group.
            shm_watcher.wait()
            payload = json.dumps({
                "soft_kill": shm.switches.soft_kill.get(),
                "hard_kill": shm.switches.hard_kill.get(),
            })
            with self.lock:
                # NOTE(review): write_message is invoked off the IOLoop
                # thread here — confirm this is safe for the Tornado
                # version in use.
                for client in self.watchers:
                    client.write_message(payload)
class StatusHandler(tornado.websocket.WebSocketHandler):
    """Websocket endpoint reporting the soft/hard kill-switch state.

    All connections share one class-level socket set and a single
    SwitchWatcherThread broadcaster, (re)started lazily on connect.
    """

    ws_clients_lock = threading.Lock()  # guards ws_clients across threads
    ws_clients = set()                  # all currently-open connections
    ws_updater = None                   # shared broadcaster thread

    def open(self):
        with self.ws_clients_lock:
            self.ws_clients.add(self)
            # BUG FIX: assign through the class, not the instance.  The old
            # code wrote ``self.ws_updater``, creating a per-instance
            # attribute while the class attribute stayed None — so every
            # new connection spawned another broadcaster thread.
            if (StatusHandler.ws_updater is None
                    or not StatusHandler.ws_updater.is_alive()):
                StatusHandler.ws_updater = SwitchWatcherThread(
                    self.ws_clients_lock, self.ws_clients, daemon=True)
                StatusHandler.ws_updater.start()

    def on_message(self, message):
        # Any client message is answered with the current switch state.
        self.write_message(json.dumps({
            "soft_kill": shm.switches.soft_kill.get(),
            "hard_kill": shm.switches.hard_kill.get(),
        }))

    def on_close(self):
        with self.ws_clients_lock:
            # discard() tolerates a double-close, unlike remove().
            self.ws_clients.discard(self)
|
{
"content_hash": "d4fc71a02e8578ea7f642d9a978b89b3",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 101,
"avg_line_length": 31.66,
"alnum_prop": 0.5704358812381554,
"repo_name": "cuauv/software",
"id": "68ac83983e3d9d15067e964394d04a70cb5595ad",
"size": "1583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webserver/handlers/status.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "271780"
},
{
"name": "C++",
"bytes": "2831785"
},
{
"name": "CMake",
"bytes": "5365"
},
{
"name": "CSS",
"bytes": "5082"
},
{
"name": "Dockerfile",
"bytes": "2758"
},
{
"name": "Emacs Lisp",
"bytes": "19028"
},
{
"name": "GLSL",
"bytes": "6783"
},
{
"name": "HTML",
"bytes": "3642"
},
{
"name": "Haskell",
"bytes": "4770"
},
{
"name": "JavaScript",
"bytes": "113413"
},
{
"name": "Makefile",
"bytes": "12887"
},
{
"name": "Nix",
"bytes": "16335"
},
{
"name": "OCaml",
"bytes": "3804"
},
{
"name": "PureBasic",
"bytes": "58"
},
{
"name": "Python",
"bytes": "2141765"
},
{
"name": "Scheme",
"bytes": "129544"
},
{
"name": "Shell",
"bytes": "68820"
},
{
"name": "TeX",
"bytes": "25243"
},
{
"name": "Vim script",
"bytes": "125505"
}
],
"symlink_target": ""
}
|
import os
import argparse
import sys
import m5
from m5.objects import *
# --------------------
# Define Command Line Options
# ====================
# Parse simulation parameters.  Sizes are gem5 memory-size strings
# ("32kB"); latencies default to plain ints.
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--detailed", action="store_true")
parser.add_argument("-t", "--timing", action="store_true")
parser.add_argument("-m", "--maxtick", type=int)
parser.add_argument("-c", "--numclusters",
                    help="Number of clusters", type=int)
parser.add_argument("-n", "--numcpus",
                    help="Number of cpus in total", type=int)
parser.add_argument("-f", "--frequency",
                    default="1GHz",
                    help="Frequency of each CPU")
parser.add_argument("--l1size",
                    default="32kB")
parser.add_argument("--l1latency",
                    default=1)
parser.add_argument("--l2size",
                    default="256kB")
parser.add_argument("--l2latency",
                    default=10)
parser.add_argument("--rootdir",
                    # FIX: help text typo ("ROot" -> "Root").
                    help="Root directory of Splash2",
                    default="/dist/splash2/codes/")
parser.add_argument("-b", "--benchmark",
                    help="Splash 2 benchmark to run")
args = parser.parse_args()
# --------------------
# Define Splash2 Benchmarks
# ====================
class Cholesky(Process):
    """Splash2 Cholesky kernel on the tk23.O input matrix."""
    # NOTE(review): only this class prefixes the path with '/', producing a
    # double slash with the default rootdir — harmless but inconsistent.
    executable = args.rootdir + '/kernels/cholesky/CHOLESKY'
    cmd = 'CHOLESKY -p' + str(args.numcpus) + ' '\
        + args.rootdir + '/kernels/cholesky/inputs/tk23.O'


class FFT(Process):
    """Splash2 FFT kernel (-m18 problem size)."""
    executable = args.rootdir + 'kernels/fft/FFT'
    cmd = 'FFT -p' + str(args.numcpus) + ' -m18'


class LU_contig(Process):
    """Splash2 LU kernel, contiguous block allocation."""
    executable = args.rootdir + 'kernels/lu/contiguous_blocks/LU'
    cmd = 'LU -p' + str(args.numcpus)


class LU_noncontig(Process):
    """Splash2 LU kernel, non-contiguous block allocation."""
    executable = args.rootdir + 'kernels/lu/non_contiguous_blocks/LU'
    cmd = 'LU -p' + str(args.numcpus)


class Radix(Process):
    """Splash2 radix sort kernel (524288 keys)."""
    executable = args.rootdir + 'kernels/radix/RADIX'
    cmd = 'RADIX -n524288 -p' + str(args.numcpus)


class Barnes(Process):
    """Splash2 Barnes-Hut app; input file selected by CPU count."""
    executable = args.rootdir + 'apps/barnes/BARNES'
    cmd = 'BARNES'
    input = args.rootdir + 'apps/barnes/input.p' + str(args.numcpus)


class FMM(Process):
    """Splash2 FMM app; input file selected by CPU count."""
    executable = args.rootdir + 'apps/fmm/FMM'
    cmd = 'FMM'
    input = args.rootdir + 'apps/fmm/inputs/input.2048.p' + str(args.numcpus)


class Ocean_contig(Process):
    """Splash2 Ocean app, contiguous partitions."""
    executable = args.rootdir + 'apps/ocean/contiguous_partitions/OCEAN'
    cmd = 'OCEAN -p' + str(args.numcpus)


class Ocean_noncontig(Process):
    """Splash2 Ocean app, non-contiguous partitions."""
    executable = args.rootdir + 'apps/ocean/non_contiguous_partitions/OCEAN'
    cmd = 'OCEAN -p' + str(args.numcpus)


class Raytrace(Process):
    """Splash2 Raytrace app on the teapot scene."""
    executable = args.rootdir + 'apps/raytrace/RAYTRACE'
    cmd = 'RAYTRACE -p' + str(args.numcpus) + ' ' \
        + args.rootdir + 'apps/raytrace/inputs/teapot.env'


class Water_nsquared(Process):
    """Splash2 Water (n-squared) app; input selected by CPU count."""
    executable = args.rootdir + 'apps/water-nsquared/WATER-NSQUARED'
    cmd = 'WATER-NSQUARED'
    input = args.rootdir + 'apps/water-nsquared/input.p' + str(args.numcpus)


class Water_spatial(Process):
    """Splash2 Water (spatial) app; input selected by CPU count."""
    executable = args.rootdir + 'apps/water-spatial/WATER-SPATIAL'
    cmd = 'WATER-SPATIAL'
    input = args.rootdir + 'apps/water-spatial/input.p' + str(args.numcpus)
# --------------------
# Base L1 Cache Definition
# ====================
class L1(Cache):
    """Base L1 cache: latency from --l1latency, 12 MSHRs, 8 targets each."""
    latency = args.l1latency
    mshrs = 12
    tgts_per_mshr = 8
# ----------------------
# Base L2 Cache Definition
# ----------------------
class L2(Cache):
    """Base L2 cache: latency from --l2latency, deeper MSHR/write buffers."""
    latency = args.l2latency
    mshrs = 92
    tgts_per_mshr = 16
    write_buffers = 8
# ----------------------
# Define the clusters with their cpus
# ----------------------
class Cluster:
    """Plain namespace holding one cluster's bus, CPUs and L1 cache."""
    pass


# BUG FIX: use floor division — under Python 3 (this script uses print()),
# ``/`` yields a float and ``range(cpusPerCluster)`` below raises TypeError.
cpusPerCluster = args.numcpus // args.numclusters
busFrequency = Frequency(args.frequency)
# Scale the bus clock with the number of CPUs sharing each cluster bus.
busFrequency *= cpusPerCluster
all_cpus = []
all_l1s = []
all_l1buses = []
def _make_clusters(cpu_class):
    """Build one Cluster per --numclusters with its own bus, CPUs and L1.

    The three CPU-model branches previously repeated this code verbatim;
    only the CPU class differed.  Appends to the module-level all_cpus,
    all_l1s and all_l1buses lists as before.
    """
    built = [Cluster() for _ in range(args.numclusters)]
    for j, cluster in enumerate(built):
        cluster.id = j
        cluster.clusterbus = L2XBar(clock=busFrequency)
        all_l1buses.append(cluster.clusterbus)
        # NOTE(review): cpu_id = i + cluster.id is not unique across
        # clusters when cpusPerCluster > 1 — preserved from the original;
        # confirm whether ids should instead be i + j * cpusPerCluster.
        cluster.cpus = [cpu_class(cpu_id=i + cluster.id,
                                  clock=args.frequency)
                        for i in range(cpusPerCluster)]
        all_cpus.extend(cluster.cpus)
        cluster.l1 = L1(size=args.l1size, assoc=4)
        all_l1s.append(cluster.l1)
    return built


# Select the CPU model: timing-simple, detailed out-of-order, or atomic.
if args.timing:
    clusters = _make_clusters(TimingSimpleCPU)
elif args.detailed:
    clusters = _make_clusters(DerivO3CPU)
else:
    clusters = _make_clusters(AtomicSimpleCPU)
# ----------------------
# Create a system, and add system wide objects
# ----------------------
# Top-level system: all CPUs, all L1s and their buses, one memory, one
# memory bus.
system = System(cpu = all_cpus, l1_ = all_l1s, l1bus_ = all_l1buses,
                physmem = SimpleMemory(),
                membus = SystemXBar(clock = busFrequency))
system.clock = '1GHz'
# Shared bus between every cluster's L1 and the single L2.
system.toL2bus = L2XBar(clock = busFrequency)
system.l2 = L2(size = args.l2size, assoc = 8)
# ----------------------
# Connect the L2 cache and memory together
# ----------------------
system.physmem.port = system.membus.mem_side_ports
# BUG FIX: a cache's cpu_side (response) port must pair with a bus's
# mem_side_ports (request), and its mem_side (request) port with the
# downstream bus's cpu_side_ports (response).  The previous pairings
# (cpu_side -> cpu_side_ports, mem_side -> mem_side_ports) connected
# ports of the same role.
system.l2.cpu_side = system.toL2bus.mem_side_ports
system.l2.mem_side = system.membus.cpu_side_ports
# ----------------------
# Connect the L2 cache and clusters together
# ----------------------
for cluster in clusters:
    # The L1 responds to its cluster bus and issues requests on the L2 bus.
    cluster.l1.cpu_side = cluster.clusterbus.mem_side_ports
    cluster.l1.mem_side = system.toL2bus.cpu_side_ports
    for cpu in cluster.cpus:
        # Instruction and data ports both go through the cluster bus.
        cpu.icache_port = cluster.clusterbus.cpu_side_ports
        cpu.dcache_port = cluster.clusterbus.cpu_side_ports
# ----------------------
# Define the root
# ----------------------
root = Root(full_system = False, system = system)

# --------------------
# Pick the correct Splash2 Benchmarks
# ====================
# Map each --benchmark name to its Process class (replaces the long
# if/elif chain).
_BENCHMARKS = {
    'Cholesky': Cholesky,
    'FFT': FFT,
    'LUContig': LU_contig,
    'LUNoncontig': LU_noncontig,
    'Radix': Radix,
    'Barnes': Barnes,
    'FMM': FMM,
    'OceanContig': Ocean_contig,
    'OceanNoncontig': Ocean_noncontig,
    'Raytrace': Raytrace,
    'WaterNSquared': Water_nsquared,
    'WaterSpatial': Water_spatial,
}
try:
    root.workload = _BENCHMARKS[args.benchmark]()
except KeyError:
    # BUG FIX: the old message called --benchmark an "environment
    # variable"; it is a command-line option.
    m5.util.panic("""
The --benchmark command-line option was set to something improper.
Use Cholesky, FFT, LUContig, LUNoncontig, Radix, Barnes, FMM, OceanContig,
OceanNoncontig, Raytrace, WaterNSquared, or WaterSpatial
""")
# --------------------
# Assign the workload to the cpus
# ====================
# Every CPU runs the same benchmark process.
for cluster in clusters:
    for cpu in cluster.cpus:
        cpu.workload = root.workload
system.workload = SEWorkload.init_compatible(root.workload.executable)
# ----------------------
# Run the simulation
# ----------------------
# Both timing-simple and detailed CPU models need timing memory mode.
if args.timing or args.detailed:
    root.system.mem_mode = 'timing'
# instantiate configuration
m5.instantiate()
# simulate until the program terminates (or --maxtick is reached)
if args.maxtick:
    exit_event = m5.simulate(args.maxtick)
else:
    exit_event = m5.simulate(m5.MaxTick)
print('Exiting @ tick', m5.curTick(), 'because', exit_event.getCause())
|
{
"content_hash": "40a0cb21bb8615abe2cee63713247e33",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 81,
"avg_line_length": 32.17977528089887,
"alnum_prop": 0.5956703910614525,
"repo_name": "gem5/gem5",
"id": "46bcfbfe29d4f3155032497f36b52d64b3cebf0a",
"size": "10184",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "configs/splash2/cluster.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "145626"
},
{
"name": "Awk",
"bytes": "3386"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "C",
"bytes": "3927153"
},
{
"name": "C++",
"bytes": "42960484"
},
{
"name": "CMake",
"bytes": "133888"
},
{
"name": "Dockerfile",
"bytes": "34102"
},
{
"name": "Emacs Lisp",
"bytes": "1914"
},
{
"name": "Forth",
"bytes": "354"
},
{
"name": "Fortran",
"bytes": "15436"
},
{
"name": "HTML",
"bytes": "146414"
},
{
"name": "Hack",
"bytes": "139769"
},
{
"name": "Java",
"bytes": "6966"
},
{
"name": "M4",
"bytes": "42624"
},
{
"name": "Makefile",
"bytes": "39573"
},
{
"name": "Perl",
"bytes": "23784"
},
{
"name": "Python",
"bytes": "8079781"
},
{
"name": "Roff",
"bytes": "8754"
},
{
"name": "SCSS",
"bytes": "2971"
},
{
"name": "SWIG",
"bytes": "173"
},
{
"name": "Scala",
"bytes": "5328"
},
{
"name": "Shell",
"bytes": "95638"
},
{
"name": "Starlark",
"bytes": "25668"
},
{
"name": "SuperCollider",
"bytes": "8869"
},
{
"name": "Vim Script",
"bytes": "4343"
},
{
"name": "sed",
"bytes": "3897"
}
],
"symlink_target": ""
}
|
"""JavaScript tests for the Course Explorer components."""
__author__ = [
'John Orr (jorr@google.com)',
]
from tests.integration import integration
from tests.integration import pageobjects
class PolymerTests(integration.TestBase):
    """Browser-driven check that the explorer's Polymer test page passes."""

    def test_explorer(self):
        # Load the component test harness page and assert its JS results.
        pageobjects.PolymerPageObject(self).load(
            '/modules/explorer/_static/components/test/index.html'
        ).assert_test_results()
|
{
"content_hash": "c41487f5f81244f0664fd1e7881f442c",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 66,
"avg_line_length": 26.1875,
"alnum_prop": 0.7088305489260143,
"repo_name": "andela-angene/coursebuilder-core",
"id": "3379246ecbbdda894a7e7ddd585b77773f8d8ea7",
"size": "1017",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop-frontend",
"path": "coursebuilder/modules/explorer/explorer_integration_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "729194"
},
{
"name": "HTML",
"bytes": "739873"
},
{
"name": "JavaScript",
"bytes": "720406"
},
{
"name": "Python",
"bytes": "6245524"
},
{
"name": "Shell",
"bytes": "53815"
}
],
"symlink_target": ""
}
|
from flask import render_template, flash, redirect, session, url_for, request, g
from flask.ext.login import login_user, logout_user, current_user, login_required
from app import app, db, lm, oid
from forms import LoginForm, EditForm
from models import User, ROLE_USER, ROLE_ADMIN
from datetime import datetime
@lm.user_loader
def load_user(user_id):
    """Flask-Login callback: return the User for the session's stored id.

    The parameter was renamed from ``id`` (which shadows the builtin);
    Flask-Login invokes this callback positionally, so callers are
    unaffected.
    """
    return User.query.get(int(user_id))
@app.before_request
def before_request():
    """Stash the current user on ``g`` and refresh their last-seen time."""
    g.user = current_user
    if not g.user.is_authenticated():
        return
    # Record activity for signed-in users only.
    g.user.last_seen = datetime.utcnow()
    db.session.add(g.user)
    db.session.commit()
@app.errorhandler(404)
def not_found_error(error):
    """Render the friendly 404 page.

    Renamed from ``internal_error``: the module's 500 handler uses that
    same name, so the second definition silently rebound the first at
    module level.  Flask registers handlers via the decorator, so the
    rename is safe for callers.
    """
    return render_template('404.html'), 404
@app.errorhandler(500)
def internal_error(error):
    """Render the 500 page after rolling back the failed DB session."""
    # Roll back so the session is usable while rendering the error page.
    db.session.rollback()
    return render_template('500.html'), 500
@app.route('/')
@app.route('/index')
@login_required
def index():
    """Home page: a hard-coded sample feed for the signed-in user."""
    sample_posts = [
        {'author': {'nickname': 'John'},
         'body': 'Beautiful day in Portland!'},
        {'author': {'nickname': 'Susan'},
         'body': 'The Avengers movie was so cool!'},
    ]
    return render_template('index.html',
                           title='Home',
                           user=g.user,
                           posts=sample_posts)
@app.route('/login', methods=['GET', 'POST'])
@oid.loginhandler
def login():
    """Show the OpenID login form, or start the handshake on valid POST."""
    if g.user is not None and g.user.is_authenticated():
        # Already signed in -- nothing to do here.
        return redirect(url_for('index'))
    form = LoginForm()
    if not form.validate_on_submit():
        return render_template('login.html',
                               title='Sign In',
                               form=form,
                               providers=app.config['OPENID_PROVIDERS'])
    # Remember the checkbox across the OpenID round-trip via the session.
    session['remember_me'] = form.remember_me.data
    return oid.try_login(form.openid.data, ask_for=['nickname', 'email'])
@oid.after_login
def after_login(resp):
    """Complete an OpenID login: create the account if new, then sign in."""
    if resp.email is None or resp.email == "":
        flash('Invalid login. Please try again.')
        return redirect(url_for('login'))
    user = User.query.filter_by(email = resp.email).first()
    if user is None:
        # First visit: derive a nickname (falling back to the email local
        # part) and de-duplicate it before creating the account.
        nickname = resp.nickname
        if nickname is None or nickname == "":
            nickname = resp.email.split('@')[0]
        nickname = User.make_unique_nickname(nickname)
        user = User(nickname = nickname, email = resp.email, role = ROLE_USER)
        db.session.add(user)
        db.session.commit()
        # make the user follow him/herself (committed separately so the
        # user row exists before the follow relation is added)
        db.session.add(user.follow(user))
        db.session.commit()
    # Honor the "remember me" checkbox stashed in the session by login().
    remember_me = False
    if 'remember_me' in session:
        remember_me = session['remember_me']
        session.pop('remember_me', None)
    login_user(user, remember = remember_me)
    return redirect(request.args.get('next') or url_for('index'))
@app.route('/logout')
def logout():
    """End the current session and return the visitor to the index."""
    logout_user()
    home = url_for('index')
    return redirect(home)
@app.route('/user/<nickname>')
@login_required
def user(nickname):
    """Profile page for *nickname* with two placeholder posts.

    Fix: compare against None with ``is`` rather than ``==``.
    """
    user = User.query.filter_by(nickname=nickname).first()
    if user is None:
        flash('User ' + nickname + ' not found.')
        return redirect(url_for('index'))
    posts = [
        {'author': user, 'body': 'Test post #1'},
        {'author': user, 'body': 'Test post #2'},
    ]
    return render_template('user.html',
                           user=user,
                           posts=posts)
@app.route('/edit', methods=['GET', 'POST'])
@login_required
def edit():
    """Let the signed-in user edit their nickname and about-me text."""
    form = EditForm(g.user.nickname)
    if form.validate_on_submit():
        # Persist the submitted profile changes.
        g.user.nickname = form.nickname.data
        g.user.about_me = form.about_me.data
        db.session.add(g.user)
        db.session.commit()
        flash('Your changes have been saved.')
        return redirect(url_for('edit'))
    if request.method != "POST":
        # Initial GET: pre-populate the form from the stored profile.
        # (Failed POSTs keep the submitted values for correction.)
        form.nickname.data = g.user.nickname
        form.about_me.data = g.user.about_me
    return render_template('edit.html',
                           form=form)
@app.route('/follow/<nickname>')
@login_required
def follow(nickname):
    """Make the signed-in user follow *nickname*.

    Fixes: ``is None`` comparisons, and the view now requires login —
    it dereferences ``g.user``, which failed for anonymous visitors.
    """
    user = User.query.filter_by(nickname=nickname).first()
    if user is None:
        flash('User ' + nickname + ' not found.')
        return redirect(url_for('index'))
    u = g.user.follow(user)
    if u is None:
        flash('Cannot follow ' + nickname + '.')
        return redirect(url_for('user', nickname=nickname))
    db.session.add(u)
    db.session.commit()
    flash('You are now following ' + nickname + '!')
    return redirect(url_for('user', nickname=nickname))
@app.route('/unfollow/<nickname>')
@login_required
def unfollow(nickname):
    """Make the signed-in user stop following *nickname*.

    Fixes: ``is None`` comparisons, and the view now requires login —
    it dereferences ``g.user``, which failed for anonymous visitors.
    """
    user = User.query.filter_by(nickname=nickname).first()
    if user is None:
        flash('User ' + nickname + ' not found.')
        return redirect(url_for('index'))
    u = g.user.unfollow(user)
    if u is None:
        flash('Cannot unfollow ' + nickname + '.')
        return redirect(url_for('user', nickname=nickname))
    db.session.add(u)
    db.session.commit()
    flash('You have stopped following ' + nickname + '.')
    return redirect(url_for('user', nickname=nickname))
|
{
"content_hash": "21798c51cd54829fec20ae82a958face",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 81,
"avg_line_length": 32.287581699346404,
"alnum_prop": 0.6089068825910932,
"repo_name": "hmdavis/flask-mega-tutorial",
"id": "bdf1874705ad70fc8c0c8558643195155a84b676",
"size": "4940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/views.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "107"
},
{
"name": "Python",
"bytes": "22037"
}
],
"symlink_target": ""
}
|
import pathlib
import pytest
@pytest.fixture
def test_dir():
    """Directory containing this test module, as a ``pathlib.Path``."""
    here = pathlib.Path(__file__)
    return here.parent
|
{
"content_hash": "37df83678a58b590111aee1f93b3fb53",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 40,
"avg_line_length": 13.125,
"alnum_prop": 0.7142857142857143,
"repo_name": "dopefishh/pympi",
"id": "fc8761b98f76d8a0ad0a0a92c9197cf27f7cf405",
"size": "105",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "164790"
}
],
"symlink_target": ""
}
|
"""The Windows firewall log file event formatter."""
from plaso.formatters import interface
from plaso.formatters import manager
class WinFirewallFormatter(interface.ConditionalEventFormatter):
  """Formatter for a Windows firewall log entry event."""

  # Event data type this formatter is registered for.
  DATA_TYPE = u'windows:firewall:log_entry'

  # TODO: Add more "elegant" formatting, as in transform ICMP code/type into
  # a more human readable format as well as translating the additional info
  # column (meaning may depend on action field).

  # Message pieces; as a ConditionalEventFormatter, pieces whose event
  # attribute is absent are presumably omitted — confirm against the
  # interface module.
  FORMAT_STRING_PIECES = [
      u'{action}',
      u'[',
      u'{protocol}',
      u'{path}',
      u']',
      u'From: {source_ip}',
      u':{source_port}',
      u'>',
      u'{dest_ip}',
      u':{dest_port}',
      u'Size (bytes): {size}',
      u'Flags [{flags}]',
      u'TCP Seq Number: {tcp_seq}',
      u'TCP ACK Number: {tcp_ack}',
      u'TCP Window Size (bytes): {tcp_win}',
      u'ICMP type: {icmp_type}',
      u'ICMP code: {icmp_code}',
      u'Additional info: {info}']

  # Short message variant.
  FORMAT_STRING_SHORT_PIECES = [
      u'{action}',
      u'[{protocol}]',
      u'{source_ip}',
      u': {source_port}',
      u'>',
      u'{dest_ip}',
      u': {dest_port}']

  # Source strings shown in output.
  SOURCE_LONG = u'Windows Firewall Log'
  SOURCE_SHORT = u'LOG'


manager.FormattersManager.RegisterFormatter(WinFirewallFormatter)
|
{
"content_hash": "269e36f87e7e69c911ca861f9494d6de",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 76,
"avg_line_length": 27.208333333333332,
"alnum_prop": 0.607197549770291,
"repo_name": "8u1a/plaso",
"id": "06563ae649d773515bc00bb9b4f73243cb66127f",
"size": "1330",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "plaso/formatters/winfirewall.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1276"
},
{
"name": "Makefile",
"bytes": "1151"
},
{
"name": "Protocol Buffer",
"bytes": "13930"
},
{
"name": "Python",
"bytes": "3179107"
},
{
"name": "Shell",
"bytes": "47305"
}
],
"symlink_target": ""
}
|
"""Base class for robustness metrics."""
import abc
import gin
import six
@gin.configurable
def all_metrics():
  """Return every public (non-underscore-prefixed) Metric subclass."""
  # Only return the public classes.
  return Metric.public_subclasses()
class Metric(abc.ABC):
  """Abstract base class for robustness metrics.

  Modernized: inherits from ``abc.ABC`` (the same ABCMeta metaclass the
  previous ``@six.add_metaclass(abc.ABCMeta)`` spelling produced) and the
  deprecated ``abc.abstractproperty`` is replaced with ``@property`` +
  ``@abc.abstractmethod``.
  """

  @classmethod
  def public_subclasses(cls):
    """Recursively get all public subclasses."""
    all_subclasses = []
    for subclass in cls.__subclasses__():
      # Add this subclass only if it is public.
      if subclass.__name__[0] != '_':
        all_subclasses.append(subclass)
      # Recursively add the subclasses of the subclass.
      all_subclasses.extend(subclass.public_subclasses())
    return all_subclasses

  @property
  def name(self):
    """String name of the metric."""
    return type(self).__name__

  @property
  @abc.abstractmethod
  def result_dimensions(self):
    """String abbreviation describing the dimensions of the metric results.

    A-algorithm, T-task, R-run/rollout, P-evaluation point.
    """
    # BUG FIX: the old body *returned* NotImplementedError instead of
    # raising it.
    raise NotImplementedError

  @property
  @abc.abstractmethod
  def bigger_is_better(self):
    """If True, more positive values are more desirable for the metric."""
    raise NotImplementedError

  @abc.abstractmethod
  def __call__(self, curves):
    """Calls the metric for evaluation on a set of input data."""
|
{
"content_hash": "86744135483fa144fe5934c9079b7d1f",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 75,
"avg_line_length": 26.294117647058822,
"alnum_prop": 0.6875466070096943,
"repo_name": "google-research/rl-reliability-metrics",
"id": "d03af61f16c6adf73687f69973114442bccfda6c",
"size": "1959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rl_reliability_metrics/metrics/metrics_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "231852"
},
{
"name": "Shell",
"bytes": "4294"
}
],
"symlink_target": ""
}
|
import collections
import functools
import logging
import socket
import struct
import threading

import numpy as np

import nengo
from pacman103.core.spinnman.sdp import sdp_message as sdp

from nengo_spinnaker.utils import fp
from nengo_spinnaker import assembler, utils
logger = logging.getLogger(__name__)
def stop_on_keyboard_interrupt(f):
    """Decorator for methods: call ``self.stop()`` if Ctrl-C interrupts *f*.

    Fixes: forwards keyword arguments (the old wrapper silently dropped
    them) and preserves *f*'s metadata via ``functools.wraps``.
    """
    @functools.wraps(f)
    def f_(self, *args, **kwargs):
        try:
            f(self, *args, **kwargs)
        except KeyboardInterrupt:
            self.stop()
    return f_
class TransformFunctionCollection(object):
    """Stores transform/function records and accumulates their out-keys."""

    def __init__(self, outkeys):
        self.outkeys = outkeys  # shared list of routing keys, one per dim
        self._tfs = list()

    def append(self, transform_function):
        # Generate one output key per output dimension of the transform.
        n_dims = transform_function.transform.shape[0]
        self.outkeys.extend(
            transform_function.keyspace.key(d=d) for d in range(n_dims))
        # Store and reduce the remaining space
        self._tfs.append(transform_function)

    def __getitem__(self, i):
        return self._tfs[i]
class SDPRxVertex(utils.vertices.NengoVertex):
    """Vertex that receives values over SDP and fans them out on-chip.

    A budget of 64 output dimensions is shared among the
    transform/function records appended to ``transforms_functions``.
    """
    MODEL_NAME = 'nengo_rx'
    MAX_ATOMS = 1

    def __init__(self):
        super(SDPRxVertex, self).__init__(1)
        # Routing keys for each output dimension; filled in by
        # TransformFunctionCollection.append as records are added.
        self.output_keys = list()
        self.transforms_functions = TransformFunctionCollection(
            self.output_keys)
        self.regions = list()

    @property
    def remaining_dims(self):
        # 64 is the per-vertex output-dimension budget; iteration uses the
        # collection's __getitem__ (legacy iteration protocol).
        return 64 - sum(
            [c.transform.shape[0] for c in self.transforms_functions])

    @classmethod
    def assemble(cls, rx, assembler):
        """Attach system and output-key regions prior to assembly."""
        # Create the regions and monkey-patch them into the SDPRxVertex
        system_items = [1000, 64-rx.remaining_dims]
        system_region = utils.vertices.UnpartitionedListRegion(system_items)
        output_keys_region =\
            utils.vertices.UnpartitionedListRegion(rx.output_keys)
        rx.regions.extend([system_region, output_keys_region])
        return rx


assembler.Assembler.register_object_builder(SDPRxVertex.assemble, SDPRxVertex)
class SDPTxVertex(utils.vertices.NengoVertex):
    """Vertex that collects values on-chip and transmits them over SDP."""

    MODEL_NAME = 'nengo_tx'
    MAX_ATOMS = 1

    def __init__(self, size_in, in_connections, dt, output_period=100):
        """Create a new SDPTxVertex.

        :param size_in: The number of dimensions to accept.
        :param in_connections: A list of connections arriving at the Tx
            vertex.
        :param dt: Time step of the simulation.
        :param output_period: Period with which to transmit SDP packets (in
            ticks)
        """
        # BUG FIX: this docstring used to sit *after* the super() call,
        # where it was a no-op string statement rather than the method's
        # docstring.
        super(SDPTxVertex, self).__init__(1)
        # Construct the data to be loaded onto the board
        system_items = [size_in, 1000, output_period]
        system_region = utils.vertices.UnpartitionedListRegion(system_items)
        (input_filters, input_filter_routing, _) =\
            utils.vertices.make_filter_regions(in_connections, dt)
        # Create the regions
        self.regions = [system_region, input_filters, input_filter_routing]
class Ethernet(object):
    """Ethernet communicator and Node builder.

    Replaces Nengo Nodes with SDPRx/SDPTx vertices, then shuttles values
    to and from a SpiNNaker board over UDP on two timer-driven threads.
    """
    def __init__(self, machinename, port=17895, input_period=10./32):
        # General parameters
        self.machinename = machinename
        self.port = port
        self.input_period = input_period
        self.comms = None
        self.rx_elements = list()
        # Map Node --> Tx
        self.nodes_tx = dict()
        # Map Node --> transform, function, buffer index, rx
        self.nodes_connections = collections.defaultdict(list)
        # Map Rx --> Fresh
        self.rx_fresh = dict()
        self.rx_buffers = collections.defaultdict(list)

    @property
    def io(self):
        # This object acts as its own IO handler.
        return self

    def prepare_network(self, objects, connections, dt, keyspace):
        """Swap out each Node with appropriate IO objects."""
        new_objs = list()
        new_conns = list()
        for obj in objects:
            # For each Node, combine outgoing connections
            if not isinstance(obj, nengo.Node):
                # If not a Node then retain the object
                new_objs.append(obj)
                continue
            out_conns = [c for c in connections if c.pre_obj == obj and
                         not isinstance(c.post_obj, nengo.Node)]
            outgoing_conns = utils.connections.Connections(out_conns)
            # Assign each unique combination of transform/function/keyspace
            # to a SDPRxVertex.
            for i, tfk in enumerate(outgoing_conns.transforms_functions):
                assert tfk.keyspace.is_set_i
                # First-fit: reuse an existing Rx vertex with enough
                # remaining dimensions, else create a new one.
                for rx in self.rx_elements:
                    if rx.remaining_dims >= tfk.transform.shape[0]:
                        break
                else:
                    rx = SDPRxVertex()
                    self.rx_elements.append(rx)
                    self.rx_fresh[rx] = False
                    new_objs.append(rx)
                rx.transforms_functions.append(tfk)
                # Per-record output buffer, written by set_node_output and
                # drained by sdp_tx_tick.
                buf = np.zeros(tfk.transform.shape[0])
                self.nodes_connections[obj].append((tfk, buf, rx))
                self.rx_buffers[rx].append(buf)
                # Replace the pre_obj on all connections from this Node to
                # account for the change to the SDPRxVertex.
                for c in out_conns:
                    if outgoing_conns[c] == i:
                        c.pre_obj = rx
                        c.is_accumulatory = False
                        new_conns.append(c)
            # Provide a Tx element to receive input for the Node
            in_conns = [c for c in connections if c.post_obj == obj and
                        not isinstance(c.pre_obj, nengo.Node)]
            if len(in_conns) > 0:
                tx = SDPTxVertex(obj.size_in, in_conns, dt)
                self.nodes_tx[obj] = tx
                new_objs.append(tx)
                for c in in_conns:
                    c.post_obj = tx
                    new_conns.append(c)
        # Retain all other connections unchanged
        for c in connections:
            if not (isinstance(c.pre_obj, nengo.Node) or
                    isinstance(c.post_obj, nengo.Node)):
                new_conns.append(c)
        return new_objs, new_conns

    def __enter__(self):
        """Open sockets and prepare (but do not start) the Tx/Rx timers."""
        # Generate a map of x, y, p to Node for received input, a cache of
        # Node input
        self.xyp_nodes = dict()
        self.node_inputs = dict()
        for (node, tx) in self.nodes_tx.items():
            xyp = tx.subvertices[0].placement.processor.get_coordinates()
            self.xyp_nodes[xyp] = node
            self.node_inputs[node] = None
        # Sockets (both non-blocking)
        self.in_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.in_socket.setblocking(0)
        self.in_socket.bind(("", self.port))
        self.out_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.out_socket.setblocking(0)
        # Locks protecting node_inputs and the rx buffers respectively
        self.input_lock = threading.Lock()
        self.output_lock = threading.Lock()
        # Tx, Rx timers (each tick reschedules itself until stop())
        self.stop_now = False
        self.tx_period = self.input_period
        self.rx_period = 0.0005
        self.rx_timer = threading.Timer(self.rx_period, self.sdp_rx_tick)
        self.rx_timer.name = "EthernetRx"
        self.tx_timer = threading.Timer(self.tx_period, self.sdp_tx_tick)
        self.tx_timer.name = "EthernetTx"
        return self

    def start(self):
        """Start the transmit and receive timer threads."""
        self.tx_timer.start()
        self.rx_timer.start()

    def stop(self):
        """Stop rescheduling, cancel pending timers and close sockets."""
        self.stop_now = True
        self.tx_timer.cancel()
        self.rx_timer.cancel()
        self.in_socket.close()
        self.out_socket.close()

    def __exit__(self, exc_type, exc_val, traceback):
        self.stop()

    def get_node_input(self, node):
        """Get the input for the given Node.

        :return: Latest input for the given Node or None if not input has
            been received.
        :raises: :py:exc:`KeyError` if the Node is not recognised.
        """
        with self.input_lock:
            return self.node_inputs[node]

    def set_node_output(self, node, output):
        """Set the output for the given Node.

        :raises: :py:exc:`KeyError` if the Node is not recognised.
        """
        # For each unique connection compute the output and store in the
        # buffer
        for (tf, buf, rx) in self.nodes_connections[node]:
            c_output = output
            if tf.function is not None:
                c_output = tf.function(c_output)
            buf[:] = np.dot(tf.transform, c_output)
            # Mark the owning Rx vertex as having data to transmit.
            self.rx_fresh[rx] = True

    @stop_on_keyboard_interrupt
    def sdp_tx_tick(self):
        """Transmit packets to the SpiNNaker board.
        """
        # Look for Rx elements with fresh output, transmit the output and
        # mark as stale.
        for rx in self.rx_elements:
            if self.rx_fresh[rx]:
                xyp = rx.subvertices[0].placement.processor.get_coordinates()
                with self.output_lock:
                    # Fixed-point encode the concatenated buffers.
                    data = fp.bitsk(np.hstack(self.rx_buffers[rx]))
                    self.rx_fresh[rx] = False
                data = struct.pack("H14x%dI" % len(data), 1, *data)
                packet = sdp.SDPMessage(dst_x=xyp[0], dst_y=xyp[1],
                                        dst_cpu=xyp[2], data=data)
                # NOTE(review): str(packet) suggests Python 2 byte strings;
                # confirm before running under Python 3.
                self.out_socket.sendto(str(packet), (self.machinename, 17893))
        # Reschedule the Tx tick
        if not self.stop_now:
            self.tx_timer = threading.Timer(self.tx_period, self.sdp_tx_tick)
            self.tx_timer.name = "EthernetTx"
            self.tx_timer.start()

    @stop_on_keyboard_interrupt
    def sdp_rx_tick(self):
        """Receive packets from the SpiNNaker board.
        """
        try:
            data = self.in_socket.recv(512)
            msg = sdp.SDPMessage(data)
            try:
                node = self.xyp_nodes[(msg.src_x, msg.src_y, msg.src_cpu)]
            except KeyError:
                logger.error(
                    "Received packet from unexpected core (%3d, %3d, %3d). "
                    "Board may require resetting." %
                    (msg.src_x, msg.src_y, msg.src_cpu)
                )
                raise IOError  # Jumps out of the receive logic
            # Convert the data (skip the 16-byte header)
            data = msg.data[16:]
            # NOTE(review): len(data)/4 relies on Python 2 integer
            # division; under Python 3 range() would receive a float.
            vals = [struct.unpack("I", data[n*4:n*4 + 4])[0] for n in
                    range(len(data)/4)]
            values = fp.kbits(vals)
            # Save the data
            assert(len(values) == node.size_in)
            with self.input_lock:
                self.node_inputs[node] = values
        except IOError:
            # Non-blocking socket had nothing to read (or bad sender above).
            pass
        # Reschedule the Rx tick
        if not self.stop_now:
            self.rx_timer = threading.Timer(self.rx_period, self.sdp_rx_tick)
            self.rx_timer.name = "EthernetRx"
            self.rx_timer.start()
|
{
"content_hash": "e03cd3780babb4ceb8454da8c4090f9b",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 82,
"avg_line_length": 34.100628930817614,
"alnum_prop": 0.5672261158244191,
"repo_name": "ctn-archive/nengo_spinnaker_2014",
"id": "88fb1431b49027eded01bcc55a5915e58786af06",
"size": "10844",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nengo_spinnaker/spinn_io/ethernet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "41797"
},
{
"name": "C++",
"bytes": "22599"
},
{
"name": "Python",
"bytes": "209113"
},
{
"name": "Shell",
"bytes": "6750"
}
],
"symlink_target": ""
}
|
from datetime import time
from pandas import Timedelta, Timestamp
from pandas.tseries.holiday import GoodFriday
from pytz import timezone
from catalyst.utils.calendars import TradingCalendar
from catalyst.utils.calendars.trading_calendar import (
HolidayCalendar, end_default
)
from catalyst.utils.calendars.us_holidays import (
USNewYearsDay,
Christmas
)
# Number of hours of offset between the open and close times dictated by this
# calendar versus the 6:31am to 5:00pm times over which we want to simulate
# futures algos.  Applied by execution_time_from_open/_close below.
FUTURES_OPEN_TIME_OFFSET = 12.5
FUTURES_CLOSE_TIME_OFFSET = -1
class QuantopianUSFuturesCalendar(TradingCalendar):
    """Synthetic calendar for trading US futures.

    This calendar is a superset of all of the US futures exchange
    calendars provided by Zipline (CFE, CME, ICE), and is intended for
    trading across all of these exchanges.

    Notes
    -----
    Open Time: 6:00 PM, US/Eastern
    Close Time: 6:00 PM, US/Eastern

    Regularly-Observed Holidays:
    - New Years Day
    - Good Friday
    - Christmas

    In order to align the hours of each session, we ignore the Sunday
    CME Pre-Open hour (5-6pm).
    """
    # XXX: Override the default TradingCalendar start and end dates with
    # ones further in the future. This is a stopgap for memory issues caused
    # by upgrading to pandas 18. This calendar is the most severely
    # affected, since it has the most total minutes of any of the catalyst
    # calendars.
    def __init__(self,
                 start=Timestamp('2000-01-01', tz='UTC'),
                 end=end_default):
        super(QuantopianUSFuturesCalendar, self).__init__(start=start,
                                                          end=end)

    @property
    def name(self):
        # Identifier used to register and look up this calendar.
        return "us_futures"

    @property
    def tz(self):
        return timezone('US/Eastern')

    @property
    def open_time(self):
        # 6:01 PM rather than 6:00 PM — per the class docstring this skips
        # the Sunday CME pre-open hour.
        return time(18, 1)

    @property
    def close_time(self):
        return time(18)

    @property
    def open_offset(self):
        # NOTE(review): -1 presumably places the open on the calendar day
        # before the session label — confirm against TradingCalendar.
        return -1

    def execution_time_from_open(self, open_dates):
        # Shift nominal opens toward the 6:31 AM simulation start window.
        return open_dates + Timedelta(hours=FUTURES_OPEN_TIME_OFFSET)

    def execution_time_from_close(self, close_dates):
        # Shift nominal closes toward the 5:00 PM simulation end window.
        return close_dates + Timedelta(hours=FUTURES_CLOSE_TIME_OFFSET)

    @property
    def regular_holidays(self):
        return HolidayCalendar([
            USNewYearsDay,
            GoodFriday,
            Christmas,
        ])
|
{
"content_hash": "eaf7d2cfc0f09e50d77115383787c58c",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 79,
"avg_line_length": 28.821428571428573,
"alnum_prop": 0.6794712928541925,
"repo_name": "enigmampc/catalyst",
"id": "d4ffe10774d513ddc54d9fc2bf3ddab602afb4ec",
"size": "2421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "catalyst/utils/calendars/us_futures_calendar.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7014"
},
{
"name": "Dockerfile",
"bytes": "2510"
},
{
"name": "Emacs Lisp",
"bytes": "138"
},
{
"name": "Jupyter Notebook",
"bytes": "229701"
},
{
"name": "PowerShell",
"bytes": "3269"
},
{
"name": "Python",
"bytes": "4279642"
},
{
"name": "Shell",
"bytes": "7469"
}
],
"symlink_target": ""
}
|
from django.conf.urls import url
from .views import ObtainExpiringAuthToken
# URL routes for the accounts app: POST credentials to ``login/`` to obtain
# an expiring auth token from ObtainExpiringAuthToken.
urlpatterns = [
    url(r'^login/', ObtainExpiringAuthToken.as_view(), name='login')
]
|
{
"content_hash": "159125f93f3158e992a4e1d257bbf33d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 68,
"avg_line_length": 23.571428571428573,
"alnum_prop": 0.7454545454545455,
"repo_name": "asnelzin/sputniktask",
"id": "95b5b396366da1273dead4044460fc61148cda87",
"size": "165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sputniktask/apps/accounts/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2726"
},
{
"name": "Python",
"bytes": "24551"
}
],
"symlink_target": ""
}
|
"""
Given a binary tree
struct TreeLinkNode {
TreeLinkNode *left;
TreeLinkNode *right;
TreeLinkNode *next;
}
Populate each next pointer to point to its next right node. If there is no
next right node, the next pointer should be set to NULL.
Initially, all next pointers are set to NULL.
Note:
You may only use constant extra space.
You may assume that it is a perfect binary tree (ie, all leaves are at the
same level, and every parent has two children).
For example,
Given the following perfect binary tree,
1
/ \
2 3
/ \ / \
4 5 6 7
After calling your function, the tree should look like:
1 -> NULL
/ \
2 -> 3 -> NULL
/ \ / \
4->5->6->7 -> NULL
"""
# Definition for binary tree with next pointer.
# class TreeLinkNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution(object):
    def connect(self, root):
        """Populate each node's ``next`` pointer with its right neighbor.

        Assumes a perfect binary tree; ``next`` stays None for the
        rightmost node on each level.

        :type root: TreeLinkNode
        :rtype: nothing
        """
        if root is None:
            return
        left, right = root.left, root.right
        # Link siblings under the same parent.
        if left is not None:
            left.next = right
        # Bridge to the adjacent subtree through the parent's next pointer.
        if right is not None and root.next is not None:
            right.next = root.next.left
        self.connect(left)
        self.connect(right)
|
{
"content_hash": "35ea11b353d5119df715b75ef4621cb7",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 74,
"avg_line_length": 24.189655172413794,
"alnum_prop": 0.5787598004276551,
"repo_name": "shichao-an/leetcode-python",
"id": "8748cc72486ae1155918a8aaa40d03f179208f06",
"size": "1403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "populating_next_right_pointers_in_each_node/solution.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "228552"
},
{
"name": "Shell",
"bytes": "353"
}
],
"symlink_target": ""
}
|
# Keep prompting until the correct password ("123") is entered.
while True:
    guess = input("What is the password? ")  # read one attempt
    if guess == "123":
        break  # correct: stop looping
    print("That is incorrect")
# Reached only after a correct entry; this line is outside the loop.
print("You got the password")
|
{
"content_hash": "948689a9d076133ca75111cf1f7c177c",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 81,
"avg_line_length": 34.18181818181818,
"alnum_prop": 0.7047872340425532,
"repo_name": "HuronComputerClub/HuronComputerClub.github.io",
"id": "d209290d9e3540e7635ab465d608124f0737d000",
"size": "396",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "code/password.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12130"
},
{
"name": "HTML",
"bytes": "13631"
},
{
"name": "JavaScript",
"bytes": "48"
},
{
"name": "Python",
"bytes": "396"
}
],
"symlink_target": ""
}
|
'''
Miscellaneous algorithms for 2D contours and 3D triangularized meshes handling
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
>>> os.chdir(datadir)
'''
import numpy as np
from numpy import linalg as nla
import os.path as op
from ..external import six
from .. import logging
from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File,
BaseInterfaceInputSpec)
from warnings import warn
iflogger = logging.getLogger('interface')
class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec):
    """Input traits for :class:`ComputeMeshWarp`."""
    surface1 = File(exists=True, mandatory=True,
                    desc=('Reference surface (vtk format) to which compute '
                          'distance.'))
    surface2 = File(exists=True, mandatory=True,
                    desc=('Test surface (vtk format) from which compute '
                          'distance.'))
    metric = traits.Enum('euclidean', 'sqeuclidean', usedefault=True,
                         desc=('norm used to report distance'))
    weighting = traits.Enum(
        'none', 'area', usedefault=True,
        desc=('"none": no weighting is performed, surface": edge distance is '
              'weighted by the corresponding surface area'))
    out_warp = File('surfwarp.vtk', usedefault=True,
                    desc='vtk file based on surface1 and warpings mapping it '
                         'to surface2')
    out_file = File('distance.npy', usedefault=True,
                    desc='numpy file keeping computed distances and weights')
class ComputeMeshWarpOutputSpec(TraitedSpec):
    """Output traits for :class:`ComputeMeshWarp`."""
    distance = traits.Float(desc="computed distance")
    out_warp = File(exists=True, desc=('vtk file with the vertex-wise '
                                       'mapping of surface1 to surface2'))
    out_file = File(exists=True,
                    desc='numpy file keeping computed distances and weights')
class ComputeMeshWarp(BaseInterface):
    """
    Calculates the vertex-wise warping to get surface2 from surface1.
    It also reports the average distance of vertices, using the norm specified
    as input.

    .. warning:

      A point-to-point correspondence between surfaces is required

    Example
    -------

    >>> import nipype.algorithms.mesh as m
    >>> dist = m.ComputeMeshWarp()
    >>> dist.inputs.surface1 = 'surf1.vtk'
    >>> dist.inputs.surface2 = 'surf2.vtk'
    >>> res = dist.run() # doctest: +SKIP

    """
    input_spec = ComputeMeshWarpInputSpec
    output_spec = ComputeMeshWarpOutputSpec
    # NOTE(review): presumably routes any X output to a virtual display for
    # the tvtk/ETS machinery -- confirm against BaseInterface.
    _redirect_x = True

    def _triangle_area(self, A, B, C):
        """Return the area of triangle ABC.

        Uses area = 0.5 * |AB| * |AC| * sin(angle at A). Degenerate
        triangles (zero-length edges) will produce a division by zero here.
        """
        A = np.array(A)
        B = np.array(B)
        C = np.array(C)
        ABxAC = nla.norm(A - B) * nla.norm(A - C)
        prod = np.dot(B - A, C - A)
        angle = np.arccos(prod / ABxAC)
        area = 0.5 * ABxAC * np.sin(angle)
        return area

    def _run_interface(self, runtime):
        try:
            from tvtk.api import tvtk
        except ImportError:
            raise ImportError('Interface ComputeMeshWarp requires tvtk')
        # Force the headless ('null') ETS toolkit when possible; failures
        # here are non-fatal.
        try:
            from enthought.etsconfig.api import ETSConfig
            ETSConfig.toolkit = 'null'
        except ImportError:
            iflogger.warn(('ETS toolkit could not be imported'))
            pass
        except ValueError:
            iflogger.warn(('ETS toolkit is already set'))
            pass
        # Read both surfaces; update() triggers the actual file parse.
        r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1)
        r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2)
        vtk1 = r1.output
        vtk2 = r2.output
        r1.update()
        r2.update()
        # Point-to-point correspondence is assumed, so counts must match.
        assert(len(vtk1.points) == len(vtk2.points))
        points1 = np.array(vtk1.points)
        points2 = np.array(vtk2.points)
        # Vertex-wise warp field and its per-vertex norm (the distance).
        diff = points2 - points1
        weights = np.ones(len(diff))
        try:
            errvector = nla.norm(diff, axis=1)
        except TypeError:  # numpy < 1.9
            errvector = np.apply_along_axis(nla.norm, 1, diff)
            pass
        if self.inputs.metric == 'sqeuclidean':
            errvector = errvector ** 2
        if (self.inputs.weighting == 'area'):
            # Weight each vertex by the total area of the faces containing it.
            # Faces are stored as [n, id0, id1, id2]; drop the leading count.
            faces = vtk1.polys.to_array().reshape(-1, 4).astype(int)[:, 1:]
            for i, p1 in enumerate(points2):
                # compute surfaces, set in weight
                w = 0.0
                point_faces = faces[(faces[:, :] == i).any(axis=1)]
                for idset in point_faces:
                    fp1 = points1[int(idset[0])]
                    fp2 = points1[int(idset[1])]
                    fp3 = points1[int(idset[2])]
                    w += self._triangle_area(fp1, fp2, fp3)
                weights[i] = w
        # Persist (distance, weight) pairs, one row per vertex.
        result = np.vstack([errvector, weights])
        np.save(op.abspath(self.inputs.out_file), result.transpose())
        # Write surface1 with the warp field attached as point-data vectors.
        out_mesh = tvtk.PolyData()
        out_mesh.points = vtk1.points
        out_mesh.polys = vtk1.polys
        out_mesh.point_data.vectors = diff
        out_mesh.point_data.vectors.name = 'warpings'
        writer = tvtk.PolyDataWriter(
            file_name=op.abspath(self.inputs.out_warp))
        writer.set_input_data(out_mesh)
        writer.write()
        # Weighted mean distance, reported via _list_outputs().
        self._distance = np.average(errvector, weights=weights)
        return runtime

    def _list_outputs(self):
        """Expose absolute output paths and the computed mean distance."""
        outputs = self._outputs().get()
        outputs['out_file'] = op.abspath(self.inputs.out_file)
        outputs['out_warp'] = op.abspath(self.inputs.out_warp)
        outputs['distance'] = self._distance
        return outputs
class MeshWarpMathsInputSpec(BaseInterfaceInputSpec):
    """Input traits for :class:`MeshWarpMaths`."""
    in_surf = File(exists=True, mandatory=True,
                   desc=('Input surface in vtk format, with associated warp '
                         'field as point data (ie. from ComputeMeshWarp'))
    # A scalar or an (x, y, z) tuple usable as a uniform operand.
    float_trait = traits.Either(traits.Float(1.0), traits.Tuple(
        traits.Float(1.0), traits.Float(1.0), traits.Float(1.0)))
    operator = traits.Either(
        float_trait, File(exists=True), default=1.0, mandatory=True,
        desc=('image, float or tuple of floats to act as operator'))
    operation = traits.Enum('sum', 'sub', 'mul', 'div', usedefault=True,
                            desc=('operation to be performed'))
    out_warp = File('warp_maths.vtk', usedefault=True,
                    desc='vtk file based on in_surf and warpings mapping it '
                         'to out_file')
    out_file = File('warped_surf.vtk', usedefault=True,
                    desc='vtk with surface warped')
class MeshWarpMathsOutputSpec(TraitedSpec):
    """Output traits for :class:`MeshWarpMaths`."""
    out_warp = File(exists=True, desc=('vtk file with the vertex-wise '
                                       'mapping of surface1 to surface2'))
    out_file = File(exists=True,
                    desc='vtk with surface warped')
class MeshWarpMaths(BaseInterface):
    """
    Performs the most basic mathematical operations on the warping field
    defined at each vertex of the input surface. A surface with scalar
    or vector data can be used as operator for non-uniform operations.

    .. warning:

      A point-to-point correspondence between surfaces is required

    Example
    -------

    >>> import nipype.algorithms.mesh as m
    >>> mmath = m.MeshWarpMaths()
    >>> mmath.inputs.in_surf = 'surf1.vtk'
    >>> mmath.inputs.operator = 'surf2.vtk'
    >>> mmath.inputs.operation = 'mul'
    >>> res = mmath.run() # doctest: +SKIP

    """
    input_spec = MeshWarpMathsInputSpec
    output_spec = MeshWarpMathsOutputSpec
    _redirect_x = True

    def _run_interface(self, runtime):
        try:
            from tvtk.api import tvtk
        except ImportError:
            raise ImportError('Interface ComputeMeshWarp requires tvtk')
        # Force the headless ('null') ETS toolkit when possible; failures
        # here are non-fatal.
        try:
            from enthought.etsconfig.api import ETSConfig
            ETSConfig.toolkit = 'null'
        except ImportError:
            iflogger.warn(('ETS toolkit could not be imported'))
            pass
        except ValueError:
            iflogger.warn(('ETS toolkit is already set'))
            pass
        r1 = tvtk.PolyDataReader(file_name=self.inputs.in_surf)
        vtk1 = r1.output
        r1.update()
        points1 = np.array(vtk1.points)
        if vtk1.point_data.vectors is None:
            raise RuntimeError(('No warping field was found in in_surf'))
        operator = self.inputs.operator
        opfield = np.ones_like(points1)
        if isinstance(operator, six.string_types):
            # The operand is the path of a second vtk file.
            # BUGFIX: read it from ``self.inputs.operator``; the previous
            # code referenced ``self.inputs.surface2``, a trait that does
            # not exist on MeshWarpMathsInputSpec (it was copied from
            # ComputeMeshWarp), so file operands could never be loaded.
            r2 = tvtk.PolyDataReader(file_name=self.inputs.operator)
            vtk2 = r2.output
            r2.update()
            assert(len(points1) == len(vtk2.points))
            opfield = vtk2.point_data.vectors
            if opfield is None:
                opfield = vtk2.point_data.scalars
            if opfield is None:
                raise RuntimeError(
                    ('No operator values found in operator file'))
            opfield = np.array(opfield)
            # NOTE(review): this assumes opfield is 2-D; purely scalar data
            # may arrive 1-D and raise on shape[1] -- confirm with tvtk.
            if opfield.shape[1] < points1.shape[1]:
                # Replicate scalar data across the coordinate axes.
                opfield = np.array([opfield.tolist()] * points1.shape[1]).T
        else:
            # Uniform operand: a float or a per-axis tuple of floats.
            operator = np.atleast_1d(operator)
            opfield *= operator
        warping = np.array(vtk1.point_data.vectors)
        if self.inputs.operation == 'sum':
            warping += opfield
        elif self.inputs.operation == 'sub':
            warping -= opfield
        elif self.inputs.operation == 'mul':
            warping *= opfield
        elif self.inputs.operation == 'div':
            warping /= opfield
        # Write the input surface with the modified warp field attached.
        vtk1.point_data.vectors = warping
        writer = tvtk.PolyDataWriter(
            file_name=op.abspath(self.inputs.out_warp))
        writer.set_input_data(vtk1)
        writer.write()
        # And the surface with the warp applied to the point coordinates.
        vtk1.point_data.vectors = None
        vtk1.points = points1 + warping
        writer = tvtk.PolyDataWriter(
            file_name=op.abspath(self.inputs.out_file))
        writer.set_input_data(vtk1)
        writer.write()
        return runtime

    def _list_outputs(self):
        """Expose absolute paths of the warped surface and warp field."""
        outputs = self._outputs().get()
        outputs['out_file'] = op.abspath(self.inputs.out_file)
        outputs['out_warp'] = op.abspath(self.inputs.out_warp)
        return outputs
class P2PDistance(ComputeMeshWarp):
    """
    Calculates a point-to-point (p2p) distance between two corresponding
    VTK-readable meshes or contours.

    A point-to-point correspondence between nodes is required

    .. deprecated:: 1.0-dev
      Use :py:class:`ComputeMeshWarp` instead.
    """

    def __init__(self, **inputs):
        # Behaviorally identical to ComputeMeshWarp; only adds a
        # DeprecationWarning at construction time.
        super(P2PDistance, self).__init__(**inputs)
        warn(('This interface has been deprecated since 1.0, please use '
              'ComputeMeshWarp'),
             DeprecationWarning)
|
{
"content_hash": "d5a7a847b87f556c9949a2a2b3c953d2",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 78,
"avg_line_length": 33.35493827160494,
"alnum_prop": 0.5903581012306838,
"repo_name": "wanderine/nipype",
"id": "be9a10e287145360eb9571490adf236fdae78052",
"size": "10921",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nipype/algorithms/mesh.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "1854"
},
{
"name": "Matlab",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "4796302"
},
{
"name": "Shell",
"bytes": "380"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
}
|
from django.conf.urls import include, url
from django.contrib import admin
from agenteIA import views
# URL routes for the agenteIA app: the site root is served by views.index.
urlpatterns = [
    url(r'^$', views.index, name='index'),
]
|
{
"content_hash": "e9db89618e7452b707303f3df6f4f97e",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 23.428571428571427,
"alnum_prop": 0.725609756097561,
"repo_name": "DiogoDantas/tourgastronomico",
"id": "e873d33bef7685b04b8dec2882786fb6f7f6ea8f",
"size": "164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agenteIA/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2387"
},
{
"name": "HTML",
"bytes": "1336"
},
{
"name": "JavaScript",
"bytes": "4236"
},
{
"name": "Python",
"bytes": "11397"
}
],
"symlink_target": ""
}
|
import argparse
import collections
import json
import os.path
import re
import sys
import pdl
def main(argv):
    """Convert a .pdl protocol description file to .json.

    Parses ``argv`` (pdl_file, json_file, optional --map_binary_to_string),
    loads the .pdl via the ``pdl`` module and dumps it as indented JSON.
    Returns None on success (propagated to sys.exit()).
    """
    parser = argparse.ArgumentParser(description=(
        "Converts from .pdl to .json by invoking the pdl Python module."))
    # NOTE(review): argparse type=bool treats ANY non-empty string as True;
    # only the flag's presence is reliable here -- confirm before changing.
    parser.add_argument('--map_binary_to_string', type=bool,
                        help=('If set, binary in the .pdl is mapped to a '
                              'string in .json. Client code will have to '
                              'base64 decode the string to get the payload.'))
    parser.add_argument("pdl_file", help="The .pdl input file to parse.")
    parser.add_argument("json_file", help="The .json output file write.")
    args = parser.parse_args(argv)
    file_name = os.path.normpath(args.pdl_file)
    # Context managers guarantee both files are closed even if parsing or
    # serialization raises (the original leaked the handles on error).
    with open(file_name, "r") as input_file:
        pdl_string = input_file.read()
    protocol = pdl.loads(pdl_string, file_name, args.map_binary_to_string)
    with open(os.path.normpath(args.json_file), 'wb') as output_file:
        json.dump(protocol, output_file, indent=4, separators=(',', ': '))


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
|
{
"content_hash": "9917a095bf88100f61b4e2d0defca1d8",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 78,
"avg_line_length": 35.96875,
"alnum_prop": 0.6255430060816681,
"repo_name": "zero-rp/miniblink49",
"id": "96048f793d85a8e0d233c81d88dbd3711a60f05b",
"size": "1336",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "v8_7_5/third_party/inspector_protocol/convert_protocol_to_json.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "11324414"
},
{
"name": "Batchfile",
"bytes": "52488"
},
{
"name": "C",
"bytes": "31014938"
},
{
"name": "C++",
"bytes": "281193388"
},
{
"name": "CMake",
"bytes": "88548"
},
{
"name": "CSS",
"bytes": "20839"
},
{
"name": "DIGITAL Command Language",
"bytes": "226954"
},
{
"name": "HTML",
"bytes": "202637"
},
{
"name": "JavaScript",
"bytes": "32544926"
},
{
"name": "Lua",
"bytes": "32432"
},
{
"name": "M4",
"bytes": "125191"
},
{
"name": "Makefile",
"bytes": "1517330"
},
{
"name": "Objective-C",
"bytes": "87691"
},
{
"name": "Objective-C++",
"bytes": "35037"
},
{
"name": "PHP",
"bytes": "307541"
},
{
"name": "Perl",
"bytes": "3283676"
},
{
"name": "Prolog",
"bytes": "29177"
},
{
"name": "Python",
"bytes": "4308928"
},
{
"name": "R",
"bytes": "10248"
},
{
"name": "Scheme",
"bytes": "25457"
},
{
"name": "Shell",
"bytes": "264021"
},
{
"name": "TypeScript",
"bytes": "162421"
},
{
"name": "Vim script",
"bytes": "11362"
},
{
"name": "XS",
"bytes": "4319"
},
{
"name": "eC",
"bytes": "4383"
}
],
"symlink_target": ""
}
|
import pytest
import os
import time
import threading
import socket
import uuid
import urllib2
import json
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server.TServer import TServer
from Types.ttypes import TNetworkAddress
import StatestoreService.StatestoreSubscriber as Subscriber
import StatestoreService.StatestoreService as Statestore
from StatestoreService.StatestoreSubscriber import TUpdateStateResponse
from StatestoreService.StatestoreSubscriber import TTopicRegistration
from ErrorCodes.ttypes import TErrorCode
from Status.ttypes import TStatus
# Tests for the statestore. The StatestoreSubscriber class is a skeleton implementation of
# a Python-based statestore subscriber with additional hooks to allow testing. Each
# StatestoreSubscriber runs its own server so that the statestore may contact it.
#
# All tests in this file may be run in parallel. They assume that a statestore instance is
# already running, and is configured with out-of-the-box defaults (as is the case in our
# usual test environment) which govern failure-detector timeouts etc.
#
# These tests do not yet provide sufficient coverage.
# If no topic entries, do the first and second subscribers always get a callback?
#   Adding topic entries to a nonexistent topic
# Test for from_version and to_version behavior
# Test with many concurrent subscribers
# Test that only the subscribed-to topics are sent
# Test that topic deletions take effect correctly.
def get_unused_port():
  """Return a TCP port number that was free at the time of the call.

  The probe socket is released before returning, so another process may
  still grab the port before the caller binds it (inherent race).
  """
  s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  try:
    s.bind(('localhost', 0))
    _, port = s.getsockname()
    return port
  finally:
    # Close even if bind() fails so the file descriptor is never leaked
    # (the original only closed on the success path).
    s.close()
def get_statestore_subscribers(host='localhost', port=25010):
  """Fetch the statestore debug webpage's subscriber list, decoded from JSON."""
  url = "http://{0}:{1}/subscribers?json".format(host, port)
  return json.loads(urllib2.urlopen(url).read())
STATUS_OK = TStatus(TErrorCode.OK)
DEFAULT_UPDATE_STATE_RESPONSE = TUpdateStateResponse(status=STATUS_OK, topic_updates=[],
skipped=False)
class KillableThreadedServer(TServer):
  """Based on TServer.TThreadedServer, this server may be shutdown (by calling
  shutdown()), after which no new connections may be made. Most of the implementation is
  directly copied from Thrift."""

  def __init__(self, *args, **kwargs):
    TServer.__init__(self, *args)
    self.daemon = kwargs.get("daemon", False)
    self.port = kwargs.get("port")
    self.is_shutdown = False
    # Open input transports, tracked so shutdown() can force-close them.
    self.transports = set()

  def shutdown(self):
    """Stops accepting connections, closes all open transports, and blocks
    until the server socket is confirmed down."""
    self.is_shutdown = True
    self.serverTransport.close()
    for t in self.transports:
      t.close()
    self.wait_until_down()

  def wait_until_up(self, num_tries=10):
    """Polls (up to num_tries attempts, 100ms apart) until the server accepts
    a connection; re-raises the last connection error if it never does."""
    for i in xrange(num_tries):
      cnxn = TSocket.TSocket('localhost', self.port)
      try:
        cnxn.open()
        return
      except Exception, e:
        if i == num_tries - 1: raise
      time.sleep(0.1)

  def wait_until_down(self, num_tries=10):
    """Polls until connecting fails; raises if the server is still reachable
    after num_tries attempts."""
    for i in xrange(num_tries):
      cnxn = TSocket.TSocket('localhost', self.port)
      try:
        cnxn.open()
        time.sleep(0.1)
      except Exception, e:
        return
    raise Exception("Server did not stop")

  def serve(self):
    # Accept loop: one handler thread per incoming connection, until
    # shutdown() flips is_shutdown.
    self.serverTransport.listen()
    while not self.is_shutdown:
      client = self.serverTransport.accept()
      # Since accept() can take a while, check again if the server is shutdown to avoid
      # starting an unnecessary thread.
      if self.is_shutdown: return
      t = threading.Thread(target=self.handle, args=(client,))
      t.setDaemon(self.daemon)
      t.start()

  def handle(self, client):
    """Processes RPCs from one client connection until it closes, errors, or
    the server shuts down. Runs on a per-connection thread."""
    itrans = self.inputTransportFactory.getTransport(client)
    self.transports.add(itrans)
    otrans = self.outputTransportFactory.getTransport(client)
    iprot = self.inputProtocolFactory.getProtocol(itrans)
    oprot = self.outputProtocolFactory.getProtocol(otrans)
    try:
      while not self.is_shutdown:
        self.processor.process(iprot, oprot)
    except TTransport.TTransportException, tx:
      # Normal disconnection path; nothing to do.
      pass
    except Exception, x:
      print x
    itrans.close()
    otrans.close()
    self.transports.remove(itrans)
class StatestoreSubscriber(object):
  """A bare-bones subscriber skeleton. Tests should create a new StatestoreSubscriber(),
  call start() and then register(). The subscriber will run a Thrift server on an unused
  port, and after registration the statestore will call Heartbeat() and UpdateState() via
  RPC. Tests can provide callbacks to the constructor that will be called during those
  RPCs, and this is the easiest way to check that the statestore protocol is being
  correctly followed. Tests should use wait_for_* methods to confirm that some event (like
  an RPC call) has happened asynchronously.

  Since RPC callbacks will execute on a different thread from the main one, any assertions
  there will not trigger a test failure without extra plumbing. What we do is simple: any
  exceptions during an RPC are caught and stored, and the check_thread_exceptions() method
  will re-raise them.

  The methods that may be called by a test deliberately return 'self' to allow for
  chaining, see test_failure_detected() for an example of how this makes the test flow
  more readable."""

  def __init__(self, port=None, heartbeat_cb=None, update_cb=None):
    self.port = port if port else get_unused_port()
    # One (condition, counter) pair per RPC type; the condition is notified
    # on every RPC so wait_for_* callers can wake up and re-check.
    self.heartbeat_event, self.heartbeat_count = threading.Condition(), 0
    self.update_event, self.update_count = threading.Condition(), 0
    self.heartbeat_cb, self.update_cb = heartbeat_cb, update_cb
    # First exception raised by a callback; re-raised by
    # check_thread_exceptions() on the main thread.
    self.exception = None

  def Heartbeat(self, args):
    """Heartbeat RPC handler. Calls heartbeat callback if one exists."""
    self.heartbeat_event.acquire()
    try:
      self.heartbeat_count += 1
      response = Subscriber.THeartbeatResponse()
      # Once a callback has raised, stop invoking it so the first error wins.
      if self.heartbeat_cb is not None and self.exception is None:
        try:
          response = self.heartbeat_cb(self, args)
        except Exception, e:
          self.exception = e
      self.heartbeat_event.notify()
    finally:
      self.heartbeat_event.release()
    return response

  def UpdateState(self, args):
    """UpdateState RPC handler. Calls update callback if one exists."""
    self.update_event.acquire()
    try:
      self.update_count += 1
      response = DEFAULT_UPDATE_STATE_RESPONSE
      # Same first-error-wins policy as Heartbeat().
      if self.update_cb is not None and self.exception is None:
        try:
          response = self.update_cb(self, args)
        except Exception, e:
          self.exception = e
      self.update_event.notify()
    finally:
      self.update_event.release()
    return response

  def __init_server(self):
    """Starts this subscriber's Thrift server on a daemon thread and blocks
    until it accepts connections."""
    processor = Subscriber.Processor(self)
    transport = TSocket.TServerSocket(port=self.port)
    tfactory = TTransport.TBufferedTransportFactory()
    pfactory = TBinaryProtocol.TBinaryProtocolFactory()
    self.server = KillableThreadedServer(processor, transport, tfactory, pfactory,
                                         daemon=True, port=self.port)
    self.server_thread = threading.Thread(target=self.server.serve)
    self.server_thread.setDaemon(True)
    self.server_thread.start()
    self.server.wait_until_up()

  def __init_client(self):
    # Client connection to the statestore itself (assumed on port 24000).
    self.client_transport = \
        TTransport.TBufferedTransport(TSocket.TSocket('localhost', 24000))
    self.protocol = TBinaryProtocol.TBinaryProtocol(self.client_transport)
    self.client = Statestore.Client(self.protocol)
    self.client_transport.open()

  def check_thread_exceptions(self):
    """Checks if an exception was raised and stored in a callback thread"""
    if self.exception is not None: raise self.exception

  def kill(self):
    """Closes both the server and client sockets, and waits for the server to become
    unavailable"""
    self.client_transport.close()
    self.server.shutdown()
    return self

  def start(self):
    """Starts a subscriber server, and opens a client to the statestore. Returns only when
    the server is running."""
    self.__init_server()
    self.__init_client()
    return self

  def register(self, topics=None):
    """Call the Register() RPC"""
    # A fresh UUID-based id per call, so re-registration is distinguishable.
    self.subscriber_id = "python-test-client-%s" % uuid.uuid1()
    if topics is None: topics = []
    request = Subscriber.TRegisterSubscriberRequest(
      topic_registrations=topics,
      subscriber_location=TNetworkAddress("localhost", self.port),
      subscriber_id=self.subscriber_id)
    response = self.client.RegisterSubscriber(request)
    if response.status.status_code == TErrorCode.OK:
      self.registration_id = response.registration_id
    else:
      raise Exception("Registration failed: %s, %s" %
                      (response.status.status_code,
                       '\n'.join(response.status.error_msgs)))
    return self

  def wait_for_heartbeat(self, count=None):
    """Waits for some number of heartbeats. If 'count' is provided, waits until the number
    of heartbeats seen by this subscriber exceeds count, otherwise waits for one further
    heartbeat."""
    self.heartbeat_event.acquire()
    try:
      if count is not None and self.heartbeat_count >= count: return self
      if count is None: count = self.heartbeat_count + 1
      while count > self.heartbeat_count:
        self.check_thread_exceptions()
        last_count = self.heartbeat_count
        # Condition.wait() can wake spuriously; a 10s wait with no counter
        # progress is treated as a missing heartbeat.
        self.heartbeat_event.wait(10)
        if last_count == self.heartbeat_count:
          raise Exception("Heartbeat not received within 10s (heartbeat count: %s)" %
                          self.heartbeat_count)
      self.check_thread_exceptions()
      return self
    finally:
      self.heartbeat_event.release()

  def wait_for_update(self, count=None):
    """Waits for some number of updates. If 'count' is provided, waits until the number
    of updates seen by this subscriber exceeds count, otherwise waits for one further
    update."""
    self.update_event.acquire()
    try:
      if count is not None and self.update_count >= count: return self
      if count is None: count = self.update_count + 1
      while count > self.update_count:
        self.check_thread_exceptions()
        last_count = self.update_count
        # See wait_for_heartbeat(): no progress within 10s means failure.
        self.update_event.wait(10)
        if last_count == self.update_count:
          raise Exception("Update not received within 10s (update count: %s)" %
                          self.update_count)
      self.check_thread_exceptions()
      return self
    finally:
      self.update_event.release()

  def wait_for_failure(self, timeout=20):
    """Waits until this subscriber no longer appears in the statestore's subscriber
    list. If 'timeout' seconds pass, throws an exception."""
    start = time.time()
    while time.time() - start < timeout:
      # Poll the statestore's debug webpage for the current subscriber set.
      subs = [s["id"] for s in get_statestore_subscribers()["subscribers"]]
      if self.subscriber_id not in subs: return self
      time.sleep(0.2)
    raise Exception("Subscriber %s did not fail in %ss" % (self.subscriber_id, timeout))
class TestStatestore():
  """Integration tests against a locally-running statestore instance (see the
  module header for assumptions)."""

  def make_topic_update(self, topic_name, key_template="foo", value_template="bar",
                        num_updates=1, deletions=None):
    """Builds a non-delta TTopicDelta for topic_name with num_updates entries
    whose keys/values are the given templates with an index appended."""
    topic_entries = [
      Subscriber.TTopicItem(key=key_template + str(x), value=value_template + str(x))
      for x in xrange(num_updates)]
    if deletions is None: deletions = []
    return Subscriber.TTopicDelta(topic_name=topic_name,
                                  topic_entries=topic_entries,
                                  topic_deletions=deletions,
                                  is_delta=False)

  def test_registration_ids_different(self):
    """Test that if a subscriber with the same id registers twice, the registration ID is
    different"""
    sub = StatestoreSubscriber()
    sub.start().register()
    old_reg_id = sub.registration_id
    sub.register()
    assert old_reg_id != sub.registration_id

  def test_receive_heartbeats(self):
    """Smoke test to confirm that heartbeats get sent to a correctly registered
    subscriber"""
    sub = StatestoreSubscriber()
    sub.start().register().wait_for_heartbeat(5)

  def test_receive_updates(self):
    """Test that updates are correctly received when a subscriber alters a topic"""
    topic_name = "topic_delta_%s" % uuid.uuid1()

    def topic_update_correct(sub, args):
      # Update 1: publish one entry. Update 2: expect it echoed back.
      # Update 3: expect an empty delta once the statestore caught up.
      delta = self.make_topic_update(topic_name)
      if sub.update_count == 1:
        return TUpdateStateResponse(status=STATUS_OK, topic_updates=[delta],
                                    skipped=False)
      elif sub.update_count == 2:
        assert len(args.topic_deltas) == 1
        assert args.topic_deltas[topic_name].topic_entries == delta.topic_entries
        assert args.topic_deltas[topic_name].topic_name == delta.topic_name
        assert args.topic_deltas[topic_name].topic_deletions == delta.topic_deletions
      elif sub.update_count == 3:
        # After the content-bearing update was processed, the next delta should be empty
        assert len(args.topic_deltas[topic_name].topic_entries) == 0
        assert len(args.topic_deltas[topic_name].topic_deletions) == 0
      return DEFAULT_UPDATE_STATE_RESPONSE

    sub = StatestoreSubscriber(update_cb=topic_update_correct)
    reg = TTopicRegistration(topic_name=topic_name, is_transient=False)
    (
      sub.start()
         .register(topics=[reg])
         .wait_for_update(3)
    )

  def test_update_is_delta(self):
    """Test that the 'is_delta' flag is correctly set. The first update for a topic should
    always not be a delta, and so should all subsequent updates until the subscriber says
    it has not skipped the update."""
    topic_name = "test_update_is_delta_%s" % uuid.uuid1()

    def check_delta(sub, args):
      if sub.update_count == 1:
        assert args.topic_deltas[topic_name].is_delta == False
        delta = self.make_topic_update(topic_name)
        return TUpdateStateResponse(status=STATUS_OK, topic_updates=[delta],
                                    skipped=False)
      elif sub.update_count == 2:
        assert args.topic_deltas[topic_name].is_delta == False
      elif sub.update_count == 3:
        # Only once an update was acknowledged (not skipped) do deltas start.
        assert args.topic_deltas[topic_name].is_delta == True
        assert len(args.topic_deltas[topic_name].topic_entries) == 0
        assert args.topic_deltas[topic_name].to_version == 1
      return DEFAULT_UPDATE_STATE_RESPONSE

    sub = StatestoreSubscriber(update_cb=check_delta)
    reg = TTopicRegistration(topic_name=topic_name, is_transient=False)
    (
      sub.start()
         .register(topics=[reg])
         .wait_for_update(3)
    )

  def test_skipped(self):
    """Test that skipping an update causes it to be resent"""
    topic_name = "test_skipped_%s" % uuid.uuid1()

    def check_skipped(sub, args):
      if sub.update_count == 1:
        update = self.make_topic_update(topic_name)
        return TUpdateStateResponse(status=STATUS_OK, topic_updates=[update],
                                    skipped=False)
      # All subsequent updates: set skipped=True and expected the full topic to be resent
      # every time
      assert args.topic_deltas[topic_name].is_delta == False
      assert len(args.topic_deltas[topic_name].topic_entries) == 1
      return TUpdateStateResponse(status=STATUS_OK, skipped=True)

    sub = StatestoreSubscriber(update_cb=check_skipped)
    reg = TTopicRegistration(topic_name=topic_name, is_transient=False)
    (
      sub.start()
         .register(topics=[reg])
         .wait_for_update(3)
    )

  def test_failure_detected(self):
    # Killing the subscriber's server should make the statestore's failure
    # detector drop it from the subscriber list.
    sub = StatestoreSubscriber()
    (
      sub.start()
         .register()
         .wait_for_update(1)
         .kill()
         .wait_for_failure()
    )

  def test_hung_heartbeat(self):
    """Test for IMPALA-1712: If heartbeats hang (which we simulate by sleeping for five
    minutes) the statestore should time them out every 3s and then eventually fail after
    40s (10 times (3 + 1), where the 1 is the inter-heartbeat delay)"""
    sub = StatestoreSubscriber(heartbeat_cb=lambda sub, args: time.sleep(300))
    (
      sub.start()
         .register()
         .wait_for_update(1)
         .wait_for_failure(timeout=60)
    )

  def test_topic_persistence(self):
    """Test that persistent topic entries survive subscriber failure, but transient topic
    entries are erased when the associated subscriber fails"""
    topic_id = str(uuid.uuid1())
    persistent_topic_name = "test_topic_persistence_persistent_%s" % topic_id
    transient_topic_name = "test_topic_persistence_transient_%s" % topic_id

    def add_entries(sub, args):
      # First subscriber: publish one entry to each topic, then get killed.
      if sub.update_count == 1:
        updates = [self.make_topic_update(persistent_topic_name),
                   self.make_topic_update(transient_topic_name)]
        return TUpdateStateResponse(status=STATUS_OK, topic_updates=updates,
                                    skipped=False)
      return DEFAULT_UPDATE_STATE_RESPONSE

    def check_entries(sub, args):
      # Second subscriber: the persistent entry survived, the transient
      # one was erased with its publisher.
      if sub.update_count == 1:
        assert len(args.topic_deltas[transient_topic_name].topic_entries) == 0
        assert len(args.topic_deltas[persistent_topic_name].topic_entries) == 1
        # Statestore should not send deletions when the update is not a delta, see
        # IMPALA-1891
        # assert len(args.topic_deltas[transient_topic_name].topic_deletions) == 0
      return DEFAULT_UPDATE_STATE_RESPONSE

    reg = [TTopicRegistration(topic_name=persistent_topic_name, is_transient=False),
           TTopicRegistration(topic_name=transient_topic_name, is_transient=True)]
    sub = StatestoreSubscriber(update_cb=add_entries)
    (
      sub.start()
         .register(topics=reg)
         .wait_for_update(2)
         .kill()
         .wait_for_failure()
    )
    sub2 = StatestoreSubscriber(update_cb=check_entries)
    (
      sub2.start()
          .register(topics=reg)
          .wait_for_update(1)
    )
|
{
"content_hash": "831efe79be7b094f3bbf4abdacf3cf63",
"timestamp": "",
"source": "github",
"line_count": 457,
"max_line_length": 90,
"avg_line_length": 39.155361050328224,
"alnum_prop": 0.6757013524086286,
"repo_name": "tempbottle/Impala",
"id": "164612bfc5596e68dd137f790d4993cdd9c1428d",
"size": "18494",
"binary": false,
"copies": "2",
"ref": "refs/heads/cdh5-trunk",
"path": "tests/statestore/test_statestore.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "203216"
},
{
"name": "C++",
"bytes": "7449363"
},
{
"name": "CMake",
"bytes": "105233"
},
{
"name": "CSS",
"bytes": "89516"
},
{
"name": "Groff",
"bytes": "1633"
},
{
"name": "HTML",
"bytes": "56"
},
{
"name": "Java",
"bytes": "3552322"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Lex",
"bytes": "21737"
},
{
"name": "PLpgSQL",
"bytes": "393"
},
{
"name": "Protocol Buffer",
"bytes": "630"
},
{
"name": "Python",
"bytes": "1727196"
},
{
"name": "SQLPL",
"bytes": "3253"
},
{
"name": "Shell",
"bytes": "161261"
},
{
"name": "Thrift",
"bytes": "243219"
},
{
"name": "Yacc",
"bytes": "79859"
}
],
"symlink_target": ""
}
|
import re
from django.forms import CharField, Form, Media
from django.http import HttpRequest
from django.middleware.csrf import (
CsrfViewMiddleware, _compare_salted_tokens as equivalent_tokens, get_token,
)
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.template.backends.dummy import TemplateStrings
from django.test import SimpleTestCase
class TemplateStringsTests(SimpleTestCase):
    """Test suite for the dummy ``string.Template``-based backend.

    Subclasses can run the same suite against another template backend by
    overriding ``engine_class``, ``backend_name`` and ``options``; tests that
    cannot apply to the dummy backend skip themselves based on
    ``backend_name``.
    """

    engine_class = TemplateStrings
    backend_name = 'dummy'
    options = {}

    @classmethod
    def setUpClass(cls):
        super(TemplateStringsTests, cls).setUpClass()
        # Minimal engine configuration; APP_DIRS makes the backend find the
        # 'template_backends' templates shipped with the test app.
        params = {
            'DIRS': [],
            'APP_DIRS': True,
            'NAME': cls.backend_name,
            'OPTIONS': cls.options,
        }
        cls.engine = cls.engine_class(params)

    def test_from_string(self):
        template = self.engine.from_string("Hello!\n")
        content = template.render()
        self.assertEqual(content, "Hello!\n")

    def test_get_template(self):
        template = self.engine.get_template('template_backends/hello.html')
        content = template.render({'name': 'world'})
        self.assertEqual(content, "Hello world!\n")

    def test_get_template_non_existing(self):
        # The raised exception must carry a reference to the engine that
        # failed, so multi-engine lookups can report their origin.
        with self.assertRaises(TemplateDoesNotExist) as e:
            self.engine.get_template('template_backends/non_existing.html')
        self.assertEqual(e.exception.backend, self.engine)

    def test_get_template_syntax_error(self):
        # There's no way to trigger a syntax error with the dummy backend.
        # The test still lives here to factor it between other backends.
        if self.backend_name == 'dummy':
            self.skipTest("test doesn't apply to dummy backend")
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('template_backends/syntax_error.html')

    def test_html_escaping(self):
        template = self.engine.get_template('template_backends/hello.html')
        context = {'name': '<script>alert("XSS!");</script>'}
        content = template.render(context)

        # BUG FIX: both assertions previously tested the same raw literal
        # ('<script>' asserted both present and absent), which can never
        # pass. The rendered output must contain the HTML-escaped form and
        # must not contain the raw tag.
        self.assertIn('&lt;script&gt;', content)
        self.assertNotIn('<script>', content)

    def test_django_html_escaping(self):
        if self.backend_name == 'dummy':
            self.skipTest("test doesn't apply to dummy backend")

        class TestForm(Form):
            test_field = CharField()

        media = Media(js=['my-script.js'])
        form = TestForm()
        template = self.engine.get_template('template_backends/django_escaping.html')
        content = template.render({'media': media, 'test_form': form})
        expected = '{}\n\n{}\n\n{}'.format(media, form, form['test_field'])
        self.assertHTMLEqual(content, expected)

    def test_csrf_token(self):
        request = HttpRequest()
        CsrfViewMiddleware().process_view(request, lambda r: None, (), {})

        template = self.engine.get_template('template_backends/csrf.html')
        content = template.render(request=request)

        # The token value may be rendered with either quote style, hence the
        # second attempt with the quotes swapped.
        expected = '<input type="hidden" name="csrfmiddlewaretoken" value="([^"]+)" />'
        match = re.match(expected, content) or re.match(expected.replace('"', "'"), content)
        self.assertTrue(match, "hidden csrftoken field not found in output")
        self.assertTrue(equivalent_tokens(match.group(1), get_token(request)))

    def test_no_directory_traversal(self):
        with self.assertRaises(TemplateDoesNotExist):
            self.engine.get_template('../forbidden/template_backends/hello.html')

    def test_non_ascii_characters(self):
        template = self.engine.get_template('template_backends/hello.html')
        content = template.render({'name': 'Jérôme'})
        self.assertEqual(content, "Hello Jérôme!\n")
|
{
"content_hash": "ed181d33917039dc6aa6be0e3a1a5096",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 92,
"avg_line_length": 39.104166666666664,
"alnum_prop": 0.6555673947789025,
"repo_name": "twz915/django",
"id": "dededfe390a109bbafdcfd1be4aa84e70e5bce5c",
"size": "3758",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/template_backends/test_dummy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "55929"
},
{
"name": "HTML",
"bytes": "182880"
},
{
"name": "JavaScript",
"bytes": "252645"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11852079"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
"""Minimal legacy-style Django settings for running the test suite."""
import os

# Absolute, symlink-resolved directory containing this settings module.
this_dir = os.path.realpath(os.path.dirname(__file__))

DATABASE_ENGINE = 'sqlite3'
# Hidden SQLite file kept next to the tests.
DATABASE_NAME = '%s/.testing.db' % this_dir

INSTALLED_APPS = ('psz',)
|
{
"content_hash": "39db14166cc1861b16026e024b080fed",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 54,
"avg_line_length": 26.666666666666668,
"alnum_prop": 0.69375,
"repo_name": "JohnSpeno/python-secure-zone",
"id": "0b1d9ac0a22ef64c8c91c46b773dffa12bed9aed",
"size": "160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51196"
},
{
"name": "Shell",
"bytes": "239"
}
],
"symlink_target": ""
}
|
from netforce.model import Model, fields
import time
class SaleOrderPromotion(Model):
    """Child record describing a promotion applied to a sales order."""
    _name = "sale.order.promotion"
    _fields = {
        # Parent sales order; the record is removed with it (on_delete="cascade").
        "sale_id": fields.Many2One("sale.order", "Sales Order", required=True, on_delete="cascade"),
        "promotion_id": fields.Many2One("sale.promotion","Promotion",required=True),
        # Optional: apply the promotion to a single product rather than the whole order.
        "product_id": fields.Many2One("product","Apply To Product"),
        # Discount expressed as a percentage or a fixed amount (either may be set).
        "percent": fields.Decimal("Promotion Percent"),
        "amount": fields.Decimal("Promotion Amount"),
    }
    _order = "id"

SaleOrderPromotion.register()
|
{
"content_hash": "10bdf2896710a8005c67247e5caeb2dd",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 100,
"avg_line_length": 34.9375,
"alnum_prop": 0.6654740608228981,
"repo_name": "nfco/netforce",
"id": "d31bb05aae7bcfd915261ec094d8ae08bbe3d851",
"size": "1664",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "netforce_sale/netforce_sale/models/sale_order_promotion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "73"
},
{
"name": "CSS",
"bytes": "407336"
},
{
"name": "HTML",
"bytes": "478918"
},
{
"name": "Java",
"bytes": "11870"
},
{
"name": "JavaScript",
"bytes": "3712147"
},
{
"name": "Makefile",
"bytes": "353"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "3469515"
},
{
"name": "Roff",
"bytes": "15858"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import frappe, unittest, uuid
from frappe.model.delete_doc import delete_doc
from frappe.utils.data import today, add_to_date
from frappe import _dict
from frappe.utils import get_url
from frappe.core.doctype.user.user import get_total_users
from frappe.core.doctype.user.user import MaxUsersReachedError, test_password_strength
from frappe.core.doctype.user.user import extract_mentions
from frappe.frappeclient import FrappeClient
# Shared fixture records for the User doctype, used by the tests below
# (e.g. test_delete copies test_records[1]).
test_records = frappe.get_test_records('User')
class TestUser(unittest.TestCase):
	"""Integration tests for the User doctype: user-type derivation from
	roles, deletion with linked documents, db.get_value lookups, permlevel
	validation, password-strength policy, comment mentions and
	reset-password rate limiting. Runs against a live Frappe site/DB."""

	def tearDown(self):
		# disable password strength test
		frappe.db.set_value("System Settings", "System Settings", "enable_password_policy", 0)
		frappe.db.set_value("System Settings", "System Settings", "minimum_password_score", "")
		frappe.db.set_value("System Settings", "System Settings", "password_reset_limit", 3)
		frappe.set_user('Administrator')

	def test_user_type(self):
		"""user_type must flip between Website/System User as desk roles change."""
		new_user = frappe.get_doc(dict(doctype='User', email='test-for-type@example.com',
			first_name='Tester')).insert()
		self.assertEqual(new_user.user_type, 'Website User')

		# social login userid for frappe
		self.assertTrue(new_user.social_logins[0].userid)
		self.assertEqual(new_user.social_logins[0].provider, "frappe")

		# role with desk access
		new_user.add_roles('_Test Role 2')
		new_user.save()
		self.assertEqual(new_user.user_type, 'System User')

		# clear role
		new_user.roles = []
		new_user.save()
		self.assertEqual(new_user.user_type, 'Website User')

		# role without desk access
		new_user.add_roles('_Test Role 4')
		new_user.save()
		self.assertEqual(new_user.user_type, 'Website User')

		delete_contact(new_user.name)
		frappe.delete_doc('User', new_user.name)

	def test_delete(self):
		"""Deleting a Role in use raises LinkExistsError; deleting a User
		removes that user's ToDo rows."""
		frappe.get_doc("User", "test@example.com").add_roles("_Test Role 2")
		self.assertRaises(frappe.LinkExistsError, delete_doc, "Role", "_Test Role 2")
		frappe.db.sql("""delete from `tabHas Role` where role='_Test Role 2'""")
		delete_doc("Role","_Test Role 2")

		if frappe.db.exists("User", "_test@example.com"):
			delete_contact("_test@example.com")
			delete_doc("User", "_test@example.com")

		user = frappe.copy_doc(test_records[1])
		user.email = "_test@example.com"
		user.insert()

		frappe.get_doc({"doctype": "ToDo", "description": "_Test"}).insert()

		delete_contact("_test@example.com")
		delete_doc("User", "_test@example.com")

		self.assertTrue(not frappe.db.sql("""select * from `tabToDo` where owner=%s""",
			("_test@example.com",)))

		# re-create the role deleted above so later tests can rely on it
		from frappe.core.doctype.role.test_role import test_records as role_records
		frappe.copy_doc(role_records[1]).insert()

	def test_get_value(self):
		"""Exercise frappe.db.get_value with name, filter-dict, single field,
		field list, '*' as_dict, missing record and Single-doctype forms."""
		self.assertEqual(frappe.db.get_value("User", "test@example.com"), "test@example.com")
		self.assertEqual(frappe.db.get_value("User", {"email":"test@example.com"}), "test@example.com")
		self.assertEqual(frappe.db.get_value("User", {"email":"test@example.com"}, "email"), "test@example.com")
		self.assertEqual(frappe.db.get_value("User", {"email":"test@example.com"}, ["first_name", "email"]),
			("_Test", "test@example.com"))
		self.assertEqual(frappe.db.get_value("User",
			{"email":"test@example.com", "first_name": "_Test"},
			["first_name", "email"]),
			("_Test", "test@example.com"))

		test_user = frappe.db.sql("select * from tabUser where name='test@example.com'",
			as_dict=True)[0]
		self.assertEqual(frappe.db.get_value("User", {"email":"test@example.com"}, "*", as_dict=True),
			test_user)

		self.assertEqual(frappe.db.get_value("User", "xxxtest@example.com"), None)

		frappe.db.set_value("Website Settings", "Website Settings", "_test", "_test_val")
		self.assertEqual(frappe.db.get_value("Website Settings", None, "_test"), "_test_val")
		self.assertEqual(frappe.db.get_value("Website Settings", "Website Settings", "_test"), "_test_val")

	def test_high_permlevel_validations(self):
		"""A user must not be able to grant themselves a high-permlevel role
		('roles' is a high-permlevel field); only Administrator can."""
		user = frappe.get_meta("User")
		self.assertTrue("roles" in [d.fieldname for d in user.get_high_permlevel_fields()])

		me = frappe.get_doc("User", "testperm@example.com")
		me.remove_roles("System Manager")

		frappe.set_user("testperm@example.com")

		me = frappe.get_doc("User", "testperm@example.com")
		me.add_roles("System Manager")

		# system manager is not added (it is reset)
		self.assertFalse('System Manager' in [d.role for d in me.roles])

		frappe.set_user("Administrator")

		me = frappe.get_doc("User", "testperm@example.com")
		me.add_roles("System Manager")

		# system manager now added by Administrator
		self.assertTrue("System Manager" in [d.role for d in me.get("roles")])

	# def test_deny_multiple_sessions(self):
	#	from frappe.installer import update_site_config
	#	clear_limit('users')
	#
	#	# allow one session
	#	user = frappe.get_doc('User', 'test@example.com')
	#	user.simultaneous_sessions = 1
	#	user.new_password = 'Eastern_43A1W'
	#	user.save()
	#
	#	def test_request(conn):
	#		value = conn.get_value('User', 'first_name', {'name': 'test@example.com'})
	#		self.assertTrue('first_name' in value)
	#
	#	from frappe.frappeclient import FrappeClient
	#	update_site_config('deny_multiple_sessions', 0)
	#
	#	conn1 = FrappeClient(get_url(), "test@example.com", "Eastern_43A1W", verify=False)
	#	test_request(conn1)
	#
	#	conn2 = FrappeClient(get_url(), "test@example.com", "Eastern_43A1W", verify=False)
	#	test_request(conn2)
	#
	#	update_site_config('deny_multiple_sessions', 1)
	#	conn3 = FrappeClient(get_url(), "test@example.com", "Eastern_43A1W", verify=False)
	#	test_request(conn3)
	#
	#	# first connection should fail
	#	test_request(conn1)

	def test_delete_user(self):
		"""A user linked from a Communication must still be deletable."""
		new_user = frappe.get_doc(dict(doctype='User', email='test-for-delete@example.com',
			first_name='Tester Delete User')).insert()
		self.assertEqual(new_user.user_type, 'Website User')

		# role with desk access
		new_user.add_roles('_Test Role 2')
		new_user.save()
		self.assertEqual(new_user.user_type, 'System User')

		comm = frappe.get_doc({
			"doctype":"Communication",
			"subject": "To check user able to delete even if linked with communication",
			"content": "To check user able to delete even if linked with communication",
			"sent_or_received": "Sent",
			"user": new_user.name
		})
		comm.insert(ignore_permissions=True)

		delete_contact(new_user.name)
		frappe.delete_doc('User', new_user.name)
		self.assertFalse(frappe.db.exists('User', new_user.name))

	def test_password_strength(self):
		# Test Password without Password Strength Policy
		frappe.db.set_value("System Settings", "System Settings", "enable_password_policy", 0)

		# password policy is disabled, test_password_strength should be ignored
		result = test_password_strength("test_password")
		self.assertFalse(result.get("feedback", None))

		# Test Password with Password Strength Policy Set
		frappe.db.set_value("System Settings", "System Settings", "enable_password_policy", 1)
		frappe.db.set_value("System Settings", "System Settings", "minimum_password_score", 2)

		# Score 1; should now fail
		result = test_password_strength("bee2ve")
		self.assertEqual(result['feedback']['password_policy_validation_passed'], False)

		# Score 4; should pass
		result = test_password_strength("Eastern_43A1W")
		self.assertEqual(result['feedback']['password_policy_validation_passed'], True)

	def test_comment_mentions(self):
		"""extract_mentions must find single, multiple and group mentions in
		Quill-style mention markup (groups expand to their members)."""
		comment = '''
			<span class="mention" data-id="test.comment@example.com" data-value="Test" data-denotation-char="@">
				<span><span class="ql-mention-denotation-char">@</span>Test</span>
			</span>
		'''
		self.assertEqual(extract_mentions(comment)[0], "test.comment@example.com")

		comment = '''
			<div>
				Testing comment,
				<span class="mention" data-id="test.comment@example.com" data-value="Test" data-denotation-char="@">
					<span><span class="ql-mention-denotation-char">@</span>Test</span>
				</span>
				please check
			</div>
		'''
		self.assertEqual(extract_mentions(comment)[0], "test.comment@example.com")

		comment = '''
			<div>
				Testing comment for
				<span class="mention" data-id="test_user@example.com" data-value="Test" data-denotation-char="@">
					<span><span class="ql-mention-denotation-char">@</span>Test</span>
				</span>
				and
				<span class="mention" data-id="test.again@example1.com" data-value="Test" data-denotation-char="@">
					<span><span class="ql-mention-denotation-char">@</span>Test</span>
				</span>
				please check
			</div>
		'''
		self.assertEqual(extract_mentions(comment)[0], "test_user@example.com")
		self.assertEqual(extract_mentions(comment)[1], "test.again@example1.com")

		doc = frappe.get_doc({
			'doctype': 'User Group',
			'name': 'Team',
			'user_group_members': [{
				'user': 'test@example.com'
			}, {
				'user': 'test1@example.com'
			}]
		})

		doc.insert(ignore_if_duplicate=True)

		comment = '''
			<div>
				Testing comment for
				<span class="mention" data-id="Team" data-value="Team" data-is-group="true" data-denotation-char="@">
					<span><span class="ql-mention-denotation-char">@</span>Team</span>
				</span>
				please check
			</div>
		'''
		self.assertListEqual(extract_mentions(comment), ['test@example.com', 'test1@example.com'])

	def test_rate_limiting_for_reset_password(self):
		# Allow only one reset request for a day
		frappe.db.set_value("System Settings", "System Settings", "password_reset_limit", 1)
		frappe.db.commit()

		url = get_url()
		data={'cmd': 'frappe.core.doctype.user.user.reset_password', 'user': 'test@test.com'}

		# Clear rate limit tracker to start fresh
		key = f"rl:{data['cmd']}:{data['user']}"
		frappe.cache().delete(key)

		c = FrappeClient(url)
		res1 = c.session.post(url, data=data, verify=c.verify, headers=c.headers)
		res2 = c.session.post(url, data=data, verify=c.verify, headers=c.headers)

		# second request within the window must be rejected (417 Expectation Failed)
		self.assertEqual(res1.status_code, 200)
		self.assertEqual(res2.status_code, 417)

	# def test_user_rollback(self):
	# 	"""
	# 	FIXME: This is failing with PR #12693 as Rollback can't happen if notifications sent on user creation.
	# 	Make sure that notifications disabled.
	# 	"""
	# 	frappe.db.commit()
	# 	frappe.db.begin()
	# 	user_id = str(uuid.uuid4())
	# 	email = f'{user_id}@example.com'
	# 	try:
	# 		frappe.flags.in_import = True # disable throttling
	# 		frappe.get_doc(dict(
	# 			doctype='User',
	# 			email=email,
	# 			first_name=user_id,
	# 		)).insert()
	# 	finally:
	# 		frappe.flags.in_import = False

	# 	# Check user has been added
	# 	self.assertIsNotNone(frappe.db.get("User", {"email": email}))

	# 	# Check that rollback works
	# 	frappe.db.rollback()
	# 	self.assertIsNone(frappe.db.get("User", {"email": email}))
def delete_contact(user):
	"""Hard-delete the Contact row whose email_id is *user*, along with its
	child Contact Email rows (direct SQL, bypassing doc events)."""
	for query in ("DELETE FROM `tabContact` WHERE `email_id`= %s",
			"DELETE FROM `tabContact Email` WHERE `email_id`= %s"):
		frappe.db.sql(query, user)
|
{
"content_hash": "429aeee5f58cbbe402791a0467a8d869",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 106,
"avg_line_length": 36.13422818791946,
"alnum_prop": 0.687778603268945,
"repo_name": "saurabh6790/frappe",
"id": "5bea76793463663e6ee841ea9c7edfbc83d72bd6",
"size": "10868",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "frappe/core/doctype/user/test_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63276"
},
{
"name": "HTML",
"bytes": "218921"
},
{
"name": "JavaScript",
"bytes": "2152738"
},
{
"name": "Less",
"bytes": "36947"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "3261616"
},
{
"name": "SCSS",
"bytes": "223084"
},
{
"name": "Shell",
"bytes": "3358"
},
{
"name": "Vue",
"bytes": "49860"
}
],
"symlink_target": ""
}
|
from . import OpenMayaRender
from . import OpenMayaAnim
import _OpenMayaMPx
from . import OpenMaya
from . import OpenMayaUI
import weakref
from __builtin__ import property as _swig_property
from __builtin__ import object as _object
class MPxNode(_object):
    """Base class for user-defined dependency graph nodes.

    NOTE(review): every method body in this class is a bare ``pass`` and the
    module imports ``_OpenMayaMPx`` — this looks like an auto-generated API
    stub (e.g. for IDE completion), not a runnable implementation. The real
    behaviour lives in the compiled extension; see the Maya OpenMayaMPx docs.
    """
    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def addExternalContentForFileAttr(*args, **kwargs):
        pass

    def compute(*args, **kwargs):
        pass

    def connectionBroken(*args, **kwargs):
        pass

    def connectionMade(*args, **kwargs):
        pass

    def copyInternalData(*args, **kwargs):
        pass

    def existWithoutInConnections(*args, **kwargs):
        pass

    def existWithoutOutConnections(*args, **kwargs):
        pass

    def getExternalContent(*args, **kwargs):
        pass

    def getFilesToArchive(*args, **kwargs):
        pass

    def getInternalValue(*args, **kwargs):
        pass

    def getInternalValueInContext(*args, **kwargs):
        pass

    def internalArrayCount(*args, **kwargs):
        pass

    def isAbstractClass(*args, **kwargs):
        pass

    def isPassiveOutput(*args, **kwargs):
        pass

    def legalConnection(*args, **kwargs):
        pass

    def legalDisconnection(*args, **kwargs):
        pass

    def name(*args, **kwargs):
        pass

    def passThroughToMany(*args, **kwargs):
        pass

    def passThroughToOne(*args, **kwargs):
        pass

    def postConstructor(*args, **kwargs):
        pass

    def setDependentsDirty(*args, **kwargs):
        pass

    def setExistWithoutInConnections(*args, **kwargs):
        pass

    def setExistWithoutOutConnections(*args, **kwargs):
        pass

    def setExternalContent(*args, **kwargs):
        pass

    def setInternalValue(*args, **kwargs):
        pass

    def setInternalValueInContext(*args, **kwargs):
        pass

    def shouldSave(*args, **kwargs):
        pass

    def thisMObject(*args, **kwargs):
        pass

    def type(*args, **kwargs):
        pass

    def typeId(*args, **kwargs):
        pass

    def typeName(*args, **kwargs):
        pass

    def addAttribute(*args, **kwargs):
        pass

    def attributeAffects(*args, **kwargs):
        pass

    def className(*args, **kwargs):
        pass

    def inheritAttributesFrom(*args, **kwargs):
        pass

    # Placeholder attributes (populated by the compiled extension at runtime).
    __dict__ = None
    __weakref__ = None
    caching = None
    isHistoricallyInteresting = None
    message = None
    state = None
    thisown = None
    __swig_destroy__ = None

    # Node-type enumerants (kDependNode..kAssembly).
    kAssembly = 21
    kCameraSetNode = 16
    kClientDeviceNode = 19
    kConstraintNode = 17
    kDeformerNode = 2
    kDependNode = 0
    kEmitterNode = 6
    kFieldNode = 5
    kFluidEmitterNode = 13
    kHardwareShader = 9
    kHwShaderNode = 10
    kIkSolverNode = 8
    kImagePlaneNode = 14
    kLast = 22
    kLocatorNode = 1
    kManipContainer = 3
    kManipulatorNode = 18
    kObjectSet = 12
    kParticleAttributeMapperNode = 15
    kSpringNode = 7
    kSurfaceShape = 4
    kThreadedDeviceNode = 20
    kTransformNode = 11
class MPx3dModelView(_object):
    """Base class for user-defined 3D model viewport panels.

    NOTE(review): auto-generated stub — all method bodies are ``pass``; the
    real implementation is in the compiled ``_OpenMayaMPx`` extension.
    """
    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def backgroundColor(*args, **kwargs):
        pass

    def backgroundColorBottom(*args, **kwargs):
        pass

    def backgroundColorTop(*args, **kwargs):
        pass

    def beginGL(*args, **kwargs):
        pass

    def beginXorDrawing(*args, **kwargs):
        pass

    def colorAtIndex(*args, **kwargs):
        pass

    def customDraw(*args, **kwargs):
        pass

    def customDrawEnabled(*args, **kwargs):
        pass

    def destroyOnPanelDestruction(*args, **kwargs):
        pass

    def displayAxisAtOriginOn(*args, **kwargs):
        pass

    def displayAxisOn(*args, **kwargs):
        pass

    def displayCameraAnnotationOn(*args, **kwargs):
        pass

    def displayHUD(*args, **kwargs):
        pass

    def displayStyle(*args, **kwargs):
        pass

    def doUpdateOnMove(*args, **kwargs):
        pass

    def drawAdornments(*args, **kwargs):
        pass

    def drawAdornmentsNow(*args, **kwargs):
        pass

    def drawHUDNow(*args, **kwargs):
        pass

    def drawInterrupt(*args, **kwargs):
        pass

    def drawOnePass(*args, **kwargs):
        pass

    def drawText(*args, **kwargs):
        pass

    def endGL(*args, **kwargs):
        pass

    def endXorDrawing(*args, **kwargs):
        pass

    def filteredObjectList(*args, **kwargs):
        pass

    def fogColor(*args, **kwargs):
        pass

    def fogDensity(*args, **kwargs):
        pass

    def fogEnd(*args, **kwargs):
        pass

    def fogMode(*args, **kwargs):
        pass

    def fogSource(*args, **kwargs):
        pass

    def fogStart(*args, **kwargs):
        pass

    def getAsM3dView(*args, **kwargs):
        pass

    def getCamera(*args, **kwargs):
        pass

    def getCameraHUDName(*args, **kwargs):
        pass

    def getCameraSet(*args, **kwargs):
        pass

    def getColorIndexAndTable(*args, **kwargs):
        pass

    def getCurrentCameraSetCamera(*args, **kwargs):
        pass

    def getObjectsToView(*args, **kwargs):
        pass

    def handleDraw(*args, **kwargs):
        pass

    def hasStereoBufferSupport(*args, **kwargs):
        pass

    def includeInvisible(*args, **kwargs):
        pass

    def isBackfaceCulling(*args, **kwargs):
        pass

    def isBackgroundFogEnabled(*args, **kwargs):
        pass

    def isBackgroundGradient(*args, **kwargs):
        pass

    def isFogEnabled(*args, **kwargs):
        pass

    def isShadeActiveOnly(*args, **kwargs):
        pass

    def isTextureDisplayEnabled(*args, **kwargs):
        pass

    def isTwoSidedLighting(*args, **kwargs):
        pass

    def isVisible(*args, **kwargs):
        pass

    def isWireframeOnShaded(*args, **kwargs):
        pass

    def isXrayEnabled(*args, **kwargs):
        pass

    def lightingMode(*args, **kwargs):
        pass

    def multipleDrawEnabled(*args, **kwargs):
        pass

    def multipleDrawPassCount(*args, **kwargs):
        pass

    def name(*args, **kwargs):
        pass

    def numActiveColors(*args, **kwargs):
        pass

    def numDormantColors(*args, **kwargs):
        pass

    def numUserDefinedColors(*args, **kwargs):
        pass

    def objectDisplay(*args, **kwargs):
        pass

    def okForMultipleDraw(*args, **kwargs):
        pass

    def portHeight(*args, **kwargs):
        pass

    def portWidth(*args, **kwargs):
        pass

    def postMultipleDraw(*args, **kwargs):
        pass

    def postMultipleDrawPass(*args, **kwargs):
        pass

    def preMultipleDraw(*args, **kwargs):
        pass

    def preMultipleDrawPass(*args, **kwargs):
        pass

    def processDraw(*args, **kwargs):
        pass

    def refresh(*args, **kwargs):
        pass

    def removingCamera(*args, **kwargs):
        pass

    def requestOkForDraw(*args, **kwargs):
        pass

    def setBackfaceCulling(*args, **kwargs):
        pass

    def setBackgroundFogEnabled(*args, **kwargs):
        pass

    def setCamera(*args, **kwargs):
        pass

    def setCameraInDraw(*args, **kwargs):
        pass

    def setCameraSet(*args, **kwargs):
        pass

    def setCurrentCameraSetCamera(*args, **kwargs):
        pass

    def setCustomDrawEnable(*args, **kwargs):
        pass

    def setDestroyOnPanelDestruction(*args, **kwargs):
        pass

    def setDisplayAxis(*args, **kwargs):
        pass

    def setDisplayAxisAtOrigin(*args, **kwargs):
        pass

    def setDisplayCameraAnnotation(*args, **kwargs):
        pass

    def setDisplayHUD(*args, **kwargs):
        pass

    def setDisplayStyle(*args, **kwargs):
        pass

    def setDoUpdateOnMove(*args, **kwargs):
        pass

    def setDrawAdornments(*args, **kwargs):
        pass

    def setDrawCameraOverride(*args, **kwargs):
        pass

    def setDrawColor(*args, **kwargs):
        pass

    def setDrawInterrupt(*args, **kwargs):
        pass

    def setFogColor(*args, **kwargs):
        pass

    def setFogDensity(*args, **kwargs):
        pass

    def setFogEnabled(*args, **kwargs):
        pass

    def setFogEnd(*args, **kwargs):
        pass

    def setFogMode(*args, **kwargs):
        pass

    def setFogSource(*args, **kwargs):
        pass

    def setFogStart(*args, **kwargs):
        pass

    def setInStereoDrawMode(*args, **kwargs):
        pass

    def setIncludeInvisible(*args, **kwargs):
        pass

    def setLightingMode(*args, **kwargs):
        pass

    def setMultipleDrawEnable(*args, **kwargs):
        pass

    def setObjectDisplay(*args, **kwargs):
        pass

    def setObjectsToView(*args, **kwargs):
        pass

    def setTextureDisplayEnabled(*args, **kwargs):
        pass

    def setTwoSidedLighting(*args, **kwargs):
        pass

    def setUserDefinedColor(*args, **kwargs):
        pass

    def setViewSelected(*args, **kwargs):
        pass

    def setViewSelectedPrefix(*args, **kwargs):
        pass

    def setViewSelectedSet(*args, **kwargs):
        pass

    def setWireframeOnShaded(*args, **kwargs):
        pass

    def setXrayEnabled(*args, **kwargs):
        pass

    def templateColor(*args, **kwargs):
        pass

    def updateViewingParameters(*args, **kwargs):
        pass

    def userDefinedColorIndex(*args, **kwargs):
        pass

    def viewIsFiltered(*args, **kwargs):
        pass

    def viewSelected(*args, **kwargs):
        pass

    def viewSelectedPrefix(*args, **kwargs):
        pass

    def viewSelectedSet(*args, **kwargs):
        pass

    def viewToObjectSpace(*args, **kwargs):
        pass

    def viewToWorld(*args, **kwargs):
        pass

    def viewType(*args, **kwargs):
        pass

    def wantStereoGLBuffer(*args, **kwargs):
        pass

    def worldToView(*args, **kwargs):
        pass

    def className(*args, **kwargs):
        pass

    def getModelView(*args, **kwargs):
        pass

    # Placeholder attributes (populated by the compiled extension at runtime).
    __dict__ = None
    __weakref__ = None
    thisown = None
    __swig_destroy__ = None

    # Fog and lighting-mode enumerants.
    kFogCoordinate = 1
    kFogExponential = 1
    kFogExponentialSquared = 2
    kFogFragment = 0
    kFogLinear = 0
    kLightActive = 2
    kLightAll = 0
    kLightDefault = 3
    kLightNone = 4
    kLightQuality = 5
    kLightSelected = 1
class MPxFileTranslator(_object):
    """Base class for custom file import/export translators.

    NOTE(review): auto-generated stub — all method bodies are ``pass``.
    """
    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def allowMultipleFileOptimization(*args, **kwargs):
        pass

    def canBeOpened(*args, **kwargs):
        pass

    def defaultExtension(*args, **kwargs):
        pass

    def filter(*args, **kwargs):
        pass

    def haveNamespaceSupport(*args, **kwargs):
        pass

    def haveReadMethod(*args, **kwargs):
        pass

    def haveReferenceMethod(*args, **kwargs):
        pass

    def haveWriteMethod(*args, **kwargs):
        pass

    def identifyFile(*args, **kwargs):
        pass

    def reader(*args, **kwargs):
        pass

    def writer(*args, **kwargs):
        pass

    def fileAccessMode(*args, **kwargs):
        pass

    # Placeholder attributes (populated by the compiled extension at runtime).
    __dict__ = None
    __weakref__ = None
    thisown = None
    __swig_destroy__ = None

    # File-type identification and access-mode enumerants.
    kCouldBeMyFileType = 1
    kExportAccessMode = 5
    kExportActiveAccessMode = 6
    kImportAccessMode = 3
    kIsMyFileType = 0
    kNotMyFileType = 2
    kOpenAccessMode = 1
    kReferenceAccessMode = 2
    kSaveAccessMode = 4
    kUnknownAccessMode = 0
class MPxMaterialInformation(_object):
    """Base class for custom material-information providers.

    NOTE(review): auto-generated stub — all method bodies are ``pass``.
    """
    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def computeMaterial(*args, **kwargs):
        pass

    def connectAsTexture(*args, **kwargs):
        pass

    def materialInfoIsDirty(*args, **kwargs):
        pass

    def textureDisconnected(*args, **kwargs):
        pass

    def useMaterialAsTexture(*args, **kwargs):
        pass

    # Placeholder attributes (populated by the compiled extension at runtime).
    __dict__ = None
    __weakref__ = None
    fInstance = None
    thisown = None
    __swig_destroy__ = None

    # Material-type enumerants.
    kOverrideDraw = 2
    kSimpleMaterial = 0
    kTexture = 1
class new_instancemethod(_object):
    """
    instancemethod(function, instance, class)

    Create an instance method object.

    NOTE(review): stub mirroring the CPython 2 instancemethod type for the
    SWIG-generated bindings; bodies are ``pass`` placeholders.
    """
    def __call__(*args, **kwargs):
        """
        x.__call__(...) <==> x(...)
        """
        pass

    def __cmp__(*args, **kwargs):
        """
        x.__cmp__(y) <==> cmp(x,y)
        """
        pass

    def __delattr__(*args, **kwargs):
        """
        x.__delattr__('name') <==> del x.name
        """
        pass

    def __get__(*args, **kwargs):
        """
        descr.__get__(obj[, type]) -> value
        """
        pass

    def __getattribute__(*args, **kwargs):
        """
        x.__getattribute__('name') <==> x.name
        """
        pass

    def __hash__(*args, **kwargs):
        """
        x.__hash__() <==> hash(x)
        """
        pass

    def __repr__(*args, **kwargs):
        """
        x.__repr__() <==> repr(x)
        """
        pass

    def __setattr__(*args, **kwargs):
        """
        x.__setattr__('name', value) <==> x.name = value
        """
        pass

    # Placeholder attributes (populated at runtime).
    __func__ = None
    __self__ = None
    im_class = None
    im_func = None
    im_self = None
    __new__ = None
class MPxTransformationMatrix(_object):
    """Base class for custom transformation matrices used by custom
    transform nodes.

    NOTE(review): auto-generated stub — all method bodies are ``pass``.
    """
    def __disown__(self):
        pass

    def __eq__(*args, **kwargs):
        pass

    def __init__(self, *args):
        pass

    def __ne__(*args, **kwargs):
        pass

    def __repr__(self):
        pass

    def asInterpolationMatrix(*args, **kwargs):
        pass

    def asMatrix(*args, **kwargs):
        pass

    def asMatrixInverse(*args, **kwargs):
        pass

    def asRotateMatrix(*args, **kwargs):
        pass

    def asRotateMatrixInverse(*args, **kwargs):
        pass

    def asScaleMatrix(*args, **kwargs):
        pass

    def asScaleMatrixInverse(*args, **kwargs):
        pass

    def asTransformationMatrix(*args, **kwargs):
        pass

    def assign(*args, **kwargs):
        pass

    def copyValues(*args, **kwargs):
        pass

    def decomposeMatrix(*args, **kwargs):
        pass

    def eulerRotateOrientation(*args, **kwargs):
        pass

    def eulerRotation(*args, **kwargs):
        pass

    def isEquivalent(*args, **kwargs):
        pass

    def reverse(*args, **kwargs):
        pass

    def rotateBy(*args, **kwargs):
        pass

    def rotateOrientation(*args, **kwargs):
        pass

    def rotatePivot(*args, **kwargs):
        pass

    def rotatePivotTranslation(*args, **kwargs):
        pass

    def rotateTo(*args, **kwargs):
        pass

    def rotation(*args, **kwargs):
        pass

    def rotationOrder(*args, **kwargs):
        pass

    def scale(*args, **kwargs):
        pass

    def scaleBy(*args, **kwargs):
        pass

    def scalePivot(*args, **kwargs):
        pass

    def scalePivotTranslation(*args, **kwargs):
        pass

    def scaleTo(*args, **kwargs):
        pass

    def setRotateOrientation(*args, **kwargs):
        pass

    def setRotatePivot(*args, **kwargs):
        pass

    def setRotatePivotTranslation(*args, **kwargs):
        pass

    def setRotationOrder(*args, **kwargs):
        pass

    def setScalePivot(*args, **kwargs):
        pass

    def setScalePivotTranslation(*args, **kwargs):
        pass

    def shear(*args, **kwargs):
        pass

    def shearBy(*args, **kwargs):
        pass

    def shearTo(*args, **kwargs):
        pass

    def transformBy(*args, **kwargs):
        pass

    def translateBy(*args, **kwargs):
        pass

    def translateTo(*args, **kwargs):
        pass

    def translation(*args, **kwargs):
        pass

    def typeId(*args, **kwargs):
        pass

    def unSquishIt(*args, **kwargs):
        pass

    def unSquishMatrix(*args, **kwargs):
        pass

    def convertEulerRotationOrder(*args, **kwargs):
        pass

    def convertTransformationRotationOrder(*args, **kwargs):
        pass

    def creator(*args, **kwargs):
        pass

    # Placeholder attributes (populated by the compiled extension at runtime).
    __dict__ = None
    __weakref__ = None
    baseTransformationMatrixId = None
    thisown = None
    __swig_destroy__ = None
    identity = None
class MPxFileResolver(_object):
    """Base class for custom URI file resolvers.

    NOTE(review): auto-generated stub — all method bodies are ``pass``.
    """
    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def performAfterSaveURI(*args, **kwargs):
        pass

    def resolveURI(*args, **kwargs):
        pass

    def resolverName(*args, **kwargs):
        pass

    def uriScheme(*args, **kwargs):
        pass

    def className(*args, **kwargs):
        pass

    def findURIResolverByName(*args, **kwargs):
        pass

    def findURIResolverByScheme(*args, **kwargs):
        pass

    def getURIResolversByName(*args, **kwargs):
        pass

    def getURIResolversByScheme(*args, **kwargs):
        pass

    def numURIResolvers(*args, **kwargs):
        pass

    # Placeholder attributes (populated by the compiled extension at runtime).
    __dict__ = None
    __weakref__ = None
    thisown = None
    __swig_destroy__ = None

    # Resolver-mode enumerants.
    kInput = 2
    kNone = 1
class MPxGlBuffer(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def beginBufferNotify(*args, **kwargs):
pass
def bindFbo(*args, **kwargs):
pass
def close(*args, **kwargs):
pass
def closeFbo(*args, **kwargs):
pass
def endBufferNotify(*args, **kwargs):
pass
def openFbo(*args, **kwargs):
pass
def unbindFbo(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxSurfaceShapeUI(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def canDrawUV(*args, **kwargs):
pass
def draw(*args, **kwargs):
pass
def drawUV(*args, **kwargs):
pass
def getDrawData(*args, **kwargs):
pass
def getDrawRequests(*args, **kwargs):
pass
def material(*args, **kwargs):
pass
def materials(*args, **kwargs):
pass
def select(*args, **kwargs):
pass
def selectUV(*args, **kwargs):
pass
def snap(*args, **kwargs):
pass
def surfaceShape(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def surfaceShapeUI(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kSelectMeshEdges = 3
kSelectMeshFaces = 2
kSelectMeshUVs = 0
kSelectMeshVerts = 1
class MPxCacheFormat(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def beginReadChunk(*args, **kwargs):
pass
def beginWriteChunk(*args, **kwargs):
pass
def close(*args, **kwargs):
pass
def endReadChunk(*args, **kwargs):
pass
def endWriteChunk(*args, **kwargs):
pass
def extension(*args, **kwargs):
pass
def findChannelName(*args, **kwargs):
pass
def findTime(*args, **kwargs):
pass
def handlesDescription(*args, **kwargs):
pass
def isValid(*args, **kwargs):
pass
def open(*args, **kwargs):
pass
def readArraySize(*args, **kwargs):
pass
def readChannelName(*args, **kwargs):
pass
def readDescription(*args, **kwargs):
pass
def readDoubleArray(*args, **kwargs):
pass
def readDoubleVectorArray(*args, **kwargs):
pass
def readFloatArray(*args, **kwargs):
pass
def readFloatVectorArray(*args, **kwargs):
pass
def readHeader(*args, **kwargs):
pass
def readInt32(*args, **kwargs):
pass
def readIntArray(*args, **kwargs):
pass
def readNextTime(*args, **kwargs):
pass
def readTime(*args, **kwargs):
pass
def rewind(*args, **kwargs):
pass
def writeChannelName(*args, **kwargs):
pass
def writeDescription(*args, **kwargs):
pass
def writeDoubleArray(*args, **kwargs):
pass
def writeDoubleVectorArray(*args, **kwargs):
pass
def writeFloatArray(*args, **kwargs):
pass
def writeFloatVectorArray(*args, **kwargs):
pass
def writeHeader(*args, **kwargs):
pass
def writeInt32(*args, **kwargs):
pass
def writeIntArray(*args, **kwargs):
pass
def writeTime(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kRead = 0
kReadWrite = 2
kWrite = 1
class MPxGeometryIterator(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def component(*args, **kwargs):
pass
def currentPoint(*args, **kwargs):
pass
def geometry(*args, **kwargs):
pass
def hasNormals(*args, **kwargs):
pass
def hasPoints(*args, **kwargs):
pass
def index(*args, **kwargs):
pass
def indexUnsimplified(*args, **kwargs):
pass
def isDone(*args, **kwargs):
pass
def iteratorCount(*args, **kwargs):
pass
def maxPoints(*args, **kwargs):
pass
def next(*args, **kwargs):
pass
def point(*args, **kwargs):
pass
def reset(*args, **kwargs):
pass
def setCurrentPoint(*args, **kwargs):
pass
def setMaxPoints(*args, **kwargs):
pass
def setObject(*args, **kwargs):
pass
def setPoint(*args, **kwargs):
pass
def setPointGetNext(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxMayaAsciiFilterOutput(_object):
def __init__(self, *args):
pass
def __lshift__(*args, **kwargs):
pass
def __repr__(self):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxDragAndDropBehavior(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectAttrToAttr(*args, **kwargs):
pass
def connectAttrToNode(*args, **kwargs):
pass
def connectNodeToAttr(*args, **kwargs):
pass
def connectNodeToNode(*args, **kwargs):
pass
def shouldBeUsedFor(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxBakeEngine(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def bake(*args, **kwargs):
pass
def getUVRange(*args, **kwargs):
pass
def setNeedTransparency(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
fInstance = None
thisown = None
__swig_destroy__ = None
class MPxImageFile(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def close(*args, **kwargs):
pass
def glLoad(*args, **kwargs):
pass
def load(*args, **kwargs):
pass
def open(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxCommand(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def commandString(*args, **kwargs):
pass
def doIt(*args, **kwargs):
pass
def hasSyntax(*args, **kwargs):
pass
def isHistoryOn(*args, **kwargs):
pass
def isUndoable(*args, **kwargs):
pass
def redoIt(*args, **kwargs):
pass
def setCommandString(*args, **kwargs):
pass
def setHistoryOn(*args, **kwargs):
pass
def setUndoable(*args, **kwargs):
pass
def syntax(*args, **kwargs):
pass
def undoIt(*args, **kwargs):
pass
def appendToResult(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def clearResult(*args, **kwargs):
pass
def currentDoubleResult(*args, **kwargs):
pass
def currentIntResult(*args, **kwargs):
pass
def currentResultType(*args, **kwargs):
pass
def currentStringResult(*args, **kwargs):
pass
def displayError(*args, **kwargs):
pass
def displayInfo(*args, **kwargs):
pass
def displayWarning(*args, **kwargs):
pass
def getCurrentResult(*args, **kwargs):
pass
def isCurrentResultArray(*args, **kwargs):
pass
def setResult(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kDouble = 1
kLong = 0
kNoArg = 3
kString = 2
class MPxRepresentation(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def activate(*args, **kwargs):
pass
def canApplyEdits(*args, **kwargs):
pass
def getExternalContent(*args, **kwargs):
pass
def getName(*args, **kwargs):
pass
def getType(*args, **kwargs):
pass
def inactivate(*args, **kwargs):
pass
def setExternalContent(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MExternalContentInfoTable(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def addResolvedEntry(*args, **kwargs):
pass
def addUnresolvedEntry(*args, **kwargs):
pass
def getEntryByIndex(*args, **kwargs):
pass
def getInfoByKey(*args, **kwargs):
pass
def length(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxControlCommand(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def appendSyntax(*args, **kwargs):
pass
def clearResult(*args, **kwargs):
pass
def doEditFlags(*args, **kwargs):
pass
def doQueryFlags(*args, **kwargs):
pass
def makeControl(*args, **kwargs):
pass
def setResult(*args, **kwargs):
pass
def skipFlagForCreate(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MFnPlugin(OpenMaya.MFnBase):
def __init__(self, *args):
pass
def __repr__(self):
pass
def addMenuItem(*args, **kwargs):
pass
def apiVersion(*args, **kwargs):
pass
def deregisterAttributePatternFactory(*args, **kwargs):
pass
def deregisterCacheFormat(*args, **kwargs):
pass
def deregisterCommand(*args, **kwargs):
pass
def deregisterConstraintCommand(*args, **kwargs):
pass
def deregisterContextCommand(*args, **kwargs):
pass
def deregisterControlCommand(*args, **kwargs):
pass
def deregisterData(*args, **kwargs):
pass
def deregisterDevice(*args, **kwargs):
pass
def deregisterDisplayFilter(*args, **kwargs):
pass
def deregisterDragAndDropBehavior(*args, **kwargs):
pass
def deregisterFileTranslator(*args, **kwargs):
pass
def deregisterIkSolver(*args, **kwargs):
pass
def deregisterImageFile(*args, **kwargs):
pass
def deregisterModelEditorCommand(*args, **kwargs):
pass
def deregisterNode(*args, **kwargs):
pass
def deregisterRenderPassImpl(*args, **kwargs):
pass
def deregisterURIFileResolver(*args, **kwargs):
pass
def loadPath(*args, **kwargs):
pass
def matrixTypeIdFromXformId(*args, **kwargs):
pass
def name(*args, **kwargs):
pass
def registerAttributePatternFactory(*args, **kwargs):
pass
def registerBakeEngine(*args, **kwargs):
pass
def registerCacheFormat(*args, **kwargs):
pass
def registerCommand(*args, **kwargs):
pass
def registerConstraintCommand(*args, **kwargs):
pass
def registerContextCommand(*args, **kwargs):
pass
def registerControlCommand(*args, **kwargs):
pass
def registerData(*args, **kwargs):
pass
def registerDevice(*args, **kwargs):
pass
def registerDisplayFilter(*args, **kwargs):
pass
def registerDragAndDropBehavior(*args, **kwargs):
pass
def registerFileTranslator(*args, **kwargs):
pass
def registerIkSolver(*args, **kwargs):
pass
def registerImageFile(*args, **kwargs):
pass
def registerMaterialInfo(*args, **kwargs):
pass
def registerModelEditorCommand(*args, **kwargs):
pass
def registerNode(*args, **kwargs):
pass
def registerRenderPassImpl(*args, **kwargs):
pass
def registerShape(*args, **kwargs):
pass
def registerTransform(*args, **kwargs):
pass
def registerUI(*args, **kwargs):
pass
def registerUIStrings(*args, **kwargs):
pass
def registerURIFileResolver(*args, **kwargs):
pass
def removeMenuItem(*args, **kwargs):
pass
def setName(*args, **kwargs):
pass
def setVersion(*args, **kwargs):
pass
def unregisterBakeEngine(*args, **kwargs):
pass
def unregisterMaterialInfo(*args, **kwargs):
pass
def vendor(*args, **kwargs):
pass
def version(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def findPlugin(*args, **kwargs):
pass
def isNodeRegistered(*args, **kwargs):
pass
def registeringCallableScript(*args, **kwargs):
pass
def setRegisteringCallableScript(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kDefaultDataLocation = None
class MPxModelEditorCommand(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def appendSyntax(*args, **kwargs):
pass
def doEditFlags(*args, **kwargs):
pass
def doQueryFlags(*args, **kwargs):
pass
def editorCommandName(*args, **kwargs):
pass
def editorMenuScriptName(*args, **kwargs):
pass
def makeModelView(*args, **kwargs):
pass
def modelView(*args, **kwargs):
pass
def setResult(*args, **kwargs):
pass
def skipFlagForCreate(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxRenderPassImpl(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def frameBufferSemantic(*args, **kwargs):
pass
def getDefaultType(*args, **kwargs):
pass
def getNumChannels(*args, **kwargs):
pass
def isCompatible(*args, **kwargs):
pass
def perLightPassContributionSupported(*args, **kwargs):
pass
def typesSupported(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kBit = 2048
kColorSemantic = 1
kDepthSemantic = 4
kDirectionVectorSemantic = 3
kFloat16 = 256
kFloat32 = 512
kFloat64 = 1024
kInt16 = 32
kInt32 = 64
kInt64 = 128
kInt8 = 16
kInvalidSemantic = 0
kLabelSemantic = 5
kMaskSemantic = 6
kOther = 4096
kOtherSemantic = 7
kUInt16 = 2
kUInt32 = 4
kUInt64 = 8
kUInt8 = 1
kVectorSemantic = 2
class MPxEditData(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def isEqual(*args, **kwargs):
pass
def isLessThan(*args, **kwargs):
pass
def performIsEqual(*args, **kwargs):
pass
def performIsLessThan(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxContextCommand(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def appendSyntax(*args, **kwargs):
pass
def doEditFlags(*args, **kwargs):
pass
def doQueryFlags(*args, **kwargs):
pass
def makeObj(*args, **kwargs):
pass
def setResult(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxMidiInputDevice(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def closeDevice(*args, **kwargs):
pass
def deviceState(*args, **kwargs):
pass
def doButtonEvents(*args, **kwargs):
pass
def doMovementEvents(*args, **kwargs):
pass
def getMessage(*args, **kwargs):
pass
def nameAxes(*args, **kwargs):
pass
def nameButtons(*args, **kwargs):
pass
def openDevice(*args, **kwargs):
pass
def sendMessage(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MaterialInputData(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
__dict__ = None
__weakref__ = None
ambient = None
diffuse = None
emission = None
hasTransparency = None
shininess = None
specular = None
thisown = None
__swig_destroy__ = None
class MPxData(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def copy(*args, **kwargs):
pass
def name(*args, **kwargs):
pass
def readASCII(*args, **kwargs):
pass
def readBinary(*args, **kwargs):
pass
def typeId(*args, **kwargs):
pass
def writeASCII(*args, **kwargs):
pass
def writeBinary(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kData = 0
kGeometryData = 1
kLast = 2
class MPxContext(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def abortAction(*args, **kwargs):
pass
def addManipulator(*args, **kwargs):
pass
def argTypeNumericalInput(*args, **kwargs):
pass
def completeAction(*args, **kwargs):
pass
def deleteAction(*args, **kwargs):
pass
def deleteManipulators(*args, **kwargs):
pass
def doDrag(*args, **kwargs):
pass
def doEnterRegion(*args, **kwargs):
pass
def doHold(*args, **kwargs):
pass
def doPress(*args, **kwargs):
pass
def doRelease(*args, **kwargs):
pass
def feedbackNumericalInput(*args, **kwargs):
pass
def helpStateHasChanged(*args, **kwargs):
pass
def image(*args, **kwargs):
pass
def newToolCommand(*args, **kwargs):
pass
def processNumericalInput(*args, **kwargs):
pass
def setImage(*args, **kwargs):
pass
def stringClassName(*args, **kwargs):
pass
def toolOffCleanup(*args, **kwargs):
pass
def toolOnSetup(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kImage1 = 0
kImage2 = 1
kImage3 = 2
class MPxUIControl(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxAttributePatternFactory(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def createPatternsFromFile(*args, **kwargs):
pass
def createPatternsFromString(*args, **kwargs):
pass
def name(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MExternalContentLocationTable(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def addEntry(*args, **kwargs):
pass
def getEntryByIndex(*args, **kwargs):
pass
def getLocationByKey(*args, **kwargs):
pass
def length(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MPxTransform(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def applyRotateOrientationLocks(*args, **kwargs):
pass
def applyRotatePivotLocks(*args, **kwargs):
pass
def applyRotatePivotLocksTranslate(*args, **kwargs):
pass
def applyRotationLimits(*args, **kwargs):
pass
def applyRotationLocks(*args, **kwargs):
pass
def applyScaleLimits(*args, **kwargs):
pass
def applyScaleLocks(*args, **kwargs):
pass
def applyScaleLocksPivot(*args, **kwargs):
pass
def applyScaleLocksPivotTranslate(*args, **kwargs):
pass
def applyShearLocks(*args, **kwargs):
pass
def applyTranslationLimits(*args, **kwargs):
pass
def applyTranslationLocks(*args, **kwargs):
pass
def boundingBox(*args, **kwargs):
pass
def checkAndSetRotateOrientation(*args, **kwargs):
pass
def checkAndSetRotatePivot(*args, **kwargs):
pass
def checkAndSetRotatePivotTranslation(*args, **kwargs):
pass
def checkAndSetRotation(*args, **kwargs):
pass
def checkAndSetScale(*args, **kwargs):
pass
def checkAndSetScalePivot(*args, **kwargs):
pass
def checkAndSetScalePivotTranslation(*args, **kwargs):
pass
def checkAndSetShear(*args, **kwargs):
pass
def checkAndSetTranslation(*args, **kwargs):
pass
def clearLimits(*args, **kwargs):
pass
def compute(*args, **kwargs):
pass
def computeLocalTransformation(*args, **kwargs):
pass
def copyInternalData(*args, **kwargs):
pass
def createTransformationMatrix(*args, **kwargs):
pass
def enableLimit(*args, **kwargs):
pass
def getEulerRotation(*args, **kwargs):
pass
def getMatrix(*args, **kwargs):
pass
def getMatrixInverse(*args, **kwargs):
pass
def getRotateOrientation(*args, **kwargs):
pass
def getRotatePivot(*args, **kwargs):
pass
def getRotatePivotTranslation(*args, **kwargs):
pass
def getRotation(*args, **kwargs):
pass
def getRotationOrder(*args, **kwargs):
pass
def getScale(*args, **kwargs):
pass
def getScalePivot(*args, **kwargs):
pass
def getScalePivotTranslation(*args, **kwargs):
pass
def getShear(*args, **kwargs):
pass
def getTranslation(*args, **kwargs):
pass
def isBounded(*args, **kwargs):
pass
def isLimited(*args, **kwargs):
pass
def limitValue(*args, **kwargs):
pass
def postConstructor(*args, **kwargs):
pass
def resetTransformation(*args, **kwargs):
pass
def rotateBy(*args, **kwargs):
pass
def rotateTo(*args, **kwargs):
pass
def scaleBy(*args, **kwargs):
pass
def scaleTo(*args, **kwargs):
pass
def setLimit(*args, **kwargs):
pass
def setRotateOrientation(*args, **kwargs):
pass
def setRotatePivot(*args, **kwargs):
pass
def setRotatePivotTranslation(*args, **kwargs):
pass
def setRotationOrder(*args, **kwargs):
pass
def setScalePivot(*args, **kwargs):
pass
def setScalePivotTranslation(*args, **kwargs):
pass
def shearBy(*args, **kwargs):
pass
def shearTo(*args, **kwargs):
pass
def transformationMatrix(*args, **kwargs):
pass
def transformationMatrixPtr(*args, **kwargs):
pass
def translateBy(*args, **kwargs):
pass
def translateTo(*args, **kwargs):
pass
def treatAsTransform(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def updateMatrixAttrs(*args, **kwargs):
pass
def validateAndSetValue(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def isNonAffineMatricesEnabled(*args, **kwargs):
pass
def mustCallValidateAndSet(*args, **kwargs):
pass
def setNonAffineMatricesEnabled(*args, **kwargs):
pass
boundingBoxCenterX = None
boundingBoxCenterY = None
boundingBoxCenterZ = None
center = None
displayHandle = None
displayLocalAxis = None
displayRotatePivot = None
displayScalePivot = None
drawOverride = None
dynamics = None
geometry = None
ghosting = None
identification = None
inheritsTransform = None
instObjGroups = None
intermediateObject = None
inverseMatrix = None
isTemplated = None
layerOverrideColor = None
layerRenderable = None
lodVisibility = None
matrix = None
maxRotLimit = None
maxRotLimitEnable = None
maxRotXLimit = None
maxRotXLimitEnable = None
maxRotYLimit = None
maxRotYLimitEnable = None
maxRotZLimit = None
maxRotZLimitEnable = None
maxScaleLimit = None
maxScaleLimitEnable = None
maxScaleXLimit = None
maxScaleXLimitEnable = None
maxScaleYLimit = None
maxScaleYLimitEnable = None
maxScaleZLimit = None
maxScaleZLimitEnable = None
maxTransLimit = None
maxTransLimitEnable = None
maxTransXLimit = None
maxTransXLimitEnable = None
maxTransYLimit = None
maxTransYLimitEnable = None
maxTransZLimit = None
maxTransZLimitEnable = None
minRotLimit = None
minRotLimitEnable = None
minRotXLimit = None
minRotXLimitEnable = None
minRotYLimit = None
minRotYLimitEnable = None
minRotZLimit = None
minRotZLimitEnable = None
minScaleLimit = None
minScaleLimitEnable = None
minScaleXLimit = None
minScaleXLimitEnable = None
minScaleYLimit = None
minScaleYLimitEnable = None
minScaleZLimit = None
minScaleZLimitEnable = None
minTransLimit = None
minTransLimitEnable = None
minTransXLimit = None
minTransXLimitEnable = None
minTransYLimit = None
minTransYLimitEnable = None
minTransZLimit = None
minTransZLimitEnable = None
nodeBoundingBox = None
nodeBoundingBoxMax = None
nodeBoundingBoxMaxX = None
nodeBoundingBoxMaxY = None
nodeBoundingBoxMaxZ = None
nodeBoundingBoxMin = None
nodeBoundingBoxMinX = None
nodeBoundingBoxMinY = None
nodeBoundingBoxMinZ = None
nodeBoundingBoxSize = None
nodeBoundingBoxSizeX = None
nodeBoundingBoxSizeY = None
nodeBoundingBoxSizeZ = None
objectColor = None
objectGroupColor = None
objectGroupId = None
objectGroups = None
objectGrpCompList = None
overrideColor = None
overrideDisplayType = None
overrideEnabled = None
overrideLevelOfDetail = None
overridePlayback = None
overrideShading = None
overrideTexturing = None
overrideVisibility = None
parentInverseMatrix = None
parentMatrix = None
renderInfo = None
renderLayerColor = None
renderLayerId = None
renderLayerInfo = None
renderLayerRenderable = None
rotate = None
rotateAxis = None
rotateAxisX = None
rotateAxisY = None
rotateAxisZ = None
rotateOrder = None
rotatePivot = None
rotatePivotTranslate = None
rotatePivotTranslateX = None
rotatePivotTranslateY = None
rotatePivotTranslateZ = None
rotatePivotX = None
rotatePivotY = None
rotatePivotZ = None
rotateQuaternion = None
rotateQuaternionW = None
rotateQuaternionX = None
rotateQuaternionY = None
rotateQuaternionZ = None
rotateX = None
rotateY = None
rotateZ = None
rotationInterpolation = None
scale = None
scalePivot = None
scalePivotTranslate = None
scalePivotTranslateX = None
scalePivotTranslateY = None
scalePivotTranslateZ = None
scalePivotX = None
scalePivotY = None
scalePivotZ = None
scaleX = None
scaleY = None
scaleZ = None
selectHandle = None
selectHandleX = None
selectHandleY = None
selectHandleZ = None
shear = None
shearXY = None
shearXZ = None
shearYZ = None
showManipDefault = None
specifiedManipLocation = None
thisown = None
transMinusRotatePivot = None
transMinusRotatePivotX = None
transMinusRotatePivotY = None
transMinusRotatePivotZ = None
translate = None
translateX = None
translateY = None
translateZ = None
useObjectColor = None
visibility = None
worldInverseMatrix = None
worldMatrix = None
xformMatrix = None
__swig_destroy__ = None
class MPxLocatorNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def boundingBox(*args, **kwargs):
pass
def closestPoint(*args, **kwargs):
pass
def color(*args, **kwargs):
pass
def colorRGB(*args, **kwargs):
pass
def draw(*args, **kwargs):
pass
def drawLast(*args, **kwargs):
pass
def excludeAsLocator(*args, **kwargs):
pass
def isBounded(*args, **kwargs):
pass
def isTransparent(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def useClosestPointForSelection(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
boundingBoxCenterX = None
boundingBoxCenterY = None
boundingBoxCenterZ = None
center = None
instObjGroups = None
intermediateObject = None
inverseMatrix = None
isTemplated = None
localPosition = None
localPositionX = None
localPositionY = None
localPositionZ = None
localScale = None
localScaleX = None
localScaleY = None
localScaleZ = None
matrix = None
nodeBoundingBox = None
nodeBoundingBoxMax = None
nodeBoundingBoxMaxX = None
nodeBoundingBoxMaxY = None
nodeBoundingBoxMaxZ = None
nodeBoundingBoxMin = None
nodeBoundingBoxMinX = None
nodeBoundingBoxMinY = None
nodeBoundingBoxMinZ = None
nodeBoundingBoxSize = None
nodeBoundingBoxSizeX = None
nodeBoundingBoxSizeY = None
nodeBoundingBoxSizeZ = None
objectColor = None
objectGroupColor = None
objectGroupId = None
objectGroups = None
objectGrpCompList = None
parentInverseMatrix = None
parentMatrix = None
thisown = None
underWorldObject = None
useObjectColor = None
visibility = None
worldInverseMatrix = None
worldMatrix = None
worldPosition = None
worldPositionX = None
worldPositionY = None
worldPositionZ = None
__swig_destroy__ = None
class MPxToolCommand(MPxCommand):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def cancel(*args, **kwargs):
pass
def doIt(*args, **kwargs):
pass
def finalize(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MPxEmitterNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def compute(*args, **kwargs):
pass
def draw(*args, **kwargs):
pass
def evalEmission2dTexture(*args, **kwargs):
pass
def getCurrentTime(*args, **kwargs):
pass
def getDeltaTime(*args, **kwargs):
pass
def getEmitterType(*args, **kwargs):
pass
def getMaxDistance(*args, **kwargs):
pass
def getMinDistance(*args, **kwargs):
pass
def getOwnerShape(*args, **kwargs):
pass
def getRandomSeed(*args, **kwargs):
pass
def getRandomState(*args, **kwargs):
pass
def getRate(*args, **kwargs):
pass
def getStartTime(*args, **kwargs):
pass
def getWorldMatrix(*args, **kwargs):
pass
def getWorldPosition(*args, **kwargs):
pass
def hasValidEmission2dTexture(*args, **kwargs):
pass
def randgen(*args, **kwargs):
pass
def resetRandomState(*args, **kwargs):
pass
def setRandomState(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def volumePrimitiveBoundingBox(*args, **kwargs):
pass
def volumePrimitiveDistanceFromAxis(*args, **kwargs):
pass
def volumePrimitivePointInside(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
mCurrentTime = None
mDeltaTime = None
mDirection = None
mDirectionX = None
mDirectionY = None
mDirectionZ = None
mEmitterType = None
mInheritFactor = None
mIsFull = None
mMaxDistance = None
mMinDistance = None
mOutput = None
mOwnerCentroid = None
mOwnerCentroidX = None
mOwnerCentroidY = None
mOwnerCentroidZ = None
mOwnerPosData = None
mOwnerVelData = None
mRandState = None
mRandStateX = None
mRandStateY = None
mRandStateZ = None
mRate = None
mSeed = None
mSpeed = None
mStartTime = None
mSweptGeometry = None
mWorldMatrix = None
thisown = None
__swig_destroy__ = None
kCurve = 3
kDirectional = 0
kOmni = 1
kSurface = 2
kVolume = 4
class MPxDeformerNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def accessoryAttribute(*args, **kwargs):
pass
def accessoryNodeSetup(*args, **kwargs):
pass
def deform(*args, **kwargs):
pass
def getDeformationDetails(*args, **kwargs):
pass
def setDeformationDetails(*args, **kwargs):
pass
def setModifiedCallback(*args, **kwargs):
pass
def setUseExistingConnectionWhenSetEditing(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def weightValue(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
envelope = None
groupId = None
input = None
inputGeom = None
outputGeom = None
thisown = None
weightList = None
weights = None
__swig_destroy__ = None
kDeformsAll = 6
kDeformsColors = 4
kDeformsUVs = 2
class MPxMayaAsciiFilter(MPxFileTranslator):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def haveReadMethod(*args, **kwargs):
pass
def haveWriteMethod(*args, **kwargs):
pass
def processReadOptions(*args, **kwargs):
pass
def processWriteOptions(*args, **kwargs):
pass
def reader(*args, **kwargs):
pass
def writePostConnectAttrsBlock(*args, **kwargs):
pass
def writePostCreateNodesBlock(*args, **kwargs):
pass
def writePostHeader(*args, **kwargs):
pass
def writePostRequires(*args, **kwargs):
pass
def writePreConnectAttrsBlock(*args, **kwargs):
pass
def writePreCreateNodesBlock(*args, **kwargs):
pass
def writePreTrailer(*args, **kwargs):
pass
def writer(*args, **kwargs):
pass
def writesConnectAttr(*args, **kwargs):
pass
def writesCreateNode(*args, **kwargs):
pass
def writesDisconnectAttr(*args, **kwargs):
pass
def writesFileReference(*args, **kwargs):
pass
def writesMetadata(*args, **kwargs):
pass
def writesParentNode(*args, **kwargs):
pass
def writesRequirements(*args, **kwargs):
pass
def writesSelectNode(*args, **kwargs):
pass
def writesSetAttr(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MPxConstraintCommand(MPxCommand):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def aimVectorAttribute(*args, **kwargs):
pass
def appendSyntax(*args, **kwargs):
pass
def connectObjectAndConstraint(*args, **kwargs):
pass
def connectTarget(*args, **kwargs):
pass
def constraintEnableRestAttribute(*args, **kwargs):
pass
def constraintInstancedAttribute(*args, **kwargs):
pass
def constraintNode(*args, **kwargs):
pass
def constraintOutputAttribute(*args, **kwargs):
pass
def constraintRestAttribute(*args, **kwargs):
pass
def constraintTargetAttribute(*args, **kwargs):
pass
def constraintTargetInstancedAttribute(*args, **kwargs):
pass
def constraintTargetWeightAttribute(*args, **kwargs):
pass
def constraintTypeId(*args, **kwargs):
pass
def createdConstraint(*args, **kwargs):
pass
def doCreate(*args, **kwargs):
pass
def doEdit(*args, **kwargs):
pass
def doIt(*args, **kwargs):
pass
def doQuery(*args, **kwargs):
pass
def getObjectAttributesArray(*args, **kwargs):
pass
def handleNewTargets(*args, **kwargs):
pass
def hasVectorFlags(*args, **kwargs):
pass
def objectAttribute(*args, **kwargs):
pass
def offsetAttribute(*args, **kwargs):
pass
def parseArgs(*args, **kwargs):
pass
def redoIt(*args, **kwargs):
pass
def supportsOffset(*args, **kwargs):
pass
def targetType(*args, **kwargs):
pass
def undoIt(*args, **kwargs):
pass
def upVectorAttribute(*args, **kwargs):
pass
def worldUpMatrixAttribute(*args, **kwargs):
pass
def worldUpTypeAttribute(*args, **kwargs):
pass
def worldUpVectorAttribute(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kGeometryShape = 1
kLast = 2
kTransform = 0
class MPxCameraSet(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
active = None
camera = None
cameraLayer = None
order = None
sceneData = None
thisown = None
__swig_destroy__ = None
class MPxManipulatorNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def addDependentPlug(*args, **kwargs):
pass
def addDoubleValue(*args, **kwargs):
pass
def addPointValue(*args, **kwargs):
pass
def addVectorValue(*args, **kwargs):
pass
def colorAndName(*args, **kwargs):
pass
def connectPlugToValue(*args, **kwargs):
pass
def connectToDependNode(*args, **kwargs):
pass
def dependentPlugsReset(*args, **kwargs):
pass
def deregisterForMouseMove(*args, **kwargs):
pass
def dimmedColor(*args, **kwargs):
pass
def doDrag(*args, **kwargs):
pass
def doMove(*args, **kwargs):
pass
def doPress(*args, **kwargs):
pass
def doRelease(*args, **kwargs):
pass
def draw(*args, **kwargs):
pass
def finishAddingManips(*args, **kwargs):
pass
def getDoubleValue(*args, **kwargs):
pass
def getInstancePtr(*args, **kwargs):
pass
def getPointValue(*args, **kwargs):
pass
def getVectorValue(*args, **kwargs):
pass
def glActiveName(*args, **kwargs):
pass
def glFirstHandle(*args, **kwargs):
pass
def labelBackgroundColor(*args, **kwargs):
pass
def labelColor(*args, **kwargs):
pass
def lineColor(*args, **kwargs):
pass
def mainColor(*args, **kwargs):
pass
def mouseDown(*args, **kwargs):
pass
def mousePosition(*args, **kwargs):
pass
def mouseRay(*args, **kwargs):
pass
def mouseRayWorld(*args, **kwargs):
pass
def mouseUp(*args, **kwargs):
pass
def prevColor(*args, **kwargs):
pass
def registerForMouseMove(*args, **kwargs):
pass
def selectedColor(*args, **kwargs):
pass
def setDoubleValue(*args, **kwargs):
pass
def setInstancePtr(*args, **kwargs):
pass
def setPointValue(*args, **kwargs):
pass
def setVectorValue(*args, **kwargs):
pass
def xColor(*args, **kwargs):
pass
def yColor(*args, **kwargs):
pass
def zColor(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def newManipulator(*args, **kwargs):
pass
connectedNodes = None
thisown = None
__swig_destroy__ = None
class MPxManipContainer(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def addCircleSweepManip(*args, **kwargs):
pass
def addCurveSegmentManip(*args, **kwargs):
pass
def addDirectionManip(*args, **kwargs):
pass
def addDiscManip(*args, **kwargs):
pass
def addDistanceManip(*args, **kwargs):
pass
def addFreePointTriadManip(*args, **kwargs):
pass
def addMPxManipulatorNode(*args, **kwargs):
pass
def addManipToPlugConversion(*args, **kwargs):
pass
def addPlugToManipConversion(*args, **kwargs):
pass
def addPointOnCurveManip(*args, **kwargs):
pass
def addPointOnSurfaceManip(*args, **kwargs):
pass
def addRotateManip(*args, **kwargs):
pass
def addScaleManip(*args, **kwargs):
pass
def addStateManip(*args, **kwargs):
pass
def addToggleManip(*args, **kwargs):
pass
def connectToDependNode(*args, **kwargs):
pass
def createChildren(*args, **kwargs):
pass
def doDrag(*args, **kwargs):
pass
def doPress(*args, **kwargs):
pass
def doRelease(*args, **kwargs):
pass
def draw(*args, **kwargs):
pass
def finishAddingManips(*args, **kwargs):
pass
def getConverterManipValue(*args, **kwargs):
pass
def getConverterPlugValue(*args, **kwargs):
pass
def isManipActive(*args, **kwargs):
pass
def manipToPlugConversion(*args, **kwargs):
pass
def plugToManipConversion(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def addToManipConnectTable(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def initialize(*args, **kwargs):
pass
def newManipulator(*args, **kwargs):
pass
def removeFromManipConnectTable(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kCircleSweepManip = 6
kCurveSegmentManip = 9
kCustomManip = 10
kDirectionManip = 1
kDiscManip = 5
kDistanceManip = 2
kFreePointTriadManip = 0
kPointOnCurveManip = 3
kPointOnSurfaceManip = 4
kStateManip = 8
kToggleManip = 7
class MPxPolyTrg(MPxNode):
    """Auto-generated API stub for Maya's MPxPolyTrg node.

    All methods are placeholder ``pass`` bodies with no runtime behavior;
    the real implementation lives in the compiled Maya binding.
    """

    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def compute(*args, **kwargs):
        pass

    def isAbstractClass(*args, **kwargs):
        pass

    def postConstructor(*args, **kwargs):
        pass

    def registerTrgFunction(*args, **kwargs):
        pass

    def unregisterTrgFunction(*args, **kwargs):
        pass

    # SWIG bookkeeping placeholders.
    thisown = None
    __swig_destroy__ = None
class MPxSelectionContext(MPxContext):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def abortAction(*args, **kwargs):
pass
def addManipulator(*args, **kwargs):
pass
def argTypeNumericalInput(*args, **kwargs):
pass
def deleteManipulators(*args, **kwargs):
pass
def doDrag(*args, **kwargs):
pass
def doHold(*args, **kwargs):
pass
def doPress(*args, **kwargs):
pass
def doRelease(*args, **kwargs):
pass
def feedbackNumericalInput(*args, **kwargs):
pass
def helpStateHasChanged(*args, **kwargs):
pass
def image(*args, **kwargs):
pass
def newToolCommand(*args, **kwargs):
pass
def processNumericalInput(*args, **kwargs):
pass
def setImage(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MPxHardwareShader(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def profile(*args, **kwargs):
pass
def render(*args, **kwargs):
pass
def renderSwatchImage(*args, **kwargs):
pass
def setUniformParameters(*args, **kwargs):
pass
def setVaryingParameters(*args, **kwargs):
pass
def transparencyOptions(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def findResource(*args, **kwargs):
pass
def getHardwareShaderPtr(*args, **kwargs):
pass
outColor = None
outColorB = None
outColorG = None
outColorR = None
thisown = None
__swig_destroy__ = None
kIsTransparent = 1
kNoTransparencyFrontBackCull = 2
kNoTransparencyPolygonSort = 4
class MPxAssembly(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def activate(*args, **kwargs):
pass
def activateRep(*args, **kwargs):
pass
def activating(*args, **kwargs):
pass
def addAddAttrEdit(*args, **kwargs):
pass
def addConnectAttrEdit(*args, **kwargs):
pass
def addDeleteAttrEdit(*args, **kwargs):
pass
def addDisconnectAttrEdit(*args, **kwargs):
pass
def addEdits(*args, **kwargs):
pass
def addParentEdit(*args, **kwargs):
pass
def addSetAttrEdit(*args, **kwargs):
pass
def applyEdits(*args, **kwargs):
pass
def beforeSave(*args, **kwargs):
pass
def canRepApplyEdits(*args, **kwargs):
pass
def createRepresentation(*args, **kwargs):
pass
def deleteAllRepresentations(*args, **kwargs):
pass
def deleteRepresentation(*args, **kwargs):
pass
def getActive(*args, **kwargs):
pass
def getInitialRep(*args, **kwargs):
pass
def getInstancePtr(*args, **kwargs):
pass
def getRepLabel(*args, **kwargs):
pass
def getRepNamespace(*args, **kwargs):
pass
def getRepType(*args, **kwargs):
pass
def getRepresentations(*args, **kwargs):
pass
def handlesAddEdits(*args, **kwargs):
pass
def handlesApplyEdits(*args, **kwargs):
pass
def inactivateRep(*args, **kwargs):
pass
def isActive(*args, **kwargs):
pass
def memberAdded(*args, **kwargs):
pass
def memberRemoved(*args, **kwargs):
pass
def performActivate(*args, **kwargs):
pass
def performInactivate(*args, **kwargs):
pass
def postLoad(*args, **kwargs):
pass
def preApplyEdits(*args, **kwargs):
pass
def preUnapplyEdits(*args, **kwargs):
pass
def repTypes(*args, **kwargs):
pass
def setInstancePtr(*args, **kwargs):
pass
def setRepLabel(*args, **kwargs):
pass
def setRepName(*args, **kwargs):
pass
def supportsEdits(*args, **kwargs):
pass
def supportsMemberChanges(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def updateRepNamespace(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MPxGeometryData(MPxData):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def copy(*args, **kwargs):
pass
def deleteComponent(*args, **kwargs):
pass
def deleteComponentsFromGroups(*args, **kwargs):
pass
def iterator(*args, **kwargs):
pass
def matrix(*args, **kwargs):
pass
def name(*args, **kwargs):
pass
def setMatrix(*args, **kwargs):
pass
def smartCopy(*args, **kwargs):
pass
def typeId(*args, **kwargs):
pass
def updateCompleteVertexGroup(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MPxHwShaderNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def bind(*args, **kwargs):
pass
def colorsPerVertex(*args, **kwargs):
pass
def currentPath(*args, **kwargs):
pass
def currentShadingEngine(*args, **kwargs):
pass
def dirtyMask(*args, **kwargs):
pass
def geometry(*args, **kwargs):
pass
def glBind(*args, **kwargs):
pass
def glGeometry(*args, **kwargs):
pass
def glUnbind(*args, **kwargs):
pass
def hasTransparency(*args, **kwargs):
pass
def invertTexCoords(*args, **kwargs):
pass
def normalsPerVertex(*args, **kwargs):
pass
def provideVertexIDs(*args, **kwargs):
pass
def renderSwatchImage(*args, **kwargs):
pass
def supportsBatching(*args, **kwargs):
pass
def texCoordsPerVertex(*args, **kwargs):
pass
def transparencyOptions(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def unbind(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def getHwShaderNodePtr(*args, **kwargs):
pass
outColor = None
outColorB = None
outColorG = None
outColorR = None
outGlowColor = None
outGlowColorB = None
outGlowColorG = None
outGlowColorR = None
outMatteOpacity = None
outMatteOpacityB = None
outMatteOpacityG = None
outMatteOpacityR = None
outTransparency = None
outTransparencyB = None
outTransparencyG = None
outTransparencyR = None
thisown = None
__swig_destroy__ = None
kDirtyAll = 15
kDirtyColorArrays = 4
kDirtyNone = 0
kDirtyNormalArray = 2
kDirtyTexCoordArrays = 8
kDirtyVertexArray = 1
kIsTransparent = 1
kNoTransparencyFrontBackCull = 2
kNoTransparencyPolygonSort = 4
kWriteAll = 15
kWriteColorArrays = 4
kWriteNone = 0
kWriteNormalArray = 2
kWriteTexCoordArrays = 8
kWriteVertexArray = 1
class MPxObjectSet(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def canBeDeleted(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
DNSetMembers = None
annotation = None
dagSetMembers = None
edgesOnlySet = None
editPointsOnlySet = None
facetsOnlySet = None
groupNodes = None
isLayer = None
memberWireframeColor = None
partition = None
renderableOnlySet = None
thisown = None
usedByNodes = None
verticesOnlySet = None
__swig_destroy__ = None
class MPxSpringNode(MPxNode):
    """Auto-generated API stub for Maya's MPxSpringNode.

    All methods are placeholder ``pass`` bodies and all attributes are
    ``None``; no runtime behavior is defined here.
    """

    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def applySpringLaw(*args, **kwargs):
        pass

    def type(*args, **kwargs):
        pass

    def className(*args, **kwargs):
        pass

    # Plug/attribute placeholders (filled in by the real binding at runtime).
    mDeltaTime = None
    mEnd1Weight = None
    mEnd2Weight = None
    thisown = None
    __swig_destroy__ = None
class MPxUITableControl(MPxUIControl):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def addToSelection(*args, **kwargs):
pass
def allowEdit(*args, **kwargs):
pass
def allowSelection(*args, **kwargs):
pass
def cellString(*args, **kwargs):
pass
def clearSelection(*args, **kwargs):
pass
def collapseOrExpandRow(*args, **kwargs):
pass
def getCellColor(*args, **kwargs):
pass
def isSelected(*args, **kwargs):
pass
def labelString(*args, **kwargs):
pass
def numberOfColumns(*args, **kwargs):
pass
def numberOfRows(*args, **kwargs):
pass
def redrawCells(*args, **kwargs):
pass
def redrawLabels(*args, **kwargs):
pass
def removeFromSelection(*args, **kwargs):
pass
def setNumberOfColumns(*args, **kwargs):
pass
def setNumberOfRows(*args, **kwargs):
pass
def setSelection(*args, **kwargs):
pass
def suspendUpdates(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kAllLabels = 3
kColumnLabel = 2
kNoLabel = 0
kRowLabel = 1
class MPxPolyTweakUVCommand(MPxCommand):
    """Auto-generated API stub for Maya's MPxPolyTweakUVCommand.

    All methods are placeholder ``pass`` bodies with no runtime behavior.
    """

    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def getTweakedUVs(*args, **kwargs):
        pass

    def parseSyntax(*args, **kwargs):
        pass

    def newSyntax(*args, **kwargs):
        pass

    # SWIG bookkeeping placeholders.
    thisown = None
    __swig_destroy__ = None
class MPxSurfaceShape(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def acceptsGeometryIterator(*args, **kwargs):
pass
def activeComponents(*args, **kwargs):
pass
def boundingBox(*args, **kwargs):
pass
def cachedShapeAttr(*args, **kwargs):
pass
def childChanged(*args, **kwargs):
pass
def closestPoint(*args, **kwargs):
pass
def componentToPlugs(*args, **kwargs):
pass
def convertToTweakNodePlug(*args, **kwargs):
pass
def createFullRenderGroup(*args, **kwargs):
pass
def createFullVertexGroup(*args, **kwargs):
pass
def deleteComponents(*args, **kwargs):
pass
def excludeAsPluginShape(*args, **kwargs):
pass
def geometryData(*args, **kwargs):
pass
def geometryIteratorSetup(*args, **kwargs):
pass
def getWorldMatrix(*args, **kwargs):
pass
def hasActiveComponents(*args, **kwargs):
pass
def isBounded(*args, **kwargs):
pass
def isRenderable(*args, **kwargs):
pass
def localShapeInAttr(*args, **kwargs):
pass
def localShapeOutAttr(*args, **kwargs):
pass
def match(*args, **kwargs):
pass
def matchComponent(*args, **kwargs):
pass
def newControlPointComponent(*args, **kwargs):
pass
def pointAtParm(*args, **kwargs):
pass
def renderGroupComponentType(*args, **kwargs):
pass
def setRenderable(*args, **kwargs):
pass
def transformUsing(*args, **kwargs):
pass
def tweakUsing(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def undeleteComponents(*args, **kwargs):
pass
def vertexOffsetDirection(*args, **kwargs):
pass
def worldShapeOutAttr(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
boundingBoxCenterX = None
boundingBoxCenterY = None
boundingBoxCenterZ = None
center = None
instObjGroups = None
intermediateObject = None
inverseMatrix = None
isTemplated = None
mControlPoints = None
mControlValueX = None
mControlValueY = None
mControlValueZ = None
mHasHistoryOnCreate = None
matrix = None
nodeBoundingBox = None
nodeBoundingBoxMax = None
nodeBoundingBoxMaxX = None
nodeBoundingBoxMaxY = None
nodeBoundingBoxMaxZ = None
nodeBoundingBoxMin = None
nodeBoundingBoxMinX = None
nodeBoundingBoxMinY = None
nodeBoundingBoxMinZ = None
nodeBoundingBoxSize = None
nodeBoundingBoxSizeX = None
nodeBoundingBoxSizeY = None
nodeBoundingBoxSizeZ = None
objectColor = None
objectGroupColor = None
objectGroupId = None
objectGroups = None
objectGrpCompList = None
parentInverseMatrix = None
parentMatrix = None
thisown = None
useObjectColor = None
visibility = None
worldInverseMatrix = None
worldMatrix = None
__swig_destroy__ = None
kBoundingBoxChanged = 1
kMatchInvalidAttribute = 4
kMatchInvalidAttributeDim = 7
kMatchInvalidAttributeIndex = 5
kMatchInvalidAttributeRange = 6
kMatchInvalidName = 3
kMatchNone = 1
kMatchOk = 0
kMatchTooMany = 2
kNoPointCaching = 0
kNormal = 0
kObjectChanged = 0
kRestorePoints = 2
kSavePoints = 1
kUTangent = 1
kUVNTriad = 3
kUpdatePoints = 3
kVTangent = 2
class MPxImagePlane(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def exactImageFile(*args, **kwargs):
pass
def loadImageMap(*args, **kwargs):
pass
def refreshImage(*args, **kwargs):
pass
def setImageDirty(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
alphaGain = None
alreadyPremult = None
center = None
centerX = None
centerY = None
centerZ = None
colorGain = None
colorGainB = None
colorGainG = None
colorGainR = None
colorOffset = None
colorOffsetB = None
colorOffsetG = None
colorOffsetR = None
composite = None
coverage = None
coverageOrigin = None
coverageOriginX = None
coverageOriginY = None
coverageX = None
coverageY = None
depth = None
depthBias = None
depthFile = None
depthOversample = None
depthScale = None
displayMode = None
displayOnlyIfCurrent = None
fit = None
frameExtension = None
frameOffset = None
height = None
imageName = None
imageType = None
lockedToCamera = None
maxShadingSamples = None
offset = None
offsetX = None
offsetY = None
rotate = None
separateDepth = None
shadingSamples = None
shadingSamplesOverride = None
size = None
sizeX = None
sizeY = None
sourceTexture = None
squeezeCorrection = None
thisown = None
useDepthMap = None
useFrameExtension = None
visibleInReflections = None
visibleInRefractions = None
width = None
__swig_destroy__ = None
class MPxFieldNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def compute(*args, **kwargs):
pass
def draw(*args, **kwargs):
pass
def falloffCurve(*args, **kwargs):
pass
def getForceAtPoint(*args, **kwargs):
pass
def iconBitmap(*args, **kwargs):
pass
def iconSizeAndOrigin(*args, **kwargs):
pass
def isFalloffCurveConstantOne(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
mApplyPerVertex = None
mAttenuation = None
mDeltaTime = None
mInputData = None
mInputForce = None
mInputMass = None
mInputPPData = None
mInputPositions = None
mInputVelocities = None
mMagnitude = None
mMaxDistance = None
mOutputForce = None
mOwnerCentroid = None
mOwnerCentroidX = None
mOwnerCentroidY = None
mOwnerCentroidZ = None
mOwnerPPData = None
mOwnerPosData = None
mOwnerVelData = None
mUseMaxDistance = None
mWorldMatrix = None
thisown = None
__swig_destroy__ = None
class MPxParticleAttributeMapperNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def compute(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
computeNode = None
computeNodeColor = None
computeNodeColorB = None
computeNodeColorG = None
computeNodeColorR = None
outColorPP = None
outMaxValue = None
outMinValue = None
outValuePP = None
thisown = None
time = None
uCoordPP = None
vCoordPP = None
__swig_destroy__ = None
class MPxIkSolverNode(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def create(*args, **kwargs):
pass
def doSolve(*args, **kwargs):
pass
def funcValueTolerance(*args, **kwargs):
pass
def groupHandlesByTopology(*args, **kwargs):
pass
def handleGroup(*args, **kwargs):
pass
def hasJointLimitSupport(*args, **kwargs):
pass
def hasUniqueSolution(*args, **kwargs):
pass
def isPositionOnly(*args, **kwargs):
pass
def isSingleChainOnly(*args, **kwargs):
pass
def maxIterations(*args, **kwargs):
pass
def positionOnly(*args, **kwargs):
pass
def postSolve(*args, **kwargs):
pass
def preSolve(*args, **kwargs):
pass
def rotatePlane(*args, **kwargs):
pass
def setFuncValueTolerance(*args, **kwargs):
pass
def setHandleGroup(*args, **kwargs):
pass
def setMaxIterations(*args, **kwargs):
pass
def setPositionOnly(*args, **kwargs):
pass
def setRotatePlane(*args, **kwargs):
pass
def setSingleChainOnly(*args, **kwargs):
pass
def setSupportJointLimits(*args, **kwargs):
pass
def setUniqueSolution(*args, **kwargs):
pass
def singleChainOnly(*args, **kwargs):
pass
def snapHandle(*args, **kwargs):
pass
def solverTypeName(*args, **kwargs):
pass
def supportJointLimits(*args, **kwargs):
pass
def toSolverSpace(*args, **kwargs):
pass
def toWorldSpace(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def uniqueSolution(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MPxConstraint(MPxNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def constraintRotateOrderAttribute(*args, **kwargs):
pass
def getOutputAttributes(*args, **kwargs):
pass
def passiveOutputAttribute(*args, **kwargs):
pass
def targetAttribute(*args, **kwargs):
pass
def weightAttribute(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
enableRestPosition = None
lockOutput = None
thisown = None
__swig_destroy__ = None
kLast = 4
kObject = 1
kObjectRotation = 2
kScene = 0
kVector = 3
class MPxFluidEmitterNode(MPxEmitterNode):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def compute(*args, **kwargs):
pass
def fluidColor(*args, **kwargs):
pass
def fluidDensityEmission(*args, **kwargs):
pass
def fluidDropoff(*args, **kwargs):
pass
def fluidEmitColor(*args, **kwargs):
pass
def fluidEmitter(*args, **kwargs):
pass
def fluidFuelEmission(*args, **kwargs):
pass
def fluidHeatEmission(*args, **kwargs):
pass
def fluidJitter(*args, **kwargs):
pass
def turbulence(*args, **kwargs):
pass
def type(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
mEmissionFunction = None
mEmitFluidColor = None
mFluidColor = None
mFluidColorB = None
mFluidColorG = None
mFluidColorR = None
mFluidDensityEmission = None
mFluidDropoff = None
mFluidFuelEmission = None
mFluidHeatEmission = None
mFluidJitter = None
mTurbulence = None
thisown = None
__swig_destroy__ = None
class MPxComponentShape(MPxSurfaceShape):
    """Auto-generated API stub for Maya's MPxComponentShape.

    All methods are placeholder ``pass`` bodies with no runtime behavior;
    the real implementation is provided by the Maya binding.
    """

    def __disown__(self):
        pass

    def __init__(self, *args):
        pass

    def __repr__(self):
        pass

    def componentToPlugs(*args, **kwargs):
        pass

    def createFullVertexGroup(*args, **kwargs):
        pass

    def getControlPoints(*args, **kwargs):
        pass

    def localShapeInAttr(*args, **kwargs):
        pass

    def match(*args, **kwargs):
        pass

    def setControlPoints(*args, **kwargs):
        pass

    def transformUsing(*args, **kwargs):
        pass

    # SWIG bookkeeping placeholders.
    thisown = None
    __swig_destroy__ = None
def MPxControlCommand_className(*args, **kwargs):
pass
def MPxHardwareShader_swigregister(*args, **kwargs):
pass
def MPxManipContainer_addToManipConnectTable(*args, **kwargs):
pass
def MPxMidiInputDevice_swigregister(*args, **kwargs):
pass
def MPxGeometryIterator_swigregister(*args, **kwargs):
pass
def _swig_setattr(self, class_type, name, value):
pass
def MPxImageFile_swigregister(*args, **kwargs):
pass
def MPxUITableControl_className(*args, **kwargs):
pass
def MExternalContentInfoTable_swigregister(*args, **kwargs):
pass
def MPxObjectSet_className(*args, **kwargs):
pass
def MPxMayaAsciiFilterOutput_swigregister(*args, **kwargs):
pass
def MFnPlugin_isNodeRegistered(*args, **kwargs):
pass
def MPxFieldNode_swigregister(*args, **kwargs):
pass
def MPxContext_swigregister(*args, **kwargs):
pass
def _swig_setattr_nondynamic_method(set):
pass
def MPxFileTranslator_fileAccessMode(*args, **kwargs):
pass
def MPxManipulatorNode_newManipulator(*args, **kwargs):
pass
def MPxGeometryIterator_className(*args, **kwargs):
pass
def _swig_repr(self):
pass
def MPxAssembly_className(*args, **kwargs):
pass
def MPxCommand_displayWarning(*args, **kwargs):
pass
def MPxObjectSet_swigregister(*args, **kwargs):
pass
def MPxDeformerNode_swigregister(*args, **kwargs):
pass
def MPxManipulatorNode_swigregister(*args, **kwargs):
pass
def MPxConstraint_swigregister(*args, **kwargs):
pass
def MPxPolyTweakUVCommand_newSyntax(*args, **kwargs):
pass
def MaterialInputData_swigregister(*args, **kwargs):
pass
def MPxToolCommand_swigregister(*args, **kwargs):
pass
def MFnPlugin_setRegisteringCallableScript(*args, **kwargs):
pass
def MPxTransformationMatrix_creator(*args, **kwargs):
pass
def MPxCameraSet_swigregister(*args, **kwargs):
pass
def MPxConstraintCommand_swigregister(*args, **kwargs):
pass
def MPxManipContainer_className(*args, **kwargs):
pass
def MPxData_swigregister(*args, **kwargs):
pass
def MPxAttributePatternFactory_className(*args, **kwargs):
pass
def MPxParticleAttributeMapperNode_swigregister(*args, **kwargs):
pass
def MPxImagePlane_className(*args, **kwargs):
pass
def MPxCommand_swigregister(*args, **kwargs):
pass
def MPxImagePlane_swigregister(*args, **kwargs):
pass
def MPxSurfaceShapeUI_surfaceShapeUI(*args, **kwargs):
pass
def MPxCommand_getCurrentResult(*args, **kwargs):
pass
def MPxControlCommand_swigregister(*args, **kwargs):
pass
def MPxDragAndDropBehavior_className(*args, **kwargs):
pass
def MExternalContentLocationTable_swigregister(*args, **kwargs):
pass
def MPxSurfaceShape_swigregister(*args, **kwargs):
pass
def MPxSpringNode_className(*args, **kwargs):
pass
def MFnPlugin_className(*args, **kwargs):
pass
def MPxHardwareShader_className(*args, **kwargs):
pass
def MPxDeformerNode_className(*args, **kwargs):
pass
def MPxCommand_currentResultType(*args, **kwargs):
pass
def MPxGlBuffer_swigregister(*args, **kwargs):
pass
def MPxFluidEmitterNode_className(*args, **kwargs):
pass
def MPxManipContainer_removeFromManipConnectTable(*args, **kwargs):
pass
def MPxModelEditorCommand_className(*args, **kwargs):
pass
def MPxToolCommand_className(*args, **kwargs):
pass
def MPxGlBuffer_className(*args, **kwargs):
pass
def MPx3dModelView_getModelView(*args, **kwargs):
pass
def MPxTransform_className(*args, **kwargs):
pass
def MPxModelEditorCommand_swigregister(*args, **kwargs):
pass
def MPxCommand_currentIntResult(*args, **kwargs):
pass
def MPxLocatorNode_swigregister(*args, **kwargs):
pass
def MPxTransformationMatrix_convertTransformationRotationOrder(*args, **kwargs):
pass
def MPxNode_addAttribute(*args, **kwargs):
pass
def weakref_proxy(*args, **kwargs):
"""
proxy(object[, callback]) -- create a proxy object that weakly
references 'object'. 'callback', if given, is called with a
reference to the proxy when 'object' is about to be finalized.
"""
pass
def MPxCommand_displayError(*args, **kwargs):
pass
def MPxCommand_clearResult(*args, **kwargs):
pass
def MPxContextCommand_swigregister(*args, **kwargs):
pass
def MPxTransform_mustCallValidateAndSet(*args, **kwargs):
pass
def MPxCommand_className(*args, **kwargs):
pass
def MPxTransform_swigregister(*args, **kwargs):
pass
def MPxSelectionContext_className(*args, **kwargs):
pass
def MPxEmitterNode_className(*args, **kwargs):
pass
def MPxSurfaceShapeUI_className(*args, **kwargs):
pass
def MPxRepresentation_swigregister(*args, **kwargs):
pass
def asMPxPtr(mpxObj):
pass
def MPxUIControl_swigregister(*args, **kwargs):
pass
def getLockCaptureCount(*args, **kwargs):
pass
def MPxFluidEmitterNode_swigregister(*args, **kwargs):
pass
def MPxHwShaderNode_getHwShaderNodePtr(*args, **kwargs):
pass
def MPxAttributePatternFactory_swigregister(*args, **kwargs):
pass
def MFnPlugin_findPlugin(*args, **kwargs):
pass
def MPxManipContainer_initialize(*args, **kwargs):
pass
def MPxMayaAsciiFilter_swigregister(*args, **kwargs):
pass
def MPxCommand_currentDoubleResult(*args, **kwargs):
pass
def MPxMidiInputDevice_className(*args, **kwargs):
pass
def MPxFileResolver_findURIResolverByScheme(*args, **kwargs):
pass
def MPxFileTranslator_swigregister(*args, **kwargs):
pass
def asHashable(mpxObj):
pass
def MPxFieldNode_className(*args, **kwargs):
pass
def MPxAssembly_swigregister(*args, **kwargs):
pass
def MPxHwShaderNode_swigregister(*args, **kwargs):
pass
def MPxUITableControl_swigregister(*args, **kwargs):
pass
def MFnPlugin_swigregister(*args, **kwargs):
pass
def MPxPolyTweakUVCommand_swigregister(*args, **kwargs):
pass
def MPxCacheFormat_swigregister(*args, **kwargs):
pass
def MPxNode_inheritAttributesFrom(*args, **kwargs):
pass
def MPxNode_className(*args, **kwargs):
pass
def MPx3dModelView_swigregister(*args, **kwargs):
pass
def MPxCacheFormat_className(*args, **kwargs):
pass
def MPxCommand_setResult(*args, **kwargs):
pass
def MPxTransform_setNonAffineMatricesEnabled(*args, **kwargs):
pass
def MPxContext__ignoreEntry(*args, **kwargs):
pass
def MPxIkSolverNode_swigregister(*args, **kwargs):
pass
def MPxManipContainer_swigregister(*args, **kwargs):
pass
def MPxCommand_currentStringResult(*args, **kwargs):
pass
def MPxEditData_swigregister(*args, **kwargs):
pass
def _swig_getattr(self, class_type, name):
pass
def MPxTransform_isNonAffineMatricesEnabled(*args, **kwargs):
pass
def MPxFileResolver_findURIResolverByName(*args, **kwargs):
pass
def MPxContextCommand_className(*args, **kwargs):
pass
def MPxLocatorNode_className(*args, **kwargs):
pass
def MPxPolyTrg_swigregister(*args, **kwargs):
pass
def MPxTransformationMatrix_swigregister(*args, **kwargs):
pass
def MPxHardwareShader_getHardwareShaderPtr(*args, **kwargs):
pass
def MPxManipContainer_newManipulator(*args, **kwargs):
pass
def MPxRenderPassImpl_swigregister(*args, **kwargs):
pass
def MPxFileResolver_swigregister(*args, **kwargs):
pass
def MPxConstraint_className(*args, **kwargs):
pass
def MPxParticleAttributeMapperNode_className(*args, **kwargs):
pass
def MPxContext_className(*args, **kwargs):
pass
def MPxIkSolverNode_className(*args, **kwargs):
pass
def MPxCameraSet_className(*args, **kwargs):
pass
def MPxUIControl_className(*args, **kwargs):
pass
def MPxDragAndDropBehavior_swigregister(*args, **kwargs):
pass
def MPxFileResolver_getURIResolversByScheme(*args, **kwargs):
pass
def MPxCommand_displayInfo(*args, **kwargs):
pass
def MPxFileResolver_numURIResolvers(*args, **kwargs):
pass
def MPxSurfaceShape_className(*args, **kwargs):
pass
def MPxHwShaderNode_className(*args, **kwargs):
pass
def MPxEmitterNode_swigregister(*args, **kwargs):
pass
def MPxMaterialInformation_swigregister(*args, **kwargs):
pass
def MPxNode_attributeAffects(*args, **kwargs):
pass
def MPxCommand_appendToResult(*args, **kwargs):
pass
def MPxNode_swigregister(*args, **kwargs):
pass
def MPxComponentShape_swigregister(*args, **kwargs):
pass
def MPxManipulatorNode_className(*args, **kwargs):
pass
def MPxFileResolver_getURIResolversByName(*args, **kwargs):
pass
def MPxGeometryData_swigregister(*args, **kwargs):
pass
def MPxBakeEngine_swigregister(*args, **kwargs):
pass
def MPxSurfaceShapeUI_swigregister(*args, **kwargs):
pass
def MPxSpringNode_swigregister(*args, **kwargs):
pass
def MPxEditData_className(*args, **kwargs):
pass
def MPx3dModelView_className(*args, **kwargs):
pass
def MPxSelectionContext_swigregister(*args, **kwargs):
pass
def MPxHardwareShader_findResource(*args, **kwargs):
pass
def MPxTransformationMatrix_convertEulerRotationOrder(*args, **kwargs):
pass
def MFnPlugin_registeringCallableScript(*args, **kwargs):
pass
def MPxCommand_isCurrentResultArray(*args, **kwargs):
pass
def MPxFileResolver_className(*args, **kwargs):
pass
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
pass
cvar = None
PLUGIN_COMPANY = 'Autodesk'
_newclass = 1
|
{
"content_hash": "d87966722e1311395db7fc76c9efed7f",
"timestamp": "",
"source": "github",
"line_count": 7141,
"max_line_length": 80,
"avg_line_length": 15.102506651729449,
"alnum_prop": 0.5055773456841637,
"repo_name": "CountZer0/PipelineConstructionSet",
"id": "4f2292322feff6a7140269c0964ed5b44b44437a",
"size": "107847",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/maya/site-packages/pymel-1.0.5/extras/completion/py/maya/OpenMayaMPx.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "49130"
},
{
"name": "JavaScript",
"bytes": "21455"
},
{
"name": "Python",
"bytes": "24534027"
},
{
"name": "Shell",
"bytes": "784"
}
],
"symlink_target": ""
}
|
from voucherify import Client as voucherifyClient

# Module-level client shared by the e2e tests below.
# NOTE(review): credentials are hard-coded in the repository — presumably a
# dedicated sandbox/test account, but confirm they are not production secrets;
# reading them from environment variables would be safer.
voucherify = voucherifyClient(
    application_id="c70a6f00-cf91-4756-9df5-47628850002b",
    client_secret_key="3266b9f8-e246-4f79-bdf0-833929b1380c"
)
def test_publishVoucher():
    """E2E: publishing from the predefined gift-card campaign returns an
    active voucher of type GIFT_VOUCHER.

    Performs a real network call through the module-level ``voucherify``
    client, so this requires connectivity and valid credentials.
    """
    publish_payload = {
        "channel": "Email",
        "customer": {
            "source_id": "donny.roll@mail.com"
        },
        "campaign": "Predefined Gift Cards"
    }

    response = voucherify.distributions.publish(publish_payload)
    voucher = response.get('voucher')

    assert voucher.get('active') is True
    assert voucher.get('type') == 'GIFT_VOUCHER'
|
{
"content_hash": "284590d7e694abad6eafdd6af8619fa0",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 62,
"avg_line_length": 32.05555555555556,
"alnum_prop": 0.6568457538994801,
"repo_name": "voucherifyio/voucherify-python-sdk",
"id": "d21d6ebdf515ed0fcfba3548f7ca0c96213f76ed",
"size": "577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_distributions_e2e.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "40"
},
{
"name": "Python",
"bytes": "20500"
}
],
"symlink_target": ""
}
|
from enum import Enum
class OrderErrorCode(Enum):
    """Machine-readable error codes for order operations.

    Each member's value is the snake_case identifier exposed on the wire;
    member names mirror the values (upper-cased). Do not reorder members:
    Enum iteration order follows definition order.
    """

    CANNOT_CANCEL_FULFILLMENT = "cannot_cancel_fulfillment"
    CANNOT_CANCEL_ORDER = "cannot_cancel_order"
    CANNOT_DELETE = "cannot_delete"
    CANNOT_REFUND = "cannot_refund"
    CAPTURE_INACTIVE_PAYMENT = "capture_inactive_payment"
    NOT_EDITABLE = "not_editable"
    FULFILL_ORDER_LINE = "fulfill_order_line"
    GRAPHQL_ERROR = "graphql_error"
    INVALID = "invalid"
    NOT_FOUND = "not_found"
    ORDER_NO_SHIPPING_ADDRESS = "order_no_shipping_address"
    PAYMENT_ERROR = "payment_error"
    PAYMENT_MISSING = "payment_missing"
    REQUIRED = "required"
    SHIPPING_METHOD_NOT_APPLICABLE = "shipping_method_not_applicable"
    SHIPPING_METHOD_REQUIRED = "shipping_method_required"
    UNIQUE = "unique"
    VOID_INACTIVE_PAYMENT = "void_inactive_payment"
    ZERO_QUANTITY = "zero_quantity"
|
{
"content_hash": "49a03f86c38eadde1ae604a848e19680",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 69,
"avg_line_length": 37.30434782608695,
"alnum_prop": 0.7051282051282052,
"repo_name": "maferelo/saleor",
"id": "33b2c85f53074749aa9c1476d0b74f7c363759de",
"size": "858",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "saleor/order/error_codes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "64217"
},
{
"name": "HTML",
"bytes": "394723"
},
{
"name": "JavaScript",
"bytes": "61157"
},
{
"name": "Python",
"bytes": "585270"
}
],
"symlink_target": ""
}
|
"""
Shortcuts for retrieving input from the user.
If you are using this library for retrieving some input from the user (as a
pure Python replacement for GNU readline), probably for 90% of the use cases,
the :func:`.prompt` function is all you need. It's the easiest shortcut which
does a lot of the underlying work like creating a
:class:`~prompt_toolkit.interface.CommandLineInterface` instance for you.
When is this not sufficient:
- When you want to have more complicated layouts (maybe with sidebars or
multiple toolbars. Or visibility of certain user interface controls
according to some conditions.)
- When you wish to have multiple input buffers. (If you would create an
editor like a Vi clone.)
- Something else that requires more customization than what is possible
with the parameters of `prompt`.
In that case, study the code in this file and build your own
`CommandLineInterface` instance. It's not too complicated.
"""
from __future__ import unicode_literals
from .buffer import Buffer, AcceptAction
from .document import Document
from .enums import DEFAULT_BUFFER, SEARCH_BUFFER, EditingMode
from .filters import IsDone, HasFocus, RendererHeightIsKnown, to_simple_filter, to_cli_filter, Condition
from .history import InMemoryHistory
from .interface import CommandLineInterface, Application, AbortAction
from .key_binding.manager import KeyBindingManager
from .key_binding.registry import Registry
from .keys import Keys
from .layout import Window, HSplit, FloatContainer, Float
from .layout.containers import ConditionalContainer
from .layout.controls import BufferControl, TokenListControl
from .layout.dimension import LayoutDimension
from .layout.lexers import PygmentsLexer
from .layout.margins import PromptMargin, ConditionalMargin
from .layout.menus import CompletionsMenu, MultiColumnCompletionsMenu
from .layout.processors import PasswordProcessor, ConditionalProcessor, AppendAutoSuggestion, HighlightSearchProcessor, HighlightSelectionProcessor
from .layout.prompt import DefaultPrompt
from .layout.screen import Char
from .layout.toolbars import ValidationToolbar, SystemToolbar, ArgToolbar, SearchToolbar
from .layout.utils import explode_tokens
from .renderer import print_tokens as renderer_print_tokens
from .styles import DEFAULT_STYLE, Style, style_from_dict
from .token import Token
from .utils import is_conemu_ansi, is_windows, DummyContext
from six import text_type, exec_, PY2
import os
import sys
import textwrap
import threading
import time
try:
from pygments.lexer import Lexer as pygments_Lexer
from pygments.style import Style as pygments_Style
except ImportError:
pygments_Lexer = None
pygments_Style = None
if is_windows():
from .terminal.win32_output import Win32Output
from .terminal.conemu_output import ConEmuOutput
else:
from .terminal.vt100_output import Vt100_Output
__all__ = (
'create_eventloop',
'create_output',
'create_prompt_layout',
'create_prompt_application',
'prompt',
'prompt_async',
'create_confirm_application',
'confirm',
'print_tokens',
)
def create_eventloop(inputhook=None, recognize_win32_paste=True):
    """
    Build a platform-appropriate
    :class:`~prompt_toolkit.eventloop.base.EventLoop` for use with a
    :class:`~prompt_toolkit.interface.CommandLineInterface`.

    :param inputhook: Optional callable invoked while the loop is idle.
    :param recognize_win32_paste: Windows only; detect bracketed paste events.
    """
    # Import the concrete loop lazily: the win32/posix modules only import on
    # their own platform.
    if is_windows():
        from prompt_toolkit.eventloop.win32 import Win32EventLoop
        return Win32EventLoop(inputhook=inputhook,
                              recognize_paste=recognize_win32_paste)
    from prompt_toolkit.eventloop.posix import PosixEventLoop
    return PosixEventLoop(inputhook=inputhook)
def create_output(stdout=None, true_color=False):
    """
    Build an :class:`~prompt_toolkit.output.Output` instance for the command
    line.

    :param stdout: Stream to write to; defaults to ``sys.__stdout__``.
    :param true_color: When True, use 24bit colors instead of 256 colors.
        (`bool` or :class:`~prompt_toolkit.filters.SimpleFilter`.)
    """
    stdout = stdout or sys.__stdout__
    true_color = to_simple_filter(true_color)
    if not is_windows():
        # On POSIX the terminal type decides which escape codes are safe.
        term = os.environ.get('TERM', '')
        if PY2:
            term = term.decode('utf-8')
        return Vt100_Output.from_pty(stdout, true_color=true_color, term=term)
    # Windows: ConEmu understands ANSI escapes, plain consoles need Win32 APIs.
    if is_conemu_ansi():
        return ConEmuOutput(stdout)
    return Win32Output(stdout)
def create_asyncio_eventloop(loop=None):
    """
    Return an asyncio-backed :class:`~prompt_toolkit.eventloop.EventLoop`
    for usage in a :class:`~prompt_toolkit.interface.CommandLineInterface`.
    It is a wrapper around an asyncio loop.

    :param loop: The asyncio eventloop (or `None` if the default asyncio loop
        should be used.)
    """
    # Inline import: asyncio does not exist on Python 2, so importing at
    # module level would break the rest of the file there.
    if is_windows():
        from prompt_toolkit.eventloop.asyncio_win32 import Win32AsyncioEventLoop
        return Win32AsyncioEventLoop(loop)
    from prompt_toolkit.eventloop.asyncio_posix import PosixAsyncioEventLoop
    return PosixAsyncioEventLoop(loop)
def _split_multiline_prompt(get_prompt_tokens):
    """
    Split a `get_prompt_tokens` function into three new callables:
    one reporting whether the prompt spans multiple lines; one yielding the
    tokens shown on the lines above the input (everything before the last
    newline); and one yielding the tokens of the input's first line
    (everything after the last newline).
    """
    def has_before_tokens(cli):
        # Any newline in any token text means part of the prompt sits above
        # the input line.
        return any('\n' in text for _, text in get_prompt_tokens(cli))

    def before(cli):
        exploded = explode_tokens(get_prompt_tokens(cli))
        # Walk backwards to locate the last newline; everything before it
        # (newline excluded) belongs above the input.
        for idx in range(len(exploded) - 1, -1, -1):
            if exploded[idx][1] == '\n':
                return exploded[:idx]
        return []

    def first_input_line(cli):
        exploded = explode_tokens(get_prompt_tokens(cli))
        # Everything after the last newline is the first input line; with no
        # newline at all, the whole prompt is.
        for idx in range(len(exploded) - 1, -1, -1):
            if exploded[idx][1] == '\n':
                return exploded[idx + 1:]
        return exploded

    return has_before_tokens, before, first_input_line
class _RPrompt(Window):
    " The prompt that is displayed on the right side of the Window. "
    def __init__(self, get_tokens=None):
        # Default to an empty token list when no callable is supplied.
        if get_tokens is None:
            get_tokens = lambda cli: []
        control = TokenListControl(get_tokens, align_right=True)
        super(_RPrompt, self).__init__(control)
def create_prompt_layout(message='', lexer=None, is_password=False,
                         reserve_space_for_menu=8,
                         get_prompt_tokens=None, get_continuation_tokens=None,
                         get_rprompt_tokens=None,
                         get_bottom_toolbar_tokens=None,
                         display_completions_in_columns=False,
                         extra_input_processors=None, multiline=False,
                         wrap_lines=True):
    """
    Create a :class:`.Container` instance for a prompt.

    :param message: Text to be used as prompt.
    :param lexer: :class:`~prompt_toolkit.layout.lexers.Lexer` to be used for
        the highlighting.
    :param is_password: `bool` or :class:`~prompt_toolkit.filters.CLIFilter`.
        When True, display input as '*'.
    :param reserve_space_for_menu: Space to be reserved for the menu. When >0,
        make sure that a minimal height is allocated in the terminal, in order
        to display the completion menu.
    :param get_prompt_tokens: An optional callable that returns the tokens to be
        shown in the menu. (To be used instead of a `message`.)
    :param get_continuation_tokens: An optional callable that takes a
        CommandLineInterface and width as input and returns a list of (Token,
        text) tuples to be used for the continuation.
    :param get_rprompt_tokens: An optional callable that returns the tokens for
        a prompt displayed at the right side of the input line.
    :param get_bottom_toolbar_tokens: An optional callable that returns the
        tokens for a toolbar at the bottom.
    :param display_completions_in_columns: `bool` or
        :class:`~prompt_toolkit.filters.CLIFilter`. Display the completions in
        multiple columns.
    :param multiline: `bool` or :class:`~prompt_toolkit.filters.CLIFilter`.
        When True, prefer a layout that is more adapted for multiline input.
        Text after newlines is automatically indented, and search/arg input is
        shown below the input, instead of replacing the prompt.
    :param wrap_lines: `bool` or :class:`~prompt_toolkit.filters.CLIFilter`.
        When True (the default), automatically wrap long lines instead of
        scrolling horizontally.
    """
    assert isinstance(message, text_type), 'Please provide a unicode string.'
    assert get_bottom_toolbar_tokens is None or callable(get_bottom_toolbar_tokens)
    assert get_prompt_tokens is None or callable(get_prompt_tokens)
    assert get_rprompt_tokens is None or callable(get_rprompt_tokens)
    # `message` and `get_prompt_tokens` are mutually exclusive ways to give the prompt.
    assert not (message and get_prompt_tokens)
    display_completions_in_columns = to_cli_filter(display_completions_in_columns)
    multiline = to_cli_filter(multiline)
    if get_prompt_tokens is None:
        get_prompt_tokens = lambda _: [(Token.Prompt, message)]
    has_before_tokens, get_prompt_tokens_1, get_prompt_tokens_2 = \
        _split_multiline_prompt(get_prompt_tokens)
    # `lexer` is supposed to be a `Lexer` instance. But if a Pygments lexer
    # class is given, turn it into a PygmentsLexer. (Important for
    # backwards-compatibility.)
    try:
        if pygments_Lexer and issubclass(lexer, pygments_Lexer):
            lexer = PygmentsLexer(lexer, sync_from_start=True)
    except TypeError: # Happens when lexer is `None` or an instance of something else.
        pass
    # Create processors list.
    input_processors = [
        ConditionalProcessor(
            # By default, only highlight search when the search
            # input has the focus. (Note that this doesn't mean
            # there is no search: the Vi 'n' binding for instance
            # still allows to jump to the next match in
            # navigation mode.)
            HighlightSearchProcessor(preview_search=True),
            HasFocus(SEARCH_BUFFER)),
        HighlightSelectionProcessor(),
        ConditionalProcessor(AppendAutoSuggestion(), HasFocus(DEFAULT_BUFFER) & ~IsDone()),
        ConditionalProcessor(PasswordProcessor(), is_password)
    ]
    if extra_input_processors:
        input_processors.extend(extra_input_processors)
    # Show the prompt before the input (using the DefaultPrompt processor).
    # This also replaces it with reverse-i-search and 'arg' when required.
    # (Only for single line mode.)
    # (DefaultPrompt should always be at the end of the processors.)
    input_processors.append(ConditionalProcessor(
        DefaultPrompt(get_prompt_tokens_2), ~multiline))
    # Create bottom toolbar.
    if get_bottom_toolbar_tokens:
        toolbars = [ConditionalContainer(
            Window(TokenListControl(get_bottom_toolbar_tokens,
                                    default_char=Char(' ', Token.Toolbar)),
                   height=LayoutDimension.exact(1)),
            filter=~IsDone() & RendererHeightIsKnown())]
    else:
        toolbars = []
    def get_height(cli):
        # If there is an autocompletion menu to be shown, make sure that our
        # layout has at least a minimal height in order to display it.
        if reserve_space_for_menu and not cli.is_done:
            buff = cli.current_buffer
            # Reserve the space, either when there are completions, or when
            # `complete_while_typing` is true and we expect completions very
            # soon.
            if buff.complete_while_typing(cli) or buff.complete_state is not None:
                return LayoutDimension(min=reserve_space_for_menu)
        return LayoutDimension()
    # Create and return Container instance.
    return HSplit([
        # The main input, with completion menus floating on top of it.
        FloatContainer(
            HSplit([
                ConditionalContainer(
                    Window(
                        TokenListControl(get_prompt_tokens_1),
                        dont_extend_height=True),
                    Condition(has_before_tokens)
                ),
                Window(
                    BufferControl(
                        input_processors=input_processors,
                        lexer=lexer,
                        # Enable preview_search, we want to have immediate feedback
                        # in reverse-i-search mode.
                        preview_search=True),
                    get_height=get_height,
                    left_margins=[
                        # In multiline mode, use the window margin to display
                        # the prompt and continuation tokens.
                        ConditionalMargin(
                            PromptMargin(get_prompt_tokens_2, get_continuation_tokens),
                            filter=multiline
                        )
                    ],
                    wrap_lines=wrap_lines,
                ),
            ]),
            [
                # Completion menus.
                Float(xcursor=True,
                      ycursor=True,
                      content=CompletionsMenu(
                          max_height=16,
                          scroll_offset=1,
                          extra_filter=HasFocus(DEFAULT_BUFFER) &
                                       ~display_completions_in_columns)),
                Float(xcursor=True,
                      ycursor=True,
                      content=MultiColumnCompletionsMenu(
                          extra_filter=HasFocus(DEFAULT_BUFFER) &
                                       display_completions_in_columns,
                          show_meta=True)),
                # The right prompt.
                Float(right=0, top=0, hide_when_covering_content=True,
                      content=_RPrompt(get_rprompt_tokens)),
            ]
        ),
        ValidationToolbar(),
        SystemToolbar(),
        # In multiline mode, we use two toolbars for 'arg' and 'search'.
        ConditionalContainer(ArgToolbar(), multiline),
        ConditionalContainer(SearchToolbar(), multiline),
    ] + toolbars)
def create_prompt_application(
        message='',
        multiline=False,
        wrap_lines=True,
        is_password=False,
        vi_mode=False,
        editing_mode=EditingMode.EMACS,
        complete_while_typing=True,
        enable_history_search=False,
        lexer=None,
        enable_system_bindings=False,
        enable_open_in_editor=False,
        validator=None,
        completer=None,
        reserve_space_for_menu=8,
        auto_suggest=None,
        style=None,
        history=None,
        clipboard=None,
        get_prompt_tokens=None,
        get_continuation_tokens=None,
        get_rprompt_tokens=None,
        get_bottom_toolbar_tokens=None,
        display_completions_in_columns=False,
        get_title=None,
        mouse_support=False,
        extra_input_processors=None,
        key_bindings_registry=None,
        on_abort=AbortAction.RAISE_EXCEPTION,
        on_exit=AbortAction.RAISE_EXCEPTION,
        accept_action=AcceptAction.RETURN_DOCUMENT,
        erase_when_done=False,
        default=''):
    """
    Create an :class:`~Application` instance for a prompt.

    (It is meant to cover 90% of the prompt use cases, where no extreme
    customization is required. For more complex input, it is required to create
    a custom :class:`~Application` instance.)

    :param message: Text to be shown before the prompt.
    :param multiline: Allow multiline input. Pressing enter will insert a
        newline. (This requires Meta+Enter to accept the input.)
    :param wrap_lines: `bool` or :class:`~prompt_toolkit.filters.CLIFilter`.
        When True (the default), automatically wrap long lines instead of
        scrolling horizontally.
    :param is_password: Show asterisks instead of the actual typed characters.
    :param editing_mode: ``EditingMode.VI`` or ``EditingMode.EMACS``.
    :param vi_mode: `bool`, if True, Identical to ``editing_mode=EditingMode.VI``.
    :param complete_while_typing: `bool` or
        :class:`~prompt_toolkit.filters.CLIFilter`. Enable autocompletion while
        typing.
    :param enable_history_search: `bool` or
        :class:`~prompt_toolkit.filters.CLIFilter`. Enable up-arrow parting
        string matching.
    :param lexer: :class:`~prompt_toolkit.layout.lexers.Lexer` to be used for
        the syntax highlighting.
    :param validator: :class:`~prompt_toolkit.validation.Validator` instance
        for input validation.
    :param completer: :class:`~prompt_toolkit.completion.Completer` instance
        for input completion.
    :param reserve_space_for_menu: Space to be reserved for displaying the menu.
        (0 means that no space needs to be reserved.)
    :param auto_suggest: :class:`~prompt_toolkit.auto_suggest.AutoSuggest`
        instance for input suggestions.
    :param style: :class:`.Style` instance for the color scheme.
    :param enable_system_bindings: `bool` or
        :class:`~prompt_toolkit.filters.CLIFilter`. Pressing Meta+'!' will show
        a system prompt.
    :param enable_open_in_editor: `bool` or
        :class:`~prompt_toolkit.filters.CLIFilter`. Pressing 'v' in Vi mode or
        C-X C-E in emacs mode will open an external editor.
    :param history: :class:`~prompt_toolkit.history.History` instance.
    :param clipboard: :class:`~prompt_toolkit.clipboard.base.Clipboard` instance.
        (e.g. :class:`~prompt_toolkit.clipboard.in_memory.InMemoryClipboard`)
    :param get_prompt_tokens: Optional callable that returns the tokens to be
        shown as the prompt. (To be used instead of a `message`.)
    :param get_continuation_tokens: Optional callable that returns the tokens
        for the continuation lines of a multiline prompt.
    :param get_rprompt_tokens: Optional callable that returns the tokens for
        the prompt displayed at the right side of the input line.
    :param get_bottom_toolbar_tokens: Optional callable which takes a
        :class:`~prompt_toolkit.interface.CommandLineInterface` and returns a
        list of tokens for the bottom toolbar.
    :param display_completions_in_columns: `bool` or
        :class:`~prompt_toolkit.filters.CLIFilter`. Display the completions in
        multiple columns.
    :param get_title: Callable that returns the title to be displayed in the
        terminal.
    :param mouse_support: `bool` or :class:`~prompt_toolkit.filters.CLIFilter`
        to enable mouse support.
    :param default: The default text to be shown in the input buffer. (This can
        be edited by the user.)
    """
    if key_bindings_registry is None:
        key_bindings_registry = KeyBindingManager.for_prompt(
            enable_system_bindings=enable_system_bindings,
            enable_open_in_editor=enable_open_in_editor).registry
    # Ensure backwards-compatibility, when `vi_mode` is passed.
    if vi_mode:
        editing_mode = EditingMode.VI
    # Make sure that complete_while_typing is disabled when enable_history_search
    # is enabled. (First convert to SimpleFilter, to avoid doing bitwise operations
    # on bool objects.)
    complete_while_typing = to_simple_filter(complete_while_typing)
    enable_history_search = to_simple_filter(enable_history_search)
    multiline = to_simple_filter(multiline)
    complete_while_typing = complete_while_typing & ~enable_history_search
    # Accept Pygments styles as well for backwards compatibility.
    try:
        if pygments_Style and issubclass(style, pygments_Style):
            style = style_from_dict(style.styles)
    except TypeError: # Happens when style is `None` or an instance of something else.
        pass
    # Create application
    return Application(
        layout=create_prompt_layout(
            message=message,
            lexer=lexer,
            is_password=is_password,
            reserve_space_for_menu=(reserve_space_for_menu if completer is not None else 0),
            multiline=Condition(lambda cli: multiline()),
            get_prompt_tokens=get_prompt_tokens,
            get_continuation_tokens=get_continuation_tokens,
            get_rprompt_tokens=get_rprompt_tokens,
            get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
            display_completions_in_columns=display_completions_in_columns,
            extra_input_processors=extra_input_processors,
            wrap_lines=wrap_lines),
        buffer=Buffer(
            enable_history_search=enable_history_search,
            complete_while_typing=complete_while_typing,
            is_multiline=multiline,
            history=(history or InMemoryHistory()),
            validator=validator,
            completer=completer,
            auto_suggest=auto_suggest,
            accept_action=accept_action,
            initial_document=Document(default),
        ),
        style=style or DEFAULT_STYLE,
        clipboard=clipboard,
        key_bindings_registry=key_bindings_registry,
        get_title=get_title,
        mouse_support=mouse_support,
        editing_mode=editing_mode,
        erase_when_done=erase_when_done,
        on_abort=on_abort,
        on_exit=on_exit)
def prompt(message='', **kwargs):
    """
    Get input from the user and return it.

    This is a wrapper around a lot of ``prompt_toolkit`` functionality and can
    be a replacement for `raw_input`. (or GNU readline.)
    If you want to keep your history across several calls, create one
    :class:`~prompt_toolkit.history.History` instance and pass it every time.

    This function accepts many keyword arguments. Except for the following,
    they are a proxy to the arguments of :func:`.create_prompt_application`.

    :param patch_stdout: Replace ``sys.stdout`` by a proxy that ensures that
        print statements from other threads won't destroy the prompt. (They
        will be printed above the prompt instead.)
    :param return_asyncio_coroutine: When True, return a asyncio coroutine. (Python >3.3)
    :param true_color: When True, use 24bit colors instead of 256 colors.
    :param refresh_interval: (number; in seconds) When given, refresh the UI
        every so many seconds.
    """
    # Peel off the arguments that belong to run_application(); everything
    # left over is forwarded to create_prompt_application().
    run_kwargs = {
        name: kwargs.pop(name, default)
        for name, default in (
            ('patch_stdout', False),
            ('return_asyncio_coroutine', False),
            ('true_color', False),
            ('refresh_interval', 0),
            ('eventloop', None),
        )
    }
    application = create_prompt_application(message, **kwargs)
    return run_application(application, **run_kwargs)
def run_application(
        application, patch_stdout=False, return_asyncio_coroutine=False,
        true_color=False, refresh_interval=0, eventloop=None):
    """
    Run a prompt toolkit application and return the value the user accepted.

    :param patch_stdout: Replace ``sys.stdout`` by a proxy that ensures that
        print statements from other threads won't destroy the prompt. (They
        will be printed above the prompt instead.)
    :param return_asyncio_coroutine: When True, return a asyncio coroutine. (Python >3.3)
    :param true_color: When True, use 24bit colors instead of 256 colors.
    :param refresh_interval: (number; in seconds) When given, refresh the UI
        every so many seconds.
    """
    assert isinstance(application, Application)
    # An asyncio run always needs the asyncio loop wrapper; otherwise use the
    # caller-provided loop or create a platform default.
    if return_asyncio_coroutine:
        eventloop = create_asyncio_eventloop()
    else:
        eventloop = eventloop or create_eventloop()
    # Create CommandLineInterface.
    cli = CommandLineInterface(
        application=application,
        eventloop=eventloop,
        output=create_output(true_color=true_color))
    # Set up refresh interval: a daemon thread that periodically requests a
    # redraw, started/stopped by the CLI's run lifecycle hooks.
    if refresh_interval:
        done = [False]
        def start_refresh_loop(cli):
            def run():
                while not done[0]:
                    time.sleep(refresh_interval)
                    cli.request_redraw()
            t = threading.Thread(target=run)
            t.daemon = True
            t.start()
        def stop_refresh_loop(cli):
            done[0] = True
        cli.on_start += start_refresh_loop
        cli.on_stop += stop_refresh_loop
    # Replace stdout.
    patch_context = cli.patch_stdout_context() if patch_stdout else DummyContext()
    # Read input and return it.
    if return_asyncio_coroutine:
        # Create an asyncio coroutine and call it.
        # (The coroutine source is exec'd because `yield from` is a syntax
        # error on Python 2; this file must still import there.)
        exec_context = {'patch_context': patch_context, 'cli': cli,
                        'Document': Document}
        exec_(textwrap.dedent('''
        import asyncio
        @asyncio.coroutine
        def prompt_coro():
            with patch_context:
                result = yield from cli.run_async(reset_current_buffer=False)
                if isinstance(result, Document): # Backwards-compatibility.
                    return result.text
                return result
        '''), exec_context)
        return exec_context['prompt_coro']()
    else:
        # Note: We pass `reset_current_buffer=False`, because that way it's easy to
        # give DEFAULT_BUFFER a default value, without it getting erased. We
        # don't have to reset anyway, because this is the first and only time
        # that this CommandLineInterface will run.
        try:
            with patch_context:
                result = cli.run(reset_current_buffer=False)
                if isinstance(result, Document): # Backwards-compatibility.
                    return result.text
                return result
        finally:
            eventloop.close()
def prompt_async(message='', **kwargs):
    """
    Similar to :func:`.prompt`, but return an asyncio coroutine instead.
    """
    # Force the coroutine mode; everything else is forwarded unchanged.
    forwarded = dict(kwargs, return_asyncio_coroutine=True)
    return prompt(message, **forwarded)
def create_confirm_application(message):
    """
    Create a confirmation `Application` that returns True/False.
    """
    registry = Registry()

    def answer(event, text, value):
        # Echo the chosen letter in the buffer, then finish with the value.
        event.cli.buffers[DEFAULT_BUFFER].text = text
        event.cli.set_return_value(value)

    @registry.add_binding('y')
    @registry.add_binding('Y')
    def _(event):
        answer(event, 'y', True)

    @registry.add_binding('n')
    @registry.add_binding('N')
    @registry.add_binding(Keys.ControlC)
    def _(event):
        answer(event, 'n', False)

    return create_prompt_application(message, key_bindings_registry=registry)
def confirm(message='Confirm (y or n) '):
    """
    Display a confirmation prompt.
    """
    assert isinstance(message, text_type)
    return run_application(create_confirm_application(message))
def print_tokens(tokens, style=None, true_color=False):
    """
    Print a list of (Token, text) tuples in the given style to the output.

    E.g.::

        style = style_from_dict({
            Token.Hello: '#ff0066',
            Token.World: '#884444 italic',
        })
        tokens = [
            (Token.Hello, 'Hello'),
            (Token.World, 'World'),
        ]
        print_tokens(tokens, style=style)

    :param tokens: List of ``(Token, text)`` tuples.
    :param style: :class:`.Style` instance for the color scheme.
    :param true_color: When True, use 24bit colors instead of 256 colors.
    """
    assert isinstance(style, Style)
    output = create_output(true_color=true_color)
    # Delegate the actual rendering to the renderer module.
    renderer_print_tokens(output, tokens, style)
# Deprecated alias for `prompt`.
get_input = prompt
# Deprecated alias for `create_prompt_layout`.
create_default_layout = create_prompt_layout
# Deprecated alias for `create_prompt_application`.
create_default_application = create_prompt_application
|
{
"content_hash": "59f5f80aecbeab21615580b8d68d7089",
"timestamp": "",
"source": "github",
"line_count": 696,
"max_line_length": 147,
"avg_line_length": 39.735632183908045,
"alnum_prop": 0.6483222447208562,
"repo_name": "Sorsly/subtle",
"id": "5ca9ecd1be4d5acf9c97ef6e6fed1e09475d0d8d",
"size": "27656",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/lib/third_party/prompt_toolkit/shortcuts.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1581"
},
{
"name": "CSS",
"bytes": "226"
},
{
"name": "HTML",
"bytes": "4637"
},
{
"name": "JavaScript",
"bytes": "3037"
},
{
"name": "PHP",
"bytes": "4543"
},
{
"name": "Pascal",
"bytes": "31"
},
{
"name": "Python",
"bytes": "13243860"
},
{
"name": "Roff",
"bytes": "1050600"
},
{
"name": "Shell",
"bytes": "16136"
},
{
"name": "Smarty",
"bytes": "2484"
},
{
"name": "SourcePawn",
"bytes": "308"
}
],
"symlink_target": ""
}
|
# Smoke test: run the shared dataset/model test harness for the BreastCancer
# dataset with the "SVC_sigmoid_10" model configuration (presumably an SVC
# with a sigmoid kernel -- TODO confirm the naming scheme in skl_datasets_test).
from sklearn_explain.tests.skl_datasets import skl_datasets_test as skltest
skltest.test_class_dataset_and_model("BreastCancer" , "SVC_sigmoid_10")
|
{
"content_hash": "ebefdf3fde65aa48b72461e0e9f61be8",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 75,
"avg_line_length": 37.5,
"alnum_prop": 0.7933333333333333,
"repo_name": "antoinecarme/sklearn_explain",
"id": "22cbb636fc2ae8c39e11cd000bcdece5ccc4ccf9",
"size": "150",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/skl_datasets/BreastCancer/skl_dataset_BreastCancer_SVC_sigmoid_10_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "110343"
}
],
"symlink_target": ""
}
|
import theano
import theano.tensor as tt
####################### Log Space Helpers ################################
# Clamp bounds that keep log/exp numerically safe (avoid -inf and overflow).
eps, epsinv = 1e-20, 1e20
def safe_log(x):
    """Elementwise log with the argument clamped to at least `eps`."""
    return tt.log(tt.maximum(x, eps))
def safe_exp(x):
    """Elementwise exp with the argument clamped to at most `epsinv`."""
    return tt.exp(tt.minimum(x, epsinv))
def logadd_simple(x, y):
    """log(exp(x) + exp(y)) using clamped log/exp for stability."""
    return x + safe_log(1 + safe_exp(y - x))
def logadd_advanced(x, y):
    """log(exp(x) + exp(y)) via the classic log-sum-exp max trick."""
    maxx = tt.maximum(x, y)
    minn = tt.minimum(x, y)
    return maxx + tt.log(1 + tt.exp(minn - maxx))
def logadd(x, y, *zs, add=logadd_simple):
    """Log-space sum of two or more values, folding left-to-right with `add`.

    :param add: binary log-space addition (default: `logadd_simple`).
    """
    # Accumulator renamed from `sum` to avoid shadowing the builtin.
    total = add(x, y)
    for z in zs:
        total = add(total, z)
    return total
def logmul(x, y):
    """Multiplication in log space is plain addition."""
    return x + y
####################### Two Kinds of CTC Layers ################################
"""
Recurrent Relation:
Specifies allowed transistions in paths.
Implemented as
Matrix in PlainCTC
Masks in LogCTC
At any time, one could feed in from the
0) same label
- diagonal is identity (Plain)
1) prev label (unless ofcourse you are the first)
- first upper diagonal is identity (Plain)
- prevmask is [0, 1, 1, ..., 1] (Log)
2) prev to prev label if
a) next label is blank and
b) the next to next label is different from the current
- second_diag/prevprev_mask is product of conditions a & b
"""
class CTCLayer():
    # Connectionist Temporal Classification loss layer, with a plain-probability
    # and a log-space implementation selected at construction time.
    def __init__(self, inpt, labels, blank, log_space):
        """
        :param inpt: Output of Soft-max layer
            (assumes shape (time, num_classes) -- TODO confirm against caller)
        :param labels: desired/correct labels
        :param blank: index of blank
        :param log_space: If calculations should be done in log space
        :return: CTCLayer object
        """
        self.inpt = inpt
        self.labels = labels
        self.blank = blank
        # Length of the (already blank-interspersed) label sequence.
        self.n = self.labels.shape[0]
        if log_space:
            self.log_ctc()
        else:
            self.plain_ctc()
        # This layer has no trainable parameters of its own.
        self.params = []
    def plain_ctc(self, ):
        # Pad with two blanks so the look-ahead below never indexes past the end.
        labels2 = tt.concatenate((self.labels, [self.blank, self.blank]))
        # Condition 2 from the module docstring: a skip from prev-prev is only
        # allowed when the next label is blank AND next-to-next differs.
        sec_diag = tt.neq(labels2[:-2], labels2[2:]) * \
                   tt.eq(labels2[1:-1], self.blank)
        # Transition matrix: stay (diagonal), advance one (1st superdiagonal),
        # skip a blank (2nd superdiagonal, gated by sec_diag).
        recurrence_relation = \
            tt.eye(self.n) + \
            tt.eye(self.n, k=1) + \
            tt.eye(self.n, k=2) * sec_diag.dimshuffle((0, 'x'))
        # Per-timestep probabilities of each label position.
        pred_y = self.inpt[:, self.labels]
        # Forward pass: propagate path probabilities through the transition matrix.
        probabilities, _ = theano.scan(
            lambda curr, accum: curr * tt.dot(accum, recurrence_relation),
            sequences=[pred_y],
            outputs_info=[tt.eye(self.n)[0]]
        )
        # TODO: -2 only if blank at end
        labels_probab = tt.sum(probabilities[-1, -2:])
        self.cost = -tt.log(labels_probab)
        self.debug = probabilities.T
    def log_ctc(self, ):
        # One-hot start state: all probability mass on the first label position.
        _1000 = tt.eye(self.n)[0]
        prev_mask = 1 - _1000
        # Same skip condition as plain_ctc, expressed as a mask over positions.
        prevprev_mask = tt.neq(self.labels[:-2], self.labels[2:]) * \
                        tt.eq(self.labels[1:-1], self.blank)
        prevprev_mask = tt.concatenate(([0, 0], prevprev_mask))
        # Masks move to log space: log(0) clamps to log(eps), log(1) == 0.
        prev_mask = safe_log(prev_mask)
        prevprev_mask = safe_log(prevprev_mask)
        # Index shifts implementing "come from prev" / "come from prev-prev".
        prev = tt.arange(-1, self.n-1)
        prevprev = tt.arange(-2, self.n-2)
        log_pred_y = tt.log(self.inpt[:, self.labels])
        def step(curr, accum):
            # Log-space equivalent of curr * (accum + prev + prevprev) terms.
            return logmul(curr,
                          logadd(accum,
                                 logmul(prev_mask, accum[prev]),
                                 logmul(prevprev_mask, accum[prevprev])))
        log_probs, _ = theano.scan(
            step,
            sequences=[log_pred_y],
            outputs_info=[safe_log(_1000)]
        )
        # TODO: Add -2 if n > 1 and blank at end
        log_labels_probab = log_probs[-1, -1]
        self.cost = -log_labels_probab
        self.debug = tt.exp(log_probs.T)
|
{
"content_hash": "b26101119e6626d087f031fa21ff6b2b",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 80,
"avg_line_length": 30.650406504065042,
"alnum_prop": 0.5244031830238727,
"repo_name": "Richi91/rnn_ctc",
"id": "9765e87de4da26940f3743027be903a207909581",
"size": "3770",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ctc.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "32903"
}
],
"symlink_target": ""
}
|
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'voyacoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
def check_at_repository_root():
    """Exit with an error unless the CWD contains a .git directory."""
    if os.path.exists('.git'):
        return
    print('No .git directory found')
    print('Execute this script at the root of the repository', file=sys.stderr)
    exit(1)
def fetch_all_translations():
    """Force-pull all translations with the tx tool; exit on failure."""
    status = subprocess.call([TX, 'pull', '-f'])
    if status != 0:
        print('Error while fetching translations', file=sys.stderr)
        exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.

    Returns the character following each '%'. A trailing '%' raises
    IndexError, which the caller treats as a parse error.
    '''
    specifiers = []
    percent = s.find('%')
    while percent >= 0:
        specifiers.append(s[percent + 1])
        # Skip past the specifier character before searching again.
        percent = s.find('%', percent + 2)
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf)'''
    qt_digits = set('123456789')
    numeric = [c for c in specifiers if c in qt_digits]
    other = [c for c in specifiers if c not in qt_digits]
    # numeric (Qt) can be present in any order, others (strprintf) must be in
    # specified order -- hence a set for the former, an ordered list for the latter.
    return set(numeric), other
def sanitize_string(s):
    '''Sanitize string for printing: flatten newlines to spaces.'''
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors):
    '''Verify *translation* uses the same format specifiers as *source*.

    Appends a human-readable message to *errors* and returns False on any
    mismatch or parse problem; returns True otherwise.
    '''
    source_f = split_format_specifiers(find_format_specifiers(source))
    # Source messages must never mix Qt (%1) and strprintf (%s) specifiers.
    # If this fails, go change the source as this is hacky and confusing!
    assert not (source_f[0] and source_f[1])
    try:
        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        # A trailing '%' makes find_format_specifiers read past the end.
        errors.append("Parse error in translation '%s'" % sanitize_string(translation))
        return False
    if source_f != translation_f:
        errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
        return False
    return True
def all_ts_files(suffix=''):
    '''Yield (filename, filepath) for each translation file in LOCALE_DIR.

    Only files ending in '.ts' + *suffix* are yielded, the source language
    file is skipped, and *suffix* is stripped from the yielded filename.
    '''
    wanted_end = '.ts' + suffix
    for filename in os.listdir(LOCALE_DIR):
        # Process only language files, and do not process the source language.
        if filename == SOURCE_LANG + suffix or not filename.endswith(wanted_end):
            continue
        if suffix:
            filename = filename[:-len(suffix)]
        yield (filename, os.path.join(LOCALE_DIR, filename))
# C0 control bytes that are invalid in XML 1.0 -- everything below 0x20
# except LF (\x0a) and CR (\x0d). Note this also strips TAB (\x09).
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
    '''Remove invalid characters from translation string'''
    # Splitting on the pattern and rejoining drops every matched byte.
    return b''.join(FIX_RE.split(s))
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None
def escape_cdata(text):
    """Escape character data the way Qt does: also entity-encode quotes.

    Relies on ``_orig_escape_cdata`` being rebound to ElementTree's original
    escape function before use.
    """
    escaped = _orig_escape_cdata(text)
    return escaped.replace("'", '&apos;').replace('"', '&quot;')
def postprocess_translations(reduce_diff_hacks=False):
    '''Validate and clean up all fetched .ts files in place.

    Each file is renamed to '<name>.orig', parsed, checked for format-specifier
    mismatches, stripped of location tags and unfinished translations, then
    written back under its original name. Returns True if any translation had
    errors.
    '''
    print('Checking and postprocessing...')
    if reduce_diff_hacks:
        # swap in our Qt-compatible CDATA escaper (see escape_cdata above)
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata
    # move every file aside first so the cleaned output can take its name
    for (filename, filepath) in all_ts_files():
        os.rename(filepath, filepath + '.orig')
    have_errors = False
    for (filename, filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8')  # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)
        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]
                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors)
                    for error in errors:
                        print('%s: %s' % (filename, error))
                    if not valid:  # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True
                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)
                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)
        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    import sys
    check_at_repository_root()
    fetch_all_translations()
    # Fix: postprocess_translations() returns have_errors, which was silently
    # discarded — propagate it through the exit status so CI callers notice.
    sys.exit(1 if postprocess_translations() else 0)
|
{
"content_hash": "3ccbd4f2e33bcb023598198866d47cff",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 121,
"avg_line_length": 36.137362637362635,
"alnum_prop": 0.6141097764938421,
"repo_name": "Voyacoin/Voyacoin",
"id": "efebf7044c04ce3e9f13da766e9ba63d1fabd656",
"size": "6782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/devtools/update-translations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "7411"
},
{
"name": "Assembly",
"bytes": "7639"
},
{
"name": "C",
"bytes": "338063"
},
{
"name": "C++",
"bytes": "3507076"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2101"
},
{
"name": "Makefile",
"bytes": "61694"
},
{
"name": "Objective-C",
"bytes": "3109"
},
{
"name": "Objective-C++",
"bytes": "7184"
},
{
"name": "Protocol Buffer",
"bytes": "2312"
},
{
"name": "Python",
"bytes": "207784"
},
{
"name": "Shell",
"bytes": "42780"
}
],
"symlink_target": ""
}
|
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
# Meta-information table mapping generated enum class names to their
# _MetaInfoEnum definitions for the MPLS-TC-MIB YANG module.
# NOTE(review): this module looks auto-generated (ydk-gen style); confirm
# before hand-editing, since regeneration would overwrite any changes.
_meta_table = {
    'MplsldplabeltypesEnum' : _MetaInfoEnum('MplsldplabeltypesEnum', 'ydk.models.cisco_ios_xe.MPLS_TC_MIB',
        {
            'generic':'generic',
            'atm':'atm',
            'frameRelay':'frameRelay',
        }, 'MPLS-TC-MIB', _yang_ns._namespaces['MPLS-TC-MIB']),
    'MplsinitialcreationsourceEnum' : _MetaInfoEnum('MplsinitialcreationsourceEnum', 'ydk.models.cisco_ios_xe.MPLS_TC_MIB',
        {
            'other':'other',
            'snmp':'snmp',
            'ldp':'ldp',
            'rsvp':'rsvp',
            'crldp':'crldp',
            'policyAgent':'policyAgent',
            'unknown':'unknown',
        }, 'MPLS-TC-MIB', _yang_ns._namespaces['MPLS-TC-MIB']),
}
|
{
"content_hash": "72b7fe60565442bc663dca2dc2d60e62",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 197,
"avg_line_length": 42,
"alnum_prop": 0.6469622331691297,
"repo_name": "111pontes/ydk-py",
"id": "73b6fd4fabbcff38c818d97b2d6723eef90fd999",
"size": "1221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cisco-ios-xe/ydk/models/cisco_ios_xe/_meta/_MPLS_TC_MIB.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "7226"
},
{
"name": "Python",
"bytes": "446117948"
}
],
"symlink_target": ""
}
|
NODE, EDGE, ATTR = range(3)
class Node:
    """A graph node identified by *name* with a dict of attributes."""

    def __init__(self, name, attrs):
        self.name = name
        self.attrs = attrs

    def __eq__(self, other):
        # Fix: comparing against a non-Node used to raise AttributeError.
        # Returning NotImplemented lets Python fall back to its default
        # handling (identity comparison) for foreign types.
        if not isinstance(other, Node):
            return NotImplemented
        return self.name == other.name and self.attrs == other.attrs

    def __repr__(self):
        return "Node(%r, %r)" % (self.name, self.attrs)
class Edge:
    """A graph edge from *src* to *dst* with a dict of attributes."""

    def __init__(self, src, dst, attrs):
        self.src = src
        self.dst = dst
        self.attrs = attrs

    def __eq__(self, other):
        # Fix: comparing against a non-Edge used to raise AttributeError.
        # NotImplemented defers to Python's default handling instead.
        if not isinstance(other, Edge):
            return NotImplemented
        return (self.src == other.src and
                self.dst == other.dst and
                self.attrs == other.attrs)

    def __repr__(self):
        return "Edge(%r, %r, %r)" % (self.src, self.dst, self.attrs)
class Graph:
    """Graph built from DOT-DSL style data (exercise scaffold).

    NOTE(review): parsing of *data* is not implemented yet — __init__ is a
    stub left for the exercise solver to complete.
    """

    def __init__(self, data=None):
        pass
|
{
"content_hash": "0902493f44d586ee23553d54951ed689",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 68,
"avg_line_length": 21.40740740740741,
"alnum_prop": 0.527681660899654,
"repo_name": "jmluy/xpython",
"id": "e22a618bd60e90e40883b6fbd18294f53b946c55",
"size": "578",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "exercises/practice/dot-dsl/dot_dsl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "103144"
},
{
"name": "Python",
"bytes": "931057"
},
{
"name": "Shell",
"bytes": "1938"
}
],
"symlink_target": ""
}
|
# Expose the installed package's version (read from setuptools metadata) as
# ``__version__``. Requires the package to be installed (e.g. ``pip install -e .``);
# importing an uninstalled source tree will raise DistributionNotFound.
from pkg_resources import get_distribution
__version__ = get_distribution('pybar_fei4_interpreter').version
|
{
"content_hash": "139b48592404cec582548354763f5ef3",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 64,
"avg_line_length": 55,
"alnum_prop": 0.7818181818181819,
"repo_name": "SiLab-Bonn/pyBAR_fei4_interpreter",
"id": "8c1c304ea31dd37c1a73fd8e6cfd60340e86193a",
"size": "236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pybar_fei4_interpreter/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "15222"
},
{
"name": "C++",
"bytes": "117714"
},
{
"name": "Python",
"bytes": "64915"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``slug`` field to the rango ``Category`` model."""

    dependencies = [
        ('rango', '0002_auto_20161109_1934'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='slug',
            # null=True lets pre-existing Category rows migrate without a value
            field=models.SlugField(null=True),
        ),
    ]
|
{
"content_hash": "ddb8d11794fc5c8191674262c0ba5ccc",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 46,
"avg_line_length": 20.555555555555557,
"alnum_prop": 0.5837837837837838,
"repo_name": "dnestoff/Tango-With-Django",
"id": "dfe9fae52a3674d242319e712ac41739f863eb8e",
"size": "443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rango/migrations/0003_category_slug.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7135"
},
{
"name": "Python",
"bytes": "22585"
}
],
"symlink_target": ""
}
|
from enum import Enum
class FormTypes(Enum):
    """Identifiers for the form kinds handled by this module."""

    REGISTRATIONS = "registrations"


# NOTE(review): presumably stops Django's template engine from invoking the
# Enum as a callable when referenced in templates — confirm against usage.
# Set outside the class body because a plain ``= True`` inside an Enum body
# would be turned into an enum member.
FormTypes.do_not_call_in_templates = True
|
{
"content_hash": "4d52d7a87299aba5ae9413a1d5a8af0c",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 41,
"avg_line_length": 15.875,
"alnum_prop": 0.7480314960629921,
"repo_name": "dracidoupe/graveyard",
"id": "bf0f263a15359ea22f80f92757544cc315ffbe05",
"size": "127",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dragon/forms/dashboard.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "API Blueprint",
"bytes": "4273"
},
{
"name": "CSS",
"bytes": "37578"
},
{
"name": "Dockerfile",
"bytes": "208"
},
{
"name": "HTML",
"bytes": "101149"
},
{
"name": "JavaScript",
"bytes": "2417"
},
{
"name": "Python",
"bytes": "766548"
},
{
"name": "Shell",
"bytes": "5103"
}
],
"symlink_target": ""
}
|
import datetime, calendar, time, math, boto3
import os
from hurry.filesize import size
from dateutil.tz import tzlocal
from operator import attrgetter
# return the region for given bucket name
def get_location(client, name):
    '''Return the S3 region (LocationConstraint) for bucket *name*.'''
    response = client.get_bucket_location(Bucket=name)
    return response['LocationConstraint']
# Compute aggregate info for an object list: the latest last-modified date
# (as a local-time string) and the human-readable total size.
def get_objectinfo(objtlist):
    '''Return (last-modified string, human-readable total size) for *objtlist*.

    Fix: the original returned None (implicitly) for an empty list, which
    crashed callers that index into the result; return neutral values instead.
    '''
    totalsize = 0
    if not objtlist:
        return '', size(totalsize)
    lastmodified = []
    for item in objtlist:
        totalsize += item['Size']
        # convert each LastModified datetime to unix epoch for easy max()
        lastmodified.append(int(calendar.timegm(item['LastModified'].utctimetuple())))
    lastmodified = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(max(lastmodified)))
    return lastmodified, size(totalsize)
# print out the list of buckets with the relevant information
def print_list(blist):
    '''Print one comma-separated line per bucket (NOTE: Python 2 print statements).

    Assumes each element exposes location/creationdate/numoffiles/
    totalfilesize/lastmodified/name attributes — confirm against the caller.
    '''
    print "Region, Date Created, Number of Files, Total File Size, Last Modified, Bucket Name"
    for each in blist:
        print each.location, each.creationdate, each.numoffiles, each.totalfilesize, each.lastmodified, each.name
# get the complete list of objects from the bucket, following pagination
def getlist(input):
    '''Return every object dict in the bucket (optionally under *prefix*).

    *input* is a (client, bucketname, prefix) tuple; pagination is followed
    via ContinuationToken until IsTruncated is false.

    Fix: the original indexed templist['Contents'] unconditionally in the
    no-prefix branch and in the pagination loop, raising KeyError for empty
    buckets/pages; every access is now guarded.
    '''
    client, bucketname, prefix = input
    mylist = []
    kwargs = {'Bucket': bucketname}
    if prefix:
        kwargs['Prefix'] = prefix
    templist = client.list_objects_v2(**kwargs)
    # 'Contents' is absent when a page holds no objects
    if 'Contents' in templist:
        mylist.extend(templist['Contents'])
    while templist.get('IsTruncated'):
        kwargs['ContinuationToken'] = templist['NextContinuationToken']
        templist = client.list_objects_v2(**kwargs)
        if 'Contents' in templist:
            mylist.extend(templist['Contents'])
    return mylist
if __name__ == "__main__":
    # Command-line entry point (NOTE: Python 2 print statements throughout).
    import argparse, boto3
    from botocore import UNSIGNED
    from botocore.client import Config
    parse = argparse.ArgumentParser(description="s3bucket tool")
    parse.add_argument("-b", "--bucket", dest="bucketname", required=True, help="Bucket name")
    parse.add_argument("-p", "--prefix", dest="prefix", default='', help="bucket prefix")
    parse.add_argument("-a", "--anon", dest="anon", default='True', help="set client to anonymous")
    parse.add_argument("-s", "--sorted", dest="sorted", default='False', help="sort objects according to storage class")
    parse.add_argument("-id", dest="credfile", default="cred.json", help="json file containing the AWS credentials")
    args = parse.parse_args()
    print args
    #log in s3 client, sorted is disabled when using anonymous login
    if args.anon == 'True':
        args.sorted='False'
        try:
            s3 = boto3.client(service_name='s3', config=Config(signature_version=UNSIGNED))
        # NOTE(review): bare except hides real errors — narrow to botocore exceptions
        except:
            print("Unable to connect to S3 Service using annonymous client")
            exit()
    else:
        import json
        try:
            # accessing both keys up front validates the credential file shape
            cred = json.loads(open(args.credfile).read())
            cred['API'], cred['secret']
        except:
            print("Unable to load credentials")
            exit()
        try:
            s3 = boto3.client(service_name='s3', aws_access_key_id= cred['API'], aws_secret_access_key= cred['secret'])
        except:
            print("Unable to connect to S3 Service via AWS credentials")
            exit()
    #fetch objects from the bucket
    bucketname = args.bucketname
    mylist = getlist((s3, args.bucketname, args.prefix))
    #sort if required
    if args.sorted=="True":
        mylist.sort(key=lambda x: x['StorageClass'], reverse=False)
    #display output
    print "File Name, File Size, Storage Class"
    print "==================================="
    for each in mylist:
        print each['Key']+ ", "+size(each['Size']) + ", " + each['StorageClass']
    # NOTE(review): get_objectinfo is computed twice here — could be hoisted
    print "Last Modified: " + get_objectinfo(mylist)[0] + " Total Size: " + get_objectinfo(mylist)[1]
    print "Bucket Name: " + args.bucketname + " Total Number of files: " + str(len(mylist))
|
{
"content_hash": "24bd4c939dac8f73438b6a3589e41564",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 142,
"avg_line_length": 42.16161616161616,
"alnum_prop": 0.6499760421657882,
"repo_name": "kimyong/s3bucket",
"id": "91c0dbc0c249a0ecc7aa4cbf0121b8815933bc9b",
"size": "4198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "s3bucket.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4198"
}
],
"symlink_target": ""
}
|
import psutil

# Abaqus solver executables that must be terminated between runs.
KILLPROCS = ("pre.exe", "standard.exe")

for proc in psutil.process_iter():
    try:
        if proc.name() in KILLPROCS:
            proc.kill()
            # give the process up to a minute to actually terminate
            proc.wait(timeout=60)
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        # Fix: proc.name()/kill() can raise for processes that exit during
        # iteration or that we lack permission for; skip them instead of
        # aborting the whole sweep.
        pass
|
{
"content_hash": "3adeb8ce86362ceb2ab9beac8ed0fb80",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 38,
"avg_line_length": 28.333333333333332,
"alnum_prop": 0.6470588235294118,
"repo_name": "ucdavis-kanvinde-group/abaqus-pso-calibration",
"id": "dbfae040922a96692f1b542485a7c1422d6d21f6",
"size": "170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Calibration/kill_abaqus.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Matlab",
"bytes": "57095"
},
{
"name": "Python",
"bytes": "2886"
}
],
"symlink_target": ""
}
|
import pickle
import functools
import tornado.web
from lp import *
from attrs import *
from task import *
def call(fn):
    """Invoke *fn* immediately with no arguments.

    Used as a decorator to run setup code at definition time; note that
    because this returns None, the decorated name is rebound to None.
    """
    fn()
class viewer(tornado.web.RequestHandler, Attrs):
    """HTTP handler reporting the completion progress of a task server.

    NOTE: Python 2 code (print statements). ``viewport`` presumably comes
    from the Attrs mixin — confirm in attrs.py.
    """

    # (url pattern, handler class, init kwargs) registrations collected by observe()
    viewers = []
    # shared tornado Application, created by init()
    app = None

    @classmethod
    def observe(cls, model, point=r"/"):
        """Register *model* to be served at URL *point*."""
        cls.viewers.append((point, cls, dict(model=model)))

    @classmethod
    def init(cls):
        """Build the tornado app from all registered viewers and start listening."""
        cls.app = tornado.web.Application(cls.viewers)
        cls.app.listen(cls.viewport)
        print "Viewer started at", cls.viewport

    def initialize(self, model):
        # tornado calls this with the kwargs registered in observe()
        self.model = model

    @property
    def progress(self):
        # fraction of completed tasks; float() avoids py2 integer division
        return float(len(self.model.complete))/self.model.count_tasks

    def get(self):
        self.write("Hello!<br/>")
        self.write("Currently complete %.2f%%" % self.progress)
class server(TCPServer, Attrs):
    """TCP task server: hands out pickled tasks to connecting workers.

    NOTE: Python 2 code (print statements, iterator ``.next()``).
    SECURITY NOTE(review): pickle over the network fully trusts the peer.
    """

    @classmethod
    def spawn(cls, fn):
        """Decorator: *fn* generates the task list; the wrapped callable
        starts the server, the progress viewer, and the IO loop."""
        @functools.wraps(fn)
        def wraped():
            cls.instance = cls(fn)
            viewer.observe(cls.instance)
            viewer.init()
            lp.start()
        return wraped

    def __init__(self, genfn):
        TCPServer.__init__(self)
        self.tasks = list(genfn())
        self.count_tasks = len(self.tasks)
        # single (index, task) iterator shared by every client connection
        self.tasks = iter(enumerate(self.tasks))
        self.complete = list()
        self.listen(self.port)
        self.start()
        print "Server started at", self.port

    @gen.coroutine
    def handle_stream(self, stream, address):
        """Per-connection loop: read a (prev, result) report, send the next task."""
        while True:
            try:
                i, task = self.tasks.next()
                data = yield stream.read_until(self.sep)
                prev, data = pickle.loads(data)
                # "hi" is the initial handshake; anything else is a result report
                if data != "hi":
                    print "From %s:%s received %s for %s" % (address[0], address[1], data, prev)
                    self.complete.append((prev, data))
                else:
                    print "Connected:", address
                task = Task(*task[0], **task[1])
                yield stream.write(pickle.dumps(task) + self.sep)
            except StreamClosedError:
                return
            except StopIteration:
                break
        # no more tasks: keep answering every request with the "no task" sentinel
        while True:
            yield stream.write(pickle.dumps(self.notask) + self.sep)
class client(TCPClient, Attrs):
    """TCP worker client: fetches pickled tasks, runs *workfn*, reports results.

    NOTE: Python 2 code (print statements, xrange). ``port``/``sep``/``greet``
    presumably come from the Attrs mixin — confirm in attrs.py.
    """

    # initial payload sent to the server: (no previous task, greeting)
    ret = (None, Attrs.greet)

    @classmethod
    def spawn(cls, fn):
        """Decorator: *fn* is the work function; the wrapped callable takes a
        *count* and forks that many worker processes."""
        @classmethod
        def spawner(cls):
            instance = cls(fn)
            lp.start()
        cls.spawner = spawner
        @functools.wraps(fn)
        def wraped(count):
            from multiprocessing import Process
            processes = []
            for i in xrange(0, count):
                processes.append(Process(target=cls.spawner))
                processes[-1].start()
            for p in processes:
                p.join()
        return wraped

    def __init__(self, workfn):
        TCPClient.__init__(self)
        self.stream = None
        self.workfn = workfn
        # @call runs this coroutine immediately, kicking off the connection
        @call
        @gen.coroutine
        def wrap():
            print "Connecting to", self.port
            self.stream = yield self.connect("localhost", self.port)
            self.stream.set_nodelay(True)
        # schedule the worker loop; take() spin-waits until the stream exists
        lp.later(self.take)

    @gen.coroutine
    def take(self):
        """Worker loop: report the previous result, receive and run the next task."""
        import time
        while self.stream is None:
            print "Still connecting to %d..." % self.port
            yield gen.Task(IOLoop.instance().add_timeout, time.time() + 0.05)
        print "Connected!"
        try:
            while True:
                self.stream.write(pickle.dumps(self.ret) + self.sep)
                self.ret = yield self.stream.read_until(self.sep)
                # SECURITY NOTE(review): unpickling data from the network
                self.ret = pickle.loads(self.ret)
                print "Received task:", self.ret
                self.ret = self.ret, self.workfn(*self.ret, **self.ret)
                print "Complete task (%s): %s" % self.ret
        except StreamClosedError:
            return
|
{
"content_hash": "0160ec66ed82411bc1b9df85a083d74a",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 96,
"avg_line_length": 27.8,
"alnum_prop": 0.539825282631038,
"repo_name": "Deerenaros/netpy",
"id": "1016220a9400302c992669ae841af5c353e0b21e",
"size": "3892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "netpy/butterfly/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7484"
}
],
"symlink_target": ""
}
|
from yepes.forms.fields import *
from yepes.forms.inline_model import InlineModelForm
from yepes.forms.widgets import *
|
{
"content_hash": "7a514f8b56df2d74f5353944fd11a6c9",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 52,
"avg_line_length": 40,
"alnum_prop": 0.825,
"repo_name": "samuelmaudo/yepes",
"id": "70f16b624f0709ef9ba7b6ce5ca80dd31491b10b",
"size": "144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yepes/forms/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "1485"
},
{
"name": "CSS",
"bytes": "2805"
},
{
"name": "HTML",
"bytes": "18543"
},
{
"name": "JavaScript",
"bytes": "56039"
},
{
"name": "Python",
"bytes": "2415982"
}
],
"symlink_target": ""
}
|
"""Base specific data provider."""
import typing as t
from mimesis.providers import BaseDataProvider
__all__ = ["BaseSpecProvider"]
class BaseSpecProvider(BaseDataProvider):
    """Base provider for specific data providers."""

    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
        """Initialize attributes of superclass."""
        super().__init__(*args, **kwargs)
        # Name of the bundled data file read by subclasses (presumably loaded
        # by BaseDataProvider machinery — confirm there).
        self._datafile = "builtin.json"
|
{
"content_hash": "8f652752b0bfa19debd5bec979f0e193",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 62,
"avg_line_length": 28.533333333333335,
"alnum_prop": 0.6542056074766355,
"repo_name": "lk-geimfari/mimesis",
"id": "cb0cdd5db7f615ab472e346f7a52e9685878fd88",
"size": "428",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mimesis/builtins/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "720"
},
{
"name": "Python",
"bytes": "892922"
},
{
"name": "Shell",
"bytes": "825"
}
],
"symlink_target": ""
}
|
import os
import PythonQt
from PythonQt import QtCore, QtGui
from director.propertyset import PropertySet, PropertyAttributes, PropertyPanelHelper
from director import callbacks
class Icons(object):
    """Registry of icon ids used by the object model tree.

    Values are either Qt standard-icon enum values (ints) or Qt resource-path
    strings understood by QIcon.
    """

    Directory = int(QtGui.QStyle.SP_DirIcon)
    Axes = ':/images/axes_icon.png'
    Eye = ':/images/eye_icon.png'
    EyeOff = ':/images/eye_icon_gray.png'
    Matlab = ':/images/matlab_logo.png'
    Robot = ':/images/robot_icon.png'
    Hammer = ':/images/hammer_icon.png'
    Laser = ':/images/laser_icon.jpg'
    Feet = ':/images/feet.png'
    Hand = ':/images/claw.png'
    Octomap = ':/images/octomap.jpg'
    Collections = ':/images/rubix_cube.jpg'

    @staticmethod
    def getIcon(iconId):
        '''
        Return a QIcon given an icon id as a string or int.
        '''
        # Fix: isinstance() is the correct type check (handles int subclasses,
        # unlike the original ``type(iconId) == int`` comparison).
        if isinstance(iconId, int):
            return QtGui.QApplication.style().standardIcon(iconId)
        else:
            return QtGui.QIcon(iconId)
class ObjectModelItem(object):
    """Base class for items stored in an ObjectModelTree.

    Wraps a PropertySet and forwards property changes to the owning tree
    (``self._tree``) so the UI widgets stay in sync.
    """

    REMOVED_FROM_OBJECT_MODEL = 'REMOVED_FROM_OBJECT_MODEL'

    def __getstate__(self):
        #print 'getstate called on:', self
        # only the properties are pickled; the tree reference is dropped
        d = dict(properties=self.properties)
        return d

    def __setstate__(self, state):
        #print 'setstate called on:', self
        self._tree = None
        self.properties = state['properties']

    def __init__(self, name, icon=Icons.Robot, properties=None):
        #print 'init called on:', self
        self._tree = None
        self.callbacks = callbacks.CallbackRegistry([self.REMOVED_FROM_OBJECT_MODEL])
        self.properties = properties or PropertySet()
        self.properties.connectPropertyChanged(self._onPropertyChanged)
        self.properties.connectPropertyAdded(self._onPropertyAdded)
        self.properties.connectPropertyAttributeChanged(self._onPropertyAttributeChanged)
        # hidden bookkeeping properties consumed by the tree widget
        self.addProperty('Icon', icon, attributes=PropertyAttributes(hidden=True))
        self.addProperty('Deletable', True, attributes=PropertyAttributes(hidden=True))
        self.addProperty('Name', name, attributes=PropertyAttributes(hidden=True))

    def setIcon(self, icon):
        self.setProperty('Icon', icon)

    # --- thin delegation to the underlying PropertySet -------------------

    def propertyNames(self):
        return self.properties.propertyNames()

    def hasProperty(self, propertyName):
        return self.properties.hasProperty(propertyName)

    def getProperty(self, propertyName):
        return self.properties.getProperty(propertyName)

    def getPropertyEnumValue(self, propertyName):
        return self.properties.getPropertyEnumValue(propertyName)

    def addProperty(self, propertyName, propertyValue, attributes=None):
        self.properties.addProperty(propertyName, propertyValue, attributes)

    def removeProperty(self, propertyName):
        self.properties.removeProperty(propertyName)

    def setProperty(self, propertyName, propertyValue):
        self.properties.setProperty(propertyName, propertyValue)

    def getPropertyAttribute(self, propertyName, propertyAttribute):
        return self.properties.getPropertyAttribute(propertyName, propertyAttribute)

    def setPropertyAttribute(self, propertyName, propertyAttribute, value):
        self.properties.setPropertyAttribute(propertyName, propertyAttribute, value)

    def _onPropertyChanged(self, propertySet, propertyName):
        # notify the owning tree so widgets are refreshed
        if self._tree is not None:
            self._tree._onPropertyValueChanged(self, propertyName)

    def _onPropertyAdded(self, propertySet, propertyName):
        pass

    def _onPropertyAttributeChanged(self, propertySet, propertyName, propertyAttribute):
        pass

    def hasDataSet(self, dataSet):
        # subclasses holding VTK-style data override this
        return False

    def getActionNames(self):
        """Context-menu action names; subclasses extend this list."""
        actions = ['Rename']
        return actions

    def onAction(self, action):
        """Handle a context-menu action selected for this item."""
        if action == 'Rename':
            name = self.getProperty('Name')
            inputDialog = QtGui.QInputDialog()
            inputDialog.setInputMode(inputDialog.TextInput)
            inputDialog.setLabelText('Name:')
            inputDialog.setWindowTitle('Enter name')
            inputDialog.setTextValue(name)
            result = inputDialog.exec_()
            if result:
                self.rename(inputDialog.textValue())

    def rename(self, name, renameChildren=True):
        """Rename this item; children whose names share the old prefix follow."""
        oldName = self.getProperty('Name')
        if renameChildren:
            for child in self.children():
                childName = child.getProperty('Name')
                if childName.startswith(oldName):
                    child.setProperty('Name', name + childName[len(oldName):])
        self.setProperty('Name', name)

    def getObjectTree(self):
        return self._tree

    def onRemoveFromObjectModel(self):
        # hook for subclasses; called just before removal from the tree
        pass

    def connectRemovedFromObjectModel(self, func):
        return self.callbacks.connect(self.REMOVED_FROM_OBJECT_MODEL, func)

    def disconnectRemovedFromObjectModel(self, callbackId):
        self.callbacks.disconnect(callbackId)

    def parent(self):
        if self._tree is not None:
            return self._tree.getObjectParent(self)

    def children(self):
        if self._tree is not None:
            return self._tree.getObjectChildren(self)
        else:
            return []

    def findChild(self, name):
        if self._tree is not None:
            return self._tree.findChildByName(self, name)
class ContainerItem(ObjectModelItem):
    """Folder-style grouping item; toggling its 'Visible' property cascades
    the new value to every child that also has a 'Visible' property."""

    def __init__(self, name):
        ObjectModelItem.__init__(self, name, Icons.Directory)
        self.addProperty('Visible', True)

    def _onPropertyChanged(self, propertySet, propertyName):
        ObjectModelItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName != 'Visible':
            return
        newValue = self.getProperty(propertyName)
        for child in self.children():
            if child.hasProperty(propertyName):
                child.setProperty(propertyName, newValue)
class ObjectModelTree(object):
    """Model managing ObjectModelItems and their Qt tree/properties widgets.

    Maps QTreeWidgetItems to ObjectModelItems, keeps the properties panel in
    sync with the selection, and handles context menus and deletion.
    NOTE: Python 2 idioms (xrange, iteritems).
    """

    ACTION_SELECTED = 'ACTION_SELECTED'
    SELECTION_CHANGED = 'SELECTION_CHANGED'

    def __init__(self):
        self._treeWidget = None
        self._propertiesPanel = None
        # maps QTreeWidgetItem -> ObjectModelItem
        self._objects = {}
        # guards against feedback loops while the panel is being repopulated
        self._blockSignals = False
        self.actions = []
        self.callbacks = callbacks.CallbackRegistry([self.ACTION_SELECTED, self.SELECTION_CHANGED])

    def getTreeWidget(self):
        return self._treeWidget

    def getPropertiesPanel(self):
        return self._propertiesPanel

    def getObjectParent(self, obj):
        """Return the parent object of *obj*, or None for top-level items."""
        item = self._getItemForObject(obj)
        if item.parent():
            return self._getObjectForItem(item.parent())

    def getObjectChildren(self, obj):
        item = self._getItemForObject(obj)
        return [self._getObjectForItem(item.child(i)) for i in xrange(item.childCount())]

    def getTopLevelObjects(self):
        return [self._getObjectForItem(self._treeWidget.topLevelItem(i))
                for i in xrange(self._treeWidget.topLevelItemCount)]

    def getActiveObject(self):
        """Return the single selected object, or None."""
        item = self._getSelectedItem()
        return self._objects[item] if item is not None else None

    def setActiveObject(self, obj):
        item = self._getItemForObject(obj)
        if item:
            tree = self.getTreeWidget()
            tree.setCurrentItem(item)
            tree.scrollToItem(item)
        else:
            self.clearSelection()

    def clearSelection(self):
        self.getTreeWidget().setCurrentItem(None)

    def getObjects(self):
        return self._objects.values()

    def _getSelectedItem(self):
        # only a single-item selection counts as "selected"
        items = self.getTreeWidget().selectedItems()
        return items[0] if len(items) == 1 else None

    def _getItemForObject(self, obj):
        # reverse lookup is a linear scan over the item->object map
        for item, itemObj in self._objects.iteritems():
            if itemObj == obj:
                return item

    def _getObjectForItem(self, item):
        return self._objects[item]

    def findObjectByName(self, name, parent=None):
        if parent:
            return self.findChildByName(parent, name)
        for obj in self._objects.values():
            if obj.getProperty('Name') == name:
                return obj

    def findChildByName(self, parent, name):
        for child in self.getObjectChildren(parent):
            if child.getProperty('Name') == name:
                return child

    def onPropertyChanged(self, prop):
        """Panel -> model: push an edited panel property into the active object."""
        if self._blockSignals:
            return
        propertiesPanel = self.getPropertiesPanel()
        propertySet = self.getActiveObject().properties
        PropertyPanelHelper.setPropertyFromPanel(prop, propertiesPanel, propertySet)

    def _onTreeSelectionChanged(self):
        # rebuild the properties panel for the new selection, suppressing
        # the panel's change signals while it is repopulated
        panel = self.getPropertiesPanel()
        self._blockSignals = True
        panel.clear()
        self._blockSignals = False
        obj = self.getActiveObject()
        if obj:
            self._blockSignals = True
            PropertyPanelHelper.addPropertiesToPanel(obj.properties, panel)
            self._blockSignals = False
        self.callbacks.process(self.SELECTION_CHANGED, self)

    def updateVisIcon(self, obj):
        if not obj.hasProperty('Visible'):
            return
        isVisible = obj.getProperty('Visible')
        item = self._getItemForObject(obj)
        item.setIcon(1, Icons.getIcon(Icons.Eye if isVisible else Icons.EyeOff))

    def updateObjectIcon(self, obj):
        item = self._getItemForObject(obj)
        item.setIcon(0, Icons.getIcon(obj.getProperty('Icon')))

    def updateObjectName(self, obj):
        item = self._getItemForObject(obj)
        item.setText(0, obj.getProperty('Name'))

    def _onPropertyValueChanged(self, obj, propertyName):
        """Model -> view: refresh widgets after an object property changed."""
        if propertyName == 'Visible':
            self.updateVisIcon(obj)
        elif propertyName == 'Name':
            self.updateObjectName(obj)
        elif propertyName == 'Icon':
            self.updateObjectIcon(obj)
        if obj == self.getActiveObject():
            self._blockSignals = True
            PropertyPanelHelper.onPropertyValueChanged(self.getPropertiesPanel(), obj.properties, propertyName)
            self._blockSignals = False

    def _onItemClicked(self, item, column):
        obj = self._objects[item]
        # column 1 holds the visibility (eye) icon; clicking it toggles
        if column == 1 and obj.hasProperty('Visible'):
            obj.setProperty('Visible', not obj.getProperty('Visible'))
            self.updateVisIcon(obj)

    def _removeItemFromObjectModel(self, item):
        """Depth-first removal: children first, then the item itself."""
        while item.childCount():
            self._removeItemFromObjectModel(item.child(0))
        try:
            obj = self._getObjectForItem(item)
        except KeyError:
            return
        obj.callbacks.process(obj.REMOVED_FROM_OBJECT_MODEL, self, obj)
        obj.onRemoveFromObjectModel()
        obj._tree = None
        if item.parent():
            item.parent().removeChild(item)
        else:
            tree = self.getTreeWidget()
            tree.takeTopLevelItem(tree.indexOfTopLevelItem(item))
        del self._objects[item]

    def removeFromObjectModel(self, obj):
        if obj is None:
            return
        item = self._getItemForObject(obj)
        if item:
            self._removeItemFromObjectModel(item)

    def addToObjectModel(self, obj, parentObj=None):
        """Insert *obj* (not yet owned by any tree) under *parentObj*."""
        assert obj._tree is None
        parentItem = self._getItemForObject(parentObj)
        objName = obj.getProperty('Name')
        item = QtGui.QTreeWidgetItem(parentItem, [objName])
        item.setIcon(0, Icons.getIcon(obj.getProperty('Icon')))
        obj._tree = self
        self._objects[item] = obj
        self.updateVisIcon(obj)
        if parentItem is None:
            tree = self.getTreeWidget()
            tree.addTopLevelItem(item)
            tree.expandItem(item)

    def collapse(self, obj):
        item = self._getItemForObject(obj)
        if item:
            self.getTreeWidget().collapseItem(item)

    def expand(self, obj):
        item = self._getItemForObject(obj)
        if item:
            self.getTreeWidget().expandItem(item)

    def addContainer(self, name, parentObj=None):
        obj = ContainerItem(name)
        self.addToObjectModel(obj, parentObj)
        return obj

    def getOrCreateContainer(self, name, parentObj=None):
        if parentObj:
            containerObj = parentObj.findChild(name)
        else:
            containerObj = self.findObjectByName(name)
        if not containerObj:
            containerObj = self.addContainer(name, parentObj)
        return containerObj

    def _onShowContextMenu(self, clickPosition):
        obj = self.getActiveObject()
        if not obj:
            self._onTreeContextMenu(clickPosition)
        else:
            self._onObjectContextMenu(obj, clickPosition)

    def _showMenu(self, actions, clickPosition):
        """Pop up *actions* at the click point; return the chosen text or None."""
        if not actions:
            return None
        globalPos = self.getTreeWidget().viewport().mapToGlobal(clickPosition)
        menu = QtGui.QMenu()
        for name in actions:
            if not name:
                # falsy entries act as menu separators
                menu.addSeparator()
            else:
                menu.addAction(name)
        selectedAction = menu.exec_(globalPos)
        if selectedAction is not None:
            return selectedAction.text
        else:
            return None

    def _onTreeContextMenu(self, clickPosition):
        selectedAction = self._showMenu(self.actions, clickPosition)
        if selectedAction:
            self.callbacks.process(self.ACTION_SELECTED, self, selectedAction)

    def _onObjectContextMenu(self, obj, clickPosition):
        actions = list(obj.getActionNames())
        if obj.hasProperty('Deletable') and obj.getProperty('Deletable'):
            actions.append(None)
            actions.append('Remove')
        selectedAction = self._showMenu(actions, clickPosition)
        if selectedAction == 'Remove':
            self.removeFromObjectModel(obj)
        elif selectedAction:
            obj.onAction(selectedAction)

    def removeSelectedItems(self):
        for item in self.getTreeWidget().selectedItems():
            obj = self._getObjectForItem(item)
            if (not obj.hasProperty('Deletable')) or obj.getProperty('Deletable'):
                self._removeItemFromObjectModel(item)

    def _filterEvent(self, obj, event):
        # the Delete key removes the selected (deletable) items
        if event.type() == QtCore.QEvent.KeyPress:
            if event.key() == QtCore.Qt.Key_Delete:
                self._eventFilter.setEventHandlerResult(True)
                self.removeSelectedItems()

    def connectSelectionChanged(self, func):
        return self.callbacks.connect(self.SELECTION_CHANGED, func)

    def disconnectSelectionChanged(self, callbackId):
        self.callbacks.disconnect(callbackId)

    def init(self, treeWidget, propertiesPanel):
        """Wire this model to its Qt widgets and install all signal handlers."""
        self._treeWidget = treeWidget
        self._propertiesPanel = propertiesPanel
        propertiesPanel.clear()
        propertiesPanel.setBrowserModeToWidget()
        propertiesPanel.connect('propertyValueChanged(QtVariantProperty*)', self.onPropertyChanged)
        treeWidget.setColumnCount(2)
        treeWidget.setHeaderLabels(['Name', ''])
        treeWidget.headerItem().setIcon(1, Icons.getIcon(Icons.Eye))
        treeWidget.header().setVisible(True)
        treeWidget.header().setStretchLastSection(False)
        treeWidget.header().setResizeMode(0, QtGui.QHeaderView.Stretch)
        treeWidget.header().setResizeMode(1, QtGui.QHeaderView.Fixed)
        treeWidget.setColumnWidth(1, 24)
        treeWidget.connect('itemSelectionChanged()', self._onTreeSelectionChanged)
        treeWidget.connect('itemClicked(QTreeWidgetItem*, int)', self._onItemClicked)
        treeWidget.connect('customContextMenuRequested(const QPoint&)', self._onShowContextMenu)
        treeWidget.setContextMenuPolicy(PythonQt.QtCore.Qt.CustomContextMenu);
        self._eventFilter = PythonQt.dd.ddPythonEventFilter()
        self._eventFilter.addFilteredEventType(QtCore.QEvent.KeyPress)
        self._eventFilter.connect('handleEvent(QObject*, QEvent*)', self._filterEvent)
        treeWidget.installEventFilter(self._eventFilter)
#######################
# Module-level convenience API: every function below delegates to a single
# shared default ObjectModelTree instance, so callers can use the object
# model without holding a reference to the tree themselves.
_t = ObjectModelTree()

def getDefaultObjectModel():
    # The shared ObjectModelTree singleton.
    return _t

def getActiveObject():
    return _t.getActiveObject()

def setActiveObject(obj):
    _t.setActiveObject(obj)

def clearSelection():
    _t.clearSelection()

def getObjects():
    return _t.getObjects()

def findObjectByName(name, parent=None):
    return _t.findObjectByName(name, parent)

def removeFromObjectModel(obj):
    _t.removeFromObjectModel(obj)

def addToObjectModel(obj, parentObj=None):
    _t.addToObjectModel(obj, parentObj)

def collapse(obj):
    _t.collapse(obj)

def expand(obj):
    _t.expand(obj)

def addContainer(name, parentObj=None):
    return _t.addContainer(name, parentObj)

def getOrCreateContainer(name, parentObj=None):
    return _t.getOrCreateContainer(name, parentObj)
def init(objectTree=None, propertiesPanel=None):
    """Initialize the default ObjectModelTree with the given widgets.

    Any widget not supplied is created on the fly.  Calling this more than
    once is a no-op: the singleton keeps its first widgets.
    """
    if not _t._treeWidget:
        _t.init(objectTree or QtGui.QTreeWidget(),
                propertiesPanel or PythonQt.dd.ddPropertiesPanel())
|
{
"content_hash": "906bc81cc3ed00d27cdc06158a9c948d",
"timestamp": "",
"source": "github",
"line_count": 542,
"max_line_length": 111,
"avg_line_length": 31.29520295202952,
"alnum_prop": 0.6522815705695083,
"repo_name": "RobotLocomotion/director",
"id": "8cba1c6f27afeab28b901f95d385bd5f7c6d0ca8",
"size": "16962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/director/objectmodel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "119759"
},
{
"name": "C++",
"bytes": "500237"
},
{
"name": "CMake",
"bytes": "52624"
},
{
"name": "GLSL",
"bytes": "15443"
},
{
"name": "Makefile",
"bytes": "5014"
},
{
"name": "Matlab",
"bytes": "161948"
},
{
"name": "Python",
"bytes": "2128090"
},
{
"name": "Shell",
"bytes": "6481"
}
],
"symlink_target": ""
}
|
'''Vector module
This module contains all Vector class definition
'''
import numpy as np
from numpy import linalg
class Vector():
    '''Base class for an n-dimensional float vector.

    *Example:*

    ```
    v1 += 10                # Add 10 to all components
    v1 += [10, 9, 8]        # Add a different value to each component
    dot_product = v1 @ v2   # Dot product uses the matmul operator
    ```

    **Note: Vector is just a wrapper around a numpy array. You can
    get directly the numpy array (via ``coordinates``) if you need more
    power.**

    NOTE: the arithmetic operators (``+``, ``-``, ``*``, ``/``) mutate the
    left operand in place and return it, rather than returning a new vector.
    '''

    def __init__(self, values):
        # Components are stored as a float32 numpy array.
        self._values = np.array(values, dtype=np.float32)

    def __iter__(self):
        return iter(self._values)

    def __len__(self):
        return len(self._values)

    def __add__(self, value):
        self._values += value
        return self

    def __iadd__(self, value):
        return self.__add__(value)

    def __sub__(self, value):
        self._values -= value
        return self

    def __isub__(self, value):
        return self.__sub__(value)

    def __mul__(self, value):
        self._values *= value
        return self

    def __imul__(self, value):
        return self.__mul__(value)

    def __matmul__(self, value):
        # Dot product; returns a scalar for 1-D operands.
        return self._values @ value

    def __imatmul__(self, value):
        return self.__matmul__(value)

    def __truediv__(self, value):
        self._values /= value
        return self

    def __itruediv__(self, value):
        return self.__truediv__(value)

    def __str__(self):
        return str(self._values)

    def __eq__(self, other):
        # Bug fix: the original read ``other.values``, an attribute that does
        # not exist (the public accessor is ``coordinates``), so equality
        # comparison always raised AttributeError.
        return all(self._values == other.coordinates)

    def __copy__(self):
        return self.__class__(self._values)

    @property
    def coordinates(self):
        # Underlying numpy array of components.
        return self._values

    @coordinates.setter
    def coordinates(self, value):
        self._values = value

    @property
    def size(self):
        # Euclidean (L2) norm of the vector.
        return linalg.norm(self._values)

    def nor(self):
        # Normalize in place (scales components so the norm becomes 1).
        return self * (1 / self.size)

    def crs(self, value):
        # Cross product with another vector/sequence; returns a numpy array.
        return np.cross(self._values, value)
class XMixin:
    """Adds an ``x`` property aliasing component 0 of ``_values``."""

    def _get_x(self):
        return self._values[0]

    def _set_x(self, value):
        self._values[0] = value

    x = property(_get_x, _set_x)
class YMixin:
    """Adds a ``y`` property aliasing component 1 of ``_values``."""

    def _get_y(self):
        return self._values[1]

    def _set_y(self, value):
        self._values[1] = value

    y = property(_get_y, _set_y)
class ZMixin:
    """Adds a ``z`` property aliasing component 2 of ``_values``."""

    def _get_z(self):
        return self._values[2]

    def _set_z(self, value):
        self._values[2] = value

    z = property(_get_z, _set_z)
class Vector2(Vector, XMixin, YMixin):
    """2D vector with ``x``/``y`` component accessors."""

    def __init__(self, *args):
        """Create a Vector2.

        Accepts no arguments (zero vector), two scalar components, or a
        single iterable of two components.  The single-iterable form fixes
        ``copy.copy``: ``Vector.__copy__`` calls ``self.__class__(array)``
        with one argument, which the original 0-or-2 rule rejected.

        Raises:
            ValueError: if the arguments do not yield exactly 2 components.
        """
        if not args:
            args = (0, 0)
        elif len(args) == 1:
            try:
                # A single iterable holding both components.
                args = tuple(args[0])
            except TypeError:
                raise ValueError("Vector2 needs 2 components")
        if len(args) != 2:
            raise ValueError("Vector2 needs 2 components")
        super().__init__(args)
class Vector3(Vector, XMixin, YMixin, ZMixin):
    """3D vector with ``x``/``y``/``z`` component accessors."""

    def __init__(self, *args):
        """Create a Vector3.

        Accepts no arguments (zero vector), three scalar components, or a
        single iterable of three components.  The single-iterable form fixes
        ``copy.copy``: ``Vector.__copy__`` calls ``self.__class__(array)``
        with one argument, which the original 0-or-3 rule rejected.

        Raises:
            ValueError: if the arguments do not yield exactly 3 components.
        """
        if not args:
            args = (0, 0, 0)
        elif len(args) == 1:
            try:
                # A single iterable holding all three components.
                args = tuple(args[0])
            except TypeError:
                raise ValueError("Vector3 needs 3 components")
        if len(args) != 3:
            raise ValueError("Vector3 needs 3 components")
        super().__init__(args)
# Vector2 constants: unit basis vectors and the zero vector.
# NOTE: these are shared mutable instances -- Vector arithmetic mutates the
# left operand in place, so e.g. ``Vector2.X += 1`` would corrupt the
# constant for every user of the module.
Vector2.X = Vector2(1, 0)
Vector2.Y = Vector2(0, 1)
Vector2.Zero = Vector2(0, 0)

# Vector3 constants (same caveat as above).
Vector3.X = Vector3(1, 0, 0)
Vector3.Y = Vector3(0, 1, 0)
Vector3.Z = Vector3(0, 0, 1)
Vector3.Zero = Vector3(0, 0, 0)
|
{
"content_hash": "e8b8fe683ab729c4179c2b12dc2bff0b",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 66,
"avg_line_length": 21.352941176470587,
"alnum_prop": 0.5564738292011019,
"repo_name": "Echelon9/vulk",
"id": "f71a1681b5b81c75518631acd330dae93695bc40",
"size": "3267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vulk/math/vector.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "32771"
},
{
"name": "Python",
"bytes": "173189"
},
{
"name": "Shell",
"bytes": "1402"
}
],
"symlink_target": ""
}
|
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import RequestFactory
from django.contrib.auth.models import AnonymousUser
from account.views import SignupView, LoginView
class SignupEnabledView(SignupView):
    """SignupView variant used by the tests; signup is always open."""

    def is_open(self):
        # Bypass whatever openness check the base view performs so the GET
        # test below is deterministic.
        return True
class SignupViewTestCase(TestCase):
    """GET on the (always-open) signup view should render successfully."""

    def setUp(self):
        self.factory = RequestFactory()

    def test_get(self):
        url = reverse("account_signup")
        req = self.factory.get(url)
        req.user = AnonymousUser()
        view = SignupEnabledView.as_view()
        self.assertEqual(view(req).status_code, 200)
class LoginViewTestCase(TestCase):
    """GET on the login view should render the login template."""

    def setUp(self):
        self.factory = RequestFactory()

    def test_get(self):
        req = self.factory.get(reverse("account_login"))
        req.user = AnonymousUser()
        response = LoginView.as_view()(req)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, ["account/login.html"])
|
{
"content_hash": "019577f9e54cfceb6fe18d1eb916c89b",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 72,
"avg_line_length": 27.394736842105264,
"alnum_prop": 0.7031700288184438,
"repo_name": "mgpyh/django-user-accounts",
"id": "91539ba8b88fa9fe4418caef349593c20ff089c3",
"size": "1041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "account/tests/test_views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75724"
}
],
"symlink_target": ""
}
|
"""Adapter between Gumbo and html5lib.
This exports one method, parse, with the same signature as html5lib.parse. It
takes the text to parse, and optionally an html5lib TreeBuilder to build the
tree, and gives back a DOM tree in that format. Example:
doc = parse(text, treebuilder='lxml')
"""
__author__ = 'jdtang@google.com (Jonathan Tang)'
import gumboc
# These should match html5lib.constants.namespaces, and be indexed by the enum
# values of gumboc.Namespace (order here: HTML, SVG, MathML).
_NAMESPACES = [
    'http://www.w3.org/1999/xhtml',
    'http://www.w3.org/2000/svg',
    'http://www.w3.org/1998/Math/MathML',
]
def _convert_doctype(treebuilder, source_node):
if not source_node.has_doctype:
# Mimic html5lib behavior: if no doctype token, no doctype node.
return
treebuilder.insertDoctype({
'name': source_node.name.decode('utf-8'),
'publicId': source_node.public_identifier.decode('utf-8'),
'systemId': source_node.system_identifier.decode('utf-8'),
})
def _convert_attributes(source_node):
    """Build the html5lib attribute dict for a Gumbo element node.

    Un-namespaced attributes are keyed by plain name; namespaced ones by a
    (prefix, name, namespace-url) tuple.
    """
    def attr_key(attr):
        name = attr.name.decode('utf-8')
        if attr.namespace == gumboc.AttributeNamespace.NONE:
            return name
        # NOTE(review): attr.name is compared against the str 'xmlns' here,
        # while the decode() above suggests it is bytes -- verify against
        # gumboc whether this comparison can ever be true.
        prefix = repr(attr.namespace).lower() if attr.name != 'xmlns' else None
        return (prefix, name, attr.namespace.to_url())
    return {attr_key(a): a.value.decode('utf-8') for a in source_node.attributes}
def _convert_element(source_node):
    """Translate a Gumbo element/template node into html5lib token form."""
    allowed_types = (gumboc.NodeType.ELEMENT, gumboc.NodeType.TEMPLATE)
    if source_node.type not in allowed_types:
        # If-statement instead of assert so it runs with -O
        raise AssertionError(
            '_convert_element only works with elements; found %r' %
            source_node.type)
    element = source_node.v.element
    return {
        'name': element.tag_name.decode('utf-8'),
        'namespace': _NAMESPACES[element.tag_namespace.value],
        'data': _convert_attributes(source_node),
    }
def _insert_root(treebuilder, source_node, pop_element=True):
    """Insert the root element and, recursively, all of its children."""
    treebuilder.insertRoot(_convert_element(source_node))
    for child in source_node.children:
        _insert_node(treebuilder, child)
    if pop_element:
        treebuilder.openElements.pop()
def _insert_node(treebuilder, source_node):
    """Recursively insert a non-document Gumbo node into *treebuilder*."""
    assert source_node.type != gumboc.NodeType.DOCUMENT
    node_type = source_node.type
    if node_type == gumboc.NodeType.COMMENT:
        treebuilder.insertComment({'data': source_node.v.text.text.decode('utf-8')})
        return
    text_types = (gumboc.NodeType.TEXT,
                  gumboc.NodeType.WHITESPACE,
                  gumboc.NodeType.CDATA)
    if node_type in text_types:
        treebuilder.insertText(source_node.v.text.text.decode('utf-8'))
        return
    # Element (or template): insert, recurse into children, then pop.
    treebuilder.insertElementNormal(_convert_element(source_node))
    for child in source_node.v.element.children:
        _insert_node(treebuilder, child)
    treebuilder.openElements.pop()
class HTMLParser(object):
    """html5lib-compatible parser facade backed by Gumbo.

    *tree* is an html5lib TreeBuilder instance; results are built into it
    and returned via its getDocument()/getFragment() methods.
    """

    def __init__(self, tree):
        self.tree = tree

    @staticmethod
    def _read(text_or_file):
        # Accept either a file-like object or a plain string (previously
        # duplicated in parse() and parseFragment()).
        try:
            return text_or_file.read()
        except AttributeError:
            # Assume a string.
            return text_or_file

    def parse(self, text_or_file, **kwargs):
        """Parse a full document and return the tree builder's document."""
        text = self._read(text_or_file)
        with gumboc.parse(text, **kwargs) as output:
            _convert_doctype(self.tree, output.contents.document.contents)
            for node in output.contents.document.contents.children:
                if node.type == gumboc.NodeType.COMMENT:
                    self.tree.insertComment({'data': node.v.text.text.decode('utf-8')},
                                            self.tree.document)
                elif node.type in (gumboc.NodeType.ELEMENT, gumboc.NodeType.TEMPLATE):
                    _insert_root(self.tree, output.contents.root.contents)
                else:
                    # Bug fix: this was ``assert '<non-empty string>'``, which
                    # is always true and could never fire.
                    raise AssertionError(
                        'Only comments and <html> nodes allowed at the root')
        return self.tree.getDocument()

    def parseFragment(self, text_or_file, container, **kwargs):
        """Parse a fragment in the context of *container* (optionally 'ns tag')."""
        text = self._read(text_or_file)
        if ' ' in container:
            container_ns, container = container.split(' ')
        else:
            container_ns = 'html'
        with gumboc.parse(
            text,
            container=gumboc.Tag.from_str(container),
            container_namespace=getattr(gumboc.Namespace, container_ns.upper()),
            **kwargs) as output:
            for node in output.contents.document.contents.children:
                if node.type in (gumboc.NodeType.ELEMENT, gumboc.NodeType.TEMPLATE):
                    _insert_root(self.tree, output.contents.root.contents, False)
                else:
                    # Bug fix: formerly a no-op ``assert '<non-empty string>'``.
                    raise AssertionError('Malformed fragment parse (??)')
        return self.tree.getFragment()
|
{
"content_hash": "b86ca645791c6a9d285a4c999856a9af",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 82,
"avg_line_length": 35.92,
"alnum_prop": 0.6717149220489977,
"repo_name": "Sigil-Ebook/sigil-gumbo",
"id": "e3110479b2c9d8f90c767b301ee6bd5d0ae468f2",
"size": "5088",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/gumbo/html5lib_adapter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1210967"
},
{
"name": "C++",
"bytes": "138756"
},
{
"name": "CMake",
"bytes": "3609"
},
{
"name": "HTML",
"bytes": "9914178"
},
{
"name": "M4",
"bytes": "928"
},
{
"name": "Makefile",
"bytes": "4287"
},
{
"name": "Python",
"bytes": "64138"
},
{
"name": "Ragel",
"bytes": "128067"
},
{
"name": "Shell",
"bytes": "990"
}
],
"symlink_target": ""
}
|
"""\
=============================
Shutdown the Selector service
=============================
StopSelector asks the Selector service to shutdown; either immediately, or when
triggered by anything being sent to any of its inboxes.
Example Usage
-------------
Receive data from myserver.com port 1500, save it to a file, then finish::
Pipeline( TCPClient("myserver.com",1500),
SimpleFileWriter("received_data"),
StopSelector(),
).run()
Behaviour
---------
At initialisation specify whether StopSelector should wait to be triggered or
act immediately. The default behaviour is to act immediately
(waitForTrigger=False).
If asked, StopSelector will wait for anything to be sent to its "inbox" or
"control" inboxes. It will then immediately ask the Selector service to
shutdown, and immediately terminate.
Otherwise, StopSelector will do this as soon as it is activated, and will then
immediately terminate.
If it was triggered by a message being sent to the "control" inbox then this
will be sent out of the "signal" outbox just before termination. Otherwise a
producerFinished message will be sent on just before termination.
"""
from Axon.Component import component
from Axon.Ipc import producerFinished, shutdownMicroprocess
from Kamaelia.Internet.Selector import Selector
from Axon.Ipc import shutdown
class StopSelector(component):
    """\
    StopSelector([waitForTrigger]) -> new StopSelector component.

    Asks the Selector service to shutdown; either immediately, or when triggered
    by anything being sent to any of its inboxes.

    Keyword arguments::

    - waitForTrigger  -- True to wait to be triggered, else False (default=False)
    """

    Inboxes = { "inbox"   : "Anything, as trigger",
                "control" : "Shutdown signalling",
              }
    Outboxes = { "outbox" : "NOT USED",
                 "signal" : "Shutdown signalling",
                 "selector_shutdown" : "Ask the selector to shut down"
               }

    def __init__(self, waitForTrigger=False):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        super(StopSelector,self).__init__()
        self.waitForTrigger=waitForTrigger

    def main(self):
        """Main loop"""
        if self.waitForTrigger:
            # Block (cooperatively) until any message arrives on any inbox.
            while not self.anyReady():
                self.pause()
                yield 1

        # stop the selector: look up its shutdown service, link to it,
        # send a shutdown message, then unlink again
        selectorService, selectorShutdownService, newSelectorService = Selector.getSelectorServices(self.tracker) # get a reference to the Selector's services
        link = self.link((self,"selector_shutdown"),selectorShutdownService)
        self.send(shutdown(),"selector_shutdown")
        self.unlink(thelinkage=link)

        # Pass on whatever triggered us; otherwise report normal completion.
        if self.dataReady("control"):
            self.send(self.recv("control"), "signal")
        else:
            self.send(producerFinished(self), "signal")

# Components exported by this module (Kamaelia introspection convention).
__kamaelia_components__  = ( StopSelector, )
|
{
"content_hash": "63bc0fe90eda89f25284de18ff72abde",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 141,
"avg_line_length": 30.927835051546392,
"alnum_prop": 0.6443333333333333,
"repo_name": "sparkslabs/kamaelia",
"id": "d9007f551b4f3e0e43a341dff44b82daf25814a2",
"size": "3905",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Sketches/MH/MobileReframe/StopSelector.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3814"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "M4",
"bytes": "12224"
},
{
"name": "Makefile",
"bytes": "150947"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "OCaml",
"bytes": "643"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "504"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Python",
"bytes": "18900785"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "707588"
}
],
"symlink_target": ""
}
|
from django.shortcuts import render, HttpResponseRedirect
from apps.calc.measurement import measurement_obj
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
import json
import numpy as np
import apps.calc.measurement.calculus as calc
from apps.analysis.json import NumPyArangeEncoder
from apps.projects.models import Experiment, Project, Datarow, Value
import numpy as np
from django.conf import settings
from django.core.exceptions import PermissionDenied
from apps.projects.models import MeasurementInstruments
from datetime import datetime
from django.utils import timezone
# Create your views here.
@login_required
def index(request, experimentId):
    """Render the detail page for one experiment.

    Loads the experiment's datarows and values from the DB, serializes them
    to JSON for the front end, and caches the serialized data in the session
    for later filter operations.

    Raises PermissionDenied unless the requesting user owns the project or
    the project is public.
    """
    curruser_id = request.user.id

    # Fetch experiment/project once instead of re-querying per attribute
    # (the original issued six identical .get() queries).
    experiment = Experiment.objects.get(id=experimentId)
    projectId = experiment.project_id
    project = Project.objects.get(id=projectId)
    expowner_id = project.user_id

    if curruser_id != expowner_id and not project.visibility:
        raise PermissionDenied()

    # Datarow metadata.
    datarows = Datarow.objects.filter(experiment_id=experimentId)
    jsonHeader = np.asarray(datarows.values_list('name', flat=True))
    jsonEinheiten = np.asarray(datarows.values_list('unit', flat=True))
    zeitreihenSpalte = experiment.timerow
    datarow_id = datarows.values_list('id', flat=True)

    # One DB fetch per datarow (column) ...
    values_wo = [Value.objects.filter(datarow_id=row_id).values_list('value', flat=True)
                 for row_id in datarow_id]
    # ... then transpose so each row holds one sample across all datarows.
    jsonData = np.transpose(values_wo).astype(float)

    jsonHeader = json.dumps(jsonHeader, cls=NumPyArangeEncoder)
    jsonEinheiten = json.dumps(jsonEinheiten, cls=NumPyArangeEncoder)
    jsonData = json.dumps(jsonData, cls=NumPyArangeEncoder)
    zeitreihenSpalte = json.dumps(zeitreihenSpalte, cls=NumPyArangeEncoder)

    # Prepare the data for rendering.
    dataForRender = {
        'jsonData': jsonData,
        'jsonHeader': jsonHeader,
        'jsonEinheiten': jsonEinheiten,
        'zeitreihenSpalte': zeitreihenSpalte,
        'projectId': projectId,
        'experimentId': experimentId,
        'experimentName': experiment.name,
        # TODO: use a dedicated measurement date once the model provides one
        'experimentDateCreated': experiment.created,
        'experimentDescr': experiment.description,
        'projectName': project.name,
        'current_user_id': curruser_id,
        'experiment_owner_id': expowner_id,
    }

    # Cache the serialized measurement in the session so filters can be
    # applied later without re-reading the DB.
    request.session['measurementData'] = jsonData
    request.session['measurementHeader'] = jsonHeader
    request.session['measurementUnits'] = jsonEinheiten
    request.session['measurementTimeIndex'] = zeitreihenSpalte

    return render(request, "experiments/index.html", dataForRender)
# page to upload your csv
@login_required
def newE(request, id):
    """Render the CSV-upload page for creating a new experiment."""
    # Only the project owner may add experiments to it.
    if request.user.id != Project.objects.get(id=id).user_id:
        raise PermissionDenied()
    context = {
        'projectId': id,
        'dateFormat': settings.DATE_FORMAT,
    }
    return render(request, "experiments/new.html", context)
# German month name -> month number (1-12); hoisted to module level so the
# mapping is not rebuilt on every call.
_GERMAN_MONTHS = {'Januar': 1, 'Februar': 2, 'März': 3, 'April': 4, 'Mai': 5, 'Juni': 6,
                  'Juli': 7, 'August': 8, 'September': 9, 'Oktober': 10, 'November': 11, 'Dezember': 12}


def month_to_string(month):
    """Return the month number (1-12) for a German month name.

    Raises KeyError if *month* is not a valid German month name.
    """
    return _GERMAN_MONTHS[month]
# is called after the user uploaded his csv. file
@login_required
def newESave(request):
    """Persist an uploaded CSV data set as a new Experiment.

    Reads column metadata and the measurement matrix from POST, creates the
    Experiment plus one Datarow per column, bulk-inserts all values, and
    redirects to the new experiment's detail page.
    """
    # Column titles.
    header = json.loads(request.POST.get("jsonHeader", ""))
    # Default for the optional per-column fields: one None per column.
    empty_json = json.dumps([None] * len(header), cls=NumPyArangeEncoder)

    # Per-column metadata arrays.
    units = json.loads(request.POST.get("jsonEinheiten", ""))
    measurement_instruments = json.loads(request.POST.get("jsonMeasurementInstruments", ""))
    data_type = json.loads(request.POST.get("jsonDataType", empty_json))
    data_format = json.loads(request.POST.get("jsonDataFormat", empty_json))
    function_type = json.loads(request.POST.get("jsonFunctionType", empty_json))
    res_name = json.loads(request.POST.get("jsonResName", empty_json))
    res_node = json.loads(request.POST.get("jsonResNode", empty_json))
    res_dir = json.loads(request.POST.get("jsonResDir", empty_json))
    ref_name = json.loads(request.POST.get("jsonRefName", empty_json))
    ref_node = json.loads(request.POST.get("jsonRefNode", empty_json))
    ref_dir = json.loads(request.POST.get("jsonRefDir", empty_json))

    # Column index containing the x axis (= time).
    time_row = request.POST.get("zeitreihenSpalte", "")
    # Measurement matrix (rows = samples, columns = datarows).
    data = json.loads(request.POST.get("jsonData", ""))
    projectId = request.POST.get("projectId", "")
    experiment_name = request.POST.get("datensatzName", "")
    description = request.POST.get("experimentDescr", "")

    # Date the experiment took place, formatted like 'Montag, 12. März 2016'
    # (German); convert to a timezone-aware datetime, or 0 when not supplied.
    experimentDate = request.POST.get("erfassungsDatum", "")
    if experimentDate == '':
        experimentDate = 0
    else:
        parts = experimentDate.split(' ')
        experimentDate = datetime(int(parts[3]), month_to_string(parts[2]),
                                  int(parts[1].rstrip('.')))
        experimentDate = timezone.make_aware(experimentDate, timezone.get_current_timezone())

    if experimentDate == 0:
        new_experiment = Experiment(project_id=projectId, timerow=time_row,
                                    name=experiment_name, description=description)
    else:
        new_experiment = Experiment(project_id=projectId, timerow=time_row,
                                    name=experiment_name, description=description,
                                    measured=experimentDate)
    new_experiment.save()
    experiment_id = new_experiment.id

    # One Datarow per column.  The three original copy-pasted branches only
    # differed in measuring_instrument, so map it instead of duplicating.
    instrument_codes = {'actuator': 'Ac', 'sensor': 'Se'}
    for i, column_name in enumerate(header):
        new_datarow = Datarow(
            experiment_id=experiment_id, unit=units[i], name=column_name,
            data_type=data_type[i], data_format=data_format[i],
            function_type=function_type[i],
            response_name=res_name[i], response_node=res_node[i],
            response_dir=res_dir[i], reference_name=ref_name[i],
            reference_node=ref_node[i], reference_dir=ref_dir[i],
            measuring_instrument=instrument_codes.get(measurement_instruments[i], 'No'))
        new_datarow.save()
        # Bulk-insert this column's values in a single query.
        Value.objects.bulk_create(
            [Value(value=row[i], datarow_id=new_datarow.id) for row in data])

    # Redirect to the experiment detail page of the freshly created data set.
    return HttpResponseRedirect('/experiments/' + str(experiment_id))
# derivation and integration "app"
@login_required
def derivate(request, experimentId):
    """Render the derivation/integration page for an experiment."""
    # Fetch the experiment once instead of one query per attribute.
    experiment = Experiment.objects.get(id=experimentId)
    projectId = experiment.project_id
    expowner_id = Project.objects.get(id=projectId).user_id
    curruser_id = request.user.id

    # Graph visibility flags posted by the previous page.
    graph_visibility = request.POST.get("graphVisibilities", "").split(',')

    # Datarow metadata (reuse one queryset for all columns).
    datarows = Datarow.objects.filter(experiment_id=experimentId)
    header_list = np.asarray(datarows.values_list('name', flat=True))
    einheiten_list = np.asarray(datarows.values_list('unit', flat=True))
    mInstruments_list = np.asarray(datarows.values_list('measuring_instrument', flat=True))
    data_format_list = np.asarray(datarows.values_list('data_format', flat=True))
    datarow_id = datarows.values_list('id', flat=True)
    datarow_amount = len(datarow_id)

    # One DB fetch per datarow, then transpose to sample-major order.
    values_wo = [Value.objects.filter(datarow_id=row_id).values_list('value', flat=True)
                 for row_id in datarow_id]
    data = np.transpose(values_wo).astype(float)
    jsonData = json.dumps(data, cls=NumPyArangeEncoder)

    dataForRender = {
        'jsonHeader': header_list,
        'jsonHeaderRealJson': json.dumps(header_list, cls=NumPyArangeEncoder),
        'jsonEinheiten': einheiten_list,
        'jsonEinheitenRealJson': json.dumps(einheiten_list, cls=NumPyArangeEncoder),
        'jsonHeaderAndUnits': zip(header_list, einheiten_list),
        'jsonData': jsonData,
        'jsonMInstrumentsRealJson': json.dumps(mInstruments_list, cls=NumPyArangeEncoder),
        'jsonDataFormat': json.dumps(data_format_list, cls=NumPyArangeEncoder),
        'experimentId': experimentId,
        'experimentName': experiment.name,
        'numOfCols': datarow_amount,
        'projectId': projectId,
        'dateFormat': settings.DATE_FORMAT,
        'dateCreated': experiment.created,
        'timerow': experiment.timerow,
        'timerowRealJson': json.dumps(experiment.timerow, cls=NumPyArangeEncoder),
        'experiment': experiment,
        'current_user_id': curruser_id,
        'experiment_owner_id': expowner_id,
        'graphVisibility': json.dumps(graph_visibility, cls=NumPyArangeEncoder),
    }
    return render(request, "experiments/deriv.html", dataForRender)
# is called @ the end of the intderiv process
def derivateRefresh(request,experimentId):
    """AJAX endpoint: integrate or derive one column against another.

    POST parameters: function (1 = integration, else derivation), firstCol,
    secondCol, newColName, newColUnit.  Returns the computed column as JSON.

    NOTE(review): unlike the other views this one has no @login_required --
    confirm whether anonymous access is intentional.
    """
    if request.method != 'POST':
        return JsonResponse({"error": "the Request Method hasnt been POST!"})

    # Task parameters.  newColName/newColUnit are read to honor the POST
    # contract even though this endpoint only returns the raw result.
    function = int(request.POST.get('function'))
    firstCol = int(request.POST.get('firstCol'))
    secondCol = int(request.POST.get('secondCol'))
    newColName = request.POST.get('newColName')
    newColUnit = request.POST.get('newColUnit')

    # Load the measurement matrix: one DB fetch per datarow, then transpose
    # to sample-major order.
    datarow_id = Datarow.objects.filter(experiment_id=experimentId).values_list('id', flat=True)
    values_wo = [Value.objects.filter(datarow_id=row_id).values_list('value', flat=True)
                 for row_id in datarow_id]
    data = np.transpose(values_wo).astype(float)

    # The calculus helpers expect JSON input.
    jsonData = json.dumps(data, cls=NumPyArangeEncoder)

    # 1 == integration (trapezoid rule); anything else == derivation.
    if function == 1:
        result = calc.trapez_for_each(jsonData, firstCol, secondCol)
    else:
        result = calc.numerical_approx(jsonData, firstCol, secondCol)

    result = json.dumps(result, cls=NumPyArangeEncoder)
    return JsonResponse({'result': result})
# delete an experiment
@login_required
def delete_experiment(request, experimentId):
    """Delete an experiment, then redirect to its project's detail page."""
    project_id = Experiment.objects.get(id=experimentId).project_id
    project_name = Project.objects.get(id=project_id).name
    Experiment.objects.filter(id=experimentId).delete()
    redirect_url = '/projects/detail/%s/%s' % (project_name, project_id)
    return HttpResponseRedirect(redirect_url)
#render the experiment edit page:
@login_required
def render_edit_experiment(request, experimentId):
    """Render the experiment edit form."""
    experiment = Experiment.objects.get(id=experimentId)
    datarows = Datarow.objects.filter(experiment_id=experimentId)
    # Reuse the queryset instead of issuing a second identical filter query
    # just to fetch the ids.
    datarow_ids = datarows.values_list('id', flat=True)
    data_for_render = {
        'experiment': experiment,
        'datarows': datarows,
        'amt_datarows': len(datarows),
        'experimentId': experimentId,
        'datarow_ids': datarow_ids,
    }
    return render(request, "experiments/edit.html", data_for_render)
# Experiment in database gets changed according to user input.
@login_required
def edit_experiment(request, experimentId):
    """Apply the posted edit form to the experiment and its datarows."""
    if request.method != 'POST':
        return HttpResponseRedirect('/dashboard/')
    posted = request.POST
    # The id list arrives as the string form of a queryset; the slice strips
    # an 11-char prefix and a 2-char suffix (presumably "<QuerySet [" and
    # "]>" — confirm against the template) before splitting on ", ".
    datarow_ids = posted['datarow_ids'][11:-2].split(', ')
    experiment_name = posted['experiment_name']
    experiment_description = posted['experiment_description']
    experiment_measured = posted['experiment_measured']
    amt_datarows = posted['amt_datarows']
    # 'Day, DD. Month YYYY' -> timezone-aware datetime; 0 means "not supplied".
    measured_parts = experiment_measured.split(' ')
    if measured_parts[0] == 'None' or measured_parts[0][-1] == '.':
        experiment_measured = 0
    else:
        # month_to_string presumably maps a month name to its number — verify.
        experiment_measured = datetime(int(measured_parts[3]), month_to_string(measured_parts[2]),
                                       int(measured_parts[1].rstrip('.')))
        experiment_measured = timezone.make_aware(experiment_measured, timezone.get_current_timezone())
    # Only write `measured` when the form actually supplied a date.
    if experiment_measured == 0:
        Experiment.objects.filter(id=experimentId).update(name=experiment_name, description=experiment_description)
    else:
        Experiment.objects.filter(id=experimentId).update(name=experiment_name, description=experiment_description,
                                                          measured=experiment_measured)
    # POST field prefix -> Datarow model field, in the original read order.
    field_map = (
        ('datarow_name', 'name'),
        ('datarow_unit', 'unit'),
        ('datarow_measuring_instrument', 'measuring_instrument'),
        ('datarow_data_type', 'data_type'),
        ('datarow_data_format', 'data_format'),
        ('datarow_function_type', 'function_type'),
        ('datarow_res_name', 'response_name'),
        ('datarow_res_node', 'response_node'),
        ('datarow_res_dir', 'response_dir'),
        ('datarow_ref_name', 'reference_name'),
        ('datarow_ref_node', 'reference_node'),
        ('datarow_ref_dir', 'reference_dir'),
    )
    for index in range(int(amt_datarows)):
        row_id = datarow_ids[index]
        updates = {model_field: posted[prefix + str(row_id)]
                   for prefix, model_field in field_map}
        Datarow.objects.filter(id=row_id).update(**updates)
    return HttpResponseRedirect('/experiments/' + str(experimentId))
|
{
"content_hash": "fa64b9d1c03663de87c6913b0e14613c",
"timestamp": "",
"source": "github",
"line_count": 424,
"max_line_length": 167,
"avg_line_length": 49.14150943396226,
"alnum_prop": 0.6660107506239201,
"repo_name": "IT-PM-OpenAdaptronik/Webapp",
"id": "c9fb5b5e8cedb638dbde532e38f8a16af68a3075",
"size": "20838",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/experiments/views.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "332007"
},
{
"name": "HTML",
"bytes": "184934"
},
{
"name": "JavaScript",
"bytes": "425696"
},
{
"name": "Python",
"bytes": "194670"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Allow the ``user`` foreign key on ``numerodocumento`` to be blank."""

    dependencies = [
        ('servico', '0013_servicoevento'),
    ]
    operations = [
        migrations.AlterField(
            model_name='numerodocumento',
            name='user',
            # blank=True makes the field optional in forms/validation;
            # PROTECT keeps a referenced user from being deleted while
            # documents still point at it.
            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='usuário'),
        ),
    ]
|
{
"content_hash": "651589ef8f70c132c1e49a330579b66a",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 146,
"avg_line_length": 27.1,
"alnum_prop": 0.6642066420664207,
"repo_name": "anselmobd/fo2",
"id": "b244fb444abbc45689be73dea175cf843e3125fb",
"size": "617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/servico/migrations/0014_numerodocumentoauto_user_blank.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "160899"
},
{
"name": "HTML",
"bytes": "855985"
},
{
"name": "JavaScript",
"bytes": "203109"
},
{
"name": "PLSQL",
"bytes": "2762"
},
{
"name": "Python",
"bytes": "3228268"
},
{
"name": "Shell",
"bytes": "2161"
}
],
"symlink_target": ""
}
|
import base64
import binascii
import sys
import shellfoundry.exceptions as exceptions
class PasswordModification(object):
    """Obfuscates configuration passwords by XOR-ing them with the machine name.

    The stored form is the XOR-ed text encoded as base64, so a value written
    on one machine only decodes meaningfully on that same machine.
    """

    # Configuration keys whose values get this treatment.
    HANDLING_KEYS = ["password", "github_password"]

    def modify(self, value):
        """Return *value* XOR-ed with the machine name, base64-encoded."""
        xored = self._decode_encode(value, self._get_encryption_key())
        if sys.version_info[0] < 3:
            return base64.b64encode(xored)
        return base64.b64encode(xored.encode()).decode()

    def normalize(self, value):
        """Reverse :meth:`modify`; return *value* unchanged if not base64."""
        try:
            machine_key = self._get_encryption_key()
            if sys.version_info[0] < 3:
                raw = base64.decodestring(value)
            else:
                raw = base64.decodebytes(value.encode()).decode()
            return self._decode_encode(raw, machine_key)
        except binascii.Error:
            # Value was stored in plain text; hand it back untouched.
            return value

    def _get_encryption_key(self):
        """Use the local machine name as the XOR key."""
        from platform import node

        machine_name = node()
        if not machine_name:
            raise exceptions.PlatformNameIsEmptyException()
        return machine_name

    def _decode_encode(self, value, key):
        """XOR *value* character-wise against *key* repeated (up to 100x)."""
        repeated_key = key * 100
        return "".join(
            chr(ord(ch) ^ ord(k)) for ch, k in zip(value, repeated_key)
        )
|
{
"content_hash": "c86913b7165517214798d43c0b482be1",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 80,
"avg_line_length": 30.444444444444443,
"alnum_prop": 0.5781021897810219,
"repo_name": "QualiSystems/shellfoundry",
"id": "9c899aab5c63ac42b0b09667e38841c5fd6d0bc5",
"size": "1413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shellfoundry/utilities/modifiers/configuration/password_modification.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "375311"
},
{
"name": "Rich Text Format",
"bytes": "692763"
}
],
"symlink_target": ""
}
|
"""Base Estimator class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os
import tempfile
import numpy as np
import six
from tensorflow.core.framework import summary_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session as tf_session
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.estimator import run_config
from tensorflow.python.estimator import util
from tensorflow.python.estimator.export.export import build_all_signature_defs
from tensorflow.python.estimator.export.export import get_temp_export_dir
from tensorflow.python.estimator.export.export import get_timestamped_export_dir
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import metrics as metrics_lib
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.summary.writer import writer_cache
from tensorflow.python.training import evaluation
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver
from tensorflow.python.training import training
from tensorflow.python.util import compat
from tensorflow.python.util import tf_inspect
# Parameter names a model_fn signature may declare; presumably consumed by
# _verify_model_fn_args (called from Estimator.__init__) — anything outside
# this set would be rejected there.
_VALID_MODEL_FN_ARGS = set(
    ['features', 'labels', 'mode', 'params', 'self', 'config'])
class Estimator(object):
"""Estimator class to train and evaluate TensorFlow models.
The `Estimator` object wraps a model which is specified by a `model_fn`,
which, given inputs and a number of other parameters, returns the ops
necessary to perform training, evaluation, or predictions.
All outputs (checkpoints, event files, etc.) are written to `model_dir`, or a
subdirectory thereof. If `model_dir` is not set, a temporary directory is
used.
The `config` argument can be passed `RunConfig` object containing information
about the execution environment. It is passed on to the `model_fn`, if the
`model_fn` has a parameter named "config" (and input functions in the same
manner). If the `config` parameter is not passed, it is instantiated by the
`Estimator`. Not passing config means that defaults useful for local execution
are used. `Estimator` makes config available to the model (for instance, to
allow specialization based on the number of workers available), and also uses
some of its fields to control internals, especially regarding checkpointing.
The `params` argument contains hyperparameters. It is passed to the
`model_fn`, if the `model_fn` has a parameter named "params", and to the input
functions in the same manner. `Estimator` only passes params along, it does
not inspect it. The structure of `params` is therefore entirely up to the
developer.
None of `Estimator`'s methods can be overridden in subclasses (its
constructor enforces this). Subclasses should use `model_fn` to configure
the base class, and may add methods implementing specialized functionality.
"""
  def __init__(self, model_fn, model_dir=None, config=None, params=None):
    """Constructs an `Estimator` instance.
    Args:
      model_fn: Model function. Follows the signature:
        * Args:
          * `features`: This is the first item returned from the `input_fn`
                 passed to `train`, `evaluate`, and `predict`. This should be a
                 single `Tensor` or `dict` of same.
          * `labels`: This is the second item returned from the `input_fn`
                 passed to `train`, `evaluate`, and `predict`. This should be a
                 single `Tensor` or `dict` of same (for multi-head models). If
                 mode is `ModeKeys.PREDICT`, `labels=None` will be passed. If
                 the `model_fn`'s signature does not accept `mode`, the
                 `model_fn` must still be able to handle `labels=None`.
          * `mode`: Optional. Specifies if this training, evaluation or
                 prediction. See `ModeKeys`.
          * `params`: Optional `dict` of hyperparameters. Will receive what
                 is passed to Estimator in `params` parameter. This allows
                 to configure Estimators from hyper parameter tuning.
          * `config`: Optional configuration object. Will receive what is passed
                 to Estimator in `config` parameter, or the default `config`.
                 Allows updating things in your model_fn based on configuration
                 such as `num_ps_replicas`, or `model_dir`.
        * Returns:
          `EstimatorSpec`
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model. If `None`, the model_dir in
        `config` will be used if set. If both are set, they must be same. If
        both are `None`, a temporary directory will be used.
      config: Configuration object.
      params: `dict` of hyper parameters that will be passed into `model_fn`.
              Keys are names of parameters, values are basic python types.
    Raises:
      ValueError: parameters of `model_fn` don't match `params`.
      ValueError: if this is called via a subclass and if that class overrides
        a member of `Estimator`.
    """
    # Enforce the "subclasses may not override Estimator members" contract
    # before doing anything else.
    Estimator._assert_members_are_not_overridden(self)
    if config is None:
      self._config = run_config.RunConfig()
      logging.info('Using default config.')
    else:
      if not isinstance(config, run_config.RunConfig):
        raise ValueError(
            'config must be an instance of RunConfig, but provided %s.' %
            config)
      self._config = config
    # Model directory: may come from either the constructor or the RunConfig;
    # when both are given they must agree.
    if (model_dir is not None) and (self._config.model_dir is not None):
      if model_dir != self._config.model_dir:
        # pylint: disable=g-doc-exception
        raise ValueError(
            "model_dir are set both in constructor and RunConfig, but with "
            "different values. In constructor: '{}', in RunConfig: "
            "'{}' ".format(model_dir, self._config.model_dir))
        # pylint: enable=g-doc-exception
    self._model_dir = model_dir or self._config.model_dir
    if self._model_dir is None:
      # Neither source provided a directory: fall back to a throwaway one.
      self._model_dir = tempfile.mkdtemp()
      logging.warning('Using temporary folder as model directory: %s',
                      self._model_dir)
    if self._config.model_dir is None:
      # Propagate the resolved directory back into the config so both views
      # stay consistent.
      self._config = self._config.replace(model_dir=self._model_dir)
    logging.info('Using config: %s', str(vars(self._config)))
    # Default session config simply enables soft device placement.
    if self._config.session_config is None:
      self._session_config = config_pb2.ConfigProto(allow_soft_placement=True)
    else:
      self._session_config = self._config.session_config
    self._device_fn = _get_replica_device_setter(self._config)
    if model_fn is None:
      raise ValueError('model_fn must be provided to Estimator.')
    _verify_model_fn_args(model_fn, params)
    self._model_fn = model_fn
    # Deep-copied so later caller mutations of `params` cannot leak in.
    self._params = copy.deepcopy(params or {})
  @property
  def model_dir(self):
    """Returns the directory where checkpoints and event files are written."""
    return self._model_dir
  @property
  def config(self):
    """Returns a deep copy of the `RunConfig`, so callers cannot mutate it."""
    return copy.deepcopy(self._config)
  @property
  def params(self):
    """Returns a deep copy of the hyperparameter dict (mutation-safe)."""
    return copy.deepcopy(self._params)
  @property
  def model_fn(self):
    """Returns the model_fn which is bound to self.params.
    Returns:
      The model_fn with following signature:
        `def model_fn(features, labels, mode, config)`
    """
    def public_model_fn(features, labels, mode, config):
      # Delegate through _call_model_fn so optional-argument filtering and
      # EstimatorSpec validation apply to external callers too.
      return self._call_model_fn(features, labels, mode, config)
    return public_model_fn
def train(self, input_fn, hooks=None, steps=None, max_steps=None):
"""Trains a model given training data input_fn.
Args:
input_fn: Input function returning a tuple of:
features - `Tensor` or dictionary of string feature name to `Tensor`.
labels - `Tensor` or dictionary of `Tensor` with labels.
hooks: List of `SessionRunHook` subclass instances. Used for callbacks
inside the training loop.
steps: Number of steps for which to train model. If `None`, train forever
or train until input_fn generates the `OutOfRange` or `StopIteration`
error. 'steps' works incrementally. If you call two times
train(steps=10) then training occurs in total 20 steps. If `OutOfRange`
or `StopIteration` error occurs in the middle, training stops before 20
steps. If you don't want to have incremental behavior please set
`max_steps` instead. If set, `max_steps` must be `None`.
max_steps: Number of total steps for which to train model. If `None`,
train forever or train until input_fn generates the `OutOfRange` or
`StopIteration` error. If set, `steps` must be `None`. If `OutOfRange`
or `StopIteration` error occurs in the middle, training stops before
`max_steps` steps.
Two calls to `train(steps=100)` means 200 training
iterations. On the other hand, two calls to `train(max_steps=100)` means
that the second call will not do any iteration since first call did
all 100 steps.
Returns:
`self`, for chaining.
Raises:
ValueError: If both `steps` and `max_steps` are not `None`.
ValueError: If either `steps` or `max_steps` is <= 0.
"""
if (steps is not None) and (max_steps is not None):
raise ValueError('Can not provide both steps and max_steps.')
if steps is not None and steps <= 0:
raise ValueError('Must specify steps > 0, given: {}'.format(steps))
if max_steps is not None and max_steps <= 0:
raise ValueError(
'Must specify max_steps > 0, given: {}'.format(max_steps))
if max_steps is not None:
start_step = _load_global_step_from_checkpoint_dir(self._model_dir)
if max_steps <= start_step:
logging.info('Skipping training since max_steps has already saved.')
return self
hooks = _check_hooks_type(hooks)
if steps is not None or max_steps is not None:
hooks.append(training.StopAtStepHook(steps, max_steps))
loss = self._train_model(input_fn=input_fn, hooks=hooks)
logging.info('Loss for final step: %s.', loss)
return self
def evaluate(self, input_fn, steps=None, hooks=None, checkpoint_path=None,
name=None):
"""Evaluates the model given evaluation data input_fn.
For each step, calls `input_fn`, which returns one batch of data.
Evaluates until:
- `steps` batches are processed, or
- `input_fn` raises an end-of-input exception (`OutOfRangeError` or
`StopIteration`).
Args:
input_fn: Input function returning a tuple of:
features - Dictionary of string feature name to `Tensor` or
`SparseTensor`.
labels - `Tensor` or dictionary of `Tensor` with labels.
steps: Number of steps for which to evaluate model. If `None`, evaluates
until `input_fn` raises an end-of-input exception.
hooks: List of `SessionRunHook` subclass instances. Used for callbacks
inside the evaluation call.
checkpoint_path: Path of a specific checkpoint to evaluate. If `None`, the
latest checkpoint in `model_dir` is used.
name: Name of the evaluation if user needs to run multiple evaluations on
different data sets, such as on training data vs test data. Metrics for
different evaluations are saved in separate folders, and appear
separately in tensorboard.
Returns:
A dict containing the evaluation metrics specified in `model_fn` keyed by
name, as well as an entry `global_step` which contains the value of the
global step for which this evaluation was performed.
Raises:
ValueError: If `steps <= 0`.
ValueError: If no model has been trained, namely `model_dir`, or the
given `checkpoint_path` is empty.
"""
hooks = _check_hooks_type(hooks)
if steps is not None:
if steps <= 0:
raise ValueError('Must specify steps > 0, given: {}'.format(steps))
hooks.append(evaluation._StopAfterNEvalsHook( # pylint: disable=protected-access
num_evals=steps))
return self._evaluate_model(
input_fn=input_fn,
hooks=hooks,
checkpoint_path=checkpoint_path,
name=name)
  def predict(self,
              input_fn,
              predict_keys=None,
              hooks=None,
              checkpoint_path=None):
    """Returns predictions for given features.
    Args:
      input_fn: Input function returning features which is a dictionary of
        string feature name to `Tensor` or `SparseTensor`. If it returns a
        tuple, first item is extracted as features. Prediction continues until
        `input_fn` raises an end-of-input exception (`OutOfRangeError` or
        `StopIteration`).
      predict_keys: list of `str`, name of the keys to predict. It is used if
        the `EstimatorSpec.predictions` is a `dict`. If `predict_keys` is used
        then rest of the predictions will be filtered from the dictionary. If
        `None`, returns all.
      hooks: List of `SessionRunHook` subclass instances. Used for callbacks
        inside the prediction call.
      checkpoint_path: Path of a specific checkpoint to predict. If `None`, the
        latest checkpoint in `model_dir` is used.
    Yields:
      Evaluated values of `predictions` tensors.
    Raises:
      ValueError: Could not find a trained model in model_dir.
      ValueError: if batch length of predictions are not same.
      ValueError: If there is a conflict between `predict_keys` and
        `predictions`. For example if `predict_keys` is not `None` but
        `EstimatorSpec.predictions` is not a `dict`.
    """
    hooks = _check_hooks_type(hooks)
    # Check that model has been trained.
    if not checkpoint_path:
      checkpoint_path = saver.latest_checkpoint(self._model_dir)
    if not checkpoint_path:
      raise ValueError('Could not find trained model in model_dir: {}.'.format(
          self._model_dir))
    # Build a fresh inference graph (labels=None, PREDICT mode) and restore
    # the chosen checkpoint into it.
    with ops.Graph().as_default() as g:
      random_seed.set_random_seed(self._config.tf_random_seed)
      self._create_and_assert_global_step(g)
      features = self._get_features_from_input_fn(
          input_fn, model_fn_lib.ModeKeys.PREDICT)
      estimator_spec = self._call_model_fn(
          features, None, model_fn_lib.ModeKeys.PREDICT, self.config)
      predictions = self._extract_keys(estimator_spec.predictions, predict_keys)
      with training.MonitoredSession(
          session_creator=training.ChiefSessionCreator(
              checkpoint_filename_with_path=checkpoint_path,
              scaffold=estimator_spec.scaffold,
              config=self._session_config),
          hooks=hooks) as mon_sess:
        # Keep fetching until a hook (or end-of-input) stops the session.
        while not mon_sess.should_stop():
          preds_evaluated = mon_sess.run(predictions)
          if not isinstance(predictions, dict):
            for pred in preds_evaluated:
              yield pred
          else:
            # Dict predictions come back batched; unbatch into one dict per
            # example (all values must share the same batch length).
            for i in range(self._extract_batch_length(preds_evaluated)):
              yield {
                  key: value[i]
                  for key, value in six.iteritems(preds_evaluated)
              }
def _assert_members_are_not_overridden(self):
allowed_overrides = set(['_call_input_fn', '_create_global_step'])
estimator_members = set([m for m in Estimator.__dict__.keys()
if not m.startswith('__')])
subclass_members = set(self.__class__.__dict__.keys())
common_members = estimator_members & subclass_members - allowed_overrides
overridden_members = [
m for m in common_members
if Estimator.__dict__[m] != self.__class__.__dict__[m]]
if overridden_members:
raise ValueError(
'Subclasses of Estimator cannot override members of Estimator. '
'{} does override {}'.format(self.__class__, overridden_members))
  def export_savedmodel(
      self, export_dir_base, serving_input_receiver_fn,
      assets_extra=None,
      as_text=False,
      checkpoint_path=None):
    """Exports inference graph as a SavedModel into given dir.
    This method builds a new graph by first calling the
    serving_input_receiver_fn to obtain feature `Tensor`s, and then calling
    this `Estimator`'s model_fn to generate the model graph based on those
    features. It restores the given checkpoint (or, lacking that, the most
    recent checkpoint) into this graph in a fresh session. Finally it creates
    a timestamped export directory below the given export_dir_base, and writes
    a `SavedModel` into it containing a single `MetaGraphDef` saved from this
    session.
    The exported `MetaGraphDef` will provide one `SignatureDef` for each
    element of the export_outputs dict returned from the model_fn, named using
    the same keys. One of these keys is always
    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, indicating which
    signature will be served when a serving request does not specify one.
    For each signature, the outputs are provided by the corresponding
    `ExportOutput`s, and the inputs are always the input receivers provided by
    the serving_input_receiver_fn.
    Extra assets may be written into the SavedModel via the extra_assets
    argument. This should be a dict, where each key gives a destination path
    (including the filename) relative to the assets.extra directory. The
    corresponding value gives the full path of the source file to be copied.
    For example, the simple case of copying a single file without renaming it
    is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
    Args:
      export_dir_base: A string containing a directory in which to create
        timestamped subdirectories containing exported SavedModels.
      serving_input_receiver_fn: A function that takes no argument and
        returns a `ServingInputReceiver`.
      assets_extra: A dict specifying how to populate the assets.extra directory
        within the exported SavedModel, or `None` if no extra assets are needed.
      as_text: whether to write the SavedModel proto in text format.
      checkpoint_path: The checkpoint path to export. If `None` (the default),
        the most recent checkpoint found within the model directory is chosen.
    Returns:
      The string path to the exported directory.
    Raises:
      ValueError: if no serving_input_receiver_fn is provided, no export_outputs
          are provided, or no checkpoint can be found.
    """
    if serving_input_receiver_fn is None:
      raise ValueError('serving_input_receiver_fn must be defined.')
    # Build the serving graph from scratch (labels=None, PREDICT mode).
    with ops.Graph().as_default() as g:
      self._create_and_assert_global_step(g)
      random_seed.set_random_seed(self._config.tf_random_seed)
      serving_input_receiver = serving_input_receiver_fn()
      # Call the model_fn and collect the export_outputs.
      estimator_spec = self._call_model_fn(
          features=serving_input_receiver.features,
          labels=None,
          mode=model_fn_lib.ModeKeys.PREDICT,
          config=self.config)
      # Build the SignatureDefs from receivers and all outputs
      signature_def_map = build_all_signature_defs(
          serving_input_receiver.receiver_tensors,
          estimator_spec.export_outputs)
      if not checkpoint_path:
        # Locate the latest checkpoint
        checkpoint_path = saver.latest_checkpoint(self._model_dir)
      if not checkpoint_path:
        raise ValueError("Couldn't find trained model at %s." % self._model_dir)
      export_dir = get_timestamped_export_dir(export_dir_base)
      temp_export_dir = get_temp_export_dir(export_dir)
      # TODO(soergel): Consider whether MonitoredSession makes sense here
      with tf_session.Session() as session:
        saver_for_restore = estimator_spec.scaffold.saver or saver.Saver(
            sharded=True)
        saver_for_restore.restore(session, checkpoint_path)
        # TODO(b/36111876): replace legacy_init_op with main_op mechanism
        # pylint: disable=protected-access
        local_init_op = (
            estimator_spec.scaffold.local_init_op or
            monitored_session.Scaffold._default_local_init_op())
        # pylint: enable=protected-access
        # Perform the export
        builder = saved_model_builder.SavedModelBuilder(temp_export_dir)
        builder.add_meta_graph_and_variables(
            session, [tag_constants.SERVING],
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(
                ops.GraphKeys.ASSET_FILEPATHS),
            legacy_init_op=local_init_op)
        builder.save(as_text)
      # Add the extra assets
      if assets_extra:
        assets_extra_path = os.path.join(compat.as_bytes(temp_export_dir),
                                         compat.as_bytes('assets.extra'))
        for dest_relative, source in assets_extra.items():
          dest_absolute = os.path.join(compat.as_bytes(assets_extra_path),
                                       compat.as_bytes(dest_relative))
          dest_path = os.path.dirname(dest_absolute)
          gfile.MakeDirs(dest_path)
          gfile.Copy(source, dest_absolute)
      # Everything is written to a temp dir first and only renamed into the
      # final timestamped path once complete, so readers never see a
      # half-written export under the final name.
      gfile.Rename(temp_export_dir, export_dir)
      return export_dir
def _get_features_from_input_fn(self, input_fn, mode):
result = self._call_input_fn(input_fn, mode)
if not ops.get_default_graph().get_collection(ops.GraphKeys.QUEUE_RUNNERS):
logging.warning('Input graph does not contain a QueueRunner. '
'That means predict yields forever. '
'This is probably a mistake.')
if isinstance(result, (list, tuple)):
return result[0]
return result
def _get_features_and_labels_from_input_fn(self, input_fn, mode):
result = self._call_input_fn(input_fn, mode)
if isinstance(result, (list, tuple)):
if len(result) != 2:
raise ValueError(
'input_fn should return (feautures, labels) as a len 2 tuple.')
return result
return result, None
def _extract_batch_length(self, preds_evaluated):
"""Extracts batch length of predictions."""
batch_length = None
for key, value in six.iteritems(preds_evaluated):
batch_length = batch_length or value.shape[0]
if value.shape[0] != batch_length:
raise ValueError('Batch length of predictions should be same. %s has '
'different batch length then others.' % key)
return batch_length
def _extract_keys(self, predictions, predict_keys):
"""Extracts `predict_keys` from `predictions`."""
if not predict_keys:
return predictions
if not isinstance(predictions, dict):
raise ValueError(
'predict_keys argument is not valid in case of non-dict predictions.')
existing_keys = predictions.keys()
predictions = {
key: value
for key, value in six.iteritems(predictions) if key in predict_keys
}
if not predictions:
raise ValueError('Expected to run at least one output from %s, '
'provided %s.' % (existing_keys, predict_keys))
return predictions
  def _create_global_step(self, graph):
    """Creates the global step tensor in graph.
    The global step tensor must be an integer type with name 'global_step' and
    be added to the collection ${tf.GraphKeys.GLOBAL_STEP}.
    Args:
      graph: The graph in which to create the global step tensor.
    Returns:
      The global step `Tensor`.
    """
    # One of the two members subclasses are allowed to override (see
    # _assert_members_are_not_overridden); the default delegates to the
    # library helper.
    return training.create_global_step(graph)
  def _create_and_assert_global_step(self, graph):
    """Creates and asserts properties of the global step.
    Args:
      graph: The graph in which to create the global step tensor.
    Returns:
      The global step `Tensor`.
    """
    step = self._create_global_step(graph)
    # _create_global_step may be overridden by subclasses; verify the
    # replacement still registered the step tensor and kept an integer dtype.
    assert step == training.get_global_step()
    assert step.dtype.is_integer
    return step
def _call_input_fn(self, input_fn, mode):
"""Calls the input function.
Args:
input_fn: The input function.
mode: ModeKeys
Returns:
Either features or (features, labels) where features and labels are:
features - `Tensor` or dictionary of string feature name to `Tensor`.
labels - `Tensor` or dictionary of `Tensor` with labels.
Raises:
ValueError: if input_fn takes invalid arguments.
"""
del mode # unused
input_fn_args = util.fn_args(input_fn)
kwargs = {}
if 'params' in input_fn_args:
kwargs['params'] = self.params
if 'config' in input_fn_args:
kwargs['config'] = self.config
with ops.device('/cpu:0'):
return input_fn(**kwargs)
def _call_model_fn(self, features, labels, mode, config):
"""Calls model function.
Args:
features: features dict.
labels: labels dict.
mode: ModeKeys
config: RunConfig
Returns:
An `EstimatorSpec` object.
Raises:
ValueError: if model_fn returns invalid objects.
"""
model_fn_args = util.fn_args(self._model_fn)
kwargs = {}
if 'labels' in model_fn_args:
kwargs['labels'] = labels
else:
if labels is not None:
raise ValueError(
'model_fn does not take labels, but input_fn returns labels.')
if 'mode' in model_fn_args:
kwargs['mode'] = mode
if 'params' in model_fn_args:
kwargs['params'] = self.params
if 'config' in model_fn_args:
kwargs['config'] = config
model_fn_results = self._model_fn(features=features, **kwargs)
if not isinstance(model_fn_results, model_fn_lib.EstimatorSpec):
raise ValueError('model_fn should return an EstimatorSpec.')
return model_fn_results
  def _train_model(self, input_fn, hooks):
    """Builds the training graph and runs it to completion in a
    MonitoredTrainingSession; returns the loss from the final step."""
    all_hooks = []
    with ops.Graph().as_default() as g, g.device(self._device_fn):
      random_seed.set_random_seed(self._config.tf_random_seed)
      global_step_tensor = self._create_and_assert_global_step(g)
      features, labels = self._get_features_and_labels_from_input_fn(
          input_fn, model_fn_lib.ModeKeys.TRAIN)
      estimator_spec = self._call_model_fn(
          features, labels, model_fn_lib.ModeKeys.TRAIN, self.config)
      ops.add_to_collection(ops.GraphKeys.LOSSES, estimator_spec.loss)
      # Caller hooks first, then the built-in NaN guard and periodic
      # loss/step logging, then the model_fn's own training hooks.
      all_hooks.extend(hooks)
      all_hooks.extend([
          training.NanTensorHook(estimator_spec.loss),
          training.LoggingTensorHook(
              {
                  'loss': estimator_spec.loss,
                  'step': global_step_tensor
              },
              every_n_iter=100)
      ])
      all_hooks.extend(estimator_spec.training_hooks)
      # Register a default Saver only if neither the scaffold nor the graph
      # collection already provides one.
      if not (estimator_spec.scaffold.saver or
              ops.get_collection(ops.GraphKeys.SAVERS)):
        ops.add_to_collection(
            ops.GraphKeys.SAVERS,
            training.Saver(
                sharded=True,
                max_to_keep=self._config.keep_checkpoint_max,
                keep_checkpoint_every_n_hours=(
                    self._config.keep_checkpoint_every_n_hours),
                defer_build=True,
                save_relative_paths=True))
      chief_hooks = []
      if (self._config.save_checkpoints_secs or
          self._config.save_checkpoints_steps):
        # Add a checkpoint-saving hook unless one was already supplied.
        saver_hook_exists = any([
            isinstance(h, training.CheckpointSaverHook)
            for h in (all_hooks + chief_hooks +
                      list(estimator_spec.training_chief_hooks))
        ])
        if not saver_hook_exists:
          chief_hooks = [
              training.CheckpointSaverHook(
                  self._model_dir,
                  save_secs=self._config.save_checkpoints_secs,
                  save_steps=self._config.save_checkpoints_steps,
                  scaffold=estimator_spec.scaffold)
          ]
      with training.MonitoredTrainingSession(
          master=self._config.master,
          is_chief=self._config.is_chief,
          checkpoint_dir=self._model_dir,
          scaffold=estimator_spec.scaffold,
          hooks=all_hooks,
          chief_only_hooks=(
              tuple(chief_hooks) + tuple(estimator_spec.training_chief_hooks)),
          save_checkpoint_secs=0,  # Saving is handled by a hook.
          save_summaries_steps=self._config.save_summary_steps,
          config=self._session_config,
          log_step_count_steps=self._config.log_step_count_steps) as mon_sess:
        loss = None
        # Run until a hook (e.g. StopAtStepHook) or end-of-input stops us,
        # keeping the loss of the most recent step.
        while not mon_sess.should_stop():
          _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
      return loss
  def _evaluate_model(self,
                      input_fn,
                      hooks=None,
                      checkpoint_path=None,
                      name=''):
    """Evaluates the model using the training.evaluation library.

    Args:
      input_fn: Input function returning a tuple of (features, labels).
      hooks: Optional list of hooks to run during evaluation.
      checkpoint_path: Path of a specific checkpoint to evaluate. If `None`,
        the latest checkpoint in `self._model_dir` is used.
      name: Name of this evaluation; a non-empty name writes summaries into a
        separate `eval_<name>` directory so multiple evaluations don't collide.

    Returns:
      A dict of metric values, including a `global_step` entry.

    Raises:
      ValueError: If no trained checkpoint can be found, or if the model_fn
        defines a metric named `loss` or `global_step` (reserved names).
    """
    # Check that model has been trained (if nothing has been set explicitly).
    if not checkpoint_path:
      latest_path = saver.latest_checkpoint(self._model_dir)
      if not latest_path:
        raise ValueError('Could not find trained model in model_dir: {}.'.
                         format(self._model_dir))
      checkpoint_path = latest_path

    # Setup output directory.
    eval_dir = os.path.join(self._model_dir, 'eval' if not name else
                            'eval_' + name)

    with ops.Graph().as_default() as g:
      random_seed.set_random_seed(self._config.tf_random_seed)
      global_step_tensor = self._create_and_assert_global_step(g)
      features, labels = self._get_features_and_labels_from_input_fn(
          input_fn, model_fn_lib.ModeKeys.EVAL)
      estimator_spec = self._call_model_fn(
          features, labels, model_fn_lib.ModeKeys.EVAL, self.config)

      # 'loss' is reserved: a mean-loss metric is added automatically below.
      if model_fn_lib.LOSS_METRIC_KEY in estimator_spec.eval_metric_ops:
        raise ValueError(
            'Metric with name "%s" is not allowed, because Estimator ' % (
                model_fn_lib.LOSS_METRIC_KEY) +
            'already defines a default metric with the same name.')
      estimator_spec.eval_metric_ops[
          model_fn_lib.LOSS_METRIC_KEY] = metrics_lib.mean(estimator_spec.loss)

      update_op, eval_dict = _extract_metric_update_ops(
          estimator_spec.eval_metric_ops)

      # 'global_step' is reserved as well; it is reported with the results.
      if ops.GraphKeys.GLOBAL_STEP in eval_dict:
        raise ValueError(
            'Metric with name `global_step` is not allowed, because Estimator '
            'already defines a default metric with the same name.')
      eval_dict[ops.GraphKeys.GLOBAL_STEP] = global_step_tensor

      all_hooks = list(hooks or [])
      all_hooks.extend(list(estimator_spec.evaluation_hooks or []))

      eval_results = evaluation._evaluate_once(  # pylint: disable=protected-access
          checkpoint_path=checkpoint_path,
          master=self._config.evaluation_master,
          scaffold=estimator_spec.scaffold,
          eval_ops=update_op,
          final_ops=eval_dict,
          hooks=all_hooks,
          config=self._session_config)

      # Mirror the results into an events file so TensorBoard can show them.
      _write_dict_to_summary(
          output_dir=eval_dir,
          dictionary=eval_results,
          current_global_step=eval_results[ops.GraphKeys.GLOBAL_STEP])

    return eval_results
def _check_hooks_type(hooks):
"""Returns hooks if all are SessionRunHook, raises TypeError otherwise."""
hooks = list(hooks or [])
for h in hooks:
if not isinstance(h, training.SessionRunHook):
raise TypeError('Hooks must be a SessionRunHook, given: {}'.format(h))
return hooks
def _get_replica_device_setter(config):
"""Creates a replica device setter if required as a default device_fn.
`Estimator` uses ReplicaDeviceSetter as a default device placer. It sets the
distributed related arguments such as number of ps_replicas based on given
config.
Args:
config: A `RunConfig` instance.
Returns:
A replica device setter, or None.
"""
ps_ops = [
'Variable', 'VariableV2', 'AutoReloadVariable', 'MutableHashTable',
'MutableHashTableV2', 'MutableHashTableOfTensors',
'MutableHashTableOfTensorsV2', 'MutableDenseHashTable',
'MutableDenseHashTableV2'
]
if config.task_type:
worker_device = '/job:%s/task:%d' % (config.task_type, config.task_id)
else:
worker_device = '/job:worker'
if config.num_ps_replicas > 0:
return training.replica_device_setter(
ps_tasks=config.num_ps_replicas,
worker_device=worker_device,
merge_devices=True,
ps_ops=ps_ops,
cluster=config.cluster_spec)
else:
return None
def _verify_model_fn_args(model_fn, params):
  """Verifies model fn arguments."""
  arg_names = set(util.fn_args(model_fn))

  # 'features' is mandatory for every model_fn.
  if 'features' not in arg_names:
    raise ValueError('model_fn (%s) must include features argument.' % model_fn)

  # 'params' must be accepted if (and warn when not used although) supplied.
  has_params = 'params' in arg_names
  if params is not None and not has_params:
    raise ValueError('model_fn (%s) does not include params argument, '
                     'but params (%s) is passed to Estimator.' % (model_fn,
                                                                  params))
  if params is None and has_params:
    logging.warning('Estimator\'s model_fn (%s) includes params '
                    'argument, but params are not passed to Estimator.',
                    model_fn)

  # Bound methods carry an implicit 'self' which is not a model_fn arg.
  if tf_inspect.ismethod(model_fn):
    arg_names.discard('self')
  non_valid_args = list(arg_names - _VALID_MODEL_FN_ARGS)
  if non_valid_args:
    raise ValueError('model_fn (%s) has following not expected args: %s' %
                     (model_fn, non_valid_args))
def _load_global_step_from_checkpoint_dir(checkpoint_dir):
try:
checkpoint_reader = training.NewCheckpointReader(
training.latest_checkpoint(checkpoint_dir))
return checkpoint_reader.get_tensor(ops.GraphKeys.GLOBAL_STEP)
except: # pylint: disable=bare-except
return 0
def _extract_metric_update_ops(eval_dict):
  """Separate update operations from metric value operations."""
  value_ops = {}
  update_ops = []
  # Sort metrics lexicographically so graph is identical every time.
  for name in sorted(eval_dict):
    metric_pair = eval_dict[name]
    value_ops[name] = metric_pair[0]
    update_ops.append(metric_pair[1])

  # Group the updates into a single op; None when there are no metrics.
  grouped_update_op = (
      control_flow_ops.group(*update_ops) if update_ops else None)
  return grouped_update_op, value_ops
def _dict_to_str(dictionary):
"""Get a `str` representation of a `dict`.
Args:
dictionary: The `dict` to be represented as `str`.
Returns:
A `str` representing the `dictionary`.
"""
return ', '.join('%s = %s' % (k, v)
for k, v in sorted(six.iteritems(dictionary)))
def _write_dict_to_summary(output_dir,
                           dictionary,
                           current_global_step):
  """Writes a `dict` into summary file in given output directory.

  Only float (`float`/`np.float32`) and integer (`int`/`np.int32`/`np.int64`)
  values are written.  `None` values and the `global_step` entry are skipped
  silently; any other type is skipped with a warning.

  Args:
    output_dir: `str`, directory to write the summary file in.
    dictionary: the `dict` to be written to summary file.
    current_global_step: `int`, the current global step.
  """
  logging.info('Saving dict for global step %d: %s', current_global_step,
               _dict_to_str(dictionary))
  summary_writer = writer_cache.FileWriterCache.get(output_dir)
  summary_proto = summary_pb2.Summary()
  for key in dictionary:
    if dictionary[key] is None:
      continue
    if key == 'global_step':
      # Written as the summary's step below, not as a tagged value.
      continue
    # Check the type BEFORE adding a Value to the proto; previously an
    # unsupported type still left a dangling tag-only Value in the summary.
    if (isinstance(dictionary[key], np.float32) or
        isinstance(dictionary[key], float)):
      value = summary_proto.value.add()
      value.tag = key
      value.simple_value = float(dictionary[key])
    elif (isinstance(dictionary[key], np.int64) or
          isinstance(dictionary[key], np.int32) or
          isinstance(dictionary[key], int)):
      value = summary_proto.value.add()
      value.tag = key
      value.simple_value = int(dictionary[key])
    else:
      logging.warn(
          'Skipping summary for %s, must be a float, np.float32, np.int64, '
          'np.int32 or int.',
          key)
  summary_writer.add_summary(summary_proto, current_global_step)
  summary_writer.flush()
|
{
"content_hash": "b442680109f14eb5184e788a07f21939",
"timestamp": "",
"source": "github",
"line_count": 896,
"max_line_length": 87,
"avg_line_length": 40.43638392857143,
"alnum_prop": 0.656150810079766,
"repo_name": "alivecor/tensorflow",
"id": "1554c271541eeaae2639b29e963a5fa38ad76f9a",
"size": "36921",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/estimator/estimator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7666"
},
{
"name": "C",
"bytes": "198380"
},
{
"name": "C++",
"bytes": "29214526"
},
{
"name": "CMake",
"bytes": "640979"
},
{
"name": "Go",
"bytes": "971217"
},
{
"name": "Java",
"bytes": "407618"
},
{
"name": "Jupyter Notebook",
"bytes": "1833674"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "38189"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "6715"
},
{
"name": "Protocol Buffer",
"bytes": "268983"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "25693552"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "374053"
}
],
"symlink_target": ""
}
|
import sys
import contextlib
from collections import defaultdict
import progressbar
import logging
from ..errors import AngrAnalysisError
from . import registered_analyses
l = logging.getLogger("angr.analysis")
class AnalysisLogEntry(object):
    """A log record produced by an analysis: a message plus, optionally, the
    exception that was active when the entry was created."""

    def __init__(self, message, exc_info=False):
        """
        :param message:  Human-readable description of the event.
        :param exc_info: If True, capture the currently-handled exception
                         (type, value, traceback) from sys.exc_info().
        """
        if exc_info:
            (e_type, value, traceback) = sys.exc_info()
            self.exc_type = e_type
            self.exc_value = value
            self.exc_traceback = traceback
        else:
            self.exc_type = None
            self.exc_value = None
            self.exc_traceback = None

        self.message = message

    def __getstate__(self):
        # Exception objects and tracebacks are not picklable; stringify them.
        return str(self.__dict__.get("exc_type")), \
               str(self.__dict__.get("exc_value")), \
               str(self.__dict__.get("exc_traceback")), \
               self.message

    def __setstate__(self, s):
        self.exc_type, self.exc_value, self.exc_traceback, self.message = s

    @staticmethod
    def _abbreviate_repr(msg_str, limit):
        """Truncate a repr() string to `limit` chars, restoring the closing
        quote that truncation cuts off."""
        if len(msg_str) > limit:
            msg_str = msg_str[:limit - 4] + '...'
            # Only re-append the quote when we actually truncated; doing it
            # unconditionally (the old behavior) doubled the closing quote
            # of short messages, e.g. <AnalysisLogEntry 'hi''>.
            if msg_str[0] in ('"', "'"):
                msg_str += msg_str[0]
        return msg_str

    def __repr__(self):
        if self.exc_type is None:
            msg_str = self._abbreviate_repr(repr(self.message), 70)
            return '<AnalysisLogEntry %s>' % msg_str
        else:
            msg_str = self._abbreviate_repr(repr(self.message), 40)
            return '<AnalysisLogEntry %s with %s: %s>' % (
                msg_str, self.exc_type.__name__, self.exc_value)
class Analyses(object):
    """
    This class contains functions for all the registered and runnable analyses,
    """
    def __init__(self, p):
        """
        Creates an Analyses object

        :ivar p: A project
        :type p: angr.Project
        """
        self.project = p
        self._registered_analyses = {}
        self.reload_analyses()

    def reload_analyses(self):
        # Rebuild the name -> factory map from the global analysis registry.
        for analysis_name, analysis in registered_analyses.iteritems():
            self._registered_analyses[analysis_name] = self._specialize_analysis(analysis, analysis_name)

    def _specialize_analysis(self, analysis, name):
        # Returns a factory callable that instantiates `analysis` with the
        # project, knowledge base, and bookkeeping attributes injected
        # *before* the analysis' own __init__ runs, so __init__ can use them.
        def make_analysis(*args, **kwargs): # pylint: disable=unused-argument
            # These keyword arguments are consumed here and NOT forwarded
            # to the analysis' __init__.
            fail_fast = kwargs.pop('fail_fast', False)
            kb = kwargs.pop('kb', self.project.kb)
            progress_callback = kwargs.pop('progress_callback', None)
            show_progressbar = kwargs.pop('show_progressbar', False)
            # __new__ (not the constructor) so attributes can be set first.
            oself = analysis.__new__(analysis)
            oself.named_errors = {}
            oself.errors = []
            oself.log = []

            oself._fail_fast = fail_fast
            oself._name = name
            oself.project = self.project
            oself.kb = kb
            oself._progress_callback = progress_callback

            if oself._progress_callback is not None:
                if not hasattr(oself._progress_callback, '__call__'):
                    raise AngrAnalysisError('The "progress_callback" parameter must be a None or a callable.')

            oself._show_progressbar = show_progressbar
            oself.__init__(*args, **kwargs)
            return oself

        # Stitch the class and __init__ docstrings together so help() on the
        # factory shows the analysis' documentation.
        cdoc = analysis.__doc__ if analysis.__doc__ else ''
        idoc = analysis.__init__.__doc__ if analysis.__init__.__doc__ else ''
        make_analysis.__doc__ = cdoc + '\n' + idoc
        return make_analysis

    def __getstate__(self):
        # Only the project is pickled; factories are rebuilt on unpickle.
        return self.project

    def __setstate__(self, s):
        self.__init__(s)

    def __getattr__(self, k):
        # Look up unknown attributes in the registered-analyses map, making
        # e.g. `project.analyses.CFG` resolve to a factory.  The explicit
        # '_registered_analyses' guard avoids infinite recursion (e.g. during
        # unpickling, before __init__ has run).
        r = super(Analyses, self).__getattribute__('_registered_analyses')
        if k == '_registered_analyses':
            return r
        if k in r:
            return r[k]
        return super(Analyses, self).__getattribute__(k)

    def __dir__(self):
        # Include the analysis names so they show up in tab completion.
        return dir(Analyses) + self._registered_analyses.keys()
class Analysis(object):
    """
    This class represents an analysis on the program.

    :ivar project: The project for this analysis.
    :type project: angr.Project
    :ivar KnowledgeBase kb: The knowledgebase object.
    :ivar callable _progress_callback: A callback function for receiving the progress of this analysis. It only takes
                                       one argument, which is a float number from 0.0 to 100.0 indicating the current
                                       progress.
    :ivar bool _show_progressbar: If a progressbar should be shown during the analysis. It's independent from
                                  _progress_callback.
    :ivar progressbar.ProgressBar _progressbar: The progress bar object.
    """
    # Class-level defaults.  Instances created through Analyses get fresh
    # per-instance values assigned by its factory before __init__ runs.
    project = None
    kb = None
    _fail_fast = None
    _name = None
    # NOTE(review): these two are *mutable* class attributes, shared by any
    # instance that was not built through the Analyses factory (which assigns
    # per-instance containers) — confirm directly-instantiated subclasses
    # are not relying on them.
    errors = []
    named_errors = defaultdict(list)
    _progress_callback = None
    _show_progressbar = False
    _progressbar = None

    # Widget layout used for the optional console progress bar.
    _PROGRESS_WIDGETS = [
        progressbar.Percentage(),
        ' ',
        progressbar.Bar(),
        ' ',
        progressbar.Timer(),
        ' ',
        progressbar.ETA()
    ]

    @contextlib.contextmanager
    def _resilience(self, name=None, exception=Exception):
        """
        Context manager that logs (rather than propagates) `exception`,
        unless _fail_fast is set, in which case it re-raises immediately.

        :param name:      Optional key under which to record the error in
                          named_errors; unnamed errors go to self.errors.
        :param exception: Exception class (or tuple) to catch.
        """
        try:
            yield
        except exception: # pylint:disable=broad-except
            if self._fail_fast:
                raise
            else:
                # Capture the active exception info into a log entry.
                error = AnalysisLogEntry("exception occurred", exc_info=True)
                l.error("Caught and logged %s with resilience: %s", error.exc_type.__name__, error.exc_value)
                if name is None:
                    self.errors.append(error)
                else:
                    self.named_errors[name].append(error)

    def _initialize_progressbar(self):
        """
        Initialize the progressbar.

        :return: None
        """
        # maxval is 10000 * 100 so that _update_progress(percentage) can pass
        # percentage * 10000 and land exactly on maxval at 100%.
        self._progressbar = progressbar.ProgressBar(widgets=Analysis._PROGRESS_WIDGETS, maxval=10000 * 100).start()

    def _update_progress(self, percentage):
        """
        Update the progress with a percentage, including updating the progressbar as well as calling the progress
        callback.

        :param float percentage: Percentage of the progressbar. from 0.0 to 100.0.
        :return: None
        """
        if self._show_progressbar:
            # Lazily create the bar on first update.
            if self._progressbar is None:
                self._initialize_progressbar()

            self._progressbar.update(percentage * 10000)

        if self._progress_callback is not None:
            self._progress_callback(percentage)  # pylint:disable=not-callable

    def _finish_progress(self):
        """
        Mark the progressbar as finished.

        :return: None
        """
        if self._show_progressbar:
            if self._progressbar is None:
                self._initialize_progressbar()
            if self._progressbar is not None:
                self._progressbar.finish()

        if self._progress_callback is not None:
            # Report completion to the callback as 100%.
            self._progress_callback(100.0)  # pylint:disable=not-callable

    def __repr__(self):
        return '<%s Analysis Result at %#x>' % (self._name, id(self))
|
{
"content_hash": "6b0b7b1bfed4d00edfe1fb49b714d7bf",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 118,
"avg_line_length": 33.40654205607477,
"alnum_prop": 0.5588194153028395,
"repo_name": "f-prettyland/angr",
"id": "1be273625f753066ec1aa2ae04864ba98896c152",
"size": "7149",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "angr/analyses/analysis.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6375"
},
{
"name": "C++",
"bytes": "39375"
},
{
"name": "Makefile",
"bytes": "557"
},
{
"name": "Python",
"bytes": "2934645"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
#
# Author: Henrique Pereira Coutada Miranda
# Tests for yambopy
# Run all the functions of the tutorial
#
import unittest
import sys
import os
import argparse
import subprocess
import filecmp
from yambopy import *
from qepy import *
import imp
#######################################################
# Silicon GW convergence
#######################################################
sys.path.append('../tutorial/si')
import gs_si
import gw_conv_si
class TestGW_Convergence(unittest.TestCase):
    """Tutorial 1: GW convergence calculation for silicon.

    NOTE(review): unittest executes test methods in alphabetical order,
    which is presumably why input generation is named ``test_ainputs`` —
    it must run before ``test_calcs``.  Confirm if renaming.
    """
    def test_ainputs(self):
        # Generate the silicon input files via the gs_si tutorial module.
        gs_si.scf()
        gs_si.relax()
        gs_si.nscf()
        gs_si.bands()

    def test_calcs(self):
        # Run the ground-state calculations; the relax step is disabled.
        #gs_si.run_relax()
        gs_si.run_scf()
        gs_si.run_nscf()
        gs_si.run_bands()
        gs_si.run_plot()
        gs_si.orbitals()

    def test_convergence(self):
        # Create the yambo SAVE folder and run the GW convergence scan.
        gw_conv_si.create_save()
        gw_conv_si.gw_convergence()

    def test_plot(self):
        gw_conv_si.plot_convergence()
#######################################################
# Boron Nitride
#######################################################
sys.path.append('../tutorial/bn')
import gs_bn
import bse_cutoff
class TestCoulomb_Cutoff(unittest.TestCase):
    """Tutorial 2: Coulomb-cutoff convergence test for boron nitride."""

    def test_ainputs(self):
        # Generate the boron-nitride inputs.  This previously called the
        # silicon module `gs_si` — a copy-paste slip, since this tutorial
        # imports and runs `gs_bn`/`bse_cutoff`.
        gs_bn.scf()
        gs_bn.relax()
        gs_bn.nscf()
        gs_bn.bands()

    def test_calcs(self):
        # Shrink the cutoff scan so the test runs in reasonable time.
        bse_cutoff.layer_separations= [12,15,20]
        bse_cutoff.scf_kpoints = [9,9,1]
        bse_cutoff.nscf_kpoints = [6,6,1]
        bse_cutoff.nbands = 10
        bse_cutoff.ecutwf = 40
        bse_cutoff.run(work_folder='bse_cutoff_cut',cut=True)

    def test_plot(self):
        bse_cutoff.plot('bse_cutoff_cut','cutoff_test',cut=True)
#######################################################
# Parallel Bethe-Salpeter MoS2
#######################################################
sys.path.append('../tutorial/mos2')
import gs_mos2
import bse_par_mos2
class TestParallel_BSE(unittest.TestCase):
    """Tutorial 3: parallel Bethe-Salpeter calculation for MoS2."""

    def test_ainputs(self):
        # Generate the MoS2 input files.
        gs_mos2.scf()
        gs_mos2.nscf()

    def test_calcs(self):
        # Run the ground-state calculations.
        gs_mos2.run_scf()
        gs_mos2.run_nscf()

    def test_parallel(self):
        bse_par_mos2.run()

    def test_plot(self):
        bse_par_mos2.plot()
def is_exe(fpath):
    """Return True if `fpath` is an existing regular file with execute permission."""
    if not os.path.isfile(fpath):
        return False
    return os.access(fpath, os.X_OK)
def clean():
    """Delete all folders and files produced by a previous test run."""
    print("cleaning...")
    # Single rm -rf over every artifact the tutorials may leave behind.
    targets = ('relax gw bse_conv bse_cutoff bse_cutoff_cut analyse_bse_conv '
               'bse_par bse gw_conv bands scf nscf database proj.in cutoff_test.png '
               'bse_par.json')
    os.system('rm -rf ' + targets)
    print("done!")
if __name__ == '__main__':
    # Parse options.
    parser = argparse.ArgumentParser(description='Run the tutorials to test yambopy.')
    parser.add_argument('-t1', '--tutorial1', action="store_true",
                        help='Run the GW convergence calculation of Si')
    parser.add_argument('-t2', '--tutorial2', action="store_true",
                        help='Run the tutorial on Coulomb-cutoff in BN')
    parser.add_argument('-t3', '--tutorial3', action="store_true",
                        help='Run the tutorial in Parallel Bethe-Salpeter in MoS2')
    parser.add_argument('-c', '--clean', action="store_true",
                        help='Clean all the data from a previous run')
    args = parser.parse_args()

    if len(sys.argv)==1:
        parser.print_help()
        sys.exit(1)

    # First test if yambo is installed.  The original conditions were
    # inverted (`if is_exe(...)`) and aborted exactly when the binary WAS
    # present; we must abort when it is NOT found.
    # NOTE(review): is_exe checks a literal path, so the binaries must be in
    # the current directory — confirm whether a PATH lookup was intended.
    if not is_exe('yambo'):
        print("yambo not found, please install it before running the tests")
        exit()
    # Then test if pw.x is installed.
    if not is_exe('pw.x'):
        print("pw.x not found, please install it before running the tests")
        exit()

    # Count the number of errors.
    nerrors = 0

    ul = unittest.TestLoader()
    tr = unittest.TextTestRunner(verbosity=2)

    # Test for tutorial 1
    if args.tutorial1:
        suite = ul.loadTestsFromTestCase(TestGW_Convergence)
        nerrors += not tr.run(suite).wasSuccessful()

    # Test for tutorial 2
    if args.tutorial2:
        suite = ul.loadTestsFromTestCase(TestCoulomb_Cutoff)
        nerrors += not tr.run(suite).wasSuccessful()

    # Test for tutorial 3
    if args.tutorial3:
        suite = ul.loadTestsFromTestCase(TestParallel_BSE)
        nerrors += not tr.run(suite).wasSuccessful()

    # Clean up on request, or automatically after a fully successful run.
    if args.clean or nerrors==0:
        clean()

    sys.exit(nerrors)
|
{
"content_hash": "1c7f60400a253e7126e993cec0996089",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 88,
"avg_line_length": 28.470967741935485,
"alnum_prop": 0.5678676637208249,
"repo_name": "henriquemiranda/yambo-py",
"id": "48cfc93eaf7e15d9d435196c2cc5401fe71f51e8",
"size": "4413",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_tutorial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "169"
},
{
"name": "Python",
"bytes": "122190"
},
{
"name": "Shell",
"bytes": "661"
}
],
"symlink_target": ""
}
|
import unittest
import re2
class TestMatch(unittest.TestCase):
    """Tests for re2.match, compiled-pattern matching, and the Match API."""

    def test_const_match(self):
        # A constant pattern matching the whole string: span covers it all,
        # and there are no capture groups.
        m = re2.match('abc', 'abc')
        self.assertIsNotNone(m)
        self.assertEqual(m.start(), 0)
        self.assertEqual(m.end(), 3)
        self.assertEqual(m.span(), (0, 3))
        self.assertEqual(m.groups(), tuple())
        self.assertEqual(m.groupdict(), {})

    def test_group_match(self):
        # One positional group; the span covers only the matched prefix of
        # the subject ('abdfg'), not the trailing 'hij'.
        m = re2.match('ab([cde]fg)', 'abdfghij')
        self.assertIsNotNone(m)
        self.assertEqual(m.start(), 0)
        self.assertEqual(m.end(), 5)
        self.assertEqual(m.span(), (0, 5))
        self.assertEqual(m.groups(), ('dfg',))
        self.assertEqual(m.groupdict(), {})

    def test_compiled_match(self):
        # Matching through a compiled pattern must give the same results as
        # the module-level re2.match above.
        r = re2.compile('ab([cde]fg)')
        m = r.match('abdfghij')
        self.assertIsNotNone(m)
        self.assertEqual(m.start(), 0)
        self.assertEqual(m.end(), 5)
        self.assertEqual(m.span(), (0, 5))
        self.assertEqual(m.groups(), ('dfg',))
        self.assertEqual(m.groupdict(), {})

    def test_match_raise(self):
        '''test that using the API incorrectly fails'''
        # Passing an already-compiled pattern where a string is expected
        # must raise TypeError.
        r = re2.compile('ab([cde]fg)')
        self.assertRaises(TypeError, lambda: re2.match(r, 'abdfghij'))
|
{
"content_hash": "37a3e1ce156cc481798bd8982005ab3d",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 70,
"avg_line_length": 34.583333333333336,
"alnum_prop": 0.5759036144578313,
"repo_name": "simudream/pyre2",
"id": "c61366f74c3137fd7325b8dcdd58ec07fdbeb1d8",
"size": "1245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_match.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "22040"
},
{
"name": "Makefile",
"bytes": "212"
},
{
"name": "Python",
"bytes": "4419"
}
],
"symlink_target": ""
}
|
"""Tasks built from "Learned Optimizers that Scale and Generalize"(losg) code.
paper: https://arxiv.org/pdf/1703.04813
code:
https://github.com/tensorflow/models/tree/master/research/learned_optimizer/problems
These tasks are similar to those used by the losg paper.
The main difference is that these contain different settings than the
original set.
"""
# pylint: enable=line-too-long
import collections
from typing import Any, Dict, Callable, Optional, Text, Tuple, Union, List
import numpy as np
import sonnet as snt
from task_set import datasets
from task_set import registry
from task_set.tasks import base
from task_set.tasks import utils
from task_set.tasks.losg_problems import datasets as losg_datasets
from task_set.tasks.losg_problems import problem_generator as pg
from task_set.tasks.losg_problems import problem_spec
import tensorflow.compat.v1 as tf
# (Problem spec from LOSG, Optional dataset object from LOSG task, batch size)
ProblemDefinition = Tuple[problem_spec.Spec, Optional[losg_datasets.Dataset],
Optional[int]]
class LOSGProblemTask(base.BaseTask):
  """Task built from a task in "Learned Optimizers that Scale and Generalize".

  Reference: https://arxiv.org/pdf/1703.04813
  """

  def __init__(self,
               problem_definition_fn,
               name = "LOSGProblemTask",
               seed = None,
               **kwargs):
    """Creates a Task from a ProblemDefinition.

    The problem definition consists of a tuple containing:
      A problem spec from LOSG
      An optional dataset object from LOSG task
      The batch size

    Args:
      problem_definition_fn: function that returns a problem definition.
      name: name of underlying sonnet module.
      seed: random seed used.
      **kwargs: args passed to BaseTask.
    """
    super(LOSGProblemTask, self).__init__(name=name, **kwargs)
    self._seed = seed
    with self._enter_variable_scope():
      spec, dataset, batch_size = problem_definition_fn()
      self._problem = spec.build()
      if dataset:
        # Wrap the in-memory LOSG dataset in an infinitely-repeating,
        # shuffled tf.data pipeline.  The shuffle buffer spans the whole
        # dataset for a full shuffle.
        examples = dataset.data.shape[0]
        dataset = tf.data.Dataset.from_tensor_slices(dataset)
        dataset = dataset.repeat().shuffle(examples)
        dataset = dataset.batch(batch_size, drop_remainder=True)
        self._iterator = dataset.make_one_shot_iterator()
      else:
        # Data-free problems (e.g. analytic test functions) get no iterator.
        self._iterator = None
      self._variables = self._problem.init_variables(seed=self._seed)

  def call_split(
      self,
      params,
      split,
      batch = None,
      with_metrics = False
  ):
    """Perform a forward pass of the task.

    Note: This changes the numpy global seed!

    Args:
      params: params to use for forward pass.
      split: split of data to use.
      batch: optional batch of data to compute over.
      with_metrics: flag to turn off and off extra metrics.

    Returns:
      Scalar loss computed over a batch of data.
    """
    if batch:
      data, label = batch
    else:
      if self._iterator:
        data, label = self._iterator.get_next()
      else:
        data, label = None, None

    # force random seed here before calling into the problem.
    np.random.seed(self._problem.random_seed)

    @tf.custom_gradient
    def fake_gradient(*params_values):
      # Route the backward pass through the problem's own gradients()
      # implementation instead of TF autodiff.
      loss = self._problem.objective(params_values, data, label)

      def grad(dy):
        # Scale the problem-provided gradients by the incoming cotangent.
        grads = self._problem.gradients(loss, params_values)
        return [g * dy for g in grads]

      return loss, grad

    loss = fake_gradient(*list(params.values()))
    if with_metrics:
      # LOSG problems expose no extra metrics; return an empty dict.
      return loss, {}
    else:
      return loss

  @snt.reuse_variables
  def current_params(self):
    """tf.Variables for the current parameters."""
    # Keyed by variable op name for a stable, ordered mapping.
    array = [(v.op.name, v) for v in self._variables]
    return collections.OrderedDict(array)

  @snt.reuse_variables
  def initial_params(self):
    """Initial values of parameters."""
    # Init tensors are re-sampled with the stored seed and mapped to the
    # same op names as current_params().
    tensors = self._problem.init_tensors(seed=self._seed)
    array = [(v.op.name, t) for v, t in zip(self._variables, tensors)]
    return collections.OrderedDict(array)

  def get_batch(self, split):
    """Get a batch of data.

    Note the split is not used for lol problems.

    Args:
      split: split to take data from. This is not used by this function.

    Returns:
      A batch of data.
    """
    if self._iterator:
      return self._iterator.get_next()
    else:
      return None

  def get_variables(self):
    # Raw list of the tf.Variables backing the problem parameters.
    return self._variables
QuadraticConfig = Dict[Text, float]
def _sample_quadratic_problem(rng):
  """Sample a quadratic problem."""
  # RNG draw order must stay: noise flag, dimension, optional stdev.
  add_noise = utils.sample_bool(rng, 0.5)
  dim = utils.sample_log_int(rng, 10, 1000)
  stdev = utils.sample_log_float(rng, 0.01, 10.0) if add_noise else 0.0
  return {"dim": dim, "noise_stdev": stdev}
def _get_quadratic_problem(cfg):
  """Get a quadratic problem from the given config."""
  # Trailing (None, None): no dataset / batch size is attached.
  return problem_spec.Spec(pg.Quadratic, (cfg["dim"],),
                           {"noise_stdev": cfg["noise_stdev"]}), None, None
BowlConfig = Dict[Text, float]
def _sample_bowl_problems(rng):
  """Sample a bowl problem."""
  add_noise = utils.sample_bool(rng, 0.5)
  cond = utils.sample_log_float(rng, 0.01, 100)
  # 0 appears twice so an axis-aligned bowl is drawn half the time.
  angle = rng.choice([0, 0, np.pi / 4., np.pi / 3])
  stdev = utils.sample_log_float(rng, 0.01, 10.0) if add_noise else 0.0
  return {"cond": cond, "angle": angle, "noise_stdev": stdev}
def _get_bowl_problem(cfg):
  """Get a bowl problem from the given config."""
  # Trailing (None, None): no dataset / batch size is attached.
  return problem_spec.Spec(pg.Bowl, (cfg["cond"],), {
      "noise_stdev": cfg["noise_stdev"],
      "angle": cfg["angle"]
  }), None, None
SparseSoftmaxConfig = Dict[Text, Any]
def _sample_sparse_softmax_regression(
    rng):
  """Sample a sparse softmax regression problem."""
  add_noise = utils.sample_bool(rng, 0.5)
  cfg = {}
  cfg["n_features"] = utils.sample_log_int(rng, 2, 100)
  # Binary classification only.
  cfg["n_classes"] = 2
  cfg["noise_stdev"] = (
      utils.sample_log_float(rng, 0.01, 10.0) if add_noise else 0.0)
  cfg["bs"] = utils.sample_log_int(rng, 1, 50)
  cfg["n_samples"] = utils.sample_log_int(rng, 1, 30)
  return cfg
def _get_sparse_softmax_regression(
    cfg):
  """Get a sparse softmax regression problem."""
  # Data comes from LOSG's noisy-parity generator, batched at cfg["bs"].
  return (problem_spec.Spec(pg.SparseSoftmaxRegression,
                            (cfg["n_features"], cfg["n_classes"]),
                            {"noise_stdev": cfg["noise_stdev"]}),
          losg_datasets.noisy_parity_class(
              cfg["n_samples"], n_classes=cfg["n_classes"]), cfg["bs"])
# Closed-form optimization test functions from LOSG's problem generator,
# keyed by the name stored in configs sampled by
# _sample_optimization_test_problems.
_opt_test_problems = {
    "ackley": pg.Ackley,
    "beale": pg.Beale,
    "branin": pg.Branin,
    "logsumexp": pg.LogSumExp,
    "matyas": pg.Matyas,
    "michalewicz": pg.Michalewicz,
    "rosenbrock": pg.Rosenbrock,
    "StyblinskiTang": pg.StyblinskiTang,
}
OptimizationTestConfig = Dict[Text, Any]
def _sample_optimization_test_problems(
    rng):
  """Sample an optimization test function problem."""
  add_noise = utils.sample_bool(rng, 0.5)
  # Sorting makes the RNG-state -> problem-name mapping deterministic.
  problem_name = rng.choice(sorted(_opt_test_problems.keys()))
  stdev = utils.sample_log_float(rng, 0.01, 10.0) if add_noise else 0.0
  return {"problem": problem_name, "noise_stdev": stdev}
def _get_optimization_test_problems(
    cfg):
  """Get an optimization test function problem form the given config."""
  # Test functions take no dataset; trailing (None, None).
  return problem_spec.Spec(_opt_test_problems[cfg["problem"]], (),
                           {"noise_stdev": cfg["noise_stdev"]}), None, None
FullyConnectedConfig = Dict[Text, Any]
def _sample_fully_connected(rng):
  """Sample a fully connected problem."""
  depth = rng.choice([2, 3, 4, 5])
  tied_widths = utils.sample_bool(rng, 0.5)
  # Draw the remaining settings in the original order.
  cfg = {}
  cfg["n_features"] = utils.sample_log_int(rng, 1, 16)
  cfg["n_classes"] = 2
  cfg["activation"] = utils.sample_activation(rng)
  cfg["bs"] = utils.sample_log_int(rng, 1, 200)
  cfg["n_samples"] = utils.sample_log_int(rng, 1, 30)
  if tied_widths:
    # One width (a single RNG draw) shared across every layer.
    cfg["hidden_sizes"] = [utils.sample_log_int(rng, 4, 32)] * depth
  else:
    cfg["hidden_sizes"] = [
        utils.sample_log_int(rng, 4, 32) for _ in range(depth)
    ]
  return cfg
def _get_fully_connected(cfg):
  """Get a fully connected problem from the given config."""
  # Inputs come from LOSG's random-MLP data generator, batched at cfg["bs"].
  return (problem_spec.Spec(
      pg.FullyConnected, (cfg["n_features"], cfg["n_classes"]), {
          "hidden_sizes": tuple(cfg["hidden_sizes"]),
          "activation": utils.get_activation(cfg["activation"]),
      }), losg_datasets.random_mlp(cfg["n_features"],
                                   cfg["n_samples"]), cfg["bs"])
NormConfig = Dict[Text, Any]
def _sample_norm(rng):
  """Sample a norm problem."""
  dim = utils.sample_log_int(rng, 3, 1000)
  power = rng.uniform(0.1, 5.0)
  return {"dim": dim, "norm_power": power}
def _get_norm(cfg):
  """Get a norm problem from the given config."""
  # Trailing (None, None): no dataset / batch size is attached.
  return (problem_spec.Spec(pg.Norm, (cfg["dim"],),
                            {"norm_power": cfg["norm_power"]}), None, None)
DependencyChainConfig = Dict[Text, Any]
def _sample_dependency_chain(
    rng):
  """Sample a dependency chain problem."""
  cfg = {}
  cfg["dim"] = utils.sample_log_int(rng, 3, 100)
  cfg["bs"] = utils.sample_log_int(rng, 1, 200)
  cfg["n_samples"] = utils.sample_log_int(rng, 100, 20000)
  return cfg
def _get_dependency_chain(cfg):
  """Get a dependency chain problem from the given config."""
  # Inputs come from LOSG's random-MLP data generator, batched at cfg["bs"].
  return (problem_spec.Spec(pg.DependencyChain, (cfg["dim"],), {}),
          losg_datasets.random_mlp(cfg["dim"], cfg["n_samples"]), cfg["bs"])
OutwardSnakeConfig = Dict[Text, Any]
def _sample_outward_snake(rng):
  """Sample an outward snake problem."""
  cfg = {}
  cfg["dim"] = utils.sample_log_int(rng, 3, 100)
  cfg["bs"] = utils.sample_log_int(rng, 1, 200)
  cfg["n_samples"] = utils.sample_log_int(rng, 100, 20000)
  return cfg
def _get_outward_snake(cfg):
  """Get an outward snake problem from the given config."""
  # Inputs come from LOSG's random-MLP data generator, batched at cfg["bs"].
  return (problem_spec.Spec(pg.OutwardSnake, (cfg["dim"],), {}),
          losg_datasets.random_mlp(cfg["dim"], cfg["n_samples"]), cfg["bs"])
MinMaxWellConfig = Dict[Text, Any]
def _sample_min_max_well(rng):
  """Sample a min max well problem."""
  # RNG draw order must stay: noise flag, dimension, optional stdev.
  add_noise = utils.sample_bool(rng, 0.5)
  dim = utils.sample_log_int(rng, 10, 1000)
  stdev = utils.sample_log_float(rng, 0.01, 10.0) if add_noise else 0.0
  return {"dim": dim, "noise_stdev": stdev}
def _get_min_max_well(cfg):
  """Get a min max well problem from the given config."""
  # Trailing (None, None): no dataset / batch size is attached.
  return problem_spec.Spec(pg.MinMaxWell, (cfg["dim"],),
                           {"noise_stdev": cfg["noise_stdev"]}), None, None
SumOfQuadraticsConfig = Dict[Text, Any]
def _sample_sum_of_quadratics(
    rng):
  """Sample a sum of quadratics problem."""
  cfg = {}
  cfg["dim"] = utils.sample_log_int(rng, 3, 100)
  cfg["bs"] = utils.sample_log_int(rng, 1, 200)
  cfg["n_samples"] = utils.sample_log_int(rng, 100, 20000)
  return cfg
def _get_sum_of_quadratics(cfg):
  """Get a sum of quadratics problem from the given config."""
  return (
      problem_spec.Spec(pg.SumOfQuadratics, (cfg["dim"],), {}),
      # dataset size must be divisible by 2.
      losg_datasets.random_symmetric(cfg["dim"],
                                     int(cfg["n_samples"] // 2) * 2),
      cfg["bs"])
ProjectedQuadraticConfig = Dict[Text, Any]
def _sample_projection_quadratic(
    rng):
  """Sample a projection quadratic problem."""
  cfg = {}
  cfg["dim"] = utils.sample_log_int(rng, 3, 100)
  cfg["bs"] = utils.sample_log_int(rng, 1, 200)
  cfg["n_samples"] = utils.sample_log_int(rng, 100, 20000)
  return cfg
def _get_projection_quadratic(
    cfg):
  """Get a projection quadratic problem from the given config."""
  # Dataset size is rounded down to an even number: random_symmetric
  # requires a size divisible by 2 (see _get_sum_of_quadratics).
  return (problem_spec.Spec(pg.ProjectionQuadratic, (cfg["dim"],), {}),
          losg_datasets.random_symmetric(cfg["dim"],
                                         int(cfg["n_samples"] // 2) * 2),
          cfg["bs"])
# Names of the base problem families that the wrapper problems below
# (sparse / rescale / log-objective) may sample and modify.
_to_modify = [
    "quadratic", "bowl", "optimization_test_problems", "fully_connected",
    "norm", "dependency_chain", "outward_snake", "min_max_well",
    "sum_of_quadratics", "projection_quadratic"
]
SparseConfig = Dict[Text, Any]
def _sample_sparse_problem(rng):
  """Sample a sparse problem.

  This problem modifies a sampled base problem by setting some gradients to
  zero.

  Args:
    rng: Random state

  Returns:
    The sampled config.
  """
  # RNG draw order must stay: noise flag, base family, base config,
  # zero probability, optional noise stdev.
  add_noise = utils.sample_bool(rng, 0.5)
  base_name = rng.choice(_to_modify)
  base_cfg = _problem_sample_get[base_name][0](rng)
  zero_prob = rng.uniform(0.9, 0.99)
  stdev = utils.sample_log_float(rng, 0.01, 10.0) if add_noise else 0.0
  return {
      "base": (base_name, base_cfg),
      "zero_probability": zero_prob,
      "noise_stdev": stdev,
  }
def _get_sparse_problem(cfg):
  """Get a sparse problem from the given config."""
  name, cc = cfg["base"]
  # Build the wrapped base problem; its dataset/batch size pass through.
  base_spec, dataset, bs = _problem_sample_get[name][1](cc)
  return (problem_spec.Spec(
      pg.SparseProblem, [base_spec], {
          "zero_probability": cfg["zero_probability"],
          "noise_stdev": cfg["noise_stdev"]
      }), dataset, bs)
RescaleConfig = Dict[Text, Any]
def _sample_rescale_problem(rng):
  """Sample a rescale problem.

  This problem modifies a sampled base problem by rescaling the parameters.

  Args:
    rng: Random state

  Returns:
    The sampled config.
  """
  base_name = rng.choice(_to_modify)
  base_cfg = _problem_sample_get[base_name][0](rng)
  scale = utils.sample_log_float(rng, 0.001, 1000.0)
  return {"base": (base_name, base_cfg), "scale": scale}
def _get_rescale_problem(cfg):
  """Get a rescale problem from the given config."""
  name, cc = cfg["base"]
  # Build the wrapped base problem; its dataset/batch size pass through.
  base_spec, dataset, bs = _problem_sample_get[name][1](cc)
  return (problem_spec.Spec(pg.Rescale, [base_spec],
                            {"scale": cfg["scale"]}), dataset, bs)
LogObjectiveConfig = Dict[Text, Any]
def _sample_log_objective(rng):
  """Sample a log objective problem.

  This problem modifies a sampled base problem by taking the log of the loss.

  Args:
    rng: Random state.

  Returns:
    Config representing a losg task.
  """
  base_name = rng.choice(_to_modify)
  base_cfg = _problem_sample_get[base_name][0](rng)
  return {"base": (base_name, base_cfg)}
def _get_log_objective(cfg):
  """Build the log-objective problem (spec, dataset, batch size) from `cfg`."""
  base_name, base_cfg = cfg["base"]
  base_spec, dataset, batch_size = _problem_sample_get[base_name][1](base_cfg)
  spec = problem_spec.Spec(pg.LogObjective, [base_spec], {})
  return spec, dataset, batch_size
# Registry mapping each problem-family name to a (sampler, getter) pair:
#   sampler(rng) -> family-specific config
#   getter(config) -> (problem_spec.Spec, dataset, batch_size)
# The wrapper families at the bottom (sparse/rescale/log) recursively sample
# a base problem from the families listed in `_to_modify`.
_problem_sample_get = {
    "quadratic": (_sample_quadratic_problem, _get_quadratic_problem),
    "bowl": (_sample_bowl_problems, _get_bowl_problem),
    "sparse_softmax_regression":
        (_sample_sparse_softmax_regression, _get_sparse_softmax_regression),
    "optimization_test_problems":
        (_sample_optimization_test_problems, _get_optimization_test_problems),
    "fully_connected": (_sample_fully_connected, _get_fully_connected),
    "norm": (_sample_norm, _get_norm),
    "dependency_chain": (_sample_dependency_chain, _get_dependency_chain),
    "outward_snake": (_sample_outward_snake, _get_outward_snake),
    "min_max_well": (_sample_min_max_well, _get_min_max_well),
    "sum_of_quadratics": (_sample_sum_of_quadratics, _get_sum_of_quadratics),
    "projection_quadratic":
        (_sample_projection_quadratic, _get_projection_quadratic),
    "sparse_problems": (_sample_sparse_problem, _get_sparse_problem),
    "rescale_problems": (_sample_rescale_problem, _get_rescale_problem),
    "log_objective": (_sample_log_objective, _get_log_objective),
}

# A fully sampled task: (family name, family-specific config, init seed).
SampleProblemConfig = Tuple[Text, Any, int]
@registry.task_registry.register_sampler("losg_tasks_family")
def sample_losg_tasks_family_cfg(seed):
  """Samples a task based on "Learned Optimizers that Scale and Generalize".

  These tasks are all built from components of the losg paper but do not match
  exactly those used to train in that paper. The task suite here provides more
  variation across tasks.

  Args:
    seed: Random seed.

  Returns:
    Config tuple: (family name, family config, init seed).
  """
  rng = np.random.RandomState(seed)
  family = rng.choice(sorted(_problem_sample_get))
  family_cfg = _problem_sample_get[family][0](rng)
  # A random seed is appended so parameter inits are consistent, which gives
  # better scaling behavior.
  return (family, family_cfg, int(rng.uniform(0, 100000)))
@registry.task_registry.register_getter("losg_tasks_family")
def get_losg_tasks_family(cfg, seed=None):
  """Gets the task described by the given config.

  Args:
    cfg: Config tuple (family name, family config, init seed).
    seed: Random seed used for task creation.

  Returns:
    The task corresponding to the given config.
  """
  family, family_cfg, init_seed = cfg

  def _build_problem():
    spec_tuple = _problem_sample_get[family][1](family_cfg)
    # Wrapper problems hold the base problem spec as their first argument, so
    # the init seed must be pushed one level down for those families.
    if family in ("rescale_problems", "log_objective"):
      spec_tuple[0].args[0].kwargs["random_seed"] = init_seed
    else:
      spec_tuple[0].kwargs["random_seed"] = init_seed
    return spec_tuple

  return LOSGProblemTask(_build_problem, seed=seed)
|
{
"content_hash": "9097f1ae5b29bdcf65576a1ccaf512a2",
"timestamp": "",
"source": "github",
"line_count": 579,
"max_line_length": 84,
"avg_line_length": 28.83419689119171,
"alnum_prop": 0.6337226714585206,
"repo_name": "google-research/google-research",
"id": "b9f9b88ab4e5439013f62b87893f66a6aacb914b",
"size": "17335",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "task_set/tasks/losg_tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
}
|
# Demo: compare a multilevel AMG solver used standalone against the same
# solver used as a preconditioner for Conjugate Gradient, on a Poisson
# problem and a linear elasticity problem.
#
# NOTE: this is a Python 2 script (print statements). In Python 2, `input()`
# evaluates the typed text as an expression, so the prompt below expects a
# bare 1 or 2.
import scipy
import numpy
from pyamg.gallery import linear_elasticity, poisson
from pyamg import smoothed_aggregation_solver, rootnode_solver
# Create test cases
trials = []
# 2D Poisson on a 500x500 grid; no near-nullspace candidates (B=None).
A,B = poisson((500,500), format='csr'), None
trials.append( ('Poisson',A,B) )
# 2D linear elasticity on a 200x200 grid; B holds the candidate vectors
# returned by the gallery (used to build the aggregation).
A,B = linear_elasticity((200,200), format='bsr')
trials.append( ('Elasticity',A,B) )
print "Show advantages of accleration for two example problems"
choice = input('\n Input Choice:\n' + \
               '1: Run smoothed_aggregation_solver\n' + \
               '2: Run rootnode_solver\n' )
if choice == 1:
    method = smoothed_aggregation_solver
elif choice == 2:
    method = rootnode_solver
else:
    raise ValueError("Enter a choice of 1 or 2")
for name,A,B in trials:
    # Construct solver using AMG based on Smoothed Aggregation (SA)
    mls = method(A, B=B)
    # Display hierarchy information
    print 'Matrix: %s' % name
    print mls
    # Create random right hand side
    b = scipy.rand(A.shape[0],1)
    # Solve Ax=b with no acceleration ('standalone' solver)
    standalone_residuals = []
    x = mls.solve(b, tol=1e-10, accel=None, residuals=standalone_residuals)
    # Solve Ax=b with Conjugate Gradient (AMG as a preconditioner to CG)
    accelerated_residuals = []
    x = mls.solve(b, tol=1e-10, accel='cg', residuals=accelerated_residuals)
    # Compute relative residuals (normalize by the initial residual)
    standalone_residuals = numpy.array(standalone_residuals)/standalone_residuals[0]
    accelerated_residuals = numpy.array(accelerated_residuals)/accelerated_residuals[0]
    # Plot convergence history
    import pylab
    pylab.figure()
    pylab.title('Convergence History (%s)' % name)
    pylab.xlabel('Iteration')
    pylab.ylabel('Relative Residual')
    pylab.semilogy(standalone_residuals, label='Standalone', linestyle='None', marker='.')
    pylab.semilogy(accelerated_residuals, label='Accelerated', linestyle='None', marker='.')
    pylab.legend()
    print "Close window for program to proceed.\n"
    # pylab.show() blocks until the plot window is closed.
    pylab.show()
|
{
"content_hash": "45035e157290ee116307e2d5a05a4133",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 92,
"avg_line_length": 34.08474576271186,
"alnum_prop": 0.6832421680755842,
"repo_name": "pombreda/pyamg",
"id": "9f6c671b9cad147ad6901c2b2b922fd2c3c2c471",
"size": "2050",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Examples/Preconditioning/demo.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "1112880"
},
{
"name": "CSS",
"bytes": "9832"
},
{
"name": "Makefile",
"bytes": "3249"
},
{
"name": "Matlab",
"bytes": "2742"
},
{
"name": "Python",
"bytes": "1215339"
},
{
"name": "Shell",
"bytes": "558"
},
{
"name": "TeX",
"bytes": "232"
}
],
"symlink_target": ""
}
|
"""Radial Basis Function Representation"""
from rlpy.Tools import perms
from .Representation import Representation
import numpy as np
__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["Alborz Geramifard", "Robert H. Klein", "Christoph Dann",
"William Dabney", "Jonathan P. How"]
__license__ = "BSD 3-Clause"
__author__ = "Alborz Geramifard"
class RBF(Representation):
    """
    Representation that uses a weighted sum of radial basis functions (RBFs)
    to reconstruct the value function. Each RBF has a mean and variance
    based on user-specified resolution_min, resolution_max, and grid_bins.
    See specification in __init__ below.

    Note: this module targets Python 2 (it uses ``xrange``).
    """
    state_dimensions = None
    rbfs_mu = None #: The mean of RBFs
    #: The std dev of the RBFs (uniformly selected between [0, dimension width]
    rbfs_sigma = None
    def __init__(self, domain, num_rbfs=None, state_dimensions=None,
                 const_feature=True, resolution_min=2., resolution_max=None,
                 seed=1, normalize=False, grid_bins=None, include_border=False):
        """
        :param domain: the :py:class`~rlpy.Domains.Domain.Domain` associated
            with the value function we want to learn.
        :param num_rbfs: (Optional) Number of RBFs to use for fnctn
            approximation.  *THIS IS IGNORED* if grid_bins != None, and is
            instead determined by the resolution.
        :param state_dimensions: (Optional) Allows user to select subset of
            state dimensions for representation: ndarray.
        :param const_feature: Boolean, set true to allow for constant offset
        :param resolution_min: If ``grid_bins`` is specified, the standard
            deviation sigma of each RBF is given by the average with
            ``resolution_max``; otherwise it is selected uniform random in the
            range with resolution_max.
        :param resolution_max: If ``grid_bins`` is specified, the standard
            deviation sigma of each RBF is given by the average with
            ``resolution_min``; otherwise it is selected uniform random in the
            range with resolution_min.
        :param seed: To seed the random state generator when placing RBFs.
        :param normalize: (Boolean) If true, normalize returned feature
            function values phi(s) so that sum( phi(s) ) = 1.
        :param grid_bins: ndarray, an int for each dimension, determines
            discretization of each dimension.
        :param include_border: (Boolean) If true, adds an extra RBF to include
            the domain boundaries.
        """
        # A single resolution value may be given; use it for both bounds.
        if resolution_max is None:
            resolution_max = resolution_min
        self.grid_bins = grid_bins
        self.resolution_max = resolution_max
        self.resolution_min = resolution_min
        self.num_rbfs = num_rbfs
        if state_dimensions is not None:
            self.dims = len(state_dimensions)
        else: # just consider all dimensions
            state_dimensions = range(domain.state_space_dims)
            self.dims = domain.state_space_dims
        if self.grid_bins is not None:
            # uniform grid of rbfs; num_rbfs is derived from the grid, and any
            # user-supplied num_rbfs is ignored.
            self.rbfs_mu, self.num_rbfs = self._uniformRBFs(
                self.grid_bins, domain, include_border)
            # For the grid layout sigma is deterministic: the average of the
            # two resolution bounds, identical for every RBF and dimension.
            self.rbfs_sigma = np.ones(
                (self.num_rbfs, self.dims)) * (self.resolution_max + self.resolution_min) / 2
        self.const_feature = const_feature
        self.features_num = self.num_rbfs
        if const_feature:
            self.features_num += 1 # adds a constant 1 to each feature vector
        self.state_dimensions = state_dimensions
        self.normalize = normalize
        super(RBF, self).__init__(domain, seed)
        self.init_randomization()
    def init_randomization(self):
        """Place RBF centers and widths for the scattered layout.

        No-op for grid layouts (centers and sigmas were fixed in __init__).
        Otherwise each center is drawn uniformly inside the domain limits and
        each per-dimension sigma uniformly from
        [width / resolution_max, width / resolution_min].
        """
        if self.grid_bins is not None:
            return
        else:
            # uniformly scattered
            assert(self.num_rbfs is not None)
            self.rbfs_mu = np.zeros((self.num_rbfs, self.dims))
            self.rbfs_sigma = np.zeros((self.num_rbfs, self.dims))
            # NOTE(review): this is the *upper limit* of each dimension, not
            # (max - min); it equals the true width only when the lower limit
            # is 0 -- confirm against the domains used with this class.
            dim_widths = (self.domain.statespace_limits[self.state_dimensions, 1])
            for i in xrange(self.num_rbfs):
                for d in self.state_dimensions:
                    self.rbfs_mu[i, d] = self.random_state.uniform(
                        self.domain.statespace_limits[d, 0],
                        self.domain.statespace_limits[d, 1])
                    self.rbfs_sigma[i,
                                    d] = self.random_state.uniform(
                        dim_widths[d] / self.resolution_max,
                        dim_widths[d] / self.resolution_min)
    def phi_nonTerminal(self, s):
        """Return the feature vector phi(s) for non-terminal state ``s``.

        Each RBF entry is exp(-0.5 * sum_d ((s_d - mu_d) / sigma_d) ** 2);
        when ``const_feature`` is set the final entry stays at 1. If
        ``normalize`` is set, the vector is scaled so its entries sum to 1.
        """
        F_s = np.ones(self.features_num)
        if self.state_dimensions is not None:
            s = s[self.state_dimensions]
        # Squared Mahalanobis-style distance of s to every RBF center,
        # computed for all RBFs at once via broadcasting.
        exponent = np.sum(
            0.5 * ((s - self.rbfs_mu) / self.rbfs_sigma) ** 2,
            axis=1)
        if self.const_feature:
            F_s[:-1] = np.exp(-exponent)
        else:
            F_s[:] = np.exp(-exponent)
        if self.normalize and F_s.sum() != 0.:
            F_s /= F_s.sum()
        return F_s
    def _uniformRBFs(self, bins_per_dimension, domain, includeBorders=False):
        """
        :param bins_per_dimension: Determines the number of RBFs to place
            uniformly in each dimension, see example below.
        :param includeBorders: (Boolean) If true, adds an extra RBF to include
            the domain boundaries.
        Positions RBF Centers uniformly across the state space.\n
        Returns the centers as RBFs-by-dims matrix and number of rbfs.
        Each row is a center of an RBF. \n
        Example: 2D domain where each dimension is in [0,3]
        with bins = [2,3], False => we get 1 center in the first dimension and
        2 centers in the second dimension, hence the combination is:\n
        1.5    1 \n
        1.5    2 \n
        with parameter [2,3], True => we get 3 center in the first dimension
        and 5 centers in the second dimension, hence the combination is: \n
        0      0 \n
        0      1 \n
        0      2 \n
        0      3 \n
        1.5    0 \n
        1.5    1 \n
        1.5    2 \n
        1.5    3 \n
        3      0 \n
        3      1 \n
        3      2 \n
        3      3 \n
        """
        dims = domain.state_space_dims
        # Each dimension contributes bins+1 grid points; dropping the two
        # border points leaves bins-1 interior centers.
        if includeBorders:
            rbfs_num = np.prod(bins_per_dimension[:] + 1)
        else:
            rbfs_num = np.prod(bins_per_dimension[:] - 1)
        all_centers = []
        for d in xrange(dims):
            centers = np.linspace(domain.statespace_limits[d, 0],
                                  domain.statespace_limits[d, 1],
                                  bins_per_dimension[d] + 1)
            if not includeBorders:
                centers = centers[1:-1]  # Exclude the beginning and ending
            all_centers.append(centers.tolist())
        # print all_centers
        # Find all pair combinations of them:
        result = perms(all_centers)
        # print result.shape
        return result, rbfs_num
    def featureType(self):
        # RBF features are real-valued activations in (0, 1].
        return float
|
{
"content_hash": "a7bfd9a78a3f7e6f1096ef6ada59eec1",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 93,
"avg_line_length": 40.666666666666664,
"alnum_prop": 0.5812725757154765,
"repo_name": "MDPvis/rlpy",
"id": "aa2112ba9c61a286728bc733b595c63919526295",
"size": "7198",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rlpy/Representations/RBF.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "117712"
},
{
"name": "C++",
"bytes": "1575"
},
{
"name": "Python",
"bytes": "1175890"
}
],
"symlink_target": ""
}
|
"""
Sqlite3 extensions
==================
* Define custom aggregates, collations and functions
* Basic support for virtual tables
* Basic support for FTS3/4
* Specify isolation level in transactions
Example usage of the Full-text search:
class Document(FTSModel):
title = TextField() # type affinities are ignored in FTS
content = TextField()
Document.create_table(tokenize='porter') # use the porter stemmer
# populate the documents using normal operations.
for doc in documents:
Document.create(title=doc['title'], content=doc['content'])
# use the "match" operation for FTS queries.
matching_docs = Document.select().where(match(Document.title, 'some query'))
# to sort by best match, use the custom "rank" function.
best_docs = (Document
.select(Document, Document.rank('score'))
.where(match(Document.title, 'some query'))
.order_by(SQL('score').desc()))
# or use the shortcut method.
best_docs = Document.match('some phrase')
"""
import inspect
import math
import struct
from peewee import *
from peewee import Expression
from peewee import OP
from peewee import QueryCompiler
from peewee import transaction
try:
import sqlite3
except ImportError:
try:
from pysqlite2 import dbapi2 as sqlite3
except ImportError:
sqlite3 = None
try:
import apsw
except ImportError:
apsw = None
# Pick the newest full-text-search module available: FTS4 requires
# sqlite >= 3.7.4, otherwise fall back to FTS3. If neither the sqlite3 nor
# the apsw binding imported above, FTS_VER stays undefined and defining
# FTSModel below will raise NameError.
if sqlite3:
    FTS_VER = 'FTS4' if sqlite3.sqlite_version_info[:3] >= (3, 7, 4) else 'FTS3'
elif apsw:
    FTS_VER = 'FTS4' if apsw.SQLITE_VERSION_NUMBER >= 3007004 else 'FTS3'
class PrimaryKeyAutoIncrementField(PrimaryKeyField):
    """Integer primary key whose DDL carries sqlite's AUTOINCREMENT flag."""

    def __ddl__(self, column_type):
        # Extend the regular primary-key column DDL with AUTOINCREMENT.
        column_ddl = super(PrimaryKeyAutoIncrementField, self).__ddl__(
            column_type)
        return column_ddl + [SQL('AUTOINCREMENT')]
class SqliteQueryCompiler(QueryCompiler):
    """
    Subclass of QueryCompiler that can be used to construct virtual tables.
    """
    def _create_table(self, model_class, safe=False, options=None):
        """Build the CREATE [VIRTUAL] TABLE clause for `model_class`.

        Starts from the standard CREATE TABLE clause and rewrites it for
        virtual tables, then appends any table options (e.g. FTS `tokenize`)
        as `key=value` entries inside the column-definition list.
        """
        clause = super(SqliteQueryCompiler, self)._create_table(
            model_class, safe=safe)
        if issubclass(model_class, VirtualModel):
            statement = 'CREATE VIRTUAL TABLE'
            # If we are using a special extension, need to insert that after
            # the table name node.
            clause.nodes.insert(2, SQL('USING %s' % model_class._extension))
        else:
            statement = 'CREATE TABLE'
        if safe:
            statement += ' IF NOT EXISTS'
        clause.nodes[0] = SQL(statement)  # Overwrite the statement.
        # Merge per-call options over any options declared on the model Meta.
        table_options = getattr(model_class._meta, 'options', None) or {}
        if options:
            table_options.update(options)
        if table_options:
            # The last node of the clause holds the column/constraint list.
            columns_constraints = clause.nodes[-1]
            # Sorted for deterministic DDL output.
            for k, v in sorted(table_options.items()):
                if isinstance(v, Field):
                    value = v.as_entity(with_table=True)
                elif inspect.isclass(v) and issubclass(v, Model):
                    value = v.as_entity()
                else:
                    value = SQL(v)
                # Render the option as `key=value`.
                option = Clause(SQL(k), value)
                option.glue = '='
                columns_constraints.nodes.append(option)
        return clause
    def create_table(self, model_class, safe=False, options=None):
        # Returns the (sql, params) pair for the generated clause.
        return self.parse_node(self._create_table(model_class, safe, options))
class VirtualModel(Model):
    """Base class for models stored in a sqlite virtual table.

    Subclasses set ``_extension`` to the module name used in
    ``CREATE VIRTUAL TABLE ... USING <extension>``.
    """

    _extension = ''
class FTSModel(VirtualModel):
    """Model stored in a sqlite full-text-search (FTS3/FTS4) virtual table."""

    _extension = FTS_VER

    @classmethod
    def create_table(cls, fail_silently=False, **options):
        """Create the FTS table, forwarding `options` (e.g. tokenize)."""
        if fail_silently and cls.table_exists():
            return
        cls._meta.database.create_table(cls, options=options)
        cls._create_indexes()

    @classmethod
    def _fts_cmd(cls, cmd):
        # The FTS extension exposes maintenance commands through a special
        # INSERT of the form: INSERT INTO tbl(tbl) VALUES('<command>');
        table_name = cls._meta.db_table
        cursor = cls._meta.database.execute_sql(
            "INSERT INTO %s(%s) VALUES('%s');" % (table_name, table_name, cmd))
        return cursor.fetchone()

    @classmethod
    def optimize(cls):
        """Run the FTS 'optimize' command on this table."""
        return cls._fts_cmd('optimize')

    @classmethod
    def rebuild(cls):
        """Run the FTS 'rebuild' command on this table."""
        return cls._fts_cmd('rebuild')

    @classmethod
    def integrity_check(cls):
        """Run the FTS 'integrity-check' command on this table."""
        return cls._fts_cmd('integrity-check')

    @classmethod
    def merge(cls, blocks=200, segments=8):
        """Run an incremental FTS 'merge' with the given parameters."""
        return cls._fts_cmd('merge=%s,%s' % (blocks, segments))

    @classmethod
    def automerge(cls, state=True):
        """Enable or disable the FTS automerge behavior."""
        return cls._fts_cmd('automerge=%s' % ('1' if state else '0'))

    @classmethod
    def match(cls, term):
        """
        Generate a `MATCH` expression appropriate for searching this table.
        """
        return match(cls.as_entity(), term)

    @classmethod
    def rank(cls):
        # Simple matchinfo-based rank; see the module-level rank() function.
        return Rank(cls)

    @classmethod
    def bm25(cls, field=None, k=1.2, b=0.75):
        """Build a bm25() ranking expression over `field` (auto-picked if None)."""
        if field is None:
            field = find_best_search_field(cls)
        column_index = cls._meta.get_field_index(field)
        # bm25() requires matchinfo's 'pcxnal' format.
        match_info = fn.matchinfo(cls.as_entity(), 'pcxnal')
        return fn.bm25(match_info, column_index, k, b)

    @classmethod
    def search(cls, term, alias='score'):
        """Full-text search using selected `term`."""
        query = cls.select(cls, cls.rank().alias(alias))
        query = query.where(cls.match(term))
        return query.order_by(SQL(alias).desc())

    @classmethod
    def search_bm25(cls, term, field=None, k=1.2, b=0.75, alias='score'):
        """Full-text search for selected `term` using BM25 algorithm."""
        if field is None:
            field = find_best_search_field(cls)
        query = cls.select(cls, cls.bm25(field, k, b).alias(alias))
        query = query.where(cls.match(term))
        return query.order_by(SQL(alias).desc())
class _VirtualFieldMixin(object):
    """
    Field mixin for virtual-table attributes that have no backing column.

    The field is registered on the model as usual, then removed from the
    model's field and column maps so it is not treated as stored data.
    """
    def add_to_class(self, model_class, name):
        super(_VirtualFieldMixin, self).add_to_class(model_class, name)
        meta = model_class._meta
        meta.fields.pop(self.name)
        meta.columns.pop(self.db_column)
class VirtualField(_VirtualFieldMixin, BareField):
    """Untyped virtual-table attribute."""


class VirtualIntegerField(_VirtualFieldMixin, IntegerField):
    """Integer-typed virtual-table attribute."""


class VirtualCharField(_VirtualFieldMixin, CharField):
    """String-typed virtual-table attribute."""


class VirtualFloatField(_VirtualFieldMixin, FloatField):
    """Float-typed virtual-table attribute."""
class RowIDField(_VirtualFieldMixin, PrimaryKeyField):
    """Exposes sqlite's implicit rowid as a (virtual) primary key."""

    def add_to_class(self, model_class, name):
        # The rowid alias only works when declared under this exact name.
        if name == 'rowid':
            return super(RowIDField, self).add_to_class(model_class, name)
        raise ValueError('RowIDField must be named `rowid`.')
def ClosureTable(model_class, foreign_key=None):
    """Model factory for the transitive closure extension.

    Returns a VirtualModel subclass backed by sqlite's `transitive_closure`
    extension for the tree stored in `model_class`. If `foreign_key` is not
    given, the first self-referential foreign key on the model is used.
    """
    if foreign_key is None:
        # for/else: raise only if the loop finishes without finding a
        # self-referential foreign key.
        for field_obj in model_class._meta.rel.values():
            if field_obj.rel_model is model_class:
                foreign_key = field_obj
                break
        else:
            raise ValueError('Unable to find self-referential foreign key.')
    primary_key = model_class._meta.primary_key
    class BaseClosureTable(VirtualModel):
        _extension = 'transitive_closure'
        # Virtual attributes exposed by the transitive_closure module.
        depth = VirtualIntegerField()
        id = VirtualIntegerField()
        idcolumn = VirtualIntegerField()
        parentcolumn = VirtualIntegerField()
        root = VirtualIntegerField()
        tablename = VirtualCharField()
        @classmethod
        def descendants(cls, node, depth=None, include_node=False):
            """Select rows below `node`; filter to one `depth` if given."""
            query = (model_class
                     .select(model_class, cls.depth.alias('depth'))
                     .join(cls, on=(primary_key == cls.id))
                     .where(cls.root == node))
            if depth is not None:
                query = query.where(cls.depth == depth)
            elif not include_node:
                # depth 0 is the node itself.
                query = query.where(cls.depth > 0)
            return query
        @classmethod
        def ancestors(cls, node, depth=None, include_node=False):
            """Select rows above `node`; filter to one `depth` if given."""
            query = (model_class
                     .select(model_class, cls.depth.alias('depth'))
                     .join(cls, on=(primary_key == cls.root))
                     .where(cls.id == node))
            if depth:
                query = query.where(cls.depth == depth)
            elif not include_node:
                query = query.where(cls.depth > 0)
            return query
        @classmethod
        def siblings(cls, node, include_node=False):
            """Select rows sharing `node`'s parent (via the raw FK value)."""
            fk_value = node._data.get(foreign_key.name)
            query = model_class.select().where(foreign_key == fk_value)
            if not include_node:
                query = query.where(primary_key != node)
            return query
        class Meta:
            database = model_class._meta.database
            # Module arguments for CREATE VIRTUAL TABLE ... USING
            # transitive_closure(...).
            options = {
                'tablename': model_class._meta.db_table,
                'idcolumn': model_class._meta.primary_key.db_column,
                'parentcolumn': foreign_key.db_column}
            primary_key = False
    name = '%sClosure' % model_class.__name__
    return type(name, (BaseClosureTable,), {'Meta': Meta})
class SqliteExtDatabase(SqliteDatabase):
    """
    Database class which provides additional Sqlite-specific functionality:

    * Register custom aggregates, collations and functions
    * Load C extensions
    * Specify a row factory
    * Advanced transactions (specify isolation level)
    """
    compiler_class = SqliteQueryCompiler
    def __init__(self, *args, **kwargs):
        super(SqliteExtDatabase, self).__init__(*args, **kwargs)
        self._aggregates = {}
        self._collations = {}
        self._functions = {}
        self._extensions = set([])
        self._row_factory = None
        # Make the module-level rank()/bm25() helpers available as SQL
        # functions on every connection by default.
        self.register_function(rank, 'rank', 1)
        self.register_function(bm25, 'bm25', -1)
    def _connect(self, database, **kwargs):
        """Open a connection and re-apply all registered callbacks.

        Sqlite user functions, aggregates and collations are per-connection
        state, so they must be installed on each new connection.
        """
        conn = super(SqliteExtDatabase, self)._connect(database, **kwargs)
        self._load_aggregates(conn)
        self._load_collations(conn)
        self._load_functions(conn)
        if self._row_factory:
            conn.row_factory = self._row_factory
        if self._extensions:
            conn.enable_load_extension(True)
            for extension in self._extensions:
                conn.load_extension(extension)
        return conn
    def _load_aggregates(self, conn):
        for name, (klass, num_params) in self._aggregates.items():
            conn.create_aggregate(name, num_params, klass)
    def _load_collations(self, conn):
        for name, fn in self._collations.items():
            conn.create_collation(name, fn)
    def _load_functions(self, conn):
        for name, (fn, num_params) in self._functions.items():
            conn.create_function(name, num_params, fn)
    def register_aggregate(self, klass, name=None, num_params=-1):
        """Register an aggregate class; applied now if already connected."""
        self._aggregates[name or klass.__name__.lower()] = (klass, num_params)
        if not self.is_closed():
            self._load_aggregates(self.get_conn())
    def aggregate(self, name=None, num_params=-1):
        """Class decorator form of `register_aggregate`."""
        def decorator(klass):
            self.register_aggregate(klass, name, num_params)
            return klass
        return decorator
    def register_collation(self, fn, name=None):
        """Register a collation function; applied now if already connected."""
        name = name or fn.__name__
        def _collation(*args):
            # Helper attached to the function for building
            # `<expr> COLLATE <name>` clauses in queries.
            expressions = args + (SQL('collate %s' % name),)
            return Clause(*expressions)
        fn.collation = _collation
        self._collations[name] = fn
        if not self.is_closed():
            self._load_collations(self.get_conn())
    def collation(self, name=None):
        """Decorator form of `register_collation`."""
        def decorator(fn):
            self.register_collation(fn, name)
            return fn
        return decorator
    def register_function(self, fn, name=None, num_params=-1):
        """Register a scalar function; applied now if already connected."""
        self._functions[name or fn.__name__] = (fn, num_params)
        if not self.is_closed():
            self._load_functions(self.get_conn())
    def func(self, name=None, num_params=-1):
        """Decorator form of `register_function`."""
        def decorator(fn):
            self.register_function(fn, name, num_params)
            return fn
        return decorator
    def load_extension(self, extension):
        # Loaded on each new connection (see _connect).
        self._extensions.add(extension)
    def unregister_aggregate(self, name):
        del(self._aggregates[name])
    def unregister_collation(self, name):
        del(self._collations[name])
    def unregister_function(self, name):
        del(self._functions[name])
    def unload_extension(self, extension):
        self._extensions.remove(extension)
    def row_factory(self, fn):
        # Applied to new connections only; existing connections keep theirs.
        self._row_factory = fn
    def create_table(self, model_class, safe=False, options=None):
        """Create a table, forwarding virtual-table `options` to the compiler."""
        sql, params = self.compiler().create_table(model_class, safe, options)
        return self.execute_sql(sql, params)
    def create_index(self, model_class, field_name, unique=False):
        # FTS virtual tables manage their own indexes; skip index creation.
        if issubclass(model_class, FTSModel):
            return
        return super(SqliteExtDatabase, self).create_index(
            model_class, field_name, unique)
    def granular_transaction(self, lock_type='deferred'):
        """Transaction context manager with an explicit sqlite lock type."""
        assert lock_type.lower() in ('deferred', 'immediate', 'exclusive')
        return granular_transaction(self, lock_type)
class granular_transaction(transaction):
    """Transaction context manager using an explicit sqlite lock type."""

    def __init__(self, db, lock_type='deferred'):
        self.db = db
        self.lock_type = lock_type
        self.conn = self.db.get_conn()

    def _begin(self):
        # Issues BEGIN with the requested lock type instead of the default.
        self.db.begin(self.lock_type)
# Teach the compiler about the full-text MATCH operator used by FTS queries.
OP.MATCH = 'match'
SqliteExtDatabase.register_ops({
    OP.MATCH: 'MATCH',
})
def match(lhs, rhs):
    """Build an ``lhs MATCH rhs`` expression for full-text queries."""
    return Expression(lhs, OP.MATCH, rhs)
# Shortcut for calculating ranks.
# Rank: simple hit-ratio ranking over matchinfo's default format.
Rank = lambda model: fn.rank(fn.matchinfo(model.as_entity()))
# BM25: relevance of column `idx` using matchinfo's 'pcxnal' format.
BM25 = lambda mc, idx: fn.bm25(fn.matchinfo(mc.as_entity(), 'pcxnal'), idx)
def find_best_search_field(model_class):
    """Pick a sensible default field for full-text queries.

    Prefers the model's first TextField, then its first CharField, and
    falls back to the last declared field when neither exists.
    """
    fields = model_class._meta.get_fields()
    for preferred_type in (TextField, CharField):
        for candidate in fields:
            if isinstance(candidate, preferred_type):
                return candidate
    return fields[-1]
def _parse_match_info(buf):
# See http://sqlite.org/fts3.html#matchinfo
bufsize = len(buf) # Length in bytes.
return [struct.unpack('@I', buf[i:i+4])[0] for i in range(0, bufsize, 4)]
# Ranking implementation, which parse matchinfo.
def rank(raw_match_info):
# Handle match_info called w/default args 'pcx' - based on the example rank
# function http://sqlite.org/fts3.html#appendix_a
match_info = _parse_match_info(raw_match_info)
score = 0.0
p, c = match_info[:2]
for phrase_num in range(p):
phrase_info_idx = 2 + (phrase_num * c * 3)
for col_num in range(c):
col_idx = phrase_info_idx + (col_num * 3)
x1, x2 = match_info[col_idx:col_idx + 2]
if x1 > 0:
score += float(x1) / x2
return score
# Okapi BM25 ranking implementation (FTS4 only).
def bm25(raw_match_info, column_index, k1=1.2, b=0.75):
"""
Usage:
# Format string *must* be pcxnal
# Second parameter to bm25 specifies the index of the column, on
# the table being queries.
bm25(matchinfo(document_tbl, 'pcxnal'), 1) AS rank
"""
match_info = _parse_match_info(raw_match_info)
score = 0.0
# p, 1 --> num terms
# c, 1 --> num cols
# x, (3 * p * c) --> for each phrase/column,
# term_freq for this column
# term_freq for all columns
# total documents containing this term
# n, 1 --> total rows in table
# a, c --> for each column, avg number of tokens in this column
# l, c --> for each column, length of value for this column (in this row)
# s, c --> ignore
p, c = match_info[:2]
n_idx = 2 + (3 * p * c)
a_idx = n_idx + 1
l_idx = a_idx + c
n = match_info[n_idx]
a = match_info[a_idx: a_idx + c]
l = match_info[l_idx: l_idx + c]
total_docs = n
avg_length = float(a[column_index])
doc_length = float(l[column_index])
if avg_length == 0:
D = 0
else:
D = 1 - b + (b * (doc_length / avg_length))
for phrase in range(p):
# p, c, p0c01, p0c02, p0c03, p0c11, p0c12, p0c13, p1c01, p1c02, p1c03..
# So if we're interested in column <i>, the counts will be at indexes
x_idx = 2 + (3 * column_index * (phrase + 1))
term_freq = float(match_info[x_idx])
term_matches = float(match_info[x_idx + 2])
# The `max` check here is based on a suggestion in the Wikipedia
# article. For terms that are common to a majority of documents, the
# idf function can return negative values. Applying the max() here
# weeds out those values.
idf = max(
math.log(
(total_docs - term_matches + 0.5) /
(term_matches + 0.5)),
0)
denom = term_freq + (k1 * D)
if denom == 0:
rhs = 0
else:
rhs = (term_freq * (k1 + 1)) / denom
score += (idf * rhs)
return score
|
{
"content_hash": "d6555617ef31268c43b3f2ff4d993ebb",
"timestamp": "",
"source": "github",
"line_count": 511,
"max_line_length": 80,
"avg_line_length": 33.262230919765166,
"alnum_prop": 0.6014590810142967,
"repo_name": "fuzeman/peewee",
"id": "9ad004c7001976e26e0efb4f23a3e0d7b2c646d3",
"size": "16997",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "playhouse/sqlite_ext.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "903582"
},
{
"name": "Shell",
"bytes": "4502"
}
],
"symlink_target": ""
}
|
import asyncio
import logging
import pytest
import lightbus
import lightbus.creation
import lightbus.transports.redis.event
import lightbus.transports.redis.result
import lightbus.transports.redis.rpc
import lightbus.transports.redis.schema
from lightbus.exceptions import BusAlreadyClosed
from lightbus.transports.redis.event import StreamUse
logger = logging.getLogger(__name__)
@pytest.fixture
async def redis_rpc_transport(new_redis_pool, loop):
    """RPC transport backed by a fresh, high-capacity redis pool."""
    pool = await new_redis_pool(maxsize=10000)
    return lightbus.transports.redis.rpc.RedisRpcTransport(redis_pool=pool)
@pytest.fixture
async def redis_result_transport(new_redis_pool, loop):
    """Result transport backed by a fresh, high-capacity redis pool."""
    pool = await new_redis_pool(maxsize=10000)
    return lightbus.transports.redis.result.RedisResultTransport(
        redis_pool=pool)
@pytest.fixture
async def redis_event_transport(new_redis_pool, loop):
    """Event transport fixture; the transport is closed after the test."""
    pool = await new_redis_pool(maxsize=10000)
    transport = lightbus.transports.redis.event.RedisEventTransport(
        redis_pool=pool,
        service_name="test_service",
        consumer_name="test_consumer",
        # This used to be the default, so we still test against it here
        stream_use=StreamUse.PER_EVENT,
    )
    yield transport
    await transport.close()
@pytest.fixture
async def redis_schema_transport(new_redis_pool, loop):
    """Schema transport backed by a fresh, high-capacity redis pool."""
    pool = await new_redis_pool(maxsize=10000)
    return lightbus.transports.redis.schema.RedisSchemaTransport(
        redis_pool=pool)
@pytest.fixture
async def bus(new_bus):
    """A bus instance, shut down after the test unless already closed."""
    bus_instance = new_bus()
    yield bus_instance
    try:
        await bus_instance.client.stop_worker()
        await bus_instance.client.close_async()
    except BusAlreadyClosed:
        # The test closed the bus itself; nothing left to clean up.
        pass
@pytest.fixture(name="fire_dummy_events")
def fire_dummy_events_fixture(bus):
    """Factory fixture returning a coroutine that fires N dummy events."""

    async def fire_dummy_events(total, initial_delay=0.1):
        # Give listeners a moment to start up before the first event.
        await asyncio.sleep(initial_delay)
        for event_number in range(total):
            await bus.my.dummy.my_event.fire_async(field=str(event_number))
        logger.warning("TEST: fire_dummy_events() completed")

    return fire_dummy_events
|
{
"content_hash": "1b875e53a662498dd7527a1816be5670",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 71,
"avg_line_length": 27.36986301369863,
"alnum_prop": 0.7232232232232232,
"repo_name": "adamcharnock/lightbus",
"id": "ab7d06d1ac34284d8b44c0c61dc874d9269caf2e",
"size": "1998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/transports/redis/conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "710699"
}
],
"symlink_target": ""
}
|
"""Xcode-ninja wrapper project file generator.
This updates the data structures passed to the Xcode gyp generator to build
with ninja instead. The Xcode project itself is transformed into a list of
executable targets, each with a build step to build with ninja, and a target
with every source and resource file. This appears to sidestep some of the
major performance headaches experienced using complex projects and large number
of targets within Xcode.
"""
import errno
import gyp.generator.ninja
import os
import re
import xml.sax.saxutils
def _WriteWorkspace(main_gyp, sources_gyp):
  """ Create a workspace to wrap main and sources gyp paths.

  Writes <main>.xcworkspace/contents.xcworkspacedata referencing the two
  generated .xcodeproj bundles. Python 2 only (uses `except OSError, e`).
  """
  (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
  workspace_path = build_file_root + '.xcworkspace'
  try:
    os.makedirs(workspace_path)
  except OSError, e:
    # Ignore "already exists"; re-raise anything else.
    if e.errno != errno.EEXIST:
      raise
  output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
                  '<Workspace version = "1.0">\n'
  for gyp_name in [main_gyp, sources_gyp]:
    name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
    # quoteattr adds the surrounding quotes and escapes XML special chars.
    name = xml.sax.saxutils.quoteattr("group:" + name)
    output_string += '  <FileRef location = %s></FileRef>\n' % name
  output_string += '</Workspace>\n'
  workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
  # Skip the write when the content is unchanged so the file's mtime is not
  # touched unnecessarily.
  try:
    with open(workspace_file, 'r') as input_file:
      input_string = input_file.read()
      if input_string == output_string:
        return
  except IOError:
    # Ignore errors if the file doesn't exist.
    pass
  with open(workspace_file, 'w') as output_file:
    output_file.write(output_string)
def _TargetFromSpec(old_spec, params):
""" Create fake target for xcode-ninja wrapper. """
# Determine ninja top level build dir (e.g. /path/to/out).
ninja_toplevel = None
jobs = 0
if params:
options = params['options']
ninja_toplevel = \
os.path.join(options.toplevel_dir,
gyp.generator.ninja.ComputeOutputDir(params))
jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
target_name = old_spec.get('target_name')
product_name = old_spec.get('product_name', target_name)
ninja_target = {}
ninja_target['target_name'] = target_name
ninja_target['product_name'] = product_name
ninja_target['toolset'] = old_spec.get('toolset')
ninja_target['default_configuration'] = old_spec.get('default_configuration')
ninja_target['configurations'] = {}
# Tell Xcode to look in |ninja_toplevel| for build products.
new_xcode_settings = {}
if ninja_toplevel:
new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
if 'configurations' in old_spec:
for config in old_spec['configurations'].iterkeys():
old_xcode_settings = old_spec['configurations'][config]['xcode_settings']
if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
ninja_target['configurations'][config] = {}
ninja_target['configurations'][config]['xcode_settings'] = \
new_xcode_settings
ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
ninja_target['type'] = old_spec['type']
if ninja_toplevel:
ninja_target['actions'] = [
{
'action_name': 'Compile and copy %s via ninja' % target_name,
'inputs': [],
'outputs': [],
'action': [
'env',
'PATH=%s' % os.environ['PATH'],
'ninja',
'-C',
new_xcode_settings['CONFIGURATION_BUILD_DIR'],
target_name,
],
'message': 'Compile and copy %s via ninja' % target_name,
},
]
if jobs > 0:
ninja_target['actions'][0]['action'].extend(('-j', jobs))
return ninja_target
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
  """Decide whether a target is included in the Xcode wrapper project.

  Xcode sometimes performs poorly with too many targets, so only include
  proper executable targets, with filters to customize.
  Arguments:
    target_extras: Regular expression to always add, matching any target.
    executable_target_pattern: Regular expression limiting executable targets.
    spec: Specifications for target.
  """
  target_name = spec.get('target_name')

  # Targets matching the 'extras' pattern are always kept, whatever type.
  if target_extras is not None and re.search(target_extras, target_name):
    return True

  # Everything else must be a genuine executable (and not a bundle).
  if spec.get('type', '') != 'executable' or \
     spec.get('product_extension', '') == 'bundle':
    return False

  # Apply the optional executable-name filter.
  if executable_target_pattern is not None and \
     not re.search(executable_target_pattern, target_name):
    return False
  return True
def CreateWrapper(target_list, target_dicts, data, params):
  """Initialize targets for the ninja wrapper.

  This sets up the necessary variables in the targets to generate Xcode
  projects that use ninja as an external builder.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    data: Dict of flattened build files keyed on gyp path.
    params: Dict of global options for gyp.
  Returns:
    A (new_target_list, new_target_dicts, new_data) triple describing the
    wrapper project plus one 'sources_for_indexing' target.
  """
  orig_gyp = params['build_files'][0]
  # NOTE(review): data.iteritems() / target_dicts.iteritems() below are
  # Python 2-only. Also, |depth| stays unbound if orig_gyp is absent from
  # |data| — presumably gyp guarantees it is present; confirm before reuse.
  for gyp_name, gyp_dict in data.iteritems():
    if gyp_name == orig_gyp:
      depth = gyp_dict['_DEPTH']

  # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
  # and prepend .ninja before the .gyp extension.
  generator_flags = params.get('generator_flags', {})
  main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
  if main_gyp is None:
    (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
    main_gyp = build_file_root + ".ninja" + build_file_ext

  # Create new |target_list|, |target_dicts| and |data| data structures.
  new_target_list = []
  new_target_dicts = {}
  new_data = {}

  # Set base keys needed for |data|.
  new_data[main_gyp] = {}
  new_data[main_gyp]['included_files'] = []
  new_data[main_gyp]['targets'] = []
  new_data[main_gyp]['xcode_settings'] = \
      data[orig_gyp].get('xcode_settings', {})

  # Normally the xcode-ninja generator includes only valid executable targets.
  # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
  # executable targets that match the pattern. (Default all)
  executable_target_pattern = \
      generator_flags.get('xcode_ninja_executable_target_pattern', None)

  # For including other non-executable targets, add the matching target name
  # to the |xcode_ninja_target_pattern| regular expression. (Default none)
  target_extras = generator_flags.get('xcode_ninja_target_pattern', None)

  for old_qualified_target in target_list:
    spec = target_dicts[old_qualified_target]
    if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
      # Add to new_target_list.
      target_name = spec.get('target_name')
      new_target_name = '%s:%s#target' % (main_gyp, target_name)
      new_target_list.append(new_target_name)

      # Add to new_target_dicts: the wrapper target shells out to ninja.
      new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)

      # Add to new_data: mirror the (name, toolset) pair of the original
      # target under the main wrapper gyp.
      for old_target in data[old_qualified_target.split(':')[0]]['targets']:
        if old_target['target_name'] == target_name:
          new_data_target = {}
          new_data_target['target_name'] = old_target['target_name']
          new_data_target['toolset'] = old_target['toolset']
          new_data[main_gyp]['targets'].append(new_data_target)

  # Create sources target: a fake executable collecting every source and
  # resource file so Xcode can index them.
  sources_target_name = 'sources_for_indexing'
  sources_target = _TargetFromSpec(
    { 'target_name' : sources_target_name,
      'toolset': 'target',
      'default_configuration': 'Default',
      'mac_bundle': '0',
      'type': 'executable'
    }, None)

  # Tell Xcode to look everywhere for headers.
  sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }

  sources = []
  for target, target_dict in target_dicts.iteritems():
    base = os.path.dirname(target)
    files = target_dict.get('sources', []) + \
            target_dict.get('mac_bundle_resources', [])
    # Remove files starting with $. These are mostly intermediate files for the
    # build system.
    # NOTE(review): |file| shadows the Python 2 builtin of the same name.
    files = [ file for file in files if not file.startswith('$')]

    # Make sources relative to root build file.
    relative_path = os.path.dirname(main_gyp)
    sources += [ os.path.relpath(os.path.join(base, file), relative_path)
                 for file in files ]

  # De-duplicate and sort for a stable project file.
  sources_target['sources'] = sorted(set(sources))

  # Put sources_to_index in it's own gyp.
  sources_gyp = \
      os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
  fully_qualified_target_name = \
      '%s:%s#target' % (sources_gyp, sources_target_name)

  # Add to new_target_list, new_target_dicts and new_data.
  new_target_list.append(fully_qualified_target_name)
  new_target_dicts[fully_qualified_target_name] = sources_target
  new_data_target = {}
  new_data_target['target_name'] = sources_target['target_name']
  new_data_target['_DEPTH'] = depth
  new_data_target['toolset'] = "target"
  new_data[sources_gyp] = {}
  new_data[sources_gyp]['targets'] = []
  new_data[sources_gyp]['included_files'] = []
  new_data[sources_gyp]['xcode_settings'] = \
      data[orig_gyp].get('xcode_settings', {})
  new_data[sources_gyp]['targets'].append(new_data_target)

  # Write workspace to file.
  _WriteWorkspace(main_gyp, sources_gyp)
  return (new_target_list, new_target_dicts, new_data)
|
{
"content_hash": "a01800f7d9aae992f61be3bd3a7bbabe",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 80,
"avg_line_length": 38.67193675889328,
"alnum_prop": 0.6690515126737531,
"repo_name": "SimtterCom/gyp",
"id": "0e5a70c714efcce1d231e07b14dc7edf8c53d063",
"size": "9941",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "pylib/gyp/xcode_ninja.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1021"
},
{
"name": "C",
"bytes": "36440"
},
{
"name": "C++",
"bytes": "34049"
},
{
"name": "Objective-C",
"bytes": "4466"
},
{
"name": "Objective-C++",
"bytes": "2058"
},
{
"name": "Python",
"bytes": "2002440"
},
{
"name": "Shell",
"bytes": "12157"
}
],
"symlink_target": ""
}
|
"""Tests for the help plugin."""
from dungeonbot.conftest import BaseTest
from dungeonbot.plugins.help import HelpPlugin
from unittest import mock
class HelpPluginUnitTests(BaseTest):
    """Unit tests for the help plugin's topic dispatch."""

    def test_help_topic_exists(self):
        """A known topic key dispatches to the registered plugin class."""
        fake_event = mock.MagicMock()
        args = "test_key"
        plugin = HelpPlugin(fake_event, args)
        topic_plugin = mock.MagicMock()
        with mock.patch.object(plugin, "help_topics", {'test_key': topic_plugin}):
            plugin.run()
        self.assertTrue(topic_plugin.called)
        topic_plugin.assert_called_with(fake_event, args)

    def test_help_topic_doesnt_exist(self):
        """An unknown (empty) topic falls back to HelpPlugin's own help."""
        fake_event = mock.MagicMock()
        args = ""
        plugin = HelpPlugin(fake_event, args)
        topic_plugin = mock.MagicMock()
        own_help = mock.MagicMock()
        with mock.patch.object(plugin, "help_topics", {'test_key': topic_plugin}):
            with mock.patch.object(plugin, "help", own_help):
                plugin.run()
        self.assertFalse(topic_plugin.called)
        self.assertTrue(own_help.called)
|
{
"content_hash": "a0257b1038c67d00e53306df65f74b41",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 70,
"avg_line_length": 31.818181818181817,
"alnum_prop": 0.6307142857142857,
"repo_name": "tlake/dungeonbot",
"id": "4d6e0014c058ca4b7cd94e644613b5ba6f3d7530",
"size": "1400",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/dungeonbot/tests/test_help.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "467"
},
{
"name": "Mako",
"bytes": "824"
},
{
"name": "Python",
"bytes": "172337"
},
{
"name": "Shell",
"bytes": "389"
}
],
"symlink_target": ""
}
|
'''
# =============================================================================
# FileName: factory.py
# Desc: a general factory which is used to generate other factories
# Author: ifkite
# Email: holahello@163.com
# HomePage: http://github.com/ifkite
# python version: 2.7.10
# CreateTime: 2017-10-06 16:10:50
# =============================================================================
'''
class GeneralFactory(object):
    """Meta-factory: each call to gen() manufactures an independent
    Factory class with its own (initially empty) backend registry."""

    @staticmethod
    def gen():
        """Return a brand-new Factory class.

        The returned class resolves its constructor argument against the
        class-level ``backend_dict`` registry; unknown names leave the
        backend as None.
        """
        class Factory(object):
            # Registry shared by all instances of this particular Factory
            # class; populate it before instantiating.
            backend_dict = {}

            def __init__(self, name):
                self.backend = self.backend_dict.get(name)

            def build(self, *args, **kwargs):
                """Instantiate the selected backend with the given args."""
                return self.backend(*args, **kwargs)

            def get_backend(self):
                """Return the backend callable (or None if unknown)."""
                return self.backend

        return Factory
|
{
"content_hash": "7b6143187378d8a63e54f94d5f45cfaf",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 79,
"avg_line_length": 30.714285714285715,
"alnum_prop": 0.45,
"repo_name": "ifkite/spichi",
"id": "b97a0d8192d2f65526faa97df5fab8bc95c03009",
"size": "884",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26145"
}
],
"symlink_target": ""
}
|
def extractLovelikeathousandarrowsWordpressCom(item):
    '''
    Parser for 'lovelikeathousandarrows.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # Skip items carrying no chapter/volume information, and previews.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    # (tag to match, release name, translation type) — first match wins.
    tagmap = [
        ('green plum fairy road', 'green plum fairy road', 'translated'),
        ('the eternal heavenly dao system of ten thousand realms', 'the eternal heavenly dao system of ten thousand realms', 'translated'),
        ('the super special forces king', 'the super special forces king', 'translated'),
        ('urban strenghtening system', 'urban strenghtening system', 'translated'),
        ('super special forces king', 'super special forces king', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]

    for tag_label, series_name, tl_type in tagmap:
        if tag_label in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)

    return False
|
{
"content_hash": "b88104ef1ed8dbfe88c4548f946ca1f1",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 160,
"avg_line_length": 46.4,
"alnum_prop": 0.6043103448275862,
"repo_name": "fake-name/ReadableWebProxy",
"id": "feeaf4a0b1dddda8fb74d026426cf89e74018e10",
"size": "1160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebMirror/management/rss_parser_funcs/feed_parse_extractLovelikeathousandarrowsWordpressCom.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "105811"
},
{
"name": "Dockerfile",
"bytes": "1178"
},
{
"name": "HTML",
"bytes": "119737"
},
{
"name": "JavaScript",
"bytes": "3006524"
},
{
"name": "Jupyter Notebook",
"bytes": "148075"
},
{
"name": "Mako",
"bytes": "1454"
},
{
"name": "Python",
"bytes": "5264346"
},
{
"name": "Shell",
"bytes": "1059"
}
],
"symlink_target": ""
}
|
import sys
import os
import os.path
import xml.dom.minidom
import argparse
class Config():
    """Inject Sonatype server credentials and/or a Nexus mirror into the
    user's Maven settings file (<home>/.m2/settings.xml) for CI builds."""

    def __add_server(self, parent_node, settings, server_name):
        """Append a <server> entry named *server_name* whose credentials come
        from the SONATYPE_USERNAME / SONATYPE_PASSWORD environment variables.

        Exits the process when Travis secure environment variables are not
        available (e.g. on forked pull requests).
        """
        try:
            os.environ["TRAVIS_SECURE_ENV_VARS"]
        except KeyError:
            # Fix: the original Python 2 'print' statements are a
            # SyntaxError on Python 3; print() with one argument behaves
            # identically on both.
            print("no secure env vars available, please declare it first")
            sys.exit()
        serversNodes = settings.getElementsByTagName("servers")
        if not serversNodes:
            serversNode = parent_node.createElement("servers")
            settings.appendChild(serversNode)
        else:
            serversNode = serversNodes[0]
        sonatypeServerNode = parent_node.createElement("server")
        sonatypeServerId = parent_node.createElement("id")
        sonatypeServerUser = parent_node.createElement("username")
        sonatypeServerPass = parent_node.createElement("password")
        idNode = parent_node.createTextNode(server_name)
        userNode = parent_node.createTextNode(os.environ["SONATYPE_USERNAME"])
        passNode = parent_node.createTextNode(os.environ["SONATYPE_PASSWORD"])
        sonatypeServerId.appendChild(idNode)
        sonatypeServerUser.appendChild(userNode)
        sonatypeServerPass.appendChild(passNode)
        sonatypeServerNode.appendChild(sonatypeServerId)
        sonatypeServerNode.appendChild(sonatypeServerUser)
        sonatypeServerNode.appendChild(sonatypeServerPass)
        serversNode.appendChild(sonatypeServerNode)

    def __add_mirror(self, parent_node, settings):
        """Append a <mirrors> block routing every repository ('*') through
        the project's Nexus instance."""
        mirrors = parent_node.createElement("mirrors")
        settings.appendChild(mirrors)
        mirror = parent_node.createElement("mirror")
        mirror_id = parent_node.createElement("id")
        mirror_id_text = parent_node.createTextNode("nexus")
        mirror_mirrorOf = parent_node.createElement("mirrorOf")
        mirror_mirrorOf_text = parent_node.createTextNode("*")
        mirror_url = parent_node.createElement("url")
        mirror_url_value = parent_node.createTextNode("http://130.206.80.85/nexus/content/groups/public")
        mirrors.appendChild(mirror)
        mirror_id.appendChild(mirror_id_text)
        mirror_mirrorOf.appendChild(mirror_mirrorOf_text)
        mirror_url.appendChild(mirror_url_value)
        mirror.appendChild(mirror_id)
        mirror.appendChild(mirror_mirrorOf)
        mirror.appendChild(mirror_url)

    def configure_server(self, server=True, mirrors=True, home_dir=os.path.expanduser("~")):
        """Rewrite home_dir/.m2/settings.xml in place.

        Arguments:
            server: When True, add repo-release / repo-snapshot credentials.
            mirrors: When True, add the Nexus mirror.
            home_dir: Base directory containing .m2/ (default: the user's
                home; note the default is evaluated once at class-definition
                time).
        """
        m2 = xml.dom.minidom.parse(home_dir + '/.m2/settings.xml')
        settings = m2.getElementsByTagName("settings")[0]
        if mirrors:
            self.__add_mirror(m2, settings)
        if server:
            self.__add_server(m2, settings, "repo-release")
            self.__add_server(m2, settings, "repo-snapshot")
        m2Str = m2.toxml()
        # Write the modified document back atomically enough for CI use.
        with open(home_dir + '/.m2/settings.xml', 'w') as f:
            f.write(m2Str)
def main(prog_args):
    """Entry point: update ~/.m2/settings.xml according to CLI flags.

    Arguments:
        prog_args: argv-style argument vector (element 0 is the program
            name), e.g. sys.argv.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--deploy", help="add servers tag to settings.xml", action="store_true")
    parser.add_argument("--mirrors", help="add mirrors tag to settings.xml", action="store_true")
    # Fix: prog_args was accepted but ignored; argparse silently fell back
    # to sys.argv. Parsing the caller-supplied vector (minus the program
    # name) is identical when invoked with sys.argv but honors the API.
    args = parser.parse_args(prog_args[1:])
    config = Config()
    config.configure_server(server=args.deploy, mirrors=args.mirrors)
# Script entry point: exit status is main()'s return value (None -> 0).
if __name__ == "__main__":
    sys.exit(main(sys.argv))
|
{
"content_hash": "80f0294a44a97b629dfc1a32349859d2",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 105,
"avg_line_length": 36.01075268817204,
"alnum_prop": 0.6652732158853389,
"repo_name": "Fiware/cloud.SDC",
"id": "1d6840e376a3eacb4af25daa36eb91f22f32da9b",
"size": "3396",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "travis.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "9635"
},
{
"name": "API Blueprint",
"bytes": "19313"
},
{
"name": "Cucumber",
"bytes": "126459"
},
{
"name": "Java",
"bytes": "1382356"
},
{
"name": "Puppet",
"bytes": "1837"
},
{
"name": "Python",
"bytes": "239364"
},
{
"name": "Ruby",
"bytes": "9444"
},
{
"name": "Shell",
"bytes": "24415"
}
],
"symlink_target": ""
}
|
"" # noqa
"""
mpipe 0.8
=========
Send message-pack messages to subprocess.
Mini RPC mostly used for testinng using hypothesis.
"""
import ctypes
import os
import signal
import sys
import time
from contextlib import contextmanager
from subprocess import PIPE, Popen, TimeoutExpired
try:
import umsgpack
except ImportError:
import msgpack as umsgpack
@contextmanager
def open_and_close(args: list):
    """Context-manager variant of :func:`open`.

    Yields the subprocess and, on leaving the context, always sends the
    close message (0,) via :func:`close`.
    """
    child = open(args)
    try:
        yield child
    finally:
        close(child)
def open(args: list) -> Popen:
    """Open a subprocess for sending message-pack messages.

    NOTE: this intentionally shadows the builtin ``open``; callers use it
    as ``mpipe.open``. Debug wrappers are selected via the MPP_GDB /
    MPP_RR / MPP_MC environment variables.
    """
    env = os.environ
    if env.get("MPP_GDB") == "True":
        # Spawn the child, then exec gdb attached to it from a fork.
        proc = Popen(args, stdin=PIPE, stdout=PIPE)
        gdb_argv = ["gdb", "-p", str(proc.pid)]
        if os.fork():
            os.execlp(gdb_argv[0], *gdb_argv)
        time.sleep(2)
    elif env.get("MPP_RR") == "True":
        proc = Popen(["rr"] + args, stdin=PIPE, stdout=PIPE)
    elif env.get("MPP_MC") == "True":
        valgrind_cmd = [
            "valgrind",
            "--tool=memcheck",
            "--leak-check=full",
            "--show-leak-kinds=all",
            "--errors-for-leak-kinds=all",
            "--error-exitcode=1",
        ]
        proc = Popen(valgrind_cmd + args, stdin=PIPE, stdout=PIPE)
    else:
        proc = Popen(args, stdin=PIPE, stdout=PIPE)
    # Tracks the direction of the last pipe operation (read/write guard).
    proc._mpipe_last = None
    return proc
def close(proc: Popen):
    """Close the subprocess.

    Sends the shutdown message (0,) and waits up to one second. If the
    child does not exit in time it is interrupted, then terminated, and
    TimeoutExpired is re-raised — a hanging child is considered a bug.
    """
    write(proc, (0,))
    try:
        proc.wait(1)
    except TimeoutExpired:
        # Fix: the attribute name contained a mojibake character
        # ('send_ʂignal'), which raised AttributeError at runtime instead
        # of delivering SIGINT.
        proc.send_signal(signal.SIGINT)
        time.sleep(0.2)  # Allow the process to cleanup
        proc.terminate()
        raise  # Its a bug when the process doesn't complete
def write(proc: Popen, data):
    """Write one length-prefixed message-pack message to the process."""
    # Enforce strict write/read alternation in RPC mode.
    if proc._mpipe_last == "write":
        raise RuntimeError("Consecutive write not allowed in rpc_mode")
    proc._mpipe_last = "write"

    payload = umsgpack.dumps(data)
    # Native-endian size_t header announcing the payload length.
    header = bytes(ctypes.c_size_t(len(payload)))
    stdin = proc.stdin
    stdin.write(header)
    stdin.write(payload)
    stdin.flush()
def read(proc: Popen):
    """Read message from the process, returns None on failure."""
    # Enforce strict write/read alternation in RPC mode.
    if proc._mpipe_last == "read":
        raise RuntimeError("Consecutive read not allowed in rpc_mode")
    proc._mpipe_last = "read"

    raw_size = proc.stdout.read(ctypes.sizeof(ctypes.c_size_t))
    length = int.from_bytes(raw_size, sys.byteorder)
    payload = proc.stdout.read(length)
    try:
        return umsgpack.loads(payload)
    except umsgpack.InsufficientDataException as e:
        # A short read usually means the child died; surface its exit code.
        if proc.poll() != 0:
            raise RuntimeError("The process returned %d." % proc.returncode) from e
        else:
            raise
|
{
"content_hash": "834e099cdeeadba6d6d38ccb72154e5e",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 83,
"avg_line_length": 26.9811320754717,
"alnum_prop": 0.5916083916083916,
"repo_name": "ganwell/rbtree",
"id": "5f0d4b5bd5ee3cdbac2f6cea83861e097ccecda1",
"size": "2980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old_dev/src/mpipe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "612110"
},
{
"name": "C++",
"bytes": "23"
},
{
"name": "Makefile",
"bytes": "8071"
},
{
"name": "Python",
"bytes": "26518"
},
{
"name": "Shell",
"bytes": "278"
}
],
"symlink_target": ""
}
|
import os
from os import walk
templatePath = r'templates/controllerTemplate.txt'
writePath = r'/Source/Api/service-hmlFhirConverter/src/main/java/org/nmdp/hmlfhirconverter/controller'
class ControllerGenerator:
    """Generate Java controller source files from a text template."""

    def get_template(self):
        """Return the raw controller template text from templatePath."""
        with open(templatePath, 'r') as reader:
            return reader.read()

    def write_file(self, fileContents, fileName):
        """Write fileContents under writePath using the controller naming
        convention derived from fileName."""
        destination = os.path.join(writePath, self.get_file_name(fileName))
        with open(destination, 'w') as writer:
            writer.write(fileContents)

    def get_file_name(self, className):
        """Map a class name to its controller file name."""
        return className + 'Controller.java'

    def file_exists(self, className):
        """Return True when the controller file already exists directly in
        writePath (only the first directory level is inspected)."""
        for (dirpath, dirnames, filenames) in walk(writePath):
            return self.get_file_name(className) in filenames
|
{
"content_hash": "f9edc45cbabe2c3102eff1f540549261",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 102,
"avg_line_length": 32.833333333333336,
"alnum_prop": 0.6941624365482234,
"repo_name": "nmdp-bioinformatics/service-hmlFhirConverter",
"id": "dded94dd31891ea5fd7b22ad87a62c3064299489",
"size": "788",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "CodeGen/hmlFhirConverterCodeGenerator/codegen/controller/ControllerGenerator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4975"
},
{
"name": "Java",
"bytes": "728587"
},
{
"name": "JavaScript",
"bytes": "1719"
},
{
"name": "Python",
"bytes": "14043"
},
{
"name": "Shell",
"bytes": "7057"
}
],
"symlink_target": ""
}
|
from imgurpython import ImgurClient
import config
CONFIG_FILE = "config.yaml"
def authenticate():
    """Interactively obtain imgur OAuth tokens via the pin flow and print
    them for the user to store."""
    settings = config.read_file(CONFIG_FILE)

    # Get client ID and secret from auth.ini
    client = ImgurClient(settings.imgur.client_id, settings.imgur.client_secret)

    # Authorization flow, pin example (see docs for other auth types)
    auth_url = client.get_auth_url('pin')
    print("Go to the following URL: {0}".format(auth_url))

    # Read in the pin
    entered_pin = input("Enter pin code: ")

    # ... redirect user to `authorization_url`, obtain pin (or code or token) ...
    creds = client.authorize(entered_pin, 'pin')
    client.set_user_auth(creds['access_token'], creds['refresh_token'])

    print("Authentication successful! Here are the details:")
    print("  Access token: {0}".format(creds['access_token']))
    print(" Refresh token: {0}".format(creds['refresh_token']))
# Run the interactive pin flow when executed as a script.
if __name__ == "__main__":
    authenticate()
|
{
"content_hash": "34051f22759655463093509903fe81b2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 83,
"avg_line_length": 31.580645161290324,
"alnum_prop": 0.6782431052093973,
"repo_name": "FichteFoll/CodetalkIRCBot",
"id": "e5910d6d2616a9f4491eae99fbaab4ef197bf357",
"size": "1003",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "authenticate_imgur.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38538"
}
],
"symlink_target": ""
}
|
"""
@brief test log(time=2s)
@author Xavier Dupre
"""
import sys
import os
import unittest
import random
from tqdm import trange
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder
from pyquickhelper.benchhelper import BenchMark
class ATestBenchMarkB_(BenchMark):
    """Minimal BenchMark subclass for the tests: each bench call returns a
    random metric dict plus a matching metadata dict."""

    def init(self):
        """No setup required."""
        pass

    def bench(self, **p):
        """Run one fake benchmark iteration."""
        drawn = random.randint(1, 100)
        metrics = dict(nb=drawn, value=p["value"], _btry=str(drawn))
        meta = dict(nb=drawn, script="a\nb", _btry=str(drawn))
        return metrics, meta

    def end(self):
        """No teardown required."""
        pass
class ATestBenchMarkB2_(BenchMark):
    """Variant of ATestBenchMarkB_ whose bench() returns a one-element list
    of (metrics, metadata) pairs instead of a bare pair."""

    def init(self):
        """No setup required."""
        pass

    def bench(self, **p):
        """Run one fake benchmark iteration, list-wrapped."""
        drawn = random.randint(1, 100)
        metrics = dict(nb=drawn, value=p["value"], _btry=str(drawn))
        meta = dict(nb=drawn, script="a\nb", _btry=str(drawn))
        return [(metrics, meta)]

    def end(self):
        """No teardown required."""
        pass
class TestBenchMarkBar(unittest.TestCase):
    """End-to-end check of BenchMark driven through a tqdm progress bar."""

    def test_benchmark_list_progressbar(self):
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")
        temp = get_temp_folder(__file__, "temp_benchmark_progress_bar")
        grid = [dict(value=random.randint(10, 20)) for _ in range(0, 20)]
        runner = ATestBenchMarkB2_("TestName", clog=temp, fLOG=None,
                                   cache_file=os.path.join(
                                       temp, "cache.pickle"),
                                   progressbar=trange)
        runner.run(grid)

        # The collected results must render to a non-empty dataframe/HTML.
        frame = runner.to_df()
        rendered = frame.to_html(float_format="%1.3f", index=False)
        self.assertTrue(len(frame) > 0)
        self.assertTrue(rendered is not None)

        # The report must be written in all three formats.
        report = os.path.join(temp, "report.html")
        csv = os.path.join(temp, "report.csv")
        rst = os.path.join(temp, "report.rst")
        runner.report(filehtml=report, filecsv=csv, filerst=rst,
                      title="A Title", description="description")
        for produced in (report, csv, rst):
            self.assertTrue(os.path.exists(produced))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
{
"content_hash": "3cecc2e65831efdd16f2e5381d1c23a3",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 102,
"avg_line_length": 27.56,
"alnum_prop": 0.5757135945815192,
"repo_name": "sdpython/pyquickhelper",
"id": "13281ff7dcf443f90b6d5ab8f8a195840fecd9cf",
"size": "2067",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_unittests/ut_benchhelper/test_benchmark_bar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1362"
},
{
"name": "CSS",
"bytes": "164881"
},
{
"name": "HTML",
"bytes": "70034"
},
{
"name": "JavaScript",
"bytes": "273028"
},
{
"name": "Jupyter Notebook",
"bytes": "4659927"
},
{
"name": "Python",
"bytes": "3099479"
},
{
"name": "SCSS",
"bytes": "65612"
},
{
"name": "Sass",
"bytes": "11826"
},
{
"name": "Shell",
"bytes": "694"
},
{
"name": "Smarty",
"bytes": "27674"
},
{
"name": "TeX",
"bytes": "22447"
}
],
"symlink_target": ""
}
|
"""Setup script for wot.

Historical note: this script used pip's private ``pip.req.parse_requirements``
API, which gained a mandatory ``session`` argument in later pip releases and
was removed entirely in pip 10. Requirements are now read directly from
``requirements.txt``, which is the supported approach.
"""
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup


def _read_requirements(path="requirements.txt"):
    """Return requirement strings from *path*, skipping blanks and comments."""
    with open(path) as req_file:
        return [line.strip() for line in req_file
                if line.strip() and not line.strip().startswith("#")]


setup(name="wot",
      description="generating context-free grammars of content",
      author="Resilient Science, Inc.",
      author_email="info@resilientscience.com",
      url="http://resilientscience.github.io/wot/",
      version="0.1",
      scripts=[],
      packages=["wot", "wot.sequitur", "wot.mapreduce"],
      license="BSD",
      install_requires=_read_requirements(),)
|
{
"content_hash": "9c5f6404a273ac6a15544b32e4d4dbf3",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 78,
"avg_line_length": 32.72727272727273,
"alnum_prop": 0.6972222222222222,
"repo_name": "ResilientScience/wot",
"id": "a62e13b6d8ae097eb9963178ac822b17f26491d6",
"size": "720",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "40922"
}
],
"symlink_target": ""
}
|
"""Buildgen vsprojects plugin.
This parses the list of libraries, and generates globals "vsprojects"
and "vsproject_dict", to be used by the visual studio generators.
"""
def mako_plugin(dictionary):
  """The exported plugin code for generate_vsprojects.

  Tags every library/target dict with an 'is_library' flag, then publishes
  two globals for the Visual Studio generators: 'vsprojects' (the filtered
  project list) and 'vsproject_dict' (the same projects keyed by name).
  """
  libs = dictionary.get('libs', [])
  targets = dictionary.get('targets', [])

  # Record where each project came from.
  for project in libs:
    project['is_library'] = True
  for project in targets:
    project['is_library'] = False

  candidates = libs + targets

  # Exclude projects without a visual project guid (such as the tests) and,
  # for now, C++ projects.
  projects = [project for project in candidates
              if project.get('vs_project_guid', None)
              and project['language'] != 'c++']

  dictionary['vsprojects'] = projects
  dictionary['vsproject_dict'] = {project['name']: project
                                  for project in projects}
|
{
"content_hash": "2da50fb7f32d4c0e24a572051654073d",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 70,
"avg_line_length": 27.157894736842106,
"alnum_prop": 0.6763565891472868,
"repo_name": "tatsuhiro-t/grpc",
"id": "6cbd74df6679b48c49ad656c67cf952fe9137f6c",
"size": "2561",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tools/buildgen/plugins/generate_vsprojects.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3099175"
},
{
"name": "C#",
"bytes": "438721"
},
{
"name": "C++",
"bytes": "562283"
},
{
"name": "JavaScript",
"bytes": "141573"
},
{
"name": "Makefile",
"bytes": "1133706"
},
{
"name": "Objective-C",
"bytes": "131348"
},
{
"name": "PHP",
"bytes": "100743"
},
{
"name": "Protocol Buffer",
"bytes": "133769"
},
{
"name": "Python",
"bytes": "628072"
},
{
"name": "Ruby",
"bytes": "290916"
},
{
"name": "Shell",
"bytes": "18376"
}
],
"symlink_target": ""
}
|
# Auto-generated single-model-control test: build one Ozone model combining
# the Anscombe transform, a constant trend, minute-level seasonality and an
# SVR cycle model.
import tests.model_control.test_ozone_custom_models_enabled as testmod


testmod.build_model( ['Anscombe'] , ['ConstantTrend'] , ['Seasonal_Minute'] , ['SVR'] );
|
{
"content_hash": "7c30e201aa6f15c81b9eb17142ea9cf1",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 88,
"avg_line_length": 40.25,
"alnum_prop": 0.7142857142857143,
"repo_name": "antoinecarme/pyaf",
"id": "6e686890e51229f211fcfffbd02e148a45522a6c",
"size": "161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_ConstantTrend_Seasonal_Minute_SVR.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import Car, Refuelling, Cleaning, Service, Revision, Tax, Insurance, Tyre
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
#UserAdmin.list_display = ('email', 'first_name', 'last_name', 'is_active', 'date_joined', 'is_staff')
class UserAdminExtended(UserAdmin):
    """User changelist with activity/staff columns, newest accounts first,
    filterable by signup date."""
    list_display = ('username', 'email', 'first_name', 'last_name', 'is_active', 'date_joined', 'is_staff')
    list_filter = ('date_joined',)
    ordering = ('-date_joined',)
class CarAdmin(admin.ModelAdmin):
    """Car changelist showing make, model, year and the owning user."""
    list_display = ('manufacturer_name', 'model_name', 'year_make', 'user' )
# Swap the stock User admin for the extended changelist above.
admin.site.unregister(User)
admin.site.register(User, UserAdminExtended)
# Garage domain models (default admin for all but Car).
admin.site.register(Car, CarAdmin)
admin.site.register(Refuelling)
admin.site.register(Cleaning)
admin.site.register(Service)
admin.site.register(Revision)
admin.site.register(Tax)
admin.site.register(Insurance)
admin.site.register(Tyre)
|
{
"content_hash": "cdd57c8cde02dc9f12b07cf001c3749d",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 107,
"avg_line_length": 35.333333333333336,
"alnum_prop": 0.7431865828092243,
"repo_name": "cdDiaCo/myGarage",
"id": "e70be085aa7be8cf964a23ab5d237075f288c85c",
"size": "954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myGarageApi/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19569"
},
{
"name": "HTML",
"bytes": "15983"
},
{
"name": "JavaScript",
"bytes": "28585"
},
{
"name": "Python",
"bytes": "37895"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import string
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.LOWEST
def dependencies():
    # Tamper scripts may announce requirements here (e.g. a specific DBMS);
    # this script has none.
    pass
def tamper(payload, **kwargs):
    """
    Converts all characters in a given payload (not processing already
    encoded)

    Reference: https://www.acunetix.com/vulnerabilities/unicode-transformation-issues/

    >>> tamper('SELECT FIELD FROM TABLE WHERE 2>1')
    'SELECT%C0%AAFIELD%C0%AAFROM%C0%AATABLE%C0%AAWHERE%C0%AA2%C0%BE1'
    """
    if not payload:
        # Empty/None payloads are returned unchanged.
        return payload

    plain = string.ascii_letters + string.digits
    pieces = []
    index = 0
    total = len(payload)
    while index < total:
        char = payload[index]
        if char == '%' and index < total - 2 \
                and payload[index + 1:index + 2] in string.hexdigits \
                and payload[index + 2:index + 3] in string.hexdigits:
            # Already URL-encoded: pass the three-character escape through.
            pieces.append(payload[index:index + 3])
            index += 3
        else:
            if char in plain:
                pieces.append(char)
            else:
                # Overlong-UTF-8 encode everything that is not A-Za-z0-9.
                pieces.append("%%C0%%%.2X" % (0x8A | ord(char)))
            index += 1

    return "".join(pieces)
|
{
"content_hash": "00ba60e5869055aaa7ba0cd23b5ed1f4",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 150,
"avg_line_length": 27.232558139534884,
"alnum_prop": 0.5687446626814688,
"repo_name": "michaelhidalgo/7WCSQ",
"id": "5335148fe91f75cfd9237c135e5842dbf34089ce",
"size": "1194",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Tools/SQLMap/sqlmap/tamper/overlongutf8.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "13307"
},
{
"name": "C++",
"bytes": "1641"
},
{
"name": "Objective-C",
"bytes": "516"
},
{
"name": "PLpgSQL",
"bytes": "536"
},
{
"name": "Perl",
"bytes": "2136"
},
{
"name": "Python",
"bytes": "1630594"
},
{
"name": "Shell",
"bytes": "9683"
}
],
"symlink_target": ""
}
|
import gemstone
from gemstone.discovery.default import HttpDiscoveryStrategy
from gemstone.discovery.redis_strategy import RedisDiscoveryStrategy
class Service1(gemstone.MicroService):
    """Demo microservice that relays a greeting through service.2."""

    name = "service.1"
    port = 8000

    # Register/locate peers through the Redis-backed discovery service.
    discovery_strategies = [
        RedisDiscoveryStrategy("redis://localhost:6379/0")
    ]

    @gemstone.exposed_method()
    def say_hello(self, name):
        """Forward *name* to service.2's say_hello and return its answer."""
        peer = self.get_service("service.2")
        print(peer)
        outcome = peer.call_method("say_hello", params=[name])
        return outcome.result
if __name__ == '__main__':
    # Run the service standalone; start() blocks on the gemstone IO loop.
    Service1().start()
|
{
"content_hash": "f031d184f0aaddaeb0e57199fdc1c2ce",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 25.958333333333332,
"alnum_prop": 0.6821829855537721,
"repo_name": "vladcalin/gemstone",
"id": "a16a53a6de2d030e538f15a00af077e3026289cb",
"size": "623",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/example_discovery/service1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "32"
},
{
"name": "Python",
"bytes": "130541"
}
],
"symlink_target": ""
}
|
from gym.envs.registration import register
import envs
# Expose the custom environment to gym.make() under the id 'Spheres-v0';
# the entry point resolves to the SpheresEnv class in the local envs package.
register(
    id='Spheres-v0',
    entry_point='envs:SpheresEnv',
)
|
{
"content_hash": "5bdb852a0d247b5c78b4e03351c0d21f",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 17.714285714285715,
"alnum_prop": 0.7258064516129032,
"repo_name": "GoogleCloudPlatform/next18-ai-in-motion",
"id": "9b08a3147a2f8174faf18f696b984b3423b3a511",
"size": "699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gym_spheres/gym_spheres/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "161574"
},
{
"name": "Python",
"bytes": "47929"
},
{
"name": "Shell",
"bytes": "1525"
}
],
"symlink_target": ""
}
|
"""Tests for the Insteon lock."""
from unittest.mock import patch
import pytest
from homeassistant.components import insteon
from homeassistant.components.insteon import (
DOMAIN,
insteon_entity,
utils as insteon_utils,
)
from homeassistant.components.lock import ( # SERVICE_LOCK,; SERVICE_UNLOCK,
DOMAIN as LOCK_DOMAIN,
)
from homeassistant.const import ( # ATTR_ENTITY_ID,;
EVENT_HOMEASSISTANT_STOP,
STATE_LOCKED,
STATE_UNLOCKED,
Platform,
)
from homeassistant.helpers import entity_registry as er
from .const import MOCK_USER_INPUT_PLM
from .mock_devices import MockDevices
from tests.common import MockConfigEntry
# Module-level MockDevices instance shared by the fixtures and tests below.
devices = MockDevices()
@pytest.fixture(autouse=True)
def lock_platform_only():
    """Only setup the lock and required base platforms to speed up tests."""
    # Restricting INSTEON_PLATFORMS to LOCK prevents the integration from
    # loading every other platform during config-entry setup.
    with patch(
        "homeassistant.components.insteon.INSTEON_PLATFORMS",
        (Platform.LOCK,),
    ):
        yield
@pytest.fixture(autouse=True)
def patch_setup_and_devices():
    """Patch the Insteon setup process and devices."""
    # Every module that holds its own reference to `devices` must be patched
    # individually so all code paths see the same MockDevices instance.
    with patch.object(insteon, "async_connect", new=mock_connection), patch.object(
        insteon, "async_close"
    ), patch.object(insteon, "devices", devices), patch.object(
        insteon_utils, "devices", devices
    ), patch.object(
        insteon_entity, "devices", devices
    ):
        yield
async def mock_connection(*args, **kwargs):
    """Stand-in for insteon.async_connect that always reports success."""
    return True
async def test_lock_lock(hass):
    """Test locking an Insteon lock device."""
    config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT_PLM)
    config_entry.add_to_hass(hass)
    registry_entity = er.async_get(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    try:
        # Mock device "55.55.55" is exposed under this entity id.
        lock = registry_entity.async_get("lock.device_55_55_55_55_55_55")
        state = hass.states.get(lock.entity_id)
        assert state.state is STATE_UNLOCKED
        # lock via UI
        await hass.services.async_call(
            LOCK_DOMAIN, "lock", {"entity_id": lock.entity_id}, blocking=True
        )
        assert devices["55.55.55"].async_lock.call_count == 1
    finally:
        # Always fire the stop event so the integration tears down cleanly
        # even when an assertion above fails.
        hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
        await hass.async_block_till_done()
async def test_lock_unlock(hass):
    """Test unlocking an Insteon lock device."""
    config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT_PLM)
    config_entry.add_to_hass(hass)
    registry_entity = er.async_get(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    # Pre-set group 1 to 255 so the lock starts in the LOCKED state.
    devices["55.55.55"].groups[1].set_value(255)
    try:
        lock = registry_entity.async_get("lock.device_55_55_55_55_55_55")
        state = hass.states.get(lock.entity_id)
        assert state.state is STATE_LOCKED
        # lock via UI
        await hass.services.async_call(
            LOCK_DOMAIN, "unlock", {"entity_id": lock.entity_id}, blocking=True
        )
        assert devices["55.55.55"].async_unlock.call_count == 1
    finally:
        # Always fire the stop event so the integration tears down cleanly.
        hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
        await hass.async_block_till_done()
|
{
"content_hash": "0078b0e605c7cfaa016572cf12cbf14f",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 83,
"avg_line_length": 29.38532110091743,
"alnum_prop": 0.6721823290665001,
"repo_name": "nkgilley/home-assistant",
"id": "6f847543a9f014188a7bc852b6bf20d61f29d016",
"size": "3203",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/insteon/test_lock.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "51597279"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
"""Example: Use setAngles Method"""
import qi
import argparse
import sys
import time
import almath
import os
def main(session):
"""
This example uses the setAngles method and setStiffnesses method
in order to control joints.
"""
# Get the service ALMotion.
motion_service = session.service("ALMotion")
motion_service.setStiffnesses("Head", 1.0)
joint_name = "HeadYaw"
fractionMaxSpeed = args.speed
motion_service.setAngles("HeadPitch",0,fractionMaxSpeed)
try:
while True:
angle = args.right_angle*almath.TO_RAD
motion_service.setAngles(joint_name,angle,fractionMaxSpeed)
time.sleep(wait_time)
angle = 0*almath.TO_RAD
motion_service.setAngles(joint_name,angle,fractionMaxSpeed)
time.sleep(wait_time)
angle = args.left_angle*almath.TO_RAD
motion_service.setAngles(joint_name,angle,fractionMaxSpeed)
time.sleep(wait_time)
angle = 0*almath.TO_RAD
motion_service.setAngles(joint_name,angle,fractionMaxSpeed)
time.sleep(wait_time)
except KeyboardInterrupt:
print
print "Moving Head Interrupted by user"
motion_service.setAngles("HeadYaw",0,0.5)
time.sleep(1.5)
motion_service.setStiffnesses("Head", 0.0)
if __name__ == "__main__":
    # Command-line options; the default IP comes from the PEPPER_IP
    # environment variable, which must be set.
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default=os.environ['PEPPER_IP'],
                        help="Robot IP address. On robot or Local Naoqi: use '127.0.0.1'.")
    parser.add_argument("--port", type=int, default=9559,help="Naoqi port number")
    parser.add_argument("--right_angle",type=int, default=-60,help="Right angle")
    parser.add_argument("--left_angle",type=int, default=60,help="Left angle")
    parser.add_argument("--speed",type=float, default=0.5,help="Fraction Max Speed [0-1]")
    parser.add_argument("--wait_time",type=float, default=6.0,help="Wait time in each movement")
    args = parser.parse_args()
    # Connect to the NAOqi session before handing it to main().
    session = qi.Session()
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
    except RuntimeError:
        print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
               "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    main(session)
|
{
"content_hash": "4ee6007669121e6d636c8390798ef820",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 98,
"avg_line_length": 34.710144927536234,
"alnum_prop": 0.6342379958246347,
"repo_name": "LCAS/spqrel_tools",
"id": "3cc5d5971a25dea21c0e93e9fc948c11c335b1d0",
"size": "2445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "move_head/move_head_side2side.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1792"
},
{
"name": "CMake",
"bytes": "18486"
},
{
"name": "HTML",
"bytes": "23332"
},
{
"name": "JavaScript",
"bytes": "7865"
},
{
"name": "Makefile",
"bytes": "4187"
},
{
"name": "Python",
"bytes": "1332341"
},
{
"name": "Shell",
"bytes": "10540"
}
],
"symlink_target": ""
}
|
import datetime
from errno import ENOENT, EACCES, EPERM
class CpuAcctStat:
    """CPU accounting metric (cpuacct.stat) for a docker container.

    Reads the user/system CPU time counters, in jiffies, from the
    container's cgroup on construction.
    """

    # Base path of the cpuacct cgroup hierarchy for docker containers.
    cpuacctPath = '/sys/fs/cgroup/cpuacct/docker/'

    def __init__(self, containerId, containerName):
        self.containerId = containerId
        self.containerName = containerName
        self.time = datetime.datetime.now()
        # Defaults so __str__ works even when the stat file is missing.
        self.userJiffies = 0
        self.systemJiffies = 0
        try:
            with open(CpuAcctStat.cpuacctPath + self.containerId + "/cpuacct.stat", "r") as cpuacct:
                for line in cpuacct:
                    fields = line.split()
                    if len(fields) < 2:
                        continue
                    # BUG FIX: the original used `if fields[0].find('user'):`.
                    # str.find() returns 0 (falsy) on a match at the start and
                    # -1 (truthy) on no match, so the tests were inverted and
                    # user/system values were cross-assigned.  Compare the
                    # field label directly instead.
                    if fields[0] == 'user':
                        self.userJiffies = fields[1]
                    elif fields[0] == 'system':
                        self.systemJiffies = fields[1]
        except IOError as err:
            if err.errno == ENOENT:
                print("No cpuacct.stat found for {0}".format(self.containerName))

    def __str__(self):
        return "{0} @{1}. User={2} System={3} (in jiffies)".format(self.containerName, self.time, self.userJiffies, self.systemJiffies)

    def __unicode__(self):
        return u"{0} @{1}. User={2} System={3} (in jiffies)".format(self.containerName, self.time, self.userJiffies, self.systemJiffies)
class CpuAcctPerCore:
    """Per-core CPU usage metric (cpuacct.usage_percpu) for a docker container."""

    # Base path of the cpuacct cgroup hierarchy for docker containers.
    cpuacctPath = '/sys/fs/cgroup/cpuacct/docker/'

    def __init__(self, containerId, containerName):
        self.containerId = containerId
        self.containerName = containerName
        self.time = datetime.datetime.now()
        self.perCore = []
        usage_file = CpuAcctPerCore.cpuacctPath + self.containerId + "/cpuacct.usage_percpu"
        try:
            with open(usage_file, "r") as cpuacct:
                # Each line holds whitespace-separated per-core counters.
                for line in cpuacct:
                    self.perCore.extend(line.split())
        except IOError as err:
            if err.errno == ENOENT:
                print("No cpuacct.usage_percpu found for {0}".format(self.containerName))
            pass

    def cpuPerCores(self):
        """Render the per-core counters as a ';'-separated string."""
        return ';'.join(self.perCore)

    def __str__(self):
        return "{0} @{1}. CPU (ns per core): {2}".format(self.containerName, self.time, self.cpuPerCores())

    def __unicode__(self):
        return u"{0} @{1}. CPU (ns per core): {2}".format(self.containerName, self.time, self.cpuPerCores())
class ThrottledCpu:
    """CPU throttling metric (cpu.stat) for a docker container.

    Reads nr_periods / nr_throttled / throttled_time from the container's
    cgroup on construction.
    """

    # Base path of the cpuacct cgroup hierarchy for docker containers.
    cpuacctPath = '/sys/fs/cgroup/cpuacct/docker/'

    def __init__(self, containerId, containerName):
        self.containerId = containerId
        self.containerName = containerName
        self.time = datetime.datetime.now()
        # Retained from the original implementation (unused by this class).
        self.perCore = []
        # Defaults so __str__ works even when cpu.stat is missing.
        self.enforcementIntervals = 0
        self.groupThrottilingCount = 0
        self.throttledTimeTotal = 0
        try:
            # BUG FIX: the original read the path from CpuAcctPerCore; use
            # this class's own attribute so the classes stay independent.
            with open(ThrottledCpu.cpuacctPath + self.containerId + "/cpu.stat", "r") as cpuacct:
                for line in cpuacct:
                    fields = line.split()
                    if len(fields) < 2:
                        continue
                    # BUG FIX: the original used `if fields[0].find(...):`.
                    # str.find() returns 0 (falsy) on a match at the start and
                    # -1 (truthy) on no match, so every test was inverted and
                    # the wrong values were assigned.  Compare labels directly.
                    if fields[0] == 'nr_periods':
                        self.enforcementIntervals = fields[1]
                    elif fields[0] == 'nr_throttled':
                        self.groupThrottilingCount = fields[1]
                    elif fields[0] == 'throttled_time':
                        self.throttledTimeTotal = fields[1]
        except IOError as err:
            if err.errno == ENOENT:
                print("No cpu.stat found for {0}".format(self.containerName))

    def __str__(self):
        return "{0} @{1}. EnforcementIntervals={2} GroupThrottilingCount={3} ThrottledTimeTotal={4}"\
            .format(self.containerName, self.time, self.enforcementIntervals, self.groupThrottilingCount,self.throttledTimeTotal)

    def __unicode__(self):
        return u"{0} @{1}. EnforcementIntervals={2} GroupThrottilingCount={3} ThrottledTimeTotal={4} (ns)"\
            .format(self.containerName, self.time, self.enforcementIntervals, self.groupThrottilingCount,self.throttledTimeTotal)
|
{
"content_hash": "b7302deb99af690491a0006e1bb8d2bb",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 136,
"avg_line_length": 42.09375,
"alnum_prop": 0.5879732739420935,
"repo_name": "sofkaski/dockerstat",
"id": "127aa805679c79901cc85b5b4496b1ebb3b1d2b2",
"size": "4041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dockerstat/stats/CpuAcct.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26316"
}
],
"symlink_target": ""
}
|
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
PMC Sierra ONU adapter
"""
import structlog
from twisted.internet import reactor
from twisted.internet.defer import DeferredQueue, inlineCallbacks, returnValue
from zope.interface import implementer
import voltha.core.flow_decomposer as fd
from voltha.adapters.interface import IAdapterInterface
from voltha.adapters.microsemi_olt.DeviceManager import mac_str_to_tuple
from voltha.adapters.microsemi_olt.PAS5211 import PAS5211GetOnuAllocs, PAS5211GetOnuAllocsResponse, PAS5211GetSnInfo, \
PAS5211GetSnInfoResponse, PAS5211GetOnusRange, PAS5211GetOnusRangeResponse, PAS5211MsgSetOnuOmciPortId, \
PAS5211MsgSetOnuOmciPortIdResponse, PAS5211MsgSetOnuAllocId, PAS5211MsgSetOnuAllocIdResponse, \
PAS5211SetSVlanAtConfig, PAS5211SetSVlanAtConfigResponse, PAS5211SetVlanDownConfig, \
PAS5211SetVlanDownConfigResponse, PAS5211SetDownVlanHandl, PAS5211SetDownVlanHandlResponse, \
PAS5211SetUplinkVlanHandl, PAS5211SetDownstreamPolicingConfigResponse, PAS5211SetDownstreamPolicingConfig, \
PAS5211SetPortIdPolicingConfig, PAS5211UnsetPortIdPolicingConfig, \
PAS5211MsgSendDbaAlgorithmMsg, PAS5211MsgSendDbaAlgorithmMsgResponse, \
PAS5211SetUpstreamPolicingConfigResponse, PAS5211SetUpstreamPolicingConfig, \
PAS5211MsgSetPortIdConfig, PAS5211MsgSetPortIdConfigResponse, \
PAS5211MsgGetOnuIdByPortId, PAS5211MsgGetOnuIdByPortIdResponse, \
PAS5211SetVlanUplinkConfiguration, PAS5211SetVlanUplinkConfigurationResponse, PAS5211SetUplinkVlanHandlResponse, PAS5211SetVlanGenConfig, PAS5211SetVlanGenConfigResponse, \
PAS5211GetPortIdDownstreamPolicingConfig, PAS5211GetPortIdDownstreamPolicingConfigResponse, PAS5211RemoveDownstreamPolicingConfig, \
PAS5211MsgHeader, PAS5211UnsetPortIdPolicingConfigResponse, PAS5211RemoveDownstreamPolicingConfigResponse, \
PAS5211SetPortIdPolicingConfigResponse
from voltha.adapters.microsemi_olt.PAS5211_constants import OMCI_GEM_IWTP_IW_OPT_8021P_MAPPER, PON_FALSE, \
PON_1_TO_1_VLAN_MODE, PON_TRUE, PON_VLAN_UNUSED_TAG, PON_VLAN_UNUSED_PRIORITY, PON_VLAN_REPLACE_PRIORITY, \
PON_OUTPUT_VLAN_PRIO_HANDLE_INCOMING_VLAN, PON_VLAN_UNCHANGED_PRIORITY, PON_OUTPUT_VLAN_PRIO_HANDLE_DONT_CHANGE, \
PON_OUTPUT_VLAN_PRIO_HANDLE_DL_VLAN_TABLE, PON_DL_VLAN_SVLAN_REMOVE, PON_DL_VLAN_CVLAN_NO_CHANGE, \
PON_VLAN_DEST_DATAPATH, GEM_DIR_BIDIRECT, OMCI_MAC_BRIDGE_PCD_LANFCS_FORWARDED, \
OMCI_MAC_BRIDGE_PCD_ENCAP_METHOD_LLC, OMCI_8021P_MSP_UNMARKED_FRAME_TAG_FRAME, OMCI_8021P_MSP_TP_TYPE_NULL, \
OMCI_EX_VLAN_TAG_OCD_ASSOCIATION_TYPE_PPTP_ETH_UNI, OMCI_EX_VLAN_TAG_OCD_DS_MODE_US_INVERSE, PMC_UPSTREAM_PORT, \
PON_DISABLE, PON_VLAN_CHANGE_TAG, PON_VLAN_DONT_CHANGE_TAG, PON_PORT_TYPE_GEM, PON_PORT_DESTINATION_CNI0, PON_ENABLE, SLA_gr_bw_gros, PYTHAGORAS_UPDATE_AID_SLA, \
SLA_gr_bw_gros, SLA_be_bw_gros, SLA_gr_bw_fine, SLA_be_bw_fine, PYTHAGORAS_DBA_DATA_COS, PYTHAGORAS_DBA_STATUS_REPORT_NSR, \
PMC_OFAL_NO_POLICY, UPSTREAM, DOWNSTREAM
from voltha.extensions.omci.omci_frame import OmciFrame
from voltha.protos import third_party
from voltha.protos.adapter_pb2 import Adapter
from voltha.protos.adapter_pb2 import AdapterConfig
from voltha.protos.common_pb2 import LogLevel, ConnectStatus, AdminState, OperStatus
from voltha.protos.device_pb2 import DeviceType, DeviceTypes, Port
from voltha.protos.health_pb2 import HealthStatus
from voltha.protos.logical_device_pb2 import LogicalPort
from voltha.protos.openflow_13_pb2 import OFPPF_1GB_FD, OFPPF_FIBER, ofp_port, OFPPS_LIVE, OFPXMC_OPENFLOW_BASIC
from voltha.extensions.omci.omci_messages import OmciGet, OmciGetResponse, OmciCreate, OmciMibResetResponse, OmciSet, \
OmciSetResponse, OmciCreateResponse, OmciMibReset, OmciDelete, OmciDeleteResponse
from adapters.microsemi_olt.OMCIProxy import OMCIProxy
from voltha.adapters.microsemi_olt.APIProxy import APIProxy
from voltha.registry import registry
from voltha.extensions.omci.omci_entities import VlanTaggingOperation
from voltha.protos.openflow_13_pb2 import Flows, FlowGroups
import Queue
from struct import pack, unpack
_ = third_party
log = structlog.get_logger()
# Sentinel values for the extended-VLAN tagging operation config data ME
# filter fields (presumably per ITU-T G.988 — TODO confirm against spec).
OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_NO_TAG = 15
OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE = 4096
OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE = 0
OMCI_EX_VLAN_TAG_OCD_FILTER_ETYPE_NONE = 0
OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_DEFAULT = 14
OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_8100 = 4
# Sentinel values for the treatment fields of the same ME.
OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE = 15
OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_OUTER = 1
OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_COPY_FROM_INNER = 4096
OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_INNER = 0
OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_EQ_8100 = 4
# Maximum retries for a flow install/reinstall/remove event (used by the
# commented-out retry logic in TlgsOnuHandler.wait_for_flow_events).
MAX_FLOW_EVENT_RETRIES = 5
def sequence_generator(init):
    """Yield an endless run of consecutive integers starting at *init*."""
    value = init
    while True:
        yield value
        value = value + 1
@implementer(IAdapterInterface)
class TlgsOnuAdapter(object):
    """Voltha ONU adapter for TLGS GPON ONUs.

    Implements IAdapterInterface.  Most operations are delegated to a
    per-ONU TlgsOnuHandler kept in a two-level dict keyed by
    [proxy_address.channel_id][proxy_address.onu_id]; operations the
    adapter does not support raise NotImplementedError.
    """

    name = 'tlgs_onu'
    supported_device_types = [
        DeviceType(
            id=name,
            adapter=name,
            accepts_bulk_flow_update=True
        )
    ]
    def __init__(self, adapter_agent, config):
        """Register this adapter with the core and prepare handler storage."""
        self.omci_proxy = None
        self.api_proxy = None
        self.adapter_agent = adapter_agent
        self.config = config
        self.descriptor = Adapter(
            id=self.name,
            vendor='TLGS',
            version='0.1',
            config=AdapterConfig(log_level=LogLevel.INFO)
        )
        # self.incoming_messages = DeferredQueue()
        #self.trangen = sequence_generator(1)
        # As of broadcom_onu.py
        self.device_handlers = dict()
        # register for adapter messages
        self.adapter_agent.register_for_inter_adapter_messages()
        self.interface = registry('main').get_args().interface
    def start(self):
        """Adapter lifecycle start hook (nothing to initialize)."""
        log.debug('starting')
        log.info('started')
    def stop(self):
        """Adapter lifecycle stop hook (nothing to tear down)."""
        log.debug('stopping')
        log.info('stopped')
    def adapter_descriptor(self):
        # Static descriptor built in __init__.
        return self.descriptor
    def device_types(self):
        return DeviceTypes(items=self.supported_device_types)
    def health(self):
        # Always reports healthy; no real health probing is implemented.
        return HealthStatus(state=HealthStatus.HealthState.HEALTHY)
    def change_master_state(self, master):
        raise NotImplementedError()
    def create_tcont(self, device, tcont_data, traffic_descriptor_data):
        raise NotImplementedError()
    def update_tcont(self, device, tcont_data, traffic_descriptor_data):
        raise NotImplementedError()
    def remove_tcont(self, device, tcont_data, traffic_descriptor_data):
        raise NotImplementedError()
    def create_gemport(self, device, data):
        raise NotImplementedError()
    def update_gemport(self, device, data):
        raise NotImplementedError()
    def remove_gemport(self, device, data):
        raise NotImplementedError()
    def create_multicast_gemport(self, device, data):
        raise NotImplementedError()
    def update_multicast_gemport(self, device, data):
        raise NotImplementedError()
    def remove_multicast_gemport(self, device, data):
        raise NotImplementedError()
    def create_multicast_distribution_set(self, device, data):
        raise NotImplementedError()
    def update_multicast_distribution_set(self, device, data):
        raise NotImplementedError()
    def remove_multicast_distribution_set(self, device, data):
        raise NotImplementedError()
    def adopt_device(self, device):
        """Create a handler for a newly discovered ONU and schedule activation."""
        log.debug('adopt-device', device=device)
        # reactor.callLater(0.1, self._onu_device_activation, device)
        # return device
        # two level array channel
        if device.proxy_address.channel_id not in self.device_handlers:
            self.device_handlers[device.proxy_address.channel_id] = dict()
        self.device_handlers[device.proxy_address.channel_id][
            device.proxy_address.onu_id] = TlgsOnuHandler(self, device.id)
        # Activation runs asynchronously one second after adoption.
        reactor.callLater(1, self.device_handlers[device.proxy_address.channel_id][
            device.proxy_address.onu_id].activate, device)
        return device
    def reconcile_device(self, device):
        raise NotImplementedError()
    def abandon_device(self, device):
        raise NotImplementedError()
    def disable_device(self, device):
        """Asynchronously deactivate the handler for this ONU."""
        log.debug('disable-device', device=device.id)
        reactor.callLater(0, self.device_handlers[device.proxy_address.channel_id][
            device.proxy_address.onu_id].deactivate, device)
        #raise NotImplementedError()
    def reenable_device(self, device):
        raise NotImplementedError()
    def reboot_device(self, device):
        """Asynchronously trigger a flow reinstall via the handler's reboot."""
        log.debug('reboot-device', device=device.id)
        reactor.callLater(0, self.device_handlers[device.proxy_address.channel_id][
            device.proxy_address.onu_id].reboot, device)
        # raise NotImplementedError()
    def download_image(self, device, request):
        raise NotImplementedError()
    def get_image_download_status(self, device, request):
        raise NotImplementedError()
    def cancel_image_download(self, device, request):
        raise NotImplementedError()
    def activate_image_update(self, device, request):
        raise NotImplementedError()
    def revert_image_update(self, device, request):
        raise NotImplementedError()
    def self_test_device(self, device):
        """
        This is called to Self a device based on a NBI call.
        :param device: A Voltha.Device object.
        :return: Will return result of self test
        """
        log.debug('self-test-device', device=device.id)
        raise NotImplementedError()
    def delete_device(self, device):
        # Deletion is a no-op beyond logging; handler cleanup happens in
        # disable_device/deactivate.
        log.debug('delete-device', device_id=device.id)
    def get_device_details(self, device):
        raise NotImplementedError()
    def deactivate(self, device):
        """Synchronously deactivate the ONU's handler; re-raises failures."""
        try:
            handler = self.device_handlers[
                device.proxy_address.channel_id][device.proxy_address.onu_id]
            return handler.deactivate(device)
        except Exception as e:
            log.exception('failed-to-deactivate-onu', e=e)
            raise e
    def update_pm_config(self, device, pm_configs):
        raise NotImplementedError()
    # @inlineCallbacks
    def update_flows_bulk(self, device, flows, groups):
        """Forward a bulk flow update to the ONU's handler (groups unsupported)."""
        log.debug('onu-bulk-flow-update', device_id=device.id,
                  flows=flows, groups=groups)
        # NOTE(review): .keys is not called here — this logs the bound method
        # repr, not the actual key list.
        log.debug(str(self.device_handlers.keys))
        try:
            assert len(groups.items) == 0
            handler = self.device_handlers[
                device.proxy_address.channel_id][device.proxy_address.onu_id]
            handler.update_flow_table(device, flows.items)
        except Exception as e:
            log.exception('failed-to-update-flow-table', e=e)
            raise e
    def update_flows_incrementally(self, device, flow_changes, group_changes):
        raise NotImplementedError()
    def send_proxied_message(self, proxy_address, msg):
        # Outbound proxied messages are not sent by this adapter; log only.
        log.debug("send-proxied-message in TLGS ONU")
    def receive_proxied_message(self, proxy_address, msg):
        """Dispatch an incoming PAS5211 message to the addressed ONU handler."""
        log.debug('receive-proxied-message')
        # mgs - > onu_id
        if PAS5211MsgHeader in msg:
            if msg['PAS5211MsgHeader'].onu_id != -1:
                handler = self.device_handlers[
                    proxy_address.channel_id][msg.onu_id]
                handler.receive_message(msg)
        # for onu, handler in self.device_handlers[proxy_address.channel_id].iteritems():
        #     handler.receive_message(msg)
    def receive_packet_out(self, logical_device_id, egress_port_no, msg):
        # Packet-out is logged but not forwarded to the device.
        log.debug('packet-out', logical_device_id=logical_device_id,
                  egress_port_no=egress_port_no, msg_len=len(msg))
    def create_interface(self, device, data):
        raise NotImplementedError()
    def update_interface(self, device, data):
        raise NotImplementedError()
    def remove_interface(self, device, data):
        raise NotImplementedError()
    def receive_onu_detect_state(self, device_id, state):
        raise NotImplementedError()
    def receive_inter_adapter_message(self, msg):
        raise NotImplementedError()
    def suppress_alarm(self, filter):
        raise NotImplementedError()
    def unsuppress_alarm(self, filter):
        raise NotImplementedError()
    # Not used, delegated to handler
    def update_flow_table(self, device, flows):
        log.debug('update-flow-table', device_id=device.id, flows=flows)
class TlgsOnuHandler(object):
    def __init__(self, adapter, device_id):
        """Per-ONU state holder; created by TlgsOnuAdapter.adopt_device."""
        self.adapter = adapter
        self.adapter_agent = adapter.adapter_agent
        self.device_id = device_id
        self.log = structlog.get_logger(device_id=device_id)
        #self.incoming_messages = Queue.Queue()
        # Queue of PAS5211/OMCI responses fed by receive_message.
        self.incoming_messages = DeferredQueue()
        self.event_messages = Queue.Queue()
        self.proxy_address = None
        self.tx_id = 0
        # Monotonic transaction-id generator.
        self.trangen = sequence_generator(1)
        # GPON identifiers; derived from the onu_id during activate().
        self.port_id = None
        self.alloc_id = None
        # Currently installed VLAN ids; None until flows are installed.
        self.cvlan_id = None
        self.subsvlan_id = None
        self.bandwidth = None
        self.flows_lock = 0
        self.flows = None
        self.policy_id = None
        # Serialized install/reinstall/remove events consumed by
        # wait_for_flow_events.
        self.flow_queue = DeferredQueue()
    def receive_message(self, msg):
        """Queue an incoming proxied message for wait_for_response consumers."""
        log.debug("receive-message",msg=msg.show(dump=True))
        self.incoming_messages.put(msg)
    def activate(self, device):
        """Provision the ONU in Voltha: register for proxied messages, create
        UNI/PON ports and the logical port, mark the device ACTIVE and start
        the flow-event consumer loop.  Failures are logged, not raised."""
        log.debug('activate-onu-handler', device=device)
        try:
            # register for proxied messages right away
            self.proxy_address = device.proxy_address
            self.adapter_agent.register_for_proxied_messages(device.proxy_address)
            # First we verify that we got parent reference and proxy info
            assert device.parent_id
            assert device.proxy_address.device_id
            # == 0 # We want to activate multiple ONT's
            assert device.proxy_address.channel_id is not None
            # to get onu_id = device.proxy_address.onu_id
            # From PMC code:
            self.port_id = 1000 + 16 * device.proxy_address.onu_id
            self.alloc_id = self.port_id
            # we are going to use the proxy_address.channel_id as unique number
            # and name for the virtual ports, as this is guaranteed to be unique
            # in the context of the OLT port, so it is also unique in the context
            # of the logical device
            device.model = 'GPON ONU'
            device.hardware_version = 'tbd'
            device.firmware_version = 'tbd'
            device.connect_status = ConnectStatus.REACHABLE
            # Subscriber-facing Ethernet UNI port.
            uni_port = Port(port_no=1,
                            label="{} ONU".format('TLGS'),
                            type=Port.ETHERNET_UNI,
                            admin_state=AdminState.ENABLED,
                            oper_status=OperStatus.ACTIVE
                            )
            self.adapter_agent.add_port(device.id, uni_port)
            log.debug('add-onu-port')
            # Network-facing PON port, peered with the parent OLT port.
            pon_port = Port(
                port_no=2,
                label='PON port',
                type=Port.PON_ONU,
                admin_state=AdminState.ENABLED,
                oper_status=OperStatus.ACTIVE,
                peers=[
                    Port.PeerPort(
                        device_id=device.parent_id,
                        port_no=device.parent_port_no
                    )
                ]
            )
            self.adapter_agent.add_port(device.id, pon_port)
            log.debug('add-onu-port')
            # obtain logical device id
            parent_device = self.adapter_agent.get_device(device.parent_id)
            logical_device_id = parent_device.parent_id
            assert logical_device_id
            # Unique OpenFlow port number: 32 ports reserved per PON channel.
            port_no = (device.proxy_address.channel_id * 32) + \
                      (device.proxy_address.onu_id + 1)
            cap = OFPPF_1GB_FD | OFPPF_FIBER
            self.adapter_agent.add_logical_port(logical_device_id, LogicalPort(
                id=str(port_no),
                ofp_port=ofp_port(
                    port_no=port_no,
                    hw_addr=mac_str_to_tuple(device.serial_number)[2:8],
                    # name='uni-{}'.format(port_no),
                    name=device.serial_number[0:6],
                    config=0,
                    state=OFPPS_LIVE,
                    curr=cap,
                    advertised=cap,
                    peer=cap,
                    curr_speed=OFPPF_1GB_FD,
                    max_speed=OFPPF_1GB_FD
                ),
                device_id=device.id,
                device_port_no=uni_port.port_no
            ))
            log.debug('add-onu-logical-port')
            # Input logical port from ONT
            self.port_no = port_no
            # Finally update to "ACTIVE"
            device = self.adapter_agent.get_device(device.id)
            # In broadcom_onu.py this state is DISCOVERED
            device.oper_status = OperStatus.ACTIVE
            self.adapter_agent.update_device(device)
            log.info('activate-onu-end', device=device)
            # # Just in case, pull for existing flows...
            # flows = self.adapter_agent.root_proxy.get('/devices/{}/flows'.format(device.id))
            # log.debug('flows-got-from-deviceid', flows=flows.items)
            # reactor.callLater(0, self.update_flow_table, device, flows.items)
            # Listening thread (we wait 5 secs to start reading from queue)
            reactor.callLater(0, self.wait_for_flow_events, device)
        except Exception as e:
            log.exception('activate-failed', e=e)
            # raise Exception('Exception during onu activation')
    @inlineCallbacks
    def wait_for_flow_events(self, device):
        """Consume one event from flow_queue (install/reinstall/remove), run
        the matching flow sequence, then re-arm itself via callLater so events
        are processed one at a time forever."""
        log.debug('wait-for-flow-events')
        event = yield self.flow_queue.get()
        log.debug("unqueued-flow-event")
        try:
            if event['action'] == 'install':
                response = yield self.install_flows_sequence(device, event['cvlan'], event['subsvlan'])
            elif event['action'] == 'reinstall':
                response = yield self.reinstall_flows_sequence(device, event['cvlan'], event['subsvlan'])
            elif event['action'] == 'remove':
                response = yield self.uninstall_flows_sequence(device)
            # NOTE(review): if event['action'] is none of the three values,
            # 'response' is unbound here and the NameError is swallowed by
            # the except below.
            if response:
                log.debug("Event handled flow successfully")
            else:
                log.debug("Error handling flow event")
            # if event['retries'] < MAX_FLOW_EVENT_RETRIES:
            #     # Failed install events are turned into reinstall...
            #     if event['action'] == 'install':
            #         event['action'] = 'reinstall'
            #     event['retries'] += 1
            #     log.debug("Flow event retry")
            #     self.flow_queue.put(event)
            # else:
            #     log.debug("Max retries done for flow event handling.", event=event)
            #     # If we were trying to install a flow, we remove it...
            #     if event['action'] != 'remove':
            #         event['action'] = 'remove'
            #         event['retries'] = 0
            #         self.flow_queue.put(event)
        except Exception as e:
            log.exception('wait-for-flow-events-exception', e=e)
        # Re-arm the consumer for the next queued event.
        reactor.callLater(0, self.wait_for_flow_events, device)
    def reboot(self, device):
        """Queue a 'reinstall' flow event for the current VLANs; a reboot with
        no installed VLANs is ignored."""
        log.debug('onu-reboot-start', device=device)
        if self.cvlan_id is not None and self.subsvlan_id is not None:
            flow_event = {'action': 'reinstall', 'cvlan': self.cvlan_id,
                          'subsvlan': self.subsvlan_id, 'retries': 0}
            self.flow_queue.put(flow_event)
            log.debug('onu-reinstall-event-created')
        else:
            # Nothing installed yet, so there is nothing to re-apply.
            log.debug('onu-reboot-ignored')
        log.debug('onu-reboot-end', device=device)
    def deactivate(self, device):
        """Tear down the ONU: unregister proxied messages, remove port
        references, the logical port and finally the child device.  Failures
        are logged, not raised."""
        try:
            log.debug('deactivate-onu', device=device)
            # Check parent reference and proxy info exists
            assert device.parent_id
            assert device.proxy_address.device_id
            # unregister from proxy messages
            self.adapter_agent.unregister_for_proxied_messages(device.proxy_address)
            self.proxy_address = None
            # Delete references to ports, if any
            if self.adapter_agent.get_ports(device.id, Port.ETHERNET_UNI):
                onu_port = self.adapter_agent.get_ports(device.id, Port.ETHERNET_UNI)[0]
                self.adapter_agent.delete_port_reference_from_parent(device.id, onu_port)
            if self.adapter_agent.get_ports(device.id, Port.PON_ONU):
                pon_port = self.adapter_agent.get_ports(device.id, Port.PON_ONU)[0]
                self.adapter_agent.delete_port_reference_from_parent(device.id, pon_port)
            # Delete device and logical ports
            parent_device = self.adapter_agent.get_device(device.parent_id)
            logical_device_id = parent_device.parent_id
            # logical_device = self.adapter_agent.get_logical_device(logical_device_id)
            # self.adapter_agent.delete_logical_device(logical_device)
            if logical_device_id:
                # self.port_no was recorded during activate().
                logical_port = self.adapter_agent.get_logical_port(logical_device_id, self.port_no)
                if logical_port:
                    self.adapter_agent.delete_logical_port(logical_device_id, logical_port)
            # Finally delete device
            self.adapter_agent.delete_child_device(
                parent_device_id=device.proxy_address.device_id,
                child_device_id=device.id)
            log.debug('deactivate-onu-end')
        except Exception as e:
            log.exception('deactivate-failed', e=e)
            # raise Exception('Exception during onu deactivation')
    # @inlineCallbacks
    def update_flow_table(self, device, flows):
        """Scan the pushed flow list for the subscriber CVLAN and queue the
        appropriate install/reinstall/remove event.

        A flow counts when it matches IN_PORT == 1 in table 0 at priority
        1000 and carries a SET_FIELD vlan_vid action; the low 12 bits of
        that vlan_vid are the CVLAN.  The sub-SVLAN is currently always 0.
        """
        cvlan_found = None
        subsvlan_found = 0
        log.debug('onu-update-flow-table', device_id=device.id, flows=flows)
        port_no = (device.proxy_address.channel_id * 32) + (device.proxy_address.onu_id + 1)
        log.debug('Checking {} flows for port:{}'.format(len(flows), port_no))
        try:
            for flow in flows:
                # Look for inner VLAN:
                for field in fd.get_ofb_fields(flow):
                    if field.type == fd.IN_PORT and field.port == 1:
                        if flow.table_id == 0:
                            if flow.priority == 1000:
                                for action in fd.get_actions(flow):
                                    if action.type == fd.SET_FIELD:
                                        # Mask off the OFPVID_PRESENT bit.
                                        cvlan_found = action.set_field.field.ofb_field.vlan_vid & 0xfff
                                        log.debug('CVLAN found:{}'.format(cvlan_found))
            if cvlan_found:
                if cvlan_found != self.cvlan_id:
                    if self.cvlan_id:
                        # VLAN changed: replace the installed flows.
                        log.debug('Reinstall flow triggered')
                        flow_event = {'action': 'reinstall', 'cvlan': cvlan_found,
                                      'subsvlan': subsvlan_found, 'retries': 0}
                        self.flow_queue.put(flow_event)
                    else:
                        # First VLAN seen: install fresh flows.
                        log.debug('Flows installation triggered')
                        flow_event = {'action': 'install', 'cvlan': cvlan_found,
                                      'subsvlan': subsvlan_found, 'retries': 0}
                        self.flow_queue.put(flow_event)
                else:
                    log.debug('Flows already installed')
            else:
                if self.cvlan_id:
                    # CVLAN disappeared from the table: remove installed flows.
                    log.debug('Flows deinstallation triggered')
                    flow_event = {'action': 'remove', 'cvlan': self.cvlan_id,
                                  'subsvlan': self.subsvlan_id, 'retries': 0}
                    self.flow_queue.put(flow_event)
                else:
                    log.debug('Incomplete flow')
            # Record the VLANs now considered current.
            self.cvlan_id = cvlan_found
            self.subsvlan_id = subsvlan_found
        except Exception as e:
            log.exception('failed-to-launch-install-flow', e=e, flow=flows)
    @inlineCallbacks
    def uninstall_flows_sequence(self, device):
        """Remove the ONU's data-flow OMCI configuration; returns the
        sub-sequence's success flag (None if it raised)."""
        log.debug('init-flow-deinstallaton')
        try:
            response = yield self.delete_data_flow_omci_config(device)
            returnValue(response)
        except Exception as e:
            log.exception('failed-to-launch-uninstall-flow', e=e)
@inlineCallbacks
def reinstall_flows_sequence(self, device, cvlan_id, subsvlan_id):
    """Tear down and re-create the ONU data-flow OMCI configuration.

    Succeeds only when both the uninstall and the subsequent install
    succeed; returns False as soon as the uninstall fails.
    """
    log.debug('init-flow-reinstallaton')
    try:
        removed = yield self.uninstall_flows_sequence(device)
        if not removed:
            returnValue(False)
        installed = yield self.install_flows_sequence(device, cvlan_id, subsvlan_id)
        returnValue(installed)
    except Exception as e:
        log.exception('failed-to-launch-reinstall-flow', e=e)
@inlineCallbacks
def install_flows_sequence(self, device, cvlan_id, subsvlan_id):
    """Program the ONU data-flow OMCI configuration for the given VLANs."""
    log.debug('init-flow-installaton')
    try:
        log.debug("ONT flow OMCI config", device=device)
        outcome = yield self.create_data_flow_omci_config(device, cvlan_id, subsvlan_id)
        returnValue(outcome)
    except Exception as e:
        log.exception('failed-to-launch-install-flow', e=e)
@inlineCallbacks
def wait_for_response(self):
    """Asynchronously wait for the next OMCI reply.

    Pops one frame from the incoming_messages queue, logs it, returns it.
    """
    log.debug('wait-for-response')
    reply = yield self.incoming_messages.get()
    log.debug("unqueued-message", msg=reply.show(dump=True))
    returnValue(reply)
# PMC_OFAL.c line:2554
@inlineCallbacks
def create_data_flow_omci_config(self, device, cvlan_id, subsvlan_id):
    """Run the full OMCI data-flow provisioning sequence on the ONU.

    Each step sends one OMCI request and waits for the matching response
    type; returns False (via returnValue) at the first rejected step,
    True when every step is acknowledged. Order mirrors PMC_OFAL.c.
    """
    # 1. Wipe the ONU MIB so provisioning starts from a known state.
    self.OMCI_ont_data_mib_reset(device)
    response = yield self.wait_for_response()
    if OmciMibResetResponse not in response:
        log.error("Failed to perform a MIB reset for {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_ont_data_mib_reset")
    # 2. Assign the alloc-id to the T-CONT.
    self.OMCI_tcont_set(device)
    response = yield self.wait_for_response()
    if OmciSetResponse not in response:
        log.error("Failed to set alloc id for {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_tcont_set")
    # 3. Create the MAC bridge service profile.
    self.pmc_omci_mac_bridge_sp_me_create(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to set parameter on {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_mac_bridge_sp_me_create")
    # 4. Create bridge port config data (instance 0).
    self.pmc_omci_mac_bridge_pcd_me_create(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to set info for {}".format(device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_mac_bridge_pcd_me_create")
    # 5. Create the extended VLAN tagging operation config data ME.
    self.pmc_omci_evto_create(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to set association info for {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_evto_create")
    # 6. Set TPIDs / downstream mode on that ME.
    self.pmc_omci_evto_set(device)
    response = yield self.wait_for_response()
    if OmciSetResponse not in response:
        log.error("Failed to set association tpid info for {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_evto_set")
    # Reuse create_default_data_flow_omci_config (confirmed from logs)
    # 7. Create the 802.1p mapper service profile.
    self.pmc_omci_8021p_msp_me_allocate(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to create 8021p msp on {}".format(
            device.proxy_address))
        if response is not None:
            log.error("Response received: {}".format(response.summary()))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_8021p_msp_me_allocate")
    # 8. Create bridge port config data (instance 1, UNI side).
    self.pmc_omci_mac_bridge_pcd_me_allocate(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to create mac bridge pcd on {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_mac_bridge_pcd_me_allocate")
    # 9. Program the four VLAN tagging operation table rows (this helper
    # waits for its own responses and returns a boolean).
    response = yield self.send_set_extended_vlan_tagging_operation_vlan_configuration_data(
        device, cvlan_id, subsvlan_id)
    if not response:
        returnValue(False)
    log.debug("[RESPONSE] OMCI_send_set_extended_vlan_tagging")
    # 10. Create the VLAN tagging filter admitting the CVLAN.
    self.send_create_vlan_tagging_filter_data(device, cvlan_id)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to set vlan tagging filter in {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_send_create_vlan_tagging_filter_data")
    # 11. Create the GEM port network CTP.
    self.pmc_omci_gem_nctp_me_allocate(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to Create gem nctp on {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_gem_nctp_me_allocate")
    # 12. Create the GEM interworking TP.
    self.pmc_omci_gem_iwtp_me_allocate(device)
    response = yield self.wait_for_response()
    if OmciCreateResponse not in response:
        log.error("Failed to Create gem iwtp on {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_gem_iwtp_me_allocate")
    # 13. Wire p-bit priority 0 of the mapper to the GEM IW TP.
    self.pmc_omci_8021p_msp_me_assign(device)
    response = yield self.wait_for_response()
    if OmciSetResponse not in response:
        log.error("Failed to assign sp {}".format(
            device.proxy_address))
        returnValue(False)
    log.debug("[RESPONSE] OMCI_8021p_msp_me_assign")
    returnValue(True)
# PMC_OFAL.c line:3065
@inlineCallbacks
def delete_data_flow_omci_config(self, device):
    """Tear down the ONU data-flow OMCI configuration.

    Sends the OMCI Delete for every ME created by
    create_data_flow_omci_config and verifies each OmciDeleteResponse.
    Returns True on full success, False as soon as any delete is
    rejected.
    """
    # (sender, short name for the error message, label for the debug log)
    steps = [
        (self.pmc_omci_evto_deallocate, "evt",
         "pmc_omci_evto_deallocate"),
        (self.pmc_omci_gem_iwtp_me_deallocate, "iwtp",
         "pmc_omci_gem_iwtp_me_deallocate"),
        (self.pmc_omci_gem_nctp_me_deallocate, "nctp",
         "pmc_omci_gem_nctp_me_deallocate"),
        (self.pmc_omci_vlan_tagging_filter_me_deallocate, "vlan tagging",
         "pmc_omci_vlan_tagging_filter_me_deallocate"),
        (self.pmc_omci_mac_bridge_pcd_me_deallocate, "bridge pcd",
         "pmc_omci_mac_bridge_pcd_me_deallocate"),
        (self.pmc_omci_8021p_msp_me_deallocate, "msp",
         "pmc_omci_8021p_msp_me_deallocate"),
    ]
    for send_delete, what, label in steps:
        send_delete(device)
        response = yield self.wait_for_response()
        if OmciDeleteResponse not in response:
            log.error(
                "Failed to deallocate {} for {}".format(what, device.proxy_address))
            # Log the offending reply; previously only the first two steps
            # did this, leaving later failures undiagnosable.
            if response is not None:
                log.error("Response received: {}".format(response.summary()))
            returnValue(False)
        log.debug("[RESPONSE] " + label, device=device)
    returnValue(True)
""" - - - - - - - create_data_flow_omci_config - - - - - - - """
def OMCI_ont_data_mib_reset(self, device):
    """Issue an OMCI MIB Reset (ME class 2, instance 0) to the ONU.

    Observed wire format: message_type=79 (MibReset), entity_class=2,
    entity_id=0.
    """
    reset_req = OmciMibReset(entity_class=2, entity_id=0)
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciMibReset.message_id,
        omci_message=reset_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] OMCI_ont_data_mib_reset")
def OMCI_tcont_set(self, device):
    """Assign the ONU alloc-id via an OMCI Set on the T-CONT ME (class 262).

    Entity instance 32769 = (slot_id << 8) | tcont_id with slot_id=128
    (representing the ONT as a whole) and tcont_id=1 (one T-CONT per
    ONU). Attributes mask 32768 selects only the alloc_id attribute.
    """
    # TODO: derive the attributes mask instead of hard-coding it.
    set_req = OmciSet(
        entity_class=262,
        entity_id=32769,
        attributes_mask=32768,
        data={'alloc_id': self.alloc_id},
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciSet.message_id,
        omci_message=set_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] OMCI_tcont_set")
def pmc_omci_mac_bridge_sp_me_create(self, device):
    """Create the MAC bridge service profile ME (class 45, instance 1).

    Mirrors pmc_omci_mac_bridge_sp_me_create from PMC_OFAL.c. Spanning
    tree, port bridging and MAC learning indications are all disabled;
    unknown destination MACs are forwarded (discard=0).
    """
    create_req = OmciCreate(
        entity_class=45,
        entity_id=1,
        data={
            'max_age': 5120,                   # max age of a spanning-tree entry
            'hello_time': 512,                 # interval between hello packets
            'priority': 32768,                 # bridge priority
            'port_bridging_ind': PON_FALSE,
            'spanning_tree_ind': PON_FALSE,
            'unknown_mac_address_discard': 0,  # forward unknown destinations
            'mac_learning_depth': 128,
            'learning_ind': PON_FALSE,
            'forward_delay': 3840,             # retention before forwarding
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_mac_bridge_sp_me_create")
def pmc_omci_mac_bridge_pcd_me_create(self, device):
    """Create MAC bridge port config data ME (class 47, instance 0),
    port 0 on bridge 1 with tp_pointer=257.

    Mirrors pmc_omci_mac_bridge_pcd_me_create from PMC_OFAL.c.
    """
    create_req = OmciCreate(
        entity_class=47,
        entity_id=0,
        data={
            'tp_pointer': 257,
            'encapsulation_methods': OMCI_MAC_BRIDGE_PCD_ENCAP_METHOD_LLC,
            'port_num': 0,
            'port_priority': 10,
            'tp_type': 1,
            'port_path_cost': 100,   # cost towards the spanning-tree root
            'port_spanning_tree_in': PON_FALSE,
            'lan_fcs_ind': OMCI_MAC_BRIDGE_PCD_LANFCS_FORWARDED,
            'bridge_id_pointer': 1,  # MAC bridge controlling the port
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_mac_bridge_pcd_me_create")
def pmc_omci_evto_create(self, device):
    """Create the extended VLAN tagging operation config data ME
    (class 171, instance 0), associated with PPTP Ethernet UNI 257.

    Mirrors pmc_omci_evto_create from PMC_OFAL.c.
    """
    create_req = OmciCreate(
        entity_class=171,
        entity_id=0,
        data={
            'association_type': OMCI_EX_VLAN_TAG_OCD_ASSOCIATION_TYPE_PPTP_ETH_UNI,
            'associated_me_pointer': 257,
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_evto_create")
def pmc_omci_evto_set(self, device):
    """Set input/output TPID (33024 = 0x8100) and downstream mode on the
    extended VLAN tagging operation config data ME (class 171, instance 0).

    attributes_mask 47616 selects the attributes carried in `data`, per
    pmc_omci_evto_set from PMC_OFAL.c.
    """
    set_req = OmciSet(
        entity_class=171,
        entity_id=0,
        attributes_mask=47616,
        data={
            'association_type': OMCI_EX_VLAN_TAG_OCD_ASSOCIATION_TYPE_PPTP_ETH_UNI,
            'input_tpid': 33024,   # 0x8100
            'associated_me_pointer': 257,
            'downstream_mode': OMCI_EX_VLAN_TAG_OCD_DS_MODE_US_INVERSE,
            'output_tpid': 33024,  # 0x8100
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciSet.message_id,
        omci_message=set_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_evto_set")
def pmc_omci_8021p_msp_me_allocate(self, device):
    """Create the 802.1p mapper service profile ME (class 130, instance 1)
    with all eight p-bit interwork pointers set to the null value (65535).

    Based on pmc_omci_8021p_msp_me_create from PMC_OFAL.c; priority 0 is
    pointed at a real GEM IW TP later by pmc_omci_8021p_msp_me_assign.
    """
    null_ptr = 65535  # OMCI "no pointer" value
    create_req = OmciCreate(
        entity_class=130,
        entity_id=1,
        data={
            'tp_pointer': null_ptr,
            'unmarked_frame_option': OMCI_8021P_MSP_UNMARKED_FRAME_TAG_FRAME,
            'interwork_tp_pointer_for_p_bit_priority_6': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_7': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_4': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_5': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_2': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_3': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_0': null_ptr,
            'interwork_tp_pointer_for_p_bit_priority_1': null_ptr,
            'tp_type': OMCI_8021P_MSP_TP_TYPE_NULL,
            'default_p_bit_marking': 0,
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_8021p_msp_me_allocate")
def pmc_omci_mac_bridge_pcd_me_allocate(self, device):
    """Create a second MAC bridge port config data ME (class 47,
    instance 1): port 1 on bridge 1 with tp_pointer=1, tp_type=3.

    Based on pmc_omci_mac_bridge_pcd_me_create from PMC_OFAL.c.
    NOTE(review): the packet dump formerly pasted here showed
    entity_class=130, port_priority=3 and tp_type=5, which contradicts
    the values actually sent below — trust the code, not the old dump;
    confirm against a fresh capture.
    """
    # Params
    # - port_path_cost: cost contribution of the port towards the
    #   spanning-tree root bridge
    # - bridge_id_pointer: MAC bridge controlling the port
    msg = OmciCreate(entity_class=47, entity_id=1,
                     data=dict(
                         tp_pointer=1,
                         encapsulation_methods=OMCI_MAC_BRIDGE_PCD_ENCAP_METHOD_LLC,
                         port_num=1,
                         port_priority=10,
                         tp_type=3,
                         port_path_cost=100,
                         port_spanning_tree_in=PON_FALSE,
                         lan_fcs_ind=OMCI_MAC_BRIDGE_PCD_LANFCS_FORWARDED,
                         bridge_id_pointer=1
                     ))
    frame = OmciFrame(transaction_id=self.trangen.next(),
                      message_type=OmciCreate.message_id,
                      omci_message=msg)
    self.adapter_agent.send_proxied_message(device.proxy_address, frame)
    log.debug("[SENT] pmc_omci_mac_bridge_pcd_me_allocate")
def pmc_omci_gem_nctp_me_allocate(self, device):
    """Create the GEM port network CTP ME (class 268, instance 1).

    Based on pmc_omci_gem_nctp_create from PMC_OFAL.c. The GEM port id
    comes from self.port_id and the CTP is bound to T-CONT 32769.
    """
    create_req = OmciCreate(
        entity_class=268,
        entity_id=1,
        data={
            'priority_queue_pointer_downstream': 0,    # 0 selects the default
            'direction': GEM_DIR_BIDIRECT,
            'tcont_pointer': 32769,
            'traffic_descriptor_profile_pointer': 0,
            'traffic_management_pointer_upstream': 4,  # 4 is the default
            # Same id as the GEM port; historically 1000 + onu_id.
            'port_id': self.port_id,
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_gem_nctp_me_allocate")
def pmc_omci_gem_iwtp_me_allocate(self, device):
    """Create the GEM interworking TP ME (class 266, instance 1), mapping
    GEM port network CTP 1 through the 802.1p mapper interworking option.

    Based on pmc_omci_gem_iwtp_me_create from PMC_OFAL.c.
    - gem_port_network_ctp_pointer: associated GEM Port Network CTP
    - service_profile_pointer: service profile type and instance
    - interworking_tp_pointer: used for CES and 802.1p mapper services
    - gal_profile_pointer: GAL Profile instance
    """
    create_req = OmciCreate(
        entity_class=266,
        entity_id=1,
        data={
            'gem_port_network_ctp_pointer': 1,
            'gal_profile_pointer': 0,
            'service_profile_pointer': 1,
            'interworking_option': OMCI_GEM_IWTP_IW_OPT_8021P_MAPPER,
            'interworking_tp_pointer': 0,
        },
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_gem_iwtp_me_allocate")
def send_create_extended_vlan_tagging_operation_configuration_data(self, device):
    """Create extended VLAN tagging operation config data ME (class 171,
    instance 0), association_type=2, associated with ME 257.

    NOTE(review): duplicates pmc_omci_evto_create with numeric literals
    instead of named constants.
    """
    create_req = OmciCreate(
        entity_class=171,
        entity_id=0,
        data={'association_type': 2, 'associated_me_pointer': 257},
    )
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug(
        "[SENT] create_extended_vlan_tagging_operation_configuration_data")
    # self.send_set_extended_vlan_tagging_operation_tpid_configuration_data(0x202, 0x8100, 0x8100)
def send_set_extended_vlan_tagging_operation_tpid_configuration_data(self, device):
    """Set input/output TPID (33024 = 0x8100) and downstream mode on the
    extended VLAN tagging ME (class 171, instance 0).

    attributes_mask 47616 matches the value observed from the PMC stack
    (the Broadcom reference uses 1024 instead).
    """
    attrs = dict(
        association_type=2,
        input_tpid=33024,
        associated_me_pointer=257,
        downstream_mode=OMCI_EX_VLAN_TAG_OCD_DS_MODE_US_INVERSE,
        output_tpid=33024,
    )
    set_req = OmciSet(entity_class=171, entity_id=0,
                      attributes_mask=47616, data=attrs)
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciSet.message_id,
        omci_message=set_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug(
        "[SENT] set_extended_vlan_tagging_operation_tpid_configuration_data")
@inlineCallbacks
def send_set_extended_vlan_tagging_operation_vlan_configuration_data(self, device, cvlan_id, subs_vlan):
    """Program four received-frame VLAN tagging operation table rows on
    extended-VLAN-tagging ME 171/0, one OMCI Set per row.

    Rows, in order:
      1. frames whose inner tag carries subs_vlan: remove one tag and
         tag with cvlan_id (TPID 0x8100)
      2-4. catch-all rows (no tag filter / default-priority inner /
         default-priority outer+inner), unchanged from the original.

    Returns True only if every row is acknowledged with OmciSetResponse;
    returns False at the first rejection.

    NOTE(review): rows 2-4 use OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_* constants
    for the treatment_inner_vid / treatment_inner_tpid_de fields — this
    looks like a constant-name mixup; confirm the numeric values are the
    intended "copy from inner" codes.
    """
    table_rows = [
        # Row 1: match inner VID == subs_vlan, strip it, tag with cvlan_id.
        VlanTaggingOperation(
            filter_outer_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_NO_TAG,
            filter_outer_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_inner_priority=0,
            filter_inner_vid=subs_vlan,
            filter_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_8100,
            filter_ether_type=OMCI_EX_VLAN_TAG_OCD_FILTER_ETYPE_NONE,
            treatment_tags_to_remove=1,
            treatment_outer_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_outer_vid=0,
            treatment_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_EQ_8100,
            treatment_inner_priority=0,
            treatment_inner_vid=cvlan_id,
            treatment_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_EQ_8100),
        # Row 2: no tag filters at all.
        VlanTaggingOperation(
            filter_outer_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_NO_TAG,
            filter_outer_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_inner_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_NO_TAG,
            filter_inner_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_ether_type=OMCI_EX_VLAN_TAG_OCD_FILTER_ETYPE_NONE,
            treatment_tags_to_remove=3,
            treatment_outer_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_outer_vid=0,
            treatment_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_OUTER,
            treatment_inner_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_inner_vid=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_COPY_FROM_INNER,
            treatment_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_INNER),
        # Row 3: default-priority inner filter.
        VlanTaggingOperation(
            filter_outer_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_NO_TAG,
            filter_outer_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_inner_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_DEFAULT,
            filter_inner_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_ether_type=OMCI_EX_VLAN_TAG_OCD_FILTER_ETYPE_NONE,
            treatment_tags_to_remove=3,
            treatment_outer_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_outer_vid=0,
            treatment_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_OUTER,
            treatment_inner_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_inner_vid=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_COPY_FROM_INNER,
            treatment_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_INNER),
        # Row 4: default-priority outer and inner filters.
        VlanTaggingOperation(
            filter_outer_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_DEFAULT,
            filter_outer_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_inner_priority=OMCI_EX_VLAN_TAG_OCD_FILTER_PRIO_DEFAULT,
            filter_inner_vid=OMCI_EX_VLAN_TAG_OCD_FILTER_VID_NONE,
            filter_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_FILTER_TPID_DE_NONE,
            filter_ether_type=OMCI_EX_VLAN_TAG_OCD_FILTER_ETYPE_NONE,
            treatment_tags_to_remove=3,
            treatment_outer_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_outer_vid=0,
            treatment_outer_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_OUTER,
            treatment_inner_priority=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_NONE,
            treatment_inner_vid=OMCI_EX_VLAN_TAG_OCD_TREAT_PRIO_COPY_FROM_INNER,
            treatment_inner_tpid_de=OMCI_EX_VLAN_TAG_OCD_TREAT_TPID_DE_COPY_FROM_INNER),
    ]
    # One uniform send / wait / check cycle per row (the original
    # duplicated this block four times with inconsistent log ordering).
    for row in table_rows:
        self.send_vlan_tagging_operation_msg(device, row)
        log.debug(
            "[SENT] send_set_extended_vlan_tagging_operation_vlan_configuration_data")
        response = yield self.wait_for_response()
        if OmciSetResponse not in response:
            log.error("Failed to set vlan extended table entry {}".format(
                device.proxy_address))
            returnValue(False)
        log.debug(
            "[RESPONSE] send_set_extended_vlan_tagging_operation_vlan_configuration_data")
    returnValue(True)
def send_vlan_tagging_operation_msg(self, device, vlan_tagging_operation_table):
    """Write one row into the received-frame VLAN tagging operation table
    of extended-VLAN-tagging ME 171/0.

    attributes_mask 1024 selects only the operation-table attribute.

    :param vlan_tagging_operation_table: a VlanTaggingOperation row
    """
    msg = OmciSet(
        entity_class=171,
        entity_id=0,
        attributes_mask=1024,
        data=dict(
            received_frame_vlan_tagging_operation_table=vlan_tagging_operation_table
        )
    )
    frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciSet.message_id,
        omci_message=msg
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, frame)
    # Fixed copy/paste: this used to log "create_vlan_tagging_filter_data",
    # which belongs to a different method and made traces misleading.
    log.debug("[SENT] send_vlan_tagging_operation_msg")
def send_create_vlan_tagging_filter_data(self, device, cvlan_id):
    """Create VLAN tagging filter data ME (class 84, instance 1) with a
    one-entry filter list admitting cvlan_id; forward_operation=16 as
    captured from the PMC stack.

    NOTE(review): the reference packet dump showed entity_id=2; the code
    (and the matching deallocate) uses 1 — confirm against a capture.
    """
    filter_attrs = dict(
        vlan_filter_0=cvlan_id,
        forward_operation=16,
        number_of_entries=1
    )
    create_req = OmciCreate(entity_class=84, entity_id=1, data=filter_attrs)
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciCreate.message_id,
        omci_message=create_req
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] create_vlan_tagging_filter_data")
def pmc_ofal_remove_default_onu_flow_omci(self, device):  # TODO: not implemented
    """Placeholder: should remove the default ONU flow via OMCI.

    A captured reference frame (PAS5211MsgSendFrame, opcode 0x302a,
    management_frame=1) shows the expected payload is an OmciDelete of
    MAC bridge port config data (entity_class=47, entity_id=1).
    """
    log.debug("[SENT] pmc_ofal_remove_default_onu_flow_omci")
def pmc_omci_evto_vlan_oper_table_entry_assign(self, device):  # TODO: not implemented
    """Placeholder: assign operation-table entries on the extended VLAN
    tagging operation config data ME.

    Per the PMC_OFAL.c reference, the implementation should fill an
    OMCI_ex_vlan_tagging_operation_config_me_set_t (association_type,
    input/output TPID, downstream_mode, a 16-byte operations entry, the
    associated ME pointer and a 24-byte dscp2pbit mapping) and issue up
    to three OmciSet messages on entity class
    OMCI_ENT_EX_VLAN_TAGGING_OPER_CONFIG_DATA with an attribute mask
    derived from OMCI_EX_VLAN_TAG_OCD_ATTR_RX_FRAME_OP_TABLE.
    """
    log.debug("[SENT] pmc_omci_evto_vlan_oper_table_entry_assign")
@inlineCallbacks
def pmc_omci_vlan_tagging_filter_me_allocate(self, device):  # TODO: incomplete
    """Query the VLAN tagging filter data ME; per PMC_OFAL.c the intent
    is then to Create it (if absent) or Set it (if present) — the
    create/set branch is still unimplemented.

    Fix: use wait_for_response() like every other OMCI exchange in this
    class (it also logs the dequeued frame) instead of reading
    incoming_messages directly.
    """
    self.OMCI_vlan_tagging_filter_get(device)
    response = yield self.wait_for_response()
    if OmciGetResponse not in response:
        log.error("Failed to Get vlan tagging filter {}".format(
            device.proxy_address))
        return
    # TODO: Get succeeded -> OMCI_vlan_tagging_filter_set,
    #       otherwise    -> OMCI_vlan_tagging_filter_create
    log.debug("[SENT] pmc_omci_vlan_tagging_filter_me_allocate")
def pmc_omci_8021p_msp_me_assign(self, device):
    """Point p-bit priority 0 of the 802.1p mapper service profile
    (ME class 130) at the data GEM interworking TP via an OMCI Set
    with attributes_mask 16472.

    NOTE(review): the reference packet dump showed entity_id=2,
    interwork_tp_pointer_for_p_bit_priority_0=2 and no 'output_tpid'
    attribute; the code below sends entity_id=1 / pointer=1 and includes
    'output_tpid' (not an 802.1p mapper attribute per the dump) —
    confirm it is masked out by attributes_mask or ignored by the ONU.
    """
    data = dict(tp_type=0,
                output_tpid=33024,
                unmarked_frame_option=1,
                interwork_tp_pointer_for_p_bit_priority_0=1,
                default_p_bit_marking=0
                )
    msg = OmciSet(entity_class=130,
                  entity_id=1,
                  attributes_mask=16472,
                  data=data
                  )
    frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciSet.message_id,
        omci_message=msg
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, frame)
    log.debug("[SENT] pmc_omci_8021p_msp_me_assign")
def pmc_ofal_recover_default_onu_flow_omci(self, device):  # TODO: not implemented
    """Placeholder: should restore the default ONU flow OMCI config."""
    log.debug("[SENT] pmc_ofal_recover_default_onu_flow_omci")
def OMCI_vlan_tagging_filter_get(self, device):  # TODO: not implemented
    """Placeholder: OMCI Get on the VLAN tagging filter data ME
    (OMCI_ENT_VLAN_TAGGING_FILT_DATA, instance = mac bridge pcd),
    requesting the filter table, forward operation and entry count
    attributes.
    """
    log.debug("[SENT] OMCI_vlan_tagging_filter_get")
def OMCI_vlan_tagging_filter_create(self, device):  # TODO: not implemented
    """Placeholder: OMCI Create for the VLAN tagging filter data ME."""
    log.debug("[SENT] OMCI_vlan_tagging_filter_create")
def OMCI_vlan_tagging_filter_set(self, device):  # TODO: not implemented
    """Placeholder: OMCI Set for the VLAN tagging filter data ME."""
    log.debug("[SENT] OMCI_vlan_tagging_filter_set")
""" - - - - - - - END create_data_flow_omci_config - - - - - - - """
""" - - - - - - - delete_data_flow_omci_config - - - - - - - """
def pmc_omci_gem_iwtp_me_deallocate(self, device):
    """Delete the GEM interworking TP ME (class 266, instance 1);
    counterpart of pmc_omci_gem_iwtp_me_allocate."""
    delete_req = OmciDelete(entity_class=266, entity_id=1)
    omci_frame = OmciFrame(
        transaction_id=self.trangen.next(),
        message_type=OmciDelete.message_id,
        omci_message=delete_req,
    )
    self.adapter_agent.send_proxied_message(device.proxy_address, omci_frame)
    log.debug("[SENT] pmc_omci_gem_iwtp_me_deallocate")
def pmc_omci_gem_nctp_me_deallocate(self, device):
    """Delete the GEM port network CTP ME (class 268, instance 1);
    counterpart of pmc_omci_gem_nctp_me_allocate."""
    msg = OmciDelete(entity_class=268, entity_id=1)
    frame = OmciFrame(transaction_id=self.trangen.next(),
                      message_type=OmciDelete.message_id,
                      omci_message=msg)
    self.adapter_agent.send_proxied_message(device.proxy_address, frame)
    # Fixed copy/paste: this used to log "...me_allocate", which made the
    # teardown trace look like a second allocation.
    log.debug("[SENT] pmc_omci_gem_nctp_me_deallocate")
def pmc_omci_vlan_tagging_filter_me_deallocate(self, device):
# |###[ OmciFrame ]###
# | transaction_id= 36
# | message_type= 70
# | omci = 10
# | \omci_message\
# | |###[ OmciDelete ]###
# | | entity_class= 84
# | | entity_id = 2
# | omci_trailer= 40
msg = OmciDelete(entity_class=84, entity_id=1)
frame = OmciFrame(transaction_id=self.trangen.next(),
message_type=OmciDelete.message_id,
omci_message=msg)
self.adapter_agent.send_proxied_message(device.proxy_address, frame)
log.debug("[SENT] pmc_omci_vlan_tagging_filter_me_deallocate")
def pmc_omci_mac_bridge_pcd_me_deallocate(self, device):
# |###[ OmciFrame ]###
# | transaction_id= 37
# | message_type= 70
# | omci = 10
# | \omci_message\
# | |###[ OmciDelete ]###
# | | entity_class= 47
# | | entity_id = 2
# | omci_trailer= 40
msg = OmciDelete(entity_class=47, entity_id=1)
frame = OmciFrame(transaction_id=self.trangen.next(),
message_type=OmciDelete.message_id,
omci_message=msg)
self.adapter_agent.send_proxied_message(device.proxy_address, frame)
log.debug("[SENT] pmc_omci_mac_bridge_pcd_me_deallocate")
def pmc_omci_8021p_msp_me_deallocate(self, device):
# |###[ OmciFrame ]###
# | transaction_id= 38
# | message_type= 70
# | omci = 10
# | \omci_message\
# | |###[ OmciDelete ]###
# | | entity_class= 130
# | | entity_id = 2
# | omci_trailer= 40
msg = OmciDelete(entity_class=130, entity_id=1)
frame = OmciFrame(transaction_id=self.trangen.next(),
message_type=OmciDelete.message_id,
omci_message=msg)
self.adapter_agent.send_proxied_message(device.proxy_address, frame)
log.debug("[SENT] pmc_omci_8021p_msp_me_deallocate")
def pmc_omci_evto_deallocate(self, device):
msg = OmciDelete(entity_class=171, entity_id=1)
frame = OmciFrame(transaction_id=self.trangen.next(),
message_type=OmciDelete.message_id,
omci_message=msg)
self.adapter_agent.send_proxied_message(device.proxy_address, frame)
log.debug("[SENT] pmc_omci_evto_deallocate")
""" - - - - - - - END delete_data_flow_omci_config - - - - - - - """
|
{
"content_hash": "1fd4c799cf5ccffdbed5b16c6e22ec49",
"timestamp": "",
"source": "github",
"line_count": 1723,
"max_line_length": 324,
"avg_line_length": 41.57457922228671,
"alnum_prop": 0.566177599709631,
"repo_name": "opencord/voltha",
"id": "e899eede5b0a0c78d3460a1cc347ebce719bc230",
"size": "71633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "voltha/adapters/tlgs_onu/tlgs_onu.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "30265"
},
{
"name": "Dockerfile",
"bytes": "2881"
},
{
"name": "Go",
"bytes": "181529"
},
{
"name": "Jinja",
"bytes": "25855"
},
{
"name": "Makefile",
"bytes": "76329"
},
{
"name": "Python",
"bytes": "9758796"
},
{
"name": "RobotFramework",
"bytes": "10188"
},
{
"name": "Ruby",
"bytes": "1126"
},
{
"name": "Shell",
"bytes": "758475"
},
{
"name": "XSLT",
"bytes": "175917"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import sys
import os
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.glrm import H2OGeneralizedLowRankEstimator
def test_load_glrm():
    """Train a GLRM on iris, save and reload the model, then verify that the
    X representation frame, predictions and archetypes survive the round
    trip (frames within tolerance, archetypes near-exactly)."""
    print("Importing iris_wheader.csv data...")
    irisH2O = h2o.upload_file(pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
    irisH2O.describe()
    g_model = H2OGeneralizedLowRankEstimator(k=3)
    g_model.train(x=irisH2O.names, training_frame=irisH2O)
    yarch_old = g_model.archetypes()
    x_old = h2o.get_frame(g_model._model_json["output"]["representation_name"])
    predOld = g_model.predict(irisH2O)
    # Bug fix: the original used os.path.realpath('__file__') -- the literal
    # string resolves against the CWD, not this module's location.
    TMPDIR = os.path.normpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "../..", "results")
    )
    try:
        TMPDIR = pyunit_utils.locate("results")  # find directory path to results folder
    except Exception:
        # Narrowed from a bare "except:"; if the results dir cannot be
        # located, fall back to creating the default path computed above.
        os.makedirs(TMPDIR)
    h2o.save_model(g_model, path=TMPDIR, force=True)  # save model
    full_path_filename = os.path.join(TMPDIR, g_model._id)
    h2o.remove(g_model)
    model_reloaded = h2o.load_model(full_path_filename)
    pred = model_reloaded.predict(irisH2O)
    yarch = model_reloaded.archetypes()
    x = h2o.get_frame(model_reloaded._model_json["output"]["representation_name"])
    # assert difference between old and new are close, archetypes should be the same
    pyunit_utils.compare_frames_local(x, x_old, tol=1e-6)
    pyunit_utils.compare_frames_local(pred[0], predOld[0], tol=1)
    for k in range(3):
        pyunit_utils.equal_two_arrays(yarch_old[k], yarch[k], eps=1e-4, tolerance=1e-10)
    print("glrm model successfully loaded...")
if __name__ == "__main__":
    pyunit_utils.standalone_test(test_load_glrm)
else:
    # h2o pyunit convention: when imported by the test runner instead of
    # executed as a script, run the test at import time.
    test_load_glrm()
|
{
"content_hash": "c36daf66b451fc6516e5df4f898b14f9",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 108,
"avg_line_length": 35.91489361702128,
"alnum_prop": 0.7085308056872038,
"repo_name": "michalkurka/h2o-3",
"id": "9f7dcc4c79d269ce70620b09a21902f1a94b6605",
"size": "1688",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_algos/glrm/pyunit_pubdev_5930_load_glrm_bug.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12629"
},
{
"name": "CSS",
"bytes": "231770"
},
{
"name": "CoffeeScript",
"bytes": "7550"
},
{
"name": "Dockerfile",
"bytes": "10302"
},
{
"name": "Emacs Lisp",
"bytes": "2226"
},
{
"name": "Groovy",
"bytes": "166480"
},
{
"name": "HCL",
"bytes": "15007"
},
{
"name": "HTML",
"bytes": "251906"
},
{
"name": "HiveQL",
"bytes": "3965"
},
{
"name": "Java",
"bytes": "11932863"
},
{
"name": "JavaScript",
"bytes": "89484"
},
{
"name": "Jupyter Notebook",
"bytes": "13867219"
},
{
"name": "Makefile",
"bytes": "50635"
},
{
"name": "Python",
"bytes": "6801044"
},
{
"name": "R",
"bytes": "3223113"
},
{
"name": "Ruby",
"bytes": "3506"
},
{
"name": "Scala",
"bytes": "33647"
},
{
"name": "Shell",
"bytes": "186559"
},
{
"name": "TeX",
"bytes": "634412"
}
],
"symlink_target": ""
}
|
'''The final estimator and result:
AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
learning_rate=1.0, n_estimators=10, random_state=42)
Accuracy: 0.87885 Precision: 0.63898 Recall: 0.48850 F1: 0.55370 F2: 0.51265
Total predictions: 13000 True positives: 977 False positives: 552 False negatives: 1023 True negatives: 10448
'''
import sys
import pickle
from feature_format import featureFormat, targetFeatureSplit
from tester import test_classifier
from tester import dump_classifier_and_data
from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import f1_score
from sklearn.metrics import make_scorer
def _zero_if_nan(value):
    """Return *value*, or 0 when it is the dataset's 'NaN' placeholder string."""
    return 0 if value == 'NaN' else value

def create_new_features(dataset):
    """Add two engineered features to every person record, in place.

    * 'poi_message_ratio': .1 * (messages to/from POIs) / (total messages),
      or 0.0 when the person has no messages at all.
      NOTE(review): the .1 scale factor is unusual for a "ratio"; it is
      preserved here for backward compatibility -- confirm it is intentional.
    * 'major_payment': sum of salary, bonus and exercised stock options,
      treating 'NaN' entries as 0 (always stored as a float).

    :param dataset: dict mapping person name -> dict of feature values,
        where missing values are encoded as the string 'NaN'.
    :returns: None (mutates *dataset*).
    """
    for record in dataset.values():
        # Create new feature 'poi_message_ratio'
        from_messages = _zero_if_nan(record['from_messages'])
        to_messages = _zero_if_nan(record['to_messages'])
        from_poi_messages = _zero_if_nan(record['from_poi_to_this_person'])
        to_poi_messages = _zero_if_nan(record['from_this_person_to_poi'])
        total_messages = from_messages + to_messages
        if total_messages > 0:
            record['poi_message_ratio'] = \
                .1*(from_poi_messages + to_poi_messages)/total_messages
        else:
            record['poi_message_ratio'] = 0.0
        # Create feature 'major_payment'.  Uses the builtin sum() instead of
        # the original loop, which shadowed the builtin with a local "sum".
        record['major_payment'] = sum(
            (record[f] for f in ('salary', 'bonus', 'exercised_stock_options')
             if record[f] != 'NaN'),
            0.0,
        )
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
# The features list contains a newly created feature 'major_payment'
features_list = ['poi','major_payment', 'exercised_stock_options']
### Load the dictionary containing the dataset
# NOTE(review): text-mode "r" works for pickle on Python 2 only; under
# Python 3 this file must be opened in binary mode ("rb").
with open("final_project_dataset.pkl", "r") as data_file:
    data_dict = pickle.load(data_file)
### Task 2: Remove outliers
# 'TOTAL' is the spreadsheet aggregation row and 'THE TRAVEL AGENCY IN THE
# PARK' is not an individual; neither belongs in a per-person dataset.
data_dict.pop('TOTAL', 0)
data_dict.pop('THE TRAVEL AGENCY IN THE PARK', 0)
### Task 3: Create new feature(s)
### Store to my_dataset for easy export below.
my_dataset = data_dict
# Adds 'poi_message_ratio' and 'major_payment' to every record, in place.
create_new_features(my_dataset)
### Extract features and labels from dataset for local testing
data = featureFormat(my_dataset, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
### Task 4: Try a varity of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
### Two classifiers have been tested, namely DecisionTreeClassifier and
### AdaBoostClassifier. The details can be found in tuning.py
### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function. Because of the small size of the dataset, the script uses
### stratified shuffle split cross validation. For more info:
### http://scikit-learn.org/stable/modules/generated/sklearn.cross_validation.StratifiedShuffleSplit.html
### The tuning of classifiers were done in tuning.py
# Hyperparameters chosen in tuning.py; metrics are recorded in the module
# docstring at the top of this file.
clf = AdaBoostClassifier(random_state=42, n_estimators=10)
test_classifier(clf, my_dataset, features_list)
### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
dump_classifier_and_data(clf, my_dataset, features_list)
|
{
"content_hash": "042e38a1e48dc3de372503b84e43b24b",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 133,
"avg_line_length": 42.60204081632653,
"alnum_prop": 0.7017964071856287,
"repo_name": "tommysiu/udacity-data-analyst",
"id": "a0411c7f2d7d651d89d2444a4329c2ea7e8f4b90",
"size": "4193",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "P4-identify-fraud-from-enron-email/poi_id.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1399"
},
{
"name": "HTML",
"bytes": "2397560"
},
{
"name": "JavaScript",
"bytes": "11064"
},
{
"name": "Jupyter Notebook",
"bytes": "158597"
},
{
"name": "Python",
"bytes": "45073"
}
],
"symlink_target": ""
}
|
"""
A class that represents a unit symbol.
"""
# -----------------------------------------------------------------------------
# Copyright (c) 2018, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the LICENSE file, distributed with this software.
# -----------------------------------------------------------------------------
import copy
import itertools
import math
from functools import lru_cache
from numbers import Number as numeric_type
import numpy as np
from sympy import (
Add,
Basic,
Expr,
Float,
Mod,
Mul,
Number,
Pow,
Rational,
Symbol,
floor,
latex,
sympify,
)
from sympy.core.numbers import One
import unyt.dimensions as dims
from unyt._parsing import parse_unyt_expr
from unyt._physical_ratios import speed_of_light_cm_per_s
from unyt.dimensions import (
angle,
base_dimensions,
current_mks,
dimensionless,
logarithmic,
temperature,
)
from unyt.equivalencies import equivalence_registry
from unyt.exceptions import (
InvalidUnitOperation,
MissingMKSCurrent,
MKSCGSConversionError,
UnitConversionError,
UnitParseError,
UnitsNotReducible,
)
from unyt.unit_registry import _lookup_unit_symbol, default_unit_registry
from unyt.unit_systems import _split_prefix
sympy_one = sympify(1)
def _get_latex_representation(expr, registry):
    """Render a sympy unit expression as a LaTeX string.

    Symbols are rendered with the latex form stored in *registry* when one
    exists, falling back to ``\\rm{...}``.  A leading numeric coefficient,
    if present, is split off and rendered as a 2-digit float prefix.
    Returns the empty string for a dimensionless (unity) expression.
    """
    symbol_table = {}
    for sym in expr.free_symbols:
        try:
            symbol_table[sym] = registry.lut[str(sym)][3]
        except KeyError:
            symbol_table[sym] = r"\rm{" + str(sym).replace("_", r"\ ") + "}"
    # Group symbols that share an identical latex representation so that
    # visually identical units cancel instead of leaving stray terms.
    repr_to_syms = {}
    for sym, latex_form in symbol_table.items():
        repr_to_syms.setdefault(latex_form, []).append(sym)
    for syms in repr_to_syms.values():
        canonical = syms[0]
        for duplicate in syms[1:]:
            expr = expr.subs(duplicate, canonical)
    # Split off a numeric prefix, if any.  (The original assigned l_expr in
    # both branches; collapsed here since only the prefix differs.)
    prefix = None
    l_expr = expr
    if isinstance(expr, Mul):
        coeff, rest = expr.as_coeff_Mul()
        l_expr = rest
        if coeff != 1 and isinstance(coeff, Number):
            prefix = Float(coeff, 2)
    latex_repr = latex(
        l_expr,
        symbol_names=symbol_table,
        mul_symbol="dot",
        fold_frac_powers=True,
        fold_short_frac=True,
    )
    if prefix is not None:
        latex_repr = latex(prefix, mul_symbol="times") + "\\ " + latex_repr
    return "" if latex_repr == "1" else latex_repr
class _ImportCache:
__slots__ = ["_ua", "_uq"]
def __init__(self):
self._ua = None
self._uq = None
@property
def ua(self):
if self._ua is None:
from unyt.array import unyt_array
self._ua = unyt_array
return self._ua
@property
def uq(self):
if self._uq is None:
from unyt.array import unyt_quantity
self._uq = unyt_quantity
return self._uq
_import_cache_singleton = _ImportCache()
class Unit:
    """
    A symbolic unit, using sympy functionality. We only add "dimensions" so
    that sympy understands relations between different units.
    """
    # Slotted attributes (Units are created very frequently):
    #   expr        -- sympy expression for the unit symbol(s)
    #   is_atomic   -- True when expr is a single sympy Symbol
    #   base_value  -- multiplicative factor to the registry's base units
    #   base_offset -- additive offset (nonzero for degC/degF-style units)
    #   dimensions  -- sympy expression over the base dimension symbols
    #   _latex_repr -- cached LaTeX string (built lazily by latex_repr)
    #   registry    -- the UnitRegistry used to resolve unit symbols
    #   is_Unit     -- constant True; cheap marker checked via getattr
    #                  instead of isinstance in hot paths
    __slots__ = [
        "expr",
        "is_atomic",
        "base_value",
        "base_offset",
        "dimensions",
        "_latex_repr",
        "registry",
        "is_Unit",
    ]
    # Set some assumptions for sympy.
    is_positive = True # make sqrt(m**2) --> m
    is_commutative = True
    is_number = False
    # High priority so numpy ndarray binary ops defer to Unit's reflected
    # operators instead of broadcasting over the Unit object.
    __array_priority__ = 3.0
    def __new__(
        cls,
        unit_expr=sympy_one,
        base_value=None,
        base_offset=0.0,
        dimensions=None,
        registry=None,
        latex_repr=None,
    ):
        """
        Create a new unit. May be an atomic unit (like a gram) or combinations
        of atomic units (like g / cm**3).
        Parameters
        ----------
        unit_expr : Unit object, sympy.core.expr.Expr object, or str
            The symbolic unit expression.
        base_value : float
            The unit's value in yt's base units.
        base_offset : float
            The offset necessary to normalize temperature units to a common
            zero point.
        dimensions : sympy.core.expr.Expr
            A sympy expression representing the dimensionality of this unit.
            It must contain only mass, length, time, temperature and angle
            symbols.
        registry : UnitRegistry object
            The unit registry we use to interpret unit symbols.
        latex_repr : string
            A string to render the unit as LaTeX
        """
        unit_cache_key = None
        # Simplest case. If user passes a Unit object, just use the expr.
        if hasattr(unit_expr, "is_Unit"):
            # grab the unit object's sympy expression.
            unit_expr = unit_expr.expr
        elif hasattr(unit_expr, "units") and hasattr(unit_expr, "value"):
            # something that looks like a unyt_array, grab the unit and value
            if unit_expr.shape != ():
                raise UnitParseError(
                    "Cannot create a unit from a non-scalar unyt_array, "
                    "received: %s" % (unit_expr,)
                )
            value = unit_expr.value
            if value == 1:
                unit_expr = unit_expr.units.expr
            else:
                # fold a non-unity scalar coefficient into the expression
                unit_expr = unit_expr.value * unit_expr.units.expr
        # Parse a text unit representation using sympy's parser
        elif isinstance(unit_expr, (str, bytes)):
            if isinstance(unit_expr, bytes):
                unit_expr = unit_expr.decode("utf-8")
            # this cache substantially speeds up unit conversions
            if registry and unit_expr in registry._unit_object_cache:
                return registry._unit_object_cache[unit_expr]
            unit_cache_key = unit_expr
            unit_expr = parse_unyt_expr(unit_expr)
        # Make sure we have an Expr at this point.
        if not isinstance(unit_expr, Expr):
            raise UnitParseError(
                "Unit representation must be a string or "
                "sympy Expr. '%s' has type '%s'." % (unit_expr, type(unit_expr))
            )
        if dimensions is None and unit_expr is sympy_one:
            dimensions = dimensionless
        if registry is None:
            # Caller did not set the registry, so use the default.
            registry = default_unit_registry
        # done with argument checking...
        # see if the unit is atomic.
        is_atomic = False
        if isinstance(unit_expr, Symbol):
            is_atomic = True
        #
        # check base_value and dimensions
        #
        if base_value is not None:
            # check that base_value is a float or can be converted to one
            try:
                base_value = float(base_value)
            except ValueError:
                raise UnitParseError(
                    "Could not use base_value as a float. "
                    "base_value is '%s' (type '%s')." % (base_value, type(base_value))
                )
            # check that dimensions is valid
            if dimensions is not None:
                _validate_dimensions(dimensions)
        else:
            # lookup the unit symbols
            unit_data = _get_unit_data_from_expr(unit_expr, registry.lut)
            base_value = unit_data[0]
            dimensions = unit_data[1]
            if len(unit_data) > 2:
                # longer tuples also carry an offset and a latex form
                base_offset = unit_data[2]
                latex_repr = unit_data[3]
            else:
                base_offset = 0.0
        # Create obj with superclass construct.
        obj = super().__new__(cls)
        # Attach attributes to obj.
        obj.expr = unit_expr
        obj.is_atomic = is_atomic
        obj.base_value = base_value
        obj.base_offset = base_offset
        obj.dimensions = dimensions
        obj._latex_repr = latex_repr
        obj.registry = registry
        # lets us avoid isinstance calls
        obj.is_Unit = True
        # if we parsed a string unit expression, cache the result
        # for faster lookup later
        if unit_cache_key is not None:
            registry._unit_object_cache[unit_cache_key] = obj
        # Return `obj` so __init__ can handle it.
        return obj
    @property
    def latex_repr(self):
        """A LaTeX representation for the unit
        Examples
        --------
        >>> from unyt import g, cm
        >>> (g/cm**3).units.latex_repr
        '\\\\frac{\\\\rm{g}}{\\\\rm{cm}^{3}}'
        """
        if self._latex_repr is not None:
            return self._latex_repr
        if self.expr.is_Atom:
            expr = self.expr
        else:
            expr = self.expr.copy()
        # build once and cache on the instance for later accesses
        self._latex_repr = _get_latex_representation(expr, self.registry)
        return self._latex_repr
    @property
    def units(self):
        # mirrors unyt_array.units so Unit and array objects can be handled
        # uniformly by callers
        return self
    def __hash__(self):
        # mix the registry's identity into the hash so equal expressions
        # from different registries hash differently
        return int(self.registry.unit_system_id, 16) ^ hash(self.expr)
    # end sympy conventions
    def __repr__(self):
        if self.expr == sympy_one:
            return "(dimensionless)"
        # @todo: don't use dunder method?
        return self.expr.__repr__()
    def __str__(self):
        if self.expr == sympy_one:
            return "dimensionless"
        unit_str = self.expr.__str__()
        # pretty-print temperature units with degree signs
        if unit_str == "degC":
            return "°C"
        if unit_str == "delta_degC":
            return "Δ°C"
        if unit_str == "degF":
            return "°F"
        if unit_str == "delta_degF":
            return "Δ°F"
        # @todo: don't use dunder method?
        return unit_str
    #
    # Start unit operations
    #
    # Addition/subtraction and all in-place operations are meaningless for
    # bare units, so every one of them raises unconditionally.
    def __add__(self, u):
        raise InvalidUnitOperation("addition with unit objects is not allowed")
    def __radd__(self, u):
        raise InvalidUnitOperation("addition with unit objects is not allowed")
    def __sub__(self, u):
        raise InvalidUnitOperation("subtraction with unit objects is not allowed")
    def __rsub__(self, u):
        raise InvalidUnitOperation("subtraction with unit objects is not allowed")
    def __iadd__(self, u):
        raise InvalidUnitOperation(
            "in-place operations with unit objects are not allowed"
        )
    def __isub__(self, u):
        raise InvalidUnitOperation(
            "in-place operations with unit objects are not allowed"
        )
    def __imul__(self, u):
        raise InvalidUnitOperation(
            "in-place operations with unit objects are not allowed"
        )
    def __itruediv__(self, u):
        raise InvalidUnitOperation(
            "in-place operations with unit objects are not allowed"
        )
    def __rmul__(self, u):
        return self.__mul__(u)
    def __mul__(self, u):
        """Multiply Unit with u (Unit object)."""
        if not getattr(u, "is_Unit", False):
            # Multiplying by a non-Unit yields a unyt_quantity (scalar) or
            # unyt_array (non-scalar) carrying these units.
            data = np.array(u, subok=True)
            unit = getattr(u, "units", None)
            if unit is not None:
                if self.dimensions is logarithmic:
                    raise InvalidUnitOperation(
                        f"Tried to multiply '{self}' and '{unit}'."
                    )
                units = unit * self
            else:
                units = self
            if data.dtype.kind not in ("f", "u", "i", "c"):
                raise InvalidUnitOperation(
                    "Tried to multiply a Unit object with '%s' (type %s). "
                    "This behavior is undefined." % (u, type(u))
                )
            if data.shape == ():
                return _import_cache_singleton.uq(data, units, bypass_validation=True)
            return _import_cache_singleton.ua(data, units, bypass_validation=True)
        elif self.dimensions is logarithmic and not u.is_dimensionless:
            raise InvalidUnitOperation(f"Tried to multiply '{self}' and '{u}'.")
        elif u.dimensions is logarithmic and not self.is_dimensionless:
            raise InvalidUnitOperation(f"Tried to multiply '{self}' and '{u}'.")
        base_offset = 0.0
        if self.base_offset or u.base_offset:
            # offset units may only be scaled by a dimensionless factor
            if u.dimensions in (temperature, angle) and self.is_dimensionless:
                base_offset = u.base_offset
            elif self.dimensions in (temperature, angle) and u.is_dimensionless:
                base_offset = self.base_offset
            else:
                raise InvalidUnitOperation(
                    "Quantities with dimensions of angle or units of "
                    "Fahrenheit or Celsius cannot be multiplied."
                )
        return Unit(
            self.expr * u.expr,
            base_value=(self.base_value * u.base_value),
            base_offset=base_offset,
            dimensions=(self.dimensions * u.dimensions),
            registry=self.registry,
        )
    def __truediv__(self, u):
        """Divide Unit by u (Unit object)."""
        if not isinstance(u, Unit):
            if isinstance(u, (numeric_type, list, tuple, np.ndarray)):
                from unyt.array import unyt_quantity
                # Unit / number -> quantity with these units
                return unyt_quantity(1.0, self) / u
            else:
                raise InvalidUnitOperation(
                    "Tried to divide a Unit object by '%s' (type %s). This "
                    "behavior is undefined." % (u, type(u))
                )
        elif self.dimensions is logarithmic and not u.is_dimensionless:
            raise InvalidUnitOperation(f"Tried to divide '{self}' and '{u}'.")
        elif u.dimensions is logarithmic and not self.is_dimensionless:
            raise InvalidUnitOperation(f"Tried to divide '{self}' and '{u}'.")
        base_offset = 0.0
        if self.base_offset or u.base_offset:
            if self.dimensions in (temperature, angle) and u.is_dimensionless:
                base_offset = self.base_offset
            else:
                raise InvalidUnitOperation(
                    # (sic) "Farhenheit" typo is in the released message text;
                    # left unchanged to preserve runtime behavior.
                    "Quantities with units of Farhenheit "
                    "and Celsius cannot be divided."
                )
        return Unit(
            self.expr / u.expr,
            base_value=(self.base_value / u.base_value),
            base_offset=base_offset,
            dimensions=(self.dimensions / u.dimensions),
            registry=self.registry,
        )
    def __rtruediv__(self, u):
        # number / Unit == number * Unit**-1
        return u * self**-1
    def __pow__(self, p):
        """Take Unit to power p (float)."""
        try:
            # round-trip through str so floats like 0.3333... become exact
            # small-denominator rationals for sympy
            p = Rational(str(p)).limit_denominator()
        except (ValueError, TypeError):
            raise InvalidUnitOperation(
                "Tried to take a Unit object to the "
                "power '%s' (type %s). Failed to cast "
                "it to a float." % (p, type(p))
            )
        if self.dimensions is logarithmic and p != 1.0:
            raise InvalidUnitOperation(f"Tried to raise '{self}' to power '{p}'")
        return Unit(
            self.expr**p,
            base_value=(self.base_value**p),
            dimensions=(self.dimensions**p),
            registry=self.registry,
        )
    def __eq__(self, u):
        """Test unit equality."""
        if not isinstance(u, Unit):
            return False
        return (
            math.isclose(self.base_value, u.base_value)
            and self.dimensions == u.dimensions
        )
    def __ne__(self, u):
        """Test unit inequality."""
        if not isinstance(u, Unit):
            return True
        if not math.isclose(self.base_value, u.base_value):
            return True
        # use 'is' comparison dimensions to avoid expensive sympy operation
        if self.dimensions is u.dimensions:
            return False
        # fall back to expensive sympy comparison
        return self.dimensions != u.dimensions
    def copy(self, *, deep=False):
        """Return a copy of this Unit; ``deep=True`` also deep-copies the
        registry rather than sharing it with the original."""
        expr = str(self.expr)
        base_value = copy.deepcopy(self.base_value)
        base_offset = copy.deepcopy(self.base_offset)
        dimensions = copy.deepcopy(self.dimensions)
        if deep:
            registry = copy.deepcopy(self.registry)
        else:
            registry = copy.copy(self.registry)
        return Unit(expr, base_value, base_offset, dimensions, registry)
    def __deepcopy__(self, memodict=None):
        return self.copy(deep=True)
    #
    # End unit operations
    #
    def same_dimensions_as(self, other_unit):
        """Test if the dimensions of *other_unit* are the same as this unit
        Examples
        --------
        >>> from unyt import Msun, kg, mile
        >>> Msun.units.same_dimensions_as(kg.units)
        True
        >>> Msun.units.same_dimensions_as(mile.units)
        False
        """
        # test first for 'is' equality to avoid expensive sympy operation
        if self.dimensions is other_unit.dimensions:
            return True
        return (self.dimensions / other_unit.dimensions) == sympy_one
    @property
    def is_dimensionless(self):
        """Is this a dimensionless unit?
        Returns
        -------
        True for a dimensionless unit, False otherwise
        Examples
        --------
        >>> from unyt import count, kg
        >>> count.units.is_dimensionless
        True
        >>> kg.units.is_dimensionless
        False
        """
        return self.dimensions is sympy_one
    @property
    def is_code_unit(self):
        """Is this a "code" unit?
        Returns
        -------
        True if the unit consists of atom units that being with "code".
        False otherwise
        """
        for atom in self.expr.atoms():
            if not (str(atom).startswith("code") or atom.is_Number):
                return False
        return True
    def list_equivalencies(self):
        """Lists the possible equivalencies associated with this unit object
        Examples
        --------
        >>> from unyt import km
        >>> km.units.list_equivalencies()
        spectral: length <-> spatial_frequency <-> frequency <-> energy
        schwarzschild: mass <-> length
        compton: mass <-> length
        """
        from unyt.equivalencies import equivalence_registry
        for k, v in equivalence_registry.items():
            if self.has_equivalent(k):
                print(v())
    def has_equivalent(self, equiv):
        """
        Check to see if this unit object as an equivalent unit in *equiv*.
        Example
        -------
        >>> from unyt import km
        >>> km.has_equivalent('spectral')
        True
        >>> km.has_equivalent('mass_energy')
        False
        """
        try:
            this_equiv = equivalence_registry[equiv]()
        except KeyError:
            raise KeyError(f'No such equivalence "{equiv}".')
        old_dims = self.dimensions
        return old_dims in this_equiv._dims
    def get_base_equivalent(self, unit_system=None):
        """Create and return dimensionally-equivalent units in a specified base.
        >>> from unyt import g, cm
        >>> (g/cm**3).get_base_equivalent('mks')
        kg/m**3
        >>> (g/cm**3).get_base_equivalent('solar')
        Mearth/AU**3
        """
        from unyt.unit_registry import _sanitize_unit_system
        unit_system = _sanitize_unit_system(unit_system, self)
        try:
            conv_data = _check_em_conversion(
                self.units, registry=self.registry, unit_system=unit_system
            )
            um = unit_system.units_map
            # already the canonical unit of this system; nothing to convert
            if self.dimensions in um and self.expr == um[self.dimensions]:
                return self.copy()
        except MKSCGSConversionError:
            raise UnitsNotReducible(self.units, unit_system)
        if any(conv_data):
            new_units, _ = _em_conversion(self, conv_data, unit_system=unit_system)
        else:
            try:
                new_units = unit_system[self.dimensions]
            except MissingMKSCurrent:
                raise UnitsNotReducible(self.units, unit_system)
        return Unit(new_units, registry=self.registry)
    def get_cgs_equivalent(self):
        """Create and return dimensionally-equivalent cgs units.
        Example
        -------
        >>> from unyt import kg, m
        >>> (kg/m**3).get_cgs_equivalent()
        g/cm**3
        """
        return self.get_base_equivalent(unit_system="cgs")
    def get_mks_equivalent(self):
        """Create and return dimensionally-equivalent mks units.
        Example
        -------
        >>> from unyt import g, cm
        >>> (g/cm**3).get_mks_equivalent()
        kg/m**3
        """
        return self.get_base_equivalent(unit_system="mks")
    def get_conversion_factor(self, other_units, dtype=None):
        """Get the conversion factor and offset (if any) from one unit
        to another
        Parameters
        ----------
        other_units: unit object
            The units we want the conversion factor for
        dtype: numpy dtype
            The dtype to return the conversion factor as
        Returns
        -------
        conversion_factor : float
            old_units / new_units
        offset : float or None
            Offset between this unit and the other unit. None if there is
            no offset.
        Examples
        --------
        >>> from unyt import km, cm, degree_fahrenheit, degree_celsius
        >>> km.get_conversion_factor(cm)
        (100000.0, None)
        >>> degree_celsius.get_conversion_factor(degree_fahrenheit)
        (1.7999999999999998, -31.999999999999886)
        """
        return _get_conversion_factor(self, other_units, dtype)
    def latex_representation(self):
        """A LaTeX representation for the unit
        Examples
        --------
        >>> from unyt import g, cm
        >>> (g/cm**3).latex_representation()
        '\\\\frac{\\\\rm{g}}{\\\\rm{cm}^{3}}'
        """
        return self.latex_repr
    def as_coeff_unit(self):
        """Factor the coefficient multiplying a unit
        For units that are multiplied by a constant dimensionless
        coefficient, returns a tuple containing the coefficient and
        a new unit object for the unmultiplied unit.
        Example
        -------
        >>> import unyt as u
        >>> unit = (u.m**2/u.cm).simplify()
        >>> unit
        100*m
        >>> unit.as_coeff_unit()
        (100.0, m)
        """
        coeff, mul = self.expr.as_coeff_Mul()
        coeff = float(coeff)
        # divide the coefficient back out of base_value so the stripped
        # unit still converts correctly
        ret = Unit(
            mul,
            self.base_value / coeff,
            self.base_offset,
            self.dimensions,
            self.registry,
        )
        return coeff, ret
    def simplify(self):
        """Return a new equivalent unit object with a simplified unit expression
        >>> import unyt as u
        >>> unit = (u.m**2/u.cm).simplify()
        >>> unit
        100*m
        """
        expr = self.expr
        # NOTE: despite the docstring, this mutates self.expr in place and
        # returns self rather than a fresh Unit.
        self.expr = _cancel_mul(expr, self.registry)
        return self
def _factor_pairs(expr):
    # Expand *expr* into individual unit factors and return every unordered
    # pair of them, so _cancel_mul can test each pair for cancellation.
    factors = expr.as_ordered_factors()
    expanded_factors = []
    for f in factors:
        if f.is_Number:
            # numeric coefficients can never cancel against a unit symbol
            continue
        base, exp = f.as_base_exp()
        if exp.q != 1:
            # non-integer rational exponent: split off the fractional part
            # as its own factor and keep the integer part below
            expanded_factors.append(base ** Mod(exp, 1))
            exp = floor(exp)
        # repeat the base |exp| times (inverted for negative exponents) so
        # each unit power appears as a separate candidate factor
        if exp >= 0:
            f = (base,) * exp
        else:
            f = (1 / base,) * abs(exp)
        expanded_factors.extend(f)
    return list(itertools.combinations(expanded_factors, 2))
def _create_unit_from_factor(factor, registry):
    """Build a Unit for a single sympy factor from the registry's LUT data."""
    base, exponent = factor.as_base_exp()
    base_value, dimensions, base_offset, latex_form = registry[str(base)][:4]
    base_unit = Unit(base, base_value, base_offset, dimensions, registry, latex_form)
    return base_unit ** exponent
def _cancel_mul(expr, registry):
    # Repeatedly look for pairs of unit factors whose product is
    # dimensionless and cancel them out of *expr*, folding the residual
    # numeric scale factor back in (e.g. m**2/cm -> 100*m).
    pairs_to_consider = _factor_pairs(expr)
    uncancelable_pairs = set()
    while len(pairs_to_consider):
        pair = pairs_to_consider.pop()
        if pair in uncancelable_pairs:
            continue
        u1 = _create_unit_from_factor(pair[0], registry)
        u2 = _create_unit_from_factor(pair[1], registry)
        prod = u1 * u2
        if prod.dimensions == 1:
            # dimensionless product: divide both factors out of the
            # expression and keep their combined conversion factor
            expr = expr / pair[0]
            expr = expr / pair[1]
            value = prod.base_value
            if value != 1:
                if value.is_integer():
                    value = int(value)
                expr *= value
        else:
            uncancelable_pairs.add(pair)
        # re-derive the candidate pairs from the (possibly reduced)
        # expression; known-uncancelable pairs are skipped above, which
        # guarantees the loop terminates
        pairs_to_consider = _factor_pairs(expr)
    return expr
#
# Unit manipulation functions
#
# map from dimensions in one unit system to dimensions in other system,
# canonical unit to convert to in that system, and floating point
# conversion factor
# Each entry maps (unit symbol, source dimension) -> (target dimension,
# target unit symbol, multiplicative conversion factor).
em_conversions = {
    ("C", dims.charge_mks): (dims.charge_cgs, "statC", 0.1 * speed_of_light_cm_per_s),
    ("statC", dims.charge_cgs): (dims.charge_mks, "C", 10.0 / speed_of_light_cm_per_s),
    ("T", dims.magnetic_field_mks): (dims.magnetic_field_cgs, "G", 1.0e4),
    ("G", dims.magnetic_field_cgs): (dims.magnetic_field_mks, "T", 1.0e-4),
    ("A", dims.current_mks): (dims.current_cgs, "statA", 0.1 * speed_of_light_cm_per_s),
    ("statA", dims.current_cgs): (
        dims.current_mks,
        "A",
        10.0 / speed_of_light_cm_per_s,
    ),
    ("V", dims.electric_potential_mks): (
        dims.electric_potential_cgs,
        "statV",
        1.0e-8 * speed_of_light_cm_per_s,
    ),
    ("statV", dims.electric_potential_cgs): (
        dims.electric_potential_mks,
        "V",
        1.0e8 / speed_of_light_cm_per_s,
    ),
    ("Ω", dims.resistance_mks): (
        dims.resistance_cgs,
        "statohm",
        1.0e9 / (speed_of_light_cm_per_s**2),
    ),
    ("statohm", dims.resistance_cgs): (
        dims.resistance_mks,
        "Ω",
        1.0e-9 * speed_of_light_cm_per_s**2,
    ),
}
# Dimensions for which a CGS<->MKS E&M conversion is defined above.
em_conversion_dims = [k[1] for k in em_conversions.keys()]
def _em_conversion(orig_units, conv_data, to_units=None, unit_system=None):
    """Convert between E&M & MKS base units.
    If orig_units is a CGS (or MKS) E&M unit, conv_data contains the
    corresponding MKS (or CGS) unit and scale factor converting between them.
    This must be done by replacing the expression of the original unit
    with the new one in the unit expression and multiplying by the scale
    factor.
    """
    # conv_data is the (target unit or None, canonical unit, scale) tuple
    # produced by _check_em_conversion
    conv_unit, canonical_unit, scale = conv_data
    if conv_unit is None:
        conv_unit = canonical_unit
    new_expr = scale * canonical_unit.expr
    if unit_system is not None:
        # we don't know the to_units, so we get it directly from the
        # conv_data
        to_units = Unit(conv_unit.expr, registry=orig_units.registry)
    new_units = Unit(new_expr, registry=orig_units.registry)
    conv = new_units.get_conversion_factor(to_units)
    return to_units, conv
# Bounded cache keyed on the argument values; Unit defines __hash__/__eq__,
# so repeated conversions of the same units hit the cache.
@lru_cache(maxsize=128, typed=False)
def _check_em_conversion(unit, to_unit=None, unit_system=None, registry=None):
    """Check to see if the units contain E&M units
    This function supports unyt's ability to convert data to and from E&M
    electromagnetic units. However, this support is limited and only very
    simple unit expressions can be readily converted. This function tries
    to see if the unit is an atomic base unit that is present in the
    em_conversions dict. If it does not contain E&M units, the function
    returns an empty tuple. If it does contain an atomic E&M unit in
    the em_conversions dict, it returns a tuple containing the unit to convert
    to and scale factor. If it contains a more complicated E&M unit and we are
    trying to convert between CGS & MKS E&M units, it raises an error.
    """
    em_map = ()
    if unit == to_unit or unit.dimensions not in em_conversion_dims:
        return em_map
    if unit.is_atomic:
        # strip any SI prefix so e.g. "mC" matches the "C" table entry
        prefix, unit_wo_prefix = _split_prefix(str(unit), unit.registry.lut)
    else:
        prefix, unit_wo_prefix = "", str(unit)
    if (unit_wo_prefix, unit.dimensions) in em_conversions:
        em_info = em_conversions[unit_wo_prefix, unit.dimensions]
        em_unit = Unit(prefix + em_info[1], registry=registry)
        if to_unit is None:
            cmks_in_unit = current_mks in unit.dimensions.atoms()
            cmks_in_unit_system = unit_system.units_map[current_mks]
            cmks_in_unit_system = cmks_in_unit_system is not None
            if cmks_in_unit and cmks_in_unit_system:
                # the target system can represent MKS current directly
                em_map = (unit_system[unit.dimensions], unit, 1.0)
            else:
                em_map = (None, em_unit, em_info[2])
        elif to_unit.dimensions == em_unit.dimensions:
            em_map = (to_unit, em_unit, em_info[2])
    if em_map:
        return em_map
    if unit_system is None:
        from unyt.unit_systems import unit_system_registry
        unit_system = unit_system_registry["mks"]
    # Compound expression: verify each atomic sub-unit is representable in
    # the target system; a MissingMKSCurrent here means the conversion
    # cannot be expressed and must be reported as an error.
    for unit_atom in unit.expr.atoms():
        if unit_atom.is_Number:
            continue
        bu = str(unit_atom)
        budims = Unit(bu, registry=registry).dimensions
        try:
            if str(unit_system[budims]) == bu:
                continue
        except MissingMKSCurrent:
            raise MKSCGSConversionError(unit)
    return em_map
def _get_conversion_factor(old_units, new_units, dtype):
"""
Get the conversion factor between two units of equivalent dimensions. This
is the number you multiply data by to convert from values in `old_units` to
values in `new_units`.
Parameters
----------
old_units: str or Unit object
The current units.
new_units : str or Unit object
The units we want.
dtype: NumPy dtype
The dtype of the conversion factor
Returns
-------
conversion_factor : float
`old_units / new_units`
offset : float or None
Offset between the old unit and new unit.
"""
if old_units.dimensions != new_units.dimensions:
raise UnitConversionError(
old_units, old_units.dimensions, new_units, new_units.dimensions
)
old_basevalue = old_units.base_value
old_baseoffset = old_units.base_offset
new_basevalue = new_units.base_value
new_baseoffset = new_units.base_offset
ratio = old_basevalue / new_basevalue
if old_baseoffset == 0 and new_baseoffset == 0:
return (ratio, None)
else:
# the dimensions are either temperature or angle (lat, lon)
if old_units.dimensions == temperature:
# for degree Celsius, back out the SI prefix scaling from
# offset scaling for degree Fahrenheit
old_prefix, _ = _split_prefix(str(old_units), old_units.registry.lut)
if old_prefix != "":
old_baseoffset /= old_basevalue
new_prefix, _ = _split_prefix(str(new_units), new_units.registry.lut)
if new_prefix != "":
new_baseoffset /= new_basevalue
return ratio, ratio * old_baseoffset - new_baseoffset
#
# Helper functions
#
def _get_unit_data_from_expr(unit_expr, unit_symbol_lut):
    """
    Grabs the total base_value and dimensions from a valid unit expression.

    Parameters
    ----------
    unit_expr: Unit object, or sympy Expr object
        The expression containing unit symbols.
    unit_symbol_lut: dict
        Provides the unit data for each valid unit symbol.

    Returns
    -------
    (base_value, dimensions) tuple of a float scale factor and a sympy
    dimension expression.

    Raises
    ------
    UnitParseError
        If the expression contains a symbolic exponent, or a node that is
        not a Number, Symbol, Pow, or Mul.
    """
    # Now for the sympy possibilities
    if isinstance(unit_expr, Number):
        if unit_expr is sympy_one:
            return (1.0, sympy_one)
        return (float(unit_expr), sympy_one)
    if isinstance(unit_expr, Symbol):
        return _lookup_unit_symbol(unit_expr.name, unit_symbol_lut)
    if isinstance(unit_expr, Pow):
        unit_data = _get_unit_data_from_expr(unit_expr.args[0], unit_symbol_lut)
        power = unit_expr.args[1]
        if isinstance(power, Symbol):
            # Symbolic exponents (e.g. "m**x") cannot be evaluated.
            raise UnitParseError(f"Invalid unit expression '{unit_expr}'.")
        conv = float(unit_data[0] ** power)
        unit = unit_data[1] ** power
        return (conv, unit)
    if isinstance(unit_expr, Mul):
        # Accumulate the product of each factor's scale and dimensions.
        base_value = 1.0
        dimensions = 1
        for expr in unit_expr.args:
            unit_data = _get_unit_data_from_expr(expr, unit_symbol_lut)
            base_value *= unit_data[0]
            dimensions *= unit_data[1]
        return (float(base_value), dimensions)
    # Fixed: the original message concatenated "...and Mul" directly with
    # "objects.", producing "Mulobjects." in the rendered error text.
    raise UnitParseError(
        "Cannot parse for unit data from '%s'. Please supply"
        " an expression of only Unit, Symbol, Pow, and Mul"
        " objects." % str(unit_expr)
    )
def _validate_dimensions(dimensions):
    """Raise UnitParseError unless *dimensions* is a valid dimension expression.

    Valid expressions are built from the base dimension symbols combined
    via Mul and Pow (with numeric exponents only); the dimensionless
    identity One is also accepted.
    """
    if isinstance(dimensions, Mul):
        # Validate each factor of a product independently.
        for dim in dimensions.args:
            _validate_dimensions(dim)
    elif isinstance(dimensions, Symbol):
        if dimensions not in base_dimensions:
            raise UnitParseError(
                f"Dimensionality expression contains an unknown symbol '{dimensions}'."
            )
    elif isinstance(dimensions, Pow):
        # Exponents must be plain numbers, not unit symbols.
        if not isinstance(dimensions.args[1], Number):
            raise UnitParseError(
                "Dimensionality expression '%s' contains a "
                "unit symbol as a power." % dimensions
            )
    elif isinstance(dimensions, (Add, Number)):
        # Sums and bare numbers are disallowed, except the identity One
        # (note: One is itself a Number, so this check must come here).
        if not isinstance(dimensions, One):
            raise UnitParseError(
                "Only dimensions that are instances of Pow, "
                "Mul, or symbols in the base dimensions are "
                "allowed. Got dimensions '%s'" % dimensions
            )
    elif not isinstance(dimensions, Basic):
        raise UnitParseError(f"Bad dimensionality expression '{dimensions}'.")
def define_unit(
    symbol, value, tex_repr=None, offset=None, prefixable=False, registry=None
):
    """
    Define a new unit and add it to the specified unit registry.

    Parameters
    ----------
    symbol : string
        The symbol for the new unit.
    value : tuple or :class:`unyt.array.unyt_quantity`
        The definition of the new unit in terms of some other units. For
        example, one would define a new "mph" unit with ``(1.0, "mile/hr")``
        or with ``1.0*unyt.mile/unyt.hr``
    tex_repr : string, optional
        The LaTeX representation of the new unit. If one is not supplied, it
        will be generated automatically based on the symbol string.
    offset : float, optional
        The default offset for the unit. If not set, an offset of 0 is
        assumed.
    prefixable : boolean, optional
        Whether or not the new unit can use SI prefixes. Default: False
    registry : :class:`unyt.unit_registry.UnitRegistry` or None
        The unit registry to add the unit to. If None, then defaults to the
        global default unit registry. If registry is set to None then the
        unit object will be added as an attribute to the top-level
        :mod:`unyt` namespace to ease working with the newly defined unit.
        See the example below.

    Examples
    --------
    >>> from unyt import day
    >>> two_weeks = 14.0*day
    >>> one_day = 1.0*day
    >>> define_unit("two_weeks", two_weeks)
    >>> from unyt import two_weeks
    >>> print((3*two_weeks)/one_day)
    42.0 dimensionless
    """
    # Imported lazily to avoid a circular import at module load time.
    import unyt
    from unyt.array import _iterable, unyt_quantity

    if registry is None:
        registry = default_unit_registry
    if symbol in registry:
        raise RuntimeError(
            f"Unit symbol '{symbol}' already exists in the provided registry"
        )
    if not isinstance(value, unyt_quantity):
        # Accept a (value, unit) pair and promote it to a quantity.
        if _iterable(value) and len(value) == 2:
            value = unyt_quantity(value[0], value[1], registry=registry)
        else:
            raise RuntimeError(
                '"value" needs to be a quantity or ' "(value, unit) tuple!"
            )
    # The registry stores every unit in terms of MKS base units.
    base_value = float(value.in_base(unit_system="mks"))
    dimensions = value.units.dimensions
    registry.add(
        symbol,
        base_value,
        dimensions,
        prefixable=prefixable,
        tex_repr=tex_repr,
        offset=offset,
    )
    if registry is default_unit_registry:
        # Expose the new unit as unyt.<symbol> for convenient importing.
        u = Unit(symbol, registry=registry)
        setattr(unyt, symbol, u)
# Shared module-level dimensionless Unit instance ("no unit").
NULL_UNIT = Unit()
|
{
"content_hash": "b95a1a3c5d4065ce0cb67a64416d34b0",
"timestamp": "",
"source": "github",
"line_count": 1110,
"max_line_length": 88,
"avg_line_length": 32.66936936936937,
"alnum_prop": 0.5721534346303395,
"repo_name": "yt-project/unyt",
"id": "5efc0bb701d4fc3cafc5bba6256873daa4aad6ac",
"size": "36271",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "unyt/unit_object.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "463795"
},
{
"name": "TeX",
"bytes": "10958"
}
],
"symlink_target": ""
}
|
from telemetry.timeline import async_slice as async_slice_module
from telemetry.timeline import flow_event as flow_event_module
from telemetry.timeline import slice as slice_module
class TimelineEventContainer(object):
    """Represents a container for events.

    Subclasses expose their own events via IterEventsInThisContainer() and
    nested containers via IterChildContainers(); every other accessor here
    is implemented on top of those two methods.
    """

    def __init__(self, name, parent):
        # parent is the enclosing container; presumably None at the root
        # of the hierarchy — confirm against callers.
        self.parent = parent
        self.name = name

    @staticmethod
    def IsAsyncSlice(t):
        # Type predicate: matches only AsyncSlice events.
        return t == async_slice_module.AsyncSlice

    @staticmethod
    def IsSliceOrAsyncSlice(t):
        # Type predicate: matches both Slice and AsyncSlice events.
        return t in (slice_module.Slice, async_slice_module.AsyncSlice)

    # Basic functions that subclasses of TimelineEventContainer should
    # implement in order to expose their events. New methods should be added
    # to this part of the code only when absolutely certain they're needed.

    def IterChildContainers(self):
        raise NotImplementedError()

    def IterEventsInThisContainer(self, event_type_predicate, event_predicate):
        """Iterates all the TimelineEvents in this container.

        Only events with a type matching event_type_predicate AND matching
        event event_predicate will be yielded.

        event_type_predicate is given an actual type object, e.g.:
            event_type_predicate(slice_module.Slice)

        event_predicate is given actual events:
            event_predicate(thread.slices[7])

        DO NOT ASSUME that the event_type_predicate will be called for every
        event found. The relative calling order of the two is left up to the
        implementer of the method.
        """
        del event_type_predicate, event_predicate  # unused
        # Base implementation is an empty generator: the bare return before
        # the unreachable yield makes this function a generator that yields
        # nothing.
        return
        yield  # pylint: disable=unreachable

    def IterAllEvents(self,
                      recursive=True,
                      event_type_predicate=lambda t: True,
                      event_predicate=lambda e: True):
        """Iterates all events in this container, pre-filtered by two predicates.

        Only events with a type matching event_type_predicate AND matching
        event event_predicate will be yielded.

        event_type_predicate is given an actual type object, e.g.:
            event_type_predicate(slice_module.Slice)

        event_predicate is given actual events:
            event_predicate(thread.slices[7])
        """
        if not recursive:
            for e in self.IterEventsInThisContainer(
                    event_type_predicate, event_predicate):
                yield e
            return

        # TODO(nduca): Write this as a proper iterator instead of one that
        # creates a list and then iterates it.
        containers = []

        def GetContainersRecursive(container):
            containers.append(container)
            for container in container.IterChildContainers():
                GetContainersRecursive(container)

        GetContainersRecursive(self)

        # Actually create the iterator.
        for c in containers:
            for e in c.IterEventsInThisContainer(event_type_predicate,
                                                 event_predicate):
                yield e

    # Helper functions for finding common kinds of events. Must always take an
    # optional recursive parameter and be implemented in terms of
    # IterAllEvents.

    def IterTimelineMarkers(self, names, recursive=True):
        # Accept a single name or an iterable of names.
        # NOTE(review): basestring is Python 2 only; this module predates
        # the Python 3 migration.
        if isinstance(names, basestring):
            names = set([names])
        else:
            names = set(names)

        def IsEventNeeded(event):
            # Only top-level (unparented) slices count as timeline markers.
            return event.parent_slice is None and event.name in names

        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=self.IsSliceOrAsyncSlice,
            event_predicate=IsEventNeeded)

    def IterAllEventsOfName(self, name, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=lambda t: True,
            event_predicate=lambda e: e.name == name)

    def IterAllSlices(self, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=lambda t: t == slice_module.Slice)

    def IterAllSlicesInRange(self, start, end, recursive=True):
        # Yields slices fully contained in [start, end].
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=lambda t: t == slice_module.Slice,
            event_predicate=lambda s: s.start >= start and s.end <= end)

    def IterAllSlicesOfName(self, name, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=lambda t: t == slice_module.Slice,
            event_predicate=lambda e: e.name == name)

    def IterAllToplevelSlicesOfName(self, name, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=lambda t: t == slice_module.Slice,
            event_predicate=lambda e: e.name == name and e.parent_slice is None)

    def IterAllAsyncSlicesOfName(self, name, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=self.IsAsyncSlice,
            event_predicate=lambda e: e.name == name)

    def IterAllAsyncSlicesStartsWithName(self, name, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=self.IsAsyncSlice,
            event_predicate=lambda e: e.name.startswith(name))

    def IterAllFlowEvents(self, recursive=True):
        return self.IterAllEvents(
            recursive=recursive,
            event_type_predicate=lambda t: t == flow_event_module.FlowEvent)

    # List versions. These should always be simple expressions that list() on
    # an underlying iter method.

    def GetAllEvents(self, recursive=True):
        return list(self.IterAllEvents(recursive=recursive))

    def GetAllEventsOfName(self, name, recursive=True):
        return list(self.IterAllEventsOfName(name, recursive))

    def GetAllToplevelSlicesOfName(self, name, recursive=True):
        return list(self.IterAllToplevelSlicesOfName(name, recursive))
|
{
"content_hash": "07788789101876d53a9334b4950dc5b8",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 80,
"avg_line_length": 35.64556962025316,
"alnum_prop": 0.7056107954545454,
"repo_name": "endlessm/chromium-browser",
"id": "e82fd496bdcd83f134a06efa828dacf995c8cea5",
"size": "5795",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/catapult/telemetry/telemetry/timeline/event_container.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import logging
from mendeley import Mendeley
from mendeley.exception import MendeleyException
__author__ = 'robodasha'
__email__ = 'damirah@live.com'
class MendeleyResolver(object):
    """Resolves document metadata from the Mendeley catalog.

    Authenticates a Mendeley API session via the client-credentials flow
    and offers best-effort lookups by DOI or by (title, year): lookup
    failures return None instead of raising.
    """

    # Metadata fields available on documents returned by the catalog
    # (requested with view='all').
    FIELDS = ['id', 'title', 'type', 'abstract', 'source', 'year', 'authors',
              'identifiers', 'keywords', 'link', 'month', 'day', 'revision',
              'pages', 'volume', 'issue', 'websites', 'publisher', 'city',
              'edition', 'institution', 'series', 'chapter', 'editors',
              'file_attached', 'reader_count',
              'reader_count_by_academic_status',
              'reader_count_by_subdiscipline', 'reader_count_by_country',
              'group_count']

    def __init__(self, client_id, secret):
        """
        :param client_id: Mendeley API application client id
        :param secret: Mendeley API application client secret
        """
        self._logger = logging.getLogger(__name__)
        mendeley_object = Mendeley(client_id, secret)
        auth = mendeley_object.start_client_credentials_flow()
        self._mendeley = auth.authenticate()

    def _remove_non_alpha(self, text):
        """Return *text* with all non-alphabetic characters removed.

        :param text: input string; may be None or empty
        :return: the filtered string, or '' for empty/None input
        """
        if text is None or len(text) < 1:
            self._logger.debug('Input string was empty')
            return ''
        return ''.join(ch for ch in text if ch.isalpha())

    # Backwards-compatible alias for the original, misspelled method name.
    _remove_non_aplha = _remove_non_alpha

    def get_document_by_doi(self, doi):
        """
        :param doi: DOI string to look up
        :return: document metadata (all possible fields are listed in
            MendeleyResolver.FIELDS) or None if document wasn't found
        """
        self._logger.info('Resolving document from Mendeley by DOI: {}'
                          .format(doi))
        try:
            doc = self._mendeley.catalog.by_identifier(doi=doi, view='all')
            self._logger.debug('Found document')
            return doc
        except MendeleyException:
            # Best-effort lookup: log instead of silently swallowing.
            self._logger.debug('No document found for DOI %s', doi)
        return None

    def get_document_by_title_and_year(self, title, year):
        """
        :param title: document title (matched case- and punctuation-
            insensitively)
        :param year: publication year (compared as int)
        :return: document metadata (all possible fields are listed in
            MendeleyResolver.FIELDS) or None if document wasn't found
        """
        self._logger.info('Resolving document from Mendeley by title and year: '
                          '{}, {}'.format(title, year))
        orig_title = self._remove_non_alpha(title).lower()
        try:
            page = self._mendeley.catalog.advanced_search(title=title,
                                                          view='all').list()
            for doc in page.items:
                # Guard against catalog entries without a year, which would
                # otherwise make int(doc.year) raise TypeError.
                if doc.year is None:
                    continue
                mendeley_title = self._remove_non_alpha(doc.title).lower()
                self._logger.debug('Comparing titles {}, {}'
                                   .format(mendeley_title, orig_title))
                if mendeley_title == orig_title and int(doc.year) == int(year):
                    return doc
        except MendeleyException:
            self._logger.debug('Search failed for title %s', title)
        return None
|
{
"content_hash": "97760bc31bb3647f8eb2cfe0690b923c",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 80,
"avg_line_length": 38.567567567567565,
"alnum_prop": 0.5448493342676944,
"repo_name": "robodasha/research_papers",
"id": "762b6665e4cc6d2c439ea5188c5082c9be966f5e",
"size": "2855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "research_papers/tools/mendeley_resolver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "197"
},
{
"name": "Python",
"bytes": "20057"
}
],
"symlink_target": ""
}
|
def simple_generator_function():
    """Yield the integers 1, 2 and 3, one at a time."""
    for value in (1, 2, 3):
        yield value
def withfor():
    """Print every value produced by the generator using a for loop."""
    for item in simple_generator_function():
        print(item)
def withnext():
    """Print the generator's three values by calling next() explicitly.

    A fourth next() call would raise StopIteration, since the generator
    yields exactly three values.
    """
    our_generator = simple_generator_function()
    # print(x) behaves identically on Python 2 and 3, unlike the original
    # Python-2-only "print x" statement form, which is a syntax error on 3.
    print(next(our_generator))
    print(next(our_generator))
    print(next(our_generator))
def main():
    """Demonstrate both ways of consuming the generator."""
    # print(...) works on both Python 2 and 3; the original "print ..."
    # statement form is Python 2 only.
    print("with for")
    withfor()
    print("with next")
    withnext()
# Run the demo only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
{
"content_hash": "9a88a492a21b6ed88c2ce125ab6e230f",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 47,
"avg_line_length": 18.73913043478261,
"alnum_prop": 0.6194895591647331,
"repo_name": "juancarlosqr/datascience",
"id": "ed79fa51bc00934e6a8ba315a0abe7fb09c941b6",
"size": "478",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/playground/learnpythonthehardway.org/generator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "191608"
},
{
"name": "HTML",
"bytes": "1984011"
},
{
"name": "Jupyter Notebook",
"bytes": "1731379"
},
{
"name": "Python",
"bytes": "228353"
},
{
"name": "R",
"bytes": "35351"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class DtickrangeValidator(_plotly_utils.basevalidators.InfoArrayValidator):
    """Validator for the ``dtickrange`` attribute of
    ``layout.scene.yaxis.tickformatstop``: a two-element array whose
    endpoints may be any value."""

    def __init__(
        self,
        plotly_name="dtickrange",
        parent_name="layout.scene.yaxis.tickformatstop",
        **kwargs
    ):
        # Both endpoints accept any value; changing either triggers a replot.
        default_items = [
            {"valType": "any", "editType": "plot"},
            {"valType": "any", "editType": "plot"},
        ]
        super(DtickrangeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "plot"),
            items=kwargs.pop("items", default_items),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
|
{
"content_hash": "197a762cc0eb160d00128354b951622e",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 75,
"avg_line_length": 31,
"alnum_prop": 0.49193548387096775,
"repo_name": "plotly/python-api",
"id": "3588faf2d8343745c52689a941c8c6178c2c5448",
"size": "744",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/scene/yaxis/tickformatstop/_dtickrange.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import os
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import (
CommonPasswordValidator, MinimumLengthValidator, NumericPasswordValidator,
UserAttributeSimilarityValidator, get_default_password_validators,
get_password_validators, password_changed,
password_validators_help_text_html, password_validators_help_texts,
validate_password,
)
from django.core.exceptions import ValidationError
from django.test import TestCase, override_settings
from django.utils._os import upath
# Run these tests against a fixed two-validator configuration:
# a common-password check plus a 12-character minimum length.
@override_settings(AUTH_PASSWORD_VALIDATORS=[
    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
        'min_length': 12,
    }},
])
class PasswordValidationTest(TestCase):
    """Tests for the module-level password-validation helper functions."""

    def test_get_default_password_validators(self):
        # The defaults reflect the AUTH_PASSWORD_VALIDATORS override above,
        # including the custom min_length option.
        validators = get_default_password_validators()
        self.assertEqual(len(validators), 2)
        self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator')
        self.assertEqual(validators[1].__class__.__name__, 'MinimumLengthValidator')
        self.assertEqual(validators[1].min_length, 12)

    def test_get_password_validators_custom(self):
        # An explicit config list overrides the settings-derived defaults.
        validator_config = [{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'}]
        validators = get_password_validators(validator_config)
        self.assertEqual(len(validators), 1)
        self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator')
        # An empty config yields no validators at all.
        self.assertEqual(get_password_validators([]), [])

    def test_validate_password(self):
        self.assertIsNone(validate_password('sufficiently-long'))

        msg_too_short = 'This password is too short. It must contain at least 12 characters.'

        # Fails only the length validator.
        with self.assertRaises(ValidationError) as cm:
            validate_password('django4242')
        self.assertEqual(cm.exception.messages, [msg_too_short])
        self.assertEqual(cm.exception.error_list[0].code, 'password_too_short')

        # Fails both validators; both messages are collected.
        with self.assertRaises(ValidationError) as cm:
            validate_password('password')
        self.assertEqual(cm.exception.messages, ['This password is too common.', msg_too_short])
        self.assertEqual(cm.exception.error_list[0].code, 'password_too_common')

        # With no validators, anything passes.
        self.assertIsNone(validate_password('password', password_validators=[]))

    def test_password_changed(self):
        self.assertIsNone(password_changed('password'))

    def test_password_validators_help_texts(self):
        help_texts = password_validators_help_texts()
        self.assertEqual(len(help_texts), 2)
        self.assertIn('12 characters', help_texts[1])
        self.assertEqual(password_validators_help_texts(password_validators=[]), [])

    def test_password_validators_help_text_html(self):
        # One <li> per configured validator.
        help_text = password_validators_help_text_html()
        self.assertEqual(help_text.count('<li>'), 2)
        self.assertIn('12 characters', help_text)

    @override_settings(AUTH_PASSWORD_VALIDATORS=[])
    def test_empty_password_validator_help_text_html(self):
        self.assertEqual(password_validators_help_text_html(), '')
class MinimumLengthValidatorTest(TestCase):
    """Tests for MinimumLengthValidator."""

    def test_validate(self):
        too_short = (
            "This password is too short. It must contain at least %d characters."
        )
        # Long enough: default minimum (8) and a custom minimum of 3.
        self.assertIsNone(MinimumLengthValidator().validate('12345678'))
        self.assertIsNone(MinimumLengthValidator(min_length=3).validate('123'))
        # One character short of the default minimum.
        with self.assertRaises(ValidationError) as ctx:
            MinimumLengthValidator().validate('1234567')
        self.assertEqual(ctx.exception.messages, [too_short % 8])
        self.assertEqual(ctx.exception.error_list[0].code, 'password_too_short')
        # One character short of a custom minimum.
        with self.assertRaises(ValidationError) as ctx:
            MinimumLengthValidator(min_length=3).validate('12')
        self.assertEqual(ctx.exception.messages, [too_short % 3])

    def test_help_text(self):
        help_text = MinimumLengthValidator().get_help_text()
        self.assertEqual(
            help_text, "Your password must contain at least 8 characters."
        )
class UserAttributeSimilarityValidatorTest(TestCase):
    """Tests for UserAttributeSimilarityValidator."""

    def test_validate(self):
        user = User.objects.create_user(
            username='testclient', password='password', email='testclient@example.com',
            first_name='Test', last_name='Client',
        )
        expected_error = "The password is too similar to the %s."

        # Without a user there is nothing to compare against.
        self.assertIsNone(UserAttributeSimilarityValidator().validate('testclient'))

        # Fixed: the next two validate() calls originally ended with a stray
        # trailing comma, turning each statement into a 1-tuple expression.
        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate('testclient', user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "username"])
        self.assertEqual(cm.exception.error_list[0].code, 'password_too_similar')

        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate('example.com', user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "email address"])

        # Lowering max_similarity makes the first-name comparison trip.
        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator(
                user_attributes=['first_name'],
                max_similarity=0.3,
            ).validate('testclient', user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "first name"])

        # With the default threshold, the first name does not flag here.
        self.assertIsNone(
            UserAttributeSimilarityValidator(user_attributes=['first_name']).validate('testclient', user=user)
        )

    def test_help_text(self):
        self.assertEqual(
            UserAttributeSimilarityValidator().get_help_text(),
            "Your password can't be too similar to your other personal information."
        )
class CommonPasswordValidatorTest(TestCase):
    """Tests for CommonPasswordValidator."""

    def test_validate(self):
        # Validation using the built-in common-password list.
        self.assertIsNone(CommonPasswordValidator().validate('a-safe-password'))
        with self.assertRaises(ValidationError) as ctx:
            CommonPasswordValidator().validate('godzilla')
        self.assertEqual(ctx.exception.messages, ["This password is too common."])

    def test_validate_custom_list(self):
        # Validation against a custom list shipped next to this test module.
        custom_list = os.path.join(
            os.path.dirname(os.path.realpath(upath(__file__))),
            'common-passwords-custom.txt',
        )
        validator = CommonPasswordValidator(password_list_path=custom_list)
        self.assertIsNone(validator.validate('a-safe-password'))
        with self.assertRaises(ValidationError) as ctx:
            validator.validate('from-my-custom-list')
        self.assertEqual(ctx.exception.messages, ["This password is too common."])
        self.assertEqual(ctx.exception.error_list[0].code, 'password_too_common')

    def test_help_text(self):
        self.assertEqual(
            CommonPasswordValidator().get_help_text(),
            "Your password can't be a commonly used password."
        )
class NumericPasswordValidatorTest(TestCase):
    """Tests for NumericPasswordValidator."""

    def test_validate(self):
        # Mixed-character passwords pass; all-digit passwords are rejected.
        self.assertIsNone(NumericPasswordValidator().validate('a-safe-password'))
        with self.assertRaises(ValidationError) as ctx:
            NumericPasswordValidator().validate('42424242')
        self.assertEqual(
            ctx.exception.messages, ["This password is entirely numeric."]
        )
        self.assertEqual(
            ctx.exception.error_list[0].code, 'password_entirely_numeric'
        )

    def test_help_text(self):
        self.assertEqual(
            NumericPasswordValidator().get_help_text(),
            "Your password can't be entirely numeric."
        )
|
{
"content_hash": "0cdf9de8f3d772a9104a9d28a842f235",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 110,
"avg_line_length": 44.68181818181818,
"alnum_prop": 0.6738301119023398,
"repo_name": "yephper/django",
"id": "987ae8c80e44a5eb80cd963ff2b17a8712839b42",
"size": "7864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/auth_tests/test_validators.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
}
|
import functools
import sys
from oslo_log import log as logging
import six
from nova.compute import utils as compute_utils
import nova.conf
from nova import exception
from nova.image import glance
from nova import utils
from nova.virt.xenapi import vm_utils
# Global nova configuration object and module-level logger.
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
class GlanceStore(object):
    """Transfers images between a XenServer SR and Glance via the XenAPI
    'glance' host plugin."""

    def _call_glance_plugin(self, context, instance, session, fn, params):
        """Invoke plugin function *fn* with *params*, retrying across the
        configured Glance API servers and recording failures as instance
        faults."""
        glance_api_servers = glance.get_api_servers()

        def pick_glance(kwargs):
            # Rotate to the next Glance endpoint before each attempt.
            server = next(glance_api_servers)
            kwargs['endpoint'] = server
            # NOTE(sdague): is the return significant here at all?
            return server

        def retry_cb(context, instance, exc=None):
            # Called on each failed attempt; log and attach the fault to
            # the instance so the failure is visible to operators.
            if exc:
                exc_info = sys.exc_info()
                LOG.debug(six.text_type(exc), exc_info=exc_info)
                compute_utils.add_instance_fault_from_exc(
                    context, instance, exc, exc_info)

        cb = functools.partial(retry_cb, context, instance)
        return session.call_plugin_serialized_with_retry(
            'glance', fn, CONF.glance.num_retries, pick_glance, cb, **params)

    def _make_params(self, context, session, image_id):
        # Arguments common to the download and upload plugin calls.
        return {'image_id': image_id,
                'sr_path': vm_utils.get_sr_path(session),
                'extra_headers': glance.generate_identity_headers(context)}

    def download_image(self, context, session, instance, image_id):
        """Download the VHDs for *image_id* into the SR via the plugin.

        Raises CouldNotFetchImage once the plugin retry budget is
        exhausted.
        """
        params = self._make_params(context, session, image_id)
        params['uuid_stack'] = vm_utils._make_uuid_stack()

        try:
            vdis = self._call_glance_plugin(context, instance, session,
                                            'download_vhd2', params)
        except exception.PluginRetriesExceeded:
            raise exception.CouldNotFetchImage(image_id=image_id)

        return vdis

    def upload_image(self, context, session, instance, image_id, vdi_uuids):
        """Upload the given VDIs to Glance as *image_id*, tagging the image
        with instance-derived properties.

        Raises CouldNotUploadImage once the plugin retry budget is
        exhausted.
        """
        params = self._make_params(context, session, image_id)
        params['vdi_uuids'] = vdi_uuids

        props = params['properties'] = {}
        props['auto_disk_config'] = instance['auto_disk_config']
        # Fall back to the configured default OS type when unset.
        props['os_type'] = instance.get('os_type', None) or (
            CONF.xenserver.default_os_type)

        compression_level = vm_utils.get_compression_level()
        if compression_level:
            props['xenapi_image_compression_level'] = compression_level

        auto_disk_config = utils.get_auto_disk_config_from_instance(instance)
        if utils.is_auto_disk_config_disabled(auto_disk_config):
            # Explicit "disabled" overrides the instance value above.
            props["auto_disk_config"] = "disabled"

        try:
            self._call_glance_plugin(context, instance, session,
                                     'upload_vhd2', params)
        except exception.PluginRetriesExceeded:
            raise exception.CouldNotUploadImage(image_id=image_id)
|
{
"content_hash": "19787d8595ac883a9983b4f618ab1590",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 77,
"avg_line_length": 37.17948717948718,
"alnum_prop": 0.62,
"repo_name": "bigswitch/nova",
"id": "6e0454c3230b2f786fbb976983889a734f763b26",
"size": "3536",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/virt/xenapi/image/glance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17220528"
},
{
"name": "Shell",
"bytes": "36658"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
}
|
"""SCons.Tool.dmd
Tool-specific initialization for the Digital Mars D compiler.
(http://digitalmars.com/d)
Coded by Andy Friesen (andy@ikagames.com)
15 November 2003
There are a number of problems with this script at this point in time.
The one that irritates me the most is the Windows linker setup. The D
linker doesn't have a way to add lib paths on the commandline, as far
as I can see. You have to specify paths relative to the SConscript or
use absolute paths. To hack around it, add '#/blah'. This will link
blah.lib from the directory where SConstruct resides.
Compiler variables:
DC - The name of the D compiler to use. Defaults to dmd or gdmd,
whichever is found.
DPATH - List of paths to search for import modules.
DVERSIONS - List of version tags to enable when compiling.
DDEBUG - List of debug tags to enable when compiling.
Linker related variables:
LIBS - List of library files to link in.
DLINK - Name of the linker to use. Defaults to dmd or gdmd.
DLINKFLAGS - List of linker flags.
Lib tool variables:
DLIB - Name of the lib tool to use. Defaults to lib.
DLIBFLAGS - List of flags to pass to the lib tool.
LIBS - Same as for the linker. (libraries to pull into the .lib)
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/dmd.py 2009/09/04 16:33:07 david"
import os
import string
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Scanner.D
import SCons.Tool
# Adapted from c++.py
def isD(source):
    """Return 1 if any node in *source* has a first source file with a .d
    extension, else 0 (also 0 for an empty or None source list)."""
    if not source:
        return 0
    for node in source:
        if node.sources:
            _, ext = os.path.splitext(str(node.sources[0]))
            if ext == '.d':
                return 1
    return 0
# Module-level caches declared global in generate(); the visible code does
# not populate them — presumably filled by platform-specific logic later in
# this file (outside this view).
smart_link = {}
smart_lib = {}
def generate(env):
    """Add Builders and construction variables for D (dmd/gdmd) to *env*.

    Registers a compile action for ``.d`` sources on the object builders,
    defines the D flag/command variables, and installs "smart" LINKCOM and
    ARCOM generators that switch to the D toolchain when D sources are
    present in a target.
    """
    global smart_link
    global smart_lib
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    DAction = SCons.Action.Action('$DCOM', '$DCOMSTR')
    static_obj.add_action('.d', DAction)
    shared_obj.add_action('.d', DAction)
    static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter)
    shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter)
    dc = env.Detect(['dmd', 'gdmd'])
    env['DC'] = dc
    env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of$TARGET $SOURCES'
    env['_DINCFLAGS'] = '$( ${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)}  $)'
    env['_DVERFLAGS'] = '$( ${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)}  $)'
    env['_DDEBUGFLAGS'] = '$( ${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)} $)'
    env['_DFLAGS'] = '$( ${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)} $)'
    env['DPATH'] = ['#/']
    env['DFLAGS'] = []
    env['DVERSIONS'] = []
    env['DDEBUG'] = []
    if dc:
        # Add the path to the standard library.
        # This is merely for the convenience of the dependency scanner.
        dmd_path = env.WhereIs(dc)
        if dmd_path:
            x = string.rindex(dmd_path, dc)
            phobosDir = dmd_path[:x] + '/../src/phobos'
            if os.path.isdir(phobosDir):
                env.Append(DPATH = [phobosDir])
    env['DINCPREFIX'] = '-I'
    env['DINCSUFFIX'] = ''
    env['DVERPREFIX'] = '-version='
    env['DVERSUFFIX'] = ''
    env['DDEBUGPREFIX'] = '-debug='
    env['DDEBUGSUFFIX'] = ''
    env['DFLAGPREFIX'] = '-'
    env['DFLAGSUFFIX'] = ''
    env['DFILESUFFIX'] = '.d'
    # Need to use the Digital Mars linker/lib on windows.
    # *nix can just use GNU link.
    if env['PLATFORM'] == 'win32':
        env['DLINK'] = '$DC'
        env['DLINKCOM'] = '$DLINK -of$TARGET $SOURCES $DFLAGS $DLINKFLAGS $_DLINKLIBFLAGS'
        env['DLIB'] = 'lib'
        env['DLIBCOM'] = '$DLIB $_DLIBFLAGS -c $TARGET $SOURCES $_DLINKLIBFLAGS'
        env['_DLINKLIBFLAGS'] = '$( ${_concat(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
        env['_DLIBFLAGS'] = '$( ${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)} $)'
        env['DLINKFLAGS'] = []
        env['DLIBLINKPREFIX'] = ''
        env['DLIBLINKSUFFIX'] = '.lib'
        env['DLIBFLAGPREFIX'] = '-'
        env['DLIBFLAGSUFFIX'] = ''
        env['DLINKFLAGPREFIX'] = '-'
        env['DLINKFLAGSUFFIX'] = ''
        SCons.Tool.createStaticLibBuilder(env)
        # Basically, we hijack the link and ar builders with our own.
        # these builders check for the presence of D source, and swap out
        # the system's defaults for the Digital Mars tools. If there's no D
        # source, then we silently return the previous settings.
        linkcom = env.get('LINKCOM')
        try:
            env['SMART_LINKCOM'] = smart_link[linkcom]
        except KeyError:
            def _smartLink(source, target, env, for_signature,
                           defaultLinker=linkcom):
                if isD(source):
                    # XXX I'm not sure how to add a $DLINKCOMSTR variable
                    # so that it works with this _smartLink() logic,
                    # and I don't have a D compiler/linker to try it out,
                    # so we'll leave it alone for now.
                    return '$DLINKCOM'
                else:
                    return defaultLinker
            env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink
        arcom = env.get('ARCOM')
        try:
            env['SMART_ARCOM'] = smart_lib[arcom]
        except KeyError:
            def _smartLib(source, target, env, for_signature,
                          defaultLib=arcom):
                if isD(source):
                    # XXX I'm not sure how to add a $DLIBCOMSTR variable
                    # so that it works with this _smartLib() logic, and
                    # I don't have a D compiler/archiver to try it out,
                    # so we'll leave it alone for now.
                    return '$DLIBCOM'
                else:
                    return defaultLib
            env['SMART_ARCOM'] = smart_lib[arcom] = _smartLib
        # It is worth noting that the final space in these strings is
        # absolutely pivotal. SCons sees these as actions and not generators
        # if it is not there. (very bad)
        env['ARCOM'] = '$SMART_ARCOM '
        env['LINKCOM'] = '$SMART_LINKCOM '
    else: # assuming linux
        linkcom = env.get('LINKCOM')
        try:
            env['SMART_LINKCOM'] = smart_link[linkcom]
        except KeyError:
            def _smartLink(source, target, env, for_signature,
                           defaultLinker=linkcom, dc=dc):
                if isD(source):
                    try:
                        libs = env['LIBS']
                    except KeyError:
                        libs = []
                    # BUG FIX: the original compared with "dc is 'dmd'" —
                    # identity against a string literal relies on CPython
                    # interning and is not guaranteed; use equality.
                    if 'phobos' not in libs and 'gphobos' not in libs:
                        if dc == 'dmd':
                            env.Append(LIBS = ['phobos'])
                        elif dc == 'gdmd':
                            env.Append(LIBS = ['gphobos'])
                    if 'pthread' not in libs:
                        env.Append(LIBS = ['pthread'])
                    if 'm' not in libs:
                        env.Append(LIBS = ['m'])
                return defaultLinker
            env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink
        env['LINKCOM'] = '$SMART_LINKCOM '
def exists(env):
    """Return a usable D compiler name if dmd or gdmd can be detected."""
    candidates = ['dmd', 'gdmd']
    return env.Detect(candidates)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
{
"content_hash": "d298101a8c631dba4f3343d1a4195bda",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 120,
"avg_line_length": 38.504464285714285,
"alnum_prop": 0.5950144927536232,
"repo_name": "cournape/numscons",
"id": "5f519b7f04b92ee6b427c733df5f69a070a2d131",
"size": "8625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numscons/scons-local/scons-local-1.2.0/SCons/Tool/dmd.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1275"
},
{
"name": "FORTRAN",
"bytes": "146"
},
{
"name": "Python",
"bytes": "2033297"
},
{
"name": "Shell",
"bytes": "421"
}
],
"symlink_target": ""
}
|
from unittest.mock import patch
from unittest.mock import MagicMock
from cauldron.environ import logger
@patch('cauldron.environ.logger.log')
def test_header_zero(log: MagicMock):
    """Should log a level zero header without modification"""
    logger.header('hello', level=0)
    positional = log.call_args[0]
    assert positional[0] == 'hello', 'Message should not be modified'
@patch('cauldron.environ.logger.log')
def test_header_infinity(log: MagicMock):
    """Should log a high level header without modification"""
    logger.header('hello', level=8)
    positional = log.call_args[0]
    assert positional[0] == 'hello', 'Message should not be modified'
@patch('cauldron.environ.logger.raw')
def test_log_with_kwargs(raw: MagicMock):
    """Should include kwargs in log output."""
    message = logger.log('test', foo=42)
    assert raw.call_count == 1
    assert message.find('foo: 42') > 0, """
    Expected to find the foo kwarg in the message.
    """
@patch('traceback.extract_tb')
def test_get_error_stack_module(extract_tb: MagicMock):
    """Should nullify location when the location is module"""
    fake_frame = MagicMock()
    fake_frame.name = '<module>'
    extract_tb.return_value = [fake_frame]
    stack = logger.get_error_stack()
    assert stack[0]['location'] is None, """
    Expected a <module> value to be changed to `None`.
    """
@patch('traceback.extract_tb')
def test_get_error_stack(extract_tb: MagicMock):
    """Should remove prefix when location is a remote shared library path"""
    fake_frame = MagicMock()
    fake_frame.name = '/tmp/cd-remote/__cauldron_shared_libs/test'
    extract_tb.return_value = [fake_frame]
    stack = logger.get_error_stack()
    assert stack[0]['location'] == '/test', """
    Expected the remote prefix to be removed.
    """
|
{
"content_hash": "90265a70a59022b6a158fe92f2224356",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 76,
"avg_line_length": 33.05555555555556,
"alnum_prop": 0.6666666666666666,
"repo_name": "sernst/cauldron",
"id": "b291a24dbf8105c60565fbe53e8b203b0da7dbed",
"size": "1785",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cauldron/test/environ/test_logger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "36"
},
{
"name": "CSS",
"bytes": "1369"
},
{
"name": "Dockerfile",
"bytes": "842"
},
{
"name": "HTML",
"bytes": "21740"
},
{
"name": "JavaScript",
"bytes": "48753"
},
{
"name": "Python",
"bytes": "913057"
},
{
"name": "SCSS",
"bytes": "17130"
},
{
"name": "Shell",
"bytes": "300"
},
{
"name": "Vue",
"bytes": "95790"
}
],
"symlink_target": ""
}
|
"""Script that generates the build.ninja for ninja itself.
Projects that use ninja themselves should either write a similar script
or use a meta-build system that supports Ninja output."""
from __future__ import print_function
from optparse import OptionParser
import os
import sys
import platform_helper
# Make the checked-in misc/ninja_syntax module importable, then load it.
sys.path.insert(0, 'misc')
import ninja_syntax
# --- Command-line interface -------------------------------------------------
parser = OptionParser()
profilers = ['gmon', 'pprof']
parser.add_option('--platform',
                  help='target platform (' + '/'.join(platform_helper.platforms()) + ')',
                  choices=platform_helper.platforms())
parser.add_option('--host',
                  help='host platform (' + '/'.join(platform_helper.platforms()) + ')',
                  choices=platform_helper.platforms())
parser.add_option('--debug', action='store_true',
                  help='enable debugging extras',)
parser.add_option('--profile', metavar='TYPE',
                  choices=profilers,
                  help='enable profiling (' + '/'.join(profilers) + ')',)
parser.add_option('--with-gtest', metavar='PATH',
                  help='use gtest unpacked in directory PATH')
parser.add_option('--with-python', metavar='EXE',
                  help='use EXE as the Python interpreter',
                  default=os.path.basename(sys.executable))
parser.add_option('--force-pselect', action='store_true',
                  help="ppoll() is used by default where available, but some platforms may need to use pselect instead",)
(options, args) = parser.parse_args()
if args:
    print('ERROR: extra unparsed command-line arguments:', args)
    sys.exit(1)
# Resolve target and host platforms; host defaults to the target platform.
platform = platform_helper.Platform(options.platform)
if options.host:
    host = platform_helper.Platform(options.host)
else:
    host = platform
# The generated build file; every subsequent n.* call writes into it.
BUILD_FILENAME = 'build.ninja'
buildfile = open(BUILD_FILENAME, 'w')
n = ninja_syntax.Writer(buildfile)
n.comment('This file is used to build ninja itself.')
n.comment('It is generated by ' + os.path.basename(__file__) + '.')
n.newline()
n.variable('ninja_required_version', '1.3')
n.newline()
n.comment('The arguments passed to configure.py, for rerunning it.')
n.variable('configure_args', ' '.join(sys.argv[1:]))
# Only a whitelisted set of environment variables is forwarded into the
# generated build (and into configure re-runs via $configure_env).
env_keys = set(['CXX', 'AR', 'CFLAGS', 'LDFLAGS'])
configure_env = dict((k, os.environ[k]) for k in os.environ if k in env_keys)
if configure_env:
    config_str = ' '.join([k + '=' + configure_env[k] for k in configure_env])
    n.variable('configure_env', config_str + '$ ')
n.newline()
# Compiler and object-file suffix; MSVC overrides the g++ default.
CXX = configure_env.get('CXX', 'g++')
objext = '.o'
if platform.is_msvc():
    CXX = 'cl'
    objext = '.obj'
def src(filename):
    """Return the path of *filename* inside the source directory."""
    return os.path.join('src', filename)
def built(filename):
    """Return the path of *filename* inside the $builddir output directory."""
    return os.path.join('$builddir', filename)
def doc(filename):
    """Return the path of *filename* inside the documentation directory."""
    return os.path.join('doc', filename)
def cc(name, **kwargs):
    """Emit a build edge compiling src/<name>.c into an object file."""
    return n.build(built(name + objext), 'cxx', src(name + '.c'), **kwargs)
def cxx(name, **kwargs):
    """Emit a build edge compiling src/<name>.cc into an object file."""
    return n.build(built(name + objext), 'cxx', src(name + '.cc'), **kwargs)
def binary(name):
    """Return the executable target for *name*.

    On Windows the real output is <name>.exe and a phony alias named
    <name> is added so 'ninja <name>' works on every platform.
    """
    if platform.is_windows():
        exe = name + '.exe'
        n.build(name, 'phony', exe)
        return exe
    return name
n.variable('builddir', 'build')
n.variable('cxx', CXX)
# MSVC uses link.exe as its librarian driver; elsewhere honor $AR.
if platform.is_msvc():
    n.variable('ar', 'link')
else:
    n.variable('ar', configure_env.get('AR', 'ar'))
# --- Compiler and linker flag selection -------------------------------------
if platform.is_msvc():
    cflags = ['/nologo',  # Don't print startup banner.
              '/Zi',  # Create pdb with debug info.
              '/W4',  # Highest warning level.
              '/WX',  # Warnings as errors.
              '/wd4530', '/wd4100', '/wd4706',
              '/wd4512', '/wd4800', '/wd4702', '/wd4819',
              # Disable warnings about passing "this" during initialization.
              '/wd4355',
              '/GR-',  # Disable RTTI.
              # Disable size_t -> int truncation warning.
              # We never have strings or arrays larger than 2**31.
              '/wd4267',
              '/DNOMINMAX', '/D_CRT_SECURE_NO_WARNINGS',
              '/D_VARIADIC_MAX=10',
              '/DNINJA_PYTHON="%s"' % options.with_python]
    if platform.msvc_needs_fs():
        cflags.append('/FS')
    ldflags = ['/DEBUG', '/libpath:$builddir']
    if not options.debug:
        cflags += ['/Ox', '/DNDEBUG', '/GL']
        ldflags += ['/LTCG', '/OPT:REF', '/OPT:ICF']
else:
    cflags = ['-g', '-Wall', '-Wextra',
              '-Wno-deprecated',
              '-Wno-unused-parameter',
              '-fno-rtti',
              '-fno-exceptions',
              '-fvisibility=hidden', '-pipe',
              '-Wno-missing-field-initializers',
              '-DNINJA_PYTHON="%s"' % options.with_python]
    if options.debug:
        cflags += ['-D_GLIBCXX_DEBUG', '-D_GLIBCXX_DEBUG_PEDANTIC']
        cflags.remove('-fno-rtti')  # Needed for above pedanticness.
    else:
        cflags += ['-O2', '-DNDEBUG']
    if 'clang' in os.path.basename(CXX):
        cflags += ['-fcolor-diagnostics']
    if platform.is_mingw():
        cflags += ['-D_WIN32_WINNT=0x0501']
    ldflags = ['-L$builddir']
libs = []
# Per-platform adjustments to the baseline flags chosen above.
if platform.is_mingw():
    cflags.remove('-fvisibility=hidden');
    ldflags.append('-static')
elif platform.is_sunos5():
    cflags.remove('-fvisibility=hidden')
elif platform.is_msvc():
    pass
else:
    if options.profile == 'gmon':
        cflags.append('-pg')
        ldflags.append('-pg')
    elif options.profile == 'pprof':
        cflags.append('-fno-omit-frame-pointer')
        libs.extend(['-Wl,--no-as-needed', '-lprofiler'])
# Prefer ppoll() where the platform supports it, unless overridden.
if (platform.is_linux() or platform.is_openbsd() or platform.is_bitrig()) and not options.force_pselect:
    cflags.append('-DUSE_PPOLL')
def shell_escape(str):
    """Escape str such that it's interpreted as a single argument by
    the shell."""
    # This isn't complete, but it's just enough to make NINJA_PYTHON work.
    if platform.is_windows():
        return str
    if '"' not in str:
        return str
    return "'%s'" % str.replace("'", "\\'")
# Environment-provided flags are appended last so they win over defaults.
if 'CFLAGS' in configure_env:
    cflags.append(configure_env['CFLAGS'])
n.variable('cflags', ' '.join(shell_escape(flag) for flag in cflags))
if 'LDFLAGS' in configure_env:
    ldflags.append(configure_env['LDFLAGS'])
n.variable('ldflags', ' '.join(shell_escape(flag) for flag in ldflags))
n.newline()
# --- Core build rules: compile, archive, link -------------------------------
if platform.is_msvc():
    n.rule('cxx',
        command='$cxx /showIncludes $cflags -c $in /Fo$out',
        description='CXX $out',
        deps='msvc')
else:
    n.rule('cxx',
        command='$cxx -MMD -MT $out -MF $out.d $cflags -c $in -o $out',
        depfile='$out.d',
        deps='gcc',
        description='CXX $out')
n.newline()
if host.is_msvc():
    n.rule('ar',
           command='lib /nologo /ltcg /out:$out $in',
           description='LIB $out')
elif host.is_mingw():
    n.rule('ar',
           command='cmd /c $ar cqs $out.tmp $in && move /Y $out.tmp $out',
           description='AR $out')
else:
    n.rule('ar',
           command='rm -f $out && $ar crs $out $in',
           description='AR $out')
n.newline()
if platform.is_msvc():
    n.rule('link',
        command='$cxx $in $libs /nologo /link $ldflags /out:$out',
        description='LINK $out')
else:
    n.rule('link',
        command='$cxx $ldflags -o $out $in $libs',
        description='LINK $out')
n.newline()
objs = []
# The inlined browse tool is only built on platforms that support it.
if not platform.is_windows() and not platform.is_solaris():
    n.comment('browse_py.h is used to inline browse.py.')
    n.rule('inline',
           command='src/inline.sh $varname < $in > $out',
           description='INLINE $out')
    n.build(built('browse_py.h'), 'inline', src('browse.py'),
            implicit='src/inline.sh',
            variables=[('varname', 'kBrowsePy')])
    n.newline()
    objs += cxx('browse', order_only=built('browse_py.h'))
    n.newline()
n.comment('the depfile parser and ninja lexers are generated using re2c.')
def has_re2c():
    """Return True when a sufficiently new re2c (version int >= 1103) is on PATH."""
    import subprocess
    try:
        proc = subprocess.Popen(['re2c', '-V'], stdout=subprocess.PIPE)
        version_text = proc.communicate()[0]
        return int(version_text, 10) >= 1103
    except OSError:
        # re2c is not installed at all.
        return False
# Regenerate the checked-in parser/lexer sources only when re2c is available.
if has_re2c():
    n.rule('re2c',
           command='re2c -b -i --no-generation-date -o $out $in',
           description='RE2C $out')
    # Generate the .cc files in the source directory so we can check them in.
    n.build(src('depfile_parser.cc'), 're2c', src('depfile_parser.in.cc'))
    n.build(src('lexer.cc'), 're2c', src('lexer.in.cc'))
else:
    print("warning: A compatible version of re2c (>= 0.11.3) was not found; "
          "changes to src/*.in.cc will not affect your build.")
n.newline()
n.comment('Core source files all build into ninja library.')
for name in ['build',
             'build_log',
             'clean',
             'debug_flags',
             'depfile_parser',
             'deps_log',
             'disk_interface',
             'edit_distance',
             'eval_env',
             'graph',
             'graphviz',
             'lexer',
             'line_printer',
             'manifest_parser',
             'metrics',
             'state',
             'util',
             'version']:
    objs += cxx(name)
if platform.is_windows():
    for name in ['subprocess-win32',
                 'includes_normalize-win32',
                 'msvc_helper-win32',
                 'msvc_helper_main-win32']:
        objs += cxx(name)
    if platform.is_msvc():
        objs += cxx('minidump-win32')
    # getopt is a plain C source, compiled via the cc() helper.
    objs += cc('getopt')
else:
    objs += cxx('subprocess-posix')
if platform.is_msvc():
    ninja_lib = n.build(built('ninja.lib'), 'ar', objs)
else:
    ninja_lib = n.build(built('libninja.a'), 'ar', objs)
n.newline()
if platform.is_msvc():
    libs.append('ninja.lib')
else:
    libs.append('-lninja')
all_targets = []
n.comment('Main executable is library plus main() function.')
objs = cxx('ninja')
ninja = n.build(binary('ninja'), 'link', objs, implicit=ninja_lib,
                variables=[('libs', libs)])
n.newline()
all_targets += ninja
n.comment('Tests all build into ninja_test executable.')
variables = []
test_cflags = cflags + ['-DGTEST_HAS_RTTI=0']
test_ldflags = None
test_libs = libs
objs = []
# gtest either comes from an unpacked tree (--with-gtest) or the system.
if options.with_gtest:
    path = options.with_gtest
    gtest_all_incs = '-I%s -I%s' % (path, os.path.join(path, 'include'))
    if platform.is_msvc():
        gtest_cflags = '/nologo /EHsc /Zi /D_VARIADIC_MAX=10 ' + gtest_all_incs
    else:
        gtest_cflags = '-fvisibility=hidden ' + gtest_all_incs
    objs += n.build(built('gtest-all' + objext), 'cxx',
                    os.path.join(path, 'src', 'gtest-all.cc'),
                    variables=[('cflags', gtest_cflags)])
    test_cflags.append('-I%s' % os.path.join(path, 'include'))
else:
    # Use gtest from system.
    if platform.is_msvc():
        test_libs.extend(['gtest_main.lib', 'gtest.lib'])
    else:
        test_libs.extend(['-lgtest_main', '-lgtest'])
n.variable('test_cflags', test_cflags)
for name in ['build_log_test',
             'build_test',
             'clean_test',
             'depfile_parser_test',
             'deps_log_test',
             'disk_interface_test',
             'edit_distance_test',
             'graph_test',
             'lexer_test',
             'manifest_parser_test',
             'ninja_test',
             'state_test',
             'subprocess_test',
             'test',
             'util_test']:
    objs += cxx(name, variables=[('cflags', '$test_cflags')])
if platform.is_windows():
    for name in ['includes_normalize_test', 'msvc_helper_test']:
        objs += cxx(name, variables=[('cflags', test_cflags)])
if not platform.is_windows():
    test_libs.append('-lpthread')
ninja_test = n.build(binary('ninja_test'), 'link', objs, implicit=ninja_lib,
                     variables=[('ldflags', test_ldflags),
                                ('libs', test_libs)])
n.newline()
all_targets += ninja_test
n.comment('Ancillary executables.')
objs = cxx('parser_perftest')
all_targets += n.build(binary('parser_perftest'), 'link', objs,
                       implicit=ninja_lib, variables=[('libs', libs)])
objs = cxx('build_log_perftest')
all_targets += n.build(binary('build_log_perftest'), 'link', objs,
                       implicit=ninja_lib, variables=[('libs', libs)])
objs = cxx('canon_perftest')
all_targets += n.build(binary('canon_perftest'), 'link', objs,
                       implicit=ninja_lib, variables=[('libs', libs)])
objs = cxx('hash_collision_bench')
all_targets += n.build(binary('hash_collision_bench'), 'link', objs,
                       implicit=ninja_lib, variables=[('libs', libs)])
n.newline()
n.comment('Generate a graph using the "graph" tool.')
n.rule('gendot',
       command='./ninja -t graph all > $out')
n.rule('gengraph',
       command='dot -Tpng $in > $out')
dot = n.build(built('graph.dot'), 'gendot', ['ninja', 'build.ninja'])
n.build('graph.png', 'gengraph', dot)
n.newline()
n.comment('Generate the manual using asciidoc.')
n.rule('asciidoc',
       command='asciidoc -b docbook -d book -o $out $in',
       description='ASCIIDOC $out')
n.rule('xsltproc',
       command='xsltproc --nonet doc/docbook.xsl $in > $out',
       description='XSLTPROC $out')
xml = n.build(built('manual.xml'), 'asciidoc', doc('manual.asciidoc'))
manual = n.build(doc('manual.html'), 'xsltproc', xml,
                 implicit=doc('style.css'))
n.build('manual', 'phony',
        order_only=manual)
n.newline()
n.comment('Generate Doxygen.')
n.rule('doxygen',
       command='doxygen $in',
       description='DOXYGEN $in')
n.variable('doxygen_mainpage_generator',
           src('gen_doxygen_mainpage.sh'))
n.rule('doxygen_mainpage',
       command='$doxygen_mainpage_generator $in > $out',
       description='DOXYGEN_MAINPAGE $out')
mainpage = n.build(built('doxygen_mainpage'), 'doxygen_mainpage',
                   ['README', 'COPYING'],
                   implicit=['$doxygen_mainpage_generator'])
n.build('doxygen', 'doxygen', doc('doxygen.config'),
        implicit=mainpage)
n.newline()
if not host.is_mingw():
    n.comment('Regenerate build files if build script changes.')
    n.rule('configure',
           command='${configure_env}%s configure.py $configure_args' %
               options.with_python,
           generator=True)
    n.build('build.ninja', 'configure',
            implicit=['configure.py', os.path.normpath('misc/ninja_syntax.py')])
    n.newline()
n.default(ninja)
n.newline()
if host.is_linux():
    n.comment('Packaging')
    n.rule('rpmbuild',
           command="misc/packaging/rpmbuild.sh",
           description='Building rpms..')
    n.build('rpm', 'rpmbuild')
    n.newline()
n.build('all', 'phony', all_targets)
print('wrote %s.' % BUILD_FILENAME)
|
{
"content_hash": "17fc2d5bf4cd790e3691ea202d60d207",
"timestamp": "",
"source": "github",
"line_count": 433,
"max_line_length": 121,
"avg_line_length": 33.739030023094685,
"alnum_prop": 0.5835443904442467,
"repo_name": "pombredanne/ninja",
"id": "431b03e337654faf59978b39b97abd6e1856c603",
"size": "15230",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "configure.py",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""Openstack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback
from oslo.config import cfg
from six import moves
from heat.openstack.common.gettextutils import _ # noqa
from heat.openstack.common import importutils
from heat.openstack.common import jsonutils
from heat.openstack.common import local
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config',
metavar='PATH',
help='If this option is specified, the logging configuration '
'file specified is used and overrides any other logging '
'options specified. Please see the Python logging module '
'documentation for details on logging configuration '
'files.'),
cfg.StrOpt('log-format',
default=None,
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='syslog facility to receive log lines')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error')
]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user)s %(tenant)s] '
'%(instance)s%(message)s',
help='format string to use for log messages with context'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='format string to use for log messages without context'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='data to append to log format when level is DEBUG'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='prefix each line of exception output with this format'),
cfg.ListOpt('default_log_levels',
default=[
'amqplib=WARN',
'sqlalchemy=WARN',
'boto=WARN',
'suds=INFO',
'keystone=INFO',
'eventlet.wsgi.server=WARN'
],
help='list of logger=LEVEL pairs'),
cfg.BoolOpt('publish_errors',
default=False,
help='publish error events'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='make deprecations fatal'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='If an instance is passed with the log message, format '
'it like this'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='If an instance UUID is passed with the log message, '
'format it like this'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    # Minimal fallback handler for older Pythons: discards every record
    # and never locks (it performs no I/O).
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the log file destination from CONF, or None to mean stdout.

    CONF.log_file wins (joined onto CONF.log_dir when both are set); with
    only CONF.log_dir set, a <binary>.log file inside it is used.
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile:
        return os.path.join(logdir, logfile) if logdir else logfile
    if logdir:
        program = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, program),)
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter base that adds the synthesized AUDIT log level."""
    def audit(self, msg, *args, **kwargs):
        # Convenience wrapper: log at the module's custom AUDIT level.
        self.log(logging.AUDIT, msg, *args, **kwargs)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the real logger until first use."""
    def __init__(self, name='unknown', version='unknown'):
        # Real adapter is created lazily by the `logger` property.
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version
    @property
    def logger(self):
        # Build and cache the underlying adapter on first access.
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """LoggerAdapter that folds request-context data into each log record."""
    warn = logging.LoggerAdapter.warning
    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
    @property
    def handlers(self):
        # Expose the wrapped logger's handlers directly.
        return self.logger.handlers
    def deprecated(self, msg, *args, **kwargs):
        """Log a deprecation warning, or raise when fatal_deprecations is set."""
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        else:
            self.warn(stdmsg, *args, **kwargs)
    def process(self, msg, kwargs):
        """Merge context, instance and project info into the record extras."""
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        # No explicit context: fall back to the thread-local store, if any.
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        # Either a full instance or just a UUID may be supplied.
        instance = kwargs.pop('instance', None)
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        else:
            instance_uuid = kwargs.pop('instance_uuid', None)
            if instance_uuid:
                instance_extra = (CONF.instance_uuid_format
                                  % {'uuid': instance_uuid})
        extra.update({'instance': instance_extra})
        extra.update({"project": self.project})
        extra.update({"version": self.version})
        # Keep a snapshot copy under 'extra' (consumed by JSONFormatter).
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that renders each log record as a JSON document."""
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        # since logging.config.fileConfig passes it.
        self.datefmt = datefmt
    def formatException(self, ei, strip_newlines=True):
        """Return the traceback for *ei* as a list of strings.

        With strip_newlines (the default), each element is a single
        physical line with empty lines removed.
        """
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            # BUG FIX: itertools.ifilter does not exist on Python 3; the
            # built-in filter() behaves identically here on both 2.x and
            # 3.x (the results are flattened by chain() either way).
            lines = [filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines
    def format(self, record):
        """Serialize all interesting record attributes to a JSON string."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        # 'extra' is populated by ContextAdapter.process when present.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that routes uncaught exceptions to the log."""
    def logging_excepthook(exc_type, exc_value, exc_tb):
        extra = {}
        # Only attach the full traceback when verbose logging is enabled.
        if CONF.verbose:
            extra['exc_info'] = (exc_type, exc_value, exc_tb)
        getLogger(product_name).critical(str(exc_value), **extra)
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""
    message = _('Error loading logging config %(log_config)s: %(err_msg)s')
    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg
    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config):
    """Apply a logging config file, wrapping parse failures in LogConfigError."""
    try:
        logging.config.fileConfig(log_config)
    except moves.configparser.Error as exc:
        raise LogConfigError(log_config, str(exc))
def setup(product_name):
    """Setup logging."""
    # An explicit log-config file overrides all other logging options.
    if CONF.log_config:
        _load_log_config(CONF.log_config)
    else:
        _setup_logging_from_conf()
    # Route uncaught exceptions through the product's logger.
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string):
    """Override the default value of logging_context_format_string."""
    cfg.set_defaults(log_opts,
                     logging_context_format_string=
                     logging_context_format_string)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility value.

    Accepts either a SysLogHandler attribute name or a key of its
    facility_names table; raises TypeError listing valid names otherwise.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # BUG FIX: materialize keys() so extend() works on Python 3, where
        # dict.keys() returns a view without an extend method.
        valid_facilities = list(facility_names.keys())
        # BUG FIX: 'LOG_AUTH' was listed twice in the original constant list.
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
def _setup_logging_from_conf():
    """Configure the root logger's handlers, formatters and levels from CONF."""
    log_root = getLogger(None).logger
    # Start from a clean slate: drop any handlers installed earlier.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                facility=facility)
        log_root.addHandler(syslog)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        # NOTE(review): ColorHandler is defined elsewhere in this module.
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not CONF.log_file:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        handler = importutils.import_object(
            "heat.openstack.common.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)
    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently.  This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(datefmt=datefmt))
    # Root level: debug wins over verbose, which wins over the WARNING default.
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
    # Apply per-logger overrides ("name=LEVEL" pairs).
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        level = logging.getLevelName(level_name)
        logger = logging.getLogger(mod)
        logger.setLevel(level)
# Cache of ContextAdapter instances, keyed by logger name.
_loggers = {}


def getLogger(name='unknown', version='unknown'):
    """Return the cached ContextAdapter-wrapped logger for *name*.

    The first call for a given name fixes its version; later calls
    reuse the cached adapter.
    """
    adapter = _loggers.get(name)
    if adapter is None:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
    return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Return a lazy logger.

    The returned pass-through adapter defers creation of the real logger
    until it is first needed and forwards every call to it afterwards.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """File-like adapter that turns ``write()`` calls into log records."""

    def __init__(self, logger, level=logging.INFO):
        # Target logger and the level used for every emitted record.
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Each chunk handed to write() becomes one record at self.level.
        self.logger.log(self.level, msg)
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level
    is debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter
    """
    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # NOTE(sdague): default the fancier formating params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color'):
            if key not in record.__dict__:
                record.__dict__[key] = ''
        # A record carrying request_id came through a RequestContext:
        # use the context-aware format string for it.
        if record.__dict__.get('request_id', None):
            self._fmt = CONF.logging_context_format_string
        else:
            self._fmt = CONF.logging_default_format_string
        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            self._fmt += " " + CONF.logging_debug_format_suffix
        # Cache this on the record, Logger will respect our formated copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)
    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        # Only compute asctime when the prefix template actually uses it.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)
        formatted_lines = []
        # Prefix every traceback line so multi-line exceptions stay grouped
        # in the log output.
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """Stream handler that injects an ANSI color escape per record level.

    The active format string is expected to reference %(color)s; format()
    fills it with the escape code matching the record's level.
    """
    # NOTE(review): logging.AUDIT is not a stdlib level; it is presumably
    # registered earlier in this module -- confirm before reusing elsewhere.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }
    def format(self, record):
        # Stash the escape sequence on the record so %(color)s resolves.
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised when a fatally deprecated config option is used.

    :param msg: description of the deprecated option or value
    """
    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Fix: the original called super(Exception, self).__init__, which
        # skips Exception in the MRO; start the chain from this class.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
|
{
"content_hash": "0e8bb19ca2a155ee2b26922376640d8a",
"timestamp": "",
"source": "github",
"line_count": 540,
"max_line_length": 78,
"avg_line_length": 35.08148148148148,
"alnum_prop": 0.5892103040540541,
"repo_name": "savi-dev/heat",
"id": "94c85147ef077f3efd4078edeaac84caa7a98c1a",
"size": "19760",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "heat/openstack/common/log.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
class Figure:
    """A figure characterized solely by its perimeter.

    Supports int() (the perimeter itself) and str()
    ("make string: <perimeter>") conversions.
    """

    def __init__(self, perimeter=0):
        # Route through the setter so assignment logic lives in one place.
        self.setPerimeter(perimeter)

    def setPerimeter(self, perimeter):
        self.p = perimeter

    def __str__(self):
        return "make string: %s" % self.p

    def __int__(self):
        return self.p
# Demo: build a Figure with perimeter 37 and print its string form.
f = Figure(37)
print(f)
|
{
"content_hash": "f7c480d9a7e74acd6cb6f6a700c04ab8",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 44,
"avg_line_length": 19.8,
"alnum_prop": 0.569023569023569,
"repo_name": "dluschan/school",
"id": "3a11d1a0cacc432bf38196c74e5a806df9dd3d73",
"size": "297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oop/ex2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "9434"
},
{
"name": "Pascal",
"bytes": "22734"
},
{
"name": "Python",
"bytes": "7012"
},
{
"name": "QMake",
"bytes": "146"
}
],
"symlink_target": ""
}
|
"""Multiple DB API backend support.
A DB backend module should implement a method named 'get_backend' which
takes no arguments. The method can return any object that implements DB
API methods.
"""
import functools
import logging
import threading
import time
from neutron.openstack.common.db import exception
from neutron.openstack.common.gettextutils import _LE
from neutron.openstack.common import importutils
LOG = logging.getLogger(__name__)
def safe_for_db_retry(f):
    """Mark *f* as retry-enabled so DBAPI may wrap it with wrap_db_retry.

    The flag is only honored when `use_db_reconnect` is enabled in config.
    """
    setattr(f, 'enable_retry', True)
    return f
class wrap_db_retry(object):
    """Retry db.api methods, if DBConnectionError() raised.

    Retry decorated db.api methods. If we enabled `use_db_reconnect`
    in config, this decorator will be applied to all db.api functions,
    marked with @safe_for_db_retry decorator.

    Decorator catches DBConnectionError() and retries function in a
    loop until it succeeds, or until maximum retries count will be reached.

    :param retry_interval: seconds to wait before the first retry
    :param max_retries: give up after this many retries (-1 = retry forever)
    :param inc_retry_interval: double the wait after every failure if True
    :param max_retry_interval: upper bound on the (doubling) wait time
    """
    def __init__(self, retry_interval, max_retries, inc_retry_interval,
                 max_retry_interval):
        super(wrap_db_retry, self).__init__()
        self.retry_interval = retry_interval
        self.max_retries = max_retries
        self.inc_retry_interval = inc_retry_interval
        self.max_retry_interval = max_retry_interval
    def __call__(self, f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            next_interval = self.retry_interval
            remaining = self.max_retries
            while True:
                try:
                    return f(*args, **kwargs)
                except exception.DBConnectionError as e:
                    if remaining == 0:
                        LOG.exception(_LE('DB exceeded retry limit.'))
                        raise exception.DBError(e)
                    if remaining != -1:
                        # -1 means retry forever; otherwise count down.
                        remaining -= 1
                    LOG.exception(_LE('DB connection error.'))
                    # NOTE(vsergeyev): We are using patched time module, so
                    #                  this effectively yields the execution
                    #                  context to another green thread.
                    time.sleep(next_interval)
                    if self.inc_retry_interval:
                        # Exponential backoff capped at max_retry_interval.
                        next_interval = min(
                            next_interval * 2,
                            self.max_retry_interval
                        )
        return wrapper
class DBAPI(object):
    """Facade that loads a DB backend module and proxies calls to it."""
    def __init__(self, backend_name, backend_mapping=None, lazy=False,
                 **kwargs):
        """Initialize the chosen DB API backend.

        :param backend_name: name of the backend to load
        :type backend_name: str

        :param backend_mapping: backend name -> module/class to load mapping
        :type backend_mapping: dict

        :param lazy: load the DB backend lazily on the first DB API method call
        :type lazy: bool

        Keyword arguments:

        :keyword use_db_reconnect: retry DB transactions on disconnect or not
        :type use_db_reconnect: bool

        :keyword retry_interval: seconds between transaction retries
        :type retry_interval: int

        :keyword inc_retry_interval: increase retry interval or not
        :type inc_retry_interval: bool

        :keyword max_retry_interval: max interval value between retries
        :type max_retry_interval: int

        :keyword max_retries: max number of retries before an error is raised
        :type max_retries: int
        """
        self._backend = None
        self._backend_name = backend_name
        self._backend_mapping = backend_mapping or {}
        # Serializes backend loading between threads (see _load_backend).
        self._lock = threading.Lock()
        if not lazy:
            self._load_backend()
        self.use_db_reconnect = kwargs.get('use_db_reconnect', False)
        self.retry_interval = kwargs.get('retry_interval', 1)
        self.inc_retry_interval = kwargs.get('inc_retry_interval', True)
        self.max_retry_interval = kwargs.get('max_retry_interval', 10)
        self.max_retries = kwargs.get('max_retries', 20)
    def _load_backend(self):
        # Import the backend exactly once, even with concurrent callers.
        with self._lock:
            if not self._backend:
                # Import the untranslated name if we don't have a mapping
                backend_path = self._backend_mapping.get(self._backend_name,
                                                         self._backend_name)
                backend_mod = importutils.import_module(backend_path)
                self._backend = backend_mod.get_backend()
    def __getattr__(self, key):
        # Only reached for names not found on the instance itself, i.e.
        # backend attributes; triggers lazy loading on first use.
        if not self._backend:
            self._load_backend()
        attr = getattr(self._backend, key)
        if not hasattr(attr, '__call__'):
            return attr
        # NOTE(vsergeyev): If `use_db_reconnect` option is set to True, retry
        #                  DB API methods, decorated with @safe_for_db_retry
        #                  on disconnect.
        if self.use_db_reconnect and hasattr(attr, 'enable_retry'):
            attr = wrap_db_retry(
                retry_interval=self.retry_interval,
                max_retries=self.max_retries,
                inc_retry_interval=self.inc_retry_interval,
                max_retry_interval=self.max_retry_interval)(attr)
        return attr
|
{
"content_hash": "f4c618f0211f613ffa9c4e27dbda7d44",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 79,
"avg_line_length": 36.40816326530612,
"alnum_prop": 0.5866965620328849,
"repo_name": "gopal1cloud/neutron",
"id": "7f71d6a2b6812934914e07da445df10d2370b828",
"size": "5989",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "neutron/openstack/common/db/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1451"
},
{
"name": "Python",
"bytes": "9138456"
},
{
"name": "Shell",
"bytes": "9202"
}
],
"symlink_target": ""
}
|
from domino._impl.custommetrics.paths.api_metric_values_v1.post import ApiForpost
class ApiMetricValuesV1(
    ApiForpost,
):
    """Aggregate endpoint class for the metric-values v1 path (POST only)."""
    pass
|
{
"content_hash": "b9497a470609b652653127d2698fc242",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 81,
"avg_line_length": 19.571428571428573,
"alnum_prop": 0.7664233576642335,
"repo_name": "dominodatalab/python-domino",
"id": "901637fbdf54f0b63a9779c886830892db1f2bea",
"size": "137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "domino/_impl/custommetrics/apis/paths/api_metric_values_v1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "518781"
},
{
"name": "Shell",
"bytes": "142"
}
],
"symlink_target": ""
}
|
import sys
import re
import string
"""Baby Names exercise
Define the extract_names() function below and change main()
to call it.
For writing regex, it's nice to include a copy of the target
text for inspiration.
Here's what the html looks like in the baby.html files:
...
<h3 align="center">Popularity in 1990</h3>
....
<tr align="right"><td>1</td><td>Michael</td><td>Jessica</td>
<tr align="right"><td>2</td><td>Christopher</td><td>Ashley</td>
<tr align="right"><td>3</td><td>Matthew</td><td>Brittany</td>
...
Suggested milestones for incremental development:
-Extract the year and print it
-Extract the names and rank numbers and just print them
-Get the names data into a dict and print it
-Build the [year, 'name rank', ... ] list and print it
-Fix main() to use the extract_names list
"""
def extract_names(filename):
    """Parse a baby.html file into [year, 'Name rank', ...], sorted.

    The returned list starts with the year string followed by 'name rank'
    entries in alphabetical order, e.g. ['2006', 'Aaliyah 91', 'Aaron 57', ...].
    """
    f = open(filename)
    try:
        html = f.read()
    finally:
        f.close()
    # The year lives inside an <h2>/<h3> heading.
    year_match = re.search(r"<h\d\D+(\d+)</h\d>", html)
    # Each table row yields a (rank, boy_name, girl_name) triple.
    row_pattern = r"<[\w\s]+=\"\w+\"><\w+>(\d+)</td><td>(\w+)</td><td>(\w+)</td>"
    name_to_rank = {}
    for rank, boy, girl in re.findall(row_pattern, html):
        # Keep only the first (best) rank seen for any given name.
        name_to_rank.setdefault(boy, rank)
        name_to_rank.setdefault(girl, rank)
    entries = ["%s %s" % (name, rank) for name, rank in name_to_rank.items()]
    return sorted([year_match.group(1)] + entries)
def main():
    """Command-line driver: print or summarize baby-name data per file.

    Usage: [--summaryfile] file [file ...]; with --summaryfile the output
    is written next to each input as '<input>.summary' instead of printed.
    """
    # This command-line parsing code is provided.
    # Make a list of command line arguments, omitting the [0] element
    # which is the script itself.
    args = sys.argv[1:]
    if not args:
        print 'usage: [--summaryfile] file [file ...]'
        sys.exit(1)
    # Notice the summary flag and remove it from args if it is present.
    summary = False
    if args[0] == '--summaryfile':
        summary = True
        del args[0]
    # For each filename, get the names, then either print the text output
    # or write it to a summary file
    for filename in args:
        names_list = extract_names(filename)
        formatted_list = "\n".join(names_list) + "\n"
        if summary:
            # Write the '<input>.summary' file beside the original.
            with open(filename + ".summary", "w") as f:
                f.write(formatted_list)
        else:
            print formatted_list
|
{
"content_hash": "2d024a9f258c2350830c872a5ae94893",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 95,
"avg_line_length": 30.455555555555556,
"alnum_prop": 0.6446552353155782,
"repo_name": "brebory/google-python-exercises",
"id": "0c2fc48303daeb09ccb2bffdb44a2dd6a129b8e7",
"size": "2968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "babynames/babynames.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "55197"
}
],
"symlink_target": ""
}
|
import getdns, sys
# Round-trip a few IDN labels between ACE ("xn--") and Unicode forms.
try:
    ulabel = getdns.alabel_to_ulabel('xn--p1acf')
    # Next line contains a utf-8 string
    alabel = getdns.ulabel_to_alabel('рус')
    ulabel1 = getdns.alabel_to_ulabel('xn--vermgensberatung-pwb')
    # Next line contains a utf-8 string
    alabel1 = getdns.ulabel_to_alabel('vermögensberatung')
except getdns.error as e:
    # Any conversion failure aborts the demo with the library's message.
    print(str(e))
    sys.exit(1)
print (ulabel)
print (alabel)
print (ulabel1)
print (alabel1)
|
{
"content_hash": "44cbcee23d7027b98b14ef3a71e75a78",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 65,
"avg_line_length": 26.294117647058822,
"alnum_prop": 0.6912751677852349,
"repo_name": "getdnsapi/getdns-python-bindings",
"id": "719ae5c3535452d6568a0387f8c24639de954f21",
"size": "489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/idn.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "167399"
},
{
"name": "Python",
"bytes": "12834"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from functools import wraps
from indico.core.settings import SettingsProxyBase
from indico.core.settings.util import get_all_settings, get_setting
from indico.modules.categories import Category
from indico.modules.categories.models.settings import CategorySetting
def _category_or_id(f):
    """Decorator normalizing the ``category`` argument to a plain int id."""
    @wraps(f)
    def wrapper(self, category, *args, **kwargs):
        category_id = category.id if isinstance(category, Category) else category
        return f(self, int(category_id), *args, **kwargs)
    return wrapper
class CategorySettingsProxy(SettingsProxyBase):
    """Proxy class to access category-specific settings for a certain module."""

    @property
    def query(self):
        """Return a query object filtering by the proxy's module."""
        return CategorySetting.find(module=self.module)

    @_category_or_id
    def get_all(self, category, no_defaults=False):
        """Retrieve all settings.

        :param category: Category (or its ID)
        :param no_defaults: Only return existing settings and ignore defaults.
        :return: Dict containing the settings
        """
        return get_all_settings(CategorySetting, None, self, no_defaults, category_id=category)

    @_category_or_id
    def get(self, category, name, default=SettingsProxyBase.default_sentinel):
        """Retrieve the value of a single setting.

        :param category: Category (or its ID)
        :param name: Setting name
        :param default: Default value in case the setting does not exist
        :return: The settings's value or the default value
        """
        self._check_name(name)
        return get_setting(CategorySetting, self, name, default, self._cache, category_id=category)

    @_category_or_id
    def set(self, category, name, value):
        """Set a single setting.

        :param category: Category (or its ID)
        :param name: Setting name
        :param value: Setting value; must be JSON-serializable
        """
        self._check_name(name)
        CategorySetting.set(self.module, name, self._convert_from_python(name, value), category_id=category)
        self._flush_cache()

    @_category_or_id
    def set_multi(self, category, items):
        """Set multiple settings at once.

        :param category: Category (or its ID)
        :param items: Dict containing the new settings
        """
        items = {k: self._convert_from_python(k, v) for k, v in items.iteritems()}
        CategorySetting.set_multi(self.module, items, category_id=category)
        self._flush_cache()

    @_category_or_id
    def delete(self, category, *names):
        """Delete settings.

        :param category: Category (or its ID)
        :param names: One or more names of settings to delete
        """
        # BUG FIX: the previous code looped over `names` and unpacked each
        # *string* with `*name`, which passed individual characters to
        # CategorySetting.delete. Pass the full tuple of names instead.
        CategorySetting.delete(self.module, *names, category_id=category)
        self._flush_cache()

    @_category_or_id
    def delete_all(self, category):
        """Delete all settings.

        :param category: Category (or its ID)
        """
        CategorySetting.delete_all(self.module, category_id=category)
        self._flush_cache()
|
{
"content_hash": "f4a5d3b283db69a437c5b563d1142643",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 108,
"avg_line_length": 34.29347826086956,
"alnum_prop": 0.6532488114104595,
"repo_name": "mvidalgarcia/indico",
"id": "d28f86d114df4216af0b35149e885889fa75946e",
"size": "3369",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "indico/modules/categories/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "538590"
},
{
"name": "HTML",
"bytes": "1345380"
},
{
"name": "JavaScript",
"bytes": "1781971"
},
{
"name": "Mako",
"bytes": "1340"
},
{
"name": "Python",
"bytes": "4381847"
},
{
"name": "Shell",
"bytes": "3568"
},
{
"name": "TeX",
"bytes": "22182"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
}
|
import httplib
import json
import logging
from os import environ
from oauth2_client.credentials_manager import CredentialManager, ServiceInformation
from orangecloud_client import URL_API
from orangecloud_client.files import Files
from orangecloud_client.folders import Folders
from orangecloud_client.freespace import Freespace
_logger = logging.getLogger(__name__)
logging.getLogger("requests").setLevel(logging.WARNING)
class InvalidStatusCode(Exception):
    """Raised when the cloud API answers with an unexpected HTTP status.

    str() renders '<status>' alone for a missing body, '<status> : <text>'
    for string bodies, and '<status> : <json>' for structured bodies.
    """

    def __init__(self, status_code, body):
        self.status_code = status_code
        self.body = body

    def __str__(self):
        if self.body is None:
            return '%d' % self.status_code
        if type(self.body) == str:
            return '%d : %s' % (self.status_code, self.body)
        return '%d : %s' % (self.status_code, json.dumps(self.body))
class ApiManager(CredentialManager):
    """"
    Implementation of the OAUTH2 client according to the recommendations here: https://developer.orange.com/apis/cloud-france/api-reference
    """
    SCOPES = ['openid', 'cloud']

    def __init__(self, client_id, client_secret, redirect_uri):
        """Build the OAUTH2 service description and the API sub-clients.

        :param client_id: application client id
        :param client_secret: application client secret
        :param redirect_uri: redirect URI registered for the application
        """
        # Honor the usual proxy environment variables.
        proxies = dict(http=environ.get('HTTP_PROXY', ''), https=environ.get('HTTPS_PROXY', ''))
        super(ApiManager, self).__init__(
            ServiceInformation(
                '%s/oauth/v2/authorize' % URL_API,
                '%s/oauth/v2/token' % URL_API,
                client_id=client_id,
                client_secret=client_secret,
                scopes=ApiManager.SCOPES,
                skip_ssl_verifications=False),
            proxies)
        self.folders = Folders(self)
        self.freespace = Freespace(self)
        self.files = Files(self)
        self.redirect_uri = redirect_uri

    @staticmethod
    def _is_token_expired(response):
        """Return True when *response* is a 401 caused by expired credentials."""
        if response.status_code != httplib.UNAUTHORIZED:
            return False
        try:
            json_data = response.json()
        # BUG FIX: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only a JSON decoding problem should be tolerated.
        except ValueError:
            return False
        return json_data.get('message', '') == 'Invalid credentials'
|
{
"content_hash": "d53eda365d23515f97ecac363cf37c99",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 139,
"avg_line_length": 33.184615384615384,
"alnum_prop": 0.6184515530829856,
"repo_name": "antechrestos/orangecloud-client",
"id": "7205ab179a76b051405f86d5b7b08406cb7fffdf",
"size": "2157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/orangecloud_client/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "58060"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.db import models
from django.db import connection, DEFAULT_DB_ALIAS
class Square(models.Model):
    """Maps an integer root to its square; exercised via raw SQL below."""
    root = models.IntegerField()
    square = models.PositiveIntegerField()
    def __unicode__(self):
        return "%s ** 2 == %s" % (self.root, self.square)
class Person(models.Model):
    """Minimal two-field model used for cursor fetch/unicode tests."""
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)
    def __unicode__(self):
        return u'%s %s' % (self.first_name, self.last_name)
class SchoolClass(models.Model):
    """Simple model with an optional char field and a timestamp."""
    year = models.PositiveIntegerField()
    day = models.CharField(max_length=9, blank=True)
    last_updated = models.DateTimeField()
# Unfortunately, the following model breaks MySQL hard.
# Until #13711 is fixed, this test can't be run under MySQL.
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
    class VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ(models.Model):
        """Model whose identifiers exceed common backend name-length limits."""
        class Meta:
            # We need to use a short actual table name or
            # we hit issue #8548 which we're not testing!
            verbose_name = 'model_with_long_table_name'
        primary_key_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.AutoField(primary_key=True)
        charfield_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.CharField(max_length=100)
        m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.ManyToManyField(Person,blank=True)
# Shorthand for the active backend's identifier-quoting function.
qn = connection.ops.quote_name
__test__ = {'API_TESTS': """
#4896: Test cursor.executemany
>>> from django.db import connection
>>> cursor = connection.cursor()
>>> opts = Square._meta
>>> f1, f2 = opts.get_field('root'), opts.get_field('square')
>>> query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)'
... % (connection.introspection.table_name_converter(opts.db_table), qn(f1.column), qn(f2.column)))
>>> cursor.executemany(query, [(i, i**2) for i in range(-5, 6)]) and None or None
>>> Square.objects.order_by('root')
[<Square: -5 ** 2 == 25>, <Square: -4 ** 2 == 16>, <Square: -3 ** 2 == 9>, <Square: -2 ** 2 == 4>, <Square: -1 ** 2 == 1>, <Square: 0 ** 2 == 0>, <Square: 1 ** 2 == 1>, <Square: 2 ** 2 == 4>, <Square: 3 ** 2 == 9>, <Square: 4 ** 2 == 16>, <Square: 5 ** 2 == 25>]
#4765: executemany with params=[] does nothing
>>> cursor.executemany(query, []) and None or None
>>> Square.objects.count()
11
#6254: fetchone, fetchmany, fetchall return strings as unicode objects
>>> Person(first_name="John", last_name="Doe").save()
>>> Person(first_name="Jane", last_name="Doe").save()
>>> Person(first_name="Mary", last_name="Agnelline").save()
>>> Person(first_name="Peter", last_name="Parker").save()
>>> Person(first_name="Clark", last_name="Kent").save()
>>> opts2 = Person._meta
>>> f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
>>> query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
... % (qn(f3.column), qn(f4.column), connection.introspection.table_name_converter(opts2.db_table),
... qn(f3.column)))
>>> cursor.execute(query2) and None or None
>>> cursor.fetchone()
(u'Clark', u'Kent')
>>> list(cursor.fetchmany(2))
[(u'Jane', u'Doe'), (u'John', u'Doe')]
>>> list(cursor.fetchall())
[(u'Mary', u'Agnelline'), (u'Peter', u'Parker')]
"""}
|
{
"content_hash": "fb5acab88ce7b52b4b2ec072db64c481",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 262,
"avg_line_length": 40.97530864197531,
"alnum_prop": 0.653510093401627,
"repo_name": "Smarsh/django",
"id": "e3137f2710c4423496404196040a68b81b40e6ea",
"size": "3319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/regressiontests/backends/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "37584"
},
{
"name": "HTML",
"bytes": "86821"
},
{
"name": "JavaScript",
"bytes": "91586"
},
{
"name": "Python",
"bytes": "5721639"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
}
|
from .. import Variable
from ..conventions import cf_encoder
from ..core import indexing
from ..core.utils import FrozenOrderedDict, close_on_error, Frozen
from ..core.pycompat import iteritems, bytes_type, unicode_type, OrderedDict
from .common import AbstractWritableDataStore
from .netCDF4_ import _nc4_group, _nc4_values_and_dtype
def maybe_decode_bytes(txt):
    """Decode *txt* from UTF-8 when it is a bytes object; pass through otherwise."""
    return txt.decode('utf-8') if isinstance(txt, bytes_type) else txt
def _read_attributes(h5netcdf_var):
    """Collect a variable's netCDF attributes into an OrderedDict.

    GH451: to keep conventions decoding working on Python 3, every bytes
    attribute is decoded to str, except the fill/missing-value markers
    which are left untouched.
    """
    attrs = OrderedDict()
    for attr_name in h5netcdf_var.ncattrs():
        value = h5netcdf_var.getncattr(attr_name)
        if attr_name in ('_FillValue', 'missing_value'):
            attrs[attr_name] = value
        else:
            attrs[attr_name] = maybe_decode_bytes(value)
    return attrs
class H5NetCDFStore(AbstractWritableDataStore):
    """Store for reading and writing data via h5netcdf
    """
    def __init__(self, filename, mode='r', format=None, group=None,
                 writer=None):
        import h5netcdf.legacyapi
        # h5netcdf is HDF5-backed, so only the NETCDF4 format makes sense.
        if format not in [None, 'NETCDF4']:
            raise ValueError('invalid format for h5netcdf backend')
        ds = h5netcdf.legacyapi.Dataset(filename, mode=mode)
        with close_on_error(ds):
            self.ds = _nc4_group(ds, group, mode)
        self.format = format
        self._filename = filename
        super(H5NetCDFStore, self).__init__(writer)
    def store(self, variables, attributes):
        # All NetCDF files get CF encoded by default, without this attempting
        # to write times, for example, would fail.
        cf_variables, cf_attrs = cf_encoder(variables, attributes)
        AbstractWritableDataStore.store(self, cf_variables, cf_attrs)
    def open_store_variable(self, var):
        """Wrap an h5netcdf variable as a lazily-indexed Variable."""
        dimensions = var.dimensions
        data = indexing.LazilyIndexedArray(var)
        attrs = _read_attributes(var)
        # netCDF4 specific encoding
        encoding = dict(var.filters())
        chunking = var.chunking()
        encoding['chunksizes'] = chunking if chunking != 'contiguous' else None
        # save source so __repr__ can detect if it's local or not
        encoding['source'] = self._filename
        return Variable(dimensions, data, attrs, encoding)
    def get_variables(self):
        return FrozenOrderedDict((k, self.open_store_variable(v))
                                 for k, v in iteritems(self.ds.variables))
    def get_attrs(self):
        return Frozen(_read_attributes(self.ds))
    def get_dimensions(self):
        return self.ds.dimensions
    def set_dimension(self, name, length):
        self.ds.createDimension(name, size=length)
    def set_attribute(self, key, value):
        self.ds.setncattr(key, value)
    def prepare_variable(self, name, variable):
        """Create the backing nc4 variable; returns (nc4_var, data)."""
        import h5py
        attrs = variable.attrs.copy()
        variable, dtype = _nc4_values_and_dtype(variable)
        if dtype is str:
            # Variable-length unicode strings need an h5py special dtype.
            dtype = h5py.special_dtype(vlen=unicode_type)
        self.set_necessary_dimensions(variable)
        fill_value = attrs.pop('_FillValue', None)
        # NOTE(review): '\x00' appears to be treated as "no explicit fill
        # value"; confirm against h5netcdf's defaults before changing.
        if fill_value in ['\x00']:
            fill_value = None
        encoding = variable.encoding
        # Forward only the compression/chunking options nc4 understands.
        kwargs = {}
        for key in ['zlib', 'complevel', 'shuffle',
                    'chunksizes', 'fletcher32']:
            if key in encoding:
                kwargs[key] = encoding[key]
        nc4_var = self.ds.createVariable(name, dtype, variable.dims,
                                         fill_value=fill_value, **kwargs)
        for k, v in iteritems(attrs):
            nc4_var.setncattr(k, v)
        return nc4_var, variable.data
    def sync(self):
        super(H5NetCDFStore, self).sync()
        self.ds.sync()
    def close(self):
        ds = self.ds
        # netCDF4 only allows closing the root group
        while ds.parent is not None:
            ds = ds.parent
        ds.close()
|
{
"content_hash": "502f8dcd2409c0742e12db257628f4f5",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 79,
"avg_line_length": 32.79338842975206,
"alnum_prop": 0.6171875,
"repo_name": "kjordahl/xray",
"id": "a33d8f9ed0b3be3b74ce37cae217f20b3685f872",
"size": "4020",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xray/backends/h5netcdf_.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "2986"
},
{
"name": "Python",
"bytes": "684347"
},
{
"name": "Shell",
"bytes": "161"
}
],
"symlink_target": ""
}
|
from datetime import date
class EntityTable(object):
    """
    Stores entities that are later read by the Interpreter. During parsing,
    entities are set based on their given tags from the Seq2Seq model and are
    stored in this class.

    Iterating over an EntityTable yields (key, value) pairs for every entity
    whose value is not None.
    """
    def __init__(self):
        """Create an EntityTable and give every entity its default value."""
        self._entity_dict = {
            'stats': [],                       # accumulated via add_stat
            'playoff_rd': 0,                   # default is regular season
            'start_date': date(1946, 11, 8),   # first BAA/NBA game
            'end_date': date.today(),
            'player_name': None,
            'game_won': None,
            'home_game': None,
            'started_game': None,
            'played_for': None,
            'played_against': None,
        }

    def __iter__(self):
        # Skip unset (None) entities so consumers only see real values.
        for key, value in self._entity_dict.items():
            if value is not None:
                yield key, value

    def get_stats(self):
        """Return the list of stat entities collected so far."""
        return self._entity_dict['stats']

    def add_stat(self, stat_entity):
        """Append *stat_entity* to the stats list."""
        self._entity_dict['stats'].append(stat_entity)

    # Getter and setter properties for the remaining entity_dict pairs.
    # FIX: the original defined the 'started_game' property twice with
    # identical bodies; the redundant duplicate has been removed.
    @property
    def player_name(self):
        return self._entity_dict['player_name']

    @player_name.setter
    def player_name(self, value):
        self._entity_dict['player_name'] = value

    @property
    def playoff_rd(self):
        return self._entity_dict['playoff_rd']

    @playoff_rd.setter
    def playoff_rd(self, value):
        self._entity_dict['playoff_rd'] = value

    @property
    def start_date(self):
        return self._entity_dict['start_date']

    @start_date.setter
    def start_date(self, value):
        self._entity_dict['start_date'] = value

    @property
    def end_date(self):
        return self._entity_dict['end_date']

    @end_date.setter
    def end_date(self, value):
        self._entity_dict['end_date'] = value

    @property
    def home_game(self):
        return self._entity_dict['home_game']

    @home_game.setter
    def home_game(self, value):
        self._entity_dict['home_game'] = value

    @property
    def started_game(self):
        return self._entity_dict['started_game']

    @started_game.setter
    def started_game(self, value):
        self._entity_dict['started_game'] = value

    @property
    def game_won(self):
        return self._entity_dict['game_won']

    @game_won.setter
    def game_won(self, value):
        self._entity_dict['game_won'] = value

    @property
    def played_for(self):
        return self._entity_dict['played_for']

    @played_for.setter
    def played_for(self, value):
        self._entity_dict['played_for'] = value

    @property
    def played_against(self):
        return self._entity_dict['played_against']

    @played_against.setter
    def played_against(self, value):
        self._entity_dict['played_against'] = value
|
{
"content_hash": "802d3c11f2df997444b0ec0f9ca07965",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 81,
"avg_line_length": 22.854166666666668,
"alnum_prop": 0.5986022485566697,
"repo_name": "furioustiles/HooperHub",
"id": "cc6b0d96e06d27802a6e65aceb8cab137e96a632",
"size": "3291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hooperhub/util/entity_table.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51791"
}
],
"symlink_target": ""
}
|
import tarfile
import sys
# List the contents of the built sdist for the version given on argv[1].
version = sys.argv[1]

# Fix: use a context manager so the archive is closed even if listing fails
# (the original relied on an explicit close() that a raised exception skips).
with tarfile.open(f"dist/bioscrape-{version}.tar.gz") as tar:
    for member in tar.getmembers():
        print(member)
|
{
"content_hash": "0a806a11a41bf74a9577d08af731f23b",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 54,
"avg_line_length": 23.571428571428573,
"alnum_prop": 0.7272727272727273,
"repo_name": "ananswam/bioscrape",
"id": "5a6a8178025fba628f70cf55478aa7bda444a5f2",
"size": "165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unzip_dist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1056816"
},
{
"name": "MATLAB",
"bytes": "3685"
},
{
"name": "Python",
"bytes": "434232"
}
],
"symlink_target": ""
}
|
import sqlite3
import sys
from ..dependency import Dependency
from .. import DbConnection
from .documenttablecreator import DocumentTableCreator
class DocumentManager(Dependency):
    """Manages Document rows and hands out unique document ids.

    On construction the required tables are created via
    :func:`recommender.document.DocumentTableCreator.init_database`.

    :param database_manager: instance of a DatabaseManager
    :type database_manager: recommender.DatabaseManager
    """

    def __init__(self, database_manager):
        super(DocumentManager, self).__init__(database_manager)
        self._db_connection_str = database_manager.get_db_connection_str()
        table_creator = DocumentTableCreator(self._db_connection_str)
        table_creator.init_database()
        # May be assigned (e.g. by tests) to reuse one shared connection.
        self.temp_conn = None

    def _get_db_connection(self):
        """Return the shared connection when set, else open a fresh one."""
        if self.temp_conn is None:
            return DbConnection(self._db_connection_str)
        return self.temp_conn

    def build_dependencies(self):
        """There are no dependencies that have to be built.

        Inherited from :class:`recommender.Dependencies`
        """
        pass

    def get_new_document_id(self):
        """Create a new unique document_id by storing it in the database.

        :returns: int -- the freshly allocated document id
        """
        with self._get_db_connection() as conn:
            try:
                self._add_document(conn)
                doc_id = self._get_latest_document_id(conn)
            except Exception:
                # Roll back the half-done insert, then re-raise the original
                # error (the old code wrapped it in a bare Exception, losing
                # the type and traceback).
                conn.rollback()
                raise
            else:
                conn.commit()
        return doc_id

    def _add_document(self, conn):
        """Insert a new Document row so the backend assigns a unique id.

        The id can be queried by
        :func:`recommender.document.DocumentManager._get_latest_document_id`.
        """
        c = conn.cursor()
        c.execute(
            '''
            INSERT INTO Document
            VALUES (null)
            ;
            '''
        )

    def _get_latest_document_id(self, conn):
        """Query the highest (most recently inserted) document id.

        :returns: int representing a new document, or None if table is empty
        """
        c = conn.cursor()
        c.execute(
            '''
            SELECT
                MAX(document_id)
            FROM
                Document
            ;
            '''
        )
        result = c.fetchone()
        return None if result is None else result[0]

    def has_document(self, document_id):
        """Check whether *document_id* is already in use.

        :param document_id: int representing a document
        :type document_id: int
        :returns: bool -- True, if the document does exist
        """
        with self._get_db_connection() as conn:
            # Fixes two bugs: 'curosor' typo (AttributeError at runtime) and
            # a missing WHERE clause that made this return True whenever ANY
            # document existed, regardless of the requested id.
            c = conn.cursor()
            c.execute(
                '''
                SELECT
                    document_id
                FROM
                    Document
                WHERE
                    document_id = ?
                ;
                ''',
                (document_id,)
            )
            return c.fetchone() is not None
|
{
"content_hash": "e03fa46dc57ea2b2d3dbd9008c489eb6",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 93,
"avg_line_length": 28.25,
"alnum_prop": 0.5470337594231399,
"repo_name": "dustywind/bachelor-thesis",
"id": "380169987052992a3fdd839f2fb7c96b87c6b61b",
"size": "3052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "impl/recommender/document/documentmanager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2554"
},
{
"name": "HTML",
"bytes": "4363"
},
{
"name": "JavaScript",
"bytes": "26943"
},
{
"name": "Makefile",
"bytes": "10588"
},
{
"name": "Python",
"bytes": "109888"
},
{
"name": "Shell",
"bytes": "3012"
},
{
"name": "TeX",
"bytes": "163624"
}
],
"symlink_target": ""
}
|
from txgoogleapi.unauth_requester import UnauthRequester
from txgoogleapi.api_key_requester import ApiKeyRequester
from txgoogleapi.oauth_requester import OAuthRequester
from txgoogleapi.api import Google
|
{
"content_hash": "d4871091f223b6c85a7f791e26fdc834",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 57,
"avg_line_length": 51.25,
"alnum_prop": 0.8829268292682927,
"repo_name": "iksteen/txgoogleapi",
"id": "4b6479af2e0c85b7893750c3717a27c0259653be",
"size": "205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "txgoogleapi/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15836"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.