from ...imports import *
class PlottableAxis:
    '''
    General class definition for a plottable axis,
    which can appear as either an x-axis or a y-axis.

    The standard items to include in the definition
    of a plottable object are:

    source = Generally a string, indicating a
             retrievable population attribute,
             which will be used as the value
             to plot. The value retrieval can
             also be overridden by defining
             a .value() method, if the desired
             values are more complicated than
             a simple attribute/property (e.g.
             if it depends on an input, like
             wavelength).

    label = Human-friendly string describing
            this axis, which will appear as
            its label in the plot.

    scale = String ('linear', 'log', ...) to
            indicate what scale should be
            used for this axis.

    lim = Tuple to indicate (lower, upper)
          limits for the plot. These might
          also be used to set the vmin and
          vmax of a color map, if this
          plottable is being used to color
          points in a BubblePanel.

    size_normalization = (optional) What to multiply
                         the values by to convert them
                         into sizes for a scatter plot.
    '''
    scale = 'log'
    lim = [None, None]
    size_normalization = 1
def __init__(self, panel=None,
orientation=None,
**kw):
'''
Initialize a plottable axis, connecting it
to some parent panel that will handle all
of the population cycling and building.
'''
self.panel = panel
self.orientation = orientation
self.kw = kw
def __call__(self, panel=None, orientation=None, **kw):
        '''
        If an existing instance is called as though it
        were being initialized, return a copy of it
        connected to the appropriate panel.
        '''
new_instance = copy.deepcopy(self)
new_instance.panel = panel
new_instance.orientation = orientation
return new_instance
def __repr__(self):
return f"<Plottable | {self.label}>".replace('\n', ' ')
def value(self):
        '''
        Extract the values for this plottable axis.
        By default, this uses the string in `source`
        to pull an attribute from a population.

        Override this method for values that require
        a more complicated function call.
        '''
return getattr(self.panel.pop, self.source)
def value_lowerupper(self):
'''
Extract the upper and lower uncertainties
for this plottable axis. This function
will likely need to be overwritten for
any attribute that doesn't directly have
an uncertainty defined inside the population.
'''
try:
ul = self.panel.pop.uncertainty_lowerupper(self.source)
return ul
except AtlasError:
sigma = self.value()*0.0
return sigma, sigma
def clean_axis(initial):
    '''
    Make sure the axis initializer is a PlottableAxis
    class definition.

    Parameters
    ----------
    initial : None, str, PlottableAxis class definition, or PlottableAxis instance

    Returns
    -------
    axis : None, PlottableAxis class definition, or PlottableAxis instance
    '''
if initial is None:
# pass through None so panel can use its own axis
return None
elif type(initial) is type:
# pass through an actual PlottableAxis definition
return initial
elif isinstance(initial, PlottableAxis):
# pass through an actual PlottableAxis object
return initial
elif type(initial) is str:
# create a temporary PlottableAxis from this string
class GenericPlottableAxis(PlottableAxis):
source = initial
label = initial
scale = 'log'
lim = [None, None]
return GenericPlottableAxis
else:
# complain otherwise
        raise ValueError(f'''
        It's not clear how to turn {initial} into
        a definition for a PlottableAxis.
        ''')
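# Usage sketch (the attribute name 'radius' is hypothetical, for
# illustration only): a bare string is promoted to a one-off definition.
#
#     axis_definition = clean_axis('radius')    # -> GenericPlottableAxis subclass
#     axis = axis_definition(panel=some_panel, orientation='x')
#     values = axis.value()                      # pulls some_panel.pop.radius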
|
import pdb
import os
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import functools
from glob import glob
from tensorflow.contrib.data import TFRecordDataset
from tensorflow.contrib.ffmpeg import decode_audio
NUM_CLASSES = len(np.load('data/classes.npy'))
identity = lambda x: x
def amplitude_to_db(S):
    magnitude = tf.abs(S)
    ref_value = tf.pow(tf.reduce_max(magnitude), 2)
    magnitude = tf.pow(magnitude, 2)
    # decibels need log10; tf.log is the natural log, so divide by ln(10)
    def log10(x):
        return tf.log(x) / tf.log(10.0)
    log_spec = tf.multiply(10.0, log10(tf.maximum(1e-10, magnitude)))
    log_spec = tf.subtract(log_spec, tf.multiply(10.0, log10(tf.maximum(1e-10, ref_value))))
    return log_spec
def audio_paths_and_labels(folder, pattern="*.wav"):
classes = [path.split("/")[-1] for path in glob(os.path.join(folder, "*"))]
filenames = glob(os.path.join(folder, "**", pattern))
labels = [classes.index(filename.split("/")[-2]) for filename in filenames]
return np.asarray(filenames), np.asarray(labels)
def read_audio(audio):
    # reshape one 3-second, 44.1 kHz clip into shape (1, 44100 * 3)
    return tf.reshape(audio, [1, 44100 * 3])
def stft(audio):
return tf.transpose(tf.contrib.signal.stft(audio, 512, 256))
def normalize_image(X):
X = tf.image.per_image_standardization(X)
return tf.nn.relu(X)
def reshape(X):
return tf.reshape(X, [257, 515])
def noise(X):
return tf.add(X, tf.random_normal(X.shape, mean=0.0, stddev=0.3))
def train_sample(X):
return X, X
def default_parser(audio):
    audio = read_audio(audio)
    X = stft(audio)
    X = amplitude_to_db(X)
    X = normalize_image(X)
    X_noisy = noise(X)  # pair a noisy input with the clean target
    return X_noisy, X
def compose(*functions):
return functools.reduce(lambda f, g: lambda x: f(g(x)), functions, lambda x: x)
class Generator():
def __init__(self, tf_record_path, batch_size=32, num_epochs=10, parser=identity):
self._filename = tf_record_path
self._batch_size = batch_size
self._parser = parser
dataset = TFRecordDataset(self._filename)
dataset = dataset.map(self._parse_tfrecord, num_threads=4, output_buffer_size=2*4*batch_size)
dataset = dataset.repeat()
dataset = dataset.shuffle(buffer_size=5000)
dataset = dataset.batch(batch_size)
self._session = tf.Session()
self._iterator = dataset.make_one_shot_iterator()
self._next_element = self._iterator.get_next()
def _parse_tfrecord(self, example_proto):
features = {"audio": tf.FixedLenFeature([], tf.string),
"label": tf.FixedLenFeature([], tf.int64)}
parsed_features = tf.parse_single_example(example_proto, features)
audio = tf.decode_raw(parsed_features['audio'], tf.float32)
label = tf.cast(parsed_features['label'], tf.int32)
X = self._parser(audio)
return X, tf.one_hot(label, NUM_CLASSES, axis=-1)
def next_batch(self):
while 1:
audios, labels = self._session.run(self._next_element)
yield (audios, labels)
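# Usage sketch (the TFRecord path is hypothetical). compose() applies its
# arguments right-to-left, so read_audio runs first:
#
#     parser = compose(normalize_image, amplitude_to_db, stft, read_audio)
#     gen = Generator('data/train.tfrecord', batch_size=32, parser=parser)
#     audios, labels = next(gen.next_batch())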
|
from gruffy import SideBar
g = SideBar(800)
g.title = "Gruffy's Graph"
g.theme_pastel()
g.transparent = True
g.data("Apples", [1, 2, 3, 4, 4, 3])
g.data("Oranges", [4, 8, 7, 9, 8, 9])
g.data("Watermelon", [2, 3, 1, 5, 6, 8])
g.data("Peaches", [9, 9, 10, 8, 7, 9])
g.labels = {0: '2003', 2: '2004', 4: '2005.09'}
g.additional_line_values = True
g.write()
|
import os
import sys
import subprocess
import logging
import json
logger = logging.getLogger("ash")
def _walk_up(bottom):
"""
walk up a dir tree. Code adapted from https://gist.github.com/1098474
"""
bottom = os.path.realpath(bottom)
#get files in current dir
names = os.listdir(bottom)
dirs, nondirs = [], []
for name in names:
if os.path.isdir(os.path.join(bottom, name)):
dirs.append(name)
else:
nondirs.append(name)
yield bottom, dirs, nondirs
new_path = os.path.realpath(os.path.join(bottom, '..'))
# see if we are at the top
if new_path == bottom:
return
for x in _walk_up(new_path):
yield x
def _get_env_dir(venv_dir_name):
"""
Get the virtualenv from this dir or parent dirs. If no virtualenv is found,
this returns False
"""
for curr_dir, included_dirs, included_files in _walk_up(os.getcwd()):
if venv_dir_name in included_dirs:
env = os.path.join(curr_dir, venv_dir_name)
logger.debug("virtualenv found! %s", env)
return env
logger.debug("No virtualenv found with dir name %s" % (venv_dir_name))
return False
def _check_virtualenv(venv_command):
    """
    Check for the existence of virtualenv by running `<venv_command> --version`.
    """
    command = "%s --version" % (venv_command)
    logger.debug("Checking virtualenv with command: %s", command)
    try:
        # a missing command makes the shell exit non-zero instead of raising,
        # so check the return code rather than relying on an exception
        return subprocess.call(command, stdout=open(os.devnull, 'wb'),
                               shell=True) == 0
    except OSError:
        return False
def _create_venv(venv_command, directory, args):
"""
Create a virtual environment. Right now it is using virtualenv through the
command line, but later on I may change this to the virtualenv API, or venv
in later versions of Python 3.
"""
if args:
command = "%s %s %s" % (venv_command, args, directory)
else:
command = "%s %s" % (venv_command, directory)
logger.debug("Create env with command: %s", command)
return subprocess.call(command, shell=True)
def _activate_venv(venv_dir):
    """
    Activate a virtualenv, given its directory.
    """
    activate_file = os.path.join(venv_dir, "Scripts", "activate_this.py")  # Windows layout
    if not os.path.exists(activate_file):  # fall back to the POSIX layout
        activate_file = os.path.join(venv_dir, "bin", "activate_this.py")
    logger.debug("Activating virtualenv with file %s", activate_file)
    exec(open(activate_file).read(), dict(__file__=activate_file))
def _get_config():
"""
Get the config for this program, returning a dict with the settings.
"""
default_config = {
"venv_command": "virtualenv",
"venv_dir_name": "python_env",
"debug": False
}
    config = {}  # the config that is read from the config file
    config_location = os.path.join(os.path.expanduser("~"), ".ashconfig")
    if os.path.exists(config_location):
        with open(config_location, "r") as config_file:
            config = json.loads(config_file.read())
result = default_config.copy()
result.update(config)
return result
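# Example ~/.ashconfig (sketch; every key is optional and falls back to the
# defaults above):
#
#     {
#         "venv_command": "virtualenv",
#         "venv_dir_name": "python_env",
#         "debug": true
#     }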
def main():
config = _get_config()
# Configure logging
logger.setLevel(logging.DEBUG) # baseline level, fine tune later on
log_handler = logging.StreamHandler()
if not config["debug"]:
log_handler.setLevel(logging.INFO)
log_handler.setFormatter(logging.Formatter('%(message)s'))
else:
log_handler.setLevel(logging.DEBUG)
log_handler.setFormatter(
logging.Formatter(
'%(levelname)s %(asctime)s: %(message)s'
)
)
logger.addHandler(log_handler)
logger.debug("Program configured and ready to roll")
if not _check_virtualenv(config["venv_command"]):
logger.error("It seems that virtualenv is not installed or "
"configured properly on this system. Exiting now.")
sys.exit(3)
if len(sys.argv) <= 1:
logger.info("You didn't specify what to do. Refer to the README file "
"for the commands that ash supports. Exiting now.")
sys.exit(2)
venv = _get_env_dir(config["venv_dir_name"])
if sys.argv[1] == "init":
if venv:
if venv == os.path.join(os.getcwd(), config["venv_dir_name"]):
logger.warning("I found an existing virtualenv in this "
"directory. Exiting now.")
sys.exit(1)
else:
logger.warning("There is an existing virtualenv up the "
"directory hierarchy at %s. I will continue to make a "
"virtualenv in this directory. If it's not what you want, "
"simply delete the created directory." % (venv))
args = " ".join(sys.argv[2:]) if (len(sys.argv) > 2) else None
sys.exit(_create_venv(config["venv_command"],
os.path.join(os.getcwd(), config["venv_dir_name"]), args))
if not venv:
logger.warning("I cannot detect a virtual environment in this "
"directory hierarchy. The command will be run as-is.")
else:
_activate_venv(venv)
command = " ".join(sys.argv[1:])
logger.debug("Command to call: %s", command)
subprocess.call(command, shell=True)
if __name__ == "__main__":
main()
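# Example session (sketch; the wrapped command is illustrative):
#
#     $ ash init                # create ./python_env via virtualenv
#     $ ash python manage.py    # activate the nearest python_env, then run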
|
from decimal import Decimal, getcontext
def nthroot(n, A, precision):
    getcontext().prec = precision
    n = Decimal(n)
    x_0 = A / n  # step 1: make a wild initial guess
    x_1 = 1      # needs to exist before step 2
    while True:
        # step 2: Newton's iteration, x -> ((n - 1) * x + A / x**(n - 1)) / n
        x_0, x_1 = x_1, (1 / n) * ((n - 1) * x_0 + (A / (x_0 ** (n - 1))))
        if x_0 == x_1:
            return x_1

print(nthroot(5, 34, 10))
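# Sanity check (sketch): the floating-point equivalent agrees with the
# Decimal result to double precision:
#
#     >>> 34 ** (1 / 5)   # ~= 2.0244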
|
import picamera
import time
import pygame
import tty
import sys
import os
from PIL import Image
from PIL import ImageOps
cam = picamera.PiCamera()
cam.resolution = (640, 480)
cam.framerate = 80
cam.hflip = True
cam.vflip = True
tty.setraw(sys.stdin)
now = time.time()
file_string = "data/forward/img_" + time.strftime("%y%m%d_%H-%M-%S") + ".jpg"
cam.capture(file_string, use_video_port=True)
then = time.time()
print("%s capture took %f ms" % (file_string, (then - now) * 1000.0))
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import subprocess
import chainer.computational_graph
import chainerrl
def collect_variables(obj):
    """Collect Variable objects inside a given object.

    Args:
        obj (object): Object to collect Variable objects from.
    Returns:
        List of Variable objects.
    """
    if isinstance(obj, chainer.Variable):
        return [obj]
    elif isinstance(obj, chainerrl.action_value.ActionValue):
        return list(obj.params)
    elif isinstance(obj, chainerrl.distribution.Distribution):
        return list(obj.params)
    elif isinstance(obj, (list, tuple)):
        variables = []
        for child in obj:
            variables.extend(collect_variables(child))
        return variables
    else:
        # previously fell through and returned None, which would break the
        # recursive extend above
        raise ValueError('Unsupported object for collect_variables: {}'.format(obj))
def is_graphviz_available():
return chainerrl.misc.is_return_code_zero(['dot', '-V'])
def draw_computational_graph(outputs, filepath):
"""Draw a computational graph and write to a given file.
Args:
outputs (object): Output(s) of the computational graph. It must be
a Variable, an ActionValue, a Distribution or a list of them.
        filepath (str): Filepath to write a graph, without file extension.
A DOT file will be saved with ".gv" extension added.
If Graphviz's dot command is available, a PNG file will also be
saved with ".png" extension added.
"""
variables = collect_variables(outputs)
g = chainer.computational_graph.build_computational_graph(variables)
gv_filepath = filepath + '.gv'
with open(gv_filepath, 'w') as f:
# future.builtins.str is required to make sure the content is unicode
# in both py2 and py3
f.write(str(g.dump()))
if is_graphviz_available():
png_filepath = filepath + '.png'
subprocess.check_call(
['dot', '-Tpng', gv_filepath, '-o', png_filepath])
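# Usage sketch (the variable and computation are hypothetical):
#
#     import numpy as np
#     x = chainer.Variable(np.zeros((1, 4), dtype=np.float32))
#     y = x * 2 + 1
#     draw_computational_graph(y, '/tmp/graph')  # writes graph.gv (+ .png if dot exists)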
|
"""Foyer
"""
from __future__ import print_function
import os
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import foyer.version
requirements = [line.strip() for line in open('requirements.txt').readlines()]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errcode = pytest.main(['foyer'])
sys.exit(errcode)
setup(
name='foyer',
version=foyer.version.short_version,
    description=__doc__.split('\n')[0],
long_description=__doc__,
author='Janos Sallai, Christoph Klein',
author_email='janos.sallai@vanderbilt.edu, christoph.klein@vanderbilt.edu',
url='https://github.com/iModels/foyer',
download_url='https://github.com/iModels/foyer/tarball/{}'.format(
foyer.version.short_version),
packages=find_packages(),
package_dir={'foyer': 'foyer'},
include_package_data=True,
install_requires=requirements,
license="MIT",
zip_safe=False,
keywords='foyer',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering :: Chemistry',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
],
test_suite='tests',
cmdclass={'test': PyTest,
},
extras_require={'utils': ['pytest'],
},
)
|
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('projeto101', '0008_auto_20160817_2041'),
]
operations = [
migrations.AddField(
model_name='consulta',
name='data_consulta',
field=models.DateField(default=datetime.datetime(2016, 8, 18, 1, 43, 50, 898487, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='consulta',
name='hora_consulta',
field=models.TimeField(default=datetime.datetime(2016, 8, 18, 1, 44, 2, 455493, tzinfo=utc)),
preserve_default=False,
),
]
|
import time
import psutil
import datetime
import threading
from DataManager import DataManager
class PerformanceCollector:
def __init__(self):
self.dump_interval = 30 # Seconds
self.data_manager = DataManager()
self.cpu_thread = threading.Thread(target=self.collect_cpu_performance)
self.memory_thread = threading.Thread(target=self.collect_mem_performance)
self.network_thread = threading.Thread(target=self.collect_net_performance)
self.disk_thread = threading.Thread(target=self.collect_hdd_performance)
self.cpu_thread.daemon = True
self.memory_thread.daemon = True
self.network_thread.daemon = True
self.disk_thread.daemon = True
print('[', datetime.datetime.today(), ']', 'Start')
self.cpu_thread.start()
self.memory_thread.start()
self.network_thread.start()
self.disk_thread.start()
def collect_cpu_performance(self):
while True:
cpu = psutil.cpu_percent(self.dump_interval)
self.data_manager.add_cpu_data(cpu)
def collect_mem_performance(self):
while True:
time.sleep(self.dump_interval)
mem = psutil.virtual_memory().percent
self.data_manager.add_mem_data(mem)
def collect_net_performance(self):
while True:
net = psutil.net_io_counters()
sent_old = net.bytes_sent
receive_old = net.bytes_recv
time.sleep(self.dump_interval)
net = psutil.net_io_counters()
sent = net.bytes_sent
receive = net.bytes_recv
self.data_manager.add_net_data((receive - receive_old) / self.dump_interval,
(sent - sent_old) / self.dump_interval)
def collect_hdd_performance(self):
while True:
disk = psutil.disk_io_counters()
read_old = disk.read_bytes
write_old = disk.write_bytes
time.sleep(self.dump_interval)
disk = psutil.disk_io_counters()
read = disk.read_bytes
write = disk.write_bytes
self.data_manager.add_hdd_data((read - read_old) / self.dump_interval,
(write - write_old) / self.dump_interval)
def wait(self):
self.cpu_thread.join()
self.memory_thread.join()
self.network_thread.join()
self.disk_thread.join()
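# Usage sketch (DataManager is the project-local sink imported above):
#
#     collector = PerformanceCollector()  # starts the four daemon sampler threads
#     collector.wait()                    # block the main thread indefinitely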
|
import sys
import argparse
import re
import string
import aspell
import time
import csv
table = string.maketrans("","")
exclude = set(string.punctuation)
speller = aspell.Speller('lang', 'en')
def test_trans(s):
return s.translate(table, string.punctuation)
def ignoreword(s):
match1 = re.search('@[A-Z0-9.-]+',s) # @[A-Z0-9.-]+ -- at-sign and rest of CAPS WORD
match2 = re.search('^[0-9],[a-zA-Z]+',s) # ^[0-9],[a-zA-Z]+ -- numbercomma and rest of word
match3 = re.search('^[0-9],\"[a-zA-Z]+',s) # ^[0-9],\"[a-zA-Z]+ -- numbercommadoublequote
match4 = re.search('^[A-Z]+',s) # ^[A-Z]+ -- all CAPS
if (match1 or match2 or match3 or match4):
ignoreword = True
else:
ignoreword = False
return ignoreword
def dashrepl(matchobj):
if matchobj.group(0) == '-': return ' '
else: return '-'
parser = argparse.ArgumentParser(description='Used to automatically fix mis-spelled words',
prog='autoaspell')
parser.add_argument('--version', action='version',
version='%(prog)s 0.1')
parser.add_argument('infile', nargs='?',
help='the name of the text file to parse.',
default=sys.stdin)
parser.add_argument('outfile', nargs='?',
help='the name of the text file to output.',
default=sys.stdout)
parser.add_argument('-j','--journalfile',
help='File where changes should be journaled.',
default="autoaspell_journal.txt",
required=False)
args = vars(parser.parse_args())
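# Example invocation (sketch):
#
#     autoaspell input.txt corrected.txt -j journal.txt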
journal = []
lineno = 0
wordno = 0
journal.append(('line_number','word_number','word_in_file','parsed_word','best_suggested_word')) # set header row
print "Parsing..."
with open(args['infile'],'rb') as fin:
for line in fin:
lineno = lineno + 1
newline = line
for word in line.split():
wordno = wordno + 1
parsedword = word.translate(table, string.punctuation)
if not speller.check(parsedword): # if not found in dictionary
if not ignoreword(word): # if not an ignoreword
suggwords = speller.suggest(parsedword) # get suggestions
if (len(suggwords) > 0):
bestword = suggwords[0] # if suggestions exist, pick best
else:
bestword = parsedword # if no suggestion, set to self
journal.append((lineno, wordno, word, parsedword, bestword))
if not speller.check(parsedword):
newline = re.sub(parsedword, bestword, newline)
wordno = 0
with open(args['outfile'], 'a') as myfile:
myfile.write(newline)
with open(args['journalfile'],'wb') as jfo:
writer = csv.writer(jfo, delimiter='|', quoting=csv.QUOTE_NONNUMERIC)
writer.writerows(journal)
|
from .constant import Constant
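# Linux syscall numbers for the MIPS o32 ABI: each value is the base
# __NR_Linux (4000) plus the syscall's offset from the kernel's unistd.h.
# The SYS_* constants further below alias the same numbers.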
__NR_Linux = Constant('__NR_Linux',4000)
__NR_syscall = Constant('__NR_syscall',(4000 + 0))
__NR_exit = Constant('__NR_exit',(4000 + 1))
__NR_fork = Constant('__NR_fork',(4000 + 2))
__NR_read = Constant('__NR_read',(4000 + 3))
__NR_write = Constant('__NR_write',(4000 + 4))
__NR_open = Constant('__NR_open',(4000 + 5))
__NR_close = Constant('__NR_close',(4000 + 6))
__NR_waitpid = Constant('__NR_waitpid',(4000 + 7))
__NR_creat = Constant('__NR_creat',(4000 + 8))
__NR_link = Constant('__NR_link',(4000 + 9))
__NR_unlink = Constant('__NR_unlink',(4000 + 10))
__NR_execve = Constant('__NR_execve',(4000 + 11))
__NR_chdir = Constant('__NR_chdir',(4000 + 12))
__NR_time = Constant('__NR_time',(4000 + 13))
__NR_mknod = Constant('__NR_mknod',(4000 + 14))
__NR_chmod = Constant('__NR_chmod',(4000 + 15))
__NR_lchown = Constant('__NR_lchown',(4000 + 16))
__NR_break = Constant('__NR_break',(4000 + 17))
__NR_unused18 = Constant('__NR_unused18',(4000 + 18))
__NR_lseek = Constant('__NR_lseek',(4000 + 19))
__NR_getpid = Constant('__NR_getpid',(4000 + 20))
__NR_mount = Constant('__NR_mount',(4000 + 21))
__NR_umount = Constant('__NR_umount',(4000 + 22))
__NR_setuid = Constant('__NR_setuid',(4000 + 23))
__NR_getuid = Constant('__NR_getuid',(4000 + 24))
__NR_stime = Constant('__NR_stime',(4000 + 25))
__NR_ptrace = Constant('__NR_ptrace',(4000 + 26))
__NR_alarm = Constant('__NR_alarm',(4000 + 27))
__NR_unused28 = Constant('__NR_unused28',(4000 + 28))
__NR_pause = Constant('__NR_pause',(4000 + 29))
__NR_utime = Constant('__NR_utime',(4000 + 30))
__NR_stty = Constant('__NR_stty',(4000 + 31))
__NR_gtty = Constant('__NR_gtty',(4000 + 32))
__NR_access = Constant('__NR_access',(4000 + 33))
__NR_nice = Constant('__NR_nice',(4000 + 34))
__NR_ftime = Constant('__NR_ftime',(4000 + 35))
__NR_sync = Constant('__NR_sync',(4000 + 36))
__NR_kill = Constant('__NR_kill',(4000 + 37))
__NR_rename = Constant('__NR_rename',(4000 + 38))
__NR_mkdir = Constant('__NR_mkdir',(4000 + 39))
__NR_rmdir = Constant('__NR_rmdir',(4000 + 40))
__NR_dup = Constant('__NR_dup',(4000 + 41))
__NR_pipe = Constant('__NR_pipe',(4000 + 42))
__NR_times = Constant('__NR_times',(4000 + 43))
__NR_prof = Constant('__NR_prof',(4000 + 44))
__NR_brk = Constant('__NR_brk',(4000 + 45))
__NR_setgid = Constant('__NR_setgid',(4000 + 46))
__NR_getgid = Constant('__NR_getgid',(4000 + 47))
__NR_signal = Constant('__NR_signal',(4000 + 48))
__NR_geteuid = Constant('__NR_geteuid',(4000 + 49))
__NR_getegid = Constant('__NR_getegid',(4000 + 50))
__NR_acct = Constant('__NR_acct',(4000 + 51))
__NR_umount2 = Constant('__NR_umount2',(4000 + 52))
__NR_lock = Constant('__NR_lock',(4000 + 53))
__NR_ioctl = Constant('__NR_ioctl',(4000 + 54))
__NR_fcntl = Constant('__NR_fcntl',(4000 + 55))
__NR_mpx = Constant('__NR_mpx',(4000 + 56))
__NR_setpgid = Constant('__NR_setpgid',(4000 + 57))
__NR_ulimit = Constant('__NR_ulimit',(4000 + 58))
__NR_unused59 = Constant('__NR_unused59',(4000 + 59))
__NR_umask = Constant('__NR_umask',(4000 + 60))
__NR_chroot = Constant('__NR_chroot',(4000 + 61))
__NR_ustat = Constant('__NR_ustat',(4000 + 62))
__NR_dup2 = Constant('__NR_dup2',(4000 + 63))
__NR_getppid = Constant('__NR_getppid',(4000 + 64))
__NR_getpgrp = Constant('__NR_getpgrp',(4000 + 65))
__NR_setsid = Constant('__NR_setsid',(4000 + 66))
__NR_sigaction = Constant('__NR_sigaction',(4000 + 67))
__NR_sgetmask = Constant('__NR_sgetmask',(4000 + 68))
__NR_ssetmask = Constant('__NR_ssetmask',(4000 + 69))
__NR_setreuid = Constant('__NR_setreuid',(4000 + 70))
__NR_setregid = Constant('__NR_setregid',(4000 + 71))
__NR_sigsuspend = Constant('__NR_sigsuspend',(4000 + 72))
__NR_sigpending = Constant('__NR_sigpending',(4000 + 73))
__NR_sethostname = Constant('__NR_sethostname',(4000 + 74))
__NR_setrlimit = Constant('__NR_setrlimit',(4000 + 75))
__NR_getrlimit = Constant('__NR_getrlimit',(4000 + 76))
__NR_getrusage = Constant('__NR_getrusage',(4000 + 77))
__NR_gettimeofday = Constant('__NR_gettimeofday',(4000 + 78))
__NR_settimeofday = Constant('__NR_settimeofday',(4000 + 79))
__NR_getgroups = Constant('__NR_getgroups',(4000 + 80))
__NR_setgroups = Constant('__NR_setgroups',(4000 + 81))
__NR_reserved82 = Constant('__NR_reserved82',(4000 + 82))
__NR_symlink = Constant('__NR_symlink',(4000 + 83))
__NR_unused84 = Constant('__NR_unused84',(4000 + 84))
__NR_readlink = Constant('__NR_readlink',(4000 + 85))
__NR_uselib = Constant('__NR_uselib',(4000 + 86))
__NR_swapon = Constant('__NR_swapon',(4000 + 87))
__NR_reboot = Constant('__NR_reboot',(4000 + 88))
__NR_readdir = Constant('__NR_readdir',(4000 + 89))
__NR_mmap = Constant('__NR_mmap',(4000 + 90))
__NR_munmap = Constant('__NR_munmap',(4000 + 91))
__NR_truncate = Constant('__NR_truncate',(4000 + 92))
__NR_ftruncate = Constant('__NR_ftruncate',(4000 + 93))
__NR_fchmod = Constant('__NR_fchmod',(4000 + 94))
__NR_fchown = Constant('__NR_fchown',(4000 + 95))
__NR_getpriority = Constant('__NR_getpriority',(4000 + 96))
__NR_setpriority = Constant('__NR_setpriority',(4000 + 97))
__NR_profil = Constant('__NR_profil',(4000 + 98))
__NR_statfs = Constant('__NR_statfs',(4000 + 99))
__NR_fstatfs = Constant('__NR_fstatfs',(4000 + 100))
__NR_ioperm = Constant('__NR_ioperm',(4000 + 101))
__NR_socketcall = Constant('__NR_socketcall',(4000 + 102))
__NR_syslog = Constant('__NR_syslog',(4000 + 103))
__NR_setitimer = Constant('__NR_setitimer',(4000 + 104))
__NR_getitimer = Constant('__NR_getitimer',(4000 + 105))
__NR_stat = Constant('__NR_stat',(4000 + 106))
__NR_lstat = Constant('__NR_lstat',(4000 + 107))
__NR_fstat = Constant('__NR_fstat',(4000 + 108))
__NR_unused109 = Constant('__NR_unused109',(4000 + 109))
__NR_iopl = Constant('__NR_iopl',(4000 + 110))
__NR_vhangup = Constant('__NR_vhangup',(4000 + 111))
__NR_idle = Constant('__NR_idle',(4000 + 112))
__NR_vm86 = Constant('__NR_vm86',(4000 + 113))
__NR_wait4 = Constant('__NR_wait4',(4000 + 114))
__NR_swapoff = Constant('__NR_swapoff',(4000 + 115))
__NR_sysinfo = Constant('__NR_sysinfo',(4000 + 116))
__NR_ipc = Constant('__NR_ipc',(4000 + 117))
__NR_fsync = Constant('__NR_fsync',(4000 + 118))
__NR_sigreturn = Constant('__NR_sigreturn',(4000 + 119))
__NR_clone = Constant('__NR_clone',(4000 + 120))
__NR_setdomainname = Constant('__NR_setdomainname',(4000 + 121))
__NR_uname = Constant('__NR_uname',(4000 + 122))
__NR_modify_ldt = Constant('__NR_modify_ldt',(4000 + 123))
__NR_adjtimex = Constant('__NR_adjtimex',(4000 + 124))
__NR_mprotect = Constant('__NR_mprotect',(4000 + 125))
__NR_sigprocmask = Constant('__NR_sigprocmask',(4000 + 126))
__NR_create_module = Constant('__NR_create_module',(4000 + 127))
__NR_init_module = Constant('__NR_init_module',(4000 + 128))
__NR_delete_module = Constant('__NR_delete_module',(4000 + 129))
__NR_get_kernel_syms = Constant('__NR_get_kernel_syms',(4000 + 130))
__NR_quotactl = Constant('__NR_quotactl',(4000 + 131))
__NR_getpgid = Constant('__NR_getpgid',(4000 + 132))
__NR_fchdir = Constant('__NR_fchdir',(4000 + 133))
__NR_bdflush = Constant('__NR_bdflush',(4000 + 134))
__NR_sysfs = Constant('__NR_sysfs',(4000 + 135))
__NR_personality = Constant('__NR_personality',(4000 + 136))
__NR_afs_syscall = Constant('__NR_afs_syscall',(4000 + 137))
__NR_setfsuid = Constant('__NR_setfsuid',(4000 + 138))
__NR_setfsgid = Constant('__NR_setfsgid',(4000 + 139))
__NR__llseek = Constant('__NR__llseek',(4000 + 140))
__NR_getdents = Constant('__NR_getdents',(4000 + 141))
__NR__newselect = Constant('__NR__newselect',(4000 + 142))
__NR_flock = Constant('__NR_flock',(4000 + 143))
__NR_msync = Constant('__NR_msync',(4000 + 144))
__NR_readv = Constant('__NR_readv',(4000 + 145))
__NR_writev = Constant('__NR_writev',(4000 + 146))
__NR_cacheflush = Constant('__NR_cacheflush',(4000 + 147))
__NR_cachectl = Constant('__NR_cachectl',(4000 + 148))
__NR_sysmips = Constant('__NR_sysmips',(4000 + 149))
__NR_unused150 = Constant('__NR_unused150',(4000 + 150))
__NR_getsid = Constant('__NR_getsid',(4000 + 151))
__NR_fdatasync = Constant('__NR_fdatasync',(4000 + 152))
__NR__sysctl = Constant('__NR__sysctl',(4000 + 153))
__NR_mlock = Constant('__NR_mlock',(4000 + 154))
__NR_munlock = Constant('__NR_munlock',(4000 + 155))
__NR_mlockall = Constant('__NR_mlockall',(4000 + 156))
__NR_munlockall = Constant('__NR_munlockall',(4000 + 157))
__NR_sched_setparam = Constant('__NR_sched_setparam',(4000 + 158))
__NR_sched_getparam = Constant('__NR_sched_getparam',(4000 + 159))
__NR_sched_setscheduler = Constant('__NR_sched_setscheduler',(4000 + 160))
__NR_sched_getscheduler = Constant('__NR_sched_getscheduler',(4000 + 161))
__NR_sched_yield = Constant('__NR_sched_yield',(4000 + 162))
__NR_sched_get_priority_max = Constant('__NR_sched_get_priority_max',(4000 + 163))
__NR_sched_get_priority_min = Constant('__NR_sched_get_priority_min',(4000 + 164))
__NR_sched_rr_get_interval = Constant('__NR_sched_rr_get_interval',(4000 + 165))
__NR_nanosleep = Constant('__NR_nanosleep',(4000 + 166))
__NR_mremap = Constant('__NR_mremap',(4000 + 167))
__NR_accept = Constant('__NR_accept',(4000 + 168))
__NR_bind = Constant('__NR_bind',(4000 + 169))
__NR_connect = Constant('__NR_connect',(4000 + 170))
__NR_getpeername = Constant('__NR_getpeername',(4000 + 171))
__NR_getsockname = Constant('__NR_getsockname',(4000 + 172))
__NR_getsockopt = Constant('__NR_getsockopt',(4000 + 173))
__NR_listen = Constant('__NR_listen',(4000 + 174))
__NR_recv = Constant('__NR_recv',(4000 + 175))
__NR_recvfrom = Constant('__NR_recvfrom',(4000 + 176))
__NR_recvmsg = Constant('__NR_recvmsg',(4000 + 177))
__NR_send = Constant('__NR_send',(4000 + 178))
__NR_sendmsg = Constant('__NR_sendmsg',(4000 + 179))
__NR_sendto = Constant('__NR_sendto',(4000 + 180))
__NR_setsockopt = Constant('__NR_setsockopt',(4000 + 181))
__NR_shutdown = Constant('__NR_shutdown',(4000 + 182))
__NR_socket = Constant('__NR_socket',(4000 + 183))
__NR_socketpair = Constant('__NR_socketpair',(4000 + 184))
__NR_setresuid = Constant('__NR_setresuid',(4000 + 185))
__NR_getresuid = Constant('__NR_getresuid',(4000 + 186))
__NR_query_module = Constant('__NR_query_module',(4000 + 187))
__NR_poll = Constant('__NR_poll',(4000 + 188))
__NR_nfsservctl = Constant('__NR_nfsservctl',(4000 + 189))
__NR_setresgid = Constant('__NR_setresgid',(4000 + 190))
__NR_getresgid = Constant('__NR_getresgid',(4000 + 191))
__NR_prctl = Constant('__NR_prctl',(4000 + 192))
__NR_rt_sigreturn = Constant('__NR_rt_sigreturn',(4000 + 193))
__NR_rt_sigaction = Constant('__NR_rt_sigaction',(4000 + 194))
__NR_rt_sigprocmask = Constant('__NR_rt_sigprocmask',(4000 + 195))
__NR_rt_sigpending = Constant('__NR_rt_sigpending',(4000 + 196))
__NR_rt_sigtimedwait = Constant('__NR_rt_sigtimedwait',(4000 + 197))
__NR_rt_sigqueueinfo = Constant('__NR_rt_sigqueueinfo',(4000 + 198))
__NR_rt_sigsuspend = Constant('__NR_rt_sigsuspend',(4000 + 199))
__NR_pread = Constant('__NR_pread',(4000 + 200))
__NR_pwrite = Constant('__NR_pwrite',(4000 + 201))
__NR_chown = Constant('__NR_chown',(4000 + 202))
__NR_getcwd = Constant('__NR_getcwd',(4000 + 203))
__NR_capget = Constant('__NR_capget',(4000 + 204))
__NR_capset = Constant('__NR_capset',(4000 + 205))
__NR_sigaltstack = Constant('__NR_sigaltstack',(4000 + 206))
__NR_sendfile = Constant('__NR_sendfile',(4000 + 207))
__NR_getpmsg = Constant('__NR_getpmsg',(4000 + 208))
__NR_putpmsg = Constant('__NR_putpmsg',(4000 + 209))
__NR_mmap2 = Constant('__NR_mmap2',(4000 + 210))
__NR_truncate64 = Constant('__NR_truncate64',(4000 + 211))
__NR_ftruncate64 = Constant('__NR_ftruncate64',(4000 + 212))
__NR_stat64 = Constant('__NR_stat64',(4000 + 213))
__NR_lstat64 = Constant('__NR_lstat64',(4000 + 214))
__NR_fstat64 = Constant('__NR_fstat64',(4000 + 215))
__NR_pivot_root = Constant('__NR_pivot_root',(4000 + 216))
__NR_mincore = Constant('__NR_mincore',(4000 + 217))
__NR_madvise = Constant('__NR_madvise',(4000 + 218))
__NR_getdents64 = Constant('__NR_getdents64',(4000 + 219))
__NR_fcntl64 = Constant('__NR_fcntl64',(4000 + 220))
__NR_reserved221 = Constant('__NR_reserved221',(4000 + 221))
__NR_gettid = Constant('__NR_gettid',(4000 + 222))
__NR_readahead = Constant('__NR_readahead',(4000 + 223))
__NR_setxattr = Constant('__NR_setxattr',(4000 + 224))
__NR_lsetxattr = Constant('__NR_lsetxattr',(4000 + 225))
__NR_fsetxattr = Constant('__NR_fsetxattr',(4000 + 226))
__NR_getxattr = Constant('__NR_getxattr',(4000 + 227))
__NR_lgetxattr = Constant('__NR_lgetxattr',(4000 + 228))
__NR_fgetxattr = Constant('__NR_fgetxattr',(4000 + 229))
__NR_listxattr = Constant('__NR_listxattr',(4000 + 230))
__NR_llistxattr = Constant('__NR_llistxattr',(4000 + 231))
__NR_flistxattr = Constant('__NR_flistxattr',(4000 + 232))
__NR_removexattr = Constant('__NR_removexattr',(4000 + 233))
__NR_lremovexattr = Constant('__NR_lremovexattr',(4000 + 234))
__NR_fremovexattr = Constant('__NR_fremovexattr',(4000 + 235))
__NR_tkill = Constant('__NR_tkill',(4000 + 236))
__NR_sendfile64 = Constant('__NR_sendfile64',(4000 + 237))
__NR_futex = Constant('__NR_futex',(4000 + 238))
__NR_sched_setaffinity = Constant('__NR_sched_setaffinity',(4000 + 239))
__NR_sched_getaffinity = Constant('__NR_sched_getaffinity',(4000 + 240))
__NR_io_setup = Constant('__NR_io_setup',(4000 + 241))
__NR_io_destroy = Constant('__NR_io_destroy',(4000 + 242))
__NR_io_getevents = Constant('__NR_io_getevents',(4000 + 243))
__NR_io_submit = Constant('__NR_io_submit',(4000 + 244))
__NR_io_cancel = Constant('__NR_io_cancel',(4000 + 245))
__NR_exit_group = Constant('__NR_exit_group',(4000 + 246))
__NR_lookup_dcookie = Constant('__NR_lookup_dcookie',(4000 + 247))
__NR_epoll_create = Constant('__NR_epoll_create',(4000 + 248))
__NR_epoll_ctl = Constant('__NR_epoll_ctl',(4000 + 249))
__NR_epoll_wait = Constant('__NR_epoll_wait',(4000 + 250))
__NR_remap_file_pages = Constant('__NR_remap_file_pages',(4000 + 251))
__NR_set_tid_address = Constant('__NR_set_tid_address',(4000 + 252))
__NR_restart_syscall = Constant('__NR_restart_syscall',(4000 + 253))
__NR_fadvise64 = Constant('__NR_fadvise64',(4000 + 254))
__NR_statfs64 = Constant('__NR_statfs64',(4000 + 255))
__NR_fstatfs64 = Constant('__NR_fstatfs64',(4000 + 256))
__NR_timer_create = Constant('__NR_timer_create',(4000 + 257))
__NR_timer_settime = Constant('__NR_timer_settime',(4000 + 258))
__NR_timer_gettime = Constant('__NR_timer_gettime',(4000 + 259))
__NR_timer_getoverrun = Constant('__NR_timer_getoverrun',(4000 + 260))
__NR_timer_delete = Constant('__NR_timer_delete',(4000 + 261))
__NR_clock_settime = Constant('__NR_clock_settime',(4000 + 262))
__NR_clock_gettime = Constant('__NR_clock_gettime',(4000 + 263))
__NR_clock_getres = Constant('__NR_clock_getres',(4000 + 264))
__NR_clock_nanosleep = Constant('__NR_clock_nanosleep',(4000 + 265))
__NR_tgkill = Constant('__NR_tgkill',(4000 + 266))
__NR_utimes = Constant('__NR_utimes',(4000 + 267))
__NR_mbind = Constant('__NR_mbind',(4000 + 268))
__NR_get_mempolicy = Constant('__NR_get_mempolicy',(4000 + 269))
__NR_set_mempolicy = Constant('__NR_set_mempolicy',(4000 + 270))
__NR_mq_open = Constant('__NR_mq_open',(4000 + 271))
__NR_mq_unlink = Constant('__NR_mq_unlink',(4000 + 272))
__NR_mq_timedsend = Constant('__NR_mq_timedsend',(4000 + 273))
__NR_mq_timedreceive = Constant('__NR_mq_timedreceive',(4000 + 274))
__NR_mq_notify = Constant('__NR_mq_notify',(4000 + 275))
__NR_mq_getsetattr = Constant('__NR_mq_getsetattr',(4000 + 276))
__NR_vserver = Constant('__NR_vserver',(4000 + 277))
__NR_waitid = Constant('__NR_waitid',(4000 + 278))
__NR_add_key = Constant('__NR_add_key',(4000 + 280))
__NR_request_key = Constant('__NR_request_key',(4000 + 281))
__NR_keyctl = Constant('__NR_keyctl',(4000 + 282))
__NR_set_thread_area = Constant('__NR_set_thread_area',(4000 + 283))
__NR_inotify_init = Constant('__NR_inotify_init',(4000 + 284))
__NR_inotify_add_watch = Constant('__NR_inotify_add_watch',(4000 + 285))
__NR_inotify_rm_watch = Constant('__NR_inotify_rm_watch',(4000 + 286))
__NR_migrate_pages = Constant('__NR_migrate_pages',(4000 + 287))
__NR_openat = Constant('__NR_openat',(4000 + 288))
__NR_mkdirat = Constant('__NR_mkdirat',(4000 + 289))
__NR_mknodat = Constant('__NR_mknodat',(4000 + 290))
__NR_fchownat = Constant('__NR_fchownat',(4000 + 291))
__NR_futimesat = Constant('__NR_futimesat',(4000 + 292))
__NR_fstatat = Constant('__NR_fstatat',(4000 + 293))
__NR_unlinkat = Constant('__NR_unlinkat',(4000 + 294))
__NR_renameat = Constant('__NR_renameat',(4000 + 295))
__NR_linkat = Constant('__NR_linkat',(4000 + 296))
__NR_symlinkat = Constant('__NR_symlinkat',(4000 + 297))
__NR_readlinkat = Constant('__NR_readlinkat',(4000 + 298))
__NR_fchmodat = Constant('__NR_fchmodat',(4000 + 299))
__NR_faccessat = Constant('__NR_faccessat',(4000 + 300))
__NR_pselect6 = Constant('__NR_pselect6',(4000 + 301))
__NR_ppoll = Constant('__NR_ppoll',(4000 + 302))
__NR_unshare = Constant('__NR_unshare',(4000 + 303))
__NR_splice = Constant('__NR_splice',(4000 + 304))
__NR_sync_file_range = Constant('__NR_sync_file_range',(4000 + 305))
__NR_tee = Constant('__NR_tee',(4000 + 306))
__NR_vmsplice = Constant('__NR_vmsplice',(4000 + 307))
__NR_move_pages = Constant('__NR_move_pages',(4000 + 308))
__NR_set_robust_list = Constant('__NR_set_robust_list',(4000 + 309))
__NR_get_robust_list = Constant('__NR_get_robust_list',(4000 + 310))
__NR_kexec_load = Constant('__NR_kexec_load',(4000 + 311))
__NR_getcpu = Constant('__NR_getcpu',(4000 + 312))
__NR_epoll_pwait = Constant('__NR_epoll_pwait',(4000 + 313))
__NR_ioprio_set = Constant('__NR_ioprio_set',(4000 + 314))
__NR_ioprio_get = Constant('__NR_ioprio_get',(4000 + 315))
__NR_utimensat = Constant('__NR_utimensat',(4000 + 316))
__NR_signalfd = Constant('__NR_signalfd',(4000 + 317))
__NR_timerfd = Constant('__NR_timerfd',(4000 + 318))
__NR_eventfd = Constant('__NR_eventfd',(4000 + 319))
__NR_fallocate = Constant('__NR_fallocate',(4000 + 320))
__NR_timerfd_create = Constant('__NR_timerfd_create',(4000 + 321))
__NR_timerfd_gettime = Constant('__NR_timerfd_gettime',(4000 + 322))
__NR_timerfd_settime = Constant('__NR_timerfd_settime',(4000 + 323))
__SYS_NERR = Constant('__SYS_NERR',((164) + 1))
_SYS_TIME_H = Constant('_SYS_TIME_H',1)
SYS_accept = Constant('SYS_accept',(4000 + 168))
SYS_access = Constant('SYS_access',(4000 + 33))
SYS_acct = Constant('SYS_acct',(4000 + 51))
SYS_add_key = Constant('SYS_add_key',(4000 + 280))
SYS_adjtimex = Constant('SYS_adjtimex',(4000 + 124))
SYS_afs_syscall = Constant('SYS_afs_syscall',(4000 + 137))
SYS_alarm = Constant('SYS_alarm',(4000 + 27))
SYS_bdflush = Constant('SYS_bdflush',(4000 + 134))
SYS_bind = Constant('SYS_bind',(4000 + 169))
SYS_break = Constant('SYS_break',(4000 + 17))
SYS_brk = Constant('SYS_brk',(4000 + 45))
SYS_cachectl = Constant('SYS_cachectl',(4000 + 148))
SYS_cacheflush = Constant('SYS_cacheflush',(4000 + 147))
SYS_capget = Constant('SYS_capget',(4000 + 204))
SYS_capset = Constant('SYS_capset',(4000 + 205))
SYS_chdir = Constant('SYS_chdir',(4000 + 12))
SYS_chmod = Constant('SYS_chmod',(4000 + 15))
SYS_chown = Constant('SYS_chown',(4000 + 202))
SYS_chroot = Constant('SYS_chroot',(4000 + 61))
SYS_clock_getres = Constant('SYS_clock_getres',(4000 + 264))
SYS_clock_gettime = Constant('SYS_clock_gettime',(4000 + 263))
SYS_clock_nanosleep = Constant('SYS_clock_nanosleep',(4000 + 265))
SYS_clock_settime = Constant('SYS_clock_settime',(4000 + 262))
SYS_clone = Constant('SYS_clone',(4000 + 120))
SYS_close = Constant('SYS_close',(4000 + 6))
SYS_connect = Constant('SYS_connect',(4000 + 170))
SYS_creat = Constant('SYS_creat',(4000 + 8))
SYS_create_module = Constant('SYS_create_module',(4000 + 127))
SYS_delete_module = Constant('SYS_delete_module',(4000 + 129))
SYS_dup = Constant('SYS_dup',(4000 + 41))
SYS_dup2 = Constant('SYS_dup2',(4000 + 63))
SYS_epoll_create = Constant('SYS_epoll_create',(4000 + 248))
SYS_epoll_ctl = Constant('SYS_epoll_ctl',(4000 + 249))
SYS_epoll_pwait = Constant('SYS_epoll_pwait',(4000 + 313))
SYS_epoll_wait = Constant('SYS_epoll_wait',(4000 + 250))
SYS_eventfd = Constant('SYS_eventfd',(4000 + 319))
SYS_execve = Constant('SYS_execve',(4000 + 11))
SYS_exit = Constant('SYS_exit',(4000 + 1))
SYS_exit_group = Constant('SYS_exit_group',(4000 + 246))
SYS_faccessat = Constant('SYS_faccessat',(4000 + 300))
SYS_fadvise64 = Constant('SYS_fadvise64',(4000 + 254))
SYS_fallocate = Constant('SYS_fallocate',(4000 + 320))
SYS_fchdir = Constant('SYS_fchdir',(4000 + 133))
SYS_fchmod = Constant('SYS_fchmod',(4000 + 94))
SYS_fchmodat = Constant('SYS_fchmodat',(4000 + 299))
SYS_fchown = Constant('SYS_fchown',(4000 + 95))
SYS_fchownat = Constant('SYS_fchownat',(4000 + 291))
SYS_fcntl = Constant('SYS_fcntl',(4000 + 55))
SYS_fcntl64 = Constant('SYS_fcntl64',(4000 + 220))
SYS_fdatasync = Constant('SYS_fdatasync',(4000 + 152))
SYS_fgetxattr = Constant('SYS_fgetxattr',(4000 + 229))
SYS_flistxattr = Constant('SYS_flistxattr',(4000 + 232))
SYS_flock = Constant('SYS_flock',(4000 + 143))
SYS_fork = Constant('SYS_fork',(4000 + 2))
SYS_fremovexattr = Constant('SYS_fremovexattr',(4000 + 235))
SYS_fsetxattr = Constant('SYS_fsetxattr',(4000 + 226))
SYS_fstat = Constant('SYS_fstat',(4000 + 108))
SYS_fstat64 = Constant('SYS_fstat64',(4000 + 215))
SYS_fstatat = Constant('SYS_fstatat',(4000 + 293))
SYS_fstatfs = Constant('SYS_fstatfs',(4000 + 100))
SYS_fstatfs64 = Constant('SYS_fstatfs64',(4000 + 256))
SYS_fsync = Constant('SYS_fsync',(4000 + 118))
SYS_ftime = Constant('SYS_ftime',(4000 + 35))
SYS_ftruncate = Constant('SYS_ftruncate',(4000 + 93))
SYS_ftruncate64 = Constant('SYS_ftruncate64',(4000 + 212))
SYS_futex = Constant('SYS_futex',(4000 + 238))
SYS_futimesat = Constant('SYS_futimesat',(4000 + 292))
SYS_getcpu = Constant('SYS_getcpu',(4000 + 312))
SYS_getcwd = Constant('SYS_getcwd',(4000 + 203))
SYS_getdents = Constant('SYS_getdents',(4000 + 141))
SYS_getdents64 = Constant('SYS_getdents64',(4000 + 219))
SYS_getegid = Constant('SYS_getegid',(4000 + 50))
SYS_geteuid = Constant('SYS_geteuid',(4000 + 49))
SYS_getgid = Constant('SYS_getgid',(4000 + 47))
SYS_getgroups = Constant('SYS_getgroups',(4000 + 80))
SYS_getitimer = Constant('SYS_getitimer',(4000 + 105))
SYS_get_kernel_syms = Constant('SYS_get_kernel_syms',(4000 + 130))
SYS_get_mempolicy = Constant('SYS_get_mempolicy',(4000 + 269))
SYS_getpeername = Constant('SYS_getpeername',(4000 + 171))
SYS_getpgid = Constant('SYS_getpgid',(4000 + 132))
SYS_getpgrp = Constant('SYS_getpgrp',(4000 + 65))
SYS_getpid = Constant('SYS_getpid',(4000 + 20))
SYS_getpmsg = Constant('SYS_getpmsg',(4000 + 208))
SYS_getppid = Constant('SYS_getppid',(4000 + 64))
SYS_getpriority = Constant('SYS_getpriority',(4000 + 96))
SYS_getresgid = Constant('SYS_getresgid',(4000 + 191))
SYS_getresuid = Constant('SYS_getresuid',(4000 + 186))
SYS_getrlimit = Constant('SYS_getrlimit',(4000 + 76))
SYS_get_robust_list = Constant('SYS_get_robust_list',(4000 + 310))
SYS_getrusage = Constant('SYS_getrusage',(4000 + 77))
SYS_getsid = Constant('SYS_getsid',(4000 + 151))
SYS_getsockname = Constant('SYS_getsockname',(4000 + 172))
SYS_getsockopt = Constant('SYS_getsockopt',(4000 + 173))
SYS_gettid = Constant('SYS_gettid',(4000 + 222))
SYS_gettimeofday = Constant('SYS_gettimeofday',(4000 + 78))
SYS_getuid = Constant('SYS_getuid',(4000 + 24))
SYS_getxattr = Constant('SYS_getxattr',(4000 + 227))
SYS_gtty = Constant('SYS_gtty',(4000 + 32))
SYS_idle = Constant('SYS_idle',(4000 + 112))
SYS_init_module = Constant('SYS_init_module',(4000 + 128))
SYS_inotify_add_watch = Constant('SYS_inotify_add_watch',(4000 + 285))
SYS_inotify_init = Constant('SYS_inotify_init',(4000 + 284))
SYS_inotify_rm_watch = Constant('SYS_inotify_rm_watch',(4000 + 286))
SYS_io_cancel = Constant('SYS_io_cancel',(4000 + 245))
SYS_ioctl = Constant('SYS_ioctl',(4000 + 54))
SYS_io_destroy = Constant('SYS_io_destroy',(4000 + 242))
SYS_io_getevents = Constant('SYS_io_getevents',(4000 + 243))
SYS_ioperm = Constant('SYS_ioperm',(4000 + 101))
SYS_iopl = Constant('SYS_iopl',(4000 + 110))
SYS_ioprio_get = Constant('SYS_ioprio_get',(4000 + 315))
SYS_ioprio_set = Constant('SYS_ioprio_set',(4000 + 314))
SYS_io_setup = Constant('SYS_io_setup',(4000 + 241))
SYS_io_submit = Constant('SYS_io_submit',(4000 + 244))
SYS_ipc = Constant('SYS_ipc',(4000 + 117))
SYS_kexec_load = Constant('SYS_kexec_load',(4000 + 311))
SYS_keyctl = Constant('SYS_keyctl',(4000 + 282))
SYS_kill = Constant('SYS_kill',(4000 + 37))
SYS_lchown = Constant('SYS_lchown',(4000 + 16))
SYS_lgetxattr = Constant('SYS_lgetxattr',(4000 + 228))
SYS_link = Constant('SYS_link',(4000 + 9))
SYS_linkat = Constant('SYS_linkat',(4000 + 296))
SYS_Linux = Constant('SYS_Linux',4000)
SYS_listen = Constant('SYS_listen',(4000 + 174))
SYS_listxattr = Constant('SYS_listxattr',(4000 + 230))
SYS_llistxattr = Constant('SYS_llistxattr',(4000 + 231))
SYS__llseek = Constant('SYS__llseek',(4000 + 140))
SYS_lock = Constant('SYS_lock',(4000 + 53))
SYS_lookup_dcookie = Constant('SYS_lookup_dcookie',(4000 + 247))
SYS_lremovexattr = Constant('SYS_lremovexattr',(4000 + 234))
SYS_lseek = Constant('SYS_lseek',(4000 + 19))
SYS_lsetxattr = Constant('SYS_lsetxattr',(4000 + 225))
SYS_lstat = Constant('SYS_lstat',(4000 + 107))
SYS_lstat64 = Constant('SYS_lstat64',(4000 + 214))
SYS_madvise = Constant('SYS_madvise',(4000 + 218))
SYS_mbind = Constant('SYS_mbind',(4000 + 268))
SYS_migrate_pages = Constant('SYS_migrate_pages',(4000 + 287))
SYS_mincore = Constant('SYS_mincore',(4000 + 217))
SYS_mkdir = Constant('SYS_mkdir',(4000 + 39))
SYS_mkdirat = Constant('SYS_mkdirat',(4000 + 289))
SYS_mknod = Constant('SYS_mknod',(4000 + 14))
SYS_mknodat = Constant('SYS_mknodat',(4000 + 290))
SYS_mlock = Constant('SYS_mlock',(4000 + 154))
SYS_mlockall = Constant('SYS_mlockall',(4000 + 156))
SYS_mmap = Constant('SYS_mmap',(4000 + 90))
SYS_mmap2 = Constant('SYS_mmap2',(4000 + 210))
SYS_modify_ldt = Constant('SYS_modify_ldt',(4000 + 123))
SYS_mount = Constant('SYS_mount',(4000 + 21))
SYS_move_pages = Constant('SYS_move_pages',(4000 + 308))
SYS_mprotect = Constant('SYS_mprotect',(4000 + 125))
SYS_mpx = Constant('SYS_mpx',(4000 + 56))
SYS_mq_getsetattr = Constant('SYS_mq_getsetattr',(4000 + 276))
SYS_mq_notify = Constant('SYS_mq_notify',(4000 + 275))
SYS_mq_open = Constant('SYS_mq_open',(4000 + 271))
SYS_mq_timedreceive = Constant('SYS_mq_timedreceive',(4000 + 274))
SYS_mq_timedsend = Constant('SYS_mq_timedsend',(4000 + 273))
SYS_mq_unlink = Constant('SYS_mq_unlink',(4000 + 272))
SYS_mremap = Constant('SYS_mremap',(4000 + 167))
SYS_msync = Constant('SYS_msync',(4000 + 144))
SYS_munlock = Constant('SYS_munlock',(4000 + 155))
SYS_munlockall = Constant('SYS_munlockall',(4000 + 157))
SYS_munmap = Constant('SYS_munmap',(4000 + 91))
SYS_nanosleep = Constant('SYS_nanosleep',(4000 + 166))
SYS__newselect = Constant('SYS__newselect',(4000 + 142))
SYS_nfsservctl = Constant('SYS_nfsservctl',(4000 + 189))
SYS_nice = Constant('SYS_nice',(4000 + 34))
SYS_open = Constant('SYS_open',(4000 + 5))
SYS_openat = Constant('SYS_openat',(4000 + 288))
SYS_pause = Constant('SYS_pause',(4000 + 29))
SYS_personality = Constant('SYS_personality',(4000 + 136))
SYS_pipe = Constant('SYS_pipe',(4000 + 42))
SYS_pivot_root = Constant('SYS_pivot_root',(4000 + 216))
SYS_poll = Constant('SYS_poll',(4000 + 188))
SYS_ppoll = Constant('SYS_ppoll',(4000 + 302))
SYS_prctl = Constant('SYS_prctl',(4000 + 192))
SYS_pread = Constant('SYS_pread',(4000 + 200))
SYS_prof = Constant('SYS_prof',(4000 + 44))
SYS_profil = Constant('SYS_profil',(4000 + 98))
SYS_pselect6 = Constant('SYS_pselect6',(4000 + 301))
SYS_ptrace = Constant('SYS_ptrace',(4000 + 26))
SYS_putpmsg = Constant('SYS_putpmsg',(4000 + 209))
SYS_pwrite = Constant('SYS_pwrite',(4000 + 201))
SYS_query_module = Constant('SYS_query_module',(4000 + 187))
SYS_quotactl = Constant('SYS_quotactl',(4000 + 131))
SYS_read = Constant('SYS_read',(4000 + 3))
SYS_readahead = Constant('SYS_readahead',(4000 + 223))
SYS_readdir = Constant('SYS_readdir',(4000 + 89))
SYS_readlink = Constant('SYS_readlink',(4000 + 85))
SYS_readlinkat = Constant('SYS_readlinkat',(4000 + 298))
SYS_readv = Constant('SYS_readv',(4000 + 145))
SYS_reboot = Constant('SYS_reboot',(4000 + 88))
SYS_recv = Constant('SYS_recv',(4000 + 175))
SYS_recvfrom = Constant('SYS_recvfrom',(4000 + 176))
SYS_recvmsg = Constant('SYS_recvmsg',(4000 + 177))
SYS_remap_file_pages = Constant('SYS_remap_file_pages',(4000 + 251))
SYS_removexattr = Constant('SYS_removexattr',(4000 + 233))
SYS_rename = Constant('SYS_rename',(4000 + 38))
SYS_renameat = Constant('SYS_renameat',(4000 + 295))
SYS_request_key = Constant('SYS_request_key',(4000 + 281))
SYS_reserved221 = Constant('SYS_reserved221',(4000 + 221))
SYS_reserved82 = Constant('SYS_reserved82',(4000 + 82))
SYS_restart_syscall = Constant('SYS_restart_syscall',(4000 + 253))
SYS_rmdir = Constant('SYS_rmdir',(4000 + 40))
SYS_rt_sigaction = Constant('SYS_rt_sigaction',(4000 + 194))
SYS_rt_sigpending = Constant('SYS_rt_sigpending',(4000 + 196))
SYS_rt_sigprocmask = Constant('SYS_rt_sigprocmask',(4000 + 195))
SYS_rt_sigqueueinfo = Constant('SYS_rt_sigqueueinfo',(4000 + 198))
SYS_rt_sigreturn = Constant('SYS_rt_sigreturn',(4000 + 193))
SYS_rt_sigsuspend = Constant('SYS_rt_sigsuspend',(4000 + 199))
SYS_rt_sigtimedwait = Constant('SYS_rt_sigtimedwait',(4000 + 197))
SYS_sched_getaffinity = Constant('SYS_sched_getaffinity',(4000 + 240))
SYS_sched_getparam = Constant('SYS_sched_getparam',(4000 + 159))
SYS_sched_get_priority_max = Constant('SYS_sched_get_priority_max',(4000 + 163))
SYS_sched_get_priority_min = Constant('SYS_sched_get_priority_min',(4000 + 164))
SYS_sched_getscheduler = Constant('SYS_sched_getscheduler',(4000 + 161))
SYS_sched_rr_get_interval = Constant('SYS_sched_rr_get_interval',(4000 + 165))
SYS_sched_setaffinity = Constant('SYS_sched_setaffinity',(4000 + 239))
SYS_sched_setparam = Constant('SYS_sched_setparam',(4000 + 158))
SYS_sched_setscheduler = Constant('SYS_sched_setscheduler',(4000 + 160))
SYS_sched_yield = Constant('SYS_sched_yield',(4000 + 162))
SYS_send = Constant('SYS_send',(4000 + 178))
SYS_sendfile = Constant('SYS_sendfile',(4000 + 207))
SYS_sendfile64 = Constant('SYS_sendfile64',(4000 + 237))
SYS_sendmsg = Constant('SYS_sendmsg',(4000 + 179))
SYS_sendto = Constant('SYS_sendto',(4000 + 180))
SYS_setdomainname = Constant('SYS_setdomainname',(4000 + 121))
SYS_setfsgid = Constant('SYS_setfsgid',(4000 + 139))
SYS_setfsuid = Constant('SYS_setfsuid',(4000 + 138))
SYS_setgid = Constant('SYS_setgid',(4000 + 46))
SYS_setgroups = Constant('SYS_setgroups',(4000 + 81))
SYS_sethostname = Constant('SYS_sethostname',(4000 + 74))
SYS_setitimer = Constant('SYS_setitimer',(4000 + 104))
SYS_set_mempolicy = Constant('SYS_set_mempolicy',(4000 + 270))
SYS_setpgid = Constant('SYS_setpgid',(4000 + 57))
SYS_setpriority = Constant('SYS_setpriority',(4000 + 97))
SYS_setregid = Constant('SYS_setregid',(4000 + 71))
SYS_setresgid = Constant('SYS_setresgid',(4000 + 190))
SYS_setresuid = Constant('SYS_setresuid',(4000 + 185))
SYS_setreuid = Constant('SYS_setreuid',(4000 + 70))
SYS_setrlimit = Constant('SYS_setrlimit',(4000 + 75))
SYS_set_robust_list = Constant('SYS_set_robust_list',(4000 + 309))
SYS_setsid = Constant('SYS_setsid',(4000 + 66))
SYS_setsockopt = Constant('SYS_setsockopt',(4000 + 181))
SYS_set_thread_area = Constant('SYS_set_thread_area',(4000 + 283))
SYS_set_tid_address = Constant('SYS_set_tid_address',(4000 + 252))
SYS_settimeofday = Constant('SYS_settimeofday',(4000 + 79))
SYS_setuid = Constant('SYS_setuid',(4000 + 23))
SYS_setxattr = Constant('SYS_setxattr',(4000 + 224))
SYS_sgetmask = Constant('SYS_sgetmask',(4000 + 68))
SYS_shutdown = Constant('SYS_shutdown',(4000 + 182))
SYS_sigaction = Constant('SYS_sigaction',(4000 + 67))
SYS_sigaltstack = Constant('SYS_sigaltstack',(4000 + 206))
SYS_signal = Constant('SYS_signal',(4000 + 48))
SYS_signalfd = Constant('SYS_signalfd',(4000 + 317))
SYS_sigpending = Constant('SYS_sigpending',(4000 + 73))
SYS_sigprocmask = Constant('SYS_sigprocmask',(4000 + 126))
SYS_sigreturn = Constant('SYS_sigreturn',(4000 + 119))
SYS_sigsuspend = Constant('SYS_sigsuspend',(4000 + 72))
SYS_socket = Constant('SYS_socket',(4000 + 183))
SYS_socketcall = Constant('SYS_socketcall',(4000 + 102))
SYS_socketpair = Constant('SYS_socketpair',(4000 + 184))
SYS_splice = Constant('SYS_splice',(4000 + 304))
SYS_ssetmask = Constant('SYS_ssetmask',(4000 + 69))
SYS_stat = Constant('SYS_stat',(4000 + 106))
SYS_stat64 = Constant('SYS_stat64',(4000 + 213))
SYS_statfs = Constant('SYS_statfs',(4000 + 99))
SYS_statfs64 = Constant('SYS_statfs64',(4000 + 255))
SYS_stime = Constant('SYS_stime',(4000 + 25))
SYS_stty = Constant('SYS_stty',(4000 + 31))
SYS_swapoff = Constant('SYS_swapoff',(4000 + 115))
SYS_swapon = Constant('SYS_swapon',(4000 + 87))
SYS_symlink = Constant('SYS_symlink',(4000 + 83))
SYS_symlinkat = Constant('SYS_symlinkat',(4000 + 297))
SYS_sync = Constant('SYS_sync',(4000 + 36))
SYS_sync_file_range = Constant('SYS_sync_file_range',(4000 + 305))
SYS_syscall = Constant('SYS_syscall',(4000 + 0))
SYS__sysctl = Constant('SYS__sysctl',(4000 + 153))
SYS_sysfs = Constant('SYS_sysfs',(4000 + 135))
SYS_sysinfo = Constant('SYS_sysinfo',(4000 + 116))
SYS_syslog = Constant('SYS_syslog',(4000 + 103))
SYS_sysmips = Constant('SYS_sysmips',(4000 + 149))
SYS_tee = Constant('SYS_tee',(4000 + 306))
SYS_tgkill = Constant('SYS_tgkill',(4000 + 266))
SYS_time = Constant('SYS_time',(4000 + 13))
SYS_timer_create = Constant('SYS_timer_create',(4000 + 257))
SYS_timer_delete = Constant('SYS_timer_delete',(4000 + 261))
SYS_timerfd = Constant('SYS_timerfd',(4000 + 318))
SYS_timerfd_create = Constant('SYS_timerfd_create',(4000 + 321))
SYS_timerfd_gettime = Constant('SYS_timerfd_gettime',(4000 + 322))
SYS_timerfd_settime = Constant('SYS_timerfd_settime',(4000 + 323))
SYS_timer_getoverrun = Constant('SYS_timer_getoverrun',(4000 + 260))
SYS_timer_gettime = Constant('SYS_timer_gettime',(4000 + 259))
SYS_timer_settime = Constant('SYS_timer_settime',(4000 + 258))
SYS_times = Constant('SYS_times',(4000 + 43))
SYS_tkill = Constant('SYS_tkill',(4000 + 236))
SYS_truncate = Constant('SYS_truncate',(4000 + 92))
SYS_truncate64 = Constant('SYS_truncate64',(4000 + 211))
SYS_ulimit = Constant('SYS_ulimit',(4000 + 58))
SYS_umask = Constant('SYS_umask',(4000 + 60))
SYS_umount = Constant('SYS_umount',(4000 + 22))
SYS_umount2 = Constant('SYS_umount2',(4000 + 52))
SYS_uname = Constant('SYS_uname',(4000 + 122))
SYS_unlink = Constant('SYS_unlink',(4000 + 10))
SYS_unlinkat = Constant('SYS_unlinkat',(4000 + 294))
SYS_unshare = Constant('SYS_unshare',(4000 + 303))
SYS_unused109 = Constant('SYS_unused109',(4000 + 109))
SYS_unused150 = Constant('SYS_unused150',(4000 + 150))
SYS_unused18 = Constant('SYS_unused18',(4000 + 18))
SYS_unused28 = Constant('SYS_unused28',(4000 + 28))
SYS_unused59 = Constant('SYS_unused59',(4000 + 59))
SYS_unused84 = Constant('SYS_unused84',(4000 + 84))
SYS_uselib = Constant('SYS_uselib',(4000 + 86))
SYS_ustat = Constant('SYS_ustat',(4000 + 62))
SYS_utime = Constant('SYS_utime',(4000 + 30))
SYS_utimensat = Constant('SYS_utimensat',(4000 + 316))
SYS_utimes = Constant('SYS_utimes',(4000 + 267))
SYS_vhangup = Constant('SYS_vhangup',(4000 + 111))
SYS_vm86 = Constant('SYS_vm86',(4000 + 113))
SYS_vmsplice = Constant('SYS_vmsplice',(4000 + 307))
SYS_vserver = Constant('SYS_vserver',(4000 + 277))
SYS_wait4 = Constant('SYS_wait4',(4000 + 114))
SYS_waitid = Constant('SYS_waitid',(4000 + 278))
SYS_waitpid = Constant('SYS_waitpid',(4000 + 7))
SYS_write = Constant('SYS_write',(4000 + 4))
SYS_writev = Constant('SYS_writev',(4000 + 146))
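# Usage sketch (assumes Constant coerces to its integer value, as in
# pwntools-style constant modules):
#
#     int(SYS_write)  # -> 4004, i.e. __NR_Linux (4000) + 4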
|
import _plotly_utils.basevalidators
class CustomdataValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="customdata", parent_name="violin", **kwargs):
super(CustomdataValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
|
"""
Django settings for mybook project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'ar(rg$a&*6!p$#uyj_zcxo57=4$7*+^38yfgicw8@vsqimyewr'
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cms',
'bootstrapform', # django-bootstrap-form
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mybook.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mybook.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'ja'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
|
import os
import sys
import subprocess
import pytest
main_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
empty_python_output = """\
import prophy
"""
def tr(str_):
""" Facilitates testing strings output from windows cmd-line programs. """
return str_.translate(None, b'\r')
def call(args):
popen = subprocess.Popen([sys.executable, "-m", "prophyc"] + args,
cwd = main_dir,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
out, err = popen.communicate()
return popen.returncode, out, err
def test_showing_version():
ret, out, err = call(["--version"])
expected_version = b'0.7.7'
assert ret == 0
assert tr(out) == b'prophyc ' + expected_version + b'\n'
assert err == b""
def test_missing_input():
ret, out, err = call([])
assert ret == 1
assert out == b""
assert tr(err) == b"prophyc: error: missing input file\n"
def test_no_output_directory(tmpdir_cwd):
open("input.xml", "w").write("")
ret, out, err = call(["--python_out", "no_dir",
os.path.join(str(tmpdir_cwd), "input_xml")])
assert ret == 1
assert out == b""
assert tr(err) == b"prophyc: error: argument --python_out: no_dir directory not found\n"
def test_missing_output(tmpdir_cwd):
open("input.xml", "w")
ret, out, err = call(["--isar", os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 1
assert out == b""
assert tr(err) == b"prophyc: error: missing output directives\n"
def test_passing_isar_and_sack(tmpdir_cwd):
open("input", "w")
ret, out, err = call(["--isar", "--sack", "--python_out", ".",
os.path.join(str(tmpdir_cwd), "input")])
assert ret == 1
assert out == b""
assert tr(err) == b"prophyc: error: argument --sack: not allowed with argument --isar\n"
def test_isar_compiles_single_empty_xml(tmpdir_cwd):
open("input.xml", "w").write("<struct/>")
ret, out, err = call(["--isar", "--python_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert empty_python_output == open("input.py").read()
def test_isar_compiles_multiple_empty_xmls(tmpdir_cwd):
open("input1.xml", "w").write("<struct/>")
open("input2.xml", "w").write("<struct/>")
open("input3.xml", "w").write("<struct/>")
ret, out, err = call(["--isar",
"--python_out",
str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input1.xml"),
os.path.join(str(tmpdir_cwd), "input2.xml"),
os.path.join(str(tmpdir_cwd), "input3.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert empty_python_output == open("input1.py").read()
assert empty_python_output == open("input2.py").read()
assert empty_python_output == open("input3.py").read()
def test_outputs_to_correct_directory(tmpdir_cwd):
open("input.xml", "w").write("<struct/>")
os.mkdir("output")
ret, out, err = call(["--isar", "--python_out",
os.path.join(str(tmpdir_cwd), "output"),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert empty_python_output == open(os.path.join("output", "input.py")).read()
def test_isar_patch(tmpdir_cwd):
open("input.xml", "w").write("""\
<x>
<struct name="B">
<member name="a" type="u8"/>
</struct>
<struct name="A">
<member name="a" type="u8"/>
</struct>
</x>
""")
open("patch", "w").write("""\
B insert 999 b A
B dynamic b a
""")
ret, out, err = call(["--isar", "--patch",
os.path.join(str(tmpdir_cwd), "patch"),
"--python_out",
str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert empty_python_output + """\
class A(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
_descriptor = [('a', prophy.u8)]
class B(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
_descriptor = [('a', prophy.u8),
('b', prophy.array(A, bound = 'a'))]
""" == open("input.py").read()
def test_isar_cpp(tmpdir_cwd):
open("input.xml", "w").write("""
<xml>
<struct name="Test">
<member name="x" type="u32">
<dimension isVariableSize="true"/>
</member>
</struct>
</xml>
""")
ret, out, err = call(["--isar",
"--cpp_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert """\
struct Test
{
uint32_t x_len;
uint32_t x[1]; /// dynamic array, size in x_len
};
""" in open("input.pp.hpp").read()
assert """\
template <>
Test* swap<Test>(Test* payload)
{
swap(&payload->x_len);
return cast<Test*>(swap_n_fixed(payload->x, payload->x_len));
}
""" in open("input.pp.cpp").read()
def test_isar_warnings(tmpdir_cwd):
open("input.xml", "w").write("""
<xml>
<system xmlns:xi="http://www.xyz.com/1984/XInclude">
<xi:include href="include.xml"/>
</system>
</xml>
""")
ret, out, err = call(["--isar",
"--python_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert tr(err) == b"prophyc: warning: file include.xml not found\n"
def test_quiet_warnings(tmpdir_cwd):
open("input.xml", "w").write("""
<xml>
<system xmlns:xi="http://www.xyz.com/1984/XInclude">
<xi:include href="include.xml"/>
</system>
</xml>
""")
ret, out, err = call(["--isar",
"--quiet",
"--python_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert tr(err) == b""
def test_isar_with_includes(tmpdir_cwd):
open("input.xml", "w").write("""
<xml>
<system xmlns:xi="http://www.xyz.com/1984/XInclude">
<xi:include href="helper.xml"/>
</system>
<struct name="X">
<member name="a" type="Y"/>
</struct>
</xml>
""")
open("helper.xml", "w").write("""
<xml>
<struct name="Y">
<member name="a" type="u64"/>
</struct>
</xml>
""")
ret, out, err = call(["--isar",
"-I", str(tmpdir_cwd),
"--cpp_full_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert """\
struct X : public prophy::detail::message<X>
{
enum { encoded_byte_size = 8 };
Y a;
X() { }
X(const Y& _1): a(_1) { }
size_t get_byte_size() const
{
return 8;
}
};
""" in open("input.ppf.hpp").read()
@pytest.clang_installed
def test_sack_compiles_single_empty_hpp(tmpdir_cwd):
open("input.hpp", "w").write("")
ret, out, err = call(["--sack", "--python_out",
str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.hpp")])
assert ret == 0
assert out == b""
assert err == b""
assert empty_python_output == open("input.py").read()
@pytest.clang_installed
def test_sack_patch(tmpdir_cwd):
open("input.hpp", "w").write("""\
struct X
{
int x;
};
""")
open("patch", "w").write("""\
X type x r64
""")
ret, out, err = call(["--sack", "--patch",
os.path.join(str(tmpdir_cwd), "patch"),
"--python_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.hpp")])
assert ret == 0
assert out == b""
assert err == b""
assert empty_python_output + """\
class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
_descriptor = [('x', prophy.r64)]
""" == open("input.py").read()
@pytest.clang_installed
def test_multiple_outputs(tmpdir_cwd):
open("input.xml", "w").write("""
<xml>
<struct name="Test">
<member name="x" type="u32"/>
</struct>
</xml>
""")
ret, out, err = call(["--isar",
"--python_out", str(tmpdir_cwd),
"--cpp_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 0
assert out == b""
assert err == b""
assert open("input.py").read() == """\
import prophy
class Test(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
_descriptor = [('x', prophy.u32)]
"""
assert open("input.pp.hpp").read() == """\
struct Test
{
uint32_t x;
};
namespace prophy
{
template <> Test* swap<Test>(Test*);
} // namespace prophy
"""
assert open("input.pp.cpp").read() == """\
using namespace prophy::detail;
namespace prophy
{
template <>
Test* swap<Test>(Test* payload)
{
swap(&payload->x);
return payload + 1;
}
} // namespace prophy
"""
@pytest.clang_not_installed
def test_clang_not_installed(tmpdir_cwd):
open("input.hpp", "w").write("")
ret, out, err = call(["--sack",
"--python_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.hpp")])
assert ret == 1
assert out == b""
assert tr(err) == b"prophyc: error: sack input requires clang and it's not installed\n"
def test_prophy_language(tmpdir_cwd):
open("input.prophy", "w").write("""\
struct X
{
u32 x[5];
u64 y<2>;
};
union U
{
1: X x;
2: u32 y;
};
""")
ret, out, err = call(["--python_out", str(tmpdir_cwd),
"--cpp_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.prophy")])
assert ret == 0
assert out == b""
assert err == b""
assert open("input.py").read() == """\
import prophy
class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
_descriptor = [('x', prophy.array(prophy.u32, size = 5)),
('num_of_y', prophy.u32),
('y', prophy.array(prophy.u64, bound = 'num_of_y', size = 2))]
class U(prophy.with_metaclass(prophy.union_generator, prophy.union)):
_descriptor = [('x', X, 1),
('y', prophy.u32, 2)]
"""
assert open("input.pp.hpp").read() == """\
struct X
{
uint32_t x[5];
uint32_t num_of_y;
uint64_t y[2]; /// limited array, size in num_of_y
};
struct U
{
enum _discriminator
{
discriminator_x = 1,
discriminator_y = 2
} discriminator;
union
{
X x;
uint32_t y;
};
};
namespace prophy
{
template <> X* swap<X>(X*);
template <> U* swap<U>(U*);
} // namespace prophy
"""
assert open("input.pp.cpp").read() == """\
using namespace prophy::detail;
namespace prophy
{
template <>
X* swap<X>(X* payload)
{
swap_n_fixed(payload->x, 5);
swap(&payload->num_of_y);
swap_n_fixed(payload->y, payload->num_of_y);
return payload + 1;
}
template <>
U* swap<U>(U* payload)
{
swap(reinterpret_cast<uint32_t*>(&payload->discriminator));
switch (payload->discriminator)
{
case U::discriminator_x: swap(&payload->x); break;
case U::discriminator_y: swap(&payload->y); break;
default: break;
}
return payload + 1;
}
} // namespace prophy
"""
def test_prophy_parse_errors(tmpdir_cwd):
open("input.prophy", "w").write("""\
struct X {};
union Y {};
constant
""")
ret, out, err = call(["--python_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.prophy")])
assert ret == 1
assert out == b""
errlines = tr(err).splitlines()
assert len(errlines) == 2
assert errlines[0].endswith(b"input.prophy:1:11: error: syntax error at '}'")
assert errlines[1].endswith(b"input.prophy:2:10: error: syntax error at '}'")
assert not os.path.exists("input.py")
@pytest.clang_installed
def test_sack_parse_warnings(tmpdir_cwd):
open("input.cpp", "w").write("""\
int foo() { int x; }
rubbish;
""")
ret, out, err = call(['--python_out', str(tmpdir_cwd), '--sack',
os.path.join(str(tmpdir_cwd), 'input.cpp')])
assert ret == 0
assert out == b""
errlines = tr(err).splitlines()
assert len(errlines) == 2
assert b'input.cpp:1:20: warning: control reaches end of non-void function' in errlines[0]
assert b'input.cpp:2:1: warning: C++ requires a type specifier for all declarations' in errlines[1]
assert os.path.exists("input.py")
@pytest.clang_installed
def test_sack_parse_errors(tmpdir_cwd):
open("input.unknown", "w").write("")
ret, out, err = call(['--python_out', str(tmpdir_cwd), '--sack',
os.path.join(str(tmpdir_cwd), 'input.unknown')])
assert ret == 1
assert out == b""
assert b'input.unknown: error: error parsing translation unit' in tr(err)
assert not os.path.exists("input.py")
def test_cpp_full_out(tmpdir_cwd):
open("input.prophy", "w").write("""
typedef i16 TP;
const MAX = 4;
struct X {
u32 x;
TP y<MAX>;
};
""")
ret, out, err = call(["--cpp_full_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.prophy")])
assert ret == 0
assert out == b""
assert err == b""
assert open("input.ppf.hpp").read() == """\
namespace prophy
{
namespace generated
{
typedef int16_t TP;
enum { MAX = 4u };
struct X : public prophy::detail::message<X>
{
enum { encoded_byte_size = 16 };
uint32_t x;
std::vector<TP> y; /// limit 4
X(): x() { }
X(uint32_t _1, const std::vector<TP>& _2): x(_1), y(_2) { }
size_t get_byte_size() const
{
return 16;
}
};
} // namespace generated
} // namespace prophy
"""
assert open("input.ppf.cpp").read() == """\
using namespace prophy::generated;
namespace prophy
{
namespace detail
{
template <>
template <endianness E>
uint8_t* message_impl<X>::encode(const X& x, uint8_t* pos)
{
pos = do_encode<E>(pos, x.x);
pos = do_encode<E>(pos, uint32_t(std::min(x.y.size(), size_t(4))));
do_encode<E>(pos, x.y.data(), uint32_t(std::min(x.y.size(), size_t(4))));
pos = pos + 8;
return pos;
}
template uint8_t* message_impl<X>::encode<native>(const X& x, uint8_t* pos);
template uint8_t* message_impl<X>::encode<little>(const X& x, uint8_t* pos);
template uint8_t* message_impl<X>::encode<big>(const X& x, uint8_t* pos);
template <>
template <endianness E>
bool message_impl<X>::decode(X& x, const uint8_t*& pos, const uint8_t* end)
{
return (
do_decode<E>(x.x, pos, end) &&
do_decode_resize<E, uint32_t>(x.y, pos, end, 4) &&
do_decode_in_place<E>(x.y.data(), x.y.size(), pos, end) &&
do_decode_advance(8, pos, end)
);
}
template bool message_impl<X>::decode<native>(X& x, const uint8_t*& pos, const uint8_t* end);
template bool message_impl<X>::decode<little>(X& x, const uint8_t*& pos, const uint8_t* end);
template bool message_impl<X>::decode<big>(X& x, const uint8_t*& pos, const uint8_t* end);
template <>
void message_impl<X>::print(const X& x, std::ostream& out, size_t indent)
{
do_print(out, indent, "x", x.x);
do_print(out, indent, "y", x.y.data(), std::min(x.y.size(), size_t(4)));
}
template void message_impl<X>::print(const X& x, std::ostream& out, size_t indent);
} // namespace detail
} // namespace prophy
"""
def test_cpp_full_out_error(tmpdir_cwd):
open("input.xml", "w").write("""
<xml>
<struct name="Test">
<member name="x" type="Unknown">
<dimension isVariableSize="true"/>
</member>
</struct>
</xml>
""")
ret, out, err = call(["--isar", "--cpp_full_out", str(tmpdir_cwd),
os.path.join(str(tmpdir_cwd), "input.xml")])
assert ret == 1
assert out == b""
assert tr(err) == (b"prophyc: warning: type 'Unknown' not found\n"
b"prophyc: error: Test byte size unknown\n")
|
from __future__ import absolute_import, unicode_literals, print_function, division
import re
import sublime_plugin
import sublime
from collections import defaultdict
import tempfile
import binascii
try:
from .sublimerepl import manager, SETTINGS_FILE
except (ImportError, ValueError):
from sublimerepl import manager, SETTINGS_FILE
def default_sender(repl, text, view=None):
repl.write(text)
"""Senders is a dict of functions used to transfer text to repl as a repl
specific load_file action"""
SENDERS = defaultdict(lambda: default_sender)
def sender(external_id):
    def wrap(func):
        SENDERS[external_id] = func
        return func
    return wrap
@sender("coffee")
def coffee(repl, text, view=None):
"""
use CoffeeScript multiline hack
http://coffeescript.org/documentation/docs/repl.html
"""
default_sender(repl, text.replace("\n", u'\uFF00') + "\n", view)
@sender("python")
def python_sender(repl, text, view=None):
text_wo_encoding = re.sub(
pattern=r"#.*coding[:=]\s*([-\w.]+)",
repl="# <SublimeREPL: encoding comment removed>",
string=text,
count=1)
code = binascii.hexlify(text_wo_encoding.encode("utf-8"))
execute = ''.join([
'from binascii import unhexlify as __un; exec(compile(__un("',
str(code.decode('ascii')),
'").decode("utf-8"), "<string>", "exec"))\n'
])
return default_sender(repl, execute, view)
@sender("ruby")
def ruby_sender(repl, text, view=None):
code = binascii.b2a_base64(text.encode("utf-8"))
payload = "begin require 'base64'; eval(Base64.decode64('%s'), binding=TOPLEVEL_BINDING) end\n" % (code.decode("ascii"),)
return default_sender(repl, payload, view)
@sender("clojure")
def clojure_sender(repl, text, view):
# call (load-string) instead of just writing the string so
# that syntax errors are caught and thrown back immediately
text = '(load-string "' + text.strip().replace('"', r'\"') + '")'
# find the first non-commented statement from the start of the file
namespacedecl = view.find(r"^[^;]*?\(", 0)
# if it's a namespace declaration, go search for the namespace name
if namespacedecl and view.scope_name(namespacedecl.end()-1).startswith("source.clojure meta.function.namespace.clojure"):
namespacedecl = view.extract_scope(namespacedecl.end()-1)
# we're looking for the first symbol within the declaration that
# looks like a namespace and isn't metadata, a comment, etc.
pos = namespacedecl.begin() + 3
while pos < namespacedecl.end():
# see http://clojure.org/reader for a description of valid
            # namespace names. the initial } or whitespace makes sure we're
# not matching on keywords etc.
namespace = view.find(r"[\}\s][A-Za-z\_!\?\*\+\-][\w!\?\*\+\-:]*(\.[\w!\?\*\+\-:]+)*", pos)
if not namespace:
# couldn't find the namespace name within the declaration. suspicious.
break
elif view.scope_name(namespace.begin() + 1).startswith("source.clojure meta.function.namespace.clojure entity.name.namespace.clojure"):
# looks alright, we've got our namespace!
# switch to namespace before executing command
# we could do this explicitly by calling (ns), (in-ns) etc:
# text = "(ns " + view.substr(namespace)[1:] + ") " + text
# but this would not only result in an extra return value
# printed to the user, the repl would also remain in that
# namespace after execution, so instead we do the same thing
# that swank-clojure does:
text = "(binding [*ns* (or (find-ns '" + view.substr(namespace)[1:] + ") (find-ns 'user))] " + text + ')'
# i.e. we temporarily switch to the namespace if it has already
# been created, otherwise we execute it in 'user. the most
# elegant option for this would probably be:
# text = "(binding [*ns* (create-ns '" + view.substr(namespace)[1:] + ")] " + text + ')'
# but this can lead to problems because of newly created
# namespaces not automatically referring to clojure.core
# (see https://groups.google.com/forum/?fromgroups=#!topic/clojure/Th-Bqq68hfo)
break
else:
# false alarm (metadata or a comment), keep looking
pos = namespace.end()
return default_sender(repl, text + repl.cmd_postfix, view)
class ReplViewWrite(sublime_plugin.TextCommand):
def run(self, edit, external_id, text):
for rv in manager.find_repl(external_id):
rv.append_input_text(text)
break # send to first repl found
else:
sublime.error_message("Cannot find REPL for '{0}'".format(external_id))
class ReplSend(sublime_plugin.TextCommand):
def run(self, edit, external_id, text, with_auto_postfix=True):
for rv in manager.find_repl(external_id):
if with_auto_postfix:
text += rv.repl.cmd_postfix
if sublime.load_settings(SETTINGS_FILE).get('show_transferred_text'):
rv.append_input_text(text)
rv.adjust_end()
SENDERS[external_id](rv.repl, text, self.view)
break
else:
sublime.error_message("Cannot find REPL for '{}'".format(external_id))
class ReplTransferCurrent(sublime_plugin.TextCommand):
def run(self, edit, scope="selection", action="send"):
text = ""
if scope == "selection":
text = self.selected_text()
elif scope == "lines":
text = self.selected_lines()
elif scope == "function":
text = self.selected_functions()
elif scope == "block":
text = self.selected_blocks()
elif scope == "file":
text = self.selected_file()
cmd = "repl_" + action
self.view.window().run_command(cmd, {"external_id": self.repl_external_id(), "text": text})
def repl_external_id(self):
return self.view.scope_name(0).split(" ")[0].split(".", 1)[1]
def selected_text(self):
v = self.view
parts = [v.substr(region) for region in v.sel()]
return "".join(parts)
def selected_blocks(self):
# TODO: Clojure only for now
v = self.view
strs = []
old_sel = list(v.sel())
v.run_command("expand_selection", {"to": "brackets"})
v.run_command("expand_selection", {"to": "brackets"})
for s in v.sel():
strs.append(v.substr(s))
v.sel().clear()
for s in old_sel:
v.sel().add(s)
return "\n\n".join(strs)
def selected_lines(self):
v = self.view
parts = []
for sel in v.sel():
for line in v.lines(sel):
parts.append(v.substr(line))
return "\n".join(parts)
def selected_file(self):
v = self.view
return v.substr(sublime.Region(0, v.size()))
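# Illustrative example (an assumption, not part of SublimeREPL): a custom
# sender for a hypothetical "lua" REPL can be registered through the
# @sender decorator defined above; unregistered ids fall back to
# default_sender via the defaultdict.
@sender("lua")
def lua_sender(repl, text, view=None):
    # Forward the text, ensuring it ends with a newline so the REPL
    # evaluates the chunk.
    default_sender(repl, text if text.endswith("\n") else text + "\n", view)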
|
import sys
import os
import hyperspy.api as hs
import numpy as np
import matplotlib.pyplot as plt
from scipy import signal
from keras.models import load_model
from scipy import optimize, misc
crop_min = 600.
crop_max = 680.
dispersion = 0.4
spectra_path = '/home/mike/Mn_Valences/Test_Spectra/NMC_531_Z1/ICA-Triple-GB_Segr.msa'
model = load_model('/home/mike/Mn_Valences/Mn_Classifier_Good_Copy/weights/CNN_Noise_DataAug/highest_val_acc_weights_epoch98-val_acc0.994_cnn.h5')
def test_spectra(argv):
unknown_spectra = load_spectra(spectra_path)
unknown_spectra = subtract_background(unknown_spectra, crop_min, crop_max, indices_before_onset=60)
resampled_spectra = preprocess_spectra(unknown_spectra, 620, 660, crop_min, crop_max, dispersion)
predict_spectra(resampled_spectra)
def load_spectra(spectra_path):
s = hs.load(spectra_path)
s.crop(-1,crop_min, crop_max)
s = np.array(s)
return s
def PowerLaw(p, x):  # power-law background model: p[0] * x**p[1]
    return p[0]*x**(p[1])
def subtract_background(unknown_spectra, crop_min, crop_max, indices_before_onset=60):
x = np.linspace(crop_min, crop_max, 200)
errfunc = lambda p, x, unknown_spectra : PowerLaw(p, x) - unknown_spectra # Distance to the target function
p0 = [ -0.4, 0.15]
p1, success = optimize.leastsq(errfunc, p0, args=(x[0:indices_before_onset], ( np.array(unknown_spectra[0:indices_before_onset]) )), maxfev=500000)
unknown_spectra -= PowerLaw(p1, x)
return unknown_spectra
def preprocess_spectra(unknown_spectra, e_min, e_max, crop_min, crop_max, dispersion):
    unknown_spectra = (unknown_spectra[(int((e_min-crop_min)/dispersion)):(len(unknown_spectra) - (int((crop_max-e_max)/dispersion)))])
unknown_spectra = np.array(unknown_spectra).astype('float32')
unknown_spectra = unknown_spectra.reshape(unknown_spectra.shape + (1,))
unknown_spectra -= np.mean(unknown_spectra)
unknown_spectra /= np.max(unknown_spectra)
f = signal.resample(unknown_spectra, 500)
return f
def predict_spectra(f):
label = ['Mn2+', 'Mn3+', 'Mn4+']
pred = model.predict(f.reshape(1,500,1))
print("Class probabilities: ", pred)
print("The predicted valence is: ", label[np.argmax(pred)])
if __name__ == "__main__":
test_spectra(sys.argv)
|
"""
BitBake 'Fetch' implementations
Classes for obtaining upstream sources for the
BitBake build tools.
"""
from future_builtins import zip
import os
import subprocess
import logging
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
class Perforce(FetchMethod):
def supports(self, ud, d):
return ud.type in ['p4']
def doparse(url, d):
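        """Parse a p4 URL into (host, path, user, pswd, parm).

        As written below, the expected form is
        p4://user:pswd:host:port@depot/path[;key=value...]; when no
        credentials are embedded, host and port come from P4PORT.
        """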
parm = {}
path = url.split("://")[1]
delim = path.find("@");
if delim != -1:
(user, pswd, host, port) = path.split('@')[0].split(":")
path = path.split('@')[1]
else:
(host, port) = d.getVar('P4PORT').split(':')
user = ""
pswd = ""
if path.find(";") != -1:
keys=[]
values=[]
plist = path.split(';')
for item in plist:
if item.count('='):
(key, value) = item.split('=')
keys.append(key)
values.append(value)
parm = dict(zip(keys, values))
path = "//" + path.split(';')[0]
host += ":%s" % (port)
parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
return host, path, user, pswd, parm
doparse = staticmethod(doparse)
def getcset(d, depot, host, user, pswd, parm):
p4opt = ""
if "cset" in parm:
return parm["cset"];
if user:
p4opt += " -u %s" % (user)
if pswd:
p4opt += " -P %s" % (pswd)
if host:
p4opt += " -p %s" % (host)
p4date = d.getVar("P4DATE", True)
if "revision" in parm:
depot += "#%s" % (parm["revision"])
elif "label" in parm:
depot += "@%s" % (parm["label"])
elif p4date:
depot += "@%s" % (p4date)
p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.strip()
logger.debug(1, "READ %s", cset)
if not cset:
return -1
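        # 'p4 changes -m 1' prints a line like "Change 1234 on <date> ...",
        # so the changeset number is the second whitespace-separated token.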
return cset.split(' ')[1]
getcset = staticmethod(getcset)
def urldata_init(self, ud, d):
(host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
# If a label is specified, we use that as our filename
if "label" in parm:
ud.localfile = "%s.tar.gz" % (parm["label"])
return
base = path
which = path.find('/...')
if which != -1:
base = path[:which-1]
base = self._strip_leading_slashes(base)
cset = Perforce.getcset(d, path, host, user, pswd, parm)
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
def download(self, ud, d):
"""
Fetch urls
"""
(host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
if depot.find('/...') != -1:
path = depot[:depot.find('/...')]
else:
path = depot
module = parm.get('module', os.path.basename(path))
# Get the p4 command
p4opt = ""
if user:
p4opt += " -u %s" % (user)
if pswd:
p4opt += " -P %s" % (pswd)
if host:
p4opt += " -p %s" % (host)
p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
# create temp directory
logger.debug(2, "Fetch: creating temporary directory")
bb.utils.mkdirhier(d.expand('${WORKDIR}'))
mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
tmpfile, errors = bb.process.run(mktemp)
tmpfile = tmpfile.strip()
if not tmpfile:
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
if "label" in parm:
depot = "%s@%s" % (depot, parm["label"])
else:
cset = Perforce.getcset(d, depot, host, user, pswd, parm)
depot = "%s@%s" % (depot, cset)
os.chdir(tmpfile)
logger.info("Fetch " + ud.url)
logger.info("%s%s files %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
p4file = [f.rstrip() for f in p4file.splitlines()]
if not p4file:
raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
count = 0
        for p4f in p4file:
            fields = p4f.split()
            if fields[2] == "delete":
                continue
            dest = fields[0][len(path)+1:]
            where = dest.find("#")
            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
            count = count + 1
if count == 0:
            logger.error("Fetch: no files gathered from the P4 fetch")
raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
# cleanup
bb.utils.prunedir(tmpfile)
|
from flask import Flask, Response, request
import flask
import argparse
import requests
from flask_cors import CORS
from security import logged_in_route
parser = argparse.ArgumentParser(description='Interface Backend')
parser.add_argument("-p", "--port", type=int, help="the port to run server on", default=80)
parser.add_argument("-s", "--schedule", type=str, help="the url of the schedule service", default="http://localhost:5003")
args = parser.parse_args()
app = Flask(__name__)
CORS(app)
@app.route("/schedule/weekly/")
@logged_in_route
def get_weekly_schedule():
response = requests.get(args.schedule + "/schedule/weekly/")
return Response(response.text, mimetype=response.headers["Content-Type"])
@app.route("/schedule/weekly/", methods=["DELETE"])
@logged_in_route
def delete_weekly_schedule():
response = requests.delete(args.schedule + "/schedule/weekly/", data=request.get_data(), headers=request.headers)
return Response(response.text, mimetype=response.headers["Content-Type"])
@app.route("/schedule/weekly/", methods=["POST"])
@logged_in_route
def post_weekly_schedule():
response = requests.post(args.schedule + "/schedule/weekly/", data=request.get_data(), headers=request.headers)
return Response(response.text, mimetype=response.headers["Content-Type"])
@app.route("/schedule/")
@logged_in_route
def get_schedule():
response = requests.get(args.schedule + "/schedule/")
return Response(response.text, mimetype=response.headers["Content-Type"])
@app.route("/schedule/", methods=["DELETE"])
@logged_in_route
def delete_schedule():
response = requests.delete(args.schedule + "/schedule/", data=request.get_data(), headers=request.headers)
return Response(response.text, mimetype=response.headers["Content-Type"])
@app.route("/schedule/", methods=["POST"])
@logged_in_route
def post_schedule():
response = requests.post(args.schedule + "/schedule/", data=request.get_data(), headers=request.headers)
return Response(response.text, mimetype=response.headers["Content-Type"])
@app.route("/user/")
@logged_in_route
def valid_user():
return flask.jsonify({"logged_in": True})
if __name__ == "__main__":
app.run(debug=True, port=args.port, host='0.0.0.0')
|
import _plotly_utils.basevalidators
class R0Validator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="r0", parent_name="barpolar", **kwargs):
super(R0Validator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc+clearAxisTypes"),
**kwargs
)
|
from django.conf.urls import url
from django.views.generic import TemplateView
from interface import views
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='home.html'), name='home'),
url(r'^orgs$', views.OrganizationListView.as_view(), name='organization-list'),
url(r'^projects$', views.ProjectListView.as_view(), name='project-list'),
url(r'^projects/new$', views.ProjectCreateView.as_view(), name='new-project'),
url(r'^projects/(?P<pk>[0-9]+)$', views.ProjectUpdateView.as_view(), name='edit-project'),
url(r'^projects/(?P<pk>[0-9]+)/delete$', views.ProjectDeleteView.as_view(), name='delete-project'),
url(r'^projects/(?P<pk>[0-9]+)/merge$', views.ProjectMergeView.as_view(), name='merge-project'),
url(r'^logout$', views.logout_user, name='logout'),
url(r'^webhook$', views.PullRequestWebhook.as_view(), name='webhook'),
url(r'^api/repos/(?P<pk>[0-9]+)/pull_requests$', views.get_repo_pull_request_options),
url(r'^api/pull_requests/(?P<pk>[0-9]+)/template', views.get_pull_request_template),
url(r'^api/set_organization', views.update_session_organization, name='update-session-organization'),
]
|
import os
from os import path as osp
DATA_DIR = osp.realpath(osp.join(osp.dirname(__file__), '../../data/'))
def reference_data_path(sub_path):
return osp.join(DATA_DIR, sub_path)
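# Example (illustrative): reference_data_path("images/example.png") yields
# the absolute path <project>/data/images/example.png.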
|
import numpy as np
import scipy.ndimage as ndi
import scipy.sparse as sp
from discretize.utils.code_utils import is_scalar
from scipy.spatial import cKDTree, Delaunay
from scipy import interpolate
import discretize
from discretize.utils.code_utils import deprecate_function
import warnings
num_types = [int, float]
def random_model(shape, seed=None, anisotropy=None, its=100, bounds=None):
"""Create random tensor model.
Creates a random tensor model by convolving a kernel function with a
uniformly distributed model. The user specifies the number of cells
along the x, (y and z) directions with the input argument *shape* and
the function outputs a tensor model with the same shape. Afterwards,
the user may use the :py:func:`~discretize.utils.mkvc` function
to convert the tensor to a vector which can be plotting on a
corresponding tensor mesh.
Parameters
----------
shape : (dim) tuple of int
shape of the model.
seed : int, optional
        seed for the random model; if not given, a random seed is chosen and printed
anisotropy : numpy.ndarray, optional
this is the kernel that is convolved with the model
its : int, optional
number of smoothing iterations
bounds : list, optional
Lower and upper bounds on the model. Has the form [lower_bound, upper_bound].
Returns
-------
numpy.ndarray
A random generated model whose shape was specified by the input parameter *shape*
Examples
--------
Here, we generate a random model for a 2D tensor mesh and plot.
>>> from discretize import TensorMesh
>>> from discretize.utils import random_model, mkvc
>>> import matplotlib as mpl
>>> import matplotlib.pyplot as plt
>>> h = [(1., 50)]
>>> vmin, vmax = 0., 1.
>>> mesh = TensorMesh([h, h])
>>> model = random_model(mesh.shape_cells, seed=4, bounds=[vmin, vmax])
>>> fig = plt.figure(figsize=(5, 4))
>>> ax = plt.subplot(111)
>>> im, = mesh.plot_image(model, grid=False, ax=ax, clim=[vmin, vmax])
>>> cbar = plt.colorbar(im)
>>> ax.set_title('Random Tensor Model')
>>> plt.show()
"""
if bounds is None:
bounds = [0, 1]
if seed is None:
seed = np.random.randint(1e3)
print("Using a seed of: ", seed)
if type(shape) in num_types:
shape = (shape,) # make it a tuple for consistency
np.random.seed(seed)
mr = np.random.rand(*shape)
if anisotropy is None:
if len(shape) == 1:
smth = np.array([1, 10.0, 1], dtype=float)
elif len(shape) == 2:
smth = np.array([[1, 7, 1], [2, 10, 2], [1, 7, 1]], dtype=float)
elif len(shape) == 3:
            kernel = np.array([1, 4, 1], dtype=float).reshape((1, 3))
            smth = np.array(
                sp.kron(sp.kron(kernel, kernel.T).todense()[:], kernel).todense()
            ).reshape((3, 3, 3))
else:
if len(anisotropy.shape) != len(shape):
raise ValueError("Anisotropy must be the same shape.")
smth = np.array(anisotropy, dtype=float)
smth = smth / smth.sum() # normalize
mi = mr
for i in range(its):
mi = ndi.convolve(mi, smth)
# scale the model to live between the bounds.
mi = (mi - mi.min()) / (mi.max() - mi.min()) # scaled between 0 and 1
mi = mi * (bounds[1] - bounds[0]) + bounds[0]
return mi
def unpack_widths(value):
"""Unpack a condensed representation of cell widths or time steps.
For a list of numbers, if the same value is repeat or expanded by a constant
factor, it may be represented in a condensed form using list of floats
and/or tuples. **unpack_widths** takes a list of floats and/or tuples in
condensed form, e.g.:
[ float, (cellSize, numCell), (cellSize, numCell, factor) ]
and expands the representation to a list containing all widths in order. That is:
[ w1, w2, w3, ..., wn ]
Parameters
----------
value : list of float and/or tuple
The list of floats and/or tuples that are to be unpacked
Returns
-------
numpy.ndarray
The unpacked list with all widths in order
Examples
--------
Time stepping for time-domain codes can be represented in condensed form, e.g.:
>>> from discretize.utils import unpack_widths
>>> dt = [ (1e-5, 10), (1e-4, 4), 1e-3 ]
The above means to take 10 steps at a step width of 1e-5 s and then
4 more at 1e-4 s, and then one step of 1e-3 s. When unpacked, the output is
of length 15 and is given by:
>>> unpack_widths(dt)
array([1.e-05, 1.e-05, 1.e-05, 1.e-05, 1.e-05, 1.e-05, 1.e-05, 1.e-05,
1.e-05, 1.e-05, 1.e-04, 1.e-04, 1.e-04, 1.e-04, 1.e-03])
Each axis of a tensor mesh can also be defined as a condensed list of floats
and/or tuples. When a third number is defined in any tuple, the width value
is successively expanded by that factor, e.g.:
>>> dt = [ 6., 8., (10.0, 3), (8.0, 4, 2.) ]
>>> unpack_widths(dt)
array([ 6., 8., 10., 10., 10., 16., 32., 64., 128.])
"""
if type(value) is not list:
raise Exception("unpack_widths must be a list of scalars and tuples.")
proposed = []
for v in value:
if is_scalar(v):
proposed += [float(v)]
elif type(v) is tuple and len(v) == 2:
proposed += [float(v[0])] * int(v[1])
elif type(v) is tuple and len(v) == 3:
start = float(v[0])
num = int(v[1])
factor = float(v[2])
pad = ((np.ones(num) * np.abs(factor)) ** (np.arange(num) + 1)) * start
if factor < 0:
pad = pad[::-1]
proposed += pad.tolist()
else:
raise Exception(
"unpack_widths must contain only scalars and len(2) or len(3) tuples."
)
return np.array(proposed)
def closest_points_index(mesh, pts, grid_loc="CC", **kwargs):
"""Find the indicies for the nearest grid location for a set of points.
Parameters
----------
mesh : discretize.base.BaseMesh
An instance of *discretize.base.BaseMesh*
pts : (n, dim) numpy.ndarray
Points to query.
    grid_loc : {'CC', 'N', 'Fx', 'Fy', 'Fz', 'Ex', 'Ey', 'Ez'}
        Specifies the grid to which the points are matched.
Returns
-------
    (n, ) numpy.ndarray of int
        Vector of length *n* containing the indices of the closest
        respective cell center, node, face or edge.
Examples
--------
Here we define a set of random (x, y) locations and find the closest
cell centers and nodes on a mesh.
>>> from discretize import TensorMesh
>>> from discretize.utils import closest_points_index
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> h = 2*np.ones(5)
>>> mesh = TensorMesh([h, h], x0='00')
Define some random locations, grid cell centers and grid nodes,
>>> xy_random = np.random.uniform(0, 10, size=(4,2))
>>> xy_centers = mesh.cell_centers
>>> xy_nodes = mesh.nodes
    Find the indices of the closest cell centers and nodes,
>>> ind_centers = closest_points_index(mesh, xy_random, 'CC')
>>> ind_nodes = closest_points_index(mesh, xy_random, 'N')
Plot closest cell centers and nodes
>>> fig = plt.figure(figsize=(5, 5))
>>> ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
>>> mesh.plot_grid(ax=ax)
>>> ax.scatter(xy_random[:, 0], xy_random[:, 1], 50, 'k')
>>> ax.scatter(xy_centers[ind_centers, 0], xy_centers[ind_centers, 1], 50, 'r')
>>> ax.scatter(xy_nodes[ind_nodes, 0], xy_nodes[ind_nodes, 1], 50, 'b')
>>> plt.show()
"""
if "gridLoc" in kwargs:
warnings.warn(
"The gridLoc keyword argument has been deprecated, please use grid_loc. "
"This will be removed in discretize 1.0.0",
DeprecationWarning,
)
grid_loc = kwargs["gridLoc"]
warnings.warn(
"The closest_points_index utilty function has been moved to be a method of "
"a class object. Please access it as mesh.closest_points_index(). This will "
"be removed in a future version of discretize",
DeprecationWarning,
)
return mesh.closest_points_index(pts, grid_loc=grid_loc, discard=True)
def extract_core_mesh(xyzlim, mesh, mesh_type="tensor"):
"""Extracts the core mesh from a global mesh.
Parameters
----------
xyzlim : (dim, 2) numpy.ndarray
2D array defining the x, y and z cutoffs for the core mesh region. Each
row contains the minimum and maximum limit for the x, y and z axis,
respectively.
mesh : discretize.TensorMesh
The mesh
mesh_type : str, optional
Unused currently
Returns
-------
tuple: (**active_index**, **core_mesh**)
        **active_index** is a boolean array that maps from the global mesh
        to the core mesh. **core_mesh** is a *discretize.base.BaseMesh* object representing
the core mesh.
Examples
--------
Here, we define a 2D tensor mesh that has both a core region and padding.
We use the function **extract_core_mesh** to return a mesh which contains
only the core region.
>>> from discretize.utils import extract_core_mesh
>>> from discretize import TensorMesh
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> import matplotlib as mpl
>>> mpl.rcParams.update({"font.size": 14})
Form a mesh of a uniform cube
>>> h = [(1., 5, -1.5), (1., 20), (1., 5, 1.5)]
>>> mesh = TensorMesh([h, h], origin='CC')
Plot original mesh
>>> fig = plt.figure(figsize=(7, 7))
>>> ax = fig.add_subplot(111)
>>> mesh.plot_grid(ax=ax)
>>> ax.set_title('Original Tensor Mesh')
>>> plt.show()
Set the limits for the cutoff of the core mesh (dim, 2)
>>> xlim = np.c_[-10., 10]
>>> ylim = np.c_[-10., 10]
>>> core_limits = np.r_[xlim, ylim]
Extract indices of core mesh cells and the core mesh, then plot
>>> core_ind, core_mesh = extract_core_mesh(core_limits, mesh)
>>> fig = plt.figure(figsize=(4, 4))
>>> ax = fig.add_subplot(111)
>>> core_mesh.plot_grid(ax=ax)
>>> ax.set_title('Core Mesh')
>>> plt.show()
"""
if not isinstance(mesh, discretize.TensorMesh):
raise Exception("Only implemented for class TensorMesh")
if mesh.dim == 1:
xyzlim = xyzlim.flatten()
xmin, xmax = xyzlim[0], xyzlim[1]
xind = np.logical_and(mesh.cell_centers_x > xmin, mesh.cell_centers_x < xmax)
xc = mesh.cell_centers_x[xind]
hx = mesh.h[0][xind]
origin = [xc[0] - hx[0] * 0.5]
meshCore = discretize.TensorMesh([hx], origin=origin)
actind = (mesh.cell_centers > xmin) & (mesh.cell_centers < xmax)
elif mesh.dim == 2:
xmin, xmax = xyzlim[0, 0], xyzlim[0, 1]
ymin, ymax = xyzlim[1, 0], xyzlim[1, 1]
xind = np.logical_and(mesh.cell_centers_x > xmin, mesh.cell_centers_x < xmax)
yind = np.logical_and(mesh.cell_centers_y > ymin, mesh.cell_centers_y < ymax)
xc = mesh.cell_centers_x[xind]
yc = mesh.cell_centers_y[yind]
hx = mesh.h[0][xind]
hy = mesh.h[1][yind]
origin = [xc[0] - hx[0] * 0.5, yc[0] - hy[0] * 0.5]
meshCore = discretize.TensorMesh([hx, hy], origin=origin)
actind = (
(mesh.cell_centers[:, 0] > xmin)
& (mesh.cell_centers[:, 0] < xmax)
& (mesh.cell_centers[:, 1] > ymin)
& (mesh.cell_centers[:, 1] < ymax)
)
elif mesh.dim == 3:
xmin, xmax = xyzlim[0, 0], xyzlim[0, 1]
ymin, ymax = xyzlim[1, 0], xyzlim[1, 1]
zmin, zmax = xyzlim[2, 0], xyzlim[2, 1]
xind = np.logical_and(mesh.cell_centers_x > xmin, mesh.cell_centers_x < xmax)
yind = np.logical_and(mesh.cell_centers_y > ymin, mesh.cell_centers_y < ymax)
zind = np.logical_and(mesh.cell_centers_z > zmin, mesh.cell_centers_z < zmax)
xc = mesh.cell_centers_x[xind]
yc = mesh.cell_centers_y[yind]
zc = mesh.cell_centers_z[zind]
hx = mesh.h[0][xind]
hy = mesh.h[1][yind]
hz = mesh.h[2][zind]
origin = [xc[0] - hx[0] * 0.5, yc[0] - hy[0] * 0.5, zc[0] - hz[0] * 0.5]
meshCore = discretize.TensorMesh([hx, hy, hz], origin=origin)
actind = (
(mesh.cell_centers[:, 0] > xmin)
& (mesh.cell_centers[:, 0] < xmax)
& (mesh.cell_centers[:, 1] > ymin)
& (mesh.cell_centers[:, 1] < ymax)
& (mesh.cell_centers[:, 2] > zmin)
& (mesh.cell_centers[:, 2] < zmax)
)
else:
raise Exception("Not implemented!")
return actind, meshCore
def mesh_builder_xyz(
xyz,
h,
padding_distance=[[0, 0], [0, 0], [0, 0]],
base_mesh=None,
depth_core=None,
expansion_factor=1.3,
mesh_type="tensor",
):
"""Generate a tensor or tree mesh using a cloud of points.
For a cloud of (x,y[,z]) locations and specified minimum cell widths
(hx,hy,[hz]), this function creates a tensor or a tree mesh.
    The lateral extent of the core region is determined by the cloud of points.
Other properties of the mesh can be defined automatically or by the user.
If *base_mesh* is an instance of :class:`~discretize.TensorMesh` or
:class:`~discretize.TreeMesh`, the core cells will be centered
on the underlying mesh to reduce interpolation errors.
Parameters
----------
xyz : (n, dim) numpy.ndarray
Location points
    h : (dim) list
Cell size(s) for the core mesh
padding_distance : list, optional
Padding distances [[W,E], [N,S], [Down,Up]]
base_mesh : discretize.TensorMesh or discretize.TreeMesh, optional
discretize mesh used to center the core mesh
depth_core : float, optional
Depth of core mesh below xyz
    expansion_factor : float, optional
Expansion factor for padding cells. Ignored if *mesh_type* = *tree*
mesh_type : {'tensor', 'tree'}
Specify output mesh type
Returns
--------
discretize.TensorMesh or discretize.TreeMesh
Mesh of type specified by *mesh_type*
Examples
--------
>>> import discretize
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> xy_loc = np.random.randn(8,2)
>>> mesh = discretize.utils.mesh_builder_xyz(
... xy_loc, [0.1, 0.1], depth_core=0.5,
... padding_distance=[[1,2], [1,0]],
... mesh_type='tensor',
... )
>>> axs = plt.subplot()
>>> mesh.plot_image(mesh.cell_volumes, grid=True, ax=axs)
>>> axs.scatter(xy_loc[:,0], xy_loc[:,1], 15, c='w', zorder=3)
>>> axs.set_aspect('equal')
>>> plt.show()
"""
if mesh_type.lower() not in ["tensor", "tree"]:
raise ValueError("Revise mesh_type. Only TENSOR | TREE mesh are implemented")
# Get extent of points
limits = []
center = []
nC = []
for dim in range(xyz.shape[1]):
max_min = np.r_[xyz[:, dim].max(), xyz[:, dim].min()]
limits += [max_min]
center += [np.mean(max_min)]
nC += [int((max_min[0] - max_min[1]) / h[dim])]
if depth_core is not None:
nC[-1] += int(depth_core / h[-1])
limits[-1][1] -= depth_core
if mesh_type.lower() == "tensor":
# Figure out padding cells from distance
def expand(dx, pad):
length = 0
nc = 0
while length < pad:
nc += 1
length = np.sum(dx * expansion_factor ** (np.asarray(range(nc)) + 1))
return nc
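        # expand(dx, pad) returns the smallest cell count nc such that the
        # geometric series dx*f + dx*f**2 + ... + dx*f**nc covers the padding
        # distance pad (with f = expansion_factor).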
# Define h along each dimension
h_dim = []
nC_origin = []
for dim in range(xyz.shape[1]):
h_dim += [
[
(
h[dim],
expand(h[dim], padding_distance[dim][0]),
-expansion_factor,
),
(h[dim], nC[dim]),
(
h[dim],
expand(h[dim], padding_distance[dim][1]),
expansion_factor,
),
]
]
nC_origin += [h_dim[-1][0][1]]
# Create mesh
mesh = discretize.TensorMesh(h_dim)
elif mesh_type.lower() == "tree":
# Figure out full extent required from input
h_dim = []
nC_origin = []
for ii, cc in enumerate(nC):
extent = limits[ii][0] - limits[ii][1] + np.sum(padding_distance[ii])
# Number of cells at the small octree level
maxLevel = int(np.log2(extent / h[ii])) + 1
h_dim += [np.ones(2 ** maxLevel) * h[ii]]
# Define the mesh and origin
mesh = discretize.TreeMesh(h_dim)
for ii, cc in enumerate(nC):
core = limits[ii][0] - limits[ii][1]
pad2 = int(np.log2(padding_distance[ii][0] / h[ii] + 1))
nC_origin += [int(np.ceil((mesh.h[ii].sum() - core) / h[ii] / 2))]
# Set origin
origin = []
for ii, hi in enumerate(mesh.h):
origin += [limits[ii][1] - np.sum(hi[: nC_origin[ii]])]
mesh.origin = np.hstack(origin)
# Shift mesh if global mesh is used based on closest to centroid
axis = ["x", "y", "z"]
if base_mesh is not None:
for dim in range(base_mesh.dim):
cc_base = getattr(
base_mesh,
"cell_centers_{orientation}".format(orientation=axis[dim]),
)
cc_local = getattr(
mesh, "cell_centers_{orientation}".format(orientation=axis[dim])
)
shift = (
cc_base[np.max([np.searchsorted(cc_base, center[dim]) - 1, 0])]
- cc_local[np.max([np.searchsorted(cc_local, center[dim]) - 1, 0])]
)
origin[dim] += shift
mesh.origin = np.hstack(origin)
return mesh
def refine_tree_xyz(
mesh,
xyz,
method="radial",
octree_levels=[1, 1, 1],
octree_levels_padding=None,
finalize=False,
min_level=0,
max_distance=np.inf,
):
"""Refine region within a :class:`~discretize.TreeMesh`
This function refines the specified region of a tree mesh using
one of several methods. These are summarized below:
**radial:** refines based on radial distances from a set of xy[z] locations.
Consider a tree mesh whose smallest cell size has a width of *h* . And
*octree_levels = [nc1, nc2, nc3, ...]* . Within a distance of *nc1 x h*
from any of the points supplied, the smallest cell size is used. Within a distance of
*nc2 x (2h)* , the cells will have a width of *2h* . Within a distance of *nc3 x (4h)* ,
the cells will have a width of *4h* . Etc...
**surface:** refines downward from a triangulated surface.
Consider a tree mesh whose smallest cell size has a width of *h*. And
*octree_levels = [nc1, nc2, nc3, ...]* . Within a downward distance of *nc1 x h*
from the topography (*xy[z]* ) supplied, the smallest cell size is used. The
topography is triangulated if the points supplied are coarser than the cell
size. No refinement is done above the topography. Within a vertical distance of
*nc2 x (2h)* , the cells will have a width of *2h* . Within a vertical distance
of *nc3 x (4h)* , the cells will have a width of *4h* . Etc...
**box:** refines inside the convex hull defined by the xy[z] locations.
Consider a tree mesh whose smallest cell size has a width of *h*. And
*octree_levels = [nc1, nc2, nc3, ...]* . Within the convex hull defined by *xyz* ,
the smallest cell size is used. Within a distance of *nc2 x (2h)* from that convex
hull, the cells will have a width of *2h* . Within a distance of *nc3 x (4h)* ,
the cells will have a width of *4h* . Etc...
Parameters
----------
mesh : discretize.TreeMesh
The tree mesh object to be refined
xyz : numpy.ndarray
2D array of points (n, dim)
method : {'radial', 'surface', 'box'}
Method used to refine the mesh based on xyz locations.
- *radial:* Based on radial distance xy[z] and cell centers
- *surface:* Refines downward from a triangulated surface
- *box:* Inside limits defined by outer xy[z] locations
octree_levels : list of int, optional
Minimum number of cells around points in each *k* octree level
starting from the smallest cells size; i.e. *[nc(k), nc(k-1), ...]* .
        Note that entries *can* be set to 0, e.g. if you do not want to refine
        using the smallest cell size.
octree_levels_padding : list of int, optional
Padding cells added to extend the region of refinement at each level.
Used for *method = surface* and *box*. Has the form *[nc(k), nc(k-1), ...]*
finalize : bool, optional
Finalize the tree mesh.
min_level : int, optional
Sets the largest cell size allowed in the mesh. The default (*0*),
allows the largest cell size to be used.
max_distance : float
Maximum refinement distance from xy[z] locations.
Used if *method* = "surface" to reduce interpolation distance
Returns
-------
discretize.TreeMesh
The refined tree mesh
Examples
--------
Here we use the **refine_tree_xyz** function refine a tree mesh
based on topography as well as a cluster of points.
>>> from discretize import TreeMesh
>>> from discretize.utils import mkvc, refine_tree_xyz
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> dx = 5 # minimum cell width (base mesh cell width) in x
>>> dy = 5 # minimum cell width (base mesh cell width) in y
>>> x_length = 300.0 # domain width in x
>>> y_length = 300.0 # domain width in y
Compute number of base mesh cells required in x and y
>>> nbcx = 2 ** int(np.round(np.log(x_length / dx) / np.log(2.0)))
>>> nbcy = 2 ** int(np.round(np.log(y_length / dy) / np.log(2.0)))
Define the base mesh
>>> hx = [(dx, nbcx)]
>>> hy = [(dy, nbcy)]
>>> mesh = TreeMesh([hx, hy], x0="CC")
Refine surface topography
>>> xx = mesh.vectorNx
>>> yy = -3 * np.exp((xx ** 2) / 100 ** 2) + 50.0
>>> pts = np.c_[mkvc(xx), mkvc(yy)]
>>> mesh = refine_tree_xyz(
... mesh, pts, octree_levels=[2, 4], method="surface", finalize=False
... )
Refine mesh near points
>>> xx = np.array([-10.0, 10.0, 10.0, -10.0])
>>> yy = np.array([-40.0, -40.0, -60.0, -60.0])
>>> pts = np.c_[mkvc(xx), mkvc(yy)]
>>> mesh = refine_tree_xyz(
... mesh, pts, octree_levels=[4, 2], method="radial", finalize=True
... )
Plot the mesh
>>> fig = plt.figure(figsize=(6, 6))
>>> ax = fig.add_subplot(111)
>>> mesh.plotGrid(ax=ax)
>>> ax.set_xbound(mesh.x0[0], mesh.x0[0] + np.sum(mesh.hx))
>>> ax.set_ybound(mesh.x0[1], mesh.x0[1] + np.sum(mesh.hy))
>>> ax.set_title("QuadTree Mesh")
>>> plt.show()
"""
if octree_levels_padding is not None:
if len(octree_levels_padding) != len(octree_levels):
            raise ValueError(
                "'octree_levels_padding' must have length %i" % len(octree_levels)
            )
else:
octree_levels_padding = np.zeros_like(octree_levels)
octree_levels = np.asarray(octree_levels)
octree_levels_padding = np.asarray(octree_levels_padding)
# Trigger different refine methods
if method.lower() == "radial":
# Compute the outer limits of each octree level
rMax = np.cumsum(
mesh.h[0].min() * octree_levels * 2 ** np.arange(len(octree_levels))
)
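        # rMax[i] is the outer radius of refinement level i: the cumulative
        # width of octree_levels[k] cells of size h*2**k for k <= i.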
rs = np.ones(xyz.shape[0])
level = np.ones(xyz.shape[0], dtype=np.int32)
for ii, nC in enumerate(octree_levels):
# skip "zero" sized balls
if rMax[ii] > 0:
mesh.refine_ball(
xyz, rs * rMax[ii], level * (mesh.max_level - ii), finalize=False
)
if finalize:
mesh.finalize()
elif method.lower() == "surface":
# Compute centroid
centroid = np.mean(xyz, axis=0)
if mesh.dim == 2:
rOut = np.abs(centroid[0] - xyz).max()
hz = mesh.h[1].min()
else:
# Largest outer point distance
rOut = np.linalg.norm(
np.r_[
np.abs(centroid[0] - xyz[:, 0]).max(),
np.abs(centroid[1] - xyz[:, 1]).max(),
]
)
hz = mesh.h[2].min()
# Compute maximum depth of refinement
zmax = np.cumsum(hz * octree_levels * 2 ** np.arange(len(octree_levels)))
# Compute maximum horizontal padding offset
padWidth = np.cumsum(
mesh.h[0].min()
* octree_levels_padding
* 2 ** np.arange(len(octree_levels_padding))
)
# Increment the vertical offset
zOffset = 0
xyPad = -1
depth = zmax[-1]
# Cycle through the Tree levels backward
for ii in range(len(octree_levels) - 1, -1, -1):
dx = mesh.h[0].min() * 2 ** ii
if mesh.dim == 3:
dy = mesh.h[1].min() * 2 ** ii
dz = mesh.h[2].min() * 2 ** ii
else:
dz = mesh.h[1].min() * 2 ** ii
# Increase the horizontal extent of the surface
if xyPad != padWidth[ii]:
xyPad = padWidth[ii]
# Calculate expansion for padding XY cells
expansion_factor = (rOut + xyPad) / rOut
xLoc = (xyz - centroid) * expansion_factor + centroid
if mesh.dim == 3:
# Create a new triangulated surface
tri2D = Delaunay(xLoc[:, :2])
F = interpolate.LinearNDInterpolator(tri2D, xLoc[:, 2])
else:
F = interpolate.interp1d(
xLoc[:, 0], xLoc[:, 1], fill_value="extrapolate"
)
limx = np.r_[xLoc[:, 0].max(), xLoc[:, 0].min()]
nCx = int(np.ceil((limx[0] - limx[1]) / dx))
if mesh.dim == 3:
limy = np.r_[xLoc[:, 1].max(), xLoc[:, 1].min()]
nCy = int(np.ceil((limy[0] - limy[1]) / dy))
# Create a grid at the octree level in xy
CCx, CCy = np.meshgrid(
np.linspace(limx[1], limx[0], nCx),
np.linspace(limy[1], limy[0], nCy),
)
xy = np.c_[CCx.reshape(-1), CCy.reshape(-1)]
# Only keep points within triangulation
indexTri = tri2D.find_simplex(xy)
else:
xy = np.linspace(limx[1], limx[0], nCx)
indexTri = np.ones_like(xy, dtype="bool")
# Interpolate the elevation linearly
z = F(xy[indexTri != -1])
newLoc = np.c_[xy[indexTri != -1], z]
# Only keep points within max_distance
tree = cKDTree(xyz)
r, ind = tree.query(newLoc)
# Apply vertical padding for current octree level
dim = mesh.dim - 1
zOffset = 0
while zOffset < depth:
indIn = r < (max_distance + padWidth[ii])
nnz = int(np.sum(indIn))
if nnz > 0:
mesh.insert_cells(
np.c_[newLoc[indIn, :dim], newLoc[indIn, -1] - zOffset],
np.ones(nnz) * mesh.max_level - ii,
finalize=False,
)
zOffset += dz
depth -= dz * octree_levels[ii]
if finalize:
mesh.finalize()
elif method.lower() == "box":
# Define the data extent [bottom SW, top NE]
bsw = np.min(xyz, axis=0)
tne = np.max(xyz, axis=0)
hs = np.asarray([h.min() for h in mesh.h])
hx = hs[0]
hz = hs[-1]
# Pre-calculate outer extent of each level
# x_pad
padWidth = np.cumsum(
hx * octree_levels_padding * 2 ** np.arange(len(octree_levels))
)
if mesh.dim == 3:
# y_pad
hy = hs[1]
padWidth = np.c_[
padWidth,
np.cumsum(
hy * octree_levels_padding * 2 ** np.arange(len(octree_levels))
),
]
# Pre-calculate max depth of each level
padWidth = np.c_[
padWidth,
np.cumsum(
hz
* np.maximum(octree_levels - 1, 0)
* 2 ** np.arange(len(octree_levels))
),
]
levels = []
BSW = []
TNE = []
for ii, octZ in enumerate(octree_levels):
if octZ > 0:
levels.append(mesh.max_level - ii)
BSW.append(bsw - padWidth[ii])
TNE.append(tne + padWidth[ii])
mesh.refine_box(BSW, TNE, levels, finalize=finalize)
else:
        raise NotImplementedError(
            "Only method = 'radial', 'surface' or 'box' has been implemented"
        )
return mesh
def active_from_xyz(mesh, xyz, grid_reference="CC", method="linear"):
"""Return boolean array indicating which cells are below surface
For a set of locations defining a surface, **active_from_xyz** outputs a
boolean array indicating which mesh cells like below the surface points.
This method uses SciPy's interpolation routine to interpolate between
location points defining the surface. Nearest neighbour interpolation
is used for cells outside the convex hull of the surface points.
Parameters
----------
mesh : discretize.TensorMesh or discretize.TreeMesh or discretize.CylindricalMesh
Mesh object. If *mesh* is a cylindrical mesh, it must be symmetric
xyz : (N, dim) numpy.ndarray
Points defining the surface topography.
grid_reference : {'CC', 'N'}
Define where the cell is defined relative to surface. Choose between {'CC','N'}
- If 'CC' is used, cells are active if their centers are below the surface.
- If 'N' is used, cells are active if they lie entirely below the surface.
method : {'linear', 'nearest'}
Interpolation method for locations between the xyz points.
Returns
-------
(n_cells) numpy.ndarray of bool
1D mask array of *bool* for the active cells below xyz.
Examples
--------
Here we define the active cells below a parabola. We demonstrate the differences
    that appear when using the 'CC' and 'N' options for *grid_reference*.
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> from discretize import TensorMesh
>>> from discretize.utils import active_from_xyz
Determine active cells for a given mesh and topography
>>> mesh = TensorMesh([5, 5])
>>> topo_func = lambda x: -3*(x-0.2)*(x-0.8)+.5
>>> topo_points = np.linspace(0, 1)
>>> topo_vals = topo_func(topo_points)
>>> active_cc = active_from_xyz(mesh, np.c_[topo_points, topo_vals], grid_reference='CC')
>>> active_n = active_from_xyz(mesh, np.c_[topo_points, topo_vals], grid_reference='N')
Plot visual representation
.. collapse:: Expand to show scripting for plot
>>> ax = plt.subplot(121)
>>> mesh.plot_image(active_cc, ax=ax)
>>> mesh.plot_grid(centers=True, ax=ax)
>>> ax.plot(np.linspace(0,1), topo_func(np.linspace(0,1)), color='C3')
>>> ax.set_title("CC")
>>> ax = plt.subplot(122)
>>> mesh.plot_image(active_n, ax=ax)
>>> mesh.plot_grid(nodes=True, ax=ax)
>>> ax.plot(np.linspace(0,1), topo_func(np.linspace(0,1)), color='C3')
>>> ax.set_title("N")
>>> plt.show()
"""
try:
if not mesh.is_symmetric:
raise NotImplementedError(
"Unsymmetric CylindricalMesh is not yet supported"
)
except AttributeError:
pass
if grid_reference not in ["N", "CC"]:
raise ValueError(
"Value of grid_reference must be 'N' (nodal) or 'CC' (cell center)"
)
dim = mesh.dim - 1
if mesh.dim == 3:
if xyz.shape[1] != 3:
raise ValueError("xyz locations of shape (*, 3) required for 3D mesh")
if method == "linear":
tri2D = Delaunay(xyz[:, :2])
z_interpolate = interpolate.LinearNDInterpolator(tri2D, xyz[:, 2])
else:
z_interpolate = interpolate.NearestNDInterpolator(xyz[:, :2], xyz[:, 2])
elif mesh.dim == 2:
if xyz.shape[1] != 2:
raise ValueError("xyz locations of shape (*, 2) required for 2D mesh")
z_interpolate = interpolate.interp1d(
xyz[:, 0], xyz[:, 1], bounds_error=False, fill_value=np.nan, kind=method
)
else:
if xyz.ndim != 1:
raise ValueError("xyz locations of shape (*, ) required for 1D mesh")
if grid_reference == "CC":
# this should work for all 4 mesh types...
locations = mesh.cell_centers
if mesh.dim == 1:
active = np.zeros(mesh.nC, dtype="bool")
active[np.searchsorted(mesh.cell_centers_x, xyz).max() :] = True
return active
elif grid_reference == "N":
try:
# try for Cyl, Tensor, and Tree operations
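            # For the 'N' reference, sample the top corners of every cell
            # (centers shifted by +/- h/2 horizontally and +h/2 vertically);
            # a cell stays active only if all sampled corners lie below the surface.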
if mesh.dim == 3:
locations = np.vstack(
[
mesh.cell_centers
+ (np.c_[-1, 1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[-1, -1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[1, 1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[1, -1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
]
)
elif mesh.dim == 2:
locations = np.vstack(
[
mesh.cell_centers
+ (np.c_[-1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
]
)
else:
active = np.zeros(mesh.nC, dtype="bool")
active[np.searchsorted(mesh.nodes_x, xyz).max() :] = True
return active
except AttributeError:
# Try for Curvilinear Mesh
gridN = mesh.gridN.reshape((*mesh.vnN, mesh.dim), order="F")
if mesh.dim == 3:
locations = np.vstack(
[
gridN[:-1, 1:, 1:].reshape((-1, mesh.dim), order="F"),
gridN[:-1, :-1, 1:].reshape((-1, mesh.dim), order="F"),
gridN[1:, 1:, 1:].reshape((-1, mesh.dim), order="F"),
gridN[1:, :-1, 1:].reshape((-1, mesh.dim), order="F"),
]
)
elif mesh.dim == 2:
locations = np.vstack(
[
gridN[:-1, 1:].reshape((-1, mesh.dim), order="F"),
gridN[1:, 1:].reshape((-1, mesh.dim), order="F"),
]
)
# Interpolate z values on CC or N
z_xyz = z_interpolate(locations[:, :-1]).squeeze()
# Apply nearest neighbour if in extrapolation
ind_nan = np.isnan(z_xyz)
if any(ind_nan):
tree = cKDTree(xyz)
_, ind = tree.query(locations[ind_nan, :])
z_xyz[ind_nan] = xyz[ind, dim]
    # A cell is active only when every one of its sampled locations lies below the surface
active = np.all(
(locations[:, dim] < z_xyz).reshape((mesh.nC, -1), order="F"), axis=1
)
return active.ravel()
meshTensor = deprecate_function(unpack_widths, "meshTensor", removal_version="1.0.0", future_warn=False)
closestPoints = deprecate_function(
closest_points_index, "closestPoints", removal_version="1.0.0", future_warn=False
)
ExtractCoreMesh = deprecate_function(
extract_core_mesh, "ExtractCoreMesh", removal_version="1.0.0", future_warn=False
)
closest_points = deprecate_function(
closest_points_index, "closest_points", removal_version="1.0.0", future_warn=False
)
|
"""
ERP+
"""
__author__ = 'António Anacleto'
__credits__ = []
__version__ = "1.0"
__maintainer__ = "António Anacleto"
__status__ = "Development"
__model_name__ = 'linha_leitura_tecnica.LinhaLeituraTecnica'
import time
import auth, base_models
from orm import *
from form import *
try:
from my_contador import Contador
except ImportError:
from contador import Contador
class LinhaLeituraTecnica(Model, View):
def __init__(self, **kargs):
Model.__init__(self, **kargs)
self.__name__ = 'linha_leitura_tecnica'
self.__title__ = 'Linhas de Folha de Leitura Tecnica'
self.__model_name__ = __model_name__
self.__list_edit_mode__ = 'inline'
self.__get_options__ = ['contador']
self.leitura_tecnica = parent_field(view_order=1, name ='Leitura Técnica', args='style:visibility="hidden"', model_name='leitura_tecnica.LeituraTecnica', nolabel=True, onlist=False, column='numero')
self.contador = choice_field(view_order=2, name ='Contador', args='required tabIndex="-1"', onchange='contador_onchange', model='contador', column='nome', options='model.get_contadores()')
self.equipamento = string_field(view_order=3, name ='Equipamento', args='readonly="readonly" tabIndex="-1"', size=50, nolabel=True, search=False)
self.leitura_anterior = decimal_field(view_order=4, name ='Leitura anterior', args='readonly="readonly" tabIndex="-1"', size=20, default=1.0)
self.leitura_actual = decimal_field(view_order=5, name ='Leitura actual', size=20, default=1.0)
self.hora = time_field(view_order=6, name ='Hora Leitura', args='required ', default=time.strftime('%H:%M:%S'))
def get_contadores(self):
return Contador().get_options()
    def contador_onchange(self, record):
        # When the selected meter (contador) changes, refresh the dependent
        # read-only fields. NOTE: the column names read from the Contador
        # record below are assumptions; adjust them to the real schema.
        result = record.copy()
        contador = Contador().get(key=record['contador'])
        if len(contador) != 0:
            contador = contador[0]
            result['equipamento'] = contador.get('equipamento', '')
            result['leitura_anterior'] = to_decimal(contador.get('ultima_leitura', 0))
        else:
            result = {}
        return result
|
import sys
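# Round-trip tool: decode a hex string given on the command line to ASCII,
# print the text, then re-encode the printed text back to hex.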
if __name__ == '__main__':
if len(sys.argv) == 2:
line = bytes.fromhex(sys.argv[1]).decode('ascii', 'ignore')
print(line)
print(''.join('{:02X}'.format(ord(i)) for i in line))
|
from __future__ import absolute_import, print_function, unicode_literals
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "s_analyzer.settings.default")
application = get_wsgi_application()
|
import struct
import io
import sys
import datetime
import re
try:
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
except ImportError:
    # Python 3: the payloads parsed here are bytes, so use BytesIO.
    from io import BytesIO as StringIO
try:
    import lzf
    HAS_PYTHON_LZF = True
except ImportError:
    HAS_PYTHON_LZF = False
try:
    xrange
except NameError:
    # Python 3 removed xrange; fall back to range.
    xrange = range
REDIS_RDB_6BITLEN = 0
REDIS_RDB_14BITLEN = 1
REDIS_RDB_32BITLEN = 2
REDIS_RDB_ENCVAL = 3
REDIS_RDB_OPCODE_AUX = 250
REDIS_RDB_OPCODE_RESIZEDB = 251
REDIS_RDB_OPCODE_EXPIRETIME_MS = 252
REDIS_RDB_OPCODE_EXPIRETIME = 253
REDIS_RDB_OPCODE_SELECTDB = 254
REDIS_RDB_OPCODE_EOF = 255
REDIS_RDB_TYPE_STRING = 0
REDIS_RDB_TYPE_LIST = 1
REDIS_RDB_TYPE_SET = 2
REDIS_RDB_TYPE_ZSET = 3
REDIS_RDB_TYPE_HASH = 4
REDIS_RDB_TYPE_HASH_ZIPMAP = 9
REDIS_RDB_TYPE_LIST_ZIPLIST = 10
REDIS_RDB_TYPE_SET_INTSET = 11
REDIS_RDB_TYPE_ZSET_ZIPLIST = 12
REDIS_RDB_TYPE_HASH_ZIPLIST = 13
REDIS_RDB_TYPE_LIST_QUICKLIST = 14
REDIS_RDB_ENC_INT8 = 0
REDIS_RDB_ENC_INT16 = 1
REDIS_RDB_ENC_INT32 = 2
REDIS_RDB_ENC_LZF = 3
DATA_TYPE_MAPPING = {
0 : "string", 1 : "list", 2 : "set", 3 : "sortedset", 4 : "hash",
9 : "hash", 10 : "list", 11 : "set", 12 : "sortedset", 13 : "hash", 14 : "list"}
class RdbCallback(object):
"""
A Callback to handle events as the Redis dump file is parsed.
    This callback provides serial and fast access to the dump file.
"""
def start_rdb(self):
"""
Called once we know we are dealing with a valid redis dump file
"""
pass
def aux_field(self, key, value):
""""
Called in the beginning of the RDB with various meta data fields such as:
redis-ver, redis-bits, ctime, used-mem
exists since redis 3.2 (RDB v7)
"""
pass
def start_database(self, db_number):
"""
        Called to indicate the start of database `db_number`
Once a database starts, another database cannot start unless
the first one completes and then `end_database` method is called
Typically, callbacks store the current database number in a class variable
"""
pass
def db_size(self, db_size, expires_size):
"""
        Called once per database before its keys, with the number of keys in the main dictionary and the number of volatile (expiring) keys
exists since redis 3.2 (RDB v7)
"""
pass
def set(self, key, value, expiry, info):
"""
Callback to handle a key with a string value and an optional expiry
`key` is the redis key
`value` is a string or a number
        `expiry` is a `datetime` object. None means the object does not expire
`info` is a dictionary containing additional information about this object.
"""
pass
def start_hash(self, key, length, expiry, info):
"""Callback to handle the start of a hash
`key` is the redis key
`length` is the number of elements in this hash.
`expiry` is a `datetime` object. None means the object does not expire
`info` is a dictionary containing additional information about this object.
After `start_hash`, the method `hset` will be called with this `key` exactly `length` times.
After that, the `end_hash` method will be called.
"""
pass
def hset(self, key, field, value):
"""
Callback to insert a field=value pair in an existing hash
`key` is the redis key for this hash
`field` is a string
`value` is the value to store for this field
"""
pass
def end_hash(self, key):
"""
Called when there are no more elements in the hash
`key` is the redis key for the hash
"""
pass
def start_set(self, key, cardinality, expiry, info):
"""
        Callback to handle the start of a set
`key` is the redis key
`cardinality` is the number of elements in this set
`expiry` is a `datetime` object. None means the object does not expire
`info` is a dictionary containing additional information about this object.
After `start_set`, the method `sadd` will be called with `key` exactly `cardinality` times
After that, the `end_set` method will be called to indicate the end of the set.
Note : This callback handles both Int Sets and Regular Sets
"""
pass
def sadd(self, key, member):
"""
        Callback to insert a new member into this set
`key` is the redis key for this set
`member` is the member to insert into this set
"""
pass
def end_set(self, key):
"""
Called when there are no more elements in this set
`key` the redis key for this set
"""
pass
def start_list(self, key, expiry, info):
"""
Callback to handle the start of a list
`key` is the redis key for this list
`expiry` is a `datetime` object. None means the object does not expire
`info` is a dictionary containing additional information about this object.
After `start_list`, the method `rpush` will be called with `key` exactly `length` times
After that, the `end_list` method will be called to indicate the end of the list
Note : This callback handles both Zip Lists and Linked Lists.
"""
pass
def rpush(self, key, value):
"""
Callback to insert a new value into this list
`key` is the redis key for this list
`value` is the value to be inserted
Elements must be inserted to the end (i.e. tail) of the existing list.
"""
pass
def end_list(self, key, info):
"""
Called when there are no more elements in this list
`key` the redis key for this list
`info` is a dictionary containing additional information about this object that wasn't known in start_list.
"""
pass
def start_sorted_set(self, key, length, expiry, info):
"""
Callback to handle the start of a sorted set
        `key` is the redis key for this sorted set
`length` is the number of elements in this sorted set
`expiry` is a `datetime` object. None means the object does not expire
`info` is a dictionary containing additional information about this object.
After `start_sorted_set`, the method `zadd` will be called with `key` exactly `length` times.
Also, `zadd` will be called in a sorted order, so as to preserve the ordering of this sorted set.
After that, the `end_sorted_set` method will be called to indicate the end of this sorted set
        Note : This callback handles sorted sets that are stored as either ziplists or skiplists
"""
pass
def zadd(self, key, score, member):
"""Callback to insert a new value into this sorted set
`key` is the redis key for this sorted set
`score` is the score for this `value`
`value` is the element being inserted
"""
pass
def end_sorted_set(self, key):
"""
Called when there are no more elements in this sorted set
`key` is the redis key for this sorted set
"""
pass
def end_database(self, db_number):
"""
Called when the current database ends
        After `end_database`, one of two methods is called -
1) `start_database` with a new database number
OR
2) `end_rdb` to indicate we have reached the end of the file
"""
pass
def end_rdb(self):
"""Called to indicate we have completed parsing of the dump file"""
pass
class RdbParser(object):
"""
A Parser for Redis RDB Files
This class is similar in spirit to a SAX parser for XML files.
The dump file is parsed sequentially. As and when objects are discovered,
appropriate methods in the callback are called.
Typical usage :
callback = MyRdbCallback() # Typically a subclass of RdbCallback
parser = RdbParser(callback)
parser.parse('/var/redis/6379/dump.rdb')
filter is a dictionary with the following keys
{"dbs" : [0, 1], "keys" : "foo.*", "types" : ["hash", "set", "sortedset", "list", "string"]}
If filter is None, results will not be filtered
If dbs, keys or types is None or Empty, no filtering will be done on that axis
"""
def __init__(self, callback, filters = None) :
"""
`callback` is the object that will receive parse events
"""
self._callback = callback
self._key = None
self._expiry = None
self.init_filter(filters)
self._rdb_version = 0
def parse(self, filename):
"""
Parse a redis rdb dump file, and call methods in the
callback object during the parsing operation.
"""
self.parse_fd(open(filename, "rb"))
def parse_fd(self, fd):
with fd as f:
self.verify_magic_string(f.read(5))
self.verify_version(f.read(4))
self._callback.start_rdb()
is_first_database = True
db_number = 0
while True :
self._expiry = None
data_type = read_unsigned_char(f)
if data_type == REDIS_RDB_OPCODE_EXPIRETIME_MS :
self._expiry = to_datetime(read_unsigned_long(f) * 1000)
data_type = read_unsigned_char(f)
elif data_type == REDIS_RDB_OPCODE_EXPIRETIME :
self._expiry = to_datetime(read_unsigned_int(f) * 1000000)
data_type = read_unsigned_char(f)
if data_type == REDIS_RDB_OPCODE_SELECTDB :
if not is_first_database :
self._callback.end_database(db_number)
is_first_database = False
db_number = self.read_length(f)
self._callback.start_database(db_number)
continue
if data_type == REDIS_RDB_OPCODE_AUX:
aux_key = self.read_string(f)
aux_val = self.read_string(f)
ret = self._callback.aux_field(aux_key, aux_val)
if ret:
break # TODO: make all callbacks return abort flag
continue
if data_type == REDIS_RDB_OPCODE_RESIZEDB:
db_size = self.read_length(f)
expire_size = self.read_length(f)
self._callback.db_size(db_size, expire_size)
continue
if data_type == REDIS_RDB_OPCODE_EOF :
self._callback.end_database(db_number)
self._callback.end_rdb()
                    if self._rdb_version >= 5:
                        # Skip the 8-byte CRC64 checksum appended since RDB version 5.
                        f.read(8)
break
if self.matches_filter(db_number) :
self._key = self.read_string(f)
if self.matches_filter(db_number, self._key, data_type):
self.read_object(f, data_type)
else:
self.skip_object(f, data_type)
else :
self.skip_key_and_object(f, data_type)
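    # RDB length encoding: the two high bits of the first byte select the
    # format -- 00 = 6-bit length, 01 = 14-bit length (one extra byte),
    # 10 = 32-bit length (byte-swapped via ntohl below), 11 = specially
    # encoded value (packed integer or LZF-compressed string).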
def read_length_with_encoding(self, f) :
length = 0
is_encoded = False
bytes = []
bytes.append(read_unsigned_char(f))
enc_type = (bytes[0] & 0xC0) >> 6
if enc_type == REDIS_RDB_ENCVAL :
is_encoded = True
length = bytes[0] & 0x3F
elif enc_type == REDIS_RDB_6BITLEN :
length = bytes[0] & 0x3F
elif enc_type == REDIS_RDB_14BITLEN :
bytes.append(read_unsigned_char(f))
length = ((bytes[0]&0x3F)<<8)|bytes[1]
else :
length = ntohl(f)
return (length, is_encoded)
def read_length(self, f) :
return self.read_length_with_encoding(f)[0]
def read_string(self, f) :
tup = self.read_length_with_encoding(f)
length = tup[0]
is_encoded = tup[1]
val = None
if is_encoded :
if length == REDIS_RDB_ENC_INT8 :
val = read_signed_char(f)
elif length == REDIS_RDB_ENC_INT16 :
val = read_signed_short(f)
elif length == REDIS_RDB_ENC_INT32 :
val = read_signed_int(f)
elif length == REDIS_RDB_ENC_LZF :
clen = self.read_length(f)
l = self.read_length(f)
val = self.lzf_decompress(f.read(clen), l)
else:
raise Exception('read_string', "Invalid string encoding %s"%(length))
else :
val = f.read(length)
return val
def read_float(self, f):
dbl_length = read_unsigned_char(f)
if dbl_length == 253:
return float('nan')
elif dbl_length == 254:
return float('inf')
elif dbl_length == 255:
return float('-inf')
        data = f.read(dbl_length)
        if isinstance(data, bytes):
            data = data.decode('ascii')
        return float(data)
    # Read an object from the stream
# f is the redis file
# enc_type is the type of object
def read_object(self, f, enc_type) :
if enc_type == REDIS_RDB_TYPE_STRING :
val = self.read_string(f)
self._callback.set(self._key, val, self._expiry, info={'encoding':'string'})
elif enc_type == REDIS_RDB_TYPE_LIST :
# A redis list is just a sequence of strings
# We successively read strings from the stream and create a list from it
# The lists are in order i.e. the first string is the head,
# and the last string is the tail of the list
length = self.read_length(f)
self._callback.start_list(self._key, self._expiry, info={'encoding':'linkedlist' })
for count in xrange(0, length) :
val = self.read_string(f)
self._callback.rpush(self._key, val)
self._callback.end_list(self._key, info={'encoding':'linkedlist' })
elif enc_type == REDIS_RDB_TYPE_SET :
            # A redis set is just a sequence of strings
# We successively read strings from the stream and create a set from it
# Note that the order of strings is non-deterministic
length = self.read_length(f)
self._callback.start_set(self._key, length, self._expiry, info={'encoding':'hashtable'})
for count in xrange(0, length) :
val = self.read_string(f)
self._callback.sadd(self._key, val)
self._callback.end_set(self._key)
elif enc_type == REDIS_RDB_TYPE_ZSET :
length = self.read_length(f)
self._callback.start_sorted_set(self._key, length, self._expiry, info={'encoding':'skiplist'})
for count in xrange(0, length) :
val = self.read_string(f)
score = self.read_float(f)
self._callback.zadd(self._key, score, val)
self._callback.end_sorted_set(self._key)
elif enc_type == REDIS_RDB_TYPE_HASH :
length = self.read_length(f)
self._callback.start_hash(self._key, length, self._expiry, info={'encoding':'hashtable'})
for count in xrange(0, length) :
field = self.read_string(f)
value = self.read_string(f)
self._callback.hset(self._key, field, value)
self._callback.end_hash(self._key)
elif enc_type == REDIS_RDB_TYPE_HASH_ZIPMAP :
self.read_zipmap(f)
elif enc_type == REDIS_RDB_TYPE_LIST_ZIPLIST :
self.read_ziplist(f)
elif enc_type == REDIS_RDB_TYPE_SET_INTSET :
self.read_intset(f)
elif enc_type == REDIS_RDB_TYPE_ZSET_ZIPLIST :
self.read_zset_from_ziplist(f)
elif enc_type == REDIS_RDB_TYPE_HASH_ZIPLIST :
self.read_hash_from_ziplist(f)
elif enc_type == REDIS_RDB_TYPE_LIST_QUICKLIST:
self.read_list_from_quicklist(f)
else :
raise Exception('read_object', 'Invalid object type %d for key %s' % (enc_type, self._key))
def skip_key_and_object(self, f, data_type):
self.skip_string(f)
self.skip_object(f, data_type)
def skip_string(self, f):
tup = self.read_length_with_encoding(f)
length = tup[0]
is_encoded = tup[1]
bytes_to_skip = 0
if is_encoded :
if length == REDIS_RDB_ENC_INT8 :
bytes_to_skip = 1
elif length == REDIS_RDB_ENC_INT16 :
bytes_to_skip = 2
elif length == REDIS_RDB_ENC_INT32 :
bytes_to_skip = 4
elif length == REDIS_RDB_ENC_LZF :
clen = self.read_length(f)
l = self.read_length(f)
bytes_to_skip = clen
else :
bytes_to_skip = length
skip(f, bytes_to_skip)
def skip_object(self, f, enc_type):
skip_strings = 0
if enc_type == REDIS_RDB_TYPE_STRING :
skip_strings = 1
elif enc_type == REDIS_RDB_TYPE_LIST :
skip_strings = self.read_length(f)
elif enc_type == REDIS_RDB_TYPE_SET :
skip_strings = self.read_length(f)
elif enc_type == REDIS_RDB_TYPE_ZSET :
skip_strings = self.read_length(f) * 2
elif enc_type == REDIS_RDB_TYPE_HASH :
skip_strings = self.read_length(f) * 2
elif enc_type == REDIS_RDB_TYPE_HASH_ZIPMAP :
skip_strings = 1
elif enc_type == REDIS_RDB_TYPE_LIST_ZIPLIST :
skip_strings = 1
elif enc_type == REDIS_RDB_TYPE_SET_INTSET :
skip_strings = 1
elif enc_type == REDIS_RDB_TYPE_ZSET_ZIPLIST :
skip_strings = 1
elif enc_type == REDIS_RDB_TYPE_HASH_ZIPLIST :
skip_strings = 1
elif enc_type == REDIS_RDB_TYPE_LIST_QUICKLIST:
skip_strings = self.read_length(f)
else :
raise Exception('skip_object', 'Invalid object type %d for key %s' % (enc_type, self._key))
for x in xrange(0, skip_strings):
self.skip_string(f)
def read_intset(self, f) :
raw_string = self.read_string(f)
buff = StringIO(raw_string)
encoding = read_unsigned_int(buff)
num_entries = read_unsigned_int(buff)
self._callback.start_set(self._key, num_entries, self._expiry, info={'encoding':'intset', 'sizeof_value':len(raw_string)})
for x in xrange(0, num_entries) :
if encoding == 8 :
entry = read_signed_long(buff)
elif encoding == 4 :
entry = read_signed_int(buff)
elif encoding == 2 :
entry = read_signed_short(buff)
else :
raise Exception('read_intset', 'Invalid encoding %d for key %s' % (encoding, self._key))
self._callback.sadd(self._key, entry)
self._callback.end_set(self._key)
def read_ziplist(self, f) :
raw_string = self.read_string(f)
buff = StringIO(raw_string)
zlbytes = read_unsigned_int(buff)
tail_offset = read_unsigned_int(buff)
num_entries = read_unsigned_short(buff)
self._callback.start_list(self._key, self._expiry, info={'encoding':'ziplist', 'sizeof_value':len(raw_string)})
for x in xrange(0, num_entries) :
val = self.read_ziplist_entry(buff)
self._callback.rpush(self._key, val)
zlist_end = read_unsigned_char(buff)
if zlist_end != 255 :
raise Exception('read_ziplist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key))
self._callback.end_list(self._key, info={'encoding':'ziplist'})
def read_list_from_quicklist(self, f):
count = self.read_length(f)
total_size = 0
self._callback.start_list(self._key, self._expiry, info={'encoding': 'quicklist', 'zips': count})
for i in xrange(0, count):
raw_string = self.read_string(f)
total_size += len(raw_string)
buff = StringIO(raw_string)
zlbytes = read_unsigned_int(buff)
tail_offset = read_unsigned_int(buff)
num_entries = read_unsigned_short(buff)
for x in xrange(0, num_entries):
self._callback.rpush(self._key, self.read_ziplist_entry(buff))
zlist_end = read_unsigned_char(buff)
if zlist_end != 255:
raise Exception('read_quicklist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key))
self._callback.end_list(self._key, info={'encoding': 'quicklist', 'zips': count, 'sizeof_value': total_size})
def read_zset_from_ziplist(self, f) :
raw_string = self.read_string(f)
buff = StringIO(raw_string)
zlbytes = read_unsigned_int(buff)
tail_offset = read_unsigned_int(buff)
num_entries = read_unsigned_short(buff)
if (num_entries % 2) :
raise Exception('read_zset_from_ziplist', "Expected even number of elements, but found %d for key %s" % (num_entries, self._key))
        num_entries = num_entries // 2
self._callback.start_sorted_set(self._key, num_entries, self._expiry, info={'encoding':'ziplist', 'sizeof_value':len(raw_string)})
for x in xrange(0, num_entries) :
member = self.read_ziplist_entry(buff)
score = self.read_ziplist_entry(buff)
if isinstance(score, str) :
score = float(score)
self._callback.zadd(self._key, score, member)
zlist_end = read_unsigned_char(buff)
if zlist_end != 255 :
raise Exception('read_zset_from_ziplist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key))
self._callback.end_sorted_set(self._key)
def read_hash_from_ziplist(self, f) :
raw_string = self.read_string(f)
buff = StringIO(raw_string)
zlbytes = read_unsigned_int(buff)
tail_offset = read_unsigned_int(buff)
num_entries = read_unsigned_short(buff)
if (num_entries % 2) :
raise Exception('read_hash_from_ziplist', "Expected even number of elements, but found %d for key %s" % (num_entries, self._key))
        num_entries = num_entries // 2
self._callback.start_hash(self._key, num_entries, self._expiry, info={'encoding':'ziplist', 'sizeof_value':len(raw_string)})
for x in xrange(0, num_entries) :
field = self.read_ziplist_entry(buff)
value = self.read_ziplist_entry(buff)
self._callback.hset(self._key, field, value)
zlist_end = read_unsigned_char(buff)
if zlist_end != 255 :
raise Exception('read_hash_from_ziplist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key))
self._callback.end_hash(self._key)
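    # A ziplist entry starts with the previous entry's length (one byte, or
    # four more bytes when the first byte is 254), followed by a header whose
    # high bits select between raw string payloads and packed integer formats.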
def read_ziplist_entry(self, f) :
length = 0
value = None
prev_length = read_unsigned_char(f)
if prev_length == 254 :
prev_length = read_unsigned_int(f)
entry_header = read_unsigned_char(f)
if (entry_header >> 6) == 0 :
length = entry_header & 0x3F
value = f.read(length)
elif (entry_header >> 6) == 1 :
length = ((entry_header & 0x3F) << 8) | read_unsigned_char(f)
value = f.read(length)
elif (entry_header >> 6) == 2 :
length = read_big_endian_unsigned_int(f)
value = f.read(length)
elif (entry_header >> 4) == 12 :
value = read_signed_short(f)
elif (entry_header >> 4) == 13 :
value = read_signed_int(f)
elif (entry_header >> 4) == 14 :
value = read_signed_long(f)
elif (entry_header == 240) :
value = read_24bit_signed_number(f)
elif (entry_header == 254) :
value = read_signed_char(f)
elif (entry_header >= 241 and entry_header <= 253) :
value = entry_header - 241
else :
raise Exception('read_ziplist_entry', 'Invalid entry_header %d for key %s' % (entry_header, self._key))
return value
def read_zipmap(self, f) :
raw_string = self.read_string(f)
buff = io.BytesIO(bytearray(raw_string))
num_entries = read_unsigned_char(buff)
self._callback.start_hash(self._key, num_entries, self._expiry, info={'encoding':'zipmap', 'sizeof_value':len(raw_string)})
while True :
next_length = self.read_zipmap_next_length(buff)
if next_length is None :
break
key = buff.read(next_length)
next_length = self.read_zipmap_next_length(buff)
if next_length is None :
                raise Exception('read_zip_map', 'Unexpected end of zip map for key %s' % self._key)
free = read_unsigned_char(buff)
value = buff.read(next_length)
try:
value = int(value)
except ValueError:
pass
skip(buff, free)
self._callback.hset(self._key, key, value)
self._callback.end_hash(self._key)
def read_zipmap_next_length(self, f) :
num = read_unsigned_char(f)
if num < 254:
return num
elif num == 254:
return read_unsigned_int(f)
else:
return None
def verify_magic_string(self, magic_string) :
        if magic_string != b'REDIS' :
raise Exception('verify_magic_string', 'Invalid File Format')
def verify_version(self, version_str) :
        if isinstance(version_str, bytes):
            version_str = version_str.decode('ascii')
        version = int(version_str)
if version < 1 or version > 7:
raise Exception('verify_version', 'Invalid RDB version number %d' % version)
self._rdb_version = version
def init_filter(self, filters):
self._filters = {}
if not filters:
filters={}
if not 'dbs' in filters:
self._filters['dbs'] = None
elif isinstance(filters['dbs'], int):
self._filters['dbs'] = (filters['dbs'], )
elif isinstance(filters['dbs'], list):
self._filters['dbs'] = [int(x) for x in filters['dbs']]
else:
raise Exception('init_filter', 'invalid value for dbs in filter %s' %filters['dbs'])
if not ('keys' in filters and filters['keys']):
self._filters['keys'] = re.compile(".*")
else:
self._filters['keys'] = re.compile(filters['keys'])
if not ('not_keys' in filters and filters['not_keys']):
self._filters['not_keys'] = None
else:
self._filters['not_keys'] = re.compile(filters['not_keys'])
if not 'types' in filters:
self._filters['types'] = ('set', 'hash', 'sortedset', 'string', 'list')
elif isinstance(filters['types'], str):
self._filters['types'] = (filters['types'], )
elif isinstance(filters['types'], list):
self._filters['types'] = [str(x) for x in filters['types']]
else:
raise Exception('init_filter', 'invalid value for types in filter %s' %filters['types'])
def matches_filter(self, db_number, key=None, data_type=None):
if self._filters['dbs'] and (not db_number in self._filters['dbs']):
return False
if key and self._filters['not_keys'] and (self._filters['not_keys'].match(str(key))):
return False
if key and (not self._filters['keys'].match(str(key))):
return False
if data_type is not None and (not self.get_logical_type(data_type) in self._filters['types']):
return False
return True
def get_logical_type(self, data_type):
return DATA_TYPE_MAPPING[data_type]
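    # LZF format: a control byte below 32 introduces a literal run of
    # (ctrl + 1) bytes; any other control byte encodes a back-reference whose
    # length sits in the top three bits and whose offset combines the low
    # five bits with one extra byte.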
def lzf_decompress(self, compressed, expected_length):
if HAS_PYTHON_LZF:
return lzf.decompress(compressed, expected_length)
else:
in_stream = bytearray(compressed)
in_len = len(in_stream)
in_index = 0
out_stream = bytearray()
out_index = 0
while in_index < in_len :
ctrl = in_stream[in_index]
if not isinstance(ctrl, int) :
raise Exception('lzf_decompress', 'ctrl should be a number %s for key %s' % (str(ctrl), self._key))
in_index = in_index + 1
if ctrl < 32 :
for x in xrange(0, ctrl + 1) :
out_stream.append(in_stream[in_index])
#sys.stdout.write(chr(in_stream[in_index]))
in_index = in_index + 1
out_index = out_index + 1
else :
length = ctrl >> 5
if length == 7 :
length = length + in_stream[in_index]
in_index = in_index + 1
ref = out_index - ((ctrl & 0x1f) << 8) - in_stream[in_index] - 1
in_index = in_index + 1
for x in xrange(0, length + 2) :
out_stream.append(out_stream[ref])
ref = ref + 1
out_index = out_index + 1
if len(out_stream) != expected_length :
raise Exception('lzf_decompress', 'Expected lengths do not match %d != %d for key %s' % (len(out_stream), expected_length, self._key))
            return bytes(out_stream)
def skip(f, free):
if free :
f.read(free)
def ntohl(f) :
val = read_unsigned_int(f)
new_val = 0
new_val = new_val | ((val & 0x000000ff) << 24)
new_val = new_val | ((val & 0xff000000) >> 24)
new_val = new_val | ((val & 0x0000ff00) << 8)
new_val = new_val | ((val & 0x00ff0000) >> 8)
return new_val
def to_datetime(usecs_since_epoch):
seconds_since_epoch = usecs_since_epoch / 1000000
useconds = usecs_since_epoch % 1000000
dt = datetime.datetime.utcfromtimestamp(seconds_since_epoch)
delta = datetime.timedelta(microseconds = useconds)
return dt + delta
def read_signed_char(f) :
return struct.unpack('b', f.read(1))[0]
def read_unsigned_char(f) :
return struct.unpack('B', f.read(1))[0]
def read_signed_short(f) :
return struct.unpack('h', f.read(2))[0]
def read_unsigned_short(f) :
return struct.unpack('H', f.read(2))[0]
def read_signed_int(f) :
return struct.unpack('i', f.read(4))[0]
def read_unsigned_int(f) :
return struct.unpack('I', f.read(4))[0]
def read_big_endian_unsigned_int(f):
return struct.unpack('>I', f.read(4))[0]
def read_24bit_signed_number(f):
    s = b'0' + f.read(3)  # placeholder low byte; the shift below drops it
num = struct.unpack('i', s)[0]
return num >> 8
def read_signed_long(f) :
return struct.unpack('q', f.read(8))[0]
def read_unsigned_long(f) :
return struct.unpack('Q', f.read(8))[0]
def string_as_hexcode(string) :
for s in string :
if isinstance(s, int) :
print(hex(s))
else :
print(hex(ord(s)))
class DebugCallback(RdbCallback) :
def start_rdb(self):
print('[')
def aux_field(self, key, value):
print('aux:[%s:%s]' % (key, value))
def start_database(self, db_number):
print('{')
def db_size(self, db_size, expires_size):
print('db_size: %s, expires_size %s' % (db_size, expires_size))
    def set(self, key, value, expiry, info):
print('"%s" : "%s"' % (str(key), str(value)))
    def start_hash(self, key, length, expiry, info):
        print('"%s" : {' % str(key))
def hset(self, key, field, value):
print('"%s" : "%s"' % (str(field), str(value)))
def end_hash(self, key):
print('}')
    def start_set(self, key, cardinality, expiry, info):
print('"%s" : [' % str(key))
def sadd(self, key, member):
print('"%s"' % str(member))
def end_set(self, key):
print(']')
def start_list(self, key, expiry, info):
print('"%s" : [' % str(key))
def rpush(self, key, value) :
print('"%s"' % str(value))
def end_list(self, key, info):
print(']')
    def start_sorted_set(self, key, length, expiry, info):
print('"%s" : {' % str(key))
def zadd(self, key, score, member):
print('"%s" : "%s"' % (str(member), str(score)))
def end_sorted_set(self, key):
print('}')
def end_database(self, db_number):
print('}')
def end_rdb(self):
print(']')
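if __name__ == '__main__':
    # Minimal usage sketch: parse an RDB file named on the command line and
    # print a JSON-like trace of its contents through DebugCallback.
    if len(sys.argv) == 2:
        RdbParser(DebugCallback()).parse(sys.argv[1])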
|
from pyspark.ml.wrapper import JavaParams
@staticmethod
def _mml_from_java(java_stage):
"""
Given a Java object, create and return a Python wrapper of it.
Used for ML persistence.
Meta-algorithms such as Pipeline should override this method as a classmethod.
"""
def __get_class(clazz):
"""
Loads Python class from its name.
"""
parts = clazz.split('.')
module = ".".join(parts[:-1])
m = __import__(module)
for comp in parts[1:]:
m = getattr(m, comp)
return m
stage_name = java_stage.getClass().getName().replace("org.apache.spark", "pyspark")
stage_name = stage_name.replace("com.microsoft.ml.spark", "mmlspark")
# Generate a default new instance from the stage_name class.
py_type = __get_class(stage_name)
if issubclass(py_type, JavaParams):
# Load information from java_stage to the instance.
py_stage = py_type()
py_stage._java_obj = java_stage
py_stage._resetUid(java_stage.uid())
py_stage._transfer_params_from_java()
elif hasattr(py_type, "_from_java"):
py_stage = py_type._from_java(java_stage)
else:
raise NotImplementedError("This Java stage cannot be loaded into Python currently: %r"
% stage_name)
return py_stage
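# Monkey-patch JavaParams so ML persistence can load mmlspark
# (com.microsoft.ml.spark) stages into their Python wrappers in addition to
# the stock pyspark ones.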
JavaParams._from_java = _mml_from_java
|
from adapter import urllib2_oauth
import compat
import util
class Client(object):
request_token_url = None
authorize_url = None
access_token_url = None
def __init__(self, client, token=None,
adapter=urllib2_oauth.Adapter, **fetch_parameters):
self.adapter = adapter
self.fetch_parameters = dict(client=client, token=token,
**fetch_parameters)
def set_token(self, token):
self.fetch_parameters['token'] = token
def fetch(self, *args, **kwargs):
raise_errs = kwargs.pop('raise_http_errors', False)
kwargs = dict(self.fetch_parameters, **kwargs)
resp = self.adapter.fetch(*args, **kwargs)
if str(resp.code).startswith('2') or not raise_errs:
return resp
else:
raise ClientError('HTTP error %s: %r' % (resp.code, str(resp)))
def get_authorize_url(self, authorize_url=None, request_token_url=None,
**kwargs):
"""Return a redirect-ready authorize url and temporary credentials"""
url = request_token_url or self.request_token_url
        if not url:
            raise ValueError('request_token_url is required')
auth_url = authorize_url or self.authorize_url
        if not auth_url:
            raise ValueError('authorize_url is required')
kwargs.setdefault('method', 'POST')
if 'oauth_callback' not in self.fetch_parameters:
kwargs.setdefault('oauth_callback', 'oob')
resp = self.fetch(url, **kwargs)
temp = util.Credentials.build(str(resp))
auth_url = util.url_extend(
auth_url, dict(oauth_token=temp.identifier))
return auth_url, temp
def get_access_token(self, temporary_credentials, verifier,
access_token_url=None, **kwargs):
url = access_token_url or self.access_token_url
        if not url:
            raise ValueError('access_token_url is required')
kwargs['token'] = util.Credentials.build(temporary_credentials)
if 'oauth_verifier=' in verifier:
verifier = dict(
compat.parse_qsl(verifier.split('?')[-1]))['oauth_verifier']
elif hasattr(verifier, 'get'):
verifier = verifier.get('oauth_verifier')
kwargs['oauth_verifier'] = verifier
resp = self.fetch(url, **kwargs)
return util.Credentials.build(str(resp))
class ClientError(Exception):
pass
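# Hypothetical usage sketch of the three-legged flow above; the provider URLs
# and credential objects are illustrative assumptions, not a real service:
#
#     class ExampleClient(Client):
#         request_token_url = 'https://provider.example/oauth/request_token'
#         authorize_url = 'https://provider.example/oauth/authorize'
#         access_token_url = 'https://provider.example/oauth/access_token'
#
#     client = ExampleClient(client=consumer_credentials)
#     auth_url, temp = client.get_authorize_url()
#     # ...redirect the user to auth_url and collect the verifier...
#     token = client.get_access_token(str(temp), verifier)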
|
from __future__ import print_function
from time import strftime, gmtime
import time
import compute_opportunities
import sys
if len(sys.argv) < 2 or (sys.argv[1] != "True" and sys.argv[1] != "False"):
print("Usage: trading.py <real trading mode>")
print(" real trading mode is True or False")
sys.exit(1)
real = sys.argv[1]
key_file = ""
if real == "True":
if len(sys.argv) < 3:
print("Usage: trading.py True <key file>")
print(" key file - Path to a file containing key/secret/nonce data")
sys.exit(1)
else:
key_file = sys.argv[2]
f = open('sum_profit_' + str(strftime("%d-%b-%Y-%H-%M-%S", gmtime())) + '.txt','w')
sum_profit = 0
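# Each arbitrage opportunity is a three-leg currency cycle; every leg names
# the order-book side to take ("bid" or "ask") and the currency pair used
# for that conversion.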
opportunities = {
"btc -> eur -> usd -> btc": [["bid", "btc_eur"], ["bid", "eur_usd"], ["ask", "btc_usd"]],
"btc -> usd -> eur -> btc": [["bid", "btc_usd"], ["ask", "eur_usd"], ["ask", "btc_eur"]],
"btc -> ltc -> usd -> btc": [["ask", "ltc_btc"], ["bid", "ltc_usd"], ["ask", "btc_usd"]],
"btc -> usd -> ltc -> btc": [["bid", "btc_usd"], ["ask", "ltc_usd"], ["bid", "ltc_btc"]],
"btc -> rur -> usd -> btc": [["bid", "btc_rur"], ["ask", "usd_rur"], ["ask", "btc_usd"]],
"btc -> usd -> rur -> btc": [["bid", "btc_usd"], ["bid", "usd_rur"], ["ask", "btc_rur"]],
"btc -> rur -> ltc -> btc": [["bid", "btc_rur"], ["ask", "ltc_rur"], ["bid", "ltc_btc"]],
"btc -> ltc -> rur -> btc": [["ask", "ltc_btc"], ["bid", "ltc_rur"], ["ask", "btc_rur"]]
}
tax = 0.998
init_volume = 100
while True:
p = compute_opportunities.start_trade(opportunities, tax, init_volume, real, key_file)
sum_profit += p
    if p > 0:
        print(str(strftime("%d-%b-%Y-%H-%M-%S", gmtime())) + " " + str(p) + " " + str(sum_profit), file=f)
        f.flush()  # keep the profit log durable while the loop runs forever
time.sleep(15)
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "template_korean.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
from __future__ import absolute_import
from __future__ import print_function
import os
import numpy
import matplotlib.pyplot as plt
import datetime
import clawpack.visclaw.colormaps as colormap
import clawpack.visclaw.gaugetools as gaugetools
import clawpack.clawutil.data as clawutil
import clawpack.amrclaw.data as amrclaw
import clawpack.geoclaw.data as geodata
import clawpack.geoclaw.surge.plot as surgeplot
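# Plot configuration for a GeoClaw storm-surge run: Clawpack's plotting tools
# call setplot() with a ClawPlotData instance and render figures from the
# configured object it returns.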
def setplot(plotdata):
plotdata.clearfigures()
plotdata.format = 'binary'
# Load data from output
clawdata = clawutil.ClawInputData(2)
clawdata.read(os.path.join(plotdata.outdir, 'claw.data'))
physics = geodata.GeoClawData()
physics.read(os.path.join(plotdata.outdir, 'geoclaw.data'))
surge_data = geodata.SurgeData()
surge_data.read(os.path.join(plotdata.outdir, 'surge.data'))
friction_data = geodata.FrictionData()
friction_data.read(os.path.join(plotdata.outdir, 'friction.data'))
# Load storm track
track = surgeplot.track_data(os.path.join(plotdata.outdir, 'fort.track'))
# Set afteraxes function
def surge_afteraxes(cd):
surgeplot.surge_afteraxes(cd, track, plot_direction=False,
kwargs={"markersize": 4})
def friction_after_axes(cd):
plt.title(r"Manning's $n$ Coefficient")
# Color limits
surface_limits = [-5.0, 5.0]
speed_limits = [0.0, 3.0]
wind_limits = [0, 45]
pressure_limits = [940, 1013]
friction_bounds = [0.01, 0.04]
# ==========================================================================
# Plot specifications
# ==========================================================================
# Specify set of zooms for plotting
regions = {"World": {"xlimits": (clawdata.lower[0], clawdata.upper[0]),
"ylimits": (clawdata.lower[1], clawdata.upper[1]),
"figsize": (6.4 * 2, 4.8)}}
for (name, region_dict) in regions.items():
# Surface Figure
plotfigure = plotdata.new_plotfigure(name="Surface - %s" % name)
if 'figsize' in region_dict.keys():
plotfigure.kwargs = {"figsize": region_dict['figsize']}
plotaxes = plotfigure.new_plotaxes('surface')
plotaxes.title = "Surface"
plotaxes.xlimits = region_dict["xlimits"]
plotaxes.ylimits = region_dict["ylimits"]
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# Speed Figure
plotfigure = plotdata.new_plotfigure(name="Currents - %s" % name)
if 'figsize' in region_dict.keys():
plotfigure.kwargs = {"figsize": region_dict['figsize']}
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Currents"
plotaxes.xlimits = region_dict["xlimits"]
plotaxes.ylimits = region_dict["ylimits"]
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_speed(plotaxes, bounds=speed_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['speed'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#
# Hurricane Forcing fields
#
# Pressure field
plotfigure = plotdata.new_plotfigure(name='Pressure')
    plotfigure.show = bool(surge_data.pressure_forcing)
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['World']['xlimits']
plotaxes.ylimits = regions['World']['ylimits']
if 'figsize' in regions['World'].keys():
plotfigure.kwargs = {"figsize": regions['World']['figsize']}
plotaxes.title = "Pressure Field"
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_pressure(plotaxes, bounds=pressure_limits)
surgeplot.add_land(plotaxes)
# Wind field
plotfigure = plotdata.new_plotfigure(name='Wind Speed')
    plotfigure.show = bool(surge_data.wind_forcing)
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['World']['xlimits']
plotaxes.ylimits = regions['World']['ylimits']
if 'figsize' in regions['World'].keys():
plotfigure.kwargs = {"figsize": regions['World']['figsize']}
plotaxes.title = "Wind Field"
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_wind(plotaxes, bounds=wind_limits)
surgeplot.add_land(plotaxes)
# ========================================================================
# Figures for gauges
# ========================================================================
plotfigure = plotdata.new_plotfigure(name='Gauge Surfaces', figno=300,
type='each_gauge')
plotfigure.show = True
plotfigure.clf_each_gauge = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = [-2, 1]
# plotaxes.xlabel = "Days from landfall"
# plotaxes.ylabel = "Surface (m)"
plotaxes.ylimits = [-1, 5]
plotaxes.title = 'Surface'
def gauge_afteraxes(cd):
axes = plt.gca()
surgeplot.plot_landfall_gauge(cd.gaugesoln, axes)
# Fix up plot - in particular fix time labels
axes.set_title('Station %s' % cd.gaugeno)
axes.set_xlabel('Days relative to landfall')
axes.set_ylabel('Surface (m)')
axes.set_xlim([-2, 1])
axes.set_ylim([-1, 5])
axes.set_xticks([-2, -1, 0, 1])
axes.set_xticklabels([r"$-2$", r"$-1$", r"$0$", r"$1$"])
axes.grid(True)
plotaxes.afteraxes = gauge_afteraxes
# Plot surface as blue curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.plot_var = 3
plotitem.plotstyle = 'b-'
#
# Gauge Location Plot
#
def gauge_location_afteraxes(cd):
plt.subplots_adjust(left=0.12, bottom=0.06, right=0.97, top=0.97)
surge_afteraxes(cd)
gaugetools.plot_gauge_locations(cd.plotdata, gaugenos='all',
format_string='ko', add_labels=True)
plotfigure = plotdata.new_plotfigure(name="Gauge Locations")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Locations'
plotaxes.scaled = True
if 'figsize' in regions['World'].keys():
plotfigure.kwargs = {"figsize": regions['World']['figsize']}
plotaxes.xlimits = regions['World']["xlimits"]
plotaxes.ylimits = regions['World']["ylimits"]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# -----------------------------------------
# Parameters used only when creating html and/or latex hardcopy
# e.g., via pyclaw.plotters.frametools.printframes:
plotdata.printfigs = True # print figures
plotdata.print_format = 'png' # file format
plotdata.print_framenos = 'all' # list of frames to print
plotdata.print_gaugenos = [1, 2, 3, 4] # list of gauges to print
plotdata.print_fignos = 'all' # list of figures to print
plotdata.html = True # create html files of plots?
plotdata.latex = True # create latex file of plots?
plotdata.latex_figsperline = 2 # layout of plots
plotdata.latex_framesperline = 1 # layout of plots
plotdata.latex_makepdf = False # also run pdflatex?
plotdata.parallel = True # parallel plotting
return plotdata
|
from unittest import TestCase, mock
from preggy import expect
import thumbor.server
from tests.fixtures.custom_error_handler import (
ErrorHandler as CustomErrorHandler,
)
from thumbor.app import ThumborServiceApp
from thumbor.config import Config
from thumbor.server import (
configure_log,
get_application,
get_as_integer,
get_config,
get_context,
get_importer,
main,
run_server,
validate_config,
)
class ServerTestCase(TestCase):
def test_can_get_value_as_integer(self):
expect(get_as_integer("1")).to_equal(1)
expect(get_as_integer("a")).to_be_null()
expect(get_as_integer("")).to_be_null()
expect(get_as_integer(None)).to_be_null()
def test_can_get_config_from_path(self):
config = get_config("./tests/fixtures/thumbor_config_server_test.conf")
with mock.patch.dict("os.environ", {"ENGINE": "test"}):
expect(config).not_to_be_null()
expect(config.ALLOWED_SOURCES).to_be_like(["mydomain.com"])
expect(config.ENGINE).to_be_like("thumbor.engines.pil")
def test_can_get_config_with_env_enabled(self):
config = get_config(
"./tests/fixtures/thumbor_config_server_test.conf", True
)
with mock.patch.dict("os.environ", {"ENGINE": "test"}):
expect(config).not_to_be_null()
expect(config.ALLOWED_SOURCES).to_be_like(["mydomain.com"])
expect(config.ENGINE).to_be_like("test")
@mock.patch("logging.basicConfig")
def test_can_configure_log_from_config(self, basic_config_mock):
conf = Config()
configure_log(conf, "DEBUG")
params = dict(
datefmt="%Y-%m-%d %H:%M:%S",
level=10,
format="%(asctime)s %(name)s:%(levelname)s %(message)s",
)
basic_config_mock.assert_called_with(**params)
@mock.patch("logging.config.dictConfig")
def test_can_configure_log_from_dict_config(self, dict_config_mock):
conf = Config(THUMBOR_LOG_CONFIG={"level": "INFO"})
configure_log(conf, "DEBUG")
params = dict(
level="INFO",
)
dict_config_mock.assert_called_with(params)
def test_can_import_default_modules(self):
conf = Config()
importer = get_importer(conf)
expect(importer).not_to_be_null()
expect(importer.filters).not_to_be_empty()
def test_can_import_with_custom_error_handler_class(self):
conf = Config(
USE_CUSTOM_ERROR_HANDLING=True,
ERROR_HANDLER_MODULE="tests.fixtures.custom_error_handler",
)
importer = get_importer(conf)
expect(importer).not_to_be_null()
expect(importer.error_handler_class).not_to_be_null()
expect(importer.error_handler_class).to_be_instance_of(
CustomErrorHandler
)
def test_validate_config_security_key(self):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY=None)
with expect.error_to_happen(
RuntimeError,
message="No security key was found for this instance of thumbor. "
"Please provide one using the conf file or a security key file.",
):
validate_config(conf, server_parameters)
def test_validate_config_security_key_from_config(self):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY="something")
validate_config(conf, server_parameters)
expect(server_parameters.security_key).to_equal("something")
@mock.patch.object(thumbor.server, "which")
def test_validate_gifsicle_path(self, which_mock):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY="test", USE_GIFSICLE_ENGINE=True)
which_mock.return_value = "/usr/bin/gifsicle"
validate_config(conf, server_parameters)
expect(server_parameters.gifsicle_path).to_equal("/usr/bin/gifsicle")
@mock.patch.object(thumbor.server, "which")
def test_validate_null_gifsicle_path(self, which_mock):
server_parameters = mock.Mock(security_key=None)
conf = Config(SECURITY_KEY="test", USE_GIFSICLE_ENGINE=True)
which_mock.return_value = None
with expect.error_to_happen(
RuntimeError,
message="If using USE_GIFSICLE_ENGINE configuration to True, "
"the `gifsicle` binary must be in the PATH and must be an executable.",
):
validate_config(conf, server_parameters)
def test_get_context(self):
server_parameters = mock.Mock(
security_key=None, app_class="thumbor.app.ThumborServiceApp"
)
conf = Config(SECURITY_KEY="test")
importer = get_importer(conf)
context = get_context(server_parameters, conf, importer)
expect(context).not_to_be_null()
def test_get_application(self):
server_parameters = mock.Mock(
security_key=None, app_class="thumbor.app.ThumborServiceApp"
)
conf = Config(SECURITY_KEY="test")
importer = get_importer(conf)
context = get_context(server_parameters, conf, importer)
app = get_application(context)
expect(app).not_to_be_null()
expect(app).to_be_instance_of(ThumborServiceApp)
@mock.patch.object(thumbor.server, "HTTPServer")
def test_can_run_server_with_default_params(self, server_mock):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(
fd=None, port=1234, ip="0.0.0.0", processes=1
)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
run_server(application, context)
server_instance_mock.bind.assert_called_with(1234, "0.0.0.0")
server_instance_mock.start.assert_called_with(1)
@mock.patch.object(thumbor.server, "HTTPServer")
def test_can_run_server_with_multiple_processes(self, server_mock):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(
fd=None, port=1234, ip="0.0.0.0", processes=5
)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
run_server(application, context)
server_instance_mock.start.assert_called_with(5)
@mock.patch.object(thumbor.server, "HTTPServer")
@mock.patch.object(thumbor.server, "socket_from_fd")
def test_can_run_server_with_fd(self, socket_from_fd_mock, server_mock):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(fd=11, port=1234, ip="0.0.0.0", processes=1)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
socket_from_fd_mock.return_value = "socket mock"
run_server(application, context)
server_instance_mock.add_socket.assert_called_with("socket mock")
server_instance_mock.start.assert_called_with(1)
@mock.patch.object(thumbor.server, "HTTPServer")
@mock.patch.object(thumbor.server, "bind_unix_socket")
def test_can_run_server_with_unix_socket(
self, bind_unix_socket, server_mock
):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(
fd="/path/bin", port=1234, ip="0.0.0.0", processes=1
)
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
bind_unix_socket.return_value = "socket mock"
run_server(application, context)
bind_unix_socket.assert_called_with("/path/bin")
server_instance_mock.add_socket.assert_called_with("socket mock")
server_instance_mock.start.assert_called_with(1)
@mock.patch.object(thumbor.server, "HTTPServer")
def test_run_server_returns_server(self, server_mock):
application = mock.Mock()
context = mock.Mock()
context.server = mock.Mock(fd=None, port=1234, ip="0.0.0.0")
server_instance_mock = mock.Mock()
server_mock.return_value = server_instance_mock
server = run_server(application, context)
self.assertEqual(server, server_instance_mock)
@mock.patch.object(thumbor.server, "setup_signal_handler")
@mock.patch.object(thumbor.server, "HTTPServer")
@mock.patch.object(thumbor.server, "get_server_parameters")
@mock.patch("tornado.ioloop.IOLoop.instance", create=True)
def test_can_run_main(
self,
ioloop_mock,
get_server_parameters_mock,
server_mock,
setup_signal_handler_mock,
):
server_parameters = mock.Mock(
config_path="./tests/fixtures/thumbor_config_server_test.conf",
log_level="DEBUG",
debug=False,
security_key="sec",
app_class="thumbor.app.ThumborServiceApp",
fd=None,
ip="0.0.0.0",
port=1234,
)
get_server_parameters_mock.return_value = server_parameters
ioloop_instance_mock = mock.Mock()
ioloop_mock.return_value = ioloop_instance_mock
main()
ioloop_instance_mock.start.assert_any_call()
self.assertTrue(setup_signal_handler_mock.called)
self.assertTrue(server_mock.called)
def cleanup(self):
ServerTestCase.cleanup_called = True
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
__all__ = [
"IntegerValue",
]
import ctypes
import qy
import qy.llvm as llvm
class IntegerValue(qy.Value):
"""
Integer value in the wrapper language.
"""
def __invert__(self):
"""
Return the result of bitwise inversion.
"""
        return qy.get().builder.xor(self._value, llvm.Constant.int(self.type_, -1))
def __eq__(self, other):
"""
Return the result of an equality comparison.
"""
return \
qy.Value.from_low(
qy.get().builder.icmp(
llvm.ICMP_EQ,
self._value,
qy.value_from_any(other)._value,
),
)
def __gt__(self, other):
"""
Return the result of a greater-than comparison.
"""
return \
qy.Value.from_low(
qy.get().builder.icmp(
llvm.ICMP_SGT,
self._value,
qy.value_from_any(other).cast_to(self.type_)._value,
),
)
def __ge__(self, other):
"""
Return the result of a greater-than-or-equal comparison.
"""
return \
qy.Value.from_low(
qy.get().builder.icmp(
llvm.ICMP_SGE,
self._value,
qy.value_from_any(other).cast_to(self.type_)._value,
),
)
def __lt__(self, other):
"""
Return the result of a less-than comparison.
"""
return \
qy.Value.from_low(
qy.get().builder.icmp(
llvm.ICMP_SLT,
self._value,
qy.value_from_any(other).cast_to(self.type_)._value,
),
)
def __le__(self, other):
"""
Return the result of a less-than-or-equal comparison.
"""
return \
qy.Value.from_low(
qy.get().builder.icmp(
llvm.ICMP_SLE,
self._value,
qy.value_from_any(other).cast_to(self.type_)._value,
),
)
def __add__(self, other):
"""
Return the result of an addition.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.add(self._value, other._value))
def __sub__(self, other):
"""
Return the result of a subtraction.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.sub(self._value, other._value))
def __mul__(self, other):
"""
Return the result of a multiplication.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.mul(self._value, other._value))
def __div__(self, other):
"""
Return the result of a division.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.sdiv(self._value, other._value))
def __mod__(self, other):
"""
Return the remainder of a division.
Note that this operation performs C-style, not Python-style, modulo.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.srem(self._value, other._value))
def __and__(self, other):
"""
Return the result of a bitwise and.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.and_(self._value, other._value))
def __xor__(self, other):
"""
Return the result of a bitwise xor.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.xor(self._value, other._value))
def __or__(self, other):
"""
Return the result of a bitwise or.
"""
other = qy.value_from_any(other).cast_to(self.type_)
return IntegerValue(qy.get().builder.or_(self._value, other._value))
def cast_to(self, type_, name = ""):
"""
Cast this value to the specified type.
"""
# XXX cleanly handle signedness somehow (explicit "signed" qy value?)
type_ = qy.type_from_any(type_)
low_value = None
if type_.kind == llvm.TYPE_DOUBLE:
low_value = qy.get().builder.sitofp(self._value, type_, name)
elif type_.kind == llvm.TYPE_INTEGER:
if self.type_.width == type_.width:
low_value = self._value
elif self.type_.width < type_.width:
low_value = qy.get().builder.sext(self._value, type_, name)
elif self.type_.width > type_.width:
low_value = qy.get().builder.trunc(self._value, type_, name)
if low_value is None:
raise CoercionError(self.type_, type_)
else:
return qy.Value.from_any(low_value)
def to_python(self):
"""
Emit conversion of this value to a Python object.
"""
int_from_long = Function.named("PyInt_FromLong", object_ptr_type, [ctypes.c_long])
return int_from_long(self._value)
|
import os
script = os.environ['PROCESSINGSCRIPT']
if script == 'redis-to-bigquery':
os.system("python redis-to-bigquery.py")
elif script == 'twitter-to-redis':
os.system("python twitter-to-redis.py")
else:
print "unknown script %s" % script
|
from django.contrib import admin
from src.basic.messages.models import Message
class MessageAdmin(admin.ModelAdmin):
list_display = ('from_user', 'to_user', 'subject', 'to_status', 'from_status', 'created', 'content_type', 'object_id')
admin.site.register(Message, MessageAdmin)
|
"""Search Giphy."""
import random
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.plugin import PluginSetupError
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
api_key = config.create("giphy", "key",
fallback="",
comment="A Giphy API key. API keys can be registered at "
"https://developers.giphy.com/dashboard/")
def collect_photos(posts):
options = []
for entry in posts:
if 'photos' in entry:
for photo in entry['photos']:
options.append(photo['original_size']['url'])
return options
@commands.create('giphy', cost=2, category='Search', params=[Text('query')])
@rate_limit()
async def search(message, query):
"""
Search Giphy for images and pick a random one.
Example::
/giphy all time low
"""
r = await http.get("https://api.giphy.com/v1/gifs/search", params={
"q": query,
"api_key": api_key()
})
data = r.json()
if not len(data['data']):
raise CommandError("No results matching '{}'.".format(query))
return random.choice(data['data'])['url']
@commands.create('sticker', cost=2, category='Search', params=[Text('query')])
@rate_limit()
async def sticker(message, query):
"""
Search Giphy for stickers and pick a random one.
Example::
/sticker happy
"""
r = await http.get("https://api.giphy.com/v1/stickers/search", params={
"q": query,
"api_key": api_key()
})
data = r.json()
if not len(data['data']):
raise CommandError("No results matching '{}'.".format(query))
return random.choice(data['data'])['url']
def setup():
config.add(api_key)
if not api_key():
raise PluginSetupError("This plugin requires an API key from Giphy. Registration is free. Get keys from "
"https://developers.giphy.com/dashboard/.")
commands.add(search)
commands.add(sticker)
|
import _plotly_utils.basevalidators
class DyValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="dy", parent_name="waterfall", **kwargs):
super(DyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
"""
The 'instructions' widget for Morse Trainer.
Used to show a read-only QTextEdit containing instructions.
instructions = Instructions(text)
"""
from PyQt5.QtWidgets import (QApplication, QWidget, QComboBox, QLabel,
QTextEdit, QHBoxLayout, QVBoxLayout, QGroupBox)
class Instructions(QWidget):
def __init__(self, text):
"""Create instructions containing 'text'."""
QWidget.__init__(self)
self.initUI(text)
self.show()
def initUI(self, text):
# define the widgets in this group
doc = QTextEdit(self)
doc.setReadOnly(True)
doc.insertPlainText(text)
# start the layout
layout = QVBoxLayout()
layout.addWidget(doc)
self.setLayout(layout)
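if __name__ == '__main__':
    # Minimal usage sketch (illustrative only): show the widget with some
    # sample text; requires a display to run.
    import sys
    app = QApplication(sys.argv)
    instructions = Instructions('Send the characters you hear.')
    sys.exit(app.exec_())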
|
from __future__ import unicode_literals
from abc import abstractmethod
from six import with_metaclass
from ..policy import ABCPolicyUtilMeta, PolicyUtil
class AbstractActionGenerator(with_metaclass(ABCPolicyUtilMeta, PolicyUtil)):
"""
Abstract base class for action generators, which determine what actions are to be executed based on current
container states.
"""
@abstractmethod
def get_state_actions(self, state, **kwargs):
"""
Generates actions from a single configuration state.
:param state: Configuration state.
:type state: dockermap.map.state.ConfigState
:param kwargs: Additional keyword arguments.
:return: Actions on the client, map, and configurations.
:rtype: list[dockermap.map.action.ItemAction]
"""
pass
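# Illustrative subclass (a sketch, not part of dockermap itself): the minimal
# surface a concrete generator has to provide is a get_state_actions that
# returns a list of ItemAction objects for the given state.
#
#     class NullActionGenerator(AbstractActionGenerator):
#         """Example generator that never schedules any actions."""
#         def get_state_actions(self, state, **kwargs):
#             return []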
|
f = open("game_actions.yml","w+")
for actor in range(1,6):
for turn in range(1,4):
f.write("ac_game1-actor" + str(actor) + "turn" + str(turn) + ":\n")
f.write(" description: \"{ actor: " + str(actor) + ", moves: 'ruldruldrl' }\"\n")
f.write(" turn: firstgameturns" + str(turn) + "\n\n")
f.close()
f = open("game_events.yml","w+")
for actor in range(1,6):
for turn in range(1,4):
f.write("ev_game1-actor" + str(actor) + "turn" + str(turn) + ":\n")
f.write(" description: \"{ actor: " + str(actor) + ", moves: 'ruldruldrl' }\"\n")
f.write(" turn: firstgameturns" + str(turn) + "\n\n")
f.close()
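# Each stanza written above looks like this (actor 1, turn 1, actions file):
#
#     ac_game1-actor1turn1:
#       description: "{ actor: 1, moves: 'ruldruldrl' }"
#       turn: firstgameturns1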
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flaskext.mysql import MySQL
from flask_login import LoginManager
from flask_mail import Mail
from .momentjs import momentjs
from flask_babel import Babel
from flask.json import JSONEncoder
app = Flask(__name__)
app.config.from_object('config')
mysql = MySQL()
mysql.init_app(app)
db = SQLAlchemy(app)
lm = LoginManager(app)
mail = Mail(app)
babel = Babel(app)
app.jinja_env.globals['momentjs'] = momentjs
class CustomJSONEncoder(JSONEncoder):
"""This class adds support for lazy translation texts to flask's JSON encoder. This is neccessary when flashing translated texts."""
def defualt(self, obj):
from speaklater import is_lazy_string
if is_lazy_string(obj):
try:
return unicode(obj) # python 2
except NameError:
return str(obj) # python 3
return super(CustomJSONEncoder, self).defualt(obj)
app.json_encoder = CustomJSONEncoder
from app import views, models
|
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base.domain import Domain
from twilio.rest.lookups.v1 import V1
class Lookups(Domain):
def __init__(self, twilio):
"""
Initialize the Lookups Domain
:returns: Domain for Lookups
:rtype: twilio.rest.lookups.Lookups
"""
super(Lookups, self).__init__(twilio)
self.base_url = 'https://lookups.twilio.com'
# Versions
self._v1 = None
@property
def v1(self):
"""
:returns: Version v1 of lookups
:rtype: twilio.rest.lookups.v1.V1
"""
if self._v1 is None:
self._v1 = V1(self)
return self._v1
@property
def phone_numbers(self):
"""
:rtype: twilio.rest.lookups.v1.phone_number.PhoneNumberList
"""
return self.v1.phone_numbers
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Lookups>'
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy as np
import os,cython
sourcefiles = ['floyd.pyx']
this_dir = os.path.split(cython.__file__)[0]
extensions = [
Extension("floyd", sourcefiles,
include_dirs=[np.get_include(),this_dir])
]
setup(cmdclass={'build_ext': build_ext},
ext_modules=extensions)
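# Typical invocation (sketch): build the extension in place so that
# `import floyd` works from the source directory:
#
#     python setup.py build_ext --inplace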
|
import sys
import os
import subprocess
argv_size = len(sys.argv)
if argv_size != 3 and argv_size != 4:
print("Usage protos_compiler.py PROTO_COMPILER_FILE_PATH [proto dir] [proto extension]")
sys.exit(0)
try:
proto_compiler_file_path = sys.argv[1]
directory = sys.argv[2]
if argv_size == 4:
extension = sys.argv[3]
else:
extension = ".proto"
# Find all *.extension files with filter
files = [file for file in os.listdir(directory) if file.endswith(extension)]
print ("Nohros Inc. Protocol Buffer compiler invocation")
print ("")
print ("Compiling *.protos using: ")
print (" Compiler: " + proto_compiler_file_path)
print (" Proto(s) path: " + directory)
print (" Extension: " + extension)
print (" Amount: " + str(len(files)))
print ("")
# compile all the proto specified proto files
for file in files:
script = "\"" + proto_compiler_file_path + "\" " + directory + "\\" + file
print ("Executing the compiler script")
print (" " + script)
os.system(script)
except Exception as inst:
print (inst)
sys.exit(1)
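# Example invocation (sketch; paths are illustrative):
#
#     python protos_compiler.py C:\tools\protoc.exe .\protos .proto
#
# Note the script joins the directory and file name with a backslash, so it
# is primarily geared towards Windows-style paths.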
|
"""Testing suite for Multivariate Normal class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
import numpy.testing as npt
import scipy.stats as scs
from multidensity import MvN
class MvNTestCase(ut.TestCase):
"""Test MvN distribution class."""
def test_pdf(self):
"""Test pdf."""
ndim, nobs = 3, 10
size = (nobs, ndim)
data = np.random.normal(size=size)
pdf = MvN.pdf(data)
self.assertEqual(pdf.ndim, 1)
norm_pdf = np.zeros(nobs)
for obs in range(nobs):
scs_ndpf = scs.multivariate_normal.pdf
norm_pdf[obs] = scs_ndpf(data[obs], mean=np.zeros(ndim),
cov=np.eye(ndim))
npt.assert_array_almost_equal(np.exp(pdf), norm_pdf)
mean = np.random.normal(size=size)
pdf = MvN.pdf(data, mean=mean)
norm_pdf = np.zeros(nobs)
for obs in range(nobs):
scs_ndpf = scs.multivariate_normal.pdf
norm_pdf[obs] = scs_ndpf(data[obs], mean=mean[obs])
npt.assert_array_almost_equal(np.exp(pdf), norm_pdf)
mean = np.random.normal(size=size)
cov = np.zeros((nobs, ndim, ndim))
for obs in range(nobs):
cov[obs] = np.corrcoef(np.random.normal(size=size).T)
pdf = MvN.pdf(data, mean=mean, cov=cov)
norm_pdf = np.zeros(nobs)
for obs in range(nobs):
scs_ndpf = scs.multivariate_normal.pdf
norm_pdf[obs] = scs_ndpf(data[obs], mean=mean[obs], cov=cov[obs])
npt.assert_array_almost_equal(np.exp(pdf), norm_pdf)
if __name__ == '__main__':
ut.main()
|
import equibel as eb
def test_expanding_cardinality():
G = eb.star_graph(3)
G.add_formula(1, 'p')
G.add_formula(2, 'p')
G.add_formula(3, '~p')
R_semantic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SEMANTIC, opt_type=eb.CARDINALITY, simplify=True)
assert(R_semantic.formula_conj(0) == eb.parse_formula('p'))
R_syntactic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SYNTACTIC, opt_type=eb.CARDINALITY, simplify=True)
assert(R_syntactic == R_semantic)
def test_expanding_iteration_chain_semantic():
p = eb.parse_formula('p')
G = eb.path_graph(5)
G.add_formula(0, p)
G.add_formula(4, ~p)
R_semantic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SEMANTIC, simplify=True)
assert(R_semantic.formulas() == {0: set([p]), 1: set([p]), 2: set([]), 3: set([~p]), 4: set([~p])})
def test_expanding_iteration_chain_syntactic():
p = eb.parse_formula('p')
G = eb.path_graph(5)
G.add_formula(0, p)
G.add_formula(4, ~p)
R_syntactic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SYNTACTIC, simplify=True)
assert(R_syntactic.formulas() == {0: set([p]), 1: set([p]), 2: set([]), 3: set([~p]), 4: set([~p])})
def test_expanding_iteration_medium_chain():
p, q, r = [eb.parse_formula(letter) for letter in "pqr"]
f = p & q & r
g = ~p & ~q & ~r
G = eb.path_graph(10)
G.add_formula(0, f)
G.add_formula(9, g)
R_semantic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SEMANTIC, simplify=True)
print(R_semantic.formulas())
for node in range(0, 5):
assert(R_semantic.formula_conj(node) == f)
for node in range(5,10):
assert(R_semantic.formula_conj(node) == g)
R_syntactic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SYNTACTIC, simplify=True)
assert(R_semantic == R_syntactic)
"""
def test_expanding_iteration_big_chain():
p, q, r = [eb.parse_formula(letter) for letter in "pqr"]
f = p & q & r
g = ~p & ~q & ~r
G = eb.path_graph(20)
G.add_formula(0, f)
G.add_formula(19, g)
R_semantic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SEMANTIC, simplify=True)
print(R_semantic.formulas())
for node in range(0, 10):
assert(R_semantic.formula_conj(node) == f)
for node in range(11,20):
assert(R_semantic.formula_conj(node) == g)
R_syntactic, num_expanding_iterations = eb.iterate_expanding_fixpoint(G, method=eb.SYNTACTIC, simplify=True)
assert(R_semantic == R_syntactic)
"""
if __name__ == '__main__':
#test_expanding_iteration_chain()
test_expanding_iteration_medium_chain()
#test_expanding_iteration_big_chain()
|
from nbconvert.preprocessors.sanitize import SanitizeHTML
from traitlets import Set
class PlotlySanitizeHTML(SanitizeHTML):
def __init__(self, **kw):
super(PlotlySanitizeHTML, self).__init__(**kw)
        # Add the Plotly MIME type to safe_output_keys (set.update mutates
        # in place and returns None, so its result is not assigned):
        self.safe_output_keys.update(['text/vnd.plotly.v1+html'])
def sanitize_code_outputs(self, outputs):
"""
Overwritten to allow Plotly outputs be rendered as text/html.
"""
outputs = super(PlotlySanitizeHTML, self).sanitize_code_outputs(outputs)
# Do extra processing for 'text/vnd.plotly.v1+html'
for output in outputs:
if output['output_type'] in ('stream', 'error'):
continue
data = output.data
for key in data:
if key == 'text/vnd.plotly.v1+html':
data['text/html'] = data[key]
return outputs
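# Usage sketch (assumes nbconvert's standard Exporter API): register the
# preprocessor on an exporter so Plotly HTML output survives sanitization.
#
#     from nbconvert import HTMLExporter
#     exporter = HTMLExporter()
#     exporter.register_preprocessor(PlotlySanitizeHTML, enabled=True)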
|
from django.conf.urls import url
from test_views.views import IndexView, TestView, LoginRequiredView
urlpatterns = [
url(r'^$', IndexView()),
url(r'^test$', TestView()),
url(r'^login_required$', LoginRequiredView()),
]
|
import unittest
import os
import redis
import hashlib
import subprocess
import binascii
import RandomIO
from sys import platform as _platform
if _platform.startswith('linux') or _platform == 'darwin':
cat_cmd = 'cat'
iotools_call = ['IOTools.py']
elif _platform == 'win32':
cat_cmd = 'type'
iotools_call = ['python', os.path.join('../bin', 'IOTools.py')]
class TestRandomIO(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_gen(self):
s = RandomIO.RandomIO()
b = s.read(100)
self.assertEqual(len(b), 100)
self.assertEqual(
RandomIO.RandomIO(123456).read(100),
RandomIO.RandomIO(123456).read(100))
self.assertEqual(RandomIO.RandomIO(b'byte string seed').read(
100), RandomIO.RandomIO(b'byte string seed').read(100))
self.assertEqual(RandomIO.RandomIO(1.23456).read(
100), RandomIO.RandomIO(1.23456).read(100))
def test_consistent(self):
s1 = RandomIO.RandomIO('seed string')
s2 = RandomIO.RandomIO('seed string')
s3 = RandomIO.RandomIO('seed string')
s4 = RandomIO.RandomIO('another seed')
self.assertEqual(s1.read(100), s2.read(100))
self.assertNotEqual(s3.read(100), s4.read(100))
def test_crossplatform(self):
string_seed1 = b'\t\xb0\xef\xd9\x05p\xe1W\x17\x8a9\xc6!;^6\x1d\xadj\
\xb4#n\x1d/\x12+\xe6\xb1\x80\xc86\x06I\xc4!\x8b39\x84E\x1d\x14\xdf\x14e\x12\
\xfa\xf0\r\x1b'
s = RandomIO.RandomIO('seed1').read(50)
self.assertEqual(s, string_seed1)
string_123456 = b'\x18\xb2\xce\x8a \xc9\xe2n\xd9\xf6\x06\x0b8\xf9\xb9\
\xf8\x9b#81z\xf8\x02\x83\x1e\xa2\xf02\x7f\xad\xd7*h\xad9\xf6\x14U\xca\x90\\i\
\xcc~#h\xaa\xb4\x1b['
s = RandomIO.RandomIO(123456).read(50)
self.assertEqual(s, string_123456)
def test_read(self):
s1 = RandomIO.RandomIO('seed string')
with self.assertRaises(RuntimeError) as ex:
s1.read()
self.assertEqual(
str(ex.exception),
'Stream size must be specified if bytes to read is not.')
def test_dump(self):
s1 = RandomIO.RandomIO('seed string')
s2 = RandomIO.RandomIO('seed string')
file1 = 'file1'
file2 = 'file2'
with open(file1, 'wb') as f:
s1.dump(f, 100)
with open(file2, 'wb') as f:
s2.dump(f, 100)
with open(file1, 'rb') as f:
contents1 = f.read()
with open(file2, 'rb') as f:
contents2 = f.read()
self.assertEqual(len(contents1), 100)
self.assertEqual(contents1, contents2)
os.remove(file1)
os.remove(file2)
def test_genfile(self):
path = RandomIO.RandomIO('seed string').genfile(100)
with open(path, 'rb') as f:
contents = f.read()
self.assertEqual(len(contents), 100)
os.remove(path)
dir = 'test_directory/'
os.mkdir(dir)
path = RandomIO.RandomIO('seed string').genfile(100, dir)
(h1, t1) = os.path.split(dir)
(h2, t2) = os.path.split(path)
self.assertEqual(h1, h2)
with open(path, 'rb') as f:
contents = f.read()
self.assertEqual(len(contents), 100)
os.remove(path)
os.rmdir(dir)
def test_large(self):
length = 100000000
file1 = RandomIO.RandomIO('seed string').genfile(length)
file2 = RandomIO.RandomIO('seed string').genfile(length)
with open(file1, 'rb') as f1:
            with open(file2, 'rb') as f2:
for c in iter(lambda: f1.read(1000), b''):
self.assertEqual(c, f2.read(1000))
os.remove(file1)
os.remove(file2)
def test_read_limit(self):
s1 = RandomIO.RandomIO('seed string', 100)
s1.seek(90)
buf1 = s1.read(100)
self.assertEqual(len(buf1), 10)
def test_read_zero(self):
s1 = RandomIO.RandomIO('seed string')
b = s1.read(0)
self.assertEqual(len(b), 0)
def test_seek_beginning(self):
s1 = RandomIO.RandomIO('seed string')
buf1 = s1.read(10)
s1.seek(0)
buf2 = s1.read(10)
self.assertEqual(buf1, buf2)
def test_seek_middle(self):
s1 = RandomIO.RandomIO('seed string')
s1.seek(10000)
buf1 = s1.read(10)
s1.seek(-10, os.SEEK_CUR)
buf2 = s1.read(10)
self.assertEqual(buf1, buf2)
def test_seek_end_consistency(self):
s1 = RandomIO.RandomIO('seed string', 100)
s1.seek(98)
buf1 = s1.read(10)
s1.seek(90)
buf2 = s1.read(10)
self.assertEqual(buf1, buf2[-2:])
def test_seek_end(self):
s1 = RandomIO.RandomIO('seed string', 1000)
s1.seek(900)
buf1 = s1.read(10)
s1.seek(100, os.SEEK_END)
buf2 = s1.read(10)
self.assertEqual(buf1, buf2)
def test_tell_beginning(self):
s1 = RandomIO.RandomIO('seed string')
s1.read(100)
p = s1.tell()
self.assertEqual(p, 100)
def test_tell_seek_parity(self):
s1 = RandomIO.RandomIO('seed string')
s1.seek(100)
p = s1.tell()
self.assertEqual(p, 100)
def test_seek_end_not_possible(self):
s1 = RandomIO.RandomIO('seed string')
with self.assertRaises(RuntimeError) as ex:
s1.seek(100, os.SEEK_END)
self.assertEqual(
str(ex.exception),
'Cannot seek from end of stream if size is unknown.')
def test_iotools_txt(self):
output = 'txt_test.out'
size = 10485760
subprocess.call(
iotools_call + ['pairgen', str(size),
'-p', '10', '-o', output])
with open(output, 'r') as pairsfile:
for line in pairsfile:
(hexseed, hash) = line.rstrip().split(' ')
seed = binascii.unhexlify(hexseed)
testhash = hashlib.sha256(
RandomIO.RandomIO(seed).read(size)).hexdigest()
self.assertEqual(hash, testhash)
os.remove(output)
def test_iotools_redis(self):
# no redis support for windows, so just pass the test
if not _platform == 'win32':
r = redis.StrictRedis(host='localhost', port=6379, db=0)
output = 'redis_test.out'
size = 10485760
subprocess.call(
iotools_call + ['pairgen', str(size), '-p', '10', '-o', output,
'--redis'])
subprocess.call(
'{0} {1} | redis-cli --pipe'.format(cat_cmd, output),
shell=True)
for hexseed in r.scan_iter():
seed = binascii.unhexlify(hexseed)
testhash = hashlib.sha256(
RandomIO.RandomIO(seed).read(size)).hexdigest()
self.assertEqual(r.get(hexseed).decode('ascii'), testhash)
os.remove(output)
r.flushall()
if __name__ == '__main__':
unittest.main()
|
import sys
if sys.version_info[0] == 2:
import mock
else:
from unittest import mock
from unittest import TestCase
from uuid import uuid4
import vcr
from message_api.channel import MessageApi
class TestChannelCreate(TestCase):
def setUp(self):
self.project_id = str(uuid4().hex)
self.api_key = str(uuid4().hex)
self.message_api = MessageApi(
project_id=self.project_id,
api_key=self.api_key
)
self.api_url = self.message_api.api_url
def test_channel_create_success(self):
channel_token = str(uuid4().hex)
self.message_api._send_request = mock.Mock()
self.message_api._send_request.return_value = {
"token": channel_token
}
client_id = str(uuid4().hex)
result = self.message_api.create_channel(client_id)
self.assertEqual(channel_token, result)
body = {
"channel_id": client_id
}
url = self.api_url + "/projects/" + self.project_id + "/create_channel"
self.message_api._send_request.assert_called_once_with(
"post",
url,
json=body
)
@vcr.use_cassette('tests/vcr/test_create_channel.yaml', record_mode="once")
def test_create_channel_vcr(self):
client_id = "test-channel"
message_api = MessageApi(
project_id="test",
api_key="1234"
)
result = message_api.create_channel(client_id)
def test_send_message_success(self):
self.message_api._send_request = mock.Mock()
client_id = str(uuid4().hex)
message = str(uuid4().hex)
self.message_api.send_message(client_id, message)
body = {
"channel_id": client_id,
"message": message
}
url = self.api_url + "/projects/" + self.project_id + "/send_message"
self.message_api._send_request.assert_called_once_with(
"post",
url,
json=body
)
@vcr.use_cassette('tests/vcr/test_send_message.yaml', record_mode="once")
def test_send_message_vcr(self):
client_id = "test-channel"
message_api = MessageApi(
project_id="test",
api_key="1234"
)
result = message_api.send_message("test-channel", "test-message")
|
from django.utils import simplejson
from dajaxice.decorators import dajaxice_register
from dajaxice.core import dajaxice_functions
from dajax.core import Dajax
import os, glob, zipfile
from datetime import datetime
from lighthouse.templateGen.lapack_le.lapack_le import generateTemplate, generateTemplate_C
dir_download = "./static/download/"
extension_dic = {'fortran': 'f90', 'cpp': 'c'}
@dajaxice_register
def createTemplate(request, checked_list, language, time):
dajax = Dajax()
dajax.add_css_class("#template_output", "brush: %s;"%language)
try:
extension = extension_dic[language]
file_zip = zipfile.ZipFile(dir_download+"lighthouse_%s.zip"%time, "w")
makeFile("temp_%s.%s"%(checked_list[0].lower(), extension))
with open(dir_download+'%s.%s'%(time, extension), 'w') as outfile:
for item in checked_list:
item = item.lower()
if language == 'cpp':
go = generateTemplate_C(item)
else:
go = generateTemplate(item)
go.make_template()
name = "./lighthouse/templateGen/lapack_le/%s/codeTemplates/temp_%s.%s"%(language, item, extension)
file_zip.write(name, os.path.basename(name), zipfile.ZIP_DEFLATED)
with open(name,"r") as infile:
outfile.write(infile.read())
## display f_output, which contains all routines
f_output = open(dir_download+"%s.%s"%(time, extension),"r")
dajax.assign("#template_output", 'innerHTML', f_output.read())
dajax.script('SyntaxHighlighter.highlight()')
f_output.close()
## write README into the zip file
file_zip.write("./lighthouse/templateGen/lapack_le/README", os.path.basename(dir_download+"README"), zipfile.ZIP_DEFLATED)
## write makefile into the zip file
file_zip.write(dir_download+"makefile", os.path.basename(dir_download+"makefile"), zipfile.ZIP_DEFLATED)
file_zip.close()
## remove makefile from the download directory
os.remove(dir_download+"makefile")
except:
dajax.assign("#template_output", 'innerHTML', 'Coming soon...')
return dajax.json()
def makeFile(file_name):
with open(dir_download+'makefile', 'w') as outfile:
outfile.write("# This is a simple example of how to compile a program containing LAPACK routines. \n\n")
if ".f90" in file_name:
outfile.write("CC=gfortran\nCFLAGS=-llapack -lblas\n\n")
elif ".c" in file_name:
outfile.write("CC=gcc\nCFLAGS=-llapack -lblas\n\n")
outfile.write("lapackout: %s\n"%file_name)
outfile.write("\t$(CC) %s -o lapackout $(CFLAGS)"%file_name)
|
class OpsimulateError(Exception):
"""General Opsimulate Error"""
class ModuleValidationError(OpsimulateError):
"""Error with module contents"""
class ModuleMetadataError(ModuleValidationError):
"""Error with format of module metadata"""
class ModuleScriptsExecutableError(ModuleValidationError):
"""Error with module scripts being executable"""
class VMNotRunningError(OpsimulateError):
"""Error with Gitlab VM not running yet"""
class HomeDirNotSetupError(OpsimulateError):
"""Error with Opsimulate home dir not setup yet"""
class ModuleNotSelectedError(OpsimulateError):
"""Error with a module not being selected yet"""
class GCPCredentialsNotLoadedError(OpsimulateError):
"""Error with GCP credentials not having been loaded yet"""
|
from django.test import TestCase
from scrapper import models, crawler, tasks
class DefaultTest(TestCase):
    def setUp(self):
models.Criterion.objects.create(type='hash_tag', value=u'مصر')
models.Criterion.objects.create(type='hash_tag', value=u'وطن')
models.Criterion.objects.create(type='hash_tag', value=u'سيناء')
models.Criterion.objects.create(type='hash_tag', value=u'عرب')
models.Criterion.objects.create(type='user_name', value=u'moemenology')
models.Criterion.objects.create(type='user_name', value=u'Mah_Yaser')
def test_example(self):
self.assertEqual(2, 1 + 1)
class FetchTest(DefaultTest):
pass
|
import pytest
@pytest.fixture
def atac_alignment_quality_metric_low(testapp, award, encode_lab,
analysis_step_run_atac_encode4_alignment,
ATAC_bam):
item = {
"step_run": analysis_step_run_atac_encode4_alignment['@id'],
"award": award["uuid"],
"lab": encode_lab["uuid"],
"assay_term_name": 'ATAC-seq',
"quality_metric_of": [ATAC_bam['@id']],
"pct_mapped_reads": 76,
"nfr_peak_exists": False,
"mapped_reads": 880479,
}
return testapp.post_json('/atac_alignment_quality_metric', item).json['@graph'][0]
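# Example consumer (sketch): a test receiving this fixture gets back the
# posted item, so the fields defined above round-trip, e.g.:
#
#     def test_pct_mapped_reads(atac_alignment_quality_metric_low):
#         assert atac_alignment_quality_metric_low['pct_mapped_reads'] == 76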
|
import sys
import re
import json
import gzip
re_filename = re.compile(r".*[.-][Jj]?([0-9]+)[Jj]?\.json\.gz")
re_arrived = re.compile(r"^atmost: arrived '(.+)'")
re_linking = re.compile(r"^atmost: linking '(.+)': \(predicted=([0-9.-]+)G±([0-9.-]+)G\|available=([0-9.-]+)G\)")
re_linked = re.compile(r"^atmost: linked '(.+)': \(predicted=([0-9.-]+)G±([0-9.-]+)G\|actual=([0-9.-]+)G\)")
try:
data = []
for arg in sys.argv[1:]:
match = re_filename.search(arg)
if match:
jobs = int(match.group(1))
jk = "j%d" % jobs
targets = {}
with gzip.open(arg, "rt") as inp:
jd = json.load(inp)
for attr in jd:
line = attr["line"]
del attr["line"]
match = re_arrived.search(line)
if match:
targets[match.group(1)] = {
"name": match.group(1),
"arrived": attr}
else:
match = re_linking.search(line)
if match:
attr["predicted"] = float(match.group(2))
attr["error"] = float(match.group(3))
attr["available"] = float(match.group(4))
targets[match.group(1)]["linking"] = attr
else:
match = re_linked.search(line)
if match:
attr["predicted"] = float(match.group(2))
attr["error"] = float(match.group(3))
attr["actual"] = float(match.group(4))
targets[match.group(1)]["linked"] = attr
else:
sys.stderr.write(line)
data.append({"jobs": jobs, "targets": [x for x in targets.values()]})
json.dump(data, sys.stdout)
except BrokenPipeError:
pass
except KeyboardInterrupt:
pass
|
from django.core.urlresolvers import resolve
from collections import OrderedDict
from ndc.models import Tag
def menus(request):
menus = OrderedDict([
('timetable', {'title': 'Timetable', 'icon': 'calendar', 'color': 'blue'}),
('sessions', {'title': 'Session', 'icon': 'video', 'color': 'green'}),
('speakers', {'title': 'Speaker', 'icon': 'man', 'color': 'yellow'}),
('companies', {'title': 'Company', 'icon': 'cmd', 'color': 'pink'}),
])
try:
name = resolve(request.path).url_name
if name in menus:
menus[name]['active'] = True
except:
pass
return {
'menus': menus,
}
def tags(request):
return {
'tags': Tag.objects.all(),
}
def search(request):
return {
'q': request.GET.get('q', ''),
}
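# Usage sketch: these are template context processors; they would be wired up
# in settings.py (module path assumed; adjust to where this file lives), e.g.:
#
#     TEMPLATE_CONTEXT_PROCESSORS += (
#         'ndc.context_processors.menus',
#         'ndc.context_processors.tags',
#         'ndc.context_processors.search',
#     )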
|
"""
Domain sharding for Django static files.
"""
VERSION = (0, 1)
__version__ = '.'.join([str(x) for x in VERSION])
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('letter', '0002_auto_20200303_1117'),
('guide', '0005_action_mail_intent'),
]
operations = [
migrations.AddField(
model_name='action',
name='letter_template',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='letter.LetterTemplate'),
),
]
|
"""Definitions for tt's built-in Boolean operators."""
class BooleanOperator(object):
"""A thin wrapper around a Boolean operator."""
def __init__(self, precedence, eval_func, default_symbol_str,
default_plain_english_str):
self._precedence = precedence
self._eval_func = eval_func
self._default_symbol_str = default_symbol_str
self._default_plain_english_str = default_plain_english_str
def __str__(self):
return self._default_plain_english_str
def __repr__(self):
return '<BooleanOperator "{}">'.format(self._default_plain_english_str)
@property
def precedence(self):
"""Precedence of this operator, relative to other operators.
:type: :class:`int <python:int>`
.. code-block:: python
>>> from tt.definitions import TT_AND_OP, TT_OR_OP
>>> TT_AND_OP.precedence > TT_OR_OP.precedence
True
"""
return self._precedence
@property
def eval_func(self):
"""The evaluation function wrapped by this operator.
:type: :data:`Callable <python:typing.Callable>`
.. code-block:: python
>>> from tt.definitions import TT_XOR_OP
>>> TT_XOR_OP.eval_func(0, 0)
False
>>> TT_XOR_OP.eval_func(True, False)
True
"""
return self._eval_func
@property
def default_symbol_str(self):
"""The default symbolic string representation of this operator.
Some operators may not have a recognized symbol str, in which case
this attribute will be ``None``.
:type: :class:`str <python:str>` or ``None``
.. code-block:: python
>>> from tt.definitions import TT_AND_OP, TT_NAND_OP
>>> print(TT_AND_OP.default_symbol_str)
/\\
>>> print(TT_NAND_OP.default_symbol_str)
None
"""
return self._default_symbol_str
@property
def default_plain_english_str(self):
"""The default plain English string representation of this operator.
Unlike :data:`default_symbol_str`, this attribute should never be
``None``.
:type: :class:`str <python:str>`
.. code-block:: python
>>> from tt.definitions import TT_AND_OP, TT_NAND_OP
>>> print(TT_AND_OP.default_plain_english_str)
and
>>> print(TT_NAND_OP.default_plain_english_str)
nand
"""
return self._default_plain_english_str
_PRECEDENCE = {
'ZERO': 0,
'LOW': 1,
'MEDIUM': 2,
'HIGH': 3
}
TT_NOT_OP = BooleanOperator(_PRECEDENCE['HIGH'],
lambda a: not a,
'~', 'not')
"""tt's operator implementation of a Boolean NOT.
:type: :class:`BooleanOperator`
"""
TT_IMPL_OP = BooleanOperator(_PRECEDENCE['MEDIUM'],
lambda a, b: (not a) or b,
'->', 'impl')
"""tt's operator implementation of a Boolean IMPLIES.
:type: :class:`BooleanOperator`
"""
TT_XOR_OP = BooleanOperator(_PRECEDENCE['MEDIUM'],
lambda a, b: a != b,
None, 'xor')
"""tt's operator implementation of a Boolean XOR.
:type: :class:`BooleanOperator`
"""
TT_XNOR_OP = BooleanOperator(_PRECEDENCE['MEDIUM'],
lambda a, b: a == b,
None, 'xnor')
"""tt's operator implementation of a Boolean XNOR.
:type: :class:`BooleanOperator`
"""
TT_AND_OP = BooleanOperator(_PRECEDENCE['LOW'],
lambda a, b: a and b,
'/\\', 'and')
"""tt's operator implementation of a Boolean AND.
:type: :class:`BooleanOperator`
"""
TT_NAND_OP = BooleanOperator(_PRECEDENCE['LOW'],
lambda a, b: not(a and b),
None, 'nand')
"""tt's operator implementation of a Boolean NAND.
:type: :class:`BooleanOperator`
"""
TT_OR_OP = BooleanOperator(_PRECEDENCE['ZERO'],
lambda a, b: a or b,
'\\/', 'or')
"""tt's operator implementation of a Boolean OR.
:type: :class:`BooleanOperator`
"""
TT_NOR_OP = BooleanOperator(_PRECEDENCE['ZERO'],
lambda a, b: not(a or b),
None, 'nor')
"""tt's operator implementation of a Boolean NOR.
:type: :class:`BooleanOperator`
"""
BINARY_OPERATORS = {
TT_AND_OP,
TT_IMPL_OP,
TT_NAND_OP,
TT_NOR_OP,
TT_OR_OP,
TT_XNOR_OP,
TT_XOR_OP
}
"""The set of all binary operators available in tt.
:type: Set{:class:`BooleanOperator`}
"""
NON_PRIMITIVE_OPERATORS = BINARY_OPERATORS - {TT_AND_OP, TT_OR_OP}
"""The set of non-primitive operators available in tt.
This includes all binary operators other than AND and OR.
:type: Set{:class:`BooleanOperator`}
"""
SYMBOLIC_OPERATOR_MAPPING = {
'~': TT_NOT_OP,
'!': TT_NOT_OP,
'->': TT_IMPL_OP,
'<->': TT_XNOR_OP,
'&&': TT_AND_OP,
'&': TT_AND_OP,
'/\\': TT_AND_OP,
'||': TT_OR_OP,
'|': TT_OR_OP,
'\\/': TT_OR_OP
}
"""A mapping of Boolean operators.
This mapping includes the symbolic variants of the available Boolean
operators.
:type: Dict{:class:`str <python:str>`: :class:`BooleanOperator`}
"""
PLAIN_ENGLISH_OPERATOR_MAPPING = {
'not': TT_NOT_OP,
'NOT': TT_NOT_OP,
'xor': TT_XOR_OP,
'XOR': TT_XOR_OP,
'impl': TT_IMPL_OP,
'IMPL': TT_IMPL_OP,
'iff': TT_XNOR_OP,
'IFF': TT_XNOR_OP,
'xnor': TT_XNOR_OP,
'XNOR': TT_XNOR_OP,
'nxor': TT_XNOR_OP,
'NXOR': TT_XNOR_OP,
'and': TT_AND_OP,
'AND': TT_AND_OP,
'nand': TT_NAND_OP,
'NAND': TT_NAND_OP,
'or': TT_OR_OP,
'OR': TT_OR_OP,
'nor': TT_NOR_OP,
'NOR': TT_NOR_OP
}
"""A mapping of Boolean operators.
This mapping includes the plain-English variants of the available Boolean
operators.
:type: Dict{:class:`str <python:str>`: :class:`BooleanOperator`}
"""
OPERATOR_MAPPING = {}
"""A mapping of all available Boolean operators.
This dictionary is the concatenation of the
:data:`PLAIN_ENGLISH_OPERATOR_MAPPING` and :data:`SYMBOLIC_OPERATOR_MAPPING`
dictionaries.
:type: Dict{:class:`str <python:str>`: :class:`BooleanOperator`}
"""
OPERATOR_MAPPING.update(PLAIN_ENGLISH_OPERATOR_MAPPING)
OPERATOR_MAPPING.update(SYMBOLIC_OPERATOR_MAPPING)
MAX_OPERATOR_STR_LEN = max(len(k) for k in OPERATOR_MAPPING.keys())
"""The length of the longest operator from :data:`OPERATOR_MAPPING`.
:type: :class:`int <python:int>`
"""
|
import ctypes
import ctypes.util
import _ctypes
import hashlib
import base64
import time
import logging
import sys
import os
addrtype = 0
class _OpenSSL:
"""
Wrapper for OpenSSL using ctypes
"""
    def __init__(self, library):
        """
        Build the wrapper
        """
        self.time_opened = time.time()
try:
self._lib = ctypes.CDLL(library)
except:
self._lib = ctypes.cdll.LoadLibrary(library)
self.pointer = ctypes.pointer
self.c_int = ctypes.c_int
self.byref = ctypes.byref
self.create_string_buffer = ctypes.create_string_buffer
self.BN_new = self._lib.BN_new
self.BN_new.restype = ctypes.c_void_p
self.BN_new.argtypes = []
self.BN_copy = self._lib.BN_copy
self.BN_copy.restype = ctypes.c_void_p
self.BN_copy.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_mul_word = self._lib.BN_mul_word
self.BN_mul_word.restype = ctypes.c_int
self.BN_mul_word.argtypes = [ctypes.c_void_p, ctypes.c_int]
self.BN_set_word = self._lib.BN_set_word
self.BN_set_word.restype = ctypes.c_int
self.BN_set_word.argtypes = [ctypes.c_void_p, ctypes.c_int]
self.BN_add = self._lib.BN_add
self.BN_add.restype = ctypes.c_void_p
self.BN_add.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_sub = self._lib.BN_mod_sub
self.BN_mod_sub.restype = ctypes.c_int
self.BN_mod_sub.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_mul = self._lib.BN_mod_mul
self.BN_mod_mul.restype = ctypes.c_int
self.BN_mod_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_inverse = self._lib.BN_mod_inverse
self.BN_mod_inverse.restype = ctypes.c_void_p
self.BN_mod_inverse.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_cmp = self._lib.BN_cmp
self.BN_cmp.restype = ctypes.c_int
self.BN_cmp.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_bn2bin = self._lib.BN_bn2bin
self.BN_bn2bin.restype = ctypes.c_int
self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_bin2bn = self._lib.BN_bin2bn
self.BN_bin2bn.restype = ctypes.c_void_p
self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int,
ctypes.c_void_p]
self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name
self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group
self.EC_KEY_get0_group.restype = ctypes.c_void_p
self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
self.EC_KEY_set_private_key.restype = ctypes.c_int
self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key
self.EC_KEY_set_public_key.restype = ctypes.c_int
self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_set_compressed_coordinates_GFp = self._lib.EC_POINT_set_compressed_coordinates_GFp
self.EC_POINT_set_compressed_coordinates_GFp.restype = ctypes.c_int
self.EC_POINT_set_compressed_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
self.EC_POINT_new = self._lib.EC_POINT_new
self.EC_POINT_new.restype = ctypes.c_void_p
self.EC_POINT_new.argtypes = [ctypes.c_void_p]
self.EC_POINT_free = self._lib.EC_POINT_free
self.EC_POINT_free.restype = None
self.EC_POINT_free.argtypes = [ctypes.c_void_p]
self.EC_GROUP_get_order = self._lib.EC_GROUP_get_order
self.EC_GROUP_get_order.restype = ctypes.c_void_p
self.EC_GROUP_get_order.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.EC_GROUP_get_degree = self._lib.EC_GROUP_get_degree
self.EC_GROUP_get_degree.restype = ctypes.c_void_p
self.EC_GROUP_get_degree.argtypes = [ctypes.c_void_p]
self.EC_GROUP_get_curve_GFp = self._lib.EC_GROUP_get_curve_GFp
self.EC_GROUP_get_curve_GFp.restype = ctypes.c_void_p
self.EC_GROUP_get_curve_GFp.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_mul = self._lib.EC_POINT_mul
self.EC_POINT_mul.restype = ctypes.c_int
self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p]
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
self.EC_KEY_set_private_key.restype = ctypes.c_int
self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_KEY_set_conv_form = self._lib.EC_KEY_set_conv_form
self.EC_KEY_set_conv_form.restype = None
self.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p,
ctypes.c_int]
self.BN_CTX_new = self._lib.BN_CTX_new
self._lib.BN_CTX_new.restype = ctypes.c_void_p
self._lib.BN_CTX_new.argtypes = []
self.BN_CTX_start = self._lib.BN_CTX_start
self._lib.BN_CTX_start.restype = ctypes.c_void_p
self._lib.BN_CTX_start.argtypes = [ctypes.c_void_p]
self.BN_CTX_get = self._lib.BN_CTX_get
self._lib.BN_CTX_get.restype = ctypes.c_void_p
self._lib.BN_CTX_get.argtypes = [ctypes.c_void_p]
self.ECDSA_sign = self._lib.ECDSA_sign
self.ECDSA_sign.restype = ctypes.c_int
self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.ECDSA_verify = self._lib.ECDSA_verify
self.ECDSA_verify.restype = ctypes.c_int
self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey
self.i2o_ECPublicKey.restype = ctypes.c_void_p
self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_CTX_free = self._lib.BN_CTX_free
self.BN_CTX_free.restype = None
self.BN_CTX_free.argtypes = [ctypes.c_void_p]
self.EC_POINT_free = self._lib.EC_POINT_free
self.EC_POINT_free.restype = None
self.EC_POINT_free.argtypes = [ctypes.c_void_p]
ssl = None
def openLibrary():
global ssl
try:
if sys.platform.startswith("win"):
dll_path = os.path.dirname(__file__) + "/" + "libeay32.dll"
elif sys.platform == "cygwin":
dll_path = "/bin/cygcrypto-1.0.0.dll"
elif os.path.isfile("../lib/libcrypto.so"): # ZeroBundle OSX
dll_path = "../lib/libcrypto.so"
else:
dll_path = "/usr/local/ssl/lib/libcrypto.so"
ssl = _OpenSSL(dll_path)
assert ssl
except Exception, err:
ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32')
logging.debug("opensslVerify loaded: %s", ssl._lib)
openLibrary()
openssl_version = "%.9X" % ssl._lib.SSLeay()
NID_secp256k1 = 714
def check_result(val, func, args):
if val == 0:
raise ValueError
else:
return ctypes.c_void_p(val)
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.errcheck = check_result
POINT_CONVERSION_COMPRESSED = 2
POINT_CONVERSION_UNCOMPRESSED = 4
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += (256 ** i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0':
nPad += 1
else:
break
return (__b58chars[0] * nPad) + result
def hash_160(public_key):
md = hashlib.new('ripemd160')
md.update(hashlib.sha256(public_key).digest())
return md.digest()
def hash_160_to_bc_address(h160):
vh160 = chr(addrtype) + h160
h = Hash(vh160)
addr = vh160 + h[0:4]
return b58encode(addr)
def public_key_to_bc_address(public_key):
h160 = hash_160(public_key)
return hash_160_to_bc_address(h160)
def encode(val, base, minlen=0):
base, minlen = int(base), int(minlen)
code_string = ''.join([chr(x) for x in range(256)])
result = ""
while val > 0:
result = code_string[val % base] + result
val //= base
return code_string[0] * max(minlen - len(result), 0) + result
def num_to_var_int(x):
x = int(x)
if x < 253:
return chr(x)
elif x < 65536:
return chr(253) + encode(x, 256, 2)[::-1]
elif x < 4294967296:
return chr(254) + encode(x, 256, 4)[::-1]
else:
return chr(255) + encode(x, 256, 8)[::-1]
def msg_magic(message):
return "\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + message
def get_address(eckey):
size = ssl.i2o_ECPublicKey(eckey, 0)
mb = ctypes.create_string_buffer(size)
ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
return public_key_to_bc_address(mb.raw)
def Hash(data):
return hashlib.sha256(hashlib.sha256(data).digest()).digest()
def bx(bn, size=32):
b = ctypes.create_string_buffer(size)
ssl.BN_bn2bin(bn, b)
return b.raw.encode('hex')
def verify_message(address, signature, message):
pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), signature)
addr = get_address(eckey)
return (address == addr)
def SetCompactSignature(pkey, hash, signature):
sig = base64.b64decode(signature)
if len(sig) != 65:
raise BaseException("Wrong encoding")
nV = ord(sig[0])
if nV < 27 or nV >= 35:
return False
if nV >= 31:
ssl.EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED)
nV -= 4
r = ssl.BN_bin2bn(sig[1:33], 32, None)
s = ssl.BN_bin2bn(sig[33:], 32, None)
eckey = ECDSA_SIG_recover_key_GFp(pkey, r, s, hash, len(hash), nV - 27,
False)
return eckey
def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check):
n = 0
i = recid / 2
ctx = R = O = Q = None
try:
group = ssl.EC_KEY_get0_group(eckey)
ctx = ssl.BN_CTX_new()
ssl.BN_CTX_start(ctx)
order = ssl.BN_CTX_get(ctx)
ssl.EC_GROUP_get_order(group, order, ctx)
x = ssl.BN_CTX_get(ctx)
ssl.BN_copy(x, order)
ssl.BN_mul_word(x, i)
ssl.BN_add(x, x, r)
field = ssl.BN_CTX_get(ctx)
ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)
if (ssl.BN_cmp(x, field) >= 0):
return False
R = ssl.EC_POINT_new(group)
ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)
if check:
O = ssl.EC_POINT_new(group)
ssl.EC_POINT_mul(group, O, None, R, order, ctx)
if ssl.EC_POINT_is_at_infinity(group, O):
return False
Q = ssl.EC_POINT_new(group)
n = ssl.EC_GROUP_get_degree(group)
e = ssl.BN_CTX_get(ctx)
ssl.BN_bin2bn(msg, msglen, e)
if 8 * msglen > n:
ssl.BN_rshift(e, e, 8 - (n & 7))
zero = ssl.BN_CTX_get(ctx)
ssl.BN_set_word(zero, 0)
ssl.BN_mod_sub(e, zero, e, order, ctx)
rr = ssl.BN_CTX_get(ctx)
ssl.BN_mod_inverse(rr, r, order, ctx)
sor = ssl.BN_CTX_get(ctx)
ssl.BN_mod_mul(sor, s, rr, order, ctx)
eor = ssl.BN_CTX_get(ctx)
ssl.BN_mod_mul(eor, e, rr, order, ctx)
ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
ssl.EC_KEY_set_public_key(eckey, Q)
return eckey
finally:
if ctx:
ssl.BN_CTX_free(ctx)
if R:
ssl.EC_POINT_free(R)
if O:
ssl.EC_POINT_free(O)
if Q:
ssl.EC_POINT_free(Q)
def closeLibrary():
handle = ssl._lib._handle
if "FreeLibrary" in dir(_ctypes):
_ctypes.FreeLibrary(handle)
_ctypes.FreeLibrary(handle)
print "OpenSSL closed, handle:", handle
else:
_ctypes.dlclose(handle)
_ctypes.dlclose(handle)
print "OpenSSL dlclosed, handle:", handle
def getMessagePubkey(message, sig):
pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
if type(pkey) is not int and not pkey.value:
raise Exception(
"OpenSSL %s (%s) EC_KEY_new_by_curve_name failed: %s, probably your OpenSSL lib does not support secp256k1 elliptic curve. Please check: https://github.com/HelloZeroNet/ZeroNet/issues/132" %
(openssl_version, ssl._lib._name, pkey.value)
)
eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), sig)
size = ssl.i2o_ECPublicKey(eckey, 0)
mb = ctypes.create_string_buffer(size)
ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
pub = mb.raw
"""
if time.time() - ssl.time_opened > 60 * 5: # Reopen every 5 min
logging.debug("Reopening OpenSSL...")
closeLibrary()
openLibrary()
"""
return pub
def test():
sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="
pubkey = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41"
assert getMessagePubkey("hello", sign).encode("hex") == pubkey
test() # Make sure it working right
if __name__ == "__main__":
import time
import os
import sys
sys.path.append("..")
from pybitcointools import bitcoin as btctools
print "OpenSSL version %s" % openssl_version
print ssl._lib
priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
address = "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"
sign = btctools.ecdsa_sign("hello", priv) # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=
s = time.time()
for i in range(1000):
pubkey = getMessagePubkey("hello", sign)
verified = btctools.pubkey_to_address(pubkey) == address
print "100x Verified", verified, time.time() - s
|
from __future__ import unicode_literals
import logging
import re
from rbtools.api.errors import APIError
from rbtools.clients.errors import InvalidRevisionSpecError
from rbtools.deprecation import RemovedInRBTools40Warning
from rbtools.utils.match_score import Score
from rbtools.utils.repository import get_repository_id
from rbtools.utils.users import get_user
def get_draft_or_current_value(field_name, review_request):
"""Returns the draft or current field value from a review request.
If a draft exists for the supplied review request, return the draft's
field value for the supplied field name, otherwise return the review
request's field value for the supplied field name.
"""
if review_request.draft:
fields = review_request.draft[0]
else:
fields = review_request
return fields[field_name]
def get_possible_matches(review_requests, summary, description, limit=5):
"""Returns a sorted list of tuples of score and review request.
Each review request is given a score based on the summary and
description provided. The result is a sorted list of tuples containing
the score and the corresponding review request, sorted by the highest
scoring review request first.
"""
candidates = []
# Get all potential matches.
for review_request in review_requests.all_items:
summary_pair = (get_draft_or_current_value('summary', review_request),
summary)
description_pair = (get_draft_or_current_value('description',
review_request),
description)
score = Score.get_match(summary_pair, description_pair)
candidates.append((score, review_request))
# Sort by summary and description on descending rank.
sorted_candidates = sorted(
candidates,
key=lambda m: (m[0].summary_score, m[0].description_score),
reverse=True
)
return sorted_candidates[:limit]
def get_revisions(tool, cmd_args):
"""Returns the parsed revisions from the command line arguments.
These revisions are used for diff generation and commit message
extraction. They will be cached for future calls.
"""
# Parse the provided revisions from the command line and generate
# a spec or set of specialized extra arguments that the SCMClient
# can use for diffing and commit lookups.
try:
revisions = tool.parse_revision_spec(cmd_args)
except InvalidRevisionSpecError:
if not tool.supports_diff_extra_args:
raise
revisions = None
return revisions
def find_review_request_by_change_id(api_client,
api_root,
repository_info=None,
repository_name=None,
revisions=None,
repository_id=None):
"""Ask ReviewBoard for the review request ID for the tip revision.
Note that this function calls the ReviewBoard API with the only_fields
    parameter, thus the returned review request will contain only the fields
specified by the only_fields variable.
If no review request is found, None will be returned instead.
Version Changed:
3.0:
The ``repository_info`` and ``repository_name`` arguments were
deprecated in favor of adding the new ``repository_id`` argument.
Args:
api_client (rbtools.api.client.RBClient):
The API client.
api_root (rbtools.api.resource.RootResource):
The root resource of the Review Board server.
repository_info (rbtools.clients.RepositoryInfo, deprecated):
The repository info object.
repository_name (unicode, deprecated):
The repository name.
revisions (dict):
The parsed revision information, including the ``tip`` key.
repository_id (int, optional):
The repository ID to use.
"""
assert api_client is not None
assert api_root is not None
assert revisions is not None
only_fields = 'id,commit_id,changenum,status,url,absolute_url'
change_id = revisions['tip']
logging.debug('Attempting to find review request from tip revision ID: %s',
change_id)
# Strip off any prefix that might have been added by the SCM.
change_id = change_id.split(':', 1)[1]
optional_args = {}
if change_id.isdigit():
# Populate integer-only changenum field also for compatibility
# with older API versions
optional_args['changenum'] = int(change_id)
user = get_user(api_client, api_root, auth_required=True)
if repository_info or repository_name:
RemovedInRBTools40Warning.warn(
'The repository_info and repository_name arguments to '
'find_review_request_by_change_id are deprecated and will be '
'removed in RBTools 4.0. Please change your command to use the '
'needs_repository attribute and pass in the repository ID '
'directly.')
repository_id = get_repository_id(
repository_info, api_root, repository_name)
# Don't limit query to only pending requests because it's okay to stamp a
# submitted review.
review_requests = api_root.get_review_requests(repository=repository_id,
from_user=user.username,
commit_id=change_id,
only_links='self',
only_fields=only_fields,
**optional_args)
if review_requests:
count = review_requests.total_results
# Only one review can be associated with a specific commit ID.
if count > 0:
assert count == 1, '%d review requests were returned' % count
review_request = review_requests[0]
logging.debug('Found review request %s with status %s',
review_request.id, review_request.status)
if review_request.status != 'discarded':
return review_request
return None
def guess_existing_review_request(repository_info=None,
repository_name=None,
api_root=None,
api_client=None,
tool=None,
revisions=None,
guess_summary=None,
guess_description=None,
is_fuzzy_match_func=None,
no_commit_error=None,
submit_as=None,
additional_fields=None,
repository_id=None):
"""Try to guess the existing review request ID if it is available.
The existing review request is guessed by comparing the existing
summary and description to the current post's summary and description,
respectively. The current post's summary and description are guessed if
they are not provided.
If the summary and description exactly match those of an existing
review request, that request is immediately returned. Otherwise,
the user is prompted to select from a list of potential matches,
sorted by the highest ranked match first.
Note that this function calls the ReviewBoard API with the only_fields
    parameter, thus the returned review request will contain only the fields
specified by the only_fields variable.
Version Changed:
3.0:
The ``repository_info`` and ``repository_name`` arguments were
deprecated in favor of adding the new ``repository_id`` argument.
Args:
repository_info (rbtools.clients.RepositoryInfo, deprecated):
The repository info object.
repository_name (unicode, deprecated):
The repository name.
api_root (rbtools.api.resource.RootResource):
The root resource of the Review Board server.
api_client (rbtools.api.client.RBClient):
The API client.
tool (rbtools.clients.SCMClient):
The SCM client.
revisions (dict):
The parsed revisions object.
guess_summary (bool):
Whether to attempt to guess the summary for comparison.
guess_description (bool):
Whether to attempt to guess the description for comparison.
is_fuzzy_match_func (callable, optional):
A function which can check if a review request is a match for the
data being posted.
no_commit_error (callable, optional):
A function to be called when there's no local commit.
submit_as (unicode, optional):
A username on the server which is used for posting review requests.
If provided, review requests owned by this user will be matched.
additional_fields (list of unicode, optional):
A list of additional fields to include in the fetched review
request resource.
repository_id (int, optional):
The ID of the repository to match.
"""
assert api_root is not None
assert api_client is not None
assert tool is not None
assert revisions is not None
only_fields = [
'id', 'summary', 'description', 'draft', 'url', 'absolute_url',
'bugs_closed', 'status', 'public'
]
if additional_fields:
only_fields += additional_fields
if submit_as:
username = submit_as
else:
user = get_user(api_client, api_root, auth_required=True)
username = user.username
if repository_info or repository_name:
RemovedInRBTools40Warning.warn(
'The repository_info and repository_name arguments to '
'find_review_request_by_change_id are deprecated and will be '
'removed in RBTools 4.0. Please change your command to use the '
'needs_repository attribute and pass in the repository ID '
'directly.')
repository_id = get_repository_id(
repository_info, api_root, repository_name)
try:
# Get only pending requests by the current user for this
# repository.
review_requests = api_root.get_review_requests(
repository=repository_id,
from_user=username,
status='pending',
expand='draft',
only_fields=','.join(only_fields),
only_links='diffs,draft',
show_all_unpublished=True)
if not review_requests:
raise ValueError('No existing review requests to update for '
'user %s'
% username)
except APIError as e:
raise ValueError('Error getting review requests for user %s: %s'
% (username, e))
summary = None
description = None
if not guess_summary or not guess_description:
try:
commit_message = tool.get_commit_message(revisions)
if commit_message:
if not guess_summary:
summary = commit_message['summary']
if not guess_description:
description = commit_message['description']
elif callable(no_commit_error):
no_commit_error()
except NotImplementedError:
raise ValueError('--summary and --description are required.')
if not summary and not description:
return None
possible_matches = get_possible_matches(review_requests, summary,
description)
exact_match_count = num_exact_matches(possible_matches)
for score, review_request in possible_matches:
# If the score is the only exact match, return the review request
# ID without confirmation, otherwise prompt.
if ((score.is_exact_match() and exact_match_count == 1) or
(callable(is_fuzzy_match_func) and
is_fuzzy_match_func(review_request))):
return review_request
return None
def num_exact_matches(possible_matches):
"""Returns the number of exact matches in the possible match list."""
count = 0
for score, request in possible_matches:
if score.is_exact_match():
count += 1
return count
def parse_review_request_url(url):
"""Parse a review request URL and return its component parts.
Args:
url (unicode):
The URL to parse.
Returns:
tuple:
A 3-tuple consisting of the server URL, the review request ID, and the
diff revision.
"""
regex = (r'^(?P<server_url>https?:\/\/.*\/(?:\/s\/[^\/]+\/)?)'
r'r\/(?P<review_request_id>\d+)'
r'\/?(diff\/(?P<diff_id>\d+-?\d*))?\/?')
match = re.match(regex, url)
if match:
server_url = match.group('server_url')
request_id = match.group('review_request_id')
diff_id = match.group('diff_id')
return (server_url, request_id, diff_id)
return (None, None, None)
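# Example (sketch): splitting a review request URL into its component parts.
#
#     >>> parse_review_request_url('https://rb.example.com/r/123/diff/2/')
#     ('https://rb.example.com/', '123', '2')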
|
"""
Compute streamfunction and velocity potential from the long-term-mean
flow.
This example uses the xarray interface.
Additional requirements for this example:
* xarray (http://xarray.pydata.org)
* matplotlib (http://matplotlib.org/)
* cartopy (http://scitools.org.uk/cartopy/)
"""
import cartopy.crs as ccrs
import matplotlib as mpl
import matplotlib.pyplot as plt
import xarray as xr
from windspharm.xarray import VectorWind
from windspharm.examples import example_data_path
mpl.rcParams['mathtext.default'] = 'regular'
ds = xr.open_mfdataset([example_data_path(f)
for f in ('uwnd_mean.nc', 'vwnd_mean.nc')])
uwnd = ds['uwnd']
vwnd = ds['vwnd']
w = VectorWind(uwnd, vwnd)
sf, vp = w.sfvp()
sf_dec = sf[sf['time.month'] == 12]
vp_dec = vp[vp['time.month'] == 12]
clevs = [-120, -100, -80, -60, -40, -20, 0, 20, 40, 60, 80, 100, 120]
ax = plt.subplot(111, projection=ccrs.PlateCarree(central_longitude=180))
sf_dec *= 1e-6
fill_sf = sf_dec[0].plot.contourf(ax=ax, levels=clevs, cmap=plt.cm.RdBu_r,
transform=ccrs.PlateCarree(), extend='both',
add_colorbar=False)
ax.coastlines()
ax.gridlines()
plt.colorbar(fill_sf, orientation='horizontal')
plt.title('Streamfunction ($10^6$m$^2$s$^{-1}$)', fontsize=16)
plt.figure()
clevs = [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10]
ax = plt.subplot(111, projection=ccrs.PlateCarree(central_longitude=180))
vp_dec *= 1e-6
fill_vp = vp_dec[0].plot.contourf(ax=ax, levels=clevs, cmap=plt.cm.RdBu_r,
transform=ccrs.PlateCarree(), extend='both',
add_colorbar=False)
ax.coastlines()
ax.gridlines()
plt.colorbar(fill_vp, orientation='horizontal')
plt.title('Velocity Potential ($10^6$m$^2$s$^{-1}$)', fontsize=16)
plt.show()
|
from django.views.generic.edit import FormView
from django.core.urlresolvers import reverse, reverse_lazy
from django.shortcuts import get_object_or_404, redirect
from django.views.generic.base import TemplateResponseMixin, View, TemplateView
from django.contrib.auth.tokens import default_token_generator
from django.utils.http import base36_to_int
from django.contrib import messages
from django.http import Http404
from core.apps.accounts.models import User, EmailConfirmation
from . import forms
from .tools import complete_signup
from core.apps.accounts import tools
class LoginView(FormView):
form_class = forms.LoginForm
template_name = "taccount/login.html"
success_url = "/teacher"
redirect_field_name = "next"
def form_valid(self, form):
success_url = self.get_success_url()
return form.login(self.request, redirect_url=success_url)
def get_success_url(self):
# Explicitly passed ?next= URL takes precedence
ret = (tools.get_next_redirect_url(self.request,
self.redirect_field_name)
or self.success_url)
return ret
def get_context_data(self, **kwargs):
ret = super(LoginView, self).get_context_data(**kwargs)
        signup_url = reverse("teacher_account_singup")
        signup_url = tools.pass_through_next_redirect_url(self.request, signup_url, self.redirect_field_name)
redirect_field_value = self.request.REQUEST \
.get(self.redirect_field_name)
ret.update({"signup_url": signup_url,
"redirect_field_name": self.redirect_field_name,
"redirect_field_value": redirect_field_value})
return ret
login = LoginView.as_view()
class SignupView(FormView):
redirect_field_name = "next"
success_url = None
def get_form_class(self):
request = self.request
if not request.user.is_authenticated():
return forms.TeacherCreationForm
        else:
return forms.TeacherUpgradeForm
def get_template_names(self):
request = self.request
if not request.user.is_authenticated():
return ["taccount/signup.html"]
        else:
return ["taccount/upgrade.html"]
def get_success_url(self):
# Explicitly passed ?next= URL takes precedence
ret = (tools.get_next_redirect_url(self.request,
self.redirect_field_name)
or self.success_url)
return ret
def form_valid(self, form):
user = form.save(self.request)
return complete_signup(self.request, user,
True,
self.get_success_url())
def get_initial(self):
return {"request" : self.request}
def get_context_data(self, **kwargs):
request = self.request
if not request.user.is_authenticated():
form = kwargs['form']
form.fields["email"].initial = self.request.session \
.get('account_verified_email', None)
ret = super(SignupView, self).get_context_data(**kwargs)
login_url = tools.pass_through_next_redirect_url(self.request, reverse("teacher_account_login", urlconf="teacher.apps.accounts.urls"),
self.redirect_field_name)
redirect_field_name = self.redirect_field_name
redirect_field_value = self.request.REQUEST.get(redirect_field_name)
ret.update({"login_url": login_url,
"redirect_field_name": redirect_field_name,
"redirect_field_value": redirect_field_value})
        else:
ret = super(SignupView, self).get_context_data(**kwargs)
return ret
register = SignupView.as_view()
class ConfirmEmailView(TemplateResponseMixin, View):
def get_template_names(self):
if self.request.method == 'POST':
return ["accounts/teacher/email_confirmed.html"]
else:
return ["accounts/teacher/email_confirm.html"]
def get(self, *args, **kwargs):
try:
self.object = self.get_object()
except Http404:
self.object = None
ctx = self.get_context_data()
return self.render_to_response(ctx)
def post(self, *args, **kwargs):
self.object = confirmation = self.get_object()
confirmation.confirm(self.request)
        # Don't touch is_active here -- leave it alone so a sysadmin can
        # still use it to block users.
#
# user = confirmation.email_address.user
# user.is_active = True
# user.save()
redirect_url = self.get_redirect_url()
if not redirect_url:
ctx = self.get_context_data()
return self.render_to_response(ctx)
from core.apps.tools.common import add_message
add_message(self.request,
messages.SUCCESS,
'accounts/teacher/messages/email_confirmed.txt',
{'email': confirmation.email_address.email})
return redirect(redirect_url)
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
try:
return queryset.get(key=self.kwargs["key"].lower())
except EmailConfirmation.DoesNotExist:
raise Http404()
def get_queryset(self):
qs = EmailConfirmation.objects.all_valid()
qs = qs.select_related("email_address__user")
return qs
def get_context_data(self, **kwargs):
ctx = kwargs
ctx["confirmation"] = self.object
return ctx
    def get_redirect_url(self):
        # adapter = get_helper()
        # return adapter().get_email_confirmation_redirect_url(self.request)
        return None
confirm_email = ConfirmEmailView.as_view()
class PasswordResetView(FormView):
form_class = forms.ResetPasswordForm
template_name = "taccount/reset_password.html"
success_url = reverse_lazy("teacher_account_reset_password_done")
def form_valid(self, form):
form.save()
return super(PasswordResetView, self).form_valid(form)
def get_context_data(self, **kwargs):
ret = super(PasswordResetView, self).get_context_data(**kwargs)
# NOTE: For backwards compatibility
ret['password_reset_form'] = ret.get('form')
# (end NOTE)
return ret
password_reset = PasswordResetView.as_view()
class PasswordResetFromKeyView(FormView):
template_name = "taccount/password_reset_from_key.html"
form_class = forms.ResetPasswordKeyForm
token_generator = default_token_generator
success_url = "/teacher" # reverse_lazy("teacher_account_reset_password_from_key_done")
def _get_user(self, uidb36):
# pull out user
try:
uid_int = base36_to_int(uidb36)
except ValueError:
raise Http404
return get_object_or_404(User, id=uid_int)
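    # The reset link encodes the user id in base 36 (uidb36) together with a
    # one-time token; dispatch() below resolves the user and rejects the
    # request if the token fails to validate.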
def dispatch(self, request, uidb36, key, **kwargs):
self.uidb36 = uidb36
self.key = key
self.request.user = self._get_user(uidb36)
if not self.token_generator.check_token(self.request.user, key):
return self._response_bad_token(request, uidb36, key, **kwargs)
else:
return super(PasswordResetFromKeyView, self).dispatch(request,
uidb36,
key,
**kwargs)
def get_form_kwargs(self):
kwargs = super(PasswordResetFromKeyView, self).get_form_kwargs()
kwargs["user"] = self.request.user
kwargs["temp_key"] = self.key
return kwargs
def form_valid(self, form):
form.save()
from core.apps.tools.common import add_message
add_message(self.request,
messages.SUCCESS,
'account/messages/password_changed.txt')
return super(PasswordResetFromKeyView, self).form_valid(form)
def _response_bad_token(self, request, uidb36, key, **kwargs):
return self.render_to_response(self.get_context_data(token_fail=True))
password_reset_from_key = PasswordResetFromKeyView.as_view()
class PasswordResetDoneView(TemplateView):
template_name = "taccount/password_reset_done.html"
password_reset_done = PasswordResetDoneView.as_view()
|
"""
This is the getkey module. It is meant for small files that are human readable and are small enough to be loaded into
memory. It depends on PyNaCl.
NOTE:
THIS MODULE COMES WITH NO GUARANTEES WHATSOEVER.
Untested on Mac and Windows systems.
Tested with Python 3.5.2 on Linux (Ubuntu Mate 16.04.3) systems.
Tested with Python 3.6.4 on a few Android systems through the Termux application.
Written by: Micah Parks.
"""
from getpass import getpass
from os.path import exists, isdir
from sys import argv
from nacl.exceptions import CryptoError
from coupletools.encryption.someencryptionfunctions import buffer_decrypt_file
from coupletools.somefunctions import password_32_bytes
def main(filepathstring, findinlinestring=str()):
"""
The logic of the file.
"""
if not isinstance(filepathstring, str):
raise ValueError('"filepathstring" must be a string.')
if not isinstance(findinlinestring, str):
raise ValueError('"findinlinestring" must be a string.')
print('Password to decrypt the file:')
passwordbytes = password_32_bytes(getpass())
decryptgen = buffer_decrypt_file(filepathstring=filepathstring, passwordbytes=passwordbytes)
    wholefilestring = b''
    try:
        # iterate over decrypted chunks; stop on exhaustion or a None sentinel
        for nowbytes in decryptgen:
            if nowbytes is None:
                break
            wholefilestring += nowbytes
except CryptoError as e:
print(e)
quit()
wholefilestring = wholefilestring.decode('utf-8')
print('File decrypted properly and in memory.')
if len(findinlinestring) == 0:
findinlinestring = input('Phrase to find key:\n')
findinlinestring = findinlinestring.strip()
for nowlinestring in wholefilestring.split('\n'):
if findinlinestring.lower() in nowlinestring.strip().lower():
print(nowlinestring)
def print_usage(quitbool=False):
"""
Print the command line usage. Quit if quitbool is True.
"""
print('Usage: python3 getkey.py [filepathstring] [findinlinestring]')
print('The "findinlinestring" parameter is optional. If omitted, it must be given at runtime.')
if quitbool is True:
quit()
if __name__ == '__main__':
try:
FILE_PATH_STRING = argv[1]
except IndexError:
print_usage(quitbool=True)
if FILE_PATH_STRING.startswith("'") and FILE_PATH_STRING.endswith("'"): # Dragged file into terminal.
FILE_PATH_STRING = FILE_PATH_STRING[1:-1]
if not exists(FILE_PATH_STRING):
print('Cannot find "{}".'.format(FILE_PATH_STRING))
quit()
elif isdir(FILE_PATH_STRING):
print('"{}" is a directory.'.format(FILE_PATH_STRING))
quit()
    if len(FILE_PATH_STRING) <= 10 or not FILE_PATH_STRING.endswith('.encrypted'):
print('File must end with ".encrypted" and not only be named ".encrypted".')
quit()
try:
FIND_IN_LINE_STRING = argv[2]
except IndexError:
FIND_IN_LINE_STRING = str()
main(filepathstring=FILE_PATH_STRING, findinlinestring=FIND_IN_LINE_STRING)
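# Illustrative invocation (the file path and search phrase are made up; the
# file must have been produced by the matching coupletools encryption helper):
#
#     $ python3 getkey.py /path/to/secrets.txt.encrypted email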
|
NUM_ROWS = 15
def compute():
    data = list(map(int, DATA.split()))
return max_sum(data, 0, frozenset(range(0, NUM_ROWS)), {})
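# max_sum below solves the assignment problem by column-wise dynamic
# programming: `cache` maps a frozenset of still-unused rows to the best sum
# obtainable from the current column onward. The column index need not be part
# of the key, because it is implied by the size of the set.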
def max_sum(data, column, free_rows, cache):
if column == NUM_ROWS - 1:
x, = free_rows
return data[x * NUM_ROWS + column]
if free_rows not in cache:
cache[free_rows] = 0
for i in free_rows:
            cache[free_rows] = max(cache[free_rows],
                                   data[i * NUM_ROWS + column] + max_sum(data, column + 1, free_rows - {i}, cache))
return cache[free_rows]
DATA = '''
7 53 183 439 863 497 383 563 79 973 287 63 343 169 583
627 343 773 959 943 767 473 103 699 303 957 703 583 639 913
447 283 463 29 23 487 463 993 119 883 327 493 423 159 743
217 623 3 399 853 407 103 983 89 463 290 516 212 462 350
960 376 682 962 300 780 486 502 912 800 250 346 172 812 350
870 456 192 162 593 473 915 45 989 873 823 965 425 329 803
973 965 905 919 133 673 665 235 509 613 673 815 165 992 326
322 148 972 962 286 255 941 541 265 323 925 281 601 95 973
445 721 11 525 473 65 511 164 138 672 18 428 154 448 848
414 456 310 312 798 104 566 520 302 248 694 976 430 392 198
184 829 373 181 631 101 969 613 840 740 778 458 284 760 390
821 461 843 513 17 901 711 993 293 157 274 94 192 156 574
34 124 4 878 450 476 712 914 838 669 875 299 823 329 699
815 559 813 459 522 788 168 586 966 232 308 833 251 631 107
813 883 451 509 615 77 281 613 459 205 380 274 302 35 805
'''
if __name__ == "__main__":
print(compute())
|
"""
Django settings for events project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (events/config/settings/base.py - 3 = events/)
APPS_DIR = ROOT_DIR.path('events')
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
# Operating System Environment variables have precedence over variables defined in the .env file,
# that is to say variables from the .env files will only be used if not defined
# as environment variables.
env_file = str(ROOT_DIR.path('.env'))
print('Loading : {}'.format(env_file))
env.read_env(env_file)
print('The .env file has been loaded. See base.py for more information')
DJANGO_APPS = [
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
]
THIRD_PARTY_APPS = [
'crispy_forms', # Form layouts
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
]
LOCAL_APPS = [
# custom users app
'events.users.apps.UsersConfig',
# Your stuff: custom apps go here
]
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
MIGRATION_MODULES = {
'sites': 'events.contrib.sites.migrations'
}
DEBUG = env.bool('DJANGO_DEBUG', False)
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
ADMINS = [
("""Bryan Lara""", 'bralavar@gmail.com'),
]
MANAGERS = ADMINS
DATABASES = {
'default': env.db('DATABASE_URL', default='postgres:///events'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
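# env.db() parses a 12-factor style database URL, e.g. (hypothetical values):
#   DATABASE_URL=postgres://user:password@localhost:5432/events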
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
],
},
},
]
CRISPY_TEMPLATE_PACK = 'bootstrap4'
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
STATIC_URL = '/static/'
STATICFILES_DIRS = [
str(APPS_DIR.path('static')),
]
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
MEDIA_ROOT = str(APPS_DIR('media'))
MEDIA_URL = '/media/'
ROOT_URLCONF = 'config.urls'
WSGI_APPLICATION = 'config.wsgi.application'
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
]
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
ACCOUNT_ADAPTER = 'events.users.adapters.AccountAdapter'
SOCIALACCOUNT_ADAPTER = 'events.users.adapters.SocialAccountAdapter'
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
ADMIN_URL = r'^admin/'
|
from price_dump import *
import matplotlib.pyplot as plt
import math
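# Sketch of intent: pull the most recent 15 price records via price_dump,
# take log10 of each, and plot them; the [::-1] below reverses the series so
# it runs oldest-first (assuming get_first_N returns newest-first).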
data = get_first_N(15, show=True)
price = prices(data)
float_price = map(float, price)
logs = list(map(math.log10,float_price))
bins = range(0, len(price))
plt.plot(bins, logs[::-1])
plt.show()
|
from __future__ import division
import optparse
from mpi4py import MPI
import numpy as np
from serial import find_solution, write_results
from settings import SEARCH_SPACE
def refine_work(work, p, new_min, verbose=False, split=False, split_min=4):
"""
Filter remaining work based on newly found solution and split up
remaining work if necessary to give processes that finish more
quickly work from other processes.
"""
    # drop candidate minimum values at or below the newly found solution;
    # they no longer need to be checked
work[p] = work[p][work[p] > new_min]
# if option selected, divvy up work to processes that have run out
if split and not work[p].size:
# select the biggest queue remaining and split its work
        big_p, big_arr = sorted(work.items(),
reverse=True,
key=lambda x: x[1].size)[0]
        # only take the other process's work if there are more than the min
        # number of values (this prevents each newly idle process from
        # "stealing" the last element in turn)
if big_arr.size >= split_min:
# print out status if desired
if verbose:
print('taking from process {} ({} left) to give '
'to process {}'.format(big_p, big_arr.size, p))
# split it between the two processes
work[big_p], work[p] = np.array_split(big_arr, 2)
return work
def calculate_progress(work):
"""
Calculate a % progress based on how much of the search space has been
covered.
"""
numerator = SEARCH_SPACE - queue_left(work)
return numerator / SEARCH_SPACE
def queue_left(work):
"""
    Figure out how much work is left in the queue.
"""
return sum([arr.size for arr in work.values()])
def master(comm, min, max, verbose=True, split=False, split_min=4):
"""
Assign work as appropriate to slave processes. When they run out of
work in their range, take work from another process. When nobody has
any work left to do, collect up all the leftovers and write out.
"""
size = comm.Get_size()
status = MPI.Status()
start = MPI.Wtime()
# placeholder for our output data
data_list = []
solved_so_far = 0
# get all possible minimum values and split them up among the non-root processes
process_min_ranges = np.array_split(np.arange(min, max), size - 1)
work = {p + 1: work for p, work in enumerate(process_min_ranges)}
# keep track of which slave processes are still alive
    processes_working = list(range(1, size))
# setup: seed all processes with some work to start with
for p in processes_working:
comm.send(work[p].min(), dest=p)
# collect work as it comes in and assign new minima as processes finish
while True:
# get the result of the slave process
row = comm.recv(None, source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)
p = status.Get_source()
linear_value = status.Get_tag()
# update counters
data_list.append(row)
solved_so_far += 1
# update work queues based on p's found linear value
work = refine_work(work, p, linear_value,
verbose=verbose,
split=split,
split_min=split_min)
# if there's no work left for process p, remove it from the working pool
# (otherwise we'll mistakenly try to collect work from it at the end)
if not work[p].size:
processes_working.remove(p)
print('master killing process {}, ({} left)'.
format(p, len(processes_working)))
comm.send(-1, dest=p)
# if nobody has any work left, break out
if not queue_left(work):
print('no work left; master breaking out of while loop, '
'{} processes still working'.format(len(processes_working)))
break
# ... otherwise, if there is work left for p, send it
else:
# pop the lowest value from process p's work queue and send it out
new_min = work[p].min()
comm.send(new_min, dest=p)
work[p] = work[p][work[p] > new_min]
if verbose or solved_so_far % 10 == 0:
print('{:0.2f}s: solved {} so far ({:0.2%} of search space)'.
format(MPI.Wtime() - start, solved_so_far, calculate_progress(work)))
# get the last few
for _ in processes_working:
row = comm.recv(None, source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)
data_list.append(row)
solved_so_far += 1
# kill the other processes
for p in processes_working:
comm.send(-1, dest=p)
return data_list
def slave(comm):
"""
Take work from master process until given the signal to shut down.
"""
status = MPI.Status()
rank = comm.Get_rank()
print('slave {} working'.format(rank))
while True:
# get work from the master
min_value = comm.recv(source=0, tag=MPI.ANY_TAG, status=status)
# die if sent a negative value
if min_value < 0:
break
        # do the actual computation for this candidate minimum value
linear_value, row = find_solution(min_value)
comm.send(row, dest=0, tag=linear_value)
if __name__ == '__main__':
# parse command line options
parser = optparse.OptionParser()
parser.add_option("-v", dest="verbose", action="store_true",
default=False,
help="verbose (print every solution instead of every 10)")
parser.add_option("-s", dest="split", action="store_true",
default=False,
help="split work queues",)
parser.add_option("--split-min", dest="split_min", type="int",
default=2,
help="threshold for splitting work queues")
parser.add_option("--min", dest="min", type="float",
default=9927.0,
help="minimum objective value")
parser.add_option("--max", dest="max", type="float",
default=11534.0,
help="maximum objective value")
options, args = parser.parse_args()
# get MPI data
comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()
    # if master, take charge of the work; otherwise prepare to do work
if rank == 0:
        print('master: started with size', size)
start_time = MPI.Wtime()
results = master(comm, options.min, options.max,
verbose=options.verbose,
split=options.split,
split_min=options.split_min)
end_time = MPI.Wtime()
total_time = end_time - start_time
write_results(results, total_time, processors=size)
        print('total time', total_time)
else:
slave(comm)
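# Illustrative launch (the script name and process count are made up):
#
#     $ mpiexec -n 8 python mpi_search.py -s --split-min 4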
|
'''
Given a binary tree and a sum, determine if the tree has a root-to-leaf path such that adding up all the values along the path equals the given sum.
For example:
Given the below binary tree and sum = 22,
5
/ \
4 8
/ / \
11 13 4
/ \ \
7 2 1
return true, as there exists a root-to-leaf path 5->4->11->2 whose sum is 22.
'''
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
if not root:
return False
sum -= root.val
if sum == 0 and root.left is None and root.right is None:
return True
return self.hasPathSum(root.left, sum) or self.hasPathSum(root.right, sum)
if __name__ == "__main__":
    pass
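    # A minimal sketch exercising the example from the docstring above: build
    # the sample tree and check the 5->4->11->2 path summing to 22.
    root = TreeNode(5)
    root.left = TreeNode(4)
    root.right = TreeNode(8)
    root.left.left = TreeNode(11)
    root.left.left.left = TreeNode(7)
    root.left.left.right = TreeNode(2)
    root.right.left = TreeNode(13)
    root.right.right = TreeNode(4)
    root.right.right.right = TreeNode(1)
    print(Solution().hasPathSum(root, 22))  # expected: True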
|
import os
import pytest
from appdirs import user_config_dir
import genomepy
import genomepy.utils
from tests import linux, travis
def test_head_annotations(caplog, capsys):
genomepy.functions.head_annotations("ASM14646v1", provider="ncbi", n=1)
captured = capsys.readouterr().out.strip()
assert "NCBI" in caplog.text
assert 'gene_name "Eint_010010";' in captured
def test_list_available_genomes():
g = genomepy.functions.list_available_genomes("Ensembl")
metadata = next(g)
assert isinstance(metadata, list)
assert metadata[0:2] == ["athCun1", "Ensembl"]
def test_list_installed_genomes():
assert isinstance(genomepy.functions.list_installed_genomes(os.getcwd()), list)
gdir = os.path.join(os.getcwd(), "tests", "data")
genomes = genomepy.functions.list_installed_genomes(gdir)
assert set(genomes) == {
"regexp",
"sacCer3",
"sanitize",
} # OSX likes to sort differently
empty_list = genomepy.functions.list_installed_genomes("./thisdirdoesnotexist")
assert empty_list == []
def test__lazy_provider_selection():
# Xenopus_tropicalis_v9.1 can be found on both Ensembl and NCBI.
# Ensembl is first in lazy selection.
# find genome in specified provider (NCBI)
name = "Xenopus_tropicalis_v9.1"
provider = "NCBI"
p = genomepy.functions._lazy_provider_selection(name, provider)
assert "ncbi" in str(p)
# GENCODE's FTP does not work on Travis-Linux
if not (travis and linux):
# find the first provider (Ensembl)
provider = None
p = genomepy.functions._lazy_provider_selection(name, provider)
assert "ensembl" in str(p)
    # can't find the genome anywhere
name = "not_a_genome"
with pytest.raises(genomepy.exceptions.GenomeDownloadError):
genomepy.functions._lazy_provider_selection(name, provider)
def test__provider_selection():
# specified provider
name = "Xenopus_tropicalis_v9.1"
localname = "test_genome"
genomes_dir = os.getcwd()
provider = "NCBI"
p = genomepy.functions._provider_selection(name, localname, genomes_dir, provider)
assert "ncbi" in str(p)
# provider from readme
readme = os.path.join(genomes_dir, localname, "README.txt")
os.makedirs(os.path.dirname(readme), exist_ok=True)
with open(readme, "w") as r:
r.write("provider: NCBI")
provider = None
p = genomepy.functions._provider_selection(name, localname, genomes_dir, provider)
assert "ncbi" in str(p)
genomepy.utils.rm_rf(os.path.dirname(readme))
# lazy provider
p = genomepy.functions._provider_selection(name, localname, genomes_dir, provider)
assert "ensembl" in str(p)
def test__get_fasta_regex_func():
# filter alt regions (default)
func = genomepy.functions._get_fasta_regex_func(regex=None, keep_alt=False)
assert func("alt1") is False
assert func("chr1") is True
assert func("ALT1") is False # case insensitive
# filter user specified regex
func = genomepy.functions._get_fasta_regex_func(
regex="chr", invert_match=False, keep_alt=True
)
assert func("chr1") is True
assert func("alt1") is False
assert func("something_else") is False
# filter user specified regex (inverted)
func = genomepy.functions._get_fasta_regex_func(
regex="chr", invert_match=True, keep_alt=True
)
assert func("chr1") is False
assert func("alt1") is True
assert func("something_else") is True
# filter both
func = genomepy.functions._get_fasta_regex_func(
regex="chr", invert_match=True, keep_alt=False
)
assert func("chr1") is False
assert func("alt1") is False
assert func("something_else") is True
def test_install_genome():
localname = "my_genome"
genomepy.functions.install_genome(
name="tests/data/sacCer3/sacCer3.fa",
provider="Local",
genomes_dir=None,
localname=localname,
regex="chrIV",
annotation=True,
force=True,
)
genomes_dir = genomepy.functions.get_genomes_dir(None, False)
genome_file = os.path.join(genomes_dir, localname, localname + ".fa")
assert os.path.exists(genome_file)
sizes_file = os.path.join(genomes_dir, localname, localname + ".fa.sizes")
assert os.path.exists(sizes_file)
gaps_file = os.path.join(genomes_dir, localname, localname + ".gaps.bed")
assert os.path.exists(gaps_file)
annotation_file = os.path.join(
genomes_dir, localname, localname + ".annotation.gtf"
)
assert os.path.exists(annotation_file)
# regex test:
sizes = genomepy.Genome(localname).sizes.keys()
assert "chrIV" in sizes
def test_generate_exports():
exports = genomepy.functions._generate_exports()
assert isinstance(exports, list)
# check if my_genome was installed in the last test
assert any([x for x in exports if x.startswith("export MY_GENOME")])
# add genome that throws an IndexNotFoundError
gd = genomepy.utils.get_genomes_dir(None, True)
name = "testgenome"
os.makedirs(os.path.join(gd, name), exist_ok=True)
path = os.path.join(gd, name, f"{name}.fa")
with open(path, "w") as fa:
fa.write("genome without index")
exports = genomepy.functions._generate_exports()
assert f"export TESTGENOME={path}" not in exports
# add genome that works
with open(path, "w") as fa:
fa.write(">chr1\nallowed characters")
genomepy.Genome(name, gd) # create index
exports = genomepy.functions._generate_exports()
assert f"export TESTGENOME={path}" in exports
genomepy.utils.rm_rf(os.path.join(gd, "testgenome"))
def test_generate_env():
config_dir = str(user_config_dir("genomepy"))
path = os.path.join(config_dir, "exports.txt")
# give file path
my_path = "~/exports.txt"
genomepy.functions.generate_env(my_path)
assert os.path.exists(os.path.expanduser(my_path))
os.unlink(os.path.expanduser(my_path))
# give file name
my_file = os.path.join(config_dir, "my_exports.txt")
genomepy.functions.generate_env("my_exports.txt")
assert os.path.exists(my_file)
os.unlink(os.path.expanduser(my_file))
# give nothing
if os.path.exists(path):
os.unlink(path)
genomepy.functions.generate_env()
assert os.path.exists(path)
with open(path) as f:
exports = []
for line in f.readlines():
vals = line.strip()
exports.append(vals)
assert any([x for x in exports if x.startswith("export MY_GENOME")])
os.unlink(path)
def test__delete_extensions():
fpath1 = "tests/data/empty/weird_ext1.test123"
fpath2 = "tests/data/empty/weird_ext2.test123"
for fpath in [fpath1, fpath2]:
with open(fpath, "w") as f:
f.write("asd\n")
assert os.path.exists(fpath1)
assert os.path.exists(fpath2)
genomepy.functions._delete_extensions("tests/data/empty", ["test123"])
assert not os.path.exists(fpath1)
assert not os.path.exists(fpath2)
def test__is_genome_dir():
# dir contains a fasta
assert genomepy.functions._is_genome_dir("tests/data/regexp")
# dir does not contain a fasta
assert not genomepy.functions._is_genome_dir("tests/genome")
|
import math
import numpy as np
import matplotlib.pyplot as plt
from porousmedialab.phcalc import Acid
import seaborn as sns
from matplotlib.colors import ListedColormap
sns.set_style("whitegrid")
def custom_plot(lab, x, y, ttl='', y_lbl='', x_lbl=''):
plt.figure()
ax = plt.subplot(111)
plt.plot(x, y, lw=3)
plt.title(ttl)
plt.xlim(x[0], x[-1])
plt.ylabel(y_lbl)
plt.xlabel(x_lbl)
ax.grid(linestyle='-', linewidth=0.2)
return ax
def plot_batch_rates(batch, *args, **kwargs):
for rate in sorted(batch.estimated_rates):
plt.figure()
plot_batch_rate(batch, rate, *args, **kwargs)
def plot_batch_rate(batch, rate, time_factor=1):
plt.plot(batch.time * time_factor,
batch.estimated_rates[rate][0] / time_factor, label=rate, lw=3)
    plt.ylabel(r'Rate, $[\Delta C/\Delta T]$')
plt.xlabel('Time, [T]')
plt.legend(frameon=1)
plt.grid(linestyle='-', linewidth=0.2)
def plot_batch_deltas(batch, *args, **kwargs):
for element in sorted(batch.species):
plt.figure()
plot_batch_delta(batch, element, *args, **kwargs)
def plot_batch_delta(batch, element, time_factor=1):
plt.plot(batch.time[1:] * time_factor, batch.species[element]
['rates'][0] / time_factor, label=element, lw=3)
    plt.ylabel(r'Rate of change, $[\Delta C/ \Delta T]$')
plt.xlabel('Time, [T]')
plt.legend(frameon=1)
plt.grid(linestyle='-', linewidth=0.2)
def saturation_index_countour(lab, elem1, elem2, Ks, labels=False):
plt.figure()
plt.title('Saturation index %s%s' % (elem1, elem2))
    resolution = 100
    n = math.ceil(lab.time.size / resolution)
z = np.log10((lab.species[elem1]['concentration'][:, ::n] + 1e-8) * (
lab.species[elem2]['concentration'][:, ::n] + 1e-8) / lab.constants[Ks])
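    # z is the saturation index log10([elem1][elem2]/Ks); the 1e-8 offsets
    # keep the logarithm finite where a concentration is exactly zero.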
lim = np.max(abs(z))
lim = np.linspace(-lim - 0.1, +lim + 0.1, 51)
X, Y = np.meshgrid(lab.time[::n], -lab.x)
plt.xlabel('Time')
CS = plt.contourf(X, Y, z, 20, cmap=ListedColormap(sns.color_palette(
"RdBu_r", 101)), origin='lower', levels=lim, extend='both')
    if labels:
        plt.clabel(CS, inline=1, fontsize=10, colors='w')
    cbar = plt.colorbar(CS)
plt.ylabel('Depth')
ax = plt.gca()
ax.ticklabel_format(useOffset=False)
cbar.ax.set_ylabel('Saturation index %s%s' % (elem1, elem2))
return ax
def plot_fractions(lab):
for component in lab.acid_base_components:
if isinstance(component['pH_object'], Acid):
plt.figure()
for idx in range(len(component['species'])):
plt.plot(lab.time, lab.species[component['species'][idx]]
['alpha'][0, :], label=component['species'][idx])
plt.ylabel('Fraction')
plt.xlabel('Time')
plt.legend(frameon=1)
plt.grid(linestyle='-', linewidth=0.2)
def all_plot_depth_index(lab, *args, **kwargs):
for element in sorted(lab.species):
plt.figure()
        plot_depth_index(lab, element, *args, **kwargs)
def plot_depth_index(lab, element, idx=0, time_to_plot=False, time_factor=1, ax=None):
if ax is None:
ax = plt.subplot(111)
if element == 'Temperature':
ax.set_title('Temperature')
ax.set_ylabel('Temperature, C')
elif element == 'pH':
ax.set_title('pH')
ax.set_ylabel('pH')
else:
ax.set_ylabel('Concentration')
if time_to_plot:
num_of_elem = int(time_to_plot / lab.dt)
else:
num_of_elem = len(lab.time)
t = lab.time[-num_of_elem:] * time_factor
ax.set_xlabel('Time')
if isinstance(element, str):
ax.plot(t, lab.species[element]['concentration']
[idx][-num_of_elem:], lw=3)
ax.set_title(element + ' concentration')
elif isinstance(element, (list, tuple)):
for e in element:
ax.plot(t, lab.species[e]['concentration']
[idx][-num_of_elem:], lw=3, label=e)
ax.legend(frameon=1)
ax.grid(linestyle='-', linewidth=0.2)
return ax
def plot_depths(lab, element, depths=[0, 1, 2, 3, 4], time_to_plot=False):
plt.figure()
ax = plt.subplot(111)
if element == 'Temperature':
plt.title('Temperature at specific depths')
plt.ylabel('Temperature, C')
else:
plt.title(element + ' concentration at specific depths')
plt.ylabel('Concentration')
if time_to_plot:
num_of_elem = int(time_to_plot / lab.dt)
else:
num_of_elem = len(lab.time)
t = lab.time[-num_of_elem:]
plt.xlabel('Time')
for depth in depths:
lbl = str(depth)
plt.plot(t, lab.species[element]['concentration'][int(
depth / lab.dx)][-num_of_elem:], lw=3, label=lbl)
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax.grid(linestyle='-', linewidth=0.2)
return ax
def plot_times(lab, element, time_slices=[0, 1, 2, 3, 4]):
plt.figure()
ax = plt.subplot(111)
if element == 'Temperature':
plt.title('Temperature profile')
plt.xlabel('Temperature, C')
else:
plt.title(element + ' concentration')
plt.xlabel('Concentration')
plt.ylabel('Depth, cm')
for tms in time_slices:
lbl = 'at time: %.2f ' % (tms)
plt.plot(lab.species[element]['concentration'][
:, int(tms / lab.dt)], -lab.x, lw=3, label=lbl)
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), ncol=2)
ax.grid(linestyle='-', linewidth=0.2)
return ax
def plot_profiles(lab):
for element in sorted(lab.species):
plot_profile(lab, element)
def plot_profile(lab, element):
plt.figure()
plt.plot(lab.profiles[element], -lab.x,
sns.xkcd_rgb["denim blue"], lw=3, label=element)
if element == 'Temperature':
plt.title('Temperature profile')
plt.xlabel('Temperature, C')
elif element == 'pH':
plt.title('pH profile')
plt.xlabel('pH')
else:
plt.title('%s concentration' % (element, ))
plt.xlabel('Concentration')
plt.ylabel('Depth')
ax = plt.gca()
ax.ticklabel_format(useOffset=False)
ax.grid(linestyle='-', linewidth=0.2)
plt.legend()
plt.tight_layout()
return ax
def plot_contourplots(lab, **kwargs):
for element in sorted(lab.species):
contour_plot(lab, element, **kwargs)
def contour_plot(lab, element, labels=False, days=False, last_year=False):
plt.figure()
plt.title(element + ' concentration')
    resolution = 100
    n = math.ceil(lab.time.size / resolution)
if last_year:
k = n - int(1 / lab.dt)
else:
k = 1
    if days:
        X, Y = np.meshgrid(lab.time[k::n] * 365, -lab.x)
        plt.xlabel('Time, days')
    else:
        X, Y = np.meshgrid(lab.time[k::n], -lab.x)
        plt.xlabel('Time')
z = lab.species[element]['concentration'][:, k - 1:-1:n]
CS = plt.contourf(X, Y, z, 51, cmap=ListedColormap(
sns.color_palette("Blues", 51)), origin='lower')
if labels:
plt.clabel(CS, inline=1, fontsize=10, colors='w')
cbar = plt.colorbar(CS)
plt.ylabel('Depth')
ax = plt.gca()
ax.ticklabel_format(useOffset=False)
cbar.ax.set_ylabel('%s [M/V]' % element)
if element == 'Temperature':
plt.title('Temperature contour plot')
cbar.ax.set_ylabel('Temperature, C')
if element == 'pH':
plt.title('pH contour plot')
cbar.ax.set_ylabel('pH')
return ax
def plot_contourplots_of_rates(lab, **kwargs):
rate = sorted(lab.estimated_rates)
for r in rate:
contour_plot_of_rates(lab, r, **kwargs)
def contour_plot_of_rates(lab, r, labels=False, last_year=False):
plt.figure()
plt.title('{}'.format(r))
    resolution = 100
    n = math.ceil(lab.time.size / resolution)
if last_year:
k = n - int(1 / lab.dt)
else:
k = 1
z = lab.estimated_rates[r][:, k - 1:-1:n]
# lim = np.max(np.abs(z))
# lim = np.linspace(-lim - 0.1, +lim + 0.1, 51)
X, Y = np.meshgrid(lab.time[k::n], -lab.x)
plt.xlabel('Time')
CS = plt.contourf(X, Y, z, 20, cmap=ListedColormap(
sns.color_palette("Blues", 51)))
if labels:
plt.clabel(CS, inline=1, fontsize=10, colors='w')
cbar = plt.colorbar(CS)
plt.ylabel('Depth')
ax = plt.gca()
ax.ticklabel_format(useOffset=False)
cbar.ax.set_ylabel('Rate %s [M/V/T]' % r)
return ax
def plot_contourplots_of_deltas(lab, **kwargs):
elements = sorted(lab.species)
if 'Temperature' in elements:
elements.remove('Temperature')
for element in elements:
contour_plot_of_delta(lab, element, **kwargs)
def contour_plot_of_delta(lab, element, labels=False, last_year=False):
plt.figure()
plt.title('Rate of %s consumption/production' % element)
    resolution = 100
    n = math.ceil(lab.time.size / resolution)
if last_year:
k = n - int(1 / lab.dt)
else:
k = 1
z = lab.species[element]['rates'][:, k - 1:-1:n]
lim = np.max(np.abs(z))
lim = np.linspace(-lim - 0.1, +lim + 0.1, 51)
X, Y = np.meshgrid(lab.time[k:-1:n], -lab.x)
plt.xlabel('Time')
CS = plt.contourf(X, Y, z, 20, cmap=ListedColormap(sns.color_palette(
"RdBu_r", 101)), origin='lower', levels=lim, extend='both')
if labels:
plt.clabel(CS, inline=1, fontsize=10, colors='w')
cbar = plt.colorbar(CS)
plt.ylabel('Depth')
ax = plt.gca()
ax.ticklabel_format(useOffset=False)
    cbar.ax.set_ylabel(r'Rate of %s change $[\Delta/T]$' % element)
return ax
|
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class P2SVpnGatewaysOperations:
"""P2SVpnGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> "_models.P2SVpnGateway":
"""Retrieves the details of a virtual wan p2s vpn gateway.
:param resource_group_name: The resource group name of the P2SVpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: P2SVpnGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.P2SVpnGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.P2SVpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
gateway_name: str,
p2_s_vpn_gateway_parameters: "_models.P2SVpnGateway",
**kwargs: Any
) -> "_models.P2SVpnGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.P2SVpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(p2_s_vpn_gateway_parameters, 'P2SVpnGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
gateway_name: str,
p2_s_vpn_gateway_parameters: "_models.P2SVpnGateway",
**kwargs: Any
) -> AsyncLROPoller["_models.P2SVpnGateway"]:
"""Creates a virtual wan p2s vpn gateway if it doesn't exist else updates the existing gateway.
:param resource_group_name: The resource group name of the P2SVpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param p2_s_vpn_gateway_parameters: Parameters supplied to create or Update a virtual wan p2s
vpn gateway.
:type p2_s_vpn_gateway_parameters: ~azure.mgmt.network.v2019_11_01.models.P2SVpnGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either P2SVpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_11_01.models.P2SVpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.P2SVpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
p2_s_vpn_gateway_parameters=p2_s_vpn_gateway_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
gateway_name: str,
p2_s_vpn_gateway_parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.P2SVpnGateway":
"""Updates virtual wan p2s vpn gateway tags.
:param resource_group_name: The resource group name of the P2SVpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param p2_s_vpn_gateway_parameters: Parameters supplied to update a virtual wan p2s vpn gateway
tags.
:type p2_s_vpn_gateway_parameters: ~azure.mgmt.network.v2019_11_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: P2SVpnGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.P2SVpnGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.P2SVpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(p2_s_vpn_gateway_parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a virtual wan p2s vpn gateway.
:param resource_group_name: The resource group name of the P2SVpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ListP2SVpnGatewaysResult"]:
"""Lists all the P2SVpnGateways in a resource group.
:param resource_group_name: The resource group name of the P2SVpnGateway.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListP2SVpnGatewaysResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_11_01.models.ListP2SVpnGatewaysResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListP2SVpnGatewaysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListP2SVpnGatewaysResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways'} # type: ignore
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.ListP2SVpnGatewaysResult"]:
"""Lists all the P2SVpnGateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListP2SVpnGatewaysResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_11_01.models.ListP2SVpnGatewaysResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListP2SVpnGatewaysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListP2SVpnGatewaysResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/p2svpnGateways'} # type: ignore
async def _generate_vpn_profile_initial(
self,
resource_group_name: str,
gateway_name: str,
parameters: "_models.P2SVpnProfileParameters",
**kwargs: Any
) -> Optional["_models.VpnProfileResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnProfileResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generate_vpn_profile_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'P2SVpnProfileParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnProfileResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generate_vpn_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}/generatevpnprofile'} # type: ignore
async def begin_generate_vpn_profile(
self,
resource_group_name: str,
gateway_name: str,
parameters: "_models.P2SVpnProfileParameters",
**kwargs: Any
) -> AsyncLROPoller["_models.VpnProfileResponse"]:
"""Generates VPN profile for P2S client of the P2SVpnGateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gateway_name: The name of the P2SVpnGateway.
:type gateway_name: str
:param parameters: Parameters supplied to the generate P2SVpnGateway VPN client package
operation.
:type parameters: ~azure.mgmt.network.v2019_11_01.models.P2SVpnProfileParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnProfileResponse or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_11_01.models.VpnProfileResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnProfileResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._generate_vpn_profile_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnProfileResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generate_vpn_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}/generatevpnprofile'} # type: ignore
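    # Hedged usage sketch (client wiring assumed, names illustrative): the
    # returned AsyncLROPoller is awaited once to start the operation and once
    # more for its final result:
    #
    #     poller = await client.p2s_vpn_gateways.begin_generate_vpn_profile(
    #         "my-rg", "my-gateway", parameters)
    #     profile = await poller.result()  # _models.VpnProfileResponse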
async def _get_p2_s_vpn_connection_health_initial(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> Optional["_models.P2SVpnGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.P2SVpnGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self._get_p2_s_vpn_connection_health_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_p2_s_vpn_connection_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}/getP2sVpnConnectionHealth'} # type: ignore
async def begin_get_p2_s_vpn_connection_health(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.P2SVpnGateway"]:
"""Gets the connection health of P2S clients of the virtual wan P2SVpnGateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gateway_name: The name of the P2SVpnGateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either P2SVpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_11_01.models.P2SVpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.P2SVpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_p2_s_vpn_connection_health_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('P2SVpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_p2_s_vpn_connection_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}/getP2sVpnConnectionHealth'} # type: ignore
async def _get_p2_s_vpn_connection_health_detailed_initial(
self,
resource_group_name: str,
gateway_name: str,
request: "_models.P2SVpnConnectionHealthRequest",
**kwargs: Any
) -> Optional["_models.P2SVpnConnectionHealth"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.P2SVpnConnectionHealth"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_p2_s_vpn_connection_health_detailed_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(request, 'P2SVpnConnectionHealthRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('P2SVpnConnectionHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_p2_s_vpn_connection_health_detailed_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}/getP2sVpnConnectionHealthDetailed'} # type: ignore
async def begin_get_p2_s_vpn_connection_health_detailed(
self,
resource_group_name: str,
gateway_name: str,
request: "_models.P2SVpnConnectionHealthRequest",
**kwargs: Any
) -> AsyncLROPoller["_models.P2SVpnConnectionHealth"]:
"""Gets the sas url to get the connection health detail of P2S clients of the virtual wan
P2SVpnGateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gateway_name: The name of the P2SVpnGateway.
:type gateway_name: str
        :param request: Request parameters supplied to get the detailed health of P2S VPN connections.
:type request: ~azure.mgmt.network.v2019_11_01.models.P2SVpnConnectionHealthRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either P2SVpnConnectionHealth or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_11_01.models.P2SVpnConnectionHealth]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.P2SVpnConnectionHealth"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_p2_s_vpn_connection_health_detailed_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
request=request,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('P2SVpnConnectionHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_p2_s_vpn_connection_health_detailed.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{gatewayName}/getP2sVpnConnectionHealthDetailed'} # type: ignore
async def _disconnect_p2_s_vpn_connections_initial(
self,
resource_group_name: str,
p2_s_vpn_gateway_name: str,
request: "_models.P2SVpnConnectionRequest",
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._disconnect_p2_s_vpn_connections_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'p2sVpnGatewayName': self._serialize.url("p2_s_vpn_gateway_name", p2_s_vpn_gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(request, 'P2SVpnConnectionRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_disconnect_p2_s_vpn_connections_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{p2sVpnGatewayName}/disconnectP2sVpnConnections'} # type: ignore
async def begin_disconnect_p2_s_vpn_connections(
self,
resource_group_name: str,
p2_s_vpn_gateway_name: str,
request: "_models.P2SVpnConnectionRequest",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Disconnect P2S vpn connections of the virtual wan P2SVpnGateway in the specified resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param p2_s_vpn_gateway_name: The name of the P2S Vpn Gateway.
:type p2_s_vpn_gateway_name: str
        :param request: The parameters supplied to disconnect P2S VPN connections.
:type request: ~azure.mgmt.network.v2019_11_01.models.P2SVpnConnectionRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._disconnect_p2_s_vpn_connections_initial(
resource_group_name=resource_group_name,
p2_s_vpn_gateway_name=p2_s_vpn_gateway_name,
request=request,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'p2sVpnGatewayName': self._serialize.url("p2_s_vpn_gateway_name", p2_s_vpn_gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_disconnect_p2_s_vpn_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/p2svpnGateways/{p2sVpnGatewayName}/disconnectP2sVpnConnections'} # type: ignore
|
from molecule.driver import basedriver
class ProxmoxDriver(basedriver.BaseDriver):
def __init__(self, molecule):
super(ProxmoxDriver, self).__init__()
self.molecule = molecule
|
"""Update encrypted deploy password in Travis config file
"""
from __future__ import print_function
import base64
import json
import os
from getpass import getpass
import yaml
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
try:
    from urllib import urlopen  # Python 2
except ImportError:
    from urllib.request import urlopen  # Python 3
GITHUB_REPO = 'jarshwah/flake8_formatter_abspath'
TRAVIS_CONFIG_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '.travis.yml')
def load_key(pubkey):
"""Load public RSA key, with work-around for keys using
incorrect header/footer format.
Read more about RSA encryption with cryptography:
https://cryptography.io/latest/hazmat/primitives/asymmetric/rsa/
"""
try:
return load_pem_public_key(pubkey.encode(), default_backend())
except ValueError:
# workaround for https://github.com/travis-ci/travis-api/issues/196
pubkey = pubkey.replace('BEGIN RSA', 'BEGIN').replace('END RSA', 'END')
return load_pem_public_key(pubkey.encode(), default_backend())
def encrypt(pubkey, password):
"""Encrypt password using given RSA public key and encode it with base64.
The encrypted password can only be decrypted by someone with the
private key (in this case, only Travis).
"""
key = load_key(pubkey)
encrypted_password = key.encrypt(password, PKCS1v15())
return base64.b64encode(encrypted_password)
def fetch_public_key(repo):
"""Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
"""
keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
data = json.loads(urlopen(keyurl).read().decode())
if 'key' not in data:
errmsg = "Could not find public key for repo: {}.\n".format(repo)
errmsg += "Have you already added your GitHub repo to Travis?"
raise ValueError(errmsg)
return data['key']
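# Hedged usage sketch (repo slug and password below are illustrative only):
#
#     pubkey = fetch_public_key('user/repo')
#     token = encrypt(pubkey, b'my-pypi-password')  # base64-encoded bytes, safe for .travis.yml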
def prepend_line(filepath, line):
"""Rewrite a file adding a line to its beginning.
"""
with open(filepath) as f:
lines = f.readlines()
lines.insert(0, line)
with open(filepath, 'w') as f:
f.writelines(lines)
def load_yaml_config(filepath):
with open(filepath) as f:
        return yaml.safe_load(f)
def save_yaml_config(filepath, config):
with open(filepath, 'w') as f:
yaml.dump(config, f, default_flow_style=False)
def update_travis_deploy_password(encrypted_password):
"""Update the deploy section of the .travis.yml file
to use the given encrypted password.
"""
config = load_yaml_config(TRAVIS_CONFIG_FILE)
config['deploy']['password'] = dict(secure=encrypted_password)
save_yaml_config(TRAVIS_CONFIG_FILE, config)
    line = ('# This file was autogenerated and will be overwritten'
            ' each time you run travis_pypi_setup.py\n')
prepend_line(TRAVIS_CONFIG_FILE, line)
def main(args):
public_key = fetch_public_key(args.repo)
password = args.password or getpass('PyPI password: ')
update_travis_deploy_password(encrypt(public_key, password.encode()))
print("Wrote encrypted password to .travis.yml -- you're ready to deploy")
if '__main__' == __name__:
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--repo', default=GITHUB_REPO,
help='GitHub repo (default: %s)' % GITHUB_REPO)
parser.add_argument('--password',
help='PyPI password (will prompt if not provided)')
args = parser.parse_args()
main(args)
|
import sys, os
sys.path.insert(0, os.path.abspath('files/codes'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'mp3fm'
copyright = u'2013, Akshit Agarwal'
version = '1.0.1'
release = '1.0.1'
exclude_patterns = []
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
html_theme = 'default'
html_logo = "files/images/logo.jpg"
html_static_path = ['_static']
html_domain_indices = False
html_use_index = False
html_show_sphinx = False
html_show_copyright = True
htmlhelp_basename = 'mp3fmdoc'
latex_elements = {
}
latex_documents = [
('index', 'mp3fm.tex', u'mp3fm Documentation',
u'Akshit Agarwal', 'manual'),
]
man_pages = [
('index', 'mp3fm', u'mp3fm Documentation',
[u'Akshit Agarwal'], 1)
]
texinfo_documents = [
('index', 'mp3fm', u'mp3fm Documentation',
u'Akshit Agarwal', 'mp3fm', 'One line description of project.',
'Miscellaneous'),
]
epub_title = u'mp3fm'
epub_author = u'Akshit Agarwal'
epub_publisher = u'Akshit Agarwal'
epub_copyright = u'2013, Akshit Agarwal'
|
from dcbase.tests.baseTestCase import BaseTestCase
|
import unittest
import os
import sys
if __name__ == "__main__":
test_dirs = os.listdir('./tests')
loader = unittest.TestLoader()
for directory in test_dirs:
if directory == "app":
test_path = "./tests/{}".format(directory)
suite = loader.discover(test_path)
result = unittest.TextTestRunner(verbosity=2).run(suite)
i = len(result.failures) + len(result.errors)
if i != 0:
sys.exit(1)
|
import re
import markdown
from django.conf import settings
from django.template.loader import render_to_string
from filer.models import File
from .base import MarkymarkExtension
class FilerPostprocessor(markdown.postprocessors.Postprocessor):
"""
Filer markdown extension for django-filer to show files and images.
"""
FILE_RE = re.compile(r'(\[file\:(?P<id>\d+)\])', re.IGNORECASE)
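    # e.g. the tag "[file:42]" is replaced by the rendered
    # MARKYMARK_TEMPLATE_FILER template (default 'markymark/filer.html')
    # for the filer File with pk=42.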
def run(self, text):
def re_callback(match):
options = match.groupdict()
try:
obj = File.objects.get(pk=int(options['id']))
return render_to_string(
getattr(
settings,
'MARKYMARK_TEMPLATE_FILER',
'markymark/filer.html'
),
{'file': obj.get_real_instance()}
).strip()
except (KeyError, File.DoesNotExist):
if settings.DEBUG:
raise
return match.group(0).replace(match.group(1), '')
return self.FILE_RE.sub(re_callback, text)
class FilerExtension(MarkymarkExtension):
"""
    Extension that looks for file tags and replaces them with HTML.
    Images are rendered as ``<img>`` tags; other files are rendered as download links.
"""
postprocessors = (FilerPostprocessor,)
class Media:
js = ('markymark/extensions/filer.js',)
css = {'all': ('markymark/extensions/filer.css',)}
def makeExtension(**kwargs):
return FilerExtension(**kwargs)
|
from unittest import TestCase
from src.utilise.json_serialiser import JsonSerialiser
from src.utilise.built_in_extensions import *
import typing
__author__ = 'James Stidard'
JSS = typing.TypeVar( 'JSS', bound=JsonSerialiser )
class Thing(JsonSerialiser):
str_attr = "string"
int_attr = 23
bool_attr = True
float_attr = 23.54
set_attr = set()
list_attr = []
dict_attr = {}
child_attr = None
def __init__(self,
str_attr: str='string', int_attr: int=0, bool_attr: bool=True,
float_attr: float=0, set_attr: set=None, list_attr: list=None,
dict_attr: dict=None, child_attr: JSS=None):
self.str_attr = str_attr
self.int_attr = int_attr
self.bool_attr = bool_attr
self.float_attr = float_attr
self.set_attr = set_attr
self.list_attr = list_attr
self.dict_attr = dict_attr
self.child_attr = child_attr
@classmethod
def surrogate_vars( cls ):
return { 'str_attr', 'int_attr' }
def test_method(self, a: int, b: int):
return a + b + self.int_attr
class TestObjectSerialiser(TestCase):
def setUp(self):
self.thing = Thing()
def test_str_to_json(self):
self.thing.str_attr = "A String"
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['str_attr'] == "A String")
def test_positive_int_to_json(self):
self.thing.int_attr = 5
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['int_attr'] == 5)
def test_negative_int_to_json(self):
self.thing.int_attr = -502
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['int_attr'] == -502)
def test_false_bool_to_json(self):
self.thing.bool_attr = False
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['bool_attr'] == False)
def test_true_bool_to_json(self):
self.thing.bool_attr = True
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['bool_attr'] == True)
def test_positive_float_to_json(self):
self.thing.float_attr = 40.0
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['float_attr'] == 40.0)
def test_negative_float_to_json(self):
self.thing.float_attr = -40.4
json_thing = self.thing.to_json_dictionary()
self.assertTrue(json_thing['float_attr'] == -40.4)
def test_child_str_to_json(self):
self.thing.child_attr = Thing(str_attr='hello')
json_thing = self.thing.to_json_dictionary(depth=2)
self.assertTrue(json_thing['child_attr']['str_attr'] == 'hello')
def test_child_int_to_json(self):
self.thing.child_attr = Thing(int_attr=3)
json_thing = self.thing.to_json_dictionary(depth=2)
self.assertTrue(json_thing['child_attr']['int_attr'] == 3)
def test_child_surrogate_to_json(self):
self.thing.child_attr = Thing()
surrogate_attributes = Thing.surrogate_vars()
non_surrogate_attributes = [attr for attr in public_vars(self.thing) if attr not in surrogate_attributes]
json_thing = self.thing.to_json_dictionary()
json_child = json_thing['child_attr']
for surrogate_attr in surrogate_attributes:
self.assertTrue(surrogate_attr in json_child)
for non_surrogate_attr in non_surrogate_attributes:
self.assertTrue(non_surrogate_attr not in json_child)
def test_child_full_to_json(self):
self.thing.child_attr = Thing()
json_thing = self.thing.to_json_dictionary(depth=2)
json_child = json_thing['child_attr']
for attr in json_child:
self.assertTrue(attr in public_vars(self.thing))
|
import six
from ...errors.httpconflictexception import HttpConflictException
import saklient
str = six.text_type
class SnapshotInMigrationException(HttpConflictException):
    ## The requested operation cannot be performed: a copy from this snapshot, or from a newer one, to another resource is still in progress. Please try again after it completes.
## @param {int} status
# @param {str} code=None
# @param {str} message=""
def __init__(self, status, code=None, message=""):
super(SnapshotInMigrationException, self).__init__(status, code, "要求された操作を行えません。このスナップショット または これより新しいスナップショットから他のリソースへのコピー処理が進行中です。完了後に再度お試しください。" if message is None or message == "" else message)
|
from __future__ import print_function
import argparse
import ast
import json
from influxdb import InfluxDBClient
def write(client, data):
    result = client.write_points(data)
    print("{\"status\":\"" + str(result) + "\"}")
def query(client, query):
    result = client.query(query)
    print(json.dumps(result.raw))  # pass indent=4, sort_keys=True for pretty output
def createDB(client,dbname):
result = client.create_database(dbname)
def main(args):
client = InfluxDBClient(host=args.host, port=args.port, database = args.db)
if args.write:
list_data = []
data = ast.literal_eval(args.write)
list_data.append(data)
write(client,list_data)
elif args.query:
query(client,str(args.query))
elif args.createDB:
createDB(client,args.createDB)
def parse_args():
parser = argparse.ArgumentParser(
description='InfluxDB driver script')
parser.add_argument('--host', type=str, required=False, default='localhost',
help='hostname of InfluxDB http API')
parser.add_argument('--port', type=int, required=False, default=8086,
help='port of InfluxDB http API')
parser.add_argument('--db', type=str, required=False, default='devicelog',
help='hostname of InfluxDB http API')
parser.add_argument('--write', type=str, required=False,
help='JSON with write data')
parser.add_argument('--query', type=str, required=False,
help='Query string')
parser.add_argument('--createDB', type=str, required=False,
help='Create new Database')
return parser.parse_args()
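# Hedged usage sketch (script name and values are illustrative):
#
#     python influxdb_driver.py --createDB devicelog
#     python influxdb_driver.py --write "{'measurement': 'temp', 'fields': {'value': 21.5}}"
#     python influxdb_driver.py --query "SELECT * FROM temp LIMIT 10"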
if __name__ == '__main__':
args = parse_args()
main(args)
|
import os
BASEDIR = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
SECRET_KEY = 'secret'
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + \
os.path.join(BASEDIR, 'bucketlist.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + \
os.path.join(BASEDIR, 'bucketlist-database.sqlite')
class DevelopmentConfig(Config):
DEBUG = True
class TestingConfig(Config):
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + \
os.path.join(BASEDIR, 'tests/bucketlist_test.sqlite')
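# Hedged usage sketch: these classes follow the usual Flask config-object
# pattern (Flask usage and the module path are assumptions of this sketch):
#
#     app.config.from_object('config.DevelopmentConfig')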
|
"""
Genetic Algorithm for solving Selective Travelling Salesman Problem.
:Author: Fabio Scala <fabio.scala@gmail.com>
"""
import logging
import random
import time
import numpy
class GaSolver(object):
""" Genetic algorithm based on `A. Piwonska selective travelling salesman algorithm <http://yadda.icm.edu.pl/baztech/element/bwmeta1.element.baztech-article-BPB1-0051-0005>`_
:param start: The index of the tour starting point in distances.
:type start: int
:param end: The index of the tour ending point in distances. If end equals start, a different approach for generation of the initial tours (population) is used.
:type end: int
:param distances: A distance matrix for all the available points.
:type distances: numpy.ndarray
:param max_cost: The maximum cost (sum of distances) a tour is allowed to have.
:type max_cost: float
:param profits: Optional profits associated with each point in the distance matrix. No profits are used (equal) if omitted.
:type profits: numpy.ndarray
    :param population_size: The number of solutions to be used during calculation. A higher number can lead to better results but will run longer.
    :type population_size: int
    :param tournament_size: The group size of random individuals for selection. The higher this value, the faster the algorithm converges, but it also yields less diverse solutions and a greater chance of getting stuck in local minima.
    :type tournament_size: int
    :param min_generations: The minimum number of iterations to run the algorithm for.
    :type min_generations: int
    :param max_generations: The maximum number of iterations to run the algorithm for.
    :type max_generations: int
    :param termination_threshold: If fitness in terms of costs no longer improves by more than the specified value, the algorithm will stop (1 = 100%). E.g. with termination_threshold = 0.01, the algorithm stops if the costs do not improve by at least 1%.
    :type termination_threshold: float
    :param max_runtime: Maximum runtime in milliseconds before stopping the algorithm regardless of a good or bad fitness.
    :type max_runtime: int
"""
def __init__(self, start, end, distances, max_cost, profits=None, population_size=1000, tournament_size=5, min_generations=5, max_generations=200, termination_threshold=.01, max_runtime=10000):
self.start = start
self.end = end
self.distances = distances
self.max_cost = max_cost
self.profits = profits
self.population_size = population_size
self.tournament_size = tournament_size
self.min_generations = min_generations
self.max_generations = max_generations
self.termination_threshold = termination_threshold
self.max_runtime = max_runtime
self._init_population = self._init_population_loop if start == end else self._init_population_tour
def _init_population_loop(self):
""" Generates initial population for the "tsp" (loop) version (start equals end)
* Start from the specified starting point
* Add random points to the tour until the cost max_cost/2 is reached
* Return back to the start using the same points
"""
max_init_cost = 0.5 * self.max_cost
paths = self.population['path']
costs = self.population['cost']
for i in xrange(self.population_size):
path = [self.start]
c = 0
ind_last = 0
while True:
d_from = self.distances[ind_last, :]
cands = numpy.where((d_from != 0) & (d_from < max_init_cost - c))[0]
if cands.shape[0] != 0:
# ind_next = numpy.random.choice(numpy.argsort(d_from)[:5])
ind_next = random.choice(cands)
# ind_next = np.random.randint(0, n_pois)
if ind_last != ind_next or cands.shape[0] == 1: # relax condition
c += d_from[ind_next]
# hop to next
path.append(ind_next)
ind_last = ind_next
else:
break
# go back the way we came
paths[i] = path + path[::-1][-max(0, len(path) - 1):]
costs[i] = 2 * c
def _init_population_tour(self):
""" Generates initial population for the "tour" version (start and end are different point)
* Fix start end end point
* Add random points from a set of candidates (candidate to end cannot exceed maximum cost) until no further points can be added
* Do this for the half of the population and the opposite (from end to start) for the other half.
"""
paths = self.population['path']
costs = self.population['cost']
distances = self.distances
mid = self.population_size / 2
for i_from, i_to, start, end in ((0, mid, self.start, self.end), (mid, self.population_size, self.end, self.start)):
is_reverse = start != self.start
for i in xrange(i_from, i_to):
individual = [start]
c = 0
ind_last = start
while True: # we break manually
d_from = distances[ind_last, :]
max_cost = self.max_cost - c
# only hopping to these points would not exceed max_cost
cands = numpy.where((distances[ind_last] + distances[end]) <= max_cost)[0]
cands = cands[(cands != end) & (cands != ind_last)]
if cands.shape[0] != 0:
ind_next = random.choice(cands)
if ind_last != ind_next:
c += d_from[ind_next]
individual.append(ind_next)
ind_last = ind_next
else:
c += d_from[end]
individual.append(end)
break
if is_reverse:
# if we started from "end", reverse the tour
individual = individual[::-1]
paths[i] = individual
costs[i] = c
def _calc_fitness(self, population):
""" Calculates the fitness for a given population so that it would sort equivalent to: path length descending, cost ascending
:param population: Population
:type population: structured population numpy.ndarray
"""
paths = population['path']
costs = population['cost']
path_lens = numpy.array([len(path) for path in paths], int)
# same as lexsort cost, -len (len desc, cost asc)
if self.profits is not None:
profits = [numpy.take(self.profits, p).sum() for p in paths]
fitness = (profits + path_lens) * self.max_cost
else:
fitness = path_lens * self.max_cost
fitness -= costs
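        # Worked example (illustrative numbers): with max_cost = 100 and no
        # profits, a 5-point path costing 80 scores 5*100 - 80 = 420, while a
        # 4-point path costing 10 scores 4*100 - 10 = 390; longer paths always
        # dominate, and lower cost breaks ties within the same length.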
return fitness
def _unique_path(self, path):
""" Removes duplicate points in a tour respecting start and end.
:param path: Path encoded as list of integers
:return: List with duplicates removed
"""
# Tested, faster than numpy.unique
# http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order
seen = {self.start, self.end}
seen_add = seen.add
return [self.start] + [x for x in path[1:-1] if not (x in seen or seen_add(x))] + [self.end]
def _do_selection(self):
""" Does the "selection" part of the genetic algorithm.
        * Pick <tournament_size> random individuals from the population and replace the current individual with the fittest of those.
* Do this for each individual in the population
"""
population = self.population
self.offspring = offspring = numpy.zeros(self.population_size, population.dtype)
population['fitness'] = fitness = self._calc_fitness(population)
n = self.population_size
self.fittest[self.current_generation] = tuple(self.population[fitness.argmax()])
        for i, i_samples in ((j, numpy.random.randint(0, n, self.tournament_size)) for j in xrange(n)):
fittest = population[i_samples[numpy.argmax(fitness[i_samples])]]
offspring[i] = tuple(fittest)
self.population = offspring
def _iter_couples(self):
""" Helper generator function to iterate over random couples of the population having used each one only once
:return: Generator (index, individual)
"""
n = self.population_size
population = self.population
a = range(n)
random.shuffle(a)
for i in xrange(0, n, 2):
i1 = a[i]
i2 = a[i + 1]
yield (i1, population[i1]), (i2, population[i2])
def _do_crossover(self):
""" Crosses random couples of the population and replaces the individual if the cost constraints of the child is still met.
* Find common genes in both indivudals
* Pick a random common gene
* Switch the parts from the common gene to the end of both individuals
"""
population = self.population
for (i_individual, individual), (i_partner, partner) in self._iter_couples():
common_genes = set(individual['path']).intersection(partner['path']).difference({self.start, self.end})
if common_genes:
crossing_gene = random.sample(common_genes, 1)[0]
# index after crossing point
i_cross_individual = individual['path'].index(crossing_gene) + 1
i_cross_partner = partner['path'].index(crossing_gene) + 1
first_child = individual['path'][:i_cross_individual] + partner['path'][i_cross_partner:]
second_child = partner['path'][:i_cross_partner] + individual['path'][i_cross_individual:]
child_cost = self.distances[first_child[:-1], first_child[1:]].sum()
if child_cost < self.max_cost:
population[i_individual]['cost'] = child_cost
population[i_individual]['path'] = first_child
child_cost = self.distances[second_child[:-1], second_child[1:]].sum()
if child_cost < self.max_cost:
population[i_partner]['cost'] = child_cost
population[i_partner]['path'] = second_child
def _do_mutation(self):
""" Mutates each individual of the population:
* Remove duplicate points
* Delete a random gene (point)
* Pick a random gene
* Insert as many points as possible while still meeting the cost constraints
"""
population = self.population
paths = population['path']
costs = population['cost']
for i in xrange(self.population_size):
path = paths[i]
if len(path) > 2:
# remove dups
path_new = self._unique_path(path)
cost_new = self.distances[path_new[:-1], path_new[1:]].sum()
costs[i] = cost_new
paths[i] = path = path_new
# remove random point
                i_remove = random.randint(1, len(path) - 2)
                del path[i_remove]
                # recompute the cost after the removal so it matches the path
                costs[i] = self.distances[path[:-1], path[1:]].sum()
i_insert = random.randint(1, len(path) - 1)
from_ = path[i_insert - 1]
increments = self.distances[from_, :]
if self.profits is not None:
# we have given weights
i_sorted = numpy.lexsort([increments, -self.profits])
else:
i_sorted = numpy.argsort(increments)
for ins_cand in i_sorted:
if ins_cand not in path:
path_new = list(path)
path_new.insert(i_insert, ins_cand)
c_temp = self.distances[path_new[:-1], path_new[1:]].sum()
if c_temp < self.max_cost:
paths[i] = path = path_new
costs[i] = c_temp
else:
break
def _init(self):
self.fittest = numpy.zeros(self.max_generations, [('path', 'O'), ('cost', 'f'), ('fitness', 'f')])
self.population = numpy.zeros(self.population_size, [('path', 'O'), ('cost', 'f'), ('fitness', 'f')])
def calc_tour(self, last_ng=None):
""" Runs the genetic algorithm and returns the best tour
.. warning:: Never use all generations for comparisons. The first generation is usually very good in its fitness because of the symmetries of the initial population.
        :param last_ng: The last n generations to compare
        :type last_ng: int
        :return: Tuple of the ordered indices of the tour points in the distance matrix and the tour cost
        :rtype: tuple
"""
if self.max_cost < self.distances[self.start, self.end]:
return [], 0
self._init()
self._run()
last_ng = last_ng or self.current_generation
group = self.fittest[self.current_generation - last_ng:self.current_generation]
fittest = group[group['fitness'].argmax()]
return fittest['path'], fittest['cost']
def _iter_generations(self):
""" Generator which controls number of generations/iterations based on fitness improvement/convergence and maximum number of iterations or runtime.
"""
max_runtime_s = self.max_runtime / 1000.0
start_time = time.time()
compare_generations = min(self.min_generations, 5)
for generation in xrange(self.max_generations):
if time.time() - start_time > max_runtime_s:
                logging.info('Ending Genetic Algorithm after {}ms'.format(self.max_runtime))
break
if generation > self.min_generations:
                # basically compare the fittest of the last compare_generations with all earlier ones and see if fitness has improved
last_ng = self.fittest[generation - compare_generations - 1:generation - 1]
fittest_ng = last_ng[last_ng['fitness'].argmax()]
# rest but at least 1, so no -1 in index
compare_to = self.fittest[:generation - compare_generations]
fittest_compare_to = compare_to[compare_to['fitness'].argmax()]
delta_fitness = fittest_ng['fitness'] - fittest_compare_to['fitness']
delta_cost = last_ng['cost'].max() - last_ng['cost'].min()
# fitness improvement is less than max_cost -> path length remained the same. see _calc_fitness()
# path len constant & costs did not improve (lowered) in the last compare_generations iterations, stop!
if delta_fitness < self.max_cost and (delta_cost / self.max_cost < self.termination_threshold):
logging.info('Ending Genetic Algorithm after desired convergence {}, {}'.format(delta_fitness, delta_cost / self.max_cost))
break
yield generation
def _run(self):
""" Runs the main loop (generations) of the genetic algorithm. Should not be called directly.
"""
self._init_population()
self.current_generation = 0
self._do_selection()
for generation in self._iter_generations():
self.current_generation = generation
self._do_crossover()
self._do_mutation()
# we do selection at the end, which is at the same time the start of a potentially next generation
# like this we have a "good" generation with updated fitness values at the end of the algorithm
self._do_selection()
if __name__ == '__main__':
def plot_tour(path, points):
import matplotlib.pyplot as plt
x = []
y = []
for i in path:
x.append(points[i][0])
y.append(points[i][1])
plt.plot(*zip(*points[1:]), marker='o', color='b', ls='')
plt.plot(x, y, 'go')
arrow_scale = float(max(x)) / float(100)
for i in range(0, len(x) - 1):
plt.arrow(x[i], y[i], (x[i + 1] - x[i]), (y[i + 1] - y[i]), head_width=arrow_scale,
color='g', length_includes_head=True)
plt.plot(points[path[0]][0], points[path[0]][1], marker='o', color='r', ls='')
plt.plot(points[path[-1]][0], points[path[-1]][1], marker='o', color='r', ls='')
plt.show()
def test(start=0, end=0, t_size=5, population_size=1000, n_coords=500, n_points=400, max_cost=1000, plot=False, plot_convergence=False, termination_threshold=.01):
import scipy.spatial.distance as distance
points = numpy.random.randint(0, n_coords, (n_points, 2))
distances = distance.squareform(distance.pdist(points))
ga = GaSolver(population_size=population_size, tournament_size=t_size,
max_cost=max_cost, start=start, end=end, distances=distances, termination_threshold=termination_threshold)
        path, cost = ga.calc_tour()
numpy.testing.assert_array_less(ga.population['cost'], max_cost)
numpy.testing.assert_array_less(ga.fittest['cost'], max_cost)
assert all([p[0] == start for p in ga.population['path']])
assert all([p[-1] == end for p in ga.population['path']])
if plot:
            print(ga.fittest[ga.current_generation])
plot_tour(path, points)
if plot_convergence:
import matplotlib.pyplot as plt
plt.plot(ga.fittest['fitness'])
plt.show()
return ga
|
import time
while True:
print "I need to make sure I'm still alive."
time.sleep(1)
|
from django import template
from djqgrid import json_helpers
register = template.Library()
@register.simple_tag(takes_context=True)
def jqgrid(context, grid, prefix='', pager=True, urlquery=None, **kwargs):
"""
Adds a complete jqGrid - HTML and JavaScript - to the template.
Two HTML elements are added, a ``<table id='grid'>`` and a ``<div id='pager'>``. JavaScript code to initialize the
jqGrid is also added.
    After the grid is set up in the browser, it will access the server again and ask for the grid data. The URL is
    defined in the Grid object; the ``urlquery`` argument and the current request's query dict are appended to it.
For example, if the template is rendering the URL ``/view?p=1``, the grid's URL is ``/grid/17?g=2`` and
``urlquery`` is ``q=3``, the grid's data will be retrieved from ``/grid/17?g=2&q=3&p=1``
Args:
context - The template context
grid - the Grid
prefix - A prefix for the grid and pager element IDs. The default is no prefix, meaning the elements are
named ``grid`` and ``pager``. Adding ``prefix='prefix'`` creates elements with the IDs ``prefix-grid``
and ``prefix-pager``.
pager - True if a pager is added to the grid. If no pager is added, the row count is set to 99,999.
urlquery - An additional query string that will be added to the data request that will be sent to the server.
**kwargs - All additional arguments are added as is to the jqGrid initialization option object.
Returns:
The generated HTML
"""
if prefix and prefix[-1]!='-':
prefix += '-' # Append a - to the prefix
gridId = prefix + "grid"
pagerId = prefix + "pager"
options = grid.get_options(kwargs)
    # Append urlquery and the current request's query string to the URL,
    # keeping the '?' and '&' separators valid
    extra = [q for q in (urlquery, context['request'].GET.urlencode()) if q]
    if extra:
        options['url'] += ('&' if '?' in options['url'] else '?') + '&'.join(extra)
if pager:
options['pager'] = '#' + pagerId
else:
options['rowNum'] = 99999
options = json_helpers.dumps(options, indent=4)
html = """
<table id="%s"><tr><td></td></tr></table>
<div id="%s"></div>
<script type="text/javascript">
$(function() {
$('#%s').jqGrid(%s);
});
</script>""" % (gridId, pagerId, gridId, options);
return html
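# Hedged usage sketch (grid and value names are illustrative; the tag library
# is assumed to be registered as ``djqgrid``):
#
#     {% load djqgrid %}
#     {% jqgrid mygrid prefix='orders' urlquery='status=open' %}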
|
from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(shared_option_name="IlmBase:shared")
builder.run()
|
"""Fifth step for RITE: train or test classifier with extract feature"""
if __name__ == '__main__':
pass
|
from __future__ import print_function
import argparse
import logging
from clang.cindex import Config
import sys
from analyzer.designparam import DesignAnalysis
from analyzer.jsonparam import TasksConfig
def write_source_files(output, hardware_file, software_file):
    hardware_output = [line for is_hardware, is_software, line in output if is_hardware]
    software_output = [line for is_hardware, is_software, line in output if is_software]
with open(hardware_file, "w") as f:
f.write("\n".join(hardware_output))
with open(software_file, "w") as f:
f.write("\n".join(software_output))
def print_output(output):
for is_hardware, is_software, line in output:
if is_hardware and not is_software:
tag = "H "
elif is_software and not is_hardware:
tag = " S"
else:
tag = "HS"
logging.info("%s %s", tag, line.rstrip())
def main():
parser = argparse.ArgumentParser()
parser.add_argument("c_file")
parser.add_argument("conf_file")
parser.add_argument("--llvm-libdir", default=None, required=False)
parser.add_argument("--llvm-libfile", default=None, required=False)
parser.add_argument(
"--logging", default="WARNING",
choices=["debug", "info", "warning", "error", "critical"])
parser.add_argument("--debug", required=False, action="store_true")
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.logging.upper()))
c_file = args.c_file
conf_file = args.conf_file
hardware_file = args.c_file[:-2] + "_hw.c"
software_file = args.c_file[:-2] + "_sw.c"
logging.debug("input C source file: %s", c_file)
logging.debug("input JSON config file: %s", conf_file)
logging.debug("output C hardware source file: %s", hardware_file)
logging.debug("output C software source file: %s", software_file)
    # Load the task settings from the JSON config file
config = TasksConfig.parse_config(conf_file)
if config is None:
return 1
    # Collect function information with clang
analyzer = DesignAnalysis(c_file, llvm_libdir=args.llvm_libdir,
llvm_libfile=args.llvm_libfile)
    # Analyze based on the configuration
analyzer.analyze(config)
    # Output the analysis results
output = analyzer.generate_output()
    # Split the source code into hardware and software parts
print_output(output)
write_source_files(output, hardware_file, software_file)
if __name__ == "__main__":
sys.exit(main())
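# Hedged invocation sketch (file names and paths are illustrative):
#
#     python main.py design.c tasks.json --llvm-libdir /usr/lib/llvm/lib --logging info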
|
from tests.test_helper import *
class TestSearch(unittest.TestCase):
def test_text_node_is(self):
node = Search.TextNodeBuilder("name")
self.assertEqual({"is": "value"}, (node == "value").to_param())
def test_text_node_is_not(self):
node = Search.TextNodeBuilder("name")
self.assertEqual({"is_not": "value"}, (node != "value").to_param())
def test_text_node_starts_with(self):
node = Search.TextNodeBuilder("name")
self.assertEqual({"starts_with": "value"}, (node.starts_with("value")).to_param())
def test_text_node_ends_with(self):
node = Search.TextNodeBuilder("name")
self.assertEqual({"ends_with": "value"}, (node.ends_with("value")).to_param())
def test_text_node_contains(self):
node = Search.TextNodeBuilder("name")
self.assertEqual({"contains": "value"}, (node.contains("value")).to_param())
def test_multiple_value_node_in_list(self):
node = Search.MultipleValueNodeBuilder("name")
self.assertEqual(["value1", "value2"], (node.in_list(["value1", "value2"])).to_param())
def test_multiple_value_node_in_list_as_arg_list(self):
node = Search.MultipleValueNodeBuilder("name")
self.assertEqual(["value1", "value2"], (node.in_list("value1", "value2")).to_param())
def test_multiple_value_node_is(self):
node = Search.MultipleValueNodeBuilder("name")
self.assertEqual(["value1"], (node == "value1").to_param())
def test_multiple_value_node_with_value_in_whitelist(self):
node = Search.MultipleValueNodeBuilder("name", ["okay"])
self.assertEqual(["okay"], (node == "okay").to_param())
@raises(AttributeError)
def test_multiple_value_node_with_value_not_in_whitelist(self):
node = Search.MultipleValueNodeBuilder("name", ["okay", "also okay"])
node == "not okay"
def test_multiple_value_or_text_node_is(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual({"is": "value"}, (node == "value").to_param())
def test_multiple_value_or_text_node_is_not(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual({"is_not": "value"}, (node != "value").to_param())
def test_multiple_value_or_text_node_starts_with(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual({"starts_with": "value"}, (node.starts_with("value")).to_param())
def test_multiple_value_or_text_node_ends_with(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual({"ends_with": "value"}, (node.ends_with("value")).to_param())
def test_multiple_value_or_text_node_contains(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual({"contains": "value"}, (node.contains("value")).to_param())
def test_multiple_value_or_text_node_in_list(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual(["value1", "value2"], (node.in_list(["value1", "value2"])).to_param())
def test_multiple_value_or_text_node_in_list_as_arg_list(self):
node = Search.MultipleValueOrTextNodeBuilder("name")
self.assertEqual(["value1", "value2"], (node.in_list("value1", "value2")).to_param())
def test_multiple_value_or_text_node_with_value_in_whitelist(self):
node = Search.MultipleValueOrTextNodeBuilder("name", ["okay"])
self.assertEqual(["okay"], node.in_list("okay").to_param())
@raises(AttributeError)
def test_multiple_value_or_text_node_with_value_not_in_whitelist(self):
node = Search.MultipleValueOrTextNodeBuilder("name", ["okay"])
node.in_list("not okay").to_param()
def test_range_node_min_ge(self):
node = Search.RangeNodeBuilder("name")
self.assertEqual({"min": "value"}, (node >= "value").to_param())
def test_range_node_min_greater_than_or_equal_to(self):
node = Search.RangeNodeBuilder("name")
self.assertEqual({"min": "value"}, (node.greater_than_or_equal_to("value")).to_param())
def test_range_node_max_le(self):
node = Search.RangeNodeBuilder("name")
self.assertEqual({"max": "value"}, (node <= "value").to_param())
def test_range_node_max_less_than_or_equal_to(self):
node = Search.RangeNodeBuilder("name")
self.assertEqual({"max": "value"}, (node.less_than_or_equal_to("value")).to_param())
def test_range_node_between(self):
node = Search.RangeNodeBuilder("name")
self.assertEqual({"min": "min_value", "max": "max_value"}, (node.between("min_value", "max_value")).to_param())
def test_range_node_is(self):
node = Search.RangeNodeBuilder("name")
self.assertEqual({"is": "value"}, (node == "value").to_param())
def test_key_value_node_is_eq(self):
node = Search.KeyValueNodeBuilder("name")
self.assertTrue((node == True).to_param())
def test_key_value_node_is_equal(self):
node = Search.KeyValueNodeBuilder("name")
self.assertEqual(True, (node.is_equal(True)).to_param())
def test_key_value_node_is_not_equal(self):
node = Search.KeyValueNodeBuilder("name")
self.assertEqual(False, (node.is_not_equal(True)).to_param())
def test_key_value_node_symbols_is_not_equal(self):
node = Search.KeyValueNodeBuilder("name")
self.assertEqual(False, (node != True).to_param())
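
# A hedged usage sketch (assumed API, mirroring the tests above): each
# builder comparison or method call returns a node whose to_param() yields
# the criterion dict or list sent to the search endpoint.
#
#     name = Search.TextNodeBuilder("name")
#     name.starts_with("Jo").to_param()      # -> {"starts_with": "Jo"}
#     ids = Search.MultipleValueNodeBuilder("ids")
#     ids.in_list("a", "b").to_param()       # -> ["a", "b"]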
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HSS3_if1_CompleteLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HSS3_if1_CompleteLHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HSS3_if1_CompleteLHS, self).__init__(name='HSS3_if1_CompleteLHS', num_nodes=0, edges=[])
# Set the graph attributes
self["mm__"] = []
self["MT_constraint__"] = """#===============================================================================
return True
"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'SS3_if1')
# Nodes that represent match classes
        # Nodes that represent apply classes
# match class Inst() node
self.add_node()
self.vs[0]["MT_subtypeMatching__"] = False
self.vs[0]["MT_pre__attr1"] = """
return True
"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["MT_subtypes__"] = []
self.vs[0]["MT_dirty__"] = False
self.vs[0]["mm__"] = """MT_pre__Inst"""
self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class Name() node
self.add_node()
self.vs[1]["MT_subtypeMatching__"] = False
self.vs[1]["MT_pre__attr1"] = """
return True
"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["MT_subtypes__"] = []
self.vs[1]["MT_dirty__"] = False
self.vs[1]["mm__"] = """MT_pre__Name"""
self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class Name() node
self.add_node()
self.vs[2]["MT_subtypeMatching__"] = False
self.vs[2]["MT_pre__attr1"] = """
return True
"""
self.vs[2]["MT_label__"] = """3"""
self.vs[2]["MT_subtypes__"] = []
self.vs[2]["MT_dirty__"] = False
self.vs[2]["mm__"] = """MT_pre__Name"""
self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class Name() node
self.add_node()
self.vs[3]["MT_subtypeMatching__"] = False
self.vs[3]["MT_pre__attr1"] = """
return True
"""
self.vs[3]["MT_label__"] = """4"""
self.vs[3]["MT_subtypes__"] = []
self.vs[3]["MT_dirty__"] = False
self.vs[3]["mm__"] = """MT_pre__Name"""
self.vs[3]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# Nodes that represent the match associations of the property.
# Nodes that represent the apply associations of the property.
# apply association Inst--channelNames-->Name node
self.add_node()
self.vs[4]["MT_subtypeMatching__"] = False
self.vs[4]["MT_pre__attr1"] = """
return attr_value == "channelNames"
"""
self.vs[4]["MT_label__"] = """5"""
self.vs[4]["MT_subtypes__"] = []
self.vs[4]["MT_dirty__"] = False
self.vs[4]["mm__"] = """MT_pre__directLink_T"""
self.vs[4]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'assoc4')
# apply association Inst--channelNames-->Name node
self.add_node()
self.vs[5]["MT_subtypeMatching__"] = False
self.vs[5]["MT_pre__attr1"] = """
return attr_value == "channelNames"
"""
self.vs[5]["MT_label__"] = """6"""
self.vs[5]["MT_subtypes__"] = []
self.vs[5]["MT_dirty__"] = False
self.vs[5]["mm__"] = """MT_pre__directLink_T"""
self.vs[5]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'assoc5')
# apply association Inst--channelNames-->Name node
self.add_node()
self.vs[6]["MT_subtypeMatching__"] = False
self.vs[6]["MT_pre__attr1"] = """
return attr_value == "channelNames"
"""
self.vs[6]["MT_label__"] = """7"""
self.vs[6]["MT_subtypes__"] = []
self.vs[6]["MT_dirty__"] = False
self.vs[6]["mm__"] = """MT_pre__directLink_T"""
self.vs[6]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'assoc6')
# Nodes that represent trace relations
# Add the edges
self.add_edges([
(0,4), # apply_class Inst() -> association channelNames
(4,1), # association channelNames -> apply_class Name()
(0,5), # apply_class Inst() -> association channelNames
(5,2), # association channelNames -> apply_class Name()
(0,6), # apply_class Inst() -> association channelNames
(6,3), # association channelNames -> apply_class Name()
])
# Add the attribute equations
self["equations"] = [((0,'pivot'),('constant','INST')), ]
def eval_attr11(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr12(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr13(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr14(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr15(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "channelNames"
def eval_attr16(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "channelNames"
def eval_attr17(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "channelNames"
def constraint(self, PreNode, graph):
"""
Executable constraint code.
@param PreNode: Function taking an integer as parameter
and returns the node corresponding to that label.
"""
#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
|
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
import bisect
import codecs
from PyQt4.QtCore import (QAbstractItemModel, QModelIndex, QString,
QVariant, Qt)
from PyQt4.QtCore import pyqtSignal as Signal
KEY, NODE = range(2)
class BranchNode(object):
def __init__(self, name, parent=None):
super(BranchNode, self).__init__()
self.name = name
self.parent = parent
self.children = []
def orderKey(self):
return self.name.lower()
def toString(self):
return self.name
def __len__(self):
return len(self.children)
def childAtRow(self, row):
assert 0 <= row < len(self.children)
return self.children[row][NODE]
def rowOfChild(self, child):
for i, item in enumerate(self.children):
if item[NODE] == child:
return i
return -1
def childWithKey(self, key):
if not self.children:
return None
# Causes a -3 deprecation warning. Solution will be to
# reimplement bisect_left and provide a key function.
i = bisect.bisect_left(self.children, (key, None))
if i < 0 or i >= len(self.children):
return None
if self.children[i][KEY] == key:
return self.children[i][NODE]
return None
def insertChild(self, child):
child.parent = self
bisect.insort(self.children, (child.orderKey(), child))
def hasLeaves(self):
if not self.children:
return False
        return isinstance(self.children[0][NODE], LeafNode)
class LeafNode(object):
def __init__(self, fields, parent=None):
super(LeafNode, self).__init__()
self.parent = parent
self.fields = fields
def orderKey(self):
return "\t".join(self.fields).lower()
def toString(self, separator="\t"):
return separator.join(self.fields)
def __len__(self):
return len(self.fields)
def asRecord(self):
record = []
branch = self.parent
while branch is not None:
record.insert(0, branch.toString())
branch = branch.parent
assert record and not record[0]
record = record[1:]
return record + self.fields
def field(self, column):
        assert 0 <= column < len(self.fields)
return self.fields[column]
class TreeOfTableModel(QAbstractItemModel):
def __init__(self, parent=None):
super(TreeOfTableModel, self).__init__(parent)
self.columns = 0
self.root = BranchNode("")
self.headers = []
def load(self, filename, nesting, separator):
assert nesting > 0
self.nesting = nesting
self.root = BranchNode("")
exception = None
fh = None
        try:
            fh = codecs.open(unicode(filename), "rU", "utf8")
            for line in fh:
                if not line:
                    continue
                self.addRecord(line.split(separator), False)
        except IOError as err:
            exception = err
        finally:
            if fh is not None:
                fh.close()
        self.reset()
        self.headers = []
        for i in range(self.columns):
            self.headers.append("Column #{0}".format(i))
if exception is not None:
raise exception
def addRecord(self, fields, callReset=True):
assert len(fields) > self.nesting
root = self.root
branch = None
for i in range(self.nesting):
key = fields[i].lower()
branch = root.childWithKey(key)
if branch is not None:
root = branch
else:
branch = BranchNode(fields[i])
root.insertChild(branch)
root = branch
assert branch is not None
items = fields[self.nesting:]
self.columns = max(self.columns, len(items))
branch.insertChild(LeafNode(items, branch))
if callReset:
self.reset()
def asRecord(self, index):
leaf = self.nodeFromIndex(index)
if leaf is not None and isinstance(leaf, LeafNode):
return leaf.asRecord()
return []
def rowCount(self, parent):
node = self.nodeFromIndex(parent)
if node is None or isinstance(node, LeafNode):
return 0
return len(node)
def columnCount(self, parent):
return self.columns
def data(self, index, role):
if role == Qt.TextAlignmentRole:
return QVariant(int(Qt.AlignTop|Qt.AlignLeft))
if role != Qt.DisplayRole:
return QVariant()
node = self.nodeFromIndex(index)
assert node is not None
if isinstance(node, BranchNode):
return (QVariant(node.toString())
if index.column() == 0 else QVariant(QString("")))
return QVariant(node.field(index.column()))
def headerData(self, section, orientation, role):
if (orientation == Qt.Horizontal and
role == Qt.DisplayRole):
            assert 0 <= section < len(self.headers)
return QVariant(self.headers[section])
return QVariant()
def index(self, row, column, parent):
assert self.root
branch = self.nodeFromIndex(parent)
assert branch is not None
return self.createIndex(row, column,
branch.childAtRow(row))
def parent(self, child):
node = self.nodeFromIndex(child)
if node is None:
return QModelIndex()
parent = node.parent
if parent is None:
return QModelIndex()
grandparent = parent.parent
if grandparent is None:
return QModelIndex()
row = grandparent.rowOfChild(parent)
assert row != -1
return self.createIndex(row, 0, parent)
def nodeFromIndex(self, index):
return (index.internalPointer()
if index.isValid() else self.root)
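
# A minimal usage sketch (assumed): loading a separator-delimited file into
# the model and showing it in a tree view. Filename, nesting depth, and
# separator are illustrative placeholders.
#
#     from PyQt4.QtGui import QTreeView
#     model = TreeOfTableModel()
#     model.load("stations.txt", nesting=2, separator="\t")
#     view = QTreeView()
#     view.setModel(model)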
|
""" This package has the class that allows for you to access a neo4j database using the py2neo library. """
__author__ = "Manoel Horta Ribeiro"
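
# A hedged usage sketch (assumed py2neo v4+ API; the URI, credentials, and
# query are illustrative placeholders, not part of this package):
#
#     from py2neo import Graph
#     graph = Graph("bolt://localhost:7687", auth=("neo4j", "password"))
#     names = graph.run("MATCH (p:Person) RETURN p.name LIMIT 5").data()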
|
from model import *
from sqlalchemy.orm import sessionmaker
from cassiopeia import riotapi
from cassiopeia.type.api.exception import APIError
import secret_keys
import time
import sys
def query_riot_api(function, *args):
    # Retry on transient API errors every 10 seconds; give up and return
    # None on a 404 so callers can skip missing resources.
    data = None
    while True:
        try:
            data = function(*args)
        except APIError as err:
            print(err)
            if err.error_code == 404:
                break
            time.sleep(10)
            continue
        break
    return data
riotapi.set_api_key(secret_keys.riotapikey)
riotapi.set_rate_limits((9, 10), (499, 600))
Session = sessionmaker()
Session.configure(bind=engine) # engine is from model
if not sys.argv[1:]:
regions = ['BR', 'EUNE', 'EUW', 'KR',
'LAN', 'LAS', 'NA', 'OCE', 'RU', 'TR']
else:
regions = sys.argv[1:]
for region in regions:
# Customize libraries to region
session = Session()
riotapi.set_region(region)
# Query summoner ids
print('Querying {region} Challenger Tier list'.format(**locals()))
challengers = query_riot_api(riotapi.get_challenger)
print('Querying {region} Master Tier list'.format(**locals()))
masters = query_riot_api(riotapi.get_master)
players = [entry.summoner for entry in (
challengers.entries + masters.entries)]
# Iterating over each summoner
count = 0
total_count = len(players)
for player in players:
count += 1
print('[{region}][{count}/{total_count}]'
' {player.name}'.format(**locals()))
# Save summoner id
db_summoner = Summoner(
region=region, summoner_id=player.id, summoner_name=player.name)
session.merge(db_summoner)
# Save summoner mastery
champion_masteries = query_riot_api(player.champion_masteries)
if champion_masteries is not None:
for champion, champion_mastery in champion_masteries.items():
db_champion = Champion(
champion_id=champion.id,
champion_name=champion.name)
db_champion_mastery = ChampionMastery(
summoner_region=region,
summoner_id=player.id,
champion_id=champion.id,
champion_points=champion_mastery.points,
highest_grade=champion_mastery.highest_grade,
last_play_time=champion_mastery.last_played)
session.merge(db_champion)
session.merge(db_champion_mastery)
session.commit()
# Saves the session to db
print("Saving {region} summoners' ids and masteries".format(region=region))
session.commit()
session.close()
|
"""configuration for jmoiron.net"""
class Config(object):
DEBUG = False
TESTING = False
DATABASE_URI = "mongodb://localhost:27017/"
DATABASE_NAME = "jmoiron"
SECRET_KEY = '\x10t\x98\xaeR:0\xc2\xea\x8frl b=\xde'
USE_LESS = True
class DevelopmentConfig(Config):
DEBUG = True
OFFLINE_MODE = True
#USE_LESS = False
class ProductionConfig(Config):
pass
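
# A minimal sketch of how these classes are typically consumed (assumed
# Flask-style usage; `app` and the import path are illustrative):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.config.from_object(DevelopmentConfig)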
|
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import pyodbc
import yaml
import pprint
from sqlalchemy import create_engine, Column, MetaData, Table, Index
from sqlalchemy import Integer, String, Text, Float, Boolean, BigInteger, Numeric, SmallInteger
import ConfigParser, os
fileLocation = os.path.dirname(os.path.realpath(__file__))
inifile=fileLocation+'/sdeloader.cfg'
config = ConfigParser.ConfigParser()
config.read(inifile)
destination=config.get('Database','destination')
sourcePath=config.get('Files','sourcePath')
print "connecting to DB"
engine = create_engine(destination)
connection = engine.connect()
metadata = MetaData()
print "Setting up Tables"
invGroups = Table('invGroups',metadata,
Column('groupID',Integer,primary_key=True, autoincrement=False),
Column('categoryID',Integer),
Column('groupName',String(100)),
Column('iconID',BigInteger),
Column('useBasePrice',Boolean),
Column('anchored',Boolean),
Column('anchorable',Boolean),
Column('fittableNonSingleton',Boolean),
Column('published',Boolean),
)
Index('invTypes_categoryid',invGroups.c.categoryID)
trnTranslations = Table('trnTranslations',metadata,
Column('tcID',Integer,primary_key=True,autoincrement=False),
Column('keyID',Integer,primary_key=True,autoincrement=False),
Column('languageID',String,primary_key=True,autoincrement=False),
Column('text',Text)
)
metadata.create_all(engine,checkfirst=True)
print "opening Yaml"
with open(sourcePath+'groupIDs.yaml','r') as yamlstream:
print "importing"
trans = connection.begin()
groupids=yaml.load(yamlstream,Loader=yaml.CSafeLoader)
print "Yaml Processed into memory"
for groupid in groupids:
connection.execute(invGroups.insert(),
groupID=groupid,
categoryID=groupids[groupid].get('categoryID',0),
groupName=groupids[groupid].get('name',{}).get('en','').decode('utf-8'),
iconID=groupids[groupid].get('iconID'),
useBasePrice=groupids[groupid].get('useBasePrice'),
anchored=groupids[groupid].get('anchored',0),
anchorable=groupids[groupid].get('anchorable',0),
fittableNonSingleton=groupids[groupid].get('fittableNonSingleton',0),
published=groupids[groupid].get('published',0))
        if 'name' in groupids[groupid]:
            for lang in groupids[groupid]['name']:
                connection.execute(trnTranslations.insert(),
                                   tcID=7, keyID=groupid, languageID=lang,
                                   text=groupids[groupid]['name'][lang].decode('utf-8'))
trans.commit()
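
# Optional sanity check after the import (illustrative; uses the old-style
# SQLAlchemy expression API to match the code above):
#     from sqlalchemy import select, func
#     print connection.execute(select([func.count()]).select_from(invGroups)).scalar()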
|
from setuptools import setup
setup(name="moheve",
version="0.1",
description="Mohawk authentication for Eve APIs",
keywords="python-eve mohawk moheve",
url="https://github.com/xander-wr/moheve/",
author="https://github.com/xander-wr/",
author_email="hello@xander.frl",
license="MIT",
packages=['moheve'],
install_requires=['mohawk'],
classifiers=[
'Framework :: Flask'
],
include_package_data=True,
zip_safe=False)
|
from core.config import Settings
from core.syslog import Syslog
from core import utils
import redis
# `self` does not exist at module level, so the original construction fails
# with a NameError. A minimal fix, assuming Settings can be instantiated
# directly here, is to build a module-level settings object first.
settings = Settings()
r = redis.StrictRedis(host=settings.redis.host,
                      port=int(settings.redis.port),
                      db=int(settings.redis.db))
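
# Illustrative connectivity check: StrictRedis.ping() returns True when the
# server is reachable.
#     r.ping()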
|
import piston
import tornado.httpserver
import tornado.web
import tornado.options
import tornado.ioloop
import tornado.gen
import tornado.httpclient
import urllib
import pony
class UserHandler(piston.BaseHandler):
    # Assumption: piston.BaseHandler dispatches read() as-is, so the
    # old-style tornado.gen engine decorator is needed for the yields
    # below to actually execute.
    @tornado.gen.engine
    def read(self, user_id):
        print "read UserHandler"
client = tornado.httpclient.AsyncHTTPClient()
for _ in range(3):
self.write("hhh%d\n" % _)
r = yield tornado.gen.Task(client.fetch,
"http://www.amazon.com")
print r
self.finish()
# return {"hah": "user"}
def post(self, *args, **kwargs):
print self.request_obj
print "FIni USERHandler"
class BookHandler(piston.BaseHandler):
def read(self):
self.write("haha")
self.finish()
route = [
(r"/user/(\w+)", UserHandler.allow("GET")),
(r"/book", BookHandler.allow("GET")),
]
settings = {
"autoreload": True,
"debug": True
}
def server_forever():
tornado.options.parse_command_line()
application = tornado.web.Application(route, **settings)
application.listen(8111)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
server_forever()
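
# With the server running (illustrative request against the /book route
# registered above):
#     curl http://localhost:8111/book   # -> "haha"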
|
import os
from unittest import TestCase
from nose.tools import ok_
from mock import Mock
from lamvery.actions.build import BuildAction
def default_args():
args = Mock()
args.conf_file = '.lamvery.yml'
args.dry_run = True
args.no_libs = False
args.single_file = False
return args
class BuildActionTestCase(TestCase):
def tearDown(self):
if os.path.exists('test.zip'):
os.remove('test.zip')
def test_action(self):
action = BuildAction(default_args())
action._config = Mock()
action._config.get_archive_name = Mock(return_value='test.zip')
action._config.get_function_filename = Mock(return_value='test.py')
action._config.generate_lambda_secret = Mock(return_value={})
action._config.get_exclude = Mock(return_value=[])
action._config.get_build_hooks = Mock(return_value={'pre': ['whoami'], 'post': ['whoami']})
action.action()
ok_(os.path.exists('test.zip'))
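        # Second pass: the same build with clean-build enabled should also
        # produce the archive.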
action = BuildAction(default_args())
action._config = Mock()
action._config.get_archive_name = Mock(return_value='test.zip')
action._config.get_function_filename = Mock(return_value='test.py')
action._config.generate_lambda_secret = Mock(return_value={})
action._config.get_exclude = Mock(return_value=[])
action._config.is_clean_build = Mock(return_value=True)
action._config.get_build_hooks = Mock(
return_value={
'pre': ['whoami'],
'post': ['whoami']})
action.action()
ok_(os.path.exists('test.zip'))
|